lang: ast, gapi, interfaces, parser: Print line numbers on error

This adds an initial implementation of printing line numbers on type
unification errors. It also attempts to print a visual position
indicator for most scenarios.

This patch was started by Felix Frank and finished by James Shubin.

Co-authored-by: Felix Frank <Felix.Frank.de@gmail.com>
This commit is contained in:
James Shubin
2025-02-25 20:15:02 -05:00
parent f754bbbf90
commit d7ecc72b41
20 changed files with 714 additions and 172 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -31,6 +31,7 @@ package ast
import (
"fmt"
"os"
"sort"
"strings"
"sync"
@@ -40,6 +41,7 @@ import (
"github.com/purpleidea/mgmt/lang/funcs/vars"
"github.com/purpleidea/mgmt/lang/interfaces"
"github.com/purpleidea/mgmt/lang/types"
"github.com/purpleidea/mgmt/util"
"github.com/purpleidea/mgmt/util/errwrap"
)
@@ -393,3 +395,156 @@ func lambdaScopeFeedback(scope *interfaces.Scope, logf func(format string, v ...
logf("$%s(...)", name)
}
}
// Textarea stores the coordinates of a statement or expression in the form of a
// starting line/column and ending line/column.
type Textarea struct {
	// debug represents if we're running in debug mode or not.
	debug bool

	// logf is a logger which should be used.
	logf func(format string, v ...interface{})

	// sf is the SourceFinder function implementation that maps a filename
	// to the source.
	sf interfaces.SourceFinderFunc

	// path is the full path/filename where this text area exists.
	path string

	// This data is zero-based. (Eg: first line of file is 0)
	startLine   int // first
	startColumn int // left
	endLine     int // last
	endColumn   int // right

	// isSet becomes true once Locate has recorded a position.
	isSet bool

	// Bug5819 works around issue https://github.com/golang/go/issues/5819
	Bug5819 interface{} // XXX: workaround
}
// Setup is used during AST initialization in order to store in each AST node
// the name of the source file from which it was generated.
func (obj *Textarea) Setup(data *interfaces.Data) {
	// Record where the code came from and how to read it back.
	obj.path = data.AbsFilename()
	obj.sf = data.SourceFinder

	// Plumb through the shared debug flag and logger.
	obj.debug = data.Debug
	obj.logf = data.Logf
}
// IsSet returns if the position was already set with Locate already. Callers
// can use this to avoid clobbering an existing position.
func (obj *Textarea) IsSet() bool {
	return obj.isSet
}
// Locate is used by the parser to store the token positions in AST nodes. The
// path will be filled during AST node initialization usually, because the
// parser does not know the name of the file it is processing. All coordinates
// are zero-based.
func (obj *Textarea) Locate(line int, col int, endline int, endcol int) {
	obj.startLine, obj.startColumn = line, col
	obj.endLine, obj.endColumn = endline, endcol
	obj.isSet = true // mark the position as recorded
}
// Pos returns the starting line/column of an AST node. The values are
// zero-based, exactly as stored by Locate.
func (obj *Textarea) Pos() (int, int) {
	return obj.startLine, obj.startColumn
}
// End returns the end line/column of an AST node. The values are zero-based,
// exactly as stored by Locate.
func (obj *Textarea) End() (int, int) {
	return obj.endLine, obj.endColumn
}
// Path returns the name of the source file that holds the code for an AST
// node. This is the full path that was stored by Setup.
func (obj *Textarea) Path() string {
	return obj.path
}
// Filename returns the printable filename that we'd like to display. It tries
// to return a relative version if possible.
func (obj *Textarea) Filename() string {
	if obj.path == "" {
		return "<unknown>" // TODO: should this be <stdin> ?
	}

	// Try to shorten to a path relative to the current working directory.
	cwd, _ := os.Getwd() // an error yields "", which simply won't match
	base := cwd + "/"    // the base path must look like a directory
	rel, err := util.RemoveBasePath(obj.path, base)
	if err != nil {
		return obj.path // fall back to the stored path
	}
	return rel
}
// Byline gives a succinct representation of the Textarea, but is useful only in
// debugging. In order to generate pretty error messages, see HighlightText.
func (obj *Textarea) Byline() string {
	// Positions are stored zero-based; editors and humans count from one.
	line1, col1 := obj.startLine+1, obj.startColumn+1
	line2, col2 := obj.endLine+1, obj.endColumn+1
	return fmt.Sprintf("%s @ %d:%d-%d:%d", obj.Filename(), line1, col1, line2, col2)
}
// highlightMarkedLine writes one source line followed by a marker line that
// points at the given zero-based column. The marker (eg: "^^^" or
// "^ from here ...") is prefixed with the source line's own leading
// whitespace plus padding spaces, so that it lines up under the column even
// when the line is indented with tabs.
func highlightMarkedLine(result *strings.Builder, line string, column int, marker string) {
	line = line + "\n"
	text := strings.TrimLeft(line, " \t")
	indent := strings.TrimSuffix(line, text)
	offset := len(indent)
	pad := column - offset
	if pad < 0 {
		pad = 0 // don't panic if the column sits inside the indentation
	}
	result.WriteString(line)
	result.WriteString(indent) // reuse the tabs/spaces so the marker aligns
	result.WriteString(strings.Repeat(" ", pad))
	result.WriteString(marker)
	result.WriteString("\n")
}

// HighlightText generates a generic description that just visually indicates
// part of the line described by a Textarea. If the coordinates that are passed
// span multiple lines, don't show those lines, but just a description of the
// area. If it can't generate a valid snippet, then it returns the empty string.
func (obj *Textarea) HighlightText() string {
	b, err := obj.sf(obj.path) // source finder!
	if err != nil {
		return ""
	}
	lines := strings.Split(string(b), "\n")

	// Bounds check: both stored line values are zero-based indexes into
	// lines. The previous check (len(lines) < obj.endLine-1) still allowed
	// lines[obj.startLine] or lines[obj.endLine] to be out of range, which
	// would panic below.
	if obj.startLine < 0 || obj.endLine >= len(lines) || obj.startLine > obj.endLine {
		return ""
	}

	result := &strings.Builder{}
	result.WriteString(obj.Byline())
	result.WriteString("\n--\n")

	if obj.startLine == obj.endLine {
		// Single line: underline the token span with carets.
		// TODO: add on the width of the second element as well
		width := obj.endColumn - obj.startColumn + 1
		if width < 1 {
			width = 1 // always show at least one caret; avoid Repeat panics
		}
		highlightMarkedLine(result, lines[obj.startLine], obj.startColumn, strings.Repeat("^", width))
		return result.String()
	}

	// Multiple lines: show only the first and last lines of the area.
	highlightMarkedLine(result, lines[obj.startLine], obj.startColumn, "^ from here ...")
	// NOTE(review): this marks startColumn on the *end* line, matching the
	// original behaviour; pointing at endColumn may be the real intent —
	// TODO confirm.
	highlightMarkedLine(result, lines[obj.endLine], obj.startColumn, "^ ... to here")
	return result.String()
}

View File

@@ -214,6 +214,7 @@ func (obj *GAPI) Cli(info *gapi.Info) (*gapi.Deploy, error) {
LexParser: parser.LexParse,
Downloader: downloader,
StrInterpolater: interpolate.StrInterpolate,
SourceFinder: os.ReadFile,
//Local: obj.Local, // TODO: do we need this?
//World: obj.World, // TODO: do we need this?

View File

@@ -230,6 +230,15 @@ type Data struct {
// cycles.
StrInterpolater func(string, *Pos, *Data) (Expr, error)
// SourceFinder is a function that returns the contents of a source file
// when requested by filename. This data is used to annotate error
// messages with some context from the source, and as a result is
// optional. This function is passed in this way so that the different
// consumers of this can use different methods to find the source. The
// three main users are: (1) normal GAPI CLI, before the bundle is
// created, (2) the main bundled execution, and (3) the tests.
SourceFinder SourceFinderFunc
//World engine.World // TODO: do we need this?
// Prefix provides a unique path prefix that we can namespace in. It is
@@ -244,6 +253,16 @@ type Data struct {
Logf func(format string, v ...interface{})
}
// AbsFilename returns the absolute filename path to the code this Data struct
// is running. This is used to pull out a filename for error messages.
func (obj *Data) AbsFilename() string {
	// TODO: is this correct? Do we want to check if Metadata is nil?
	if obj == nil { // for tests
		return ""
	}
	if obj.Metadata == nil { // for tests
		return ""
	}
	return obj.Base + obj.Metadata.Main
}
// Scope represents a mapping between a variables identifier and the
// corresponding expression it is bound to. Local scopes in this language exist
// and are formed by nesting within if statements. Child scopes can shadow
@@ -424,3 +443,39 @@ func EmptyOutput() *Output {
Edges: []*Edge{},
}
}
// PositionableNode is the interface implemented by AST nodes that store their
// code position. It is implemented by node types that embed Textarea.
type PositionableNode interface {
	// IsSet returns if the position was already set with Locate already.
	IsSet() bool

	// Locate sets the position in zero-based (start line, start column, end
	// line, end column) format.
	Locate(int, int, int, int)

	// Pos returns the zero-based start line and then start column position.
	Pos() (int, int)

	// End returns the zero-based end line and then end column position.
	End() (int, int)

	// String returns a friendly representation of the positions.
	String() string
}
// TextDisplayer is a graph node that is aware of its position in the source
// code, and can emit a textual representation of that part of the source.
type TextDisplayer interface {
	// Byline returns a simple version of the error location.
	Byline() string

	// HighlightText returns a textual representation of this definition
	// for this node in source.
	HighlightText() string
}
// SourceFinderFunc is the function signature used to return the contents of a
// source file when requested by filename. This data is used to annotate error
// messages with some context from the source, and as a result is optional.
// Implementations include os.ReadFile and in-memory maps used by the tests.
type SourceFinderFunc = func(string) ([]byte, error)

View File

@@ -39,8 +39,12 @@ import (
// solution. Those two types are symmetrical in that it doesn't matter which is
// used where, it only affects how we print out error messages.
type UnificationInvariant struct { // formerly the SamInvariant
// Expr is the expression we are determining the type for. This improves
// our error messages.
// Node is the AST node holding the expression. This improves our error
// messages.
Node Node
// Expr is the expression we are determining the type for. This is what
// we are unifying. This improves our error messages.
Expr Expr
// Expect is one of the two types to unify.
@@ -65,6 +69,7 @@ func GenericCheck(obj Expr, typ *types.Type) ([]*UnificationInvariant, error) {
invar := &UnificationInvariant{
Expr: obj,
Node: obj,
Expect: typ, // sam says not backwards
Actual: actual,
}

View File

@@ -155,6 +155,7 @@ func HilInterpolate(str string, pos *interfaces.Pos, data *interfaces.Data) (int
LexParser: data.LexParser,
Downloader: data.Downloader,
StrInterpolater: data.StrInterpolater,
SourceFinder: data.SourceFinder,
//World: data.World, // TODO: do we need this?
Prefix: data.Prefix,

View File

@@ -34,6 +34,7 @@ package interpolate
import (
"fmt"
"reflect"
"regexp"
"strings"
"testing"
@@ -228,6 +229,9 @@ func TestInterpolate0(t *testing.T) {
data := &interfaces.Data{
// TODO: add missing fields here if/when needed
StrInterpolater: StrInterpolate,
SourceFinder: func(string) ([]byte, error) {
return nil, fmt.Errorf("not implemented")
},
Debug: testing.Verbose(), // set via the -test.v flag to `go test`
Logf: func(format string, v ...interface{}) {
@@ -271,7 +275,7 @@ func TestInterpolate0(t *testing.T) {
StripPackageNames: true,
HidePrivateFields: true,
HideZeroValues: true,
//FieldExclusions: regexp.MustCompile(`^(data)$`),
FieldExclusions: regexp.MustCompile(`^(Textarea)$`),
//FieldFilter func(reflect.StructField, reflect.Value) bool
//HomePackage string
//Separator string
@@ -480,6 +484,9 @@ func TestInterpolateBasicStmt(t *testing.T) {
data := &interfaces.Data{
// TODO: add missing fields here if/when needed
StrInterpolater: StrInterpolate,
SourceFinder: func(string) ([]byte, error) {
return nil, fmt.Errorf("not implemented")
},
Debug: testing.Verbose(), // set via the -test.v flag to `go test`
Logf: func(format string, v ...interface{}) {
@@ -787,6 +794,9 @@ func TestInterpolateBasicExpr(t *testing.T) {
data := &interfaces.Data{
// TODO: add missing fields here if/when needed
StrInterpolater: StrInterpolate,
SourceFinder: func(string) ([]byte, error) {
return nil, fmt.Errorf("not implemented")
},
Debug: testing.Verbose(), // set via the -test.v flag to `go test`
Logf: func(format string, v ...interface{}) {

View File

@@ -181,11 +181,19 @@ func TestAstFunc1(t *testing.T) {
t.Logf("comment: %s\n", comment)
}
sources := map[string][]byte{}
sourceFinder := func(path string) ([]byte, error) {
if b, exists := sources[path]; exists {
return b, nil
}
return nil, os.ErrNotExist
}
// copy files out into the test temp directory
var testOutput []byte
var testConfig []byte
found := false
for _, file := range archive.Files {
sources["/"+file.Name] = file.Data // store!
if file.Name == "OUTPUT" {
testOutput = file.Data
found = true
@@ -247,6 +255,7 @@ func TestAstFunc1(t *testing.T) {
if strings.HasPrefix(expstr, magicError) {
errStr = strings.TrimPrefix(expstr, magicError)
expstr = errStr
t.Logf("errStr has length %d", len(errStr))
if strings.HasPrefix(expstr, magicErrorLexParse) {
errStr = strings.TrimPrefix(expstr, magicErrorLexParse)
@@ -398,6 +407,7 @@ func TestAstFunc1(t *testing.T) {
LexParser: parser.LexParse,
StrInterpolater: interpolate.StrInterpolate,
SourceFinder: sourceFinder,
Debug: testing.Verbose(), // set via the -test.v flag to `go test`
Logf: func(format string, v ...interface{}) {
@@ -675,11 +685,19 @@ func TestAstFunc2(t *testing.T) {
t.Logf("comment: %s\n", comment)
}
sources := map[string][]byte{}
sourceFinder := func(path string) ([]byte, error) {
if b, exists := sources[path]; exists {
return b, nil
}
return nil, os.ErrNotExist
}
// copy files out into the test temp directory
var testOutput []byte
var testConfig []byte
found := false
for _, file := range archive.Files {
sources["/"+file.Name] = file.Data // store!
if file.Name == "OUTPUT" {
testOutput = file.Data
found = true
@@ -939,6 +957,7 @@ func TestAstFunc2(t *testing.T) {
LexParser: parser.LexParse,
StrInterpolater: interpolate.StrInterpolate,
SourceFinder: sourceFinder,
Debug: testing.Verbose(), // set via the -test.v flag to `go test`
Logf: func(format string, v ...interface{}) {
@@ -1495,11 +1514,19 @@ func TestAstFunc3(t *testing.T) {
t.Logf("comment: %s\n", comment)
}
sources := map[string][]byte{}
sourceFinder := func(path string) ([]byte, error) {
if b, exists := sources[path]; exists {
return b, nil
}
return nil, os.ErrNotExist
}
// copy files out into the test temp directory
var testOutput []byte
var testConfig []byte
found := false
for _, file := range archive.Files {
sources["/"+file.Name] = file.Data // store!
if file.Name == "OUTPUT" {
testOutput = file.Data
found = true
@@ -1759,6 +1786,7 @@ func TestAstFunc3(t *testing.T) {
LexParser: parser.LexParse,
StrInterpolater: interpolate.StrInterpolate,
SourceFinder: sourceFinder,
Debug: testing.Verbose(), // set via the -test.v flag to `go test`
Logf: func(format string, v ...interface{}) {

View File

@@ -12,4 +12,4 @@ class c1($a, $b []str) {
}
}
-- OUTPUT --
# err: errUnify: unify error with: str("hello"): type error: list != str
# err: errUnify: type error: list != str: /main.mcl @ 4:1-4:25

View File

@@ -15,4 +15,4 @@ test ["x",] {
float32 => $b,
}
-- OUTPUT --
# err: errUnify: unify error with: topLevel(func() { <built-in:_operator> }): type error: int != float
# err: errUnify: type error: int != float: /main.mcl @ 10:6-10:22

View File

@@ -2,4 +2,4 @@
import "fmt"
test fmt.printf("%d%d", 42) {} # should not pass, missing second int
-- OUTPUT --
# err: errUnify: unify error with: call:fmt.printf(str("%d%d"), int(42)): type error: str != list
# err: errUnify: type error: str != list: /main.mcl @ 2:1-2:30

View File

@@ -3,4 +3,4 @@ test "t1" {
stringptr => 42, # int, not str
}
-- OUTPUT --
# err: errUnify: unify error with: int(42): type error: str != int
# err: errUnify: type error: str != int: /main.mcl @ 2:2-2:17

View File

@@ -7,4 +7,4 @@ test "test" {}
Test["${name}"] -> Test["test"] # must fail
-- OUTPUT --
# err: errUnify: unify error with: var(name): type error: str != list
# err: errUnify: type error: str != list: /main.mcl @ 6:1-6:15

View File

@@ -7,4 +7,4 @@ test "test" {}
Test["test"] -> Test["${name}"] # must fail
-- OUTPUT --
# err: errUnify: unify error with: var(name): type error: str != list
# err: errUnify: type error: str != list: /main.mcl @ 6:17-6:31

View File

@@ -5,4 +5,4 @@ $name = ["a", "bb", "ccc",]
test "${name}" {} # must fail
-- OUTPUT --
# err: errUnify: unify error with: var(name): type error: str != list
# err: errUnify: type error: str != list: /main.mcl @ 4:1-4:17

View File

@@ -189,6 +189,10 @@ func (obj *Lang) Init(ctx context.Context) error {
LexParser: parser.LexParse,
Downloader: nil, // XXX: is this used here?
StrInterpolater: interpolate.StrInterpolate,
SourceFinder: func(string) ([]byte, error) {
// We're running a bundle as part of a deploy.
return nil, fmt.Errorf("not implemented") // XXX: read from the fs?
},
//Local: obj.Local, // TODO: do we need this?
//World: obj.World, // TODO: do we need this?

View File

@@ -35,6 +35,7 @@ import (
"fmt"
"io"
"reflect"
"regexp"
"strings"
"testing"
@@ -45,8 +46,9 @@ import (
langUtil "github.com/purpleidea/mgmt/lang/util"
"github.com/purpleidea/mgmt/util"
"github.com/davecgh/go-spew/spew"
godiff "github.com/kylelemons/godebug/diff"
"github.com/kylelemons/godebug/pretty"
"github.com/sanity-io/litter"
)
func TestLexParse0(t *testing.T) {
@@ -2221,6 +2223,36 @@ func TestLexParse0(t *testing.T) {
}
// double check because DeepEqual is different since the func exists
lo := &litter.Options{
//Compact: false,
StripPackageNames: true,
HidePrivateFields: true,
HideZeroValues: true,
FieldExclusions: regexp.MustCompile(`^(Textarea)$`),
//FieldFilter func(reflect.StructField, reflect.Value) bool
//HomePackage string
//Separator string
DisablePointerReplacement: true,
}
// The litter package adds pointer comments everywhere,
// which make it not diff correctly. Clean them here!
pattern := regexp.MustCompile(`\ \/\/\ p[0-9]+$`) // the p0, p1 comments...
clean := func(s string) string {
lines := []string{}
for _, line := range strings.Split(s, "\n") {
s := pattern.ReplaceAllLiteralString(line, "")
lines = append(lines, s)
}
return strings.Join(lines, "\n")
}
lo1 := clean(lo.Sdump(exp))
lo2 := clean(lo.Sdump(xast))
if lo1 == lo2 { // simple diff
return
}
// more details, for tricky cases:
diffable := &pretty.Config{
Diffable: true,
@@ -2228,18 +2260,22 @@ func TestLexParse0(t *testing.T) {
//PrintStringers: false, // always false!
//PrintTextMarshalers: false,
SkipZeroFields: true,
//Formatter: map[reflect.Type]interface{}{
// reflect.TypeOf(ast.Textarea{}): func(x ast.Textarea) string {
// return ""
// },
//},
}
diff := diffable.Compare(exp, xast)
if diff == "" { // bonus
return
}
diff = godiff.Diff(lo1, lo2) // for printing
t.Errorf("test #%d: AST did not match expected", index)
// TODO: consider making our own recursive print function
t.Logf("test #%d: actual: \n\n%s\n", index, spew.Sdump(xast))
t.Logf("test #%d: expected: \n\n%s", index, spew.Sdump(exp))
t.Logf("test #%d: actual: \n\n%s\n", index, diffable.Sprint(xast))
t.Logf("test #%d: expected: \n\n%s", index, diffable.Sprint(exp))
t.Logf("test #%d: actual: \n\n%s\n", index, lo1)
t.Logf("test #%d: expected: \n\n%s", index, lo2)
t.Logf("test #%d: diff:\n%s", index, diff)
})
}

View File

@@ -58,12 +58,12 @@ func init() {
//err error // TODO: if we ever match ERROR in the parser
bool bool
str string
int int64 // this is the .int as seen in lexer.nex
float float64
bool bool
str string
int int64 // this is the .int as seen in lexer.nex
float float64
typ *types.Type
typ *types.Type
stmts []interfaces.Stmt
stmt interfaces.Stmt
@@ -166,54 +166,55 @@ prog:
$$.stmt = &ast.StmtProg{
Body: stmts,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
}
;
stmt:
COMMENT
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtComment{
Value: $1.str,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
| bind
{
posLast(yylex, yyDollar) // our pos
$$.stmt = $1.stmt
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
| panic
{
posLast(yylex, yyDollar) // our pos
$$.stmt = $1.stmt
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
| resource
{
posLast(yylex, yyDollar) // our pos
$$.stmt = $1.stmt
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
| edge
{
posLast(yylex, yyDollar) // our pos
$$.stmt = $1.stmt
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
| IF expr OPEN_CURLY prog CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtIf{
Condition: $2.expr,
ThenBranch: $4.stmt,
//ElseBranch: nil,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
| IF expr OPEN_CURLY prog CLOSE_CURLY ELSE OPEN_CURLY prog CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtIf{
Condition: $2.expr,
ThenBranch: $4.stmt,
ElseBranch: $8.stmt,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// this is the named version, iow, a user-defined function (statement)
// `func name() { <expr> }`
@@ -221,7 +222,6 @@ stmt:
// `func name(<arg>, <arg>) { <expr> }`
| FUNC_IDENTIFIER IDENTIFIER OPEN_PAREN args CLOSE_PAREN OPEN_CURLY expr CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtFunc{
Name: $2.str,
Func: &ast.ExprFunc{
@@ -230,11 +230,11 @@ stmt:
Body: $7.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// `func name(...) <type> { <expr> }`
| FUNC_IDENTIFIER IDENTIFIER OPEN_PAREN args CLOSE_PAREN type OPEN_CURLY expr CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
fn := &ast.ExprFunc{
Args: $4.args,
Return: $6.typ, // return type is known
@@ -271,191 +271,192 @@ stmt:
Func: fn,
Type: typ, // sam says add the type here instead...
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// `class name { <prog> }`
| CLASS_IDENTIFIER colon_identifier OPEN_CURLY prog CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtClass{
Name: $2.str,
Args: nil,
Body: $4.stmt,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// `class name(<arg>) { <prog> }`
// `class name(<arg>, <arg>) { <prog> }`
| CLASS_IDENTIFIER colon_identifier OPEN_PAREN args CLOSE_PAREN OPEN_CURLY prog CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtClass{
Name: $2.str,
Args: $4.args,
Body: $7.stmt,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// `include name`
| INCLUDE_IDENTIFIER dotted_identifier
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtInclude{
Name: $2.str,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// `include name(...)`
| INCLUDE_IDENTIFIER dotted_identifier OPEN_PAREN call_args CLOSE_PAREN
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtInclude{
Name: $2.str,
Args: $4.exprs,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// `include name as foo`
// TODO: should we support: `include name as *`
| INCLUDE_IDENTIFIER dotted_identifier AS_IDENTIFIER IDENTIFIER
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtInclude{
Name: $2.str,
Alias: $4.str,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// `include name(...) as foo`
// TODO: should we support: `include name(...) as *`
| INCLUDE_IDENTIFIER dotted_identifier OPEN_PAREN call_args CLOSE_PAREN AS_IDENTIFIER IDENTIFIER
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtInclude{
Name: $2.str,
Args: $4.exprs,
Alias: $7.str,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// `import "name"`
| IMPORT_IDENTIFIER STRING
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtImport{
Name: $2.str,
//Alias: "",
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// `import "name" as alias`
| IMPORT_IDENTIFIER STRING AS_IDENTIFIER IDENTIFIER
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtImport{
Name: $2.str,
Alias: $4.str,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// `import "name" as *`
| IMPORT_IDENTIFIER STRING AS_IDENTIFIER MULTIPLY
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtImport{
Name: $2.str,
Alias: $4.str,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
/*
// resource bind
| rbind
{
posLast(yylex, yyDollar) // our pos
$$.stmt = $1.stmt
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
*/
;
expr:
BOOL
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprBool{
V: $1.bool,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| STRING
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprStr{
V: $1.str,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| INTEGER
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprInt{
V: $1.int,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| FLOAT
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprFloat{
V: $1.float,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| list
{
posLast(yylex, yyDollar) // our pos
// TODO: list could be squashed in here directly...
$$.expr = $1.expr
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| map
{
posLast(yylex, yyDollar) // our pos
// TODO: map could be squashed in here directly...
$$.expr = $1.expr
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| struct
{
posLast(yylex, yyDollar) // our pos
// TODO: struct could be squashed in here directly...
$$.expr = $1.expr
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| call
{
posLast(yylex, yyDollar) // our pos
// TODO: call could be squashed in here directly...
$$.expr = $1.expr
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| var
{
posLast(yylex, yyDollar) // our pos
// TODO: var could be squashed in here directly...
$$.expr = $1.expr
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| func
{
posLast(yylex, yyDollar) // our pos
// TODO: var could be squashed in here directly...
$$.expr = $1.expr
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| IF expr OPEN_CURLY expr CLOSE_CURLY ELSE OPEN_CURLY expr CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprIf{
Condition: $2.expr,
ThenBranch: $4.expr,
ElseBranch: $8.expr,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
// parenthesis wrap an expression for precedence
| OPEN_PAREN expr CLOSE_PAREN
{
posLast(yylex, yyDollar) // our pos
$$.expr = $2.expr
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
;
list:
// `[42, 0, -13]`
OPEN_BRACK list_elements CLOSE_BRACK
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprList{
Elements: $2.exprs,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
;
list_elements:
@@ -473,18 +474,18 @@ list_elements:
list_element:
expr COMMA
{
posLast(yylex, yyDollar) // our pos
$$.expr = $1.expr
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
;
map:
// `{"hello" => "there", "world" => "big",}`
OPEN_CURLY map_kvs CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprMap{
KVs: $2.mapKVs,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
;
map_kvs:
@@ -513,7 +514,6 @@ struct:
// `struct{answer => 0, truth => false, hello => "world",}`
STRUCT_IDENTIFIER OPEN_CURLY struct_fields CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprStruct{
Fields: $3.structFields,
}
@@ -546,18 +546,17 @@ call:
// iter.map(...)
dotted_identifier OPEN_PAREN call_args CLOSE_PAREN
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: $1.str,
Args: $3.exprs,
//Var: false, // default
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
// calling a function that's stored in a variable (a lambda)
// `$foo(4, "hey")` # call function value
| dotted_var_identifier OPEN_PAREN call_args CLOSE_PAREN
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: $1.str,
Args: $3.exprs,
@@ -565,34 +564,34 @@ call:
// prefix to the Name, but I felt this was more elegant.
Var: true, // lambda
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
// calling an inline function
| func OPEN_PAREN call_args CLOSE_PAREN
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: "", // anonymous!
Args: $3.exprs,
Anon: $1.expr,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr PLUS expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
&ast.ExprStr{ // operator first
V: $2.str, // for PLUS this is a `+` character
&ast.ExprStr{ // operator first
V: $2.str, // for PLUS this is a `+` character
},
$1.expr,
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr MINUS expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -603,10 +602,10 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr MULTIPLY expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -617,10 +616,10 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr DIVIDE expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -631,10 +630,10 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr EQ expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -645,10 +644,10 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr NEQ expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -659,10 +658,10 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr LT expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -673,10 +672,10 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr GT expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -687,10 +686,10 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr LTE expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -701,10 +700,10 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr GTE expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -715,10 +714,10 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr AND expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -729,10 +728,10 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr OR expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -743,10 +742,10 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| NOT expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: operators.OperatorFuncName,
Args: []interfaces.Expr{
@@ -756,13 +755,13 @@ call:
$2.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
// lookup an index in a list or a key in a map
// lookup($foo, $key)
// `$foo[$key]` // no default specifier
| expr OPEN_BRACK expr CLOSE_BRACK
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: funcs.LookupFuncName,
Args: []interfaces.Expr{
@@ -771,13 +770,13 @@ call:
//$6.expr, // the default
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
// lookup an index in a list or a key in a map with a default
// lookup_default($foo, $key, $default)
// `$foo[$key] || "default"`
| expr OPEN_BRACK expr CLOSE_BRACK DEFAULT expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: funcs.LookupDefaultFuncName,
Args: []interfaces.Expr{
@@ -786,13 +785,13 @@ call:
$6.expr, // the default
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
// lookup a field in a struct
// _struct_lookup($foo, "field")
// $foo->field
| expr ARROW IDENTIFIER
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: funcs.StructLookupFuncName,
Args: []interfaces.Expr{
@@ -803,13 +802,13 @@ call:
//$5.expr, // the default
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
// lookup a field in a struct with a default
// _struct_lookup_optional($foo, "field", "default")
// $foo->field || "default"
| expr ARROW IDENTIFIER DEFAULT expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: funcs.StructLookupOptionalFuncName,
Args: []interfaces.Expr{
@@ -820,10 +819,10 @@ call:
$5.expr, // the default
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
| expr IN expr
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprCall{
Name: funcs.ContainsFuncName,
Args: []interfaces.Expr{
@@ -831,6 +830,7 @@ call:
$3.expr,
},
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
;
// list order gets us the position of the arg, but named params would work too!
@@ -856,10 +856,10 @@ call_args:
var:
	// A variable reference, eg: `$foo` or `$foo.bar`.
	dotted_var_identifier
	{
		posLast(yylex, yyDollar) // our pos
		exprVar := &ast.ExprVar{
			Name: $1.str,
		}
		$$.expr = exprVar
		locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
	}
func:
@@ -869,17 +869,16 @@ func:
// `func(<arg>, <arg>) { <expr> }`
FUNC_IDENTIFIER OPEN_PAREN args CLOSE_PAREN OPEN_CURLY expr CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprFunc{
Args: $3.args,
//Return: nil,
Body: $6.expr,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
// `func(...) <type> { <expr> }`
| FUNC_IDENTIFIER OPEN_PAREN args CLOSE_PAREN type OPEN_CURLY expr CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
$$.expr = &ast.ExprFunc{
Args: $3.args,
Return: $5.typ, // return type is known
@@ -909,6 +908,7 @@ func:
yylex.Error(fmt.Sprintf("%s: %+v", ErrParseSetType, err))
}
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
;
args:
@@ -949,17 +949,16 @@ bind:
// `$s = "hey"`
var_identifier EQUALS expr
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtBind{
Ident: $1.str,
Value: $3.expr,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// `$x bool = true`
// `$x int = if true { 42 } else { 13 }`
| var_identifier type EQUALS expr
{
posLast(yylex, yyDollar) // our pos
var expr interfaces.Expr = $4.expr
// XXX: We still need to do this for now it seems...
if err := expr.SetType($2.typ); err != nil {
@@ -971,6 +970,7 @@ bind:
Value: expr,
Type: $2.typ, // sam says add the type here instead...
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
;
panic:
@@ -981,7 +981,6 @@ panic:
//}
PANIC_IDENTIFIER OPEN_PAREN call_args CLOSE_PAREN
{
posLast(yylex, yyDollar) // our pos
call := &ast.ExprCall{
Name: $1.str, // the function name
Args: $3.exprs,
@@ -1000,6 +999,7 @@ panic:
ThenBranch: res,
//ElseBranch: nil,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
;
/* TODO: do we want to include this?
@@ -1007,7 +1007,6 @@ panic:
rbind:
var_identifier EQUALS resource
{
posLast(yylex, yyDollar) // our pos
// XXX: this kind of bind is different than the others, because
// it can only really be used for send->recv stuff, eg:
// foo.SomeString -> bar.SomeOtherString
@@ -1015,6 +1014,7 @@ rbind:
Ident: $1.str,
Value: $3.stmt,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.expr)
}
;
*/
@@ -1022,12 +1022,12 @@ resource:
// `file "/tmp/hello" { ... }` or `aws:ec2 "/tmp/hello" { ... }`
colon_identifier expr OPEN_CURLY resource_body CLOSE_CURLY
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtRes{
Kind: $1.str,
Name: $2.expr,
Contents: $4.resContents,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
;
resource_body:
@@ -1080,53 +1080,52 @@ resource_body:
resource_field:
	// A plain resource field, eg: `content => "hello",`.
	IDENTIFIER ROCKET expr COMMA
	{
		posLast(yylex, yyDollar) // our pos
		resField := &ast.StmtResField{
			Field: $1.str,
			Value: $3.expr,
		}
		$$.resField = resField
		locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.resField)
	}
;
conditional_resource_field:
	// A conditional resource field, eg: content => $present ?: "hello",
	IDENTIFIER ROCKET expr ELVIS expr COMMA
	{
		posLast(yylex, yyDollar) // our pos
		resField := &ast.StmtResField{
			Field:     $1.str,
			Value:     $5.expr, // value to use if condition is true
			Condition: $3.expr, // the elvis condition expression
		}
		$$.resField = resField
		locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.resField)
	}
;
resource_edge:
	// An internal edge specifier, eg: Before => Test["t1"],
	CAPITALIZED_IDENTIFIER ROCKET edge_half COMMA
	{
		posLast(yylex, yyDollar) // our pos
		resEdge := &ast.StmtResEdge{
			Property: $1.str,
			EdgeHalf: $3.edgeHalf,
		}
		$$.resEdge = resEdge
		locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.resEdge)
	}
;
conditional_resource_edge:
	// A conditional edge specifier, eg: Before => $present ?: Test["t1"],
	CAPITALIZED_IDENTIFIER ROCKET expr ELVIS edge_half COMMA
	{
		posLast(yylex, yyDollar) // our pos
		resEdge := &ast.StmtResEdge{
			Property:  $1.str,
			EdgeHalf:  $5.edgeHalf, // edge to use if condition is true
			Condition: $3.expr,     // the elvis condition expression
		}
		$$.resEdge = resEdge
		locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.resEdge)
	}
;
resource_meta:
// Meta:noop => true,
CAPITALIZED_IDENTIFIER COLON IDENTIFIER ROCKET expr COMMA
{
posLast(yylex, yyDollar) // our pos
if strings.ToLower($1.str) != strings.ToLower(ast.MetaField) {
// this will ultimately cause a parser error to occur...
yylex.Error(fmt.Sprintf("%s: %s", ErrParseResFieldInvalid, $1.str))
@@ -1135,6 +1134,7 @@ resource_meta:
Property: $3.str,
MetaExpr: $5.expr,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.resMeta)
}
;
conditional_resource_meta:
@@ -1151,13 +1151,13 @@ conditional_resource_meta:
MetaExpr: $7.expr,
Condition: $5.expr,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.resMeta)
}
;
resource_meta_struct:
// Meta => struct{meta => true, retry => 3,},
CAPITALIZED_IDENTIFIER ROCKET expr COMMA
{
posLast(yylex, yyDollar) // our pos
if strings.ToLower($1.str) != strings.ToLower(ast.MetaField) {
// this will ultimately cause a parser error to occur...
yylex.Error(fmt.Sprintf("%s: %s", ErrParseResFieldInvalid, $1.str))
@@ -1166,13 +1166,13 @@ resource_meta_struct:
Property: $1.str,
MetaExpr: $3.expr,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.resMeta)
}
;
conditional_resource_meta_struct:
// Meta => $present ?: struct{poll => 60, sema => ["foo:1", "bar:3",],},
CAPITALIZED_IDENTIFIER ROCKET expr ELVIS expr COMMA
{
posLast(yylex, yyDollar) // our pos
if strings.ToLower($1.str) != strings.ToLower(ast.MetaField) {
// this will ultimately cause a parser error to occur...
yylex.Error(fmt.Sprintf("%s: %s", ErrParseResFieldInvalid, $1.str))
@@ -1182,6 +1182,7 @@ conditional_resource_meta_struct:
MetaExpr: $5.expr,
Condition: $3.expr,
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.resMeta)
}
;
edge:
@@ -1190,16 +1191,15 @@ edge:
// Test["t1"] -> Test["t2"] -> Test["t3"] # chain or pair
edge_half_list
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtEdge{
EdgeHalfList: $1.edgeHalfList,
//Notify: false, // unused here
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
// Test["t1"].foo_send -> Test["t2"].blah_recv # send/recv
| edge_half_sendrecv ARROW edge_half_sendrecv
{
posLast(yylex, yyDollar) // our pos
$$.stmt = &ast.StmtEdge{
EdgeHalfList: []*ast.StmtEdgeHalf{
$1.edgeHalf,
@@ -1207,6 +1207,7 @@ edge:
},
//Notify: false, // unused here, it is implied (i think)
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.stmt)
}
;
edge_half_list:
@@ -1225,24 +1226,24 @@ edge_half:
// eg: Test["t1"]
capitalized_res_identifier OPEN_BRACK expr CLOSE_BRACK
{
posLast(yylex, yyDollar) // our pos
$$.edgeHalf = &ast.StmtEdgeHalf{
Kind: $1.str,
Name: $3.expr,
//SendRecv: "", // unused
}
locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.edgeHalf)
}
;
edge_half_sendrecv:
	// A send/recv edge half, eg: Test["t1"].foo_send
	capitalized_res_identifier OPEN_BRACK expr CLOSE_BRACK DOT IDENTIFIER
	{
		posLast(yylex, yyDollar) // our pos
		edgeHalf := &ast.StmtEdgeHalf{
			Kind:     $1.str,  // the resource kind
			Name:     $3.expr, // the resource name expression
			SendRecv: $6.str,  // the field to send from or recv into
		}
		$$.edgeHalf = edgeHalf
		locate(yylex, $1, yyDollar[len(yyDollar)-1], $$.edgeHalf)
	}
;
type:
@@ -1496,7 +1497,21 @@ func cast(y yyLexer) *lexParseAST {
return x.(*lexParseAST)
}
// posLast pulls out the "last token" and does a pos with that. This is a hack!
// locate should be called after creating AST nodes from lexer tokens to store
// the positions of the involved tokens in the AST node. The first and last
// tokens delimit the source span that the node covers. Nodes which don't
// implement interfaces.PositionableNode are silently skipped.
func locate(y yyLexer, first yySymType, last yySymType, node interface{}) {
	pos(y, last)
	pn, ok := node.(interfaces.PositionableNode)
	if !ok {
		return // not positionable, nothing to record
	}
	// Only run Locate on nodes that look like they have not received
	// locations yet otherwise the parser will come back and overwrite them
	// with invalid ending positions.
	if !pn.IsSet() {
		pn.Locate(first.row, first.col, last.row, last.col)
	}
}
// posLast runs pos on the last token of the current stmt/expr.
func posLast(y yyLexer, dollars []yySymType) {
// pick the last token in the set matched by the parser
pos(y, dollars[len(dollars)-1]) // our pos

View File

@@ -134,8 +134,15 @@ func (obj *FastInvariantSolver) Solve(ctx context.Context, data *unification.Dat
if err := unificationUtil.Unify(x.Expect, x.Actual); err != nil {
// Storing the Expr with this invariant is so that we
// can generate this more helpful error message here.
// TODO: Improve this error message!
return nil, errwrap.Wrapf(err, "unify error with: %s", x.Expr)
displayer, ok := x.Node.(interfaces.TextDisplayer)
if !ok {
obj.Logf("not displayable: %v\n", x.Node)
return nil, errwrap.Wrapf(err, "unify error with: %s", x.Expr)
}
if highlight := displayer.HighlightText(); highlight != "" {
obj.Logf("%s: %s", err.Error(), highlight)
}
return nil, fmt.Errorf("%s: %s", err.Error(), displayer.Byline())
}
if obj.Debug {
e1, e2 := unificationUtil.Extract(x.Expect), unificationUtil.Extract(x.Actual)

View File

@@ -859,6 +859,11 @@ func TestUnification1(t *testing.T) {
data := &interfaces.Data{
// TODO: add missing fields here if/when needed
Metadata: &interfaces.Metadata{},
SourceFinder: func(string) ([]byte, error) {
return nil, fmt.Errorf("not implemented")
},
Debug: testing.Verbose(), // set via the -test.v flag to `go test`
Logf: func(format string, v ...interface{}) {
t.Logf(fmt.Sprintf("test #%d", index)+": ast: "+format, v...)