From 44956b9a980d87c6ce9ee510c7ba32c68d222c47 Mon Sep 17 00:00:00 2001 From: Julia Ogris Date: Tue, 13 Sep 2022 00:08:58 +1000 Subject: [PATCH 1/2] parser: Factor out parseError function in tests Factor out parseError function in tests as it is repeated throughout the test code in variations. This is a preparatory clean-up step for more scoping tests. --- pkg/parser/parser_test.go | 33 ++++++++++++++++++++++----------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/pkg/parser/parser_test.go b/pkg/parser/parser_test.go index 32d1b869..bb476b0c 100644 --- a/pkg/parser/parser_test.go +++ b/pkg/parser/parser_test.go @@ -35,7 +35,7 @@ func TestParseDeclaration(t *testing.T) { want := strings.Join(wantSlice, "\n") + "\n" parser := New(input) got := parser.Parse() - assert.Equal(t, 0, len(parser.errors), "input: %s\nerrors:\n%s", input, parser.errorsString()) + assertNoParseError(t, parser, input) assert.Equal(t, want, got.String()) } } @@ -55,7 +55,7 @@ func TestEmptyProgram(t *testing.T) { for _, input := range tests { parser := New(input) got := parser.Parse() - assert.Equal(t, 0, len(parser.errors), "input: %s\nerrors:\n%s", input, parser.errorsString()) + assertNoParseError(t, parser, input) assert.Equal(t, "\n", got.String()) } } @@ -84,7 +84,7 @@ func TestParseDeclarationError(t *testing.T) { for input, err1 := range tests { parser := New(input) _ = parser.Parse() - assert.Equal(t, true, 1 <= len(parser.errors), "input: %s\nerrors:\n%s", input, parser.errorsString()) + assertParseError(t, parser, input) assert.Equal(t, err1, parser.errors[0].String(), "input: %s\nerrors:\n%s", input, parser.errorsString()) } } @@ -107,7 +107,7 @@ func TestFunctionCall(t *testing.T) { want := strings.Join(wantSlice, "\n") + "\n" parser := New(input) got := parser.Parse() - assert.Equal(t, 0, len(parser.errors), "input: %s\nerrors: %s", input, parser.errorsString()) + assertNoParseError(t, parser, input) assert.Equal(t, want, got.String()) } } @@ -137,7 +137,7 @@ func TestFunctionCallError(t *testing.T) { for input, err1 := range tests { parser := NewWithBuiltins(input, builtins) _ = parser.Parse() - assert.Equal(t, true, 1 <= len(parser.errors), "input: %s\nerrors:\n%s", input, parser.errorsString()) + assertParseError(t, parser, input) assert.Equal(t, err1, parser.errors[0].String(), "input: %s\nerrors:\n%s", input, parser.errorsString()) } } @@ -157,7 +157,7 @@ func TestBlock(t *testing.T) { want := strings.Join(wantSlice, "\n") + "\n" parser := New(input) got := parser.Parse() - assert.Equal(t, 0, len(parser.errors), "input: %s\nerrors: %#v", input, parser.errors) + assertNoParseError(t, parser, input) assert.Equal(t, want, got.String()) } } @@ -168,7 +168,7 @@ x := len "123" ` parser := New(input) got := parser.Parse() - assert.Equal(t, 0, len(parser.errors), "errors: %#v", parser.errors) + assertNoParseError(t, parser, input) want := ` x:NUM=len('123') `[1:] @@ -176,7 +176,7 @@ x:NUM=len('123') } func TestFuncDecl(t *testing.T) { - parser := New(` + input := ` c := add 1 2 func add:num n1:num n2:num if c > 10 @@ -189,9 +189,10 @@ on mousedown print c end end -`) +` + parser := New(input) _ = parser.Parse() - assert.Equal(t, 0, len(parser.errors), "errors: %#v", parser.errors) + assertNoParseError(t, parser, input) builtinCnt := len(builtins()) assert.Equal(t, builtinCnt+1, len(parser.funcs)) got := parser.funcs["add"] @@ -218,7 +219,7 @@ if x > 10 end` parser := New(input) got := parser.Parse() - assert.Equal(t, 2, len(parser.errors), "errors: %#v", parser.errors) + assertParseError(t, 
parser, input) assert.Equal(t, "line 2 column 1: unknown function 'move'", parser.errors[0].String()) assert.Equal(t, "line 3 column 1: unknown function 'line'", parser.errors[1].String()) want := ` @@ -227,3 +228,13 @@ print('x:', x:NUM) `[1:] assert.Equal(t, want, got.String()) } + +func assertParseError(t *testing.T, parser *Parser, input string) { + t.Helper() + assert.Equal(t, true, len(parser.errors) > 0, "expected parser errors, got none: input: %s\n", input) +} + +func assertNoParseError(t *testing.T, parser *Parser, input string) { + t.Helper() + assert.Equal(t, 0, len(parser.errors), "Unexpected parser error\n input: %s\nerrors:\n%s", input, parser.errorsString()) +} From 36c431121ffad4e09928e11d67bf0a1e58b6ddae Mon Sep 17 00:00:00 2001 From: Julia Ogris Date: Tue, 13 Sep 2022 00:23:58 +1000 Subject: [PATCH 2/2] parser: Add scoped typed analysis to declarations Add scoped typed analysis to declarations, ensuring that variables or parameters do not get redeclared in same block. Variables may be re-declared in inner scope of a new block with a new type which does not affect the outer scope. Add detailed tests for various scenarios. As the parser will also take care of type analysis this is a necessary addition and fixes a previous omission. --- pkg/parser/parser.go | 196 ++++++++++++++++++++--------------- pkg/parser/parser_literal.go | 18 ++-- pkg/parser/parser_test.go | 102 ++++++++++++++++++ pkg/parser/scope.go | 34 ++++++ 4 files changed, 260 insertions(+), 90 deletions(-) create mode 100644 pkg/parser/scope.go diff --git a/pkg/parser/parser.go b/pkg/parser/parser.go index e5d121a7..2b73c262 100644 --- a/pkg/parser/parser.go +++ b/pkg/parser/parser.go @@ -29,7 +29,6 @@ type Parser struct { tokens []*lexer.Token funcs map[string]*FuncDecl // all function declaration by name and index in tokens. - vars map[string]*Var // TODO: needs scoping in block statements; // all declared variables with type } // Error is an Evy parse error. 
@@ -48,10 +47,7 @@ func New(input string) *Parser { func NewWithBuiltins(input string, builtins map[string]*FuncDecl) *Parser { l := lexer.New(input) - p := &Parser{ - vars: map[string]*Var{}, - funcs: builtins, - } + p := &Parser{funcs: builtins} // Read all tokens, collect function declaration tokens by index // funcs temporarily holds FUNC token indices for further processing @@ -94,12 +90,12 @@ func builtins() map[string]*FuncDecl { } func (p *Parser) Parse() *Program { - return p.parseProgram() + return p.parseProgram(newScope()) } // function names matching `parsePROCUTION` align with production names // in grammar doc/syntax_grammar.md -func (p *Parser) parseProgram() *Program { +func (p *Parser) parseProgram(scope *scope) *Program { program := &Program{Statements: []Node{}} p.advanceTo(0) for p.cur.TokenType() != lexer.EOF { @@ -107,11 +103,11 @@ func (p *Parser) parseProgram() *Program { switch p.cur.TokenType() { case lexer.FUNC: - stmt = p.parseFunc() + stmt = p.parseFunc(scope) case lexer.ON: - stmt = p.parseEventHandler() + stmt = p.parseEventHandler(scope) default: - stmt = p.parseStatement() + stmt = p.parseStatement(scope) } if stmt != nil { program.Statements = append(program.Statements, stmt) @@ -120,26 +116,48 @@ func (p *Parser) parseProgram() *Program { return program } -func (p *Parser) parseFunc() Node { +func (p *Parser) parseFunc(scope *scope) Node { p.advance() // advance past FUNC tok := p.cur // function name + funcName := p.cur.Literal p.advancePastNL() // // advance past signature, already parsed into p.funcs earlier - block := p.parseBlock() + fd := p.funcs[funcName] + scope = newEnclosedScope(scope) + p.addParamsToScope(scope, fd) + block := p.parseBlock(scope) // parse to "end" if tok.TokenType() != lexer.IDENT { return nil } - fd := p.funcs[tok.Literal] if fd.Body != nil { - p.appendError("redeclaration of function '" + tok.Literal + "'") + p.appendError("redeclaration of function '" + funcName + "'") return nil } fd.Body = block return fd } -func (p *Parser) parseEventHandler() Node { +func (p *Parser) addParamsToScope(scope *scope, fd *FuncDecl) { + for _, param := range fd.Params { + if scope.inLocalScope(param.Name) { + p.appendErrorForToken("redeclaration of parameter '"+param.Name+"'", param.Token) + } + if _, ok := p.funcs[param.Name]; ok { + p.appendErrorForToken("invalid declaration of parameter '"+param.Name+"', already used as function name", param.Token) + } + scope.set(param.Name, param) + } + if fd.VariadicParam != nil { + param := fd.VariadicParam + if _, ok := p.funcs[param.Name]; ok { + p.appendErrorForToken("invalid declaration of parameter '"+param.Name+"', already used as function name", param.Token) + } + scope.set(param.Name, param) + } +} + +func (p *Parser) parseEventHandler(scope *scope) Node { p.advance() // advance past ON token e := &EventHandler{} if p.assertToken(lexer.IDENT) { @@ -148,11 +166,11 @@ func (p *Parser) parseEventHandler() Node { p.assertEOL() } p.advancePastNL() // advance past `on EVENT_NAME` - e.Body = p.parseBlock() + e.Body = p.parseBlock(scope) return e } -func (p *Parser) parseStatement() Node { +func (p *Parser) parseStatement(scope *scope) Node { switch p.cur.TokenType() { // empty statement case lexer.NL, lexer.EOF, lexer.COMMENT: @@ -161,35 +179,36 @@ func (p *Parser) parseStatement() Node { case lexer.IDENT: switch p.peek.Type { case lexer.ASSIGN, lexer.LBRACKET, lexer.DOT: - return p.parseAssignStatement() // TODO + return p.parseAssignStatement(scope) // TODO case lexer.COLON: - return 
p.parseTypedDeclStatement() + return p.parseTypedDeclStatement(scope) case lexer.DECLARE: - return p.parseInferredDeclStatement() + return p.parseInferredDeclStatement(scope) } if p.isFuncCall(p.cur) { - return p.parseFunCallStatement() + return p.parseFunCallStatement(scope) } p.appendError("unknown function '" + p.cur.Literal + "'") p.advancePastNL() return nil case lexer.RETURN: - return p.parseReturnStatment() // TODO + return p.parseReturnStatment(scope) // TODO case lexer.BREAK: return p.parseBreakStatment() // TODO case lexer.FOR: - return p.parseForStatment() // TODO + return p.parseForStatment(scope) // TODO case lexer.WHILE: - return p.parseWhileStatment() // TODO + return p.parseWhileStatment(scope) // TODO case lexer.IF: - return p.parseIfStatment() // TODO + return p.parseIfStatment(scope) // TODO } p.appendError("unexpected input " + p.cur.FormatDetails()) p.advancePastNL() return nil } -func (p *Parser) parseAssignStatement() Node { +func (p *Parser) parseAssignStatement(scope *scope) Node { + p.advancePastNL() return nil } @@ -209,17 +228,13 @@ func (p *Parser) parseFuncDeclSignature() *FuncDecl { p.appendErrorForToken("invalid return type: "+p.cur.FormatDetails(), fd.Token) } } - paramNames := map[string]bool{} for !p.isAtEOL() && p.cur.TokenType() != lexer.DOT3 { - decl := p.parseTypedDecl().(*Declaration) - name := decl.Var.Name - if paramNames[name] { - p.appendError("redeclaration of parameter '" + name + "'") - } - paramNames[name] = true + p.assertToken(lexer.IDENT) + decl := p.parseTypedDecl() fd.Params = append(fd.Params, decl.Var) } if p.cur.TokenType() == lexer.DOT3 { + p.advance() if len(fd.Params) == 1 { fd.VariadicParam = fd.Params[0] fd.Params = nil @@ -232,9 +247,10 @@ func (p *Parser) parseFuncDeclSignature() *FuncDecl { return fd } -func (p *Parser) parseTypedDeclStatement() Node { +func (p *Parser) parseTypedDeclStatement(scope *scope) Node { decl := p.parseTypedDecl() - if decl.Type().Name != ILLEGAL { + if decl.Type().Name != ILLEGAL && p.validateVar(scope, decl.Var, decl.Token) { + scope.set(decl.Var.Name, decl.Var) p.assertEOL() } p.advancePastNL() @@ -243,11 +259,12 @@ func (p *Parser) parseTypedDeclStatement() Node { // parseTypedDecl parses declarations like // `x:num` or `y:any[]{}` -func (p *Parser) parseTypedDecl() Node { - ident := p.cur.Literal +func (p *Parser) parseTypedDecl() *Declaration { + p.assertToken(lexer.IDENT) + varName := p.cur.Literal decl := &Declaration{ Token: p.cur, - Var: &Var{Token: p.cur, Name: ident}, + Var: &Var{Token: p.cur, Name: varName}, } p.advance() // advance past IDENT p.advance() // advance past `:` @@ -255,71 +272,85 @@ func (p *Parser) parseTypedDecl() Node { decl.Var.nType = v decl.Value = zeroValue(v.Name) if v == ILLEGAL_TYPE { - p.appendErrorForToken("invalid type declaration for '"+ident+"'", decl.Token) - } else { - p.vars[ident] = decl.Var + p.appendErrorForToken("invalid type declaration for '"+varName+"'", decl.Token) } return decl } +func (p *Parser) validateVar(scope *scope, v *Var, tok *lexer.Token) bool { + if scope.inLocalScope(v.Name) { // already declared in current scope + p.appendErrorForToken("redeclaration of '"+v.Name+"'", tok) + return false + } + if _, ok := p.funcs[v.Name]; ok { + p.appendErrorForToken("invalid declaration of '"+v.Name+"', already used as function name", tok) + return false + } + return true +} + func matchParen(t1, t2 lexer.TokenType) bool { return (t1 == lexer.LBRACKET && t2 == lexer.RBRACKET) || (t1 == lexer.LCURLY && t2 == lexer.RCURLY) } -func (p *Parser) 
parseInferredDeclStatement() Node { - ident := p.cur.Literal +func (p *Parser) parseInferredDeclStatement(scope *scope) Node { + p.assertToken(lexer.IDENT) + varName := p.cur.Literal decl := &Declaration{ Token: p.cur, - Var: &Var{Token: p.cur, Name: ident}, // , nType: &Type{Name: ILLEGAL}}, + Var: &Var{Token: p.cur, Name: varName}, } p.advance() // advance past IDENT p.advance() // advance past `:=` valToken := p.cur - val := p.parseTopLevelExpression() - if val == nil || val.Type() == nil || val.Type() == ILLEGAL_TYPE { - decl.Var.nType = ILLEGAL_TYPE - p.appendError("invalid inferred declaration for '" + ident + "'") - } else if val.Type() == NONE_TYPE { - decl.Var.nType = ILLEGAL_TYPE + val := p.parseTopLevelExpression(scope) + defer p.advancePastNL() + if val == nil || val.Type() == nil { + p.appendError("invalid inferred declaration for '" + varName + "'") + return nil + } + if val.Type() == NONE_TYPE { p.appendError("invalid declaration, function '" + valToken.Literal + "' has no return value") - } else { - decl.Value = val - decl.Var.nType = val.Type() - p.vars[ident] = decl.Var - p.assertEOL() + return nil } - p.advancePastNL() + decl.Var.nType = val.Type() + if !p.validateVar(scope, decl.Var, decl.Token) { + return nil + } + decl.Value = val + scope.set(varName, decl.Var) + p.assertEOL() return decl } -func (p *Parser) parseTopLevelExpression() Node { +func (p *Parser) parseTopLevelExpression(scope *scope) Node { tt := p.cur.TokenType() if tt == lexer.IDENT && p.isFuncCall(p.cur) { - return p.parseFuncCall() + return p.parseFuncCall(scope) } - return p.parseExpression() + return p.parseExpression(scope) } -func (p *Parser) parseExpression() Node { - return p.parseTerm() +func (p *Parser) parseExpression(scope *scope) Node { + return p.parseTerm(scope) } -func (p *Parser) parseTerm() Node { +func (p *Parser) parseTerm(scope *scope) Node { //TODO: UNARY_OP Term; composite literals; assignable; slice; type_assertion; "(" toplevel_expr ")" tt := p.cur.TokenType() if tt == lexer.IDENT { - ident := p.cur.Literal + varName := p.cur.Literal p.advance() - v, ok := p.vars[ident] + v, ok := scope.get(varName) if !ok { - p.appendError("unknown identifier '" + ident + "'") + p.appendError("unknown variable name '" + varName + "'") return nil } return v } if p.isLiteral() { - lit := p.parseLiteral() + lit := p.parseLiteral(scope) if lit == nil { return nil } @@ -337,19 +368,19 @@ func (p *Parser) isFuncCall(tok *lexer.Token) bool { return ok } -func (p *Parser) parseFunCallStatement() Node { - fc := p.parseFuncCall() +func (p *Parser) parseFunCallStatement(scope *scope) Node { + fc := p.parseFuncCall(scope) p.assertEOL() p.advancePastNL() return fc } -func (p *Parser) parseFuncCall() Node { +func (p *Parser) parseFuncCall(scope *scope) Node { funcToken := p.cur funcName := p.cur.Literal decl := p.funcs[funcName] p.advance() // advance past function name IDENT - args := p.parseTerms() + args := p.parseTerms(scope) p.assertArgTypes(decl, args) return &FunctionCall{ Name: funcName, @@ -383,10 +414,10 @@ func (p *Parser) assertArgTypes(decl *FuncDecl, args []Node) { } } -func (p *Parser) parseTerms() []Node { +func (p *Parser) parseTerms(scope *scope) []Node { var terms []Node for !p.isTermsEnd() { - term := p.parseTerm() + term := p.parseTerm(scope) if term != nil { terms = append(terms, term) } @@ -439,11 +470,11 @@ func (p *Parser) appendErrorForToken(message string, token *lexer.Token) { p.errors = append(p.errors, Error{message: message, token: token}) } -func (p *Parser) parseBlock() 
*BlockStatement { +func (p *Parser) parseBlock(scope *scope) *BlockStatement { tok := p.cur var stmts []Node for p.cur.TokenType() != lexer.END && p.cur.TokenType() != lexer.EOF { - stmt := p.parseStatement() + stmt := p.parseStatement(scope) if stmt != nil { stmts = append(stmts, stmt) } @@ -480,7 +511,7 @@ func (p *Parser) errorsString() string { } //TODO: implemented -func (p *Parser) parseReturnStatment() Node { +func (p *Parser) parseReturnStatment(scope *scope) Node { p.advancePastNL() return nil } @@ -492,22 +523,25 @@ func (p *Parser) parseBreakStatment() Node { } //TODO: implemented -func (p *Parser) parseForStatment() Node { +func (p *Parser) parseForStatment(scope *scope) Node { + scope = newEnclosedScope(scope) p.advancePastNL() - p.parseBlock() + p.parseBlock(scope) return nil } //TODO: implemented -func (p *Parser) parseWhileStatment() Node { +func (p *Parser) parseWhileStatment(scope *scope) Node { + scope = newEnclosedScope(scope) p.advancePastNL() - p.parseBlock() + p.parseBlock(scope) return nil } //TODO: implemented -func (p *Parser) parseIfStatment() Node { +func (p *Parser) parseIfStatment(scope *scope) Node { + scope = newEnclosedScope(scope) p.advancePastNL() - p.parseBlock() + p.parseBlock(scope) return nil } diff --git a/pkg/parser/parser_literal.go b/pkg/parser/parser_literal.go index 07dc6413..3243bf45 100644 --- a/pkg/parser/parser_literal.go +++ b/pkg/parser/parser_literal.go @@ -18,7 +18,7 @@ func (p *Parser) isLiteral() bool { return peek == lexer.LBRACKET || peek == lexer.LCURLY } -func (p *Parser) parseLiteral() Node { +func (p *Parser) parseLiteral(scope *scope) Node { tok := p.cur tt := tok.TokenType() switch tt { @@ -37,18 +37,18 @@ func (p *Parser) parseLiteral() Node { p.advance() return &Bool{Token: tok, Value: tt == lexer.TRUE} } - return p.parseCompositeLiteral() + return p.parseCompositeLiteral(scope) } -func (p *Parser) parseCompositeLiteral() Node { +func (p *Parser) parseCompositeLiteral(scope *scope) Node { tok := p.cur litType := p.parseLiteralType() switch litType.Name { case ARRAY: - elements := p.parseArrayElements(litType.Sub) + elements := p.parseArrayElements(scope, litType.Sub) return &ArrayLiteral{Token: tok, Elements: elements, nType: litType} case MAP: - pairs, order := p.parseMapPairs(litType.Sub) + pairs, order := p.parseMapPairs(scope, litType.Sub) return &MapLiteral{ Token: tok, Pairs: pairs, @@ -60,8 +60,8 @@ func (p *Parser) parseCompositeLiteral() Node { return nil } -func (p *Parser) parseArrayElements(t *Type) []Node { - terms := p.parseTerms() +func (p *Parser) parseArrayElements(scope *scope, t *Type) []Node { + terms := p.parseTerms(scope) tt := p.cur.TokenType() p.advance() if tt != lexer.RBRACKET { @@ -76,7 +76,7 @@ func (p *Parser) parseArrayElements(t *Type) []Node { return terms } -func (p *Parser) parseMapPairs(t *Type) (map[string]Node, []string) { +func (p *Parser) parseMapPairs(scope *scope, t *Type) (map[string]Node, []string) { pairs := map[string]Node{} var order []string for !p.isTermsEnd() { @@ -85,7 +85,7 @@ func (p *Parser) parseMapPairs(t *Type) (map[string]Node, []string) { p.advance() p.assertToken(lexer.COLON) p.advance() - val := p.parseTerm() + val := p.parseTerm(scope) if tt != lexer.IDENT { p.appendError("invalid map key '" + tt.Format() + "'") continue diff --git a/pkg/parser/parser_test.go b/pkg/parser/parser_test.go index bb476b0c..bd225601 100644 --- a/pkg/parser/parser_test.go +++ b/pkg/parser/parser_test.go @@ -207,6 +207,108 @@ end assert.Equal(t, 0, len(got.Body.Statements)) } +func 
TestScope(t *testing.T) { + inputs := []string{` +x := 1 +func foo + x := "abc" +end +`, ` +x := 1 +func foo x:string + x = "abc" +end +`, ` +x := 1 +func foo x:string... + print x +end +`, ` +x := 1 +if true + x := "abc" // block scope +end +`, + } + for _, input := range inputs { + parser := New(input) + _ = parser.Parse() + assertNoParseError(t, parser, input) + } +} + +func TestScopeErr(t *testing.T) { + inputs := map[string]string{ + ` +x := 1 +x := 2 +`: "line 3 column 1: redeclaration of 'x'", + ` +x := 1 +x := "abc" +`: "line 3 column 1: redeclaration of 'x'", + ` +x :num +x := "abc" +`: "line 3 column 1: redeclaration of 'x'", + ` +x := "abc" +x :num +`: "line 3 column 1: redeclaration of 'x'", + ` +x :num +x :num +`: "line 3 column 1: redeclaration of 'x'", + ` +x :num +x :string +`: "line 3 column 1: redeclaration of 'x'", + ` +x :num +func x + print "abc" +end +`: "line 2 column 1: invalid declaration of 'x', already used as function name", + ` +func x in:num + in:string +end +`: "line 3 column 4: redeclaration of 'in'", + ` +func foo + x := 0 + x := 0 +end +`: "line 4 column 4: redeclaration of 'x'", + ` +func x + x := 0 +end +`: "line 3 column 4: invalid declaration of 'x', already used as function name", + ` +func x in:string in:string + print in +end +`: "line 2 column 18: redeclaration of parameter 'in'", + ` +func x x:string + print x +end +`: "line 2 column 8: invalid declaration of parameter 'x', already used as function name", + ` +func x x:string... + print x +end +`: "line 2 column 8: invalid declaration of parameter 'x', already used as function name", + } + for input, wantErr := range inputs { + parser := New(input) + _ = parser.Parse() + assertParseError(t, parser, input) + assert.Equal(t, wantErr, parser.errors[0].String()) + } +} + func TestDemo(t *testing.T) { input := ` move 10 10 diff --git a/pkg/parser/scope.go b/pkg/parser/scope.go new file mode 100644 index 00000000..5e1b06df --- /dev/null +++ b/pkg/parser/scope.go @@ -0,0 +1,34 @@ +package parser + +type scope struct { + vars map[string]*Var + outer *scope +} + +func newScope() *scope { + return &scope{vars: map[string]*Var{}} +} + +func newEnclosedScope(outer *scope) *scope { + return &scope{vars: map[string]*Var{}, outer: outer} +} + +func (s *scope) inLocalScope(name string) bool { + _, ok := s.vars[name] + return ok +} + +func (s *scope) get(name string) (*Var, bool) { + if s == nil { + return nil, false + } + if v, ok := s.vars[name]; ok { + return v, ok + } + return s.outer.get(name) +} + +func (s *scope) set(name string, v *Var) *Var { + s.vars[name] = v + return v +}
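
---

Reviewer note (not part of the patches above): a minimal sketch of how the new `scope` chain in `pkg/parser/scope.go` behaves. It uses the unexported `scope` and `Var` types exactly as added in PATCH 2/2, so it would have to live inside `pkg/parser` (for example in a `_test.go` file); the function name `scopeSketch` is invented for illustration only.

```go
package parser

import "fmt"

// scopeSketch illustrates the lookup rules added in scope.go: names resolve
// innermost-first, an inner block may shadow an outer declaration, and
// inLocalScope is what validateVar relies on to reject a redeclaration
// within the same block.
func scopeSketch() {
	global := newScope()
	global.set("x", &Var{Name: "x"}) // x := 1 at the top level

	block := newEnclosedScope(global) // e.g. the body of `if true ... end`

	fmt.Println(block.inLocalScope("x")) // false: shadowing in the inner block is allowed
	block.set("x", &Var{Name: "x"})      // x := "abc" inside the block
	fmt.Println(block.inLocalScope("x")) // true: a second x here would be a redeclaration error

	v, ok := block.get("x") // resolves to the inner, shadowing x
	fmt.Println(v != nil, ok)

	_, ok = block.get("y") // walks out to global, then to the nil outer scope
	fmt.Println(ok)        // false: unknown variable name
}
```

This mirrors how the parser threads scopes in PATCH 2/2: `parseFunc`, `parseForStatment`, `parseWhileStatment`, and `parseIfStatment` wrap their block bodies in `newEnclosedScope`, so declarations made inside a block (as exercised by `TestScope`) never leak into, or conflict with, the outer scope.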