Skip to content

Commit

Permalink
词法分析支持 a++
Browse files Browse the repository at this point in the history
  • Loading branch information
lollipopkit committed Oct 28, 2022
1 parent ed7cffb commit a94e98c
Show file tree
Hide file tree
Showing 7 changed files with 100 additions and 15 deletions.
17 changes: 7 additions & 10 deletions LANG.md
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,8 @@ shy tb = {
'b': 2,
}

shy a = 'b'
// `:=` 表示该变量为私有,等同于 `shy a = 'b'`
a := 'b'

print(tb[a]) // 2
print(tb.a) // 1
Expand All @@ -53,20 +54,16 @@ print(tb['a']) // 1
```js
a = 1
if true {
shy a = 3
a := 3
print(a) // 3
}
print(a) // 1
```
`shy` 表示该变量为局部变量,只在当前作用域内有效。
**尽量**使用 `shy` 关键字,这会提高程序运行速度(因为不需要全局寻找变量)。
`shy` & `:=` 表示该变量为局部变量,只在当前作用域内有效。
`a := 1` 等同于 `shy a = 1`
**尽量**将变量声明为私有,这会提高程序运行速度(因为不需要全局寻找变量)。
`LK` 中已经声明的变量可以再次声明,这时会在其作用域内覆盖原来的值。

```js
// 等同于 `shy a = 1`
a := 1
```
除了使用 `shy` 关键字,还可以使用 `:=` 进行声明。


```js
a, b = 1
Expand Down
32 changes: 32 additions & 0 deletions compiler/lexer/lexer.go
Original file line number Diff line number Diff line change
Expand Up @@ -98,18 +98,44 @@ func (self *Lexer) NextToken() (line, kind int, token string) {
self.next(1)
return self.line, TOKEN_SEP_RCURLY, "}"
case '+':
if self.test("++") {
self.next(2)
return self.line, TOKEN_OP_INC, "++"
} else if self.test("+=") {
self.next(2)
return self.line, TOKEN_OP_ADD_EQ, "+="
}
self.next(1)
return self.line, TOKEN_OP_ADD, "+"
case '-':
if self.test("--") {
self.next(2)
return self.line, TOKEN_OP_DEC, "--"
} else if self.test("-=") {
self.next(2)
return self.line, TOKEN_OP_MINUS_EQ, "-="
}
self.next(1)
return self.line, TOKEN_OP_MINUS, "-"
case '*':
if self.test("*=") {
self.next(2)
return self.line, TOKEN_OP_MUL_EQ, "*="
}
self.next(1)
return self.line, TOKEN_OP_MUL, "*"
case '^':
if self.test("^=") {
self.next(2)
return self.line, TOKEN_OP_POW_EQ, "^="
}
self.next(1)
return self.line, TOKEN_OP_POW, "^"
case '%':
if self.test("%=") {
self.next(2)
return self.line, TOKEN_OP_MOD_EQ, "%="
}
self.next(1)
return self.line, TOKEN_OP_MOD, "%"
case '&':
Expand All @@ -129,6 +155,10 @@ func (self *Lexer) NextToken() (line, kind int, token string) {
self.next(1)
return self.line, TOKEN_SEP_COLON, ":"
case '/':
if self.test("/=") {
self.next(2)
return self.line, TOKEN_OP_DIV_EQ, "/="
}
self.next(1)
return self.line, TOKEN_OP_DIV, "/"
case '~':
Expand All @@ -143,6 +173,8 @@ func (self *Lexer) NextToken() (line, kind int, token string) {
self.next(2)
return self.line, TOKEN_OP_NE, "!="
}
self.next(1)
return self.line, TOKEN_OP_NOT, "!"
case '=':
if self.test("==") {
self.next(2)
Expand Down
42 changes: 42 additions & 0 deletions compiler/lexer/token.go
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,42 @@ const (
TOKEN_OP_ASSIGNSHY
// =>
TOKEN_OP_ARROW
// -=
TOKEN_OP_MINUS_EQ
// +=
TOKEN_OP_ADD_EQ
// *=
TOKEN_OP_MUL_EQ
// /=
TOKEN_OP_DIV_EQ
// ^=
TOKEN_OP_POW_EQ
// %=
TOKEN_OP_MOD_EQ
// ++
TOKEN_OP_INC
// --
TOKEN_OP_DEC
)

// tokenOpEq maps each compound-assignment token ('+=', '-=', '*=', '/=',
// '^=', '%=') and the increment/decrement tokens ('++', '--') to the plain
// arithmetic operator token they desugar to (e.g. '+=' -> '+', '--' -> '-').
// Used by SourceOp when the parser rewrites these forms as ordinary
// binary expressions.
var tokenOpEq = map[int]int{
	TOKEN_OP_MINUS_EQ: TOKEN_OP_MINUS,
	TOKEN_OP_ADD_EQ:   TOKEN_OP_ADD,
	TOKEN_OP_MUL_EQ:   TOKEN_OP_MUL,
	TOKEN_OP_DIV_EQ:   TOKEN_OP_DIV,
	TOKEN_OP_POW_EQ:   TOKEN_OP_POW,
	TOKEN_OP_MOD_EQ:   TOKEN_OP_MOD,
	TOKEN_OP_INC:      TOKEN_OP_ADD,
	TOKEN_OP_DEC:      TOKEN_OP_MINUS,
}

// SourceOp returns the plain arithmetic operator token underlying a
// compound-assignment or increment/decrement token (e.g. '+=' yields '+',
// '--' yields '-'). Tokens with no such mapping are returned unchanged.
func SourceOp(op int) int {
	base, ok := tokenOpEq[op]
	if !ok {
		return op
	}
	return base
}

var tokenNames = map[int]string{
TOKEN_EOF: "EOF",
TOKEN_VARARG: "...",
Expand Down Expand Up @@ -124,6 +158,14 @@ var tokenNames = map[int]string{
TOKEN_OP_NILCOALESCING: "??",
TOKEN_OP_ASSIGNSHY: ":=",
TOKEN_OP_ARROW: "=>",
TOKEN_OP_MINUS_EQ: "-=",
TOKEN_OP_ADD_EQ: "+=",
TOKEN_OP_MUL_EQ: "*=",
TOKEN_OP_DIV_EQ: "/=",
TOKEN_OP_POW_EQ: "^=",
TOKEN_OP_MOD_EQ: "%=",
TOKEN_OP_INC: "++",
TOKEN_OP_DEC: "--",
}

func tokenName(token int) string {
Expand Down
4 changes: 3 additions & 1 deletion compiler/parser/parse_exp.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@ exp ::= nil | false | true | Numeral | LiteralString | ‘...’ | functiondef
prefixexp | tableconstructor | exp binop exp | unop exp
*/
/*
exp ::= exp14
exp ::= exp16
exp16 ::= exp15 {'++' | '--'}
exp15 ::= exp14 {('+=' | '-=' | '*=' | '/=' | '%=' | '^=') exp14}
exp14 ::= exp13 {'??' exp13}
exp13 ::= exp12 {'?' exp12 : exp12}
exp12 ::= exp11 {or exp11}
Expand Down
12 changes: 12 additions & 0 deletions compiler/parser/parse_stat.go
Original file line number Diff line number Diff line change
Expand Up @@ -238,6 +238,18 @@ func parseAssignStat(lexer *Lexer, var0 Exp) Stat {
}
return &LocalVarDeclStat{lexer.Line(), strExps, expList}
}
switch lexer.LookAhead() {
case TOKEN_OP_MINUS_EQ, TOKEN_OP_ADD_EQ,
TOKEN_OP_MUL_EQ, TOKEN_OP_DIV_EQ,
TOKEN_OP_MOD_EQ, TOKEN_OP_POW_EQ:
line, op, _ := lexer.NextToken()
expList := []Exp{&BinopExp{line, SourceOp(op), varList[0], parseExp(lexer)}}
return &AssignStat{line, varList, expList}
case TOKEN_OP_INC, TOKEN_OP_DEC:
line, op, _ := lexer.NextToken()
expList := []Exp{&BinopExp{line, SourceOp(op), varList[0], &IntegerExp{line, 1}}}
return &AssignStat{line, varList, expList}
}
lexer.NextTokenOfKind(TOKEN_OP_ASSIGN) // =
expList := parseExpList(lexer) // explist
lastLine := lexer.Line()
Expand Down
4 changes: 2 additions & 2 deletions scripts/test.lk
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ skip_files := {
}

for _, file in files {
if not strs.contains(file, '.lkc') {
if not table.contains(skip_files, file) {
if !strs.contains(file, '.lkc') {
if !table.contains(skip_files, file) {
print(fmt('====== %s ======', file))
do_file(dir + file)
}
Expand Down
4 changes: 2 additions & 2 deletions test/module.lk
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,10 @@ class test2 {
test2.const = 'const'

shy fn add(n) {
a += n
a = n + a
}

fn test2:add() {
add(self.b)
self.b += a
self.b = a + self.b
}

0 comments on commit a94e98c

Please sign in to comment.