v: fix unary `+`,`-` & rename token.TokenKind to token.Kind

pull/3285/head
joe-conigliaro 2019-12-31 20:53:30 +11:00 committed by Alexander Medvednikov
parent 3a2491e97f
commit 80da2341aa
8 changed files with 153 additions and 135 deletions

View File

@@ -132,15 +132,15 @@ pub:
 pub struct Ident {
 pub:
     name string
-    tok_kind token.TokenKind
+    tok_kind token.Kind
     value string
 }

 pub struct BinaryExpr {
 pub:
-    // tok_kind token.TokenKind
+    // tok_kind token.Kind
     // op BinaryOp
-    op token.TokenKind
+    op token.Kind
     left Expr
     // left_type Type
     right Expr
@@ -149,15 +149,15 @@ pub:
 pub struct UnaryExpr {
 pub:
-    // tok_kind token.TokenKind
+    // tok_kind token.Kind
     // op BinaryOp
-    op token.TokenKind
+    op token.Kind
     left Expr
 }

 pub struct IfExpr {
 pub:
-    tok_kind token.TokenKind
+    tok_kind token.Kind
     cond Expr
     stmts []Stmt
     else_ []Stmt
@@ -170,7 +170,7 @@ pub:
 }

 pub struct ReturnStmt {
-    tok_kind token.TokenKind // or pos
+    tok_kind token.Kind // or pos
     results []Expr
 }
@@ -178,7 +178,7 @@ pub struct AssignStmt {
 pub:
     left Expr
     right Expr
-    op token.TokenKind
+    op token.Kind
 }

 pub struct ArrayInit {
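
Note (illustrative, not part of the commit): with the rename, AST nodes store plain token.Kind values, so the expression `-a` from the test below would be built roughly as:

    node := ast.UnaryExpr{
        op: token.Kind.minus // operator kind recorded by the parser
        left: ast.Ident{
            name: 'a'
        }
    }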

View File

@@ -112,8 +112,14 @@ fn (g mut Gen) expr(node ast.Expr) {
         g.write(it.val)
     }
     ast.UnaryExpr {
+        // probably not :D
+        if it.op in [.inc, .dec] {
             g.expr(it.left)
             g.write(it.op.str())
+        } else {
+            g.write(it.op.str())
+            g.expr(it.left)
+        }
     }
     ast.StringLiteral {
         g.write('tos3("$it.val")')
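
The new branch distinguishes postfix from prefix unary operators when emitting C: `.inc`/`.dec` keep their postfix spelling, any other unary operator (e.g. `-`, `+`, `!`) is written before its operand. A minimal sketch of the same rule as a standalone function (hypothetical helper, not the generator itself):

    // Renders a unary expression as C source text.
    fn emit_unary(op string, operand string) string {
        if op in ['++', '--'] {
            return operand + op // postfix: `a++`, `a--`
        }
        return op + operand // prefix: `-a`, `+a`, `!a`
    }

Here `emit_unary('-', 'a')` yields `-a`, matching the `int c = -a;` line in the expected C output below.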

View File

@@ -1,5 +1,7 @@
 int main() {
     int a = 10;
     a++;
+    int c = -a;
+    a == 1;
     return 0;
 }

View File

@@ -1,4 +1,6 @@
 fn main() {
     a := 10
     a++
+    c := -a
+    a == 1
 }
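
The two added lines mirror the expected C output above: `c := -a` exercises the prefix-unary path fixed in this commit, and `a == 1` exercises `==` parsed as a standalone expression statement.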

View File

@@ -138,7 +138,7 @@ fn (p mut Parser) next() {
     // println(p.tok.str())
 }

-fn (p mut Parser) check(expected token.TokenKind) {
+fn (p mut Parser) check(expected token.Kind) {
     if p.tok.kind != expected {
         s := 'syntax error: unexpected `${p.tok.kind.str()}`, expecting `${expected.str()}`'
         p.error(s)
@@ -290,7 +290,8 @@ pub fn (p mut Parser) expr(rbp int) (ast.Expr,types.Type) {
         field_name := p.check_name()
         field_names << field_name
         p.check(.colon)
-        expr,field_type := p.expr(0)
+        // expr,field_type := p.expr(0)
+        expr,_ := p.expr(0)
         exprs << expr
     }
     node = ast.StructInit{
@@ -329,15 +330,21 @@ pub fn (p mut Parser) expr(rbp int) (ast.Expr,types.Type) {
         node,typ = p.if_expr()
     }
     .lpar {
-        node,typ = p.expr(0)
+        p.check(.lpar)
+        p.next()
+        node,typ = p.expr(token.lowest_prec)
         p.check(.rpar)
     }
     else {
         if p.tok.is_unary() {
-            expr,_ := p.expr(token.highest_prec)
+            pt := p.tok
+            p.next()
+            expr,t2 := p.expr(token.lowest_prec)
             node = ast.UnaryExpr{
                 left: expr
+                op: pt.kind
             }
+            typ = t2
         }
         else {
             verror('!unknown token ' + p.tok.str())
@@ -373,8 +380,7 @@ pub fn (p mut Parser) expr(rbp int) (ast.Expr,types.Type) {
         }
     } else {
         mut expr := ast.Expr{}
-        expr,t2 = p.expr(prev_tok.precedence())
-        op := prev_tok.kind
+        expr,t2 = p.expr(prev_tok.precedence() - 1)
         if prev_tok.is_relational() {
             typ = types.bool_type
         }
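
The `expr(rbp)` function is a precedence-climbing (Pratt) parser: it reads a prefix expression, then keeps folding infix operators while their precedence exceeds the caller's right binding power `rbp`. Recursing with `prev_tok.precedence() - 1` lets an operator of equal precedence be consumed by the recursive call (right-associative grouping), whereas recursing with the full precedence keeps equal-precedence operators in the current loop (left-associative grouping). A simplified sketch of that shape (illustrative names, not the actual function):

    fn (p mut Parser) expr_sketch(rbp int) ast.Expr {
        mut left := p.prefix_expr() // number, name, unary op, `(`...
        for p.tok.precedence() > rbp {
            op := p.tok
            p.next()
            // `- 1` lets equal-precedence operators recurse (right-assoc);
            // passing the full precedence keeps them in this loop (left-assoc)
            right := p.expr_sketch(op.precedence() - 1)
            left = ast.BinaryExpr{
                op: op.kind
                left: left
                right: right
            }
        }
        return left
    }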

View File

@@ -75,9 +75,9 @@ pub fn new_scanner(text string) &Scanner {
     }
 }

-fn (s &Scanner) scan_res(tok token.TokenKind, lit string) token.Token {
+fn (s &Scanner) scan_res(tok_kind token.Kind, lit string) token.Token {
     return token.Token{
-        tok,lit,s.line_nr + 1}
+        tok_kind,lit,s.line_nr + 1}
 }

 fn (s mut Scanner) ident_name() string {

View File

@@ -10,7 +10,7 @@ import (
 fn test_scan() {
     text := 'println(2 + 3)'
     mut scanner := new_scanner(text)
-    mut token_kinds := []token.TokenKind
+    mut token_kinds := []token.Kind
     for {
         tok := scanner.scan()
         if tok.kind == .eof {
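
For the input `println(2 + 3)`, the loop should collect roughly [.name, .lpar, .number, .plus, .number, .rpar] before stopping at `.eof` (illustrative; the exact kinds depend on the scanner).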

View File

@@ -5,14 +5,14 @@ module token
 pub struct Token {
 pub:
-    kind TokenKind // the token number/enum; for quick comparisons
+    kind Kind // the token number/enum; for quick comparisons
     lit string // literal representation of the token
     line_nr int // the line number in the source where the token occured
     // name_idx int // name table index for O(1) lookup
     // pos int // the position of the token in scanner text
 }

-pub enum TokenKind {
+pub enum Kind {
     eof
     name // user
     number // 123
@@ -124,7 +124,7 @@ pub enum TokenKind {
 }

 const (
-    assign_tokens = [TokenKind.assign, .plus_assign, .minus_assign, .mult_assign,
+    assign_tokens = [Kind.assign, .plus_assign, .minus_assign, .mult_assign,
     .div_assign, .xor_assign, .mod_assign, .or_assign, .and_assign,
     .righ_shift_assign, .left_shift_assign]
     nr_tokens = 141
@@ -133,119 +133,119 @@ const (
 // Keywords['return'] == .key_return
 fn build_keys() map[string]int {
     mut res := map[string]int
-    for t := int(TokenKind.keyword_beg) + 1; t < int(TokenKind.keyword_end); t++ {
+    for t := int(Kind.keyword_beg) + 1; t < int(Kind.keyword_end); t++ {
         key := token_str[t]
         res[key] = t
     }
     return res
 }

-// TODO remove once we have `enum TokenKind { name('name') if('if') ... }`
+// TODO remove once we have `enum Kind { name('name') if('if') ... }`
 fn build_token_str() []string {
     mut s := [''].repeat(nr_tokens)
-    s[TokenKind.keyword_beg] = ''
+    s[Kind.keyword_beg] = ''
-    s[TokenKind.keyword_end] = ''
+    s[Kind.keyword_end] = ''
-    s[TokenKind.eof] = 'eof'
+    s[Kind.eof] = 'eof'
-    s[TokenKind.name] = 'name'
+    s[Kind.name] = 'name'
-    s[TokenKind.number] = 'number'
+    s[Kind.number] = 'number'
-    s[TokenKind.str] = 'STR'
+    s[Kind.str] = 'STR'
-    s[TokenKind.chartoken] = 'char'
+    s[Kind.chartoken] = 'char'
-    s[TokenKind.plus] = '+'
+    s[Kind.plus] = '+'
-    s[TokenKind.minus] = '-'
+    s[Kind.minus] = '-'
-    s[TokenKind.mul] = '*'
+    s[Kind.mul] = '*'
-    s[TokenKind.div] = '/'
+    s[Kind.div] = '/'
-    s[TokenKind.mod] = '%'
+    s[Kind.mod] = '%'
-    s[TokenKind.xor] = '^'
+    s[Kind.xor] = '^'
-    s[TokenKind.bit_not] = '~'
+    s[Kind.bit_not] = '~'
-    s[TokenKind.pipe] = '|'
+    s[Kind.pipe] = '|'
-    s[TokenKind.hash] = '#'
+    s[Kind.hash] = '#'
-    s[TokenKind.amp] = '&'
+    s[Kind.amp] = '&'
-    s[TokenKind.inc] = '++'
+    s[Kind.inc] = '++'
-    s[TokenKind.dec] = '--'
+    s[Kind.dec] = '--'
-    s[TokenKind.and] = '&&'
+    s[Kind.and] = '&&'
-    s[TokenKind.logical_or] = '||'
+    s[Kind.logical_or] = '||'
-    s[TokenKind.not] = '!'
+    s[Kind.not] = '!'
-    s[TokenKind.dot] = '.'
+    s[Kind.dot] = '.'
-    s[TokenKind.dotdot] = '..'
+    s[Kind.dotdot] = '..'
-    s[TokenKind.ellipsis] = '...'
+    s[Kind.ellipsis] = '...'
-    s[TokenKind.comma] = ','
+    s[Kind.comma] = ','
-    // s[TokenKind.at] = '@'
+    // s[Kind.at] = '@'
-    s[TokenKind.semicolon] = ';'
+    s[Kind.semicolon] = ';'
-    s[TokenKind.colon] = ':'
+    s[Kind.colon] = ':'
-    s[TokenKind.arrow] = '=>'
+    s[Kind.arrow] = '=>'
-    s[TokenKind.assign] = '='
+    s[Kind.assign] = '='
-    s[TokenKind.decl_assign] = ':='
+    s[Kind.decl_assign] = ':='
-    s[TokenKind.plus_assign] = '+='
+    s[Kind.plus_assign] = '+='
-    s[TokenKind.minus_assign] = '-='
+    s[Kind.minus_assign] = '-='
-    s[TokenKind.mult_assign] = '*='
+    s[Kind.mult_assign] = '*='
-    s[TokenKind.div_assign] = '/='
+    s[Kind.div_assign] = '/='
-    s[TokenKind.xor_assign] = '^='
+    s[Kind.xor_assign] = '^='
-    s[TokenKind.mod_assign] = '%='
+    s[Kind.mod_assign] = '%='
-    s[TokenKind.or_assign] = '|='
+    s[Kind.or_assign] = '|='
-    s[TokenKind.and_assign] = '&='
+    s[Kind.and_assign] = '&='
-    s[TokenKind.righ_shift_assign] = '>>='
+    s[Kind.righ_shift_assign] = '>>='
-    s[TokenKind.left_shift_assign] = '<<='
+    s[Kind.left_shift_assign] = '<<='
-    s[TokenKind.lcbr] = '{'
+    s[Kind.lcbr] = '{'
-    s[TokenKind.rcbr] = '}'
+    s[Kind.rcbr] = '}'
-    s[TokenKind.lpar] = '('
+    s[Kind.lpar] = '('
-    s[TokenKind.rpar] = ')'
+    s[Kind.rpar] = ')'
-    s[TokenKind.lsbr] = '['
+    s[Kind.lsbr] = '['
-    s[TokenKind.rsbr] = ']'
+    s[Kind.rsbr] = ']'
-    s[TokenKind.eq] = '=='
+    s[Kind.eq] = '=='
-    s[TokenKind.ne] = '!='
+    s[Kind.ne] = '!='
-    s[TokenKind.gt] = '>'
+    s[Kind.gt] = '>'
-    s[TokenKind.lt] = '<'
+    s[Kind.lt] = '<'
-    s[TokenKind.ge] = '>='
+    s[Kind.ge] = '>='
-    s[TokenKind.le] = '<='
+    s[Kind.le] = '<='
-    s[TokenKind.question] = '?'
+    s[Kind.question] = '?'
-    s[TokenKind.left_shift] = '<<'
+    s[Kind.left_shift] = '<<'
-    s[TokenKind.righ_shift] = '>>'
+    s[Kind.righ_shift] = '>>'
-    s[TokenKind.line_comment] = '// line comment'
+    s[Kind.line_comment] = '// line comment'
-    s[TokenKind.mline_comment] = '/* mline comment */'
+    s[Kind.mline_comment] = '/* mline comment */'
-    s[TokenKind.nl] = 'NLL'
+    s[Kind.nl] = 'NLL'
-    s[TokenKind.dollar] = '$'
+    s[Kind.dollar] = '$'
-    s[TokenKind.str_dollar] = '$2'
+    s[Kind.str_dollar] = '$2'
-    s[TokenKind.key_assert] = 'assert'
+    s[Kind.key_assert] = 'assert'
-    s[TokenKind.key_struct] = 'struct'
+    s[Kind.key_struct] = 'struct'
-    s[TokenKind.key_if] = 'if'
+    s[Kind.key_if] = 'if'
-    // s[TokenKind.key_it] = 'it'
+    // s[Kind.key_it] = 'it'
-    s[TokenKind.key_else] = 'else'
+    s[Kind.key_else] = 'else'
-    s[TokenKind.key_asm] = 'asm'
+    s[Kind.key_asm] = 'asm'
-    s[TokenKind.key_return] = 'return'
+    s[Kind.key_return] = 'return'
-    s[TokenKind.key_module] = 'module'
+    s[Kind.key_module] = 'module'
-    s[TokenKind.key_sizeof] = 'sizeof'
+    s[Kind.key_sizeof] = 'sizeof'
-    s[TokenKind.key_go] = 'go'
+    s[Kind.key_go] = 'go'
-    s[TokenKind.key_goto] = 'goto'
+    s[Kind.key_goto] = 'goto'
-    s[TokenKind.key_const] = 'const'
+    s[Kind.key_const] = 'const'
-    s[TokenKind.key_mut] = 'mut'
+    s[Kind.key_mut] = 'mut'
-    s[TokenKind.key_type] = 'type'
+    s[Kind.key_type] = 'type'
-    s[TokenKind.key_for] = 'for'
+    s[Kind.key_for] = 'for'
-    s[TokenKind.key_switch] = 'switch'
+    s[Kind.key_switch] = 'switch'
-    s[TokenKind.key_fn] = 'fn'
+    s[Kind.key_fn] = 'fn'
-    s[TokenKind.key_true] = 'true'
+    s[Kind.key_true] = 'true'
-    s[TokenKind.key_false] = 'false'
+    s[Kind.key_false] = 'false'
-    s[TokenKind.key_continue] = 'continue'
+    s[Kind.key_continue] = 'continue'
-    s[TokenKind.key_break] = 'break'
+    s[Kind.key_break] = 'break'
-    s[TokenKind.key_import] = 'import'
+    s[Kind.key_import] = 'import'
-    s[TokenKind.key_embed] = 'embed'
+    s[Kind.key_embed] = 'embed'
-    s[TokenKind.key_unsafe] = 'unsafe'
+    s[Kind.key_unsafe] = 'unsafe'
-    // TokenKinds[key_typeof] = 'typeof'
+    // Kinds[key_typeof] = 'typeof'
-    s[TokenKind.key_enum] = 'enum'
+    s[Kind.key_enum] = 'enum'
-    s[TokenKind.key_interface] = 'interface'
+    s[Kind.key_interface] = 'interface'
-    s[TokenKind.key_pub] = 'pub'
+    s[Kind.key_pub] = 'pub'
-    s[TokenKind.key_import_const] = 'import_const'
+    s[Kind.key_import_const] = 'import_const'
-    s[TokenKind.key_in] = 'in'
+    s[Kind.key_in] = 'in'
-    s[TokenKind.key_atomic] = 'atomic'
+    s[Kind.key_atomic] = 'atomic'
-    s[TokenKind.key_orelse] = 'or'
+    s[Kind.key_orelse] = 'or'
-    s[TokenKind.key_global] = '__global'
+    s[Kind.key_global] = '__global'
-    s[TokenKind.key_union] = 'union'
+    s[Kind.key_union] = 'union'
-    s[TokenKind.key_static] = 'static'
+    s[Kind.key_static] = 'static'
-    s[TokenKind.key_as] = 'as'
+    s[Kind.key_as] = 'as'
-    s[TokenKind.key_defer] = 'defer'
+    s[Kind.key_defer] = 'defer'
-    s[TokenKind.key_match] = 'match'
+    s[Kind.key_match] = 'match'
-    s[TokenKind.key_select] = 'select'
+    s[Kind.key_select] = 'select'
-    s[TokenKind.key_none] = 'none'
+    s[Kind.key_none] = 'none'
-    s[TokenKind.key_offsetof] = '__offsetof'
+    s[Kind.key_offsetof] = '__offsetof'
     return s
 }
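
build_token_str fills a plain string array indexed by the enum's ordinal value, so kind-to-string lookup is a single array access. For example (assuming the file's token_str table is initialized from build_token_str(), as build_keys() above relies on):

    println(token_str[int(Kind.key_return)]) // prints: return
    println(Kind.plus.str()) // prints: + (via the str() method below)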
@@ -254,8 +254,8 @@ const (
     keywords = build_keys()
 )

-pub fn key_to_token(key string) TokenKind {
+pub fn key_to_token(key string) Kind {
-    a := TokenKind(keywords[key])
+    a := Kind(keywords[key])
     return a
 }
@@ -263,7 +263,7 @@ pub fn is_key(key string) bool {
     return int(key_to_token(key)) > 0
 }

-pub fn is_decl(t TokenKind) bool {
+pub fn is_decl(t Kind) bool {
     return t in [.key_enum, .key_interface, .key_fn, .key_struct, .key_type, .key_const, .key_import_const, .key_pub, .eof]
 }
@@ -271,7 +271,7 @@ pub fn (t Token) is_assign() bool {
     return t.kind in assign_tokens
 }

-fn (t []TokenKind) contains(val TokenKind) bool {
+fn (t []Kind) contains(val Kind) bool {
     for tt in t {
         if tt == val {
             return true
@@ -280,7 +280,7 @@ fn (t []TokenKind) contains(val TokenKind) bool {
     return false
 }

-pub fn (t TokenKind) str() string {
+pub fn (t Kind) str() string {
     if t == .number {
         return 'number'
     }
@@ -368,6 +368,8 @@ pub fn (tok Token) is_left_assoc() bool {
     .mul, .div, .mod,
     // `^` | `||` | `&`
     .xor, .logical_or, .and,
+    // `==` | `!=`
+    .eq, .ne,
     // `<` | `<=` | `>` | `>=`
     .lt, .le, .gt, .ge,
     // `,`
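
With `.eq` and `.ne` marked left-associative, a comparison chain folds from the left, e.g. `a == b == c` groups as `(a == b) == c`.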