v: update parser / token & split parsing into methods

pull/3242/head
joe-conigliaro 2019-12-28 19:15:32 +11:00 committed by Alexander Medvednikov
parent f40f023487
commit a986321b6d
5 changed files with 316 additions and 279 deletions

View File

@@ -34,6 +34,22 @@ pub:
 	val string
 }

+// module declaration
+pub struct Module {
+pub:
+	name string
+	path string
+	expr Expr
+}
+
+// import statement
+pub struct Import {
+pub:
+	name string
+	expr Expr
+	// imports map[string]string
+}
+
 pub struct FnDecl {
 pub:
 	name string
@@ -80,15 +96,15 @@ pub:
 // A single identifier
 struct Ident {
-	token token.Token
+	tok_kind token.TokenKind
 	value string
 }

 pub struct BinaryExpr {
 pub:
-	token token.Token
+	tok_kind token.TokenKind
 	//op BinaryOp
-	op token.Token
+	op token.TokenKind
 	left Expr
 	//left_type Type
 	right Expr
@@ -97,21 +113,21 @@ pub:
 pub struct UnaryExpr {
 pub:
-	// token token.Token
+	// tok_kind token.TokenKind
 	//op BinaryOp
-	op token.Token
+	op token.TokenKind
 	left Expr
 }

 struct IfExpr {
-	token token.Token
+	tok_kind token.TokenKind
 	cond Expr
 	body []Stmt
 	else_ []Stmt
 }

 struct ReturnStmt {
-	token token.Token // or pos
+	tok_kind token.TokenKind // or pos
 	results []Expr
 }
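With the `token.Token` → `token.TokenKind` split, AST nodes now store only the enum value of the operator token. A hedged sketch of building the tree for `1 + 2` by hand, reusing the mut-then-assign pattern the parser itself uses for sum types (field names are from this diff; everything else is illustrative):

	mut left := ast.Expr{}
	left = ast.IntegerLiteral{val: 1}
	mut right := ast.Expr{}
	right = ast.IntegerLiteral{val: 2}
	mut sum := ast.Expr{}
	sum = ast.BinaryExpr{op: .plus, left: left, right: right}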

View File

@@ -14,22 +14,21 @@ import (
 struct Parser {
 	scanner &scanner.Scanner
 mut:
 	tok token.Token
-	lit string
+	peek_tok token.Token
 	//vars []string
 	table &table.Table
 	return_type types.Type
 }

 pub fn parse_expr(text string, table &table.Table) ast.Expr {
-	mut s := scanner.new_scanner(text)
-	res := s.scan()
+	s := scanner.new_scanner(text)
 	mut p := Parser{
 		scanner: s
-		tok: res.tok
-		lit: res.lit
 		table: table
 	}
+	p.next()
+	p.next()
 	expr,_ := p.expr(token.lowest_prec)
 	return expr
 }
@@ -38,14 +37,15 @@ pub fn (p mut Parser) get_type() types.Type {
 	defer {
 		p.next()
 	}
-	if p.lit == 'int' { return types.int_type }
-	else if p.lit == 'string' { return types.string_type }
-	else if p.lit == 'f64' { return types.f64_type }
-	else {
-		verror('bad type lit')
-		exit(1)
+	match p.tok.lit {
+		'int' { return types.int_type }
+		'f64' { return types.f64_type }
+		'string' { return types.string_type }
+		else {
+			verror('bad type lit')
+			exit(1)
+		}
 	}
 }

 pub fn parse_file(text string, table &table.Table) ast.Program {
@@ -53,14 +53,13 @@ pub fn parse_file(text string, table &table.Table) ast.Program {
 	mut exprs := []ast.Expr
 	mut p := Parser{
 		scanner: s
-		//tok: res.tok
-		//lit: res.lit
 		table: table
 	}
 	p.next()
+	p.next()
 	for {
 		//res := s.scan()
-		if p.tok == .eof {
+		if p.tok.kind == .eof {
 			break
 		}
 		//println('expr at ' + p.tok.str())
@@ -77,7 +76,7 @@ pub fn (p mut Parser) parse_block() []ast.Expr {
 	for {
 		//res := s.scan()
-		if p.tok == .eof || p.tok == .rcbr {
+		if p.tok.kind in [.eof, .rcbr] {
 			break
 		}
 		//println('expr at ' + p.tok.str())
@@ -105,22 +104,21 @@ pub fn parse_stmt(text string) ast.Stmt {
 fn (p mut Parser) next() {
-	res := p.scanner.scan()
-	p.tok = res.tok
+	p.tok = p.peek_tok
+	p.peek_tok = p.scanner.scan()
 	// println(p.tok.str())
-	p.lit = res.lit
 }

-fn (p mut Parser) check(expected token.Token) {
-	if p.tok != expected {
-		s := 'syntax error: unexpected `${p.tok.str()}`, expecting `${expected.str()}`'
+fn (p mut Parser) check(expected token.TokenKind) {
+	if p.tok.kind != expected {
+		s := 'syntax error: unexpected `${p.tok.kind.str()}`, expecting `${expected.str()}`'
 		verror(s)
 	}
 	p.next()
 }

 fn (p mut Parser) check_name() string {
-	name := p.lit
+	name := p.tok.lit
 	p.check(.name)
 	return name
 }
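The rewritten `next()` shifts the lookahead window rather than scanning directly, which is why `parse_expr()` and `parse_file()` now prime the parser with two `next()` calls. A sketch of the priming for the input `x := 1` (the trace values are illustrative, not part of the diff):

	p.next() // tok = zero-value token, peek_tok = `x`
	p.next() // tok = `x`,              peek_tok = `:=`
	// expr() can now see .name in tok and .decl_assign in peek_tok,
	// which is exactly the pattern that routes it to p.var_decl()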
@@ -128,139 +126,70 @@ fn (p mut Parser) check_name() string {
 // Implementation of Pratt Precedence
 pub fn (p mut Parser) expr(rbp int) (ast.Expr,types.Type) {
 	// null denotation (prefix)
-	tok := p.tok
-	lit := p.lit
-	if p.tok == .key_fn {
-		p.next()
-		name := p.lit
-		println('fn decl $name')
-		p.check(.name)
-		p.check(.lpar)
-		p.check(.rpar)
-		// Return type
-		mut typ := types.void_type
-		if p.tok == .name {
-			typ = p.get_type()
-			p.return_type = typ
-		}
-		p.check(.lcbr)
-		//p.check(.rcbr)
-		println('OK!')
-		exprs := p.parse_block()
-		mut node := ast.Expr{}
-		node = ast.FnDecl{name: name, exprs: exprs, typ: typ}
-		return node, types.void_type
-	}
-	else if p.tok == .key_return {
-		p.next()
-		mut node := ast.Expr{}
-		expr, typ := p.expr(0)
-		if !types.check(p.return_type, typ) {
-			verror('bad ret type')
-		}
-		node = ast.Return{expr: expr}
-		return node, types.void_type
-	}
-	else if p.tok == .name {
-		name := p.lit
-		p.next()
-		if p.tok == .decl_assign {
-			p.next()
-			mut node := ast.Expr{}
-			expr,t := p.expr(token.lowest_prec)
-			if name in p.table.names {
-				verror('redefinition of `$name`')
-			}
-			p.table.names << name
-			println(p.table.names)
-			println('added $name')
-			// TODO can't return VarDecl{}
-			node = ast.VarDecl{
-				name: name
-				expr: expr //p.expr(token.lowest_prec)
-				typ: t
-			} //, ast.void_type
-			return node, types.void_type
-		}
-	} else {
-		p.next()
-	}
 	mut node := ast.Expr{}
 	mut typ := types.void_type
-	match tok {
+	match p.tok.kind {
+		.key_module { return p.module_decl() }
+		.key_import { return p.import_stmt() }
+		.key_fn { return p.fn_decl() }
+		.key_return { return p.return_stmt() }
+		.name {
+			if p.peek_tok.kind == .decl_assign {
+				return p.var_decl()
+			}
+		}
+		.str { node, typ = p.parse_string_literal() }
+		.number { node, typ = p.parse_number_literal() }
 		.lpar {
 			node,typ = p.expr(0)
-			if p.tok != .rpar {
+			if p.tok.kind != .rpar {
 				panic('Parse Error: expected )')
 			}
+			p.next()
 		}
-		.str {
-			node = ast.StringLiteral{
-				val: lit
-			}
-			typ = types.string_type
-		}
-		.number {
-			if lit.contains('.') {
-				node = ast.FloatLiteral{
-					//val: lit.f64()
-					val: lit
-				}
-				typ = types.int_type
-			} else {
-				node = ast.IntegerLiteral{
-					val: lit.int()
-				}
-				typ = types.int_type
-			}
-		}
 		else {
-			// TODO: fix bug. note odd conditon instead of else if (same below)
-			if tok.is_unary() {
+			p.next()
+			if p.tok.is_unary() {
 				expr,_ := p.expr(token.highest_prec)
 				node = ast.UnaryExpr{
 					// left: p.expr(token.highest_prec)
 					left: expr
-					op: tok
+					op: p.tok.kind
 				}
 			}
 		}
 	}
 	// left binding power
 	for rbp < p.tok.precedence() {
-		tok2 := p.tok
+		prev_tok := p.tok
 		p.next()
 		mut t2 := types.Type{}
 		// left denotation (infix)
-		if tok2.is_right_assoc() {
+		if prev_tok.is_right_assoc() {
 			mut expr := ast.Expr{}
-			expr,t2 = p.expr(tok2.precedence() - 1)
+			expr,t2 = p.expr(prev_tok.precedence() - 1)
 			node = ast.BinaryExpr{
 				left: node
 				//left_type: t1
-				op: tok2
-				// right: p.expr(tok2.precedence() - 1)
+				op: prev_tok.kind
+				// right: p.expr(prev_tok.precedence() - 1)
 				right: expr
 			}
 			if !types.check(&typ, &t2) {
 				verror('cannot convert `$t2.name` to `$typ.name`')
 			}
 		}
-		if !tok2.is_right_assoc() && tok2.is_left_assoc() {
+		else if prev_tok.is_left_assoc() {
 			mut expr := ast.Expr{}
-			expr,t2 = p.expr(tok2.precedence())
+			expr,t2 = p.expr(prev_tok.precedence())
 			node = ast.BinaryExpr{
 				left: node
-				op: tok2
+				op: prev_tok.kind
 				right: expr
 			}
 		}
 	}
-	return node,typ
+	return node, typ
 }

 /*
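The binding-power loop above is easier to follow on a toy grammar. Below is a self-contained sketch in the same style, using the precedences this diff assigns to `+` and `*`; all names here (`Calc`, `prec`) are hypothetical and only single-digit operands are handled:

	fn prec(op byte) int {
		if op == `*` { return 7 }
		if op == `+` { return 5 }
		return 0 // like lowest_prec: stops the loop
	}

	struct Calc {
	mut:
		src string
		pos int
	}

	// mirrors expr(): parse a prefix (null denotation), then keep folding
	// infix operators while their precedence beats rbp
	fn (c mut Calc) expr(rbp int) int {
		mut left := int(c.src[c.pos] - `0`) // single decimal digit
		c.pos++
		for c.pos < c.src.len && rbp < prec(c.src[c.pos]) {
			op := c.src[c.pos]
			c.pos++
			right := c.expr(prec(op)) // left assoc: recurse at the operator's own precedence
			if op == `+` {
				left += right
			} else {
				left *= right
			}
		}
		return left
	}

	fn main() {
		mut c := Calc{src: '1+2*3'}
		println(c.expr(0)) // 7: `*` (prec 7) binds before `+` (prec 5)
	}

A right-associative operator would recurse at `prec(op) - 1` instead, exactly as the `is_right_assoc()` branch above does.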
@@ -290,6 +219,103 @@ fn (p mut Parser) stmt() ast.Stmt {
 }
 */

+fn (p mut Parser) parse_string_literal() (ast.Expr,types.Type) {
+	mut node := ast.Expr{}
+	node = ast.StringLiteral{
+		val: p.tok.lit
+	}
+	p.next()
+	return node, types.string_type
+}
+
+fn (p mut Parser) parse_number_literal() (ast.Expr,types.Type) {
+	lit := p.tok.lit
+	mut node := ast.Expr{}
+	mut typ := types.int_type
+	if lit.contains('.') {
+		node = ast.FloatLiteral{
+			//val: lit.f64()
+			val: lit
+		}
+		typ = types.int_type
+	} else {
+		node = ast.IntegerLiteral{
+			val: lit.int()
+		}
+		typ = types.int_type
+	}
+	p.next()
+	return node, typ
+}
+
+fn (p mut Parser) module_decl() (ast.Expr,types.Type) {
+	// p.check(.key_module)
+	p.next()
+	return ast.Expr{}, types.void_type
+}
+
+fn (p mut Parser) import_stmt() (ast.Expr,types.Type) {
+	// p.check(.key_import)
+	p.next()
+	return ast.Expr{}, types.void_type
+}
+
+fn (p mut Parser) fn_decl() (ast.Expr,types.Type) {
+	p.check(.key_fn)
+	name := p.tok.lit
+	println('fn decl $name')
+	p.check(.name)
+	p.check(.lpar)
+	p.check(.rpar)
+	// Return type
+	mut typ := types.void_type
+	if p.tok.kind == .name {
+		typ = p.get_type()
+		p.return_type = typ
+	}
+	p.check(.lcbr)
+	//p.check(.rcbr)
+	println('OK!')
+	exprs := p.parse_block()
+	mut node := ast.Expr{}
+	node = ast.FnDecl{name: name, exprs: exprs, typ: typ}
+	return node, types.void_type
+}
+
+fn (p mut Parser) return_stmt() (ast.Expr,types.Type) {
+	println('return st')
+	p.next()
+	expr, t := p.expr(0)
+	if !types.check(p.return_type, t) {
+		verror('bad ret type')
+	}
+	mut node := ast.Expr{}
+	node = ast.Return{expr: expr}
+	return node, types.void_type
+}
+
+fn (p mut Parser) var_decl() (ast.Expr,types.Type) {
+	name := p.tok.lit
+	p.next()
+	p.next()
+	expr,t := p.expr(token.lowest_prec)
+	if name in p.table.names {
+		verror('redefinition of `$name`')
+	}
+	p.table.names << name
+	println(p.table.names)
+	println('added $name')
+	mut node := ast.Expr{}
+	// TODO can't return VarDecl{}
+	node = ast.VarDecl{
+		name: name
+		expr: expr //p.expr(token.lowest_prec)
+		typ: t
+	} //, ast.void_type
+	return node, types.void_type
+}
+
 fn verror(s string) {
 	println(s)
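For orientation, a hypothetical driver for the two entry points. The zero-value `&table.Table{}` and the `Program.exprs` field are assumptions for illustration; they are not shown in this diff:

	fn main() {
		tbl := &table.Table{}
		// a whole file: declarations dispatched via the match in expr()
		prog := parse_file('fn add() int { return 1 + 2 }', tbl)
		println(prog.exprs.len)
		// a single expression, starting from token.lowest_prec
		_ := parse_expr('2 + 3 * 4', tbl)
	}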

View File

@@ -75,15 +75,8 @@ pub fn new_scanner(text string) &Scanner {
 	}
 }

-// TODO remove once multiple return values are implemented
-pub struct ScanRes {
-pub:
-	tok token.Token
-	lit string
-}
-
-fn scan_res(tok token.Token, lit string) ScanRes {
-	return ScanRes{
+fn scan_res(tok token.TokenKind, lit string) token.Token {
+	return token.Token{
 		tok,lit}
 }
@@ -220,13 +213,13 @@ fn (s mut Scanner) skip_whitespace() {
 	}
 }

-fn (s mut Scanner) end_of_file() ScanRes {
+fn (s mut Scanner) end_of_file() token.Token {
 	s.pos = s.text.len
 	s.inc_line_number()
 	return scan_res(.eof, '')
 }

-pub fn (s mut Scanner) scan() ScanRes {
+pub fn (s mut Scanner) scan() token.Token {
 	// if s.line_comment != '' {
 	// s.fgenln('// LC "$s.line_comment"')
 	// s.line_comment = ''
@@ -796,17 +789,17 @@ fn (s mut Scanner) debug_tokens() {
 	fname := s.file_path.all_after(os.path_separator)
 	println('\n===DEBUG TOKENS $fname===')
 	for {
-		res := s.scan()
-		tok := res.tok
-		lit := res.lit
-		print(tok.str())
+		tok := s.scan()
+		tok_kind := tok.kind
+		lit := tok.lit
+		print(tok_kind.str())
 		if lit != '' {
 			println(' `$lit`')
 		}
 		else {
 			println('')
 		}
-		if tok == .eof {
+		if tok_kind == .eof {
 			println('============ END OF DEBUG TOKENS ==================')
 			break
 		}

View File

@@ -10,21 +10,21 @@ import (
 fn test_scan() {
 	text := 'println(2 + 3)'
 	mut scanner := new_scanner(text)
-	mut tokens := []token.Token
+	mut token_kinds := []token.TokenKind
 	for {
-		res := scanner.scan()
-		if res.tok == .eof {
+		tok := scanner.scan()
+		if tok.kind == .eof {
 			break
 		}
-		tokens << res.tok
+		token_kinds << tok.kind
 	}
-	assert tokens.len == 6
-	assert tokens[0] == .name
-	assert tokens[1] == .lpar
-	assert tokens[2] == .number
-	assert tokens[3] == .plus
-	assert tokens[4] == .number
-	assert tokens[5] == .rpar
+	assert token_kinds.len == 6
+	assert token_kinds[0] == .name
+	assert token_kinds[1] == .lpar
+	assert token_kinds[2] == .number
+	assert token_kinds[3] == .plus
+	assert token_kinds[4] == .number
+	assert token_kinds[5] == .rpar
 }

View File

@@ -3,17 +3,16 @@
 // that can be found in the LICENSE file.
 module token

-/*
-struct Token {
-	tok TokenKind // the token number/enum; for quick comparisons
+pub struct Token {
+pub:
+	kind TokenKind // the token number/enum; for quick comparisons
 	lit string // literal representation of the token
-	line_nr int // the line number in the source where the token occurred
-	//name_idx int // name table index for O(1) lookup
-	pos int // the position of the token in scanner text
+	// line_nr int // the line number in the source where the token occurred
+	// name_idx int // name table index for O(1) lookup
+	// pos int // the position of the token in scanner text
 }
-*/

-pub enum Token {
+pub enum TokenKind {
 	eof
 	name // user
 	number // 123
@@ -125,7 +124,7 @@ pub enum Token {
 }

 const (
-	assign_tokens = [Token.assign, .plus_assign, .minus_assign, .mult_assign,
+	assign_tokens = [TokenKind.assign, .plus_assign, .minus_assign, .mult_assign,
 	.div_assign, .xor_assign, .mod_assign, .or_assign, .and_assign,
 	.righ_shift_assign, .left_shift_assign]
@@ -137,119 +136,119 @@ const (
 // Keywords['return'] == .key_return
 fn build_keys() map[string]int {
 	mut res := map[string]int
-	for t := int(Token.keyword_beg) + 1; t < int(Token.keyword_end); t++ {
+	for t := int(TokenKind.keyword_beg) + 1; t < int(TokenKind.keyword_end); t++ {
 		key := token_str[t]
 		res[key] = t
 	}
 	return res
 }

-// TODO remove once we have `enum Token { name('name') if('if') ... }`
+// TODO remove once we have `enum TokenKind { name('name') if('if') ... }`
 fn build_token_str() []string {
 	mut s := [''].repeat(nr_tokens)
-	s[Token.keyword_beg] = ''
-	s[Token.keyword_end] = ''
-	s[Token.eof] = 'eof'
-	s[Token.name] = 'name'
-	s[Token.number] = 'number'
-	s[Token.str] = 'STR'
-	s[Token.chartoken] = 'char'
-	s[Token.plus] = '+'
-	s[Token.minus] = '-'
-	s[Token.mul] = '*'
-	s[Token.div] = '/'
-	s[Token.mod] = '%'
-	s[Token.xor] = '^'
-	s[Token.bit_not] = '~'
-	s[Token.pipe] = '|'
-	s[Token.hash] = '#'
-	s[Token.amp] = '&'
-	s[Token.inc] = '++'
-	s[Token.dec] = '--'
-	s[Token.and] = '&&'
-	s[Token.logical_or] = '||'
-	s[Token.not] = '!'
-	s[Token.dot] = '.'
-	s[Token.dotdot] = '..'
-	s[Token.ellipsis] = '...'
-	s[Token.comma] = ','
-	// s[Token.at] = '@'
-	s[Token.semicolon] = ';'
-	s[Token.colon] = ':'
-	s[Token.arrow] = '=>'
-	s[Token.assign] = '='
-	s[Token.decl_assign] = ':='
-	s[Token.plus_assign] = '+='
-	s[Token.minus_assign] = '-='
-	s[Token.mult_assign] = '*='
-	s[Token.div_assign] = '/='
-	s[Token.xor_assign] = '^='
-	s[Token.mod_assign] = '%='
-	s[Token.or_assign] = '|='
-	s[Token.and_assign] = '&='
-	s[Token.righ_shift_assign] = '>>='
-	s[Token.left_shift_assign] = '<<='
-	s[Token.lcbr] = '{'
-	s[Token.rcbr] = '}'
-	s[Token.lpar] = '('
-	s[Token.rpar] = ')'
-	s[Token.lsbr] = '['
-	s[Token.rsbr] = ']'
-	s[Token.eq] = '=='
-	s[Token.ne] = '!='
-	s[Token.gt] = '>'
-	s[Token.lt] = '<'
-	s[Token.ge] = '>='
-	s[Token.le] = '<='
-	s[Token.question] = '?'
-	s[Token.left_shift] = '<<'
-	s[Token.righ_shift] = '>>'
-	s[Token.line_comment] = '// line comment'
-	s[Token.mline_comment] = '/* mline comment */'
-	s[Token.nl] = 'NLL'
-	s[Token.dollar] = '$'
-	s[Token.str_dollar] = '$2'
-	s[Token.key_assert] = 'assert'
-	s[Token.key_struct] = 'struct'
-	s[Token.key_if] = 'if'
-	// s[Token.key_it] = 'it'
-	s[Token.key_else] = 'else'
-	s[Token.key_asm] = 'asm'
-	s[Token.key_return] = 'return'
-	s[Token.key_module] = 'module'
-	s[Token.key_sizeof] = 'sizeof'
-	s[Token.key_go] = 'go'
-	s[Token.key_goto] = 'goto'
-	s[Token.key_const] = 'const'
-	s[Token.key_mut] = 'mut'
-	s[Token.key_type] = 'type'
-	s[Token.key_for] = 'for'
-	s[Token.key_switch] = 'switch'
-	s[Token.key_fn] = 'fn'
-	s[Token.key_true] = 'true'
-	s[Token.key_false] = 'false'
-	s[Token.key_continue] = 'continue'
-	s[Token.key_break] = 'break'
-	s[Token.key_import] = 'import'
-	s[Token.key_embed] = 'embed'
-	s[Token.key_unsafe] = 'unsafe'
-	// Tokens[key_typeof] = 'typeof'
-	s[Token.key_enum] = 'enum'
-	s[Token.key_interface] = 'interface'
-	s[Token.key_pub] = 'pub'
-	s[Token.key_import_const] = 'import_const'
-	s[Token.key_in] = 'in'
-	s[Token.key_atomic] = 'atomic'
-	s[Token.key_orelse] = 'or'
-	s[Token.key_global] = '__global'
-	s[Token.key_union] = 'union'
-	s[Token.key_static] = 'static'
-	s[Token.key_as] = 'as'
-	s[Token.key_defer] = 'defer'
-	s[Token.key_match] = 'match'
-	s[Token.key_select] = 'select'
-	s[Token.key_none] = 'none'
-	s[Token.key_offsetof] = '__offsetof'
+	s[TokenKind.keyword_beg] = ''
+	s[TokenKind.keyword_end] = ''
+	s[TokenKind.eof] = 'eof'
+	s[TokenKind.name] = 'name'
+	s[TokenKind.number] = 'number'
+	s[TokenKind.str] = 'STR'
+	s[TokenKind.chartoken] = 'char'
+	s[TokenKind.plus] = '+'
+	s[TokenKind.minus] = '-'
+	s[TokenKind.mul] = '*'
+	s[TokenKind.div] = '/'
+	s[TokenKind.mod] = '%'
+	s[TokenKind.xor] = '^'
+	s[TokenKind.bit_not] = '~'
+	s[TokenKind.pipe] = '|'
+	s[TokenKind.hash] = '#'
+	s[TokenKind.amp] = '&'
+	s[TokenKind.inc] = '++'
+	s[TokenKind.dec] = '--'
+	s[TokenKind.and] = '&&'
+	s[TokenKind.logical_or] = '||'
+	s[TokenKind.not] = '!'
+	s[TokenKind.dot] = '.'
+	s[TokenKind.dotdot] = '..'
+	s[TokenKind.ellipsis] = '...'
+	s[TokenKind.comma] = ','
+	// s[TokenKind.at] = '@'
+	s[TokenKind.semicolon] = ';'
+	s[TokenKind.colon] = ':'
+	s[TokenKind.arrow] = '=>'
+	s[TokenKind.assign] = '='
+	s[TokenKind.decl_assign] = ':='
+	s[TokenKind.plus_assign] = '+='
+	s[TokenKind.minus_assign] = '-='
+	s[TokenKind.mult_assign] = '*='
+	s[TokenKind.div_assign] = '/='
+	s[TokenKind.xor_assign] = '^='
+	s[TokenKind.mod_assign] = '%='
+	s[TokenKind.or_assign] = '|='
+	s[TokenKind.and_assign] = '&='
+	s[TokenKind.righ_shift_assign] = '>>='
+	s[TokenKind.left_shift_assign] = '<<='
+	s[TokenKind.lcbr] = '{'
+	s[TokenKind.rcbr] = '}'
+	s[TokenKind.lpar] = '('
+	s[TokenKind.rpar] = ')'
+	s[TokenKind.lsbr] = '['
+	s[TokenKind.rsbr] = ']'
+	s[TokenKind.eq] = '=='
+	s[TokenKind.ne] = '!='
+	s[TokenKind.gt] = '>'
+	s[TokenKind.lt] = '<'
+	s[TokenKind.ge] = '>='
+	s[TokenKind.le] = '<='
+	s[TokenKind.question] = '?'
+	s[TokenKind.left_shift] = '<<'
+	s[TokenKind.righ_shift] = '>>'
+	s[TokenKind.line_comment] = '// line comment'
+	s[TokenKind.mline_comment] = '/* mline comment */'
+	s[TokenKind.nl] = 'NLL'
+	s[TokenKind.dollar] = '$'
+	s[TokenKind.str_dollar] = '$2'
+	s[TokenKind.key_assert] = 'assert'
+	s[TokenKind.key_struct] = 'struct'
+	s[TokenKind.key_if] = 'if'
+	// s[TokenKind.key_it] = 'it'
+	s[TokenKind.key_else] = 'else'
+	s[TokenKind.key_asm] = 'asm'
+	s[TokenKind.key_return] = 'return'
+	s[TokenKind.key_module] = 'module'
+	s[TokenKind.key_sizeof] = 'sizeof'
+	s[TokenKind.key_go] = 'go'
+	s[TokenKind.key_goto] = 'goto'
+	s[TokenKind.key_const] = 'const'
+	s[TokenKind.key_mut] = 'mut'
+	s[TokenKind.key_type] = 'type'
+	s[TokenKind.key_for] = 'for'
+	s[TokenKind.key_switch] = 'switch'
+	s[TokenKind.key_fn] = 'fn'
+	s[TokenKind.key_true] = 'true'
+	s[TokenKind.key_false] = 'false'
+	s[TokenKind.key_continue] = 'continue'
+	s[TokenKind.key_break] = 'break'
+	s[TokenKind.key_import] = 'import'
+	s[TokenKind.key_embed] = 'embed'
+	s[TokenKind.key_unsafe] = 'unsafe'
+	// TokenKinds[key_typeof] = 'typeof'
+	s[TokenKind.key_enum] = 'enum'
+	s[TokenKind.key_interface] = 'interface'
+	s[TokenKind.key_pub] = 'pub'
+	s[TokenKind.key_import_const] = 'import_const'
+	s[TokenKind.key_in] = 'in'
+	s[TokenKind.key_atomic] = 'atomic'
+	s[TokenKind.key_orelse] = 'or'
+	s[TokenKind.key_global] = '__global'
+	s[TokenKind.key_union] = 'union'
+	s[TokenKind.key_static] = 'static'
+	s[TokenKind.key_as] = 'as'
+	s[TokenKind.key_defer] = 'defer'
+	s[TokenKind.key_match] = 'match'
+	s[TokenKind.key_select] = 'select'
+	s[TokenKind.key_none] = 'none'
+	s[TokenKind.key_offsetof] = '__offsetof'
 	return s
 }
@@ -258,8 +257,8 @@ const (
 	keywords = build_keys()
 )

-pub fn key_to_token(key string) Token {
-	a := Token(keywords[key])
+pub fn key_to_token(key string) TokenKind {
+	a := TokenKind(keywords[key])
 	return a
 }
@@ -267,17 +266,17 @@ pub fn is_key(key string) bool {
 	return int(key_to_token(key)) > 0
 }

-pub fn is_decl(t Token) bool {
+pub fn is_decl(t TokenKind) bool {
 	return t in [.key_enum,
 	.key_interface, .key_fn, .key_struct, .key_type, .key_const, .key_import_const,
 	.key_pub, .eof]
 }

-fn (t Token) is_assign() bool {
+fn (t TokenKind) is_assign() bool {
 	return t in assign_tokens
 }

-fn (t []Token) contains(val Token) bool {
+fn (t []TokenKind) contains(val TokenKind) bool {
 	for tt in t {
 		if tt == val {
 			return true
@@ -286,7 +285,7 @@ fn (t []Token) contains(val Token) bool {
 	return false
 }

-pub fn (t Token) str() string {
+pub fn (t TokenKind) str() string {
 	if t == .number {
 		return 'number'
 	}
@@ -304,6 +303,10 @@ pub fn (t Token) str() string {
 	return token_str[int(t)]
 }

+pub fn (t Token) str() string {
+	return '$t.kind.str() "$t.lit"'
+}
+
 // Representation of highest and lowest precedence
 pub const (
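With both `str()` methods in place, a kind and a full token print differently. A small sketch from inside the `token` module (the printed output shown in comments is approximate, since it depends on how the method call is interpolated):

	t := Token{kind: .plus, lit: '+'}
	println(t.kind.str()) // + (the entry from token_str)
	println(t.str()) // + "+" (the kind followed by the literal)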
@@ -313,7 +316,7 @@ pub const (
 // Precedence returns a token's precedence if defined, otherwise lowest_prec
 pub fn (tok Token) precedence() int {
-	match tok {
+	match tok.kind {
 		// `*` | `/` | `%` | `<<` | `>>` | `&`
 		.mul, .div, .left_shift, .righ_shift, .amp { return 7 }
 		// `+` | `-` | `|` | `^`
@@ -330,12 +333,12 @@ pub fn (tok Token) precedence() int {
 // is_scalar returns true if the token is a scalar
 pub fn (tok Token) is_scalar() bool {
-	return tok in [.number, .str]
+	return tok.kind in [.number, .str]
 }

 // is_unary returns true if the token can be in a unary expression
 pub fn (tok Token) is_unary() bool {
-	return tok in [
+	return tok.kind in [
 	// `+` | `-` | `!` | `~` | `*` | `&`
 	.plus, .minus, .not, .bit_not, .mul, .amp
 	]
@@ -346,8 +349,7 @@ pub fn (tok Token) is_unary() bool {
 // is_left_assoc returns true if the token is left associative
 pub fn (tok Token) is_left_assoc() bool {
-	return tok in [
+	return tok.kind in [
 	// .number,
 	// `*` | `/` | `%`
 	.mul, .div, .mod,
@@ -360,7 +362,7 @@ pub fn (tok Token) is_left_assoc() bool {
 // is_right_assoc returns true if the token is right associative
 pub fn (tok Token) is_right_assoc() bool {
-	return tok in [
+	return tok.kind in [
 	// `+` | `-` | `!` | `++` | `--`
 	.plus, .minus, .not, .inc, .dec,
 	// `=` | `+=` | `-=` | `*=` | `/=`