v: update parser / token & split parsing into methods

pull/3242/head
joe-conigliaro 2019-12-28 19:15:32 +11:00 committed by Alexander Medvednikov
parent f40f023487
commit a986321b6d
5 changed files with 316 additions and 279 deletions


@@ -34,6 +34,22 @@ pub:
val string
}
// module declaration
pub struct Module {
pub:
name string
path string
expr Expr
}
// import statement
pub struct Import {
pub:
name string
expr Expr
// imports map[string]string
}
pub struct FnDecl {
pub:
name string
@@ -80,15 +96,15 @@ pub:
// A single identifier
struct Ident {
token token.Token
tok_kind token.TokenKind
value string
}
pub struct BinaryExpr {
pub:
token token.Token
tok_kind token.TokenKind
//op BinaryOp
op token.Token
op token.TokenKind
left Expr
//left_type Type
right Expr
@@ -97,21 +113,21 @@ pub:
pub struct UnaryExpr {
pub:
// token token.Token
// tok_kind token.TokenKind
//op BinaryOp
op token.Token
op token.TokenKind
left Expr
}
struct IfExpr {
token token.Token
tok_kind token.TokenKind
cond Expr
body []Stmt
else_ []Stmt
}
struct ReturnStmt {
token token.Token // or pos
tok_kind token.TokenKind // or pos
results []Expr
}
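
The new Module and Import nodes join the existing variants of the ast.Expr sum type, so the parser can hand any of them back from expr(). A minimal sketch of constructing the new nodes (field values are illustrative; the full Expr variant list in ast.v is longer than what this hunk shows):

// illustrative values; ast.Expr{} is the zero value used throughout the parser
mod := ast.Module{
	name: 'main'
	path: 'main.v'
	expr: ast.Expr{}
}
imp := ast.Import{
	name: 'os'
	expr: ast.Expr{}
}
println(mod.name) // main
println(imp.name) // os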


@@ -15,21 +15,20 @@ struct Parser {
scanner &scanner.Scanner
mut:
tok token.Token
lit string
peek_tok token.Token
//vars []string
table &table.Table
return_type types.Type
}
pub fn parse_expr(text string, table &table.Table) ast.Expr {
mut s := scanner.new_scanner(text)
res := s.scan()
s := scanner.new_scanner(text)
mut p := Parser{
scanner: s
tok: res.tok
lit: res.lit
table: table
}
p.next()
p.next()
expr,_ := p.expr(token.lowest_prec)
return expr
}
@@ -38,14 +37,15 @@ pub fn (p mut Parser) get_type() types.Type {
defer {
p.next()
}
if p.lit == 'int' { return types.int_type }
else if p.lit == 'string' { return types.string_type }
else if p.lit == 'f64' { return types.f64_type }
match p.tok.lit {
'int' { return types.int_type }
'f64' { return types.f64_type }
'string' { return types.string_type }
else {
verror('bad type lit')
exit(1)
}
}
}
pub fn parse_file(text string, table &table.Table) ast.Program {
@@ -53,14 +53,13 @@ pub fn parse_file(text string, table &table.Table) ast.Program {
mut exprs := []ast.Expr
mut p := Parser{
scanner: s
//tok: res.tok
//lit: res.lit
table: table
}
p.next()
p.next()
for {
//res := s.scan()
if p.tok == .eof {
if p.tok.kind == .eof {
break
}
//println('expr at ' + p.tok.str())
@@ -77,7 +76,7 @@ pub fn (p mut Parser) parse_block() []ast.Expr {
for {
//res := s.scan()
if p.tok == .eof || p.tok == .rcbr {
if p.tok.kind in [.eof, .rcbr] {
break
}
//println('expr at ' + p.tok.str())
@@ -105,22 +104,21 @@ pub fn parse_stmt(text string) ast.Stmt {
fn (p mut Parser) next() {
res := p.scanner.scan()
p.tok = res.tok
p.tok = p.peek_tok
p.peek_tok = p.scanner.scan()
// println(p.tok.str())
p.lit = res.lit
}
fn (p mut Parser) check(expected token.Token) {
if p.tok != expected {
s := 'syntax error: unexpected `${p.tok.str()}`, expecting `${expected.str()}`'
fn (p mut Parser) check(expected token.TokenKind) {
if p.tok.kind != expected {
s := 'syntax error: unexpected `${p.tok.kind.str()}`, expecting `${expected.str()}`'
verror(s)
}
p.next()
}
fn (p mut Parser) check_name() string {
name := p.lit
name := p.tok.lit
p.check(.name)
return name
}
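
next() now maintains one token of lookahead: the previously scanned peek_tok becomes the current tok, and a fresh token is pulled from the scanner into peek_tok. That is why parse_expr and parse_file above call p.next() twice before their main loops: the first call only fills peek_tok, the second promotes it into tok. A self-contained sketch of the same buffering, with a toy scanner standing in for scanner.Scanner (all names here are illustrative):

struct ToyScanner {
mut:
	toks []string
	pos  int
}

fn (s mut ToyScanner) scan() string {
	if s.pos >= s.toks.len {
		return 'eof'
	}
	tok := s.toks[s.pos]
	s.pos++
	return tok
}

struct ToyParser {
mut:
	scanner  ToyScanner
	tok      string
	peek_tok string
}

// same shape as Parser.next(): promote the lookahead, then refill it
fn (p mut ToyParser) next() {
	p.tok = p.peek_tok
	p.peek_tok = p.scanner.scan()
}

fn main() {
	mut p := ToyParser{scanner: ToyScanner{toks: ['x', ':=', '1']}}
	p.next() // fills peek_tok only; tok is still empty
	p.next() // tok = 'x', peek_tok = ':='
	println('$p.tok $p.peek_tok') // x :=
}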
@@ -128,139 +126,70 @@ fn (p mut Parser) check_name() string {
// Implementation of Pratt Precedence
pub fn (p mut Parser) expr(rbp int) (ast.Expr,types.Type) {
// null denotation (prefix)
tok := p.tok
lit := p.lit
if p.tok == .key_fn {
p.next()
name := p.lit
println('fn decl $name')
p.check(.name)
p.check(.lpar)
p.check(.rpar)
// Return type
mut typ := types.void_type
if p.tok == .name {
typ = p.get_type()
p.return_type = typ
}
p.check(.lcbr)
//p.check(.rcbr)
println('OK!')
exprs := p.parse_block()
mut node := ast.Expr{}
node = ast.FnDecl{name: name, exprs: exprs, typ: typ}
return node, types.void_type
}
else if p.tok == .key_return {
p.next()
mut node := ast.Expr{}
expr, typ := p.expr(0)
if !types.check(p.return_type, typ) {
verror('bad ret type')
}
node = ast.Return{expr: expr}
return node, types.void_type
}
else if p.tok == .name {
name := p.lit
p.next()
if p.tok == .decl_assign {
p.next()
mut node := ast.Expr{}
expr,t :=p.expr(token.lowest_prec)
if name in p.table.names {
verror('redefinition of `$name`')
}
p.table.names << name
println(p.table.names)
println('added $name')
// TODO can't return VarDecl{}
node = ast.VarDecl{
name: name
expr: expr//p.expr(token.lowest_prec)
typ: t
}//, ast.void_type
return node, types.void_type
}
} else {
p.next()
}
mut node := ast.Expr{}
mut typ := types.void_type
match tok {
match p.tok.kind {
.key_module { return p.module_decl() }
.key_import { return p.import_stmt() }
.key_fn { return p.fn_decl() }
.key_return { return p.return_stmt() }
.name {
if p.peek_tok.kind == .decl_assign {
return p.var_decl()
}
}
.str { node, typ = p.parse_string_literal() }
.number { node, typ = p.parse_number_literal() }
.lpar {
node,typ = p.expr(0)
if p.tok != .rpar {
if p.tok.kind != .rpar {
panic('Parse Error: expected )')
}
p.next()
}
.str {
node = ast.StringLiteral{
val: lit
}
typ = types.string_type
}
.number {
if lit.contains('.') {
node = ast.FloatLiteral{
//val: lit.f64()
val: lit
}
typ = types.int_type
} else {
node = ast.IntegerLiteral{
val: lit.int()
}
typ = types.int_type
}
}
else {
// TODO: fix bug. note odd condition instead of else if (same below)
if tok.is_unary() {
p.next()
if p.tok.is_unary() {
expr,_ := p.expr(token.highest_prec)
node = ast.UnaryExpr{
// left: p.expr(token.highest_prec)
left: expr
op: tok
op: p.tok.kind
}
}
}
}
// left binding power
for rbp < p.tok.precedence() {
tok2 := p.tok
prev_tok := p.tok
p.next()
mut t2 := types.Type{}
// left denotation (infix)
if tok2.is_right_assoc() {
if prev_tok.is_right_assoc() {
mut expr := ast.Expr{}
expr,t2 = p.expr(tok2.precedence() - 1)
expr,t2 = p.expr(prev_tok.precedence() - 1)
node = ast.BinaryExpr{
left: node
//left_type: t1
op: tok2
// right: p.expr(tok2.precedence() - 1)
op: prev_tok.kind
// right: p.expr(prev_tok.precedence() - 1)
right: expr
}
if !types.check(&typ, &t2) {
verror('cannot convert `$t2.name` to `$typ.name`')
}
}
if !tok2.is_right_assoc() && tok2.is_left_assoc() {
else if prev_tok.is_left_assoc() {
mut expr := ast.Expr{}
expr,t2 = p.expr(tok2.precedence())
expr,t2 = p.expr(prev_tok.precedence())
node = ast.BinaryExpr{
left: node
op: tok2
op: prev_tok.kind
right: expr
}
}
}
return node,typ
return node, typ
}
/*
@@ -290,6 +219,103 @@ fn (p mut Parser) stmt() ast.Stmt {
}
*/
fn (p mut Parser) parse_string_literal() (ast.Expr,types.Type) {
mut node := ast.Expr{}
node = ast.StringLiteral{
val: p.tok.lit
}
p.next()
return node, types.string_type
}
fn (p mut Parser) parse_number_literal() (ast.Expr,types.Type) {
lit := p.tok.lit
mut node := ast.Expr{}
mut typ := types.int_type
if lit.contains('.') {
node = ast.FloatLiteral{
//val: lit.f64()
val: lit
}
typ = types.f64_type // a float literal yields f64, not int
} else {
node = ast.IntegerLiteral{
val: lit.int()
}
typ = types.int_type
}
p.next()
return node, typ
}
fn (p mut Parser) module_decl() (ast.Expr,types.Type) {
// p.check(.key_module)
p.next()
return ast.Expr{}, types.void_type
}
fn (p mut Parser) import_stmt() (ast.Expr,types.Type) {
// p.check(.key_import)
p.next()
return ast.Expr{}, types.void_type
}
fn (p mut Parser) fn_decl() (ast.Expr,types.Type) {
p.check(.key_fn)
name := p.tok.lit
println('fn decl $name')
p.check(.name)
p.check(.lpar)
p.check(.rpar)
// Return type
mut typ := types.void_type
if p.tok.kind == .name {
typ = p.get_type()
p.return_type = typ
}
p.check(.lcbr)
//p.check(.rcbr)
println('OK!')
exprs := p.parse_block()
mut node := ast.Expr{}
node = ast.FnDecl{name: name, exprs: exprs, typ: typ}
return node, types.void_type
}
fn (p mut Parser) return_stmt() (ast.Expr,types.Type) {
println('return st')
p.next()
expr, t := p.expr(0)
if !types.check(p.return_type, t) {
verror('bad ret type')
}
mut node := ast.Expr{}
node = ast.Return{expr: expr}
return node, types.void_type
}
fn (p mut Parser) var_decl() (ast.Expr,types.Type) {
name := p.tok.lit
p.next()
p.next()
expr,t :=p.expr(token.lowest_prec)
if name in p.table.names {
verror('redefinition of `$name`')
}
p.table.names << name
println(p.table.names)
println('added $name')
mut node := ast.Expr{}
// TODO can't return VarDecl{}
node = ast.VarDecl{
name: name
expr: expr//p.expr(token.lowest_prec)
typ: t
}//, ast.void_type
return node, types.void_type
}
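
With the branches factored out into these methods, expr() reads as a plain dispatch table and each method owns its own token bookkeeping. A hypothetical end-to-end call; table.new_table() is an assumed constructor name, not part of this diff:

// hypothetical driver; new_table() is assumed, only &table.Table appears in this diff
t := table.new_table()
prog := parser.parse_file('fn main() { x := 1 + 2 }', t)
// prog is an ast.Program whose exprs hold the FnDecl built above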
fn verror(s string) {
println(s)


@@ -75,15 +75,8 @@ pub fn new_scanner(text string) &Scanner {
}
}
// TODO remove once multiple return values are implemented
pub struct ScanRes {
pub:
tok token.Token
lit string
}
fn scan_res(tok token.Token, lit string) ScanRes {
return ScanRes{
fn scan_res(tok token.TokenKind, lit string) token.Token {
return token.Token{tok, lit}
}
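
With Token carrying both kind and lit, the ScanRes shim is gone and callers consume tokens directly, as debug_tokens() below now does. A minimal loop over a scanner, using only functions shown in this diff:

mut s := new_scanner('x := 1')
for {
	tok := s.scan()
	if tok.kind == .eof {
		break
	}
	println(tok.str()) // e.g. name "x", per Token.str() in token.v
}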
@@ -220,13 +213,13 @@ fn (s mut Scanner) skip_whitespace() {
}
}
fn (s mut Scanner) end_of_file() ScanRes {
fn (s mut Scanner) end_of_file() token.Token {
s.pos = s.text.len
s.inc_line_number()
return scan_res(.eof, '')
}
pub fn (s mut Scanner) scan() ScanRes {
pub fn (s mut Scanner) scan() token.Token {
// if s.line_comment != '' {
// s.fgenln('// LC "$s.line_comment"')
// s.line_comment = ''
@@ -796,17 +789,17 @@ fn (s mut Scanner) debug_tokens() {
fname := s.file_path.all_after(os.path_separator)
println('\n===DEBUG TOKENS $fname===')
for {
res := s.scan()
tok := res.tok
lit := res.lit
print(tok.str())
tok := s.scan()
tok_kind := tok.kind
lit := tok.lit
print(tok_kind.str())
if lit != '' {
println(' `$lit`')
}
else {
println('')
}
if tok == .eof {
if tok_kind == .eof {
println('============ END OF DEBUG TOKENS ==================')
break
}


@@ -10,21 +10,21 @@ import (
fn test_scan() {
text := 'println(2 + 3)'
mut scanner := new_scanner(text)
mut tokens := []token.Token
mut token_kinds := []token.TokenKind
for {
res := scanner.scan()
if res.tok == .eof {
tok := scanner.scan()
if tok.kind == .eof {
break
}
tokens << res.tok
token_kinds << tok.kind
}
assert tokens.len == 6
assert tokens[0] == .name
assert tokens[1] == .lpar
assert tokens[2] == .number
assert tokens[3] == .plus
assert tokens[4] == .number
assert tokens[5] == .rpar
assert token_kinds.len == 6
assert token_kinds[0] == .name
assert token_kinds[1] == .lpar
assert token_kinds[2] == .number
assert token_kinds[3] == .plus
assert token_kinds[4] == .number
assert token_kinds[5] == .rpar
}


@@ -3,17 +3,16 @@
// that can be found in the LICENSE file.
module token
/*
struct Token {
tok TokenKind // the token number/enum; for quick comparisons
pub struct Token {
pub:
kind TokenKind // the token number/enum; for quick comparisons
lit string // literal representation of the token
line_nr int // the line number in the source where the token occurred
//name_idx int // name table index for O(1) lookup
pos int // the position of the token in scanner text
// line_nr int // the line number in the source where the token occurred
// name_idx int // name table index for O(1) lookup
// pos int // the position of the token in scanner text
}
*/
pub enum Token {
pub enum TokenKind {
eof
name // user
number // 123
@@ -125,7 +124,7 @@ pub enum Token {
}
const (
assign_tokens = [Token.assign, .plus_assign, .minus_assign, .mult_assign,
assign_tokens = [TokenKind.assign, .plus_assign, .minus_assign, .mult_assign,
.div_assign, .xor_assign, .mod_assign, .or_assign, .and_assign,
.righ_shift_assign, .left_shift_assign]
@@ -137,119 +136,119 @@ const (
// Keywords['return'] == .key_return
fn build_keys() map[string]int {
mut res := map[string]int
for t := int(Token.keyword_beg) + 1; t < int(Token.keyword_end); t++ {
for t := int(TokenKind.keyword_beg) + 1; t < int(TokenKind.keyword_end); t++ {
key := token_str[t]
res[key] = t
}
return res
}
// TODO remove once we have `enum Token { name('name') if('if') ... }`
// TODO remove once we have `enum TokenKind { name('name') if('if') ... }`
fn build_token_str() []string {
mut s := [''].repeat(nr_tokens)
s[Token.keyword_beg] = ''
s[Token.keyword_end] = ''
s[Token.eof] = 'eof'
s[Token.name] = 'name'
s[Token.number] = 'number'
s[Token.str] = 'STR'
s[Token.chartoken] = 'char'
s[Token.plus] = '+'
s[Token.minus] = '-'
s[Token.mul] = '*'
s[Token.div] = '/'
s[Token.mod] = '%'
s[Token.xor] = '^'
s[Token.bit_not] = '~'
s[Token.pipe] = '|'
s[Token.hash] = '#'
s[Token.amp] = '&'
s[Token.inc] = '++'
s[Token.dec] = '--'
s[Token.and] = '&&'
s[Token.logical_or] = '||'
s[Token.not] = '!'
s[Token.dot] = '.'
s[Token.dotdot] = '..'
s[Token.ellipsis] = '...'
s[Token.comma] = ','
// s[Token.at] = '@'
s[Token.semicolon] = ';'
s[Token.colon] = ':'
s[Token.arrow] = '=>'
s[Token.assign] = '='
s[Token.decl_assign] = ':='
s[Token.plus_assign] = '+='
s[Token.minus_assign] = '-='
s[Token.mult_assign] = '*='
s[Token.div_assign] = '/='
s[Token.xor_assign] = '^='
s[Token.mod_assign] = '%='
s[Token.or_assign] = '|='
s[Token.and_assign] = '&='
s[Token.righ_shift_assign] = '>>='
s[Token.left_shift_assign] = '<<='
s[Token.lcbr] = '{'
s[Token.rcbr] = '}'
s[Token.lpar] = '('
s[Token.rpar] = ')'
s[Token.lsbr] = '['
s[Token.rsbr] = ']'
s[Token.eq] = '=='
s[Token.ne] = '!='
s[Token.gt] = '>'
s[Token.lt] = '<'
s[Token.ge] = '>='
s[Token.le] = '<='
s[Token.question] = '?'
s[Token.left_shift] = '<<'
s[Token.righ_shift] = '>>'
s[Token.line_comment] = '// line comment'
s[Token.mline_comment] = '/* mline comment */'
s[Token.nl] = 'NLL'
s[Token.dollar] = '$'
s[Token.str_dollar] = '$2'
s[Token.key_assert] = 'assert'
s[Token.key_struct] = 'struct'
s[Token.key_if] = 'if'
// s[Token.key_it] = 'it'
s[Token.key_else] = 'else'
s[Token.key_asm] = 'asm'
s[Token.key_return] = 'return'
s[Token.key_module] = 'module'
s[Token.key_sizeof] = 'sizeof'
s[Token.key_go] = 'go'
s[Token.key_goto] = 'goto'
s[Token.key_const] = 'const'
s[Token.key_mut] = 'mut'
s[Token.key_type] = 'type'
s[Token.key_for] = 'for'
s[Token.key_switch] = 'switch'
s[Token.key_fn] = 'fn'
s[Token.key_true] = 'true'
s[Token.key_false] = 'false'
s[Token.key_continue] = 'continue'
s[Token.key_break] = 'break'
s[Token.key_import] = 'import'
s[Token.key_embed] = 'embed'
s[Token.key_unsafe] = 'unsafe'
// Tokens[key_typeof] = 'typeof'
s[Token.key_enum] = 'enum'
s[Token.key_interface] = 'interface'
s[Token.key_pub] = 'pub'
s[Token.key_import_const] = 'import_const'
s[Token.key_in] = 'in'
s[Token.key_atomic] = 'atomic'
s[Token.key_orelse] = 'or'
s[Token.key_global] = '__global'
s[Token.key_union] = 'union'
s[Token.key_static] = 'static'
s[Token.key_as] = 'as'
s[Token.key_defer] = 'defer'
s[Token.key_match] = 'match'
s[Token.key_select] = 'select'
s[Token.key_none] = 'none'
s[Token.key_offsetof] = '__offsetof'
s[TokenKind.keyword_beg] = ''
s[TokenKind.keyword_end] = ''
s[TokenKind.eof] = 'eof'
s[TokenKind.name] = 'name'
s[TokenKind.number] = 'number'
s[TokenKind.str] = 'STR'
s[TokenKind.chartoken] = 'char'
s[TokenKind.plus] = '+'
s[TokenKind.minus] = '-'
s[TokenKind.mul] = '*'
s[TokenKind.div] = '/'
s[TokenKind.mod] = '%'
s[TokenKind.xor] = '^'
s[TokenKind.bit_not] = '~'
s[TokenKind.pipe] = '|'
s[TokenKind.hash] = '#'
s[TokenKind.amp] = '&'
s[TokenKind.inc] = '++'
s[TokenKind.dec] = '--'
s[TokenKind.and] = '&&'
s[TokenKind.logical_or] = '||'
s[TokenKind.not] = '!'
s[TokenKind.dot] = '.'
s[TokenKind.dotdot] = '..'
s[TokenKind.ellipsis] = '...'
s[TokenKind.comma] = ','
// s[TokenKind.at] = '@'
s[TokenKind.semicolon] = ';'
s[TokenKind.colon] = ':'
s[TokenKind.arrow] = '=>'
s[TokenKind.assign] = '='
s[TokenKind.decl_assign] = ':='
s[TokenKind.plus_assign] = '+='
s[TokenKind.minus_assign] = '-='
s[TokenKind.mult_assign] = '*='
s[TokenKind.div_assign] = '/='
s[TokenKind.xor_assign] = '^='
s[TokenKind.mod_assign] = '%='
s[TokenKind.or_assign] = '|='
s[TokenKind.and_assign] = '&='
s[TokenKind.righ_shift_assign] = '>>='
s[TokenKind.left_shift_assign] = '<<='
s[TokenKind.lcbr] = '{'
s[TokenKind.rcbr] = '}'
s[TokenKind.lpar] = '('
s[TokenKind.rpar] = ')'
s[TokenKind.lsbr] = '['
s[TokenKind.rsbr] = ']'
s[TokenKind.eq] = '=='
s[TokenKind.ne] = '!='
s[TokenKind.gt] = '>'
s[TokenKind.lt] = '<'
s[TokenKind.ge] = '>='
s[TokenKind.le] = '<='
s[TokenKind.question] = '?'
s[TokenKind.left_shift] = '<<'
s[TokenKind.righ_shift] = '>>'
s[TokenKind.line_comment] = '// line comment'
s[TokenKind.mline_comment] = '/* mline comment */'
s[TokenKind.nl] = 'NLL'
s[TokenKind.dollar] = '$'
s[TokenKind.str_dollar] = '$2'
s[TokenKind.key_assert] = 'assert'
s[TokenKind.key_struct] = 'struct'
s[TokenKind.key_if] = 'if'
// s[TokenKind.key_it] = 'it'
s[TokenKind.key_else] = 'else'
s[TokenKind.key_asm] = 'asm'
s[TokenKind.key_return] = 'return'
s[TokenKind.key_module] = 'module'
s[TokenKind.key_sizeof] = 'sizeof'
s[TokenKind.key_go] = 'go'
s[TokenKind.key_goto] = 'goto'
s[TokenKind.key_const] = 'const'
s[TokenKind.key_mut] = 'mut'
s[TokenKind.key_type] = 'type'
s[TokenKind.key_for] = 'for'
s[TokenKind.key_switch] = 'switch'
s[TokenKind.key_fn] = 'fn'
s[TokenKind.key_true] = 'true'
s[TokenKind.key_false] = 'false'
s[TokenKind.key_continue] = 'continue'
s[TokenKind.key_break] = 'break'
s[TokenKind.key_import] = 'import'
s[TokenKind.key_embed] = 'embed'
s[TokenKind.key_unsafe] = 'unsafe'
// TokenKinds[key_typeof] = 'typeof'
s[TokenKind.key_enum] = 'enum'
s[TokenKind.key_interface] = 'interface'
s[TokenKind.key_pub] = 'pub'
s[TokenKind.key_import_const] = 'import_const'
s[TokenKind.key_in] = 'in'
s[TokenKind.key_atomic] = 'atomic'
s[TokenKind.key_orelse] = 'or'
s[TokenKind.key_global] = '__global'
s[TokenKind.key_union] = 'union'
s[TokenKind.key_static] = 'static'
s[TokenKind.key_as] = 'as'
s[TokenKind.key_defer] = 'defer'
s[TokenKind.key_match] = 'match'
s[TokenKind.key_select] = 'select'
s[TokenKind.key_none] = 'none'
s[TokenKind.key_offsetof] = '__offsetof'
return s
}
@@ -258,8 +257,8 @@ const (
keywords = build_keys()
)
pub fn key_to_token(key string) Token {
a := Token(keywords[key])
pub fn key_to_token(key string) TokenKind {
a := TokenKind(keywords[key])
return a
}
@@ -267,17 +266,17 @@ pub fn is_key(key string) bool {
return int(key_to_token(key)) > 0
}
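
A missed map lookup in V yields 0, and 0 cast to TokenKind is .eof, which sits outside the keyword range, so is_key only has to test for a positive value. A quick check against the keyword table built above:

assert key_to_token('return') == .key_return
assert is_key('return') == true
assert is_key('banana') == false // not in keywords; the cast yields 0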
pub fn is_decl(t Token) bool {
pub fn is_decl(t TokenKind) bool {
return t in [.key_enum,
.key_interface, .key_fn, .key_struct, .key_type, .key_const, .key_import_const,
.key_pub, .eof]
}
fn (t Token) is_assign() bool {
fn (t TokenKind) is_assign() bool {
return t in assign_tokens
}
fn (t []Token) contains(val Token) bool {
fn (t []TokenKind) contains(val TokenKind) bool {
for tt in t {
if tt == val {
return true
@@ -286,7 +285,7 @@ fn (t []Token) contains(val Token) bool {
return false
}
pub fn (t Token) str() string {
pub fn (t TokenKind) str() string {
if t == .number {
return 'number'
}
@@ -304,6 +303,10 @@ pub fn (t Token) str() string {
return token_str[int(t)]
}
pub fn (t Token) str() string {
return '$t.kind.str() "$t.lit"'
}
// Representation of highest and lowest precedence
pub const (
@@ -313,7 +316,7 @@ pub const (
// Precedence returns a tokens precedence if defined, otherwise lowest_prec
pub fn (tok Token) precedence() int {
match tok {
match tok.kind {
// `*` | `/` | `%` | `<<` | `>>` | `&`
.mul, .div, .left_shift, .righ_shift, .amp { return 7 }
// `+` | `-` | `|` | `^`
@@ -330,12 +333,12 @@ pub fn (tok Token) precedence() int {
// is_scalar returns true if the token is a scalar
pub fn (tok Token) is_scalar() bool {
return tok in [.number, .str]
return tok.kind in [.number, .str]
}
// is_unary returns true if the token can be in a unary expression
pub fn (tok Token) is_unary() bool {
return tok in [
return tok.kind in [
// `+` | `-` | `!` | `~` | `*` | `&`
.plus, .minus, .not, .bit_not, .mul, .amp
]
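
These predicates now look through to tok.kind, so any Token built from a kind/literal pair answers directly. A small check, assuming + and - bind at precedence 6 as in the table above:

t := Token{.minus, '-'}
assert t.is_unary()
assert t.is_right_assoc()
assert t.precedence() == 6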
@@ -346,8 +349,7 @@ pub fn (tok Token) is_unary() bool {
// is_left_assoc returns true if the token is left associative
pub fn (tok Token) is_left_assoc() bool {
return tok in [
return tok.kind in [
// .number,
// `*` | `/` | `%`
.mul, .div, .mod,
@@ -360,7 +362,7 @@ pub fn (tok Token) is_left_assoc() bool {
// is_right_assoc returns true if the token is right associative
pub fn (tok Token) is_right_assoc() bool {
return tok in [
return tok.kind in [
// `+` | `-` | `!` | `++` | `--`
.plus, .minus, .not, .inc, .dec,
// `=` | `+=` | `-=` | `*=` | `/=`