compiler: rename Tok/Token to Token/TokenKind

pull/2262/head
Anders Busch 2019-10-09 00:05:34 +02:00 committed by Alexander Medvednikov
parent 89ea8a0275
commit c620da9089
6 changed files with 209 additions and 211 deletions
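In short: the struct that carries a scanned token's data (formerly Tok) is now Token, and the enum that names a token's kind (formerly Token) is now TokenKind. A minimal sketch of the resulting shape, condensed from the hunks below (most fields and variants elided):

struct Token {
	tok     TokenKind // the token kind; for quick comparisons
	lit     string    // literal representation of the token
	line_nr int       // the line number in the source where the token occurred
}

enum TokenKind {
	eof
	name   // user
	number // 123
	// ... the variants are unchanged; only the enum's name differs
}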

View File

@@ -208,7 +208,7 @@ fn (s mut Scanner) goto_scanner_position(scp ScannerPos) {
}
// get_scanner_pos_of_token rescans *the whole source* till it reaches {t.line_nr, t.col} .
-fn (s mut Scanner) get_scanner_pos_of_token(t &Tok) ScannerPos {
+fn (s mut Scanner) get_scanner_pos_of_token(t &Token) ScannerPos {
// This rescanning is done just once on error, so it is fine for now.
// Be careful for the performance implications, if you want to
// do it more frequently. The alternative would be to store

View File

@@ -296,7 +296,7 @@ fn (p mut Parser) fn_decl() {
}
// Returns a type?
mut typ := 'void'
-if p.tok in [Token.name, .mul, .amp, .lsbr, .question, .lpar] {
+if p.tok in [TokenKind.name, .mul, .amp, .lsbr, .question, .lpar] {
p.fgen(' ')
typ = p.get_type()
}
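Note why only the first element of the list changes: V's leading-dot shorthand infers the enum type of `.mul`, `.amp`, etc. from the first, fully qualified element. A self-contained sketch of that shorthand (is_type_start is a hypothetical helper, not part of this diff):

enum TokenKind {
	name
	mul
	amp
}

fn is_type_start(t TokenKind) bool {
	// only the first element spells out TokenKind; the rest are inferred
	return t in [TokenKind.name, .mul, .amp]
}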

View File

@@ -9,10 +9,8 @@ import (
strings
)
-// TODO rename to Token
-// TODO rename enum Token to TokenType
-struct Tok {
-tok Token // the token number/enum; for quick comparisons
+struct Token {
+tok TokenKind // the token number/enum; for quick comparisons
lit string // literal representation of the token
line_nr int // the line number in the source where the token occured
name_idx int // name table index for O(1) lookup
@@ -32,11 +30,11 @@ struct Parser {
pref &Preferences // Preferences shared from V struct
mut:
scanner &Scanner
-tokens []Tok
+tokens []Token
token_idx int
-tok Token
-prev_tok Token
-prev_tok2 Token // TODO remove these once the tokens are cached
+tok TokenKind
+prev_tok TokenKind
+prev_tok2 TokenKind // TODO remove these once the tokens are cached
lit string
cgen &CGen
table &Table
@@ -165,7 +163,7 @@ fn (v mut V) new_parser(scanner &Scanner, id string) Parser {
fn (p mut Parser) scan_tokens() {
for {
res := p.scanner.scan()
-p.tokens << Tok{
+p.tokens << Token{
tok: res.tok
lit: res.lit
line_nr: p.scanner.line_nr
@@ -188,7 +186,7 @@ fn (p mut Parser) next() {
p.prev_tok = p.tok
p.scanner.prev_tok = p.tok
if p.token_idx >= p.tokens.len {
-p.tok = Token.eof
+p.tok = TokenKind.eof
p.lit = ''
return
}
@@ -199,25 +197,25 @@
p.scanner.line_nr = res.line_nr
}
-fn (p & Parser) peek() Token {
+fn (p & Parser) peek() TokenKind {
if p.token_idx >= p.tokens.len - 2 {
-return Token.eof
+return TokenKind.eof
}
tok := p.tokens[p.token_idx]
return tok.tok
}
// TODO remove dups
-[inline] fn (p &Parser) prev_token() Tok {
+[inline] fn (p &Parser) prev_token() Token {
return p.tokens[p.token_idx - 2]
}
-[inline] fn (p &Parser) cur_tok() Tok {
+[inline] fn (p &Parser) cur_tok() Token {
return p.tokens[p.token_idx - 1]
}
-[inline] fn (p &Parser) peek_token() Tok {
+[inline] fn (p &Parser) peek_token() Token {
if p.token_idx >= p.tokens.len - 2 {
-return Tok{ tok:Token.eof }
+return Token{ tok:TokenKind.eof }
}
return p.tokens[p.token_idx]
}
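Taken together, scan_tokens() and these accessors implement a cached token stream: the scanner runs once up front, then the parser walks the tokens array, falling back to TokenKind.eof past the end. A condensed sketch of the consuming side, assuming the Parser fields shown above (scanner bookkeeping omitted):

fn (p mut Parser) next() {
	p.prev_tok = p.tok
	if p.token_idx >= p.tokens.len {
		p.tok = TokenKind.eof
		p.lit = ''
		return
	}
	res := p.tokens[p.token_idx]
	p.token_idx++
	p.tok = res.tok
	p.lit = res.lit
}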
@@ -286,7 +284,7 @@ fn (p mut Parser) parse(pass Pass) {
p.fgenln('')
}
}
-case Token.key_enum:
+case TokenKind.key_enum:
p.next()
if p.tok == .name {
p.fgen('enum ')
@@ -303,7 +301,7 @@ fn (p mut Parser) parse(pass Pass) {
else {
p.check(.name)
}
-case Token.key_pub:
+case TokenKind.key_pub:
if p.peek() == .func {
p.fn_decl()
} else if p.peek() == .key_struct {
@@ -312,27 +310,27 @@ fn (p mut Parser) parse(pass Pass) {
} else {
p.error('wrong pub keyword usage')
}
-case Token.func:
+case TokenKind.func:
p.fn_decl()
-case Token.key_type:
+case TokenKind.key_type:
p.type_decl()
-case Token.lsbr:
+case TokenKind.lsbr:
// `[` can only mean an [attribute] before a function
// or a struct definition
p.attribute()
-case Token.key_struct, Token.key_interface, Token.key_union, Token.lsbr:
+case TokenKind.key_struct, TokenKind.key_interface, TokenKind.key_union, TokenKind.lsbr:
p.struct_decl()
-case Token.key_const:
+case TokenKind.key_const:
p.const_decl()
-case Token.hash:
+case TokenKind.hash:
// insert C code, TODO this is going to be removed ASAP
// some libraries (like UI) still have lots of C code
// # puts("hello");
p.chash()
-case Token.dollar:
+case TokenKind.dollar:
// $if, $else
p.comp_time()
-case Token.key_global:
+case TokenKind.key_global:
if !p.pref.translated && !p.pref.is_live &&
!p.builtin_mod && !p.pref.building_v && !os.getwd().contains('/volt') {
p.error('__global is only allowed in translated code')
@@ -355,7 +353,7 @@ fn (p mut Parser) parse(pass Pass) {
// p.genln('; // global')
g += '; // global'
p.cgen.consts << g
-case Token.eof:
+case TokenKind.eof:
//p.log('end of parse()')
// TODO: check why this was added? everything seems to work
// without it, and it's already happening in fn_decl
@@ -581,12 +579,12 @@ fn (p mut Parser) interface_method(field_name, receiver string) &Fn {
return method
}
-fn key_to_type_cat(tok Token) TypeCategory {
+fn key_to_type_cat(tok TokenKind) TypeCategory {
switch tok {
-case Token.key_interface: return TypeCategory.interface_
-case Token.key_struct: return TypeCategory.struct_
-case Token.key_union: return TypeCategory.union_
-//Token.key_ => return .interface_
+case TokenKind.key_interface: return TypeCategory.interface_
+case TokenKind.key_struct: return TypeCategory.struct_
+case TokenKind.key_union: return TypeCategory.union_
+//TokenKind.key_ => return .interface_
}
verror('Unknown token: $tok')
return TypeCategory.builtin
@@ -862,13 +860,13 @@ fn (p &Parser) strtok() string {
// same as check(), but adds a space to the formatter output
// TODO bad name
-fn (p mut Parser) check_space(expected Token) {
+fn (p mut Parser) check_space(expected TokenKind) {
p.fspace()
p.check(expected)
p.fspace()
}
-fn (p mut Parser) check(expected Token) {
+fn (p mut Parser) check(expected TokenKind) {
if p.tok != expected {
println('check()')
s := 'expected `${expected.str()}` but got `${p.strtok()}`'
@@ -1265,52 +1263,52 @@ fn (p mut Parser) statement(add_semi bool) string {
// `a + 3`, `a(7)`, or just `a`
q = p.bool_expression()
}
-case Token.key_goto:
+case TokenKind.key_goto:
p.check(.key_goto)
p.fgen(' ')
label := p.check_name()
p.genln('goto $label;')
return ''
-case Token.key_defer:
+case TokenKind.key_defer:
p.defer_st()
return ''
-case Token.hash:
+case TokenKind.hash:
p.chash()
return ''
-case Token.dollar:
+case TokenKind.dollar:
p.comp_time()
-case Token.key_if:
+case TokenKind.key_if:
p.if_st(false, 0)
-case Token.key_for:
+case TokenKind.key_for:
p.for_st()
-case Token.key_switch:
+case TokenKind.key_switch:
p.switch_statement()
-case Token.key_match:
+case TokenKind.key_match:
p.match_statement(false)
-case Token.key_mut, Token.key_static:
+case TokenKind.key_mut, TokenKind.key_static:
p.var_decl()
-case Token.key_return:
+case TokenKind.key_return:
p.return_st()
-case Token.lcbr:// {} block
+case TokenKind.lcbr:// {} block
p.check(.lcbr)
p.genln('{')
p.statements()
return ''
-case Token.key_continue:
+case TokenKind.key_continue:
if p.for_expr_cnt == 0 {
p.error('`continue` statement outside `for`')
}
p.genln('continue')
p.check(.key_continue)
-case Token.key_break:
+case TokenKind.key_break:
if p.for_expr_cnt == 0 {
p.error('`break` statement outside `for`')
}
p.genln('break')
p.check(.key_break)
-case Token.key_go:
+case TokenKind.key_go:
p.go_statement()
-case Token.key_assert:
+case TokenKind.key_assert:
p.assert_statement()
default:
// An expression as a statement
@@ -1357,11 +1355,11 @@ fn ($v.name mut $v.typ) $p.cur_fn.name (...) {
is_str := v.typ == 'string'
is_ustr := v.typ == 'ustring'
switch tok {
-case Token.assign:
+case TokenKind.assign:
if !is_map && !p.is_empty_c_struct_init {
p.gen(' = ')
}
-case Token.plus_assign:
+case TokenKind.plus_assign:
if is_str && !p.is_js {
p.gen('= string_add($v.name, ')// TODO can't do `foo.bar += '!'`
}
@@ -1628,42 +1626,42 @@ fn (p mut Parser) bterm() string {
if is_str && !p.is_js { //&& !p.is_sql {
p.gen(')')
switch tok {
-case Token.eq: p.cgen.set_placeholder(ph, 'string_eq(')
-case Token.ne: p.cgen.set_placeholder(ph, 'string_ne(')
-case Token.le: p.cgen.set_placeholder(ph, 'string_le(')
-case Token.ge: p.cgen.set_placeholder(ph, 'string_ge(')
-case Token.gt: p.cgen.set_placeholder(ph, 'string_gt(')
-case Token.lt: p.cgen.set_placeholder(ph, 'string_lt(')
+case TokenKind.eq: p.cgen.set_placeholder(ph, 'string_eq(')
+case TokenKind.ne: p.cgen.set_placeholder(ph, 'string_ne(')
+case TokenKind.le: p.cgen.set_placeholder(ph, 'string_le(')
+case TokenKind.ge: p.cgen.set_placeholder(ph, 'string_ge(')
+case TokenKind.gt: p.cgen.set_placeholder(ph, 'string_gt(')
+case TokenKind.lt: p.cgen.set_placeholder(ph, 'string_lt(')
}
/*
-Token.eq => p.cgen.set_placeholder(ph, 'string_eq(')
-Token.ne => p.cgen.set_placeholder(ph, 'string_ne(')
-Token.le => p.cgen.set_placeholder(ph, 'string_le(')
-Token.ge => p.cgen.set_placeholder(ph, 'string_ge(')
-Token.gt => p.cgen.set_placeholder(ph, 'string_gt(')
-Token.lt => p.cgen.set_placeholder(ph, 'string_lt(')
+TokenKind.eq => p.cgen.set_placeholder(ph, 'string_eq(')
+TokenKind.ne => p.cgen.set_placeholder(ph, 'string_ne(')
+TokenKind.le => p.cgen.set_placeholder(ph, 'string_le(')
+TokenKind.ge => p.cgen.set_placeholder(ph, 'string_ge(')
+TokenKind.gt => p.cgen.set_placeholder(ph, 'string_gt(')
+TokenKind.lt => p.cgen.set_placeholder(ph, 'string_lt(')
*/
}
if is_ustr {
p.gen(')')
switch tok {
-case Token.eq: p.cgen.set_placeholder(ph, 'ustring_eq(')
-case Token.ne: p.cgen.set_placeholder(ph, 'ustring_ne(')
-case Token.le: p.cgen.set_placeholder(ph, 'ustring_le(')
-case Token.ge: p.cgen.set_placeholder(ph, 'ustring_ge(')
-case Token.gt: p.cgen.set_placeholder(ph, 'ustring_gt(')
-case Token.lt: p.cgen.set_placeholder(ph, 'ustring_lt(')
+case TokenKind.eq: p.cgen.set_placeholder(ph, 'ustring_eq(')
+case TokenKind.ne: p.cgen.set_placeholder(ph, 'ustring_ne(')
+case TokenKind.le: p.cgen.set_placeholder(ph, 'ustring_le(')
+case TokenKind.ge: p.cgen.set_placeholder(ph, 'ustring_ge(')
+case TokenKind.gt: p.cgen.set_placeholder(ph, 'ustring_gt(')
+case TokenKind.lt: p.cgen.set_placeholder(ph, 'ustring_lt(')
}
}
if is_float {
p.gen(')')
switch tok {
-case Token.eq: p.cgen.set_placeholder(ph, '${expr_type}_eq(')
-case Token.ne: p.cgen.set_placeholder(ph, '${expr_type}_ne(')
-case Token.le: p.cgen.set_placeholder(ph, '${expr_type}_le(')
-case Token.ge: p.cgen.set_placeholder(ph, '${expr_type}_ge(')
-case Token.gt: p.cgen.set_placeholder(ph, '${expr_type}_gt(')
-case Token.lt: p.cgen.set_placeholder(ph, '${expr_type}_lt(')
+case TokenKind.eq: p.cgen.set_placeholder(ph, '${expr_type}_eq(')
+case TokenKind.ne: p.cgen.set_placeholder(ph, '${expr_type}_ne(')
+case TokenKind.le: p.cgen.set_placeholder(ph, '${expr_type}_le(')
+case TokenKind.ge: p.cgen.set_placeholder(ph, '${expr_type}_ge(')
+case TokenKind.gt: p.cgen.set_placeholder(ph, '${expr_type}_gt(')
+case TokenKind.lt: p.cgen.set_placeholder(ph, '${expr_type}_lt(')
}
}
}
@@ -2477,7 +2475,7 @@ fn (p mut Parser) expression() string {
return 'int'
}
// + - | ^
-for p.tok in [Token.plus, .minus, .pipe, .amp, .xor] {
+for p.tok in [TokenKind.plus, .minus, .pipe, .amp, .xor] {
tok_op := p.tok
if typ == 'bool' {
p.error('operator ${p.tok.str()} not defined on bool ')
@@ -2576,7 +2574,7 @@ fn (p mut Parser) unary() string {
mut typ := ''
tok := p.tok
switch tok {
-case Token.not:
+case TokenKind.not:
p.gen('!')
p.check(.not)
// typ should be bool type
@@ -2585,7 +2583,7 @@ fn (p mut Parser) unary() string {
p.error('operator ! requires bool type, not `$typ`')
}
-case Token.bit_not:
+case TokenKind.bit_not:
p.gen('~')
p.check(.bit_not)
typ = p.bool_expression()
@@ -2606,7 +2604,7 @@ fn (p mut Parser) factor() string {
p.gen('opt_none()')
p.check(.key_none)
return p.expected_type
-case Token.number:
+case TokenKind.number:
typ = 'int'
// Check if float (`1.0`, `1e+3`) but not if is hexa
if (p.lit.contains('.') || (p.lit.contains('e') || p.lit.contains('E'))) &&
@@ -2624,13 +2622,13 @@ fn (p mut Parser) factor() string {
}
p.gen(p.lit)
p.fgen(p.lit)
-case Token.minus:
+case TokenKind.minus:
p.gen('-')
p.fgen('-')
p.next()
return p.factor()
// Variable
-case Token.key_sizeof:
+case TokenKind.key_sizeof:
p.gen('sizeof(')
p.fgen('sizeof(')
p.next()
@@ -2640,10 +2638,10 @@ fn (p mut Parser) factor() string {
p.gen('$sizeof_typ)')
p.fgen('$sizeof_typ)')
return 'int'
-case Token.amp, Token.dot, Token.mul:
+case TokenKind.amp, TokenKind.dot, TokenKind.mul:
// (dot is for enum vals: `.green`)
return p.name_expr()
-case Token.name:
+case TokenKind.name:
// map[string]int
if p.lit == 'map' && p.peek() == .lsbr {
return p.map_init()
@@ -2660,7 +2658,7 @@ fn (p mut Parser) factor() string {
//}
typ = p.name_expr()
return typ
-case Token.key_default:
+case TokenKind.key_default:
p.next()
p.next()
name := p.check_name()
@@ -2670,7 +2668,7 @@ fn (p mut Parser) factor() string {
p.gen('default(T)')
p.next()
return 'T'
-case Token.lpar:
+case TokenKind.lpar:
//p.gen('(/*lpar*/')
p.gen('(')
p.check(.lpar)
@@ -2684,38 +2682,38 @@ fn (p mut Parser) factor() string {
p.ptr_cast = false
p.gen(')')
return typ
-case Token.chartoken:
+case TokenKind.chartoken:
p.char_expr()
typ = 'byte'
return typ
-case Token.str:
+case TokenKind.str:
p.string_expr()
typ = 'string'
return typ
-case Token.key_false:
+case TokenKind.key_false:
typ = 'bool'
p.gen('0')
p.fgen('false')
-case Token.key_true:
+case TokenKind.key_true:
typ = 'bool'
p.gen('1')
p.fgen('true')
-case Token.lsbr:
+case TokenKind.lsbr:
// `[1,2,3]` or `[]` or `[20]byte`
// TODO have to return because arrayInit does next()
// everything should do next()
return p.array_init()
-case Token.lcbr:
+case TokenKind.lcbr:
// `m := { 'one': 1 }`
if p.peek() == .str {
return p.map_init()
}
// { user | name :'new name' }
return p.assoc()
-case Token.key_if:
+case TokenKind.key_if:
typ = p.if_st(true, 0)
return typ
-case Token.key_match:
+case TokenKind.key_match:
typ = p.match_statement(true)
return typ
default:

View File

@@ -33,7 +33,7 @@ mut:
fmt_out strings.Builder
fmt_indent int
fmt_line_empty bool
-prev_tok Token
+prev_tok TokenKind
fn_name string // needed for @FN
should_print_line_on_error bool
should_print_errors_in_color bool
@@ -84,11 +84,11 @@ fn new_scanner(text string) &Scanner {
// TODO remove once multiple return values are implemented
struct ScanRes {
-tok Token
+tok TokenKind
lit string
}
-fn scan_res(tok Token, lit string) ScanRes {
+fn scan_res(tok TokenKind, lit string) ScanRes {
return ScanRes{tok, lit}
}
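ScanRes exists only because V lacked multiple return values at this point: each scan() call hands back one (kind, literal) pair packed into a struct. A hedged sketch of how such a pair travels into the parser's token cache, mirroring scan_tokens() above:

res := p.scanner.scan() // one ScanRes per token
p.tokens << Token{
	tok: res.tok
	lit: res.lit
	line_nr: p.scanner.line_nr
}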

View File

@@ -4,7 +4,7 @@
module main
-enum Token {
+enum TokenKind {
eof
name // user
number // 123
@@ -116,115 +116,115 @@ enum Token {
// Keywords['return'] == .key_return
fn build_keys() map[string]int {
mut res := map[string]int
-for t := int(Token.keyword_beg) + 1; t < int(Token.keyword_end); t++ {
+for t := int(TokenKind.keyword_beg) + 1; t < int(TokenKind.keyword_end); t++ {
key := TokenStr[t]
res[key] = int(t)
}
return res
}
-// TODO remove once we have `enum Token { name('name') if('if') ... }`
+// TODO remove once we have `enum TokenKind { name('name') if('if') ... }`
fn build_token_str() []string {
mut s := [''].repeat(NrTokens)
-s[Token.keyword_beg] = ''
-s[Token.keyword_end] = ''
-s[Token.eof] = 'eof'
-s[Token.name] = 'name'
-s[Token.number] = 'number'
-s[Token.str] = 'STR'
-s[Token.chartoken] = 'char'
-s[Token.plus] = '+'
-s[Token.minus] = '-'
-s[Token.mul] = '*'
-s[Token.div] = '/'
-s[Token.mod] = '%'
-s[Token.xor] = '^'
-s[Token.bit_not] = '~'
-s[Token.pipe] = '|'
-s[Token.hash] = '#'
-s[Token.amp] = '&'
-s[Token.inc] = '++'
-s[Token.dec] = '--'
-s[Token.and] = '&&'
-s[Token.logical_or] = '||'
-s[Token.not] = '!'
-s[Token.dot] = '.'
-s[Token.dotdot] = '..'
-s[Token.ellipsis] = '...'
-s[Token.comma] = ','
-//s[Token.at] = '@'
-s[Token.semicolon] = ';'
-s[Token.colon] = ':'
-s[Token.arrow] = '=>'
-s[Token.assign] = '='
-s[Token.decl_assign] = ':='
-s[Token.plus_assign] = '+='
-s[Token.minus_assign] = '-='
-s[Token.mult_assign] = '*='
-s[Token.div_assign] = '/='
-s[Token.xor_assign] = '^='
-s[Token.mod_assign] = '%='
-s[Token.or_assign] = '|='
-s[Token.and_assign] = '&='
-s[Token.righ_shift_assign] = '>>='
-s[Token.left_shift_assign] = '<<='
-s[Token.lcbr] = '{'
-s[Token.rcbr] = '}'
-s[Token.lpar] = '('
-s[Token.rpar] = ')'
-s[Token.lsbr] = '['
-s[Token.rsbr] = ']'
-s[Token.eq] = '=='
-s[Token.ne] = '!='
-s[Token.gt] = '>'
-s[Token.lt] = '<'
-s[Token.ge] = '>='
-s[Token.le] = '<='
-s[Token.question] = '?'
-s[Token.left_shift] = '<<'
-s[Token.righ_shift] = '>>'
-//s[Token.line_com] = '//'
-s[Token.nl] = 'NLL'
-s[Token.dollar] = '$'
-s[Token.key_assert] = 'assert'
-s[Token.key_struct] = 'struct'
-s[Token.key_if] = 'if'
-s[Token.key_else] = 'else'
-s[Token.key_return] = 'return'
-s[Token.key_module] = 'module'
-s[Token.key_sizeof] = 'sizeof'
-s[Token.key_go] = 'go'
-s[Token.key_goto] = 'goto'
-s[Token.key_const] = 'const'
-s[Token.key_mut] = 'mut'
-s[Token.key_type] = 'type'
-s[Token.key_for] = 'for'
-s[Token.key_switch] = 'switch'
-s[Token.key_case] = 'case'
-s[Token.func] = 'fn'
-s[Token.key_true] = 'true'
-s[Token.key_false] = 'false'
-s[Token.key_continue] = 'continue'
-s[Token.key_break] = 'break'
-s[Token.key_import] = 'import'
-s[Token.key_embed] = 'embed'
+s[TokenKind.keyword_beg] = ''
+s[TokenKind.keyword_end] = ''
+s[TokenKind.eof] = 'eof'
+s[TokenKind.name] = 'name'
+s[TokenKind.number] = 'number'
+s[TokenKind.str] = 'STR'
+s[TokenKind.chartoken] = 'char'
+s[TokenKind.plus] = '+'
+s[TokenKind.minus] = '-'
+s[TokenKind.mul] = '*'
+s[TokenKind.div] = '/'
+s[TokenKind.mod] = '%'
+s[TokenKind.xor] = '^'
+s[TokenKind.bit_not] = '~'
+s[TokenKind.pipe] = '|'
+s[TokenKind.hash] = '#'
+s[TokenKind.amp] = '&'
+s[TokenKind.inc] = '++'
+s[TokenKind.dec] = '--'
+s[TokenKind.and] = '&&'
+s[TokenKind.logical_or] = '||'
+s[TokenKind.not] = '!'
+s[TokenKind.dot] = '.'
+s[TokenKind.dotdot] = '..'
+s[TokenKind.ellipsis] = '...'
+s[TokenKind.comma] = ','
+//s[TokenKind.at] = '@'
+s[TokenKind.semicolon] = ';'
+s[TokenKind.colon] = ':'
+s[TokenKind.arrow] = '=>'
+s[TokenKind.assign] = '='
+s[TokenKind.decl_assign] = ':='
+s[TokenKind.plus_assign] = '+='
+s[TokenKind.minus_assign] = '-='
+s[TokenKind.mult_assign] = '*='
+s[TokenKind.div_assign] = '/='
+s[TokenKind.xor_assign] = '^='
+s[TokenKind.mod_assign] = '%='
+s[TokenKind.or_assign] = '|='
+s[TokenKind.and_assign] = '&='
+s[TokenKind.righ_shift_assign] = '>>='
+s[TokenKind.left_shift_assign] = '<<='
+s[TokenKind.lcbr] = '{'
+s[TokenKind.rcbr] = '}'
+s[TokenKind.lpar] = '('
+s[TokenKind.rpar] = ')'
+s[TokenKind.lsbr] = '['
+s[TokenKind.rsbr] = ']'
+s[TokenKind.eq] = '=='
+s[TokenKind.ne] = '!='
+s[TokenKind.gt] = '>'
+s[TokenKind.lt] = '<'
+s[TokenKind.ge] = '>='
+s[TokenKind.le] = '<='
+s[TokenKind.question] = '?'
+s[TokenKind.left_shift] = '<<'
+s[TokenKind.righ_shift] = '>>'
+//s[TokenKind.line_com] = '//'
+s[TokenKind.nl] = 'NLL'
+s[TokenKind.dollar] = '$'
+s[TokenKind.key_assert] = 'assert'
+s[TokenKind.key_struct] = 'struct'
+s[TokenKind.key_if] = 'if'
+s[TokenKind.key_else] = 'else'
+s[TokenKind.key_return] = 'return'
+s[TokenKind.key_module] = 'module'
+s[TokenKind.key_sizeof] = 'sizeof'
+s[TokenKind.key_go] = 'go'
+s[TokenKind.key_goto] = 'goto'
+s[TokenKind.key_const] = 'const'
+s[TokenKind.key_mut] = 'mut'
+s[TokenKind.key_type] = 'type'
+s[TokenKind.key_for] = 'for'
+s[TokenKind.key_switch] = 'switch'
+s[TokenKind.key_case] = 'case'
+s[TokenKind.func] = 'fn'
+s[TokenKind.key_true] = 'true'
+s[TokenKind.key_false] = 'false'
+s[TokenKind.key_continue] = 'continue'
+s[TokenKind.key_break] = 'break'
+s[TokenKind.key_import] = 'import'
+s[TokenKind.key_embed] = 'embed'
//Tokens[key_typeof] = 'typeof'
-s[Token.key_default] = 'default'
-s[Token.key_enum] = 'enum'
-s[Token.key_interface] = 'interface'
-s[Token.key_pub] = 'pub'
-s[Token.key_import_const] = 'import_const'
-s[Token.key_in] = 'in'
-s[Token.key_atomic] = 'atomic'
-s[Token.key_orelse] = 'or'
-s[Token.key_global] = '__global'
-s[Token.key_union] = 'union'
-s[Token.key_static] = 'static'
-s[Token.key_as] = 'as'
-s[Token.key_defer] = 'defer'
-s[Token.key_match] = 'match'
-s[Token.key_select] = 'select'
-s[Token.key_none] = 'none'
+s[TokenKind.key_default] = 'default'
+s[TokenKind.key_enum] = 'enum'
+s[TokenKind.key_interface] = 'interface'
+s[TokenKind.key_pub] = 'pub'
+s[TokenKind.key_import_const] = 'import_const'
+s[TokenKind.key_in] = 'in'
+s[TokenKind.key_atomic] = 'atomic'
+s[TokenKind.key_orelse] = 'or'
+s[TokenKind.key_global] = '__global'
+s[TokenKind.key_union] = 'union'
+s[TokenKind.key_static] = 'static'
+s[TokenKind.key_as] = 'as'
+s[TokenKind.key_defer] = 'defer'
+s[TokenKind.key_match] = 'match'
+s[TokenKind.key_select] = 'select'
+s[TokenKind.key_none] = 'none'
return s
}
@@ -234,8 +234,8 @@ const (
KEYWORDS = build_keys()
)
-fn key_to_token(key string) Token {
-a := Token(KEYWORDS[key])
+fn key_to_token(key string) TokenKind {
+a := TokenKind(KEYWORDS[key])
return a
}
@@ -243,11 +243,11 @@ fn is_key(key string) bool {
return int(key_to_token(key)) > 0
}
-fn (t Token) str() string {
+fn (t TokenKind) str() string {
return TokenStr[int(t)]
}
-fn (t Token) is_decl() bool {
+fn (t TokenKind) is_decl() bool {
// TODO i
//return t in [.key_enum, .key_interface, .func, .typ, .key_const,
//.key_import_const, .key_struct, .key_pub, .eof]
@@ -258,20 +258,20 @@ fn (t Token) is_decl() bool {
const (
AssignTokens = [
-Token.assign, Token.plus_assign, Token.minus_assign,
-Token.mult_assign, Token.div_assign, Token.xor_assign,
-Token.mod_assign,
-Token.or_assign, Token.and_assign, Token.righ_shift_assign,
-Token.left_shift_assign
+TokenKind.assign, TokenKind.plus_assign, TokenKind.minus_assign,
+TokenKind.mult_assign, TokenKind.div_assign, TokenKind.xor_assign,
+TokenKind.mod_assign,
+TokenKind.or_assign, TokenKind.and_assign, TokenKind.righ_shift_assign,
+TokenKind.left_shift_assign
]
)
-fn (t Token) is_assign() bool {
+fn (t TokenKind) is_assign() bool {
return t in AssignTokens
}
-fn (t []Token) contains(val Token) bool {
+fn (t []TokenKind) contains(val TokenKind) bool {
for tt in t {
if tt == val {
return true
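The keyword machinery above round-trips through the string table: build_token_str() defines the spelling of every kind, build_keys() inverts the keyword range into a map, key_to_token() casts the lookup result back to TokenKind, and is_key() tests for a nonzero hit. A small usage sketch under those definitions:

assert is_key('return')                  // 'return' is in the keyword range
t := key_to_token('return')              // yields TokenKind.key_return
println(t.str())                         // prints: return
assert TokenKind.plus_assign.is_assign() // via the AssignTokens list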

View File

@@ -38,7 +38,7 @@ fn (p mut Parser) fgenln(s string) {
}
/*
-fn (p mut Parser) peek() Token {
+fn (p mut Parser) peek() TokenKind {
for {
p.cgen.line = p.scanner.line_nr + 1
tok := p.scanner.peek()