v/compiler/token.v

// Copyright (c) 2019 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module main
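// Token enumerates every lexical token kind the compiler works with.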
enum Token {
eof
name
integer
strtoken
chartoken
plus
minus
mul
div
mod
xor
pipe
inc
dec
and
ortok
not
bit_not
question
comma
semicolon
colon
amp
hash
dollar
left_shift
righ_shift
// = := += -=
assign
decl_assign
plus_assign
minus_assign
div_assign
mult_assign
xor_assign
mod_assign
or_assign
and_assign
righ_shift_assign
left_shift_assign
// {} () []
lcbr
rcbr
lpar
rpar
lsbr
rsbr
// == != <= < >= >
eq
ne
gt
lt
ge
le
// comments
line_com
mline_com
nl
dot
dotdot
// keywords
keyword_beg
key_module
key_struct
key_if
key_else
key_return
key_go
key_const
key_import_const
key_mut
typ
key_enum
key_for
key_switch
MATCH
key_case
func
key_true
key_false
key_continue
key_break
key_embed
key_import
//typeof
key_default
key_assert
key_sizeof
key_in
key_atomic
key_interface
key_orelse
key_global
key_union
key_pub
key_goto
key_static
key_as
keyword_end
}
// build_keys generates a map from keyword strings to their Token values:
// Keywords['return'] == .key_return
fn build_keys() map_int {
mut res := map[string]int{}
for t := int(Token.keyword_beg) + 1; t < int(Token.keyword_end); t++ {
key := TOKENSTR[t]
res[key] = int(t)
}
return res
}
// TODO remove once we have `enum Token { name('name') if('if') ... }`
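// build_token_str fills the token -> string lookup table used by Token.str().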
fn build_token_str() []string {
mut s := [''; NrTokens]
s[Token.keyword_beg] = ''
s[Token.keyword_end] = ''
s[Token.eof] = '.eof'
s[Token.name] = '.name'
s[Token.integer] = '.integer'
s[Token.strtoken] = 'STR'
s[Token.chartoken] = '.chartoken'
s[Token.plus] = '+'
s[Token.minus] = '-'
s[Token.mul] = '*'
s[Token.div] = '/'
s[Token.mod] = '%'
s[Token.xor] = '^'
s[Token.bit_not] = '~'
s[Token.pipe] = '|'
s[Token.hash] = '#'
s[Token.amp] = '&'
s[Token.inc] = '++'
s[Token.dec] = '--'
s[Token.and] = '&&'
s[Token.ortok] = '||'
s[Token.not] = '!'
s[Token.dot] = '.'
s[Token.dotdot] = '..'
s[Token.comma] = ','
s[Token.semicolon] = ';'
s[Token.colon] = ':'
s[Token.assign] = '='
s[Token.decl_assign] = ':='
s[Token.plus_assign] = '+='
s[Token.minus_assign] = '-='
s[Token.mult_assign] = '*='
s[Token.div_assign] = '/='
s[Token.xor_assign] = '^='
s[Token.mod_assign] = '%='
s[Token.or_assign] = '|='
s[Token.and_assign] = '&='
s[Token.righ_shift_assign] = '>>='
s[Token.left_shift_assign] = '<<='
s[Token.lcbr] = '{'
s[Token.rcbr] = '}'
s[Token.lpar] = '('
s[Token.rpar] = ')'
s[Token.lsbr] = '['
s[Token.rsbr] = ']'
s[Token.eq] = '=='
s[Token.ne] = '!='
s[Token.gt] = '>'
s[Token.lt] = '<'
s[Token.ge] = '>='
s[Token.le] = '<='
s[Token.question] = '?'
s[Token.left_shift] = '<<'
s[Token.righ_shift] = '>>'
s[Token.line_com] = '//'
s[Token.nl] = 'NLL'
s[Token.dollar] = '$'
s[Token.key_assert] = 'assert'
s[Token.key_struct] = 'struct'
s[Token.key_if] = 'if'
s[Token.key_else] = 'else'
s[Token.key_return] = 'return'
s[Token.key_module] = 'module'
s[Token.key_sizeof] = 'sizeof'
s[Token.key_go] = 'go'
s[Token.key_goto] = 'goto'
s[Token.key_const] = 'const'
s[Token.key_mut] = 'mut'
s[Token.typ] = 'type'
s[Token.key_for] = 'for'
s[Token.key_switch] = 'switch'
//Tokens[MATCH] = 'match'
s[Token.key_case] = 'case'
s[Token.func] = 'fn'
s[Token.key_true] = 'true'
s[Token.key_false] = 'false'
s[Token.key_continue] = 'continue'
s[Token.key_break] = 'break'
s[Token.key_import] = 'import'
s[Token.key_embed] = 'embed'
//Tokens[TYP.eof] = 'typeof'
s[Token.key_default] = 'default'
s[Token.key_enum] = 'enum'
s[Token.key_interface] = 'interface'
s[Token.key_pub] = 'pub'
s[Token.key_import_const] = 'import_const'
s[Token.key_in] = 'in'
s[Token.key_atomic] = 'atomic'
s[Token.key_orelse] = 'or'
s[Token.key_global] = '__global'
s[Token.key_union] = 'union'
s[Token.key_static] = 'static'
s[Token.key_as] = 'as'
return s
}
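// Lookup tables built once at startup: TOKENSTR maps a token to its string form,
// KEYWORDS maps a keyword string back to its token value.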
const (
NrTokens = 140
TOKENSTR = build_token_str()
KEYWORDS = build_keys()
)
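// key_to_token returns the Token for a keyword string (.eof, i.e. 0, for non-keywords).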
fn key_to_token(key string) Token {
a := Token(KEYWORDS[key])
return a
}
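// is_key reports whether `key` is a V keyword.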
fn is_key(key string) bool {
return int(key_to_token(key)) > 0
}
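// str returns the printable form of a token, e.g. '+' for .plus and 'return' for .key_return.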
fn (t Token) str() string {
return TOKENSTR[int(t)]
}
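// is_decl reports whether the token can start a top-level declaration.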
fn (t Token) is_decl() bool {
// TODO return t in [.func, .typ, .key_const, .key_import_const, AT, .eof]
return t == .key_enum || t == .key_interface || t == .func ||
t == .key_struct || t == .typ ||
t == .key_const || t == .key_import_const || t == .key_pub || t == .eof
}
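// AssignTokens groups the assignment operators so is_assign() can test membership in one place.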
const (
AssignTokens = [
Token.assign, Token.plus_assign, Token.minus_assign,
Token.mult_assign, Token.div_assign, Token.xor_assign,
Token.mod_assign,
Token.or_assign, Token.and_assign, Token.righ_shift_assign,
Token.left_shift_assign
]
)
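// is_assign reports whether the token is an assignment operator such as '=', '+=' or '<<='.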
fn (t Token) is_assign() bool {
return t in AssignTokens
}
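// contains reports whether the token array includes val.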
fn (t []Token) contains(val Token) bool {
for tt in t {
if tt == val {
return true
}
}
return false
}