token: remove old keyword "embed"

parent ff63fb74d9
commit 05eeb8aa53
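For context (not part of the commit itself): with key_embed removed from the Kind enum and from the token string table, build_keys() no longer registers an 'embed' entry, so the scanner treats embed as an ordinary identifier. A minimal sketch of the observable effect, assuming the compiler's token module is importable as v.token (that import path is an assumption, it is not shown in this diff):

	import v.token

	fn main() {
		// is_key() consults the keyword map produced by build_keys()
		println(token.is_key('import')) // true: still a keyword
		println(token.is_key('embed'))  // false: no longer a keyword after this change
	}
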
@@ -60,15 +60,13 @@ pub enum Kind {
 	or_assign
 	and_assign
 	right_shift_assign
-	left_shift_assign
-	// {}  () []
+	left_shift_assign // {}  () []
 	lcbr
 	rcbr
 	lpar
 	rpar
 	lsbr
-	rsbr
-	// == != <= < >= >
+	rsbr // == != <= < >= >
 	eq
 	ne
 	gt
@@ -79,8 +77,7 @@ pub enum Kind {
 	nl
 	dot
 	dotdot
-	ellipsis
-	// keywords
+	ellipsis // keywords
 	keyword_beg
 	key_as
 	key_asm
@@ -91,7 +88,6 @@ pub enum Kind {
 	key_continue
 	key_defer
 	key_else
-	key_embed
 	key_enum
 	key_false
 	key_for
@@ -103,8 +99,7 @@ pub enum Kind {
 	key_import
 	key_in
 	key_interface
-	key_is
-	// key_it
+	key_is // key_it
 	key_match
 	key_module
 	key_mut
@@ -132,10 +127,9 @@ pub enum Kind {
 }
 
 const (
-	assign_tokens = [Kind.assign, .plus_assign, .minus_assign, .mult_assign,
-	.div_assign, .xor_assign, .mod_assign, .or_assign, .and_assign,
-	.right_shift_assign, .left_shift_assign]
-	nr_tokens = int(Kind._end_)
+	assign_tokens = [Kind.assign, .plus_assign, .minus_assign, .mult_assign, .div_assign, .xor_assign,
+		.mod_assign, .or_assign, .and_assign, .right_shift_assign, .left_shift_assign]
+	nr_tokens     = int(Kind._end_)
 )
 
 // @FN => will be substituted with the name of the current V function
@@ -165,14 +159,17 @@ pub enum AtKind {
 	vhash
 	vmod_file
 }
 
 const (
-	valid_at_tokens = ['@FN','@MOD','@STRUCT','@VEXE','@FILE','@LINE','@COLUMN','@VHASH','@VMOD_FILE']
+	valid_at_tokens = ['@FN', '@MOD', '@STRUCT', '@VEXE', '@FILE', '@LINE', '@COLUMN', '@VHASH',
+		'@VMOD_FILE',
+	]
 )
 
 // build_keys genereates a map with keywords' string values:
 // Keywords['return'] == .key_return
 fn build_keys() map[string]Kind {
-	mut res := map[string]Kind
+	mut res := map[string]Kind{}
 	for t in int(Kind.keyword_beg) + 1 .. int(Kind.keyword_end) {
 		key := token_str[t]
 		res[key] = t
@@ -182,7 +179,7 @@ fn build_keys() map[string]Kind {
 
 // TODO remove once we have `enum Kind { name('name') if('if') ... }`
 fn build_token_str() []string {
-	mut s := []string{len:(nr_tokens)}
+	mut s := []string{len: (nr_tokens)}
 	s[Kind.unknown] = 'unknown'
 	s[Kind.eof] = 'eof'
 	s[Kind.name] = 'name'
@@ -271,7 +268,6 @@ fn build_token_str() []string {
 	s[Kind.key_continue] = 'continue'
 	s[Kind.key_break] = 'break'
 	s[Kind.key_import] = 'import'
-	s[Kind.key_embed] = 'embed'
 	s[Kind.key_unsafe] = 'unsafe'
 	s[Kind.key_typeof] = 'typeof'
 	s[Kind.key_enum] = 'enum'
@@ -295,7 +291,7 @@ fn build_token_str() []string {
 
 const (
 	token_str = build_token_str()
-	keywords = build_keys()
+	keywords  = build_keys()
 )
 
 pub fn key_to_token(key string) Kind {
@@ -308,7 +304,8 @@ pub fn is_key(key string) bool {
 }
 
 pub fn is_decl(t Kind) bool {
-	return t in [.key_enum, .key_interface, .key_fn, .key_struct, .key_type, .key_const, .key_pub, .eof]
+	return t in
+		[.key_enum, .key_interface, .key_fn, .key_struct, .key_type, .key_const, .key_pub, .eof]
 }
 
 pub fn (t Kind) is_assign() bool {
@@ -339,7 +336,6 @@ pub const (
 	highest_prec = 8
 )
 */
-
 pub enum Precedence {
 	lowest
 	cond // OR or AND
@@ -357,8 +353,7 @@ pub enum Precedence {
 }
 
 pub fn build_precedences() []Precedence {
-	mut p := []Precedence{len:int(Kind._end_), cap:int(Kind._end_)}
-
+	mut p := []Precedence{len: int(Kind._end_), cap: int(Kind._end_)}
 	p[Kind.lsbr] = .index
 	p[Kind.dot] = .call
 	// `++` | `--` | `?`
@@ -398,7 +393,6 @@ pub fn build_precedences() []Precedence {
 	p[Kind.right_shift_assign] = .assign
 	p[Kind.mult_assign] = .assign
 	p[Kind.xor_assign] = .assign
-
 	p[Kind.key_in] = .in_as
 	p[Kind.not_in] = .in_as
 	p[Kind.key_as] = .in_as
@@ -406,7 +400,6 @@ pub fn build_precedences() []Precedence {
 	p[Kind.not_is] = .in_as
 	p[Kind.logical_or] = .cond
 	p[Kind.and] = .cond
-
 	return p
 }
 
@@ -426,15 +419,14 @@ pub fn (tok Token) is_scalar() bool {
 
 // is_unary returns true if the token can be in a unary expression
 pub fn (tok Token) is_unary() bool {
-	return tok.kind in [
-	// `+` | `-` | `!` | `~` | `*` | `&`
-	.plus, .minus, .not, .bit_not, .mul, .amp, .arrow]
+	return tok.kind in
+		[
+		/* `+` | `-` | `!` | `~` | `*` | `&` */.plus, .minus, .not, .bit_not, .mul, .amp, .arrow]
 }
 
 pub fn (tok Kind) is_relational() bool {
 	return tok in [
-	// `<` | `<=` | `>` | `>=`
-	.lt, .le, .gt, .ge, .eq, .ne]
+	/* `<` | `<=` | `>` | `>=` */.lt, .le, .gt, .ge, .eq, .ne]
 }
 
 pub fn (k Kind) is_start_of_type() bool {
@@ -446,9 +438,6 @@ pub fn (kind Kind) is_prefix() bool {
 }
 
 pub fn (kind Kind) is_infix() bool {
-	return kind in [.plus, .minus, .mod, .mul, .div, .eq, .ne, .gt, .lt, .key_in,
-	//
-	.key_as, .ge, .le, .logical_or, .xor, .not_in, .key_is, .not_is,
-	//
-	.and, .dot, .pipe, .amp, .left_shift, .right_shift, .arrow]
+	return kind in
+		[.plus, .minus, .mod, .mul, .div, .eq, .ne, .gt, .lt, .key_in, /*  */.key_as, .ge, .le, .logical_or, .xor, .not_in, .key_is, .not_is, /*  */.and, .dot, .pipe, .amp, .left_shift, .right_shift, .arrow]
 }
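A side note for orientation (not part of the diff): the is_relational() and is_assign() helpers that appear above are plain membership tests over Kind values. A hypothetical check, again assuming the module is importable as v.token:

	import v.token

	fn main() {
		println(token.Kind.le.is_relational())      // true: `<=` is a relational token
		println(token.Kind.plus_assign.is_assign()) // true: `+=` is listed in assign_tokens
		println(token.Kind.dot.is_relational())     // false: `.` is not relational
	}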