cgen: lots of fixes

pull/4027/head
Alexander Medvednikov 2020-03-15 00:46:08 +01:00
parent 50ed4004f4
commit 843bb6dac1
15 changed files with 107 additions and 79 deletions


@@ -982,7 +982,7 @@ fn (p mut Parser) check_name() string {
 fn (p mut Parser) check_string() string {
 s := p.lit
-p.check(.str)
+p.check(.string)
 return s
 }
@@ -1005,7 +1005,7 @@ fn (p &Parser) strtok() string {
 }
 return '`$p.lit`'
 }
-if p.tok == .str {
+if p.tok == .string{
 if p.lit.contains("'") && !p.lit.contains('"') {
 return '"$p.lit"'
 }
@@ -1296,7 +1296,7 @@ fn (p &Parser) print_tok() {
 println(p.lit)
 return
 }
-if p.tok == .str {
+if p.tok == .string{
 println('"$p.lit"')
 return
 }
@@ -2613,7 +2613,7 @@ fn (p mut Parser) map_init() string {
 for {
 key := p.lit
 keys_gen += 'tos3("$key"), '
-p.check(.str)
+p.check(.string)
 p.check(.colon)
 p.fspace()
 t,val_expr := p.tmp_expr()
@@ -2954,7 +2954,7 @@ fn (p mut Parser) return_st() {
 }
 else {
 // Don't allow `return val` in functions that don't return anything
-if p.tok == .name || p.tok == .number || p.tok == .str {
+if p.tok == .name || p.tok == .number || p.tok == .string{
 p.error_with_token_index('function `$p.cur_fn.name` should not return a value', p.cur_fn.fn_name_token_idx)
 }
 p.genln(deferred_text)


@@ -11,7 +11,7 @@ fn (p mut Parser) inline_asm() {
 p.check(.lcbr)
 s := p.check_string()
 p.genln('asm("$s"')
-for p.tok == .str {
+for p.tok == .string{
 p.genln('"$p.lit"')
 p.next()
 }


@@ -286,12 +286,12 @@ fn (p mut Parser) name_expr() string {
 name = p.generic_dispatch.inst[name]
 }
 // Raw string (`s := r'hello \n ')
-if name == 'r' && p.peek() == .str && p.prev_tok != .str_dollar {
+if name == 'r' && p.peek() == .string&& p.prev_tok != .str_dollar {
 p.string_expr()
 return 'string'
 }
 // C string (a zero terminated one) C.func( c'hello' )
-if name == 'c' && p.peek() == .str && p.prev_tok != .str_dollar {
+if name == 'c' && p.peek() == .string&& p.prev_tok != .str_dollar {
 p.string_expr()
 return 'charptr'
 }
@@ -833,7 +833,7 @@ fn (p mut Parser) factor() string {
 // TODO: make this work for arbitrary sumtype expressions, not just simple vars
 // NB: __SumTypeNames__[xxx][0] is the name of the sumtype itself;
 // idx>0 are the names of the sumtype children
 p.gen('tos3(__SumTypeNames__${type_of_var}[${vname}.typ])')
 }else{
 p.gen('tos3("$type_of_var")')
 }
@@ -913,7 +913,7 @@ fn (p mut Parser) factor() string {
 typ = 'byte'
 return typ
 }
-.str {
+.string{
 p.string_expr()
 typ = 'string'
 return typ
@@ -934,7 +934,7 @@ fn (p mut Parser) factor() string {
 }
 .lcbr {
 // `m := { 'one': 1 }`
-if p.peek() == .str {
+if p.peek() == .string{
 return p.map_init()
 }
 peek2 := p.tokens[p.token_idx + 1]


@@ -1115,7 +1115,7 @@ fn (p mut Parser) fn_call_args(f mut Fn, generic_param_types []string) {
 p.gen('/*YY f=$f.name arg=$arg.name is_moved=$arg.is_moved*/string_clone(')
 }
 // x64 println gen
-if p.pref.backend == .x64 && i == 0 && f.name == 'println' && p.tok == .str && p.peek() == .rpar {
+if p.pref.backend == .x64 && i == 0 && f.name == 'println' && p.tok == .string&& p.peek() == .rpar {
 //p.x64.gen_print(p.lit)
 }
 mut typ := p.bool_expression()


@@ -349,10 +349,10 @@ fn (s mut Scanner) scan() ScanRes {
 if s.inter_end {
 if s.text[s.pos] == s.quote {
 s.inter_end = false
-return scan_res(.str, '')
+return scan_res(.string, '')
 }
 s.inter_end = false
-return scan_res(.str, s.ident_string())
+return scan_res(.string, s.ident_string())
 }
 s.skip_whitespace()
 // end of file
@@ -473,7 +473,7 @@ fn (s mut Scanner) scan() ScanRes {
 return scan_res(.question, '')
 }
 single_quote, double_quote {
-return scan_res(.str, s.ident_string())
+return scan_res(.string, s.ident_string())
 }
 `\`` {
 // ` // apostrophe balance comment. do not remove
@@ -513,9 +513,9 @@ fn (s mut Scanner) scan() ScanRes {
 s.pos++
 if s.text[s.pos] == s.quote {
 s.inside_string = false
-return scan_res(.str, '')
+return scan_res(.string, '')
 }
-return scan_res(.str, s.ident_string())
+return scan_res(.string, s.ident_string())
 }
 else {
 return scan_res(.rcbr, '')
@@ -558,19 +558,19 @@ fn (s mut Scanner) scan() ScanRes {
 // println( 'file: ' + @FILE + ' | line: ' + @LINE + ' | fn: ' + @FN)
 // ... which is useful while debugging/tracing
 if name == 'FN' {
-return scan_res(.str, s.fn_name)
+return scan_res(.string, s.fn_name)
 }
 if name == 'FILE' {
-return scan_res(.str, cescaped_path(os.realpath(s.file_path)))
+return scan_res(.string, cescaped_path(os.realpath(s.file_path)))
 }
 if name == 'LINE' {
-return scan_res(.str, (s.line_nr + 1).str())
+return scan_res(.string, (s.line_nr + 1).str())
 }
 if name == 'COLUMN' {
-return scan_res(.str, (s.current_column()).str())
+return scan_res(.string, (s.current_column()).str())
 }
 if name == 'VHASH' {
-return scan_res(.str, vhash())
+return scan_res(.string, vhash())
 }
 if !is_key(name) {
 s.error('@ must be used before keywords (e.g. `@type string`)')
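Note on the hunk above: the `@FN`, `@FILE`, `@LINE`, `@COLUMN` and `@VHASH` comptime identifiers are all returned as plain string tokens, which is why every `scan_res(.str, ...)` here becomes `scan_res(.string, ...)`. For readers coming from C, these identifiers play roughly the same role as C's `__FILE__`, `__LINE__` and `__func__`; a minimal C comparison (illustrative only, not part of this commit):

    #include <stdio.h>

    /* The C analogues of V's @FILE, @LINE and @FN comptime identifiers,
       which the scanner above turns into string tokens. */
    int main(void) {
        printf("file: %s | line: %d | fn: %s\n", __FILE__, __LINE__, __func__);
        return 0;
    }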


@@ -52,7 +52,7 @@ fn (p mut Parser) string_expr() {
 mut args := '"'
 mut format := '"'
 mut complex_inter := false // for vfmt
-for p.tok == .str {
+for p.tok == .string{
 // Add the string between %d's
 p.lit = p.lit.replace('%', '%%')
 format += format_str(p.lit)
@@ -64,7 +64,7 @@ fn (p mut Parser) string_expr() {
 p.check(.str_dollar)
 // If there's no string after current token, it means we are in
 // a complex expression (`${...}`)
-if p.peek() != .str {
+if p.peek() != .string{
 p.fgen('{')
 complex_inter = true
 }


@@ -304,7 +304,7 @@ fn (p mut Parser) struct_decl(generic_param_types []string) {
 .name {
 val = p.check_name()
 }
-.str {
+.string{
 val = p.check_string()
 }
 else {


@@ -15,7 +15,7 @@ enum TokenKind {
 eof
 name // user
 number // 123
-str // 'foo'
+string // 'foo'
 str_inter // 'name=$user.name'
 chartoken // `A`
 plus
@@ -143,7 +143,7 @@ fn build_token_str() []string {
 s[TokenKind.eof] = 'eof'
 s[TokenKind.name] = 'name'
 s[TokenKind.number] = 'number'
-s[TokenKind.str] = 'STR'
+s[TokenKind.string] = 'STR'
 s[TokenKind.chartoken] = 'char'
 s[TokenKind.plus] = '+'
 s[TokenKind.minus] = '-'
@@ -292,7 +292,7 @@ pub fn (t Token) str() string {
 if t.tok == .chartoken {
 return '`$t.lit`'
 }
-if t.tok == .str {
+if t.tok == .string {
 return "'$t.lit'"
 }
 if t.tok == .eof {


@@ -178,8 +178,8 @@ mut:
 pub struct CallArg {
 pub:
 is_mut bool
 expr Expr
 mut:
 typ table.Type
 expected_type table.Type
@@ -437,8 +437,9 @@ pub struct EnumVal {
 pub:
 enum_name string
 val string
+mod string // for full path `mod_Enum_val`
+// typ table.Type
 pos token.Position
-// name string
 }
 pub struct EnumDecl {


@@ -132,6 +132,7 @@ pub fn (c mut Checker) infix_expr(infix_expr mut ast.InfixExpr) table.Type {
 infix_expr.left_type = left_type
 c.expected_type = left_type
 right_type := c.expr(infix_expr.right)
+infix_expr.right_type = right_type
 if !c.table.check(right_type, left_type) {
 left := c.table.get_type_symbol(left_type)
 right := c.table.get_type_symbol(right_type)


@@ -63,7 +63,7 @@ pub fn (g mut Gen) typ(t table.Type) string {
 if styp.starts_with('C__') {
 styp = styp[3..]
 }
-if styp in ['stat', 'dirent*'] {
+if styp in ['stat', 'dirent*', 'tm'] {
 // TODO perf and other C structs
 styp = 'struct $styp'
 }
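Adding 'tm' here means V's `C.tm` is emitted as `struct tm` in the generated C, like `stat` and `dirent`: `<time.h>` only declares the struct tag, with no typedef. A standalone C illustration of why the explicit tag is needed (context only, not code generated by this commit):

    #include <stdio.h>
    #include <time.h>

    int main(void) {
        time_t now = time(NULL);
        /* The type is only visible as `struct tm` in C; a bare `tm` would not
           compile, which is why cgen prefixes such names with `struct`. */
        struct tm *info = localtime(&now);
        printf("year: %d\n", info->tm_year + 1900);
        return 0;
    }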
@@ -106,9 +106,8 @@ pub fn (g mut Gen) write_typedef_types() {
 styp := typ.name.replace('.', '__')
 g.definitions.writeln('typedef map $styp;')
 }
-.function {
-// TODO:
-}
+// TODO:
+.function {}
 else {
 continue
 }
@@ -290,7 +289,10 @@ fn (g mut Gen) stmt(node ast.Stmt) {
 }
 ast.HashStmt {
 // #include etc
-g.definitions.writeln('#$it.val')
+typ := it.val.all_before(' ')
+if typ in ['#include', '#define'] {
+g.definitions.writeln('#$it.val')
+}
 }
 ast.Import {}
 ast.Return {
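The `ast.HashStmt` branch above now forwards a hash statement to the C `definitions` section only when its first word is `#include` or `#define`; any other hash line (a `#flag` directive, for example) is no longer written there verbatim. A hedged C sketch of the same filtering rule, where `is_forwarded_hash_stmt` is a hypothetical helper and not compiler code:

    #include <stdio.h>
    #include <string.h>

    /* Mimics `it.val.all_before(' ')` plus the check `typ in ['#include', '#define']`. */
    static int is_forwarded_hash_stmt(const char *val) {
        size_t first_word = strcspn(val, " ");  /* length of the text before the first space */
        return (first_word == 8 && strncmp(val, "#include", 8) == 0) ||
               (first_word == 7 && strncmp(val, "#define", 7) == 0);
    }

    int main(void) {
        printf("%d\n", is_forwarded_hash_stmt("#include <math.h>")); /* 1: written to definitions */
        printf("%d\n", is_forwarded_hash_stmt("#define FOO 1"));     /* 1: written to definitions */
        printf("%d\n", is_forwarded_hash_stmt("#flag -lpthread"));   /* 0: skipped */
        return 0;
    }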
@@ -466,7 +468,7 @@ fn (g mut Gen) fn_args(args []table.Arg, is_variadic bool) {
 no_names := args.len > 0 && args[0].name == 'arg_1'
 for i, arg in args {
 arg_type_sym := g.table.get_type_symbol(arg.typ)
-mut arg_type_name := arg_type_sym.name.replace('.', '__')
+mut arg_type_name := g.typ(arg.typ) // arg_type_sym.name.replace('.', '__')
 is_varg := i == args.len - 1 && is_variadic
 if is_varg {
 g.varaidic_args[int(arg.typ).str()] = 0
@@ -486,14 +488,14 @@ fn (g mut Gen) fn_args(args []table.Arg, is_variadic bool) {
 }
 else {
 mut nr_muls := table.type_nr_muls(arg.typ)
-mut s := arg_type_name + ' ' + arg.name
+s := arg_type_name + ' ' + arg.name
 if arg.is_mut {
 // mut arg needs one *
 nr_muls = 1
 }
-if nr_muls > 0 && !is_varg {
-s = arg_type_name + strings.repeat(`*`, nr_muls) + ' ' + arg.name
-}
+// if nr_muls > 0 && !is_varg {
+// s = arg_type_name + strings.repeat(`*`, nr_muls) + ' ' + arg.name
+// }
 g.write(s)
 g.definitions.write(s)
 }
@@ -625,6 +627,7 @@ fn (g mut Gen) expr(node ast.Expr) {
 g.write("'$it.val'")
 }
 ast.EnumVal {
+// g.write('/*EnumVal*/${it.mod}${it.enum_name}_$it.val')
 g.write('${it.enum_name}_$it.val')
 }
 ast.FloatLiteral {
@@ -718,8 +721,16 @@ fn (g mut Gen) expr(node ast.Expr) {
 }
 g.write('if (')
 for i, expr in branch.exprs {
-g.write('$tmp == ')
+if type_sym.kind == .string {
+g.write('string_eq($tmp, ')
+}
+else {
+g.write('$tmp == ')
+}
 g.expr(expr)
+if type_sym.kind == .string {
+g.write(')')
+}
 if i < branch.exprs.len - 1 {
 g.write(' || ')
 }
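With the hunk above, `match` branches over a `string` cond are compared with `string_eq(...)` instead of `==`, since the temporary holds a V string value and a raw C `==` would not compare contents. A self-contained C sketch of the emitted shape; the `string`/`tos3`/`string_eq` stand-ins below only approximate V's builtins:

    #include <stdio.h>
    #include <string.h>

    /* Stand-ins for V's builtin string type and helpers, so the sketch compiles on its own. */
    typedef struct { const char *str; int len; } string;
    static string tos3(const char *s) { string r = { s, (int)strlen(s) }; return r; }
    static int string_eq(string a, string b) {
        return a.len == b.len && memcmp(a.str, b.str, (size_t)a.len) == 0;
    }

    int main(void) {
        string tmp1 = tos3("two");  /* the match cond stored in a temporary */
        /* Roughly the if-chain the generator now emits for string branches: */
        if (string_eq(tmp1, tos3("one"))) {
            puts("first branch");
        }
        else if (string_eq(tmp1, tos3("two")) || string_eq(tmp1, tos3("three"))) {
            puts("second branch");
        }
        else {
            puts("else branch");
        }
        return 0;
    }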
@@ -948,16 +959,29 @@ fn (g mut Gen) infix_expr(node ast.InfixExpr) {
 }
 // arr << val
 else if node.op == .left_shift && g.table.get_type_symbol(node.left_type).kind == .array {
-sym := g.table.get_type_symbol(node.left_type)
-info := sym.info as table.Array
-elem_type_str := g.typ(info.elem_type)
-// g.write('array_push(&')
 tmp := g.new_tmp_var()
-g.write('_PUSH(&')
-g.expr(node.left)
-g.write(', (')
-g.expr(node.right)
-g.write('), $tmp, $elem_type_str)')
+sym := g.table.get_type_symbol(node.left_type)
+right_sym := g.table.get_type_symbol(node.right_type)
+if right_sym.kind == .array {
+// push an array => PUSH_MANY
+g.write('_PUSH_MANY(&')
+g.expr(node.left)
+g.write(', (')
+g.expr(node.right)
+styp := g.typ(node.left_type)
+g.write('), $tmp, $styp)')
+}
+else {
+// push a single element
+info := sym.info as table.Array
+elem_type_str := g.typ(info.elem_type)
+// g.write('array_push(&')
+g.write('_PUSH(&')
+g.expr(node.left)
+g.write(', (')
+g.expr(node.right)
+g.write('), $tmp, $elem_type_str)')
+}
 }
 else {
 // if node.op == .dot {
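`<<` on arrays now branches on the right-hand side's type, which is what the checker change above (storing `infix_expr.right_type`) makes possible: pushing a single element still generates `_PUSH`, while appending another array generates `_PUSH_MANY`. The real macros operate on V's generic `array` type; the sketch below uses simplified int-only stand-ins just to show the two shapes:

    #include <stdio.h>
    #include <stdlib.h>

    /* Simplified stand-in for V's array type, specialised to int for the sketch. */
    typedef struct { int *data; int len; int cap; } array_int;

    static void array_push(array_int *a, int val) {
        if (a->len == a->cap) {
            a->cap = a->cap ? a->cap * 2 : 4;
            a->data = realloc(a->data, sizeof(int) * (size_t)a->cap);
        }
        a->data[a->len++] = val;
    }

    /* `arr << elem`: evaluate the element once into a temporary, push it. */
    #define _PUSH(arr, expr, tmp, typ) { typ tmp = (expr); array_push((arr), tmp); }

    /* `arr << other_arr`: evaluate the right-hand array once, push each of its elements. */
    #define _PUSH_MANY(arr, expr, tmp, typ) { typ tmp = (expr); \
        for (int _i = 0; _i < tmp.len; _i++) array_push((arr), tmp.data[_i]); }

    int main(void) {
        array_int a = {0}, b = {0};
        _PUSH(&a, 1, tmp1, int)                 /* a << 1 */
        array_push(&b, 2);
        array_push(&b, 3);
        _PUSH_MANY(&a, b, tmp2, array_int)      /* a << b */
        for (int i = 0; i < a.len; i++) printf("%d ", a.data[i]);  /* prints: 1 2 3 */
        printf("\n");
        free(a.data);
        free(b.data);
        return 0;
    }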
@@ -1156,16 +1180,16 @@ fn (g mut Gen) call_args(args []ast.CallArg) {
 for i, arg in args {
 if table.type_is_variadic(arg.expected_type) {
 struct_name := 'varg_' + g.typ(arg.expected_type).replace('*', '_ptr')
-len := args.len-i
+len := args.len - i
 type_str := int(arg.expected_type).str()
 if len > g.varaidic_args[type_str] {
 g.varaidic_args[type_str] = len
 }
 g.write('($struct_name){.len=$len,.args={')
-for j in i..args.len {
+for j in i .. args.len {
 g.ref_or_deref_arg(args[j])
 g.expr(args[j].expr)
-if j < args.len-1 {
+if j < args.len - 1 {
 g.write(', ')
 }
 }
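As the hunk above shows, variadic arguments are packed at the call site into a generated `varg_<type>` struct with a `.len` field and a fixed-size `.args` array, and `g.varaidic_args` (the misspelling is in the source) records the largest count seen per type so the struct can be declared big enough. A hand-written C sketch of the calling convention this produces; the struct and function names are illustrative, not taken from real generated output:

    #include <stdio.h>

    /* Illustrative shape of the struct cgen emits for a variadic `...int` parameter.
       The real name is derived from the type, and `.args` is sized from the
       largest argument count seen at any call site. */
    typedef struct { int len; int args[3]; } varg_int;

    static int sum(varg_int vals) {
        int total = 0;
        for (int i = 0; i < vals.len; i++) {
            total += vals.args[i];
        }
        return total;
    }

    int main(void) {
        /* Roughly what a V call like `sum(1, 2, 3)` is lowered to: */
        printf("%d\n", sum((varg_int){ .len = 3, .args = { 1, 2, 3 } }));  /* prints 6 */
        return 0;
    }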


@@ -46,6 +46,7 @@ const (
 // c_headers
 #include <stdio.h> // TODO remove all these includes, define all function signatures and types manually
 #include <stdlib.h>
+#include <float.h>
 //#include "fns.h"
 #include <signal.h>
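`<float.h>` joins the builtin C headers, presumably so generated code can rely on the standard floating-point limit macros. For reference, the kind of constants it brings in:

    #include <float.h>
    #include <stdio.h>

    int main(void) {
        /* Available to the generated C once <float.h> is always included: */
        printf("FLT_EPSILON = %g\n", FLT_EPSILON);
        printf("DBL_EPSILON = %g\n", DBL_EPSILON);
        printf("DBL_MAX     = %g\n", DBL_MAX);
        return 0;
    }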


@@ -48,7 +48,7 @@ pub fn parse_stmt(text string, table &table.Table, scope &ast.Scope) ast.Stmt {
 pref: &pref.Preferences{}
 scope: scope
 // scope: &ast.Scope{start_pos: 0, parent: 0}
 }
 p.init_parse_fns()
 p.read_first_token()
@@ -72,7 +72,7 @@ pub fn parse_file(path string, table &table.Table, comments_mode scanner.Comment
 parent: 0
 }
 // comments_mode: comments_mode
 }
 p.read_first_token()
 // p.scope = &ast.Scope{start_pos: p.tok.position(), parent: 0}
@@ -610,9 +610,10 @@ pub fn (p mut Parser) name_expr() ast.Expr {
 p.expr_mod = ''
 return ast.EnumVal{
 enum_name: enum_name // lp.prepend_mod(enum_name)
 val: val
 pos: p.tok.position()
+mod: mod
 }
 }
 else {
@@ -633,7 +634,7 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
 .name {
 node = p.name_expr()
 }
-.str {
+.string {
 node = p.string_expr()
 }
 .dot {
@@ -702,7 +703,7 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
 // Map `{"age": 20}` or `{ x | foo:bar, a:10 }`
 .lcbr {
 p.next()
-if p.tok.kind == .str {
+if p.tok.kind == .string{
 mut keys := []ast.Expr
 mut vals := []ast.Expr
 for p.tok.kind != .rcbr && p.tok.kind != .eof {
@@ -940,7 +941,7 @@ fn (p mut Parser) infix_expr(left ast.Expr) ast.Expr {
 left: left
 right: right
 // right_type: typ
 op: op
 pos: pos
 }
@@ -1054,7 +1055,7 @@ fn (p mut Parser) for_statement() ast.Stmt {
 p.scope.register_var(ast.Var{
 name: var_name
 // expr: cond
 })
 stmts := p.parse_block()
 // println('nr stmts=$stmts.len')
@@ -1149,11 +1150,11 @@ fn (p mut Parser) if_expr() ast.Expr {
 stmts: stmts
 else_stmts: else_stmts
 // typ: typ
 pos: pos
 has_else: has_else
 // left: left
 }
 return node
 }
@@ -1168,7 +1169,7 @@ fn (p mut Parser) string_expr() ast.Expr {
 return node
 }
 // Handle $ interpolation
-for p.tok.kind == .str {
+for p.tok.kind == .string{
 p.next()
 if p.tok.kind != .str_dollar {
 continue
@@ -1328,7 +1329,7 @@ fn (p mut Parser) const_decl() ast.ConstDecl {
 fields << ast.Field{
 name: name
 // typ: typ
 }
 exprs << expr
 // TODO: once consts are fixed reg here & update in checker


@@ -359,10 +359,10 @@ pub fn (s mut Scanner) scan() token.Token {
 if s.inter_end {
 if s.text[s.pos] == s.quote {
 s.inter_end = false
-return s.scan_res(.str, '')
+return s.scan_res(.string, '')
 }
 s.inter_end = false
-return s.scan_res(.str, s.ident_string())
+return s.scan_res(.string, s.ident_string())
 }
 s.skip_whitespace()
 // end of file
@@ -483,7 +483,7 @@ pub fn (s mut Scanner) scan() token.Token {
 return s.scan_res(.question, '')
 }
 single_quote, double_quote {
-return s.scan_res(.str, s.ident_string())
+return s.scan_res(.string, s.ident_string())
 }
 `\`` {
 // ` // apostrophe balance comment. do not remove
@@ -523,9 +523,9 @@ pub fn (s mut Scanner) scan() token.Token {
 s.pos++
 if s.text[s.pos] == s.quote {
 s.inside_string = false
-return s.scan_res(.str, '')
+return s.scan_res(.string, '')
 }
-return s.scan_res(.str, s.ident_string())
+return s.scan_res(.string, s.ident_string())
 }
 else {
 return s.scan_res(.rcbr, '')
@@ -568,19 +568,19 @@ pub fn (s mut Scanner) scan() token.Token {
 // println( 'file: ' + @FILE + ' | line: ' + @LINE + ' | fn: ' + @FN)
 // ... which is useful while debugging/tracing
 if name == 'FN' {
-return s.scan_res(.str, s.fn_name)
+return s.scan_res(.string, s.fn_name)
 }
 if name == 'FILE' {
-return s.scan_res(.str, cescaped_path(os.realpath(s.file_path)))
+return s.scan_res(.string, cescaped_path(os.realpath(s.file_path)))
 }
 if name == 'LINE' {
-return s.scan_res(.str, (s.line_nr + 1).str())
+return s.scan_res(.string, (s.line_nr + 1).str())
 }
 if name == 'COLUMN' {
-return s.scan_res(.str, (s.current_column()).str())
+return s.scan_res(.string, (s.current_column()).str())
 }
 if name == 'VHASH' {
-return s.scan_res(.str, vhash())
+return s.scan_res(.string, vhash())
 }
 if !token.is_key(name) {
 s.error('@ must be used before keywords (e.g. `@type string`)')


@@ -16,7 +16,7 @@ pub enum Kind {
 eof
 name // user
 number // 123
-str // 'foo'
+string // 'foo'
 str_inter // 'name=$user.name'
 chartoken // `A`
 plus
@@ -148,7 +148,7 @@ fn build_token_str() []string {
 s[Kind.eof] = 'eof'
 s[Kind.name] = 'name'
 s[Kind.number] = 'number'
-s[Kind.str] = 'STR'
+s[Kind.string] = 'STR'
 s[Kind.chartoken] = 'char'
 s[Kind.plus] = '+'
 s[Kind.minus] = '-'
@@ -287,7 +287,7 @@ pub fn (t Kind) str() string {
 if t == .chartoken {
 return 'char' // '`lit`'
 }
-if t == .str {
+if t == .string {
 return 'str' // "'lit'"
 }
 /*
@@ -417,7 +417,7 @@ pub fn (tok Token) precedence() int {
 // is_scalar returns true if the token is a scalar
 pub fn (tok Token) is_scalar() bool {
-return tok.kind in [.number, .str]
+return tok.kind in [.number, .string]
 }
 // is_unary returns true if the token can be in a unary expression