token: rename Position to Pos, rename position() to pos() (#13279)

pull/13285/head
yuyi 2022-01-26 18:36:28 +08:00 committed by GitHub
parent d71fc0d13f
commit 291a1ffd8d
61 changed files with 958 additions and 962 deletions
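The diff below applies one mechanical substitution throughout the AST JSON dumper: the `token.Position` struct is now `token.Pos`, and the `Tree.position()` helper that serializes it is now `Tree.pos()`. As a minimal, hedged sketch of the pattern (illustrative only, not the compiler's actual code; `Pos` and the string return value stand in for `token.Pos` and the real `&Node` machinery):

```v
// Illustrative V sketch of the rename. `Pos` mirrors token.Pos and
// `Tree.pos()` mirrors the renamed serializer (formerly `Tree.position()`
// taking a token.Position); the Node type is stubbed out as a string.
struct Pos {
	line_nr   int
	last_line int
	col       int
}

struct Tree {}

// before: fn (t Tree) position(p token.Position) &Node
// after:  fn (t Tree) pos(p token.Pos) &Node
fn (t Tree) pos(p Pos) string {
	return 'line_nr=${p.line_nr} last_line=${p.last_line} col=${p.col}'
}

fn main() {
	t := Tree{}
	println(t.pos(Pos{ line_nr: 294, col: 8 }))
}
```

Note that only the call-site name changes (`t.position(node.pos)` becomes `t.pos(node.pos)`); the JSON keys such as `'pos'` and `'name_pos'` are unchanged, which is why only the right-hand side of each `obj.add(...)` line differs in the hunks below.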


@@ -294,8 +294,8 @@ fn (t Tree) mod(node ast.Module) &Node {
obj.add_terse('name', t.string_node(node.name))
obj.add('short_name', t.string_node(node.short_name))
obj.add_terse('attrs', t.array_node_attr(node.attrs))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
-obj.add('name_pos', t.position(node.name_pos))
+obj.add('name_pos', t.pos(node.name_pos))
obj.add_terse('is_skipped', t.bool_node(node.is_skipped))
return obj
}
@@ -327,7 +327,7 @@ fn (t Tree) scope_struct_field(node ast.ScopeStructField) &Node {
obj.add_terse('name', t.string_node(node.name))
obj.add_terse('typ', t.type_node(node.typ))
obj.add_terse('orig_type', t.type_node(node.orig_type))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('smartcasts', t.array_node_type(node.smartcasts))
return obj
}
@@ -365,7 +365,7 @@ fn (t Tree) errors(errors []errors.Error) &Node {
obj := new_object()
obj.add_terse('message', t.string_node(e.message))
obj.add_terse('file_path', t.string_node(e.file_path))
-obj.add('pos', t.position(e.pos))
+obj.add('pos', t.pos(e.pos))
obj.add_terse('backtrace', t.string_node(e.backtrace))
obj.add_terse('reporter', t.enum_node(e.reporter))
errs.add_item(obj)
@@ -379,7 +379,7 @@ fn (t Tree) warnings(warnings []errors.Warning) &Node {
mut obj := new_object()
obj.add('message', t.string_node(w.message))
obj.add('file_path', t.string_node(w.file_path))
-obj.add('pos', t.position(w.pos))
+obj.add('pos', t.pos(w.pos))
obj.add('reporter', t.enum_node(w.reporter))
warns.add_item(obj)
}
@@ -392,7 +392,7 @@ fn (t Tree) notices(notices []errors.Notice) &Node {
mut obj := new_object()
obj.add('message', t.string_node(n.message))
obj.add('file_path', t.string_node(n.file_path))
-obj.add('pos', t.position(n.pos))
+obj.add('pos', t.pos(n.pos))
obj.add('reporter', t.enum_node(n.reporter))
notice_array.add_item(obj)
}
@@ -449,21 +449,21 @@ fn (t Tree) import_module(node ast.Import) &Node {
obj.add_terse('syms', t.array_node_import_symbol(node.syms))
obj.add('comments', t.array_node_comment(node.comments))
obj.add('next_comments', t.array_node_comment(node.next_comments))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
-obj.add('mod_pos', t.position(node.mod_pos))
+obj.add('mod_pos', t.pos(node.mod_pos))
-obj.add('alias_pos', t.position(node.alias_pos))
+obj.add('alias_pos', t.pos(node.alias_pos))
-obj.add('syms_pos', t.position(node.syms_pos))
+obj.add('syms_pos', t.pos(node.syms_pos))
return obj
}
fn (t Tree) import_symbol(node ast.ImportSymbol) &Node {
mut obj := new_object()
obj.add_terse('name', t.string_node(node.name))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
-fn (t Tree) position(p token.Position) &Node {
+fn (t Tree) pos(p token.Pos) &Node {
mut obj := new_object()
obj.add('line_nr', t.number_node(p.line_nr))
obj.add('last_line', t.number_node(p.last_line))
@@ -478,7 +478,7 @@ fn (t Tree) comment(node ast.Comment) &Node {
obj.add('text', t.string_node(node.text))
obj.add('is_multi', t.bool_node(node.is_multi))
obj.add('is_inline', t.bool_node(node.is_inline))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -490,7 +490,7 @@ fn (t Tree) const_decl(node ast.ConstDecl) &Node {
obj.add_terse('fields', t.array_node_const_field(node.fields))
obj.add_terse('attrs', t.array_node_attr(node.attrs))
obj.add('end_comments', t.array_node_comment(node.end_comments))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -505,7 +505,7 @@ fn (t Tree) const_field(node ast.ConstField) &Node {
obj.add_terse('typ', t.type_node(node.typ))
obj.add('comments', t.array_node_comment(node.comments))
obj.add('comptime_expr_value', t.comptime_expr_value(node.comptime_expr_value))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -542,9 +542,9 @@ fn (t Tree) fn_decl(node ast.FnDecl) &Node {
obj.add('is_keep_alive', t.bool_node(node.is_keep_alive))
obj.add_terse('is_unsafe', t.bool_node(node.is_unsafe))
obj.add_terse('receiver', t.struct_field(node.receiver))
-obj.add('receiver_pos', t.position(node.receiver_pos))
+obj.add('receiver_pos', t.pos(node.receiver_pos))
obj.add_terse('is_method', t.bool_node(node.is_method))
-obj.add('method_type_pos', t.position(node.method_type_pos))
+obj.add('method_type_pos', t.pos(node.method_type_pos))
obj.add('method_idx', t.number_node(node.method_idx))
obj.add_terse('rec_mut', t.bool_node(node.rec_mut))
obj.add('rec_share', t.enum_node(node.rec_share))
@@ -554,9 +554,9 @@ fn (t Tree) fn_decl(node ast.FnDecl) &Node {
obj.add('is_builtin', t.bool_node(node.is_builtin))
obj.add('is_direct_arr', t.bool_node(node.is_direct_arr))
obj.add('ctdefine_idx', t.number_node(node.ctdefine_idx))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
-obj.add('body_pos', t.position(node.body_pos))
+obj.add('body_pos', t.pos(node.body_pos))
-obj.add('return_type_pos', t.position(node.return_type_pos))
+obj.add('return_type_pos', t.pos(node.return_type_pos))
obj.add('file', t.string_node(node.file))
obj.add('has_return', t.bool_node(node.has_return))
obj.add('should_be_skipped', t.bool_node(node.should_be_skipped))
@@ -598,7 +598,7 @@ fn (t Tree) struct_decl(node ast.StructDecl) &Node {
obj.add('module_pos', t.number_node(node.module_pos))
obj.add_terse('language', t.enum_node(node.language))
obj.add_terse('is_union', t.bool_node(node.is_union))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('fields', t.array_node_struct_field(node.fields))
obj.add_terse('generic_types', t.array_node_type(node.generic_types))
obj.add_terse('attrs', t.array_node_attr(node.attrs))
@@ -612,7 +612,7 @@ fn (t Tree) struct_field(node ast.StructField) &Node {
obj.add_terse('ast_type', t.string_node('StructField'))
obj.add_terse('name', t.string_node(node.name))
obj.add_terse('typ', t.type_node(node.typ))
-obj.add('type_pos', t.position(node.type_pos))
+obj.add('type_pos', t.pos(node.type_pos))
obj.add_terse('has_default_expr', t.bool_node(node.has_default_expr))
obj.add_terse('default_expr_typ', t.type_node(node.default_expr_typ))
obj.add_terse('default_expr', t.expr(node.default_expr))
@@ -622,14 +622,14 @@ fn (t Tree) struct_field(node ast.StructField) &Node {
obj.add_terse('is_volatile', t.bool_node(node.is_volatile))
obj.add_terse('attrs', t.array_node_attr(node.attrs))
obj.add('comments', t.array_node_comment(node.comments))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
fn (t Tree) embed(node ast.Embed) &Node {
mut obj := new_object()
obj.add_terse('typ', t.type_node(node.typ))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('comments', t.array_node_comment(node.comments))
return obj
}
@@ -641,7 +641,7 @@ fn (t Tree) enum_decl(node ast.EnumDecl) &Node {
obj.add_terse('is_pub', t.bool_node(node.is_pub))
obj.add_terse('is_flag', t.bool_node(node.is_flag))
obj.add_terse('is_multi_allowed', t.bool_node(node.is_multi_allowed))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('fields', t.array_node_enum_field(node.fields))
obj.add('comments', t.array_node_comment(node.comments))
obj.add_terse('attrs', t.array_node_attr(node.attrs))
@@ -654,7 +654,7 @@ fn (t Tree) enum_field(node ast.EnumField) &Node {
obj.add_terse('name', t.string_node(node.name))
obj.add_terse('has_expr', t.bool_node(node.has_expr))
obj.add_terse('expr', t.expr(node.expr))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('comments', t.array_node_comment(node.comments))
obj.add('next_comments', t.array_node_comment(node.next_comments))
return obj
@@ -671,9 +671,9 @@ fn (t Tree) interface_decl(node ast.InterfaceDecl) &Node {
obj.add_terse('methods', t.array_node_fn_decl(node.methods))
obj.add_terse('fields', t.array_node_struct_field(node.fields))
obj.add('pre_comments', t.array_node_comment(node.pre_comments))
-obj.add('name_pos', t.position(node.name_pos))
+obj.add('name_pos', t.pos(node.name_pos))
obj.add_terse('language', t.enum_node(node.language))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('are_ifaces_expanded', t.bool_node(node.are_ifaces_expanded))
obj.add_terse('ifaces', t.array_node_interface_embedding(node.ifaces))
obj.add_terse('attrs', t.array_node_attr(node.attrs))
@@ -685,7 +685,7 @@ fn (t Tree) interface_embedding(node ast.InterfaceEmbedding) &Node {
obj.add_terse('ast_type', t.string_node('InterfaceEmbedding'))
obj.add_terse('name', t.string_node(node.name))
obj.add_terse('typ', t.type_node(node.typ))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('comments', t.array_node_comment(node.comments))
return obj
}
@@ -714,7 +714,7 @@ fn (t Tree) hash_stmt(node ast.HashStmt) &Node {
obj.add_terse('msg', t.string_node(node.msg))
obj.add_terse('ct_conds', t.array_node_expr(node.ct_conds))
obj.add_terse('source_file', t.string_node(node.source_file))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -724,8 +724,8 @@ fn (t Tree) comptime_for(node ast.ComptimeFor) &Node {
obj.add_terse('val_var', t.string_node(node.val_var))
obj.add_terse('typ', t.type_node(node.typ))
obj.add_terse('kind', t.enum_node(node.kind))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
-obj.add('typ_pos', t.position(node.pos))
+obj.add('typ_pos', t.pos(node.pos))
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
return obj
}
@@ -734,7 +734,7 @@ fn (t Tree) global_decl(node ast.GlobalDecl) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('GlobalDecl'))
obj.add_terse('mod', t.string_node(node.mod))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('is_block', t.bool_node(node.is_block))
obj.add_terse('fields', t.array_node_global_field(node.fields))
obj.add('end_comments', t.array_node_comment(node.end_comments))
@@ -751,8 +751,8 @@ fn (t Tree) global_field(node ast.GlobalField) &Node {
obj.add_terse('has_expr', t.bool_node(node.has_expr))
obj.add_terse('is_markused', t.bool_node(node.is_markused))
obj.add('comments', t.array_node_comment(node.comments))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
-obj.add('typ_pos', t.position(node.typ_pos))
+obj.add('typ_pos', t.pos(node.typ_pos))
return obj
}
@@ -763,7 +763,7 @@ fn (t Tree) defer_stmt(node ast.DeferStmt) &Node {
obj.add_terse('defer_vars', t.array_node_ident(node.defer_vars))
obj.add_terse('ifdef', t.string_node(node.ifdef))
obj.add('idx_in_fn', t.number_node(node.idx_in_fn))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -782,7 +782,7 @@ fn (t Tree) alias_type_decl(node ast.AliasTypeDecl) &Node {
obj.add_terse('is_pub', t.bool_node(node.is_pub))
obj.add_terse('parent_type', t.type_node(node.parent_type))
obj.add('comments', t.array_node_comment(node.comments))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -791,7 +791,7 @@ fn (t Tree) sum_type_decl(node ast.SumTypeDecl) &Node {
obj.add_terse('ast_type', t.string_node('SumTypeDecl'))
obj.add_terse('name', t.string_node(node.name))
obj.add_terse('is_pub', t.bool_node(node.is_pub))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('typ', t.type_node(node.typ))
obj.add_terse('generic_types', t.array_node_type(node.generic_types))
obj.add('comments', t.array_node_comment(node.comments))
@@ -805,7 +805,7 @@ fn (t Tree) fn_type_decl(node ast.FnTypeDecl) &Node {
obj.add_terse('name', t.string_node(node.name))
obj.add_terse('is_pub', t.bool_node(node.is_pub))
obj.add_terse('typ', t.type_node(node.typ))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('comments', t.array_node_comment(node.comments))
return obj
}
@@ -823,7 +823,7 @@ fn (t Tree) goto_label(node ast.GotoLabel) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('GotoLabel'))
obj.add_terse('name', t.string_node(node.name))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -831,7 +831,7 @@ fn (t Tree) goto_stmt(node ast.GotoStmt) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('GotoStmt'))
obj.add_terse('name', t.string_node(node.name))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -847,7 +847,7 @@ fn (t Tree) assign_stmt(node ast.AssignStmt) &Node {
obj.add_terse('is_volatile', t.bool_node(node.is_volatile))
obj.add_terse('is_simple', t.bool_node(node.is_simple))
obj.add_terse('has_cross_var', t.bool_node(node.has_cross_var))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('comments', t.array_node_comment(node.comments))
obj.add('end_comments', t.array_node_comment(node.end_comments))
return obj
@@ -872,7 +872,7 @@ fn (t Tree) var(node ast.Var) &Node {
obj.add('is_auto_heap', t.bool_node(node.is_auto_heap))
obj.add('is_stack_obj', t.bool_node(node.is_stack_obj))
obj.add_terse('share', t.enum_node(node.share))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('smartcasts', t.array_node_type(node.smartcasts))
return obj
}
@@ -882,7 +882,7 @@ fn (t Tree) return_(node ast.Return) &Node {
obj.add_terse('ast_type', t.string_node('Return'))
obj.add_terse('exprs', t.array_node_expr(node.exprs))
obj.add_terse('types', t.array_node_type(node.types))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -897,7 +897,7 @@ fn (t Tree) for_c_stmt(node ast.ForCStmt) &Node {
obj.add_terse('inc', t.stmt(node.inc))
obj.add_terse('is_multi', t.bool_node(node.is_multi))
obj.add_terse('label', t.string_node(node.label))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('scope', t.number_node(int(node.scope)))
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
return obj
@@ -909,7 +909,7 @@ fn (t Tree) for_stmt(node ast.ForStmt) &Node {
obj.add_terse('cond', t.expr(node.cond))
obj.add_terse('is_inf', t.bool_node(node.is_inf))
obj.add_terse('label', t.string_node(node.label))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('scope', t.number_node(int(node.scope)))
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
return obj
@@ -929,7 +929,7 @@ fn (t Tree) for_in_stmt(node ast.ForInStmt) &Node {
obj.add_terse('kind', t.enum_node(node.kind))
obj.add_terse('val_is_mut', t.bool_node(node.val_is_mut))
obj.add_terse('label', t.string_node(node.label))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('scope', t.number_node(int(node.scope)))
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
return obj
@@ -940,7 +940,7 @@ fn (t Tree) branch_stmt(node ast.BranchStmt) &Node {
obj.add_terse('ast_type', t.string_node('BranchStmt'))
obj.add_terse('kind', t.token_node(node.kind))
obj.add_terse('label', t.string_node(node.label))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -949,7 +949,7 @@ fn (t Tree) assert_stmt(node ast.AssertStmt) &Node {
obj.add_terse('ast_type', t.string_node('AssertStmt'))
obj.add_terse('expr', t.expr(node.expr))
obj.add_terse('is_used', t.bool_node(node.is_used))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -958,7 +958,7 @@ fn (t Tree) block(node ast.Block) &Node {
obj.add_terse('ast_type', t.string_node('Block'))
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
obj.add_terse('is_unsafe', t.bool_node(node.is_unsafe))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -973,12 +973,12 @@ fn (t Tree) comptime_call(node ast.ComptimeCall) &Node {
obj.add_terse('has_parens', t.bool_node(node.has_parens))
obj.add_terse('is_embed', t.bool_node(node.is_embed))
obj.add_terse('embed_file', t.embed_file(node.embed_file))
-obj.add('method_pos', t.position(node.method_pos))
+obj.add('method_pos', t.pos(node.method_pos))
obj.add_terse('left_type', t.type_node(node.left_type))
obj.add_terse('result_type', t.type_node(node.result_type))
obj.add('scope', t.scope(node.scope))
obj.add_terse('env_value', t.string_node(node.env_value))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('args', t.array_node_call_arg(node.args))
return obj
}
@@ -991,7 +991,7 @@ fn (t Tree) comptime_selector(node ast.ComptimeSelector) &Node {
obj.add_terse('field_expr', t.expr(node.field_expr))
obj.add_terse('left_type', t.type_node(node.left_type))
obj.add_terse('typ', t.type_node(node.typ))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1001,7 +1001,7 @@ fn (t Tree) expr_stmt(node ast.ExprStmt) &Node {
obj.add_terse('typ', t.type_node(node.typ))
obj.add_terse('is_expr', t.bool_node(node.is_expr))
obj.add_terse('expr', t.expr(node.expr))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('comments', t.array_node_comment(node.comments))
return obj
}
@@ -1167,7 +1167,7 @@ fn (t Tree) integer_literal(node ast.IntegerLiteral) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('IntegerLiteral'))
obj.add_terse('val', t.string_node(node.val))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1175,7 +1175,7 @@ fn (t Tree) float_literal(node ast.FloatLiteral) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('FloatLiteral'))
obj.add_terse('val', t.string_node(node.val))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1185,7 +1185,7 @@ fn (t Tree) string_literal(node ast.StringLiteral) &Node {
obj.add_terse('val', t.string_node(node.val))
obj.add_terse('is_raw', t.bool_node(node.is_raw))
obj.add_terse('language', t.enum_node(node.language))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1193,7 +1193,7 @@ fn (t Tree) char_literal(node ast.CharLiteral) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('CharLiteral'))
obj.add_terse('val', t.string_node(node.val))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1201,7 +1201,7 @@ fn (t Tree) bool_literal(node ast.BoolLiteral) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('BoolLiteral'))
obj.add_terse('val', t.bool_node(node.val))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1218,7 +1218,7 @@ fn (t Tree) string_inter_literal(node ast.StringInterLiteral) &Node {
obj.add_terse('fmt_poss', t.array_node_position(node.fmt_poss))
obj.add_terse('fmts', t.array_node_byte(node.fmts))
obj.add_terse('need_fmts', t.array_node_bool(node.need_fmts))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1229,7 +1229,7 @@ fn (t Tree) enum_val(node ast.EnumVal) &Node {
obj.add_terse('mod', t.string_node(node.mod))
obj.add_terse('val', t.string_node(node.val))
obj.add_terse('typ', t.type_node(node.typ))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1240,7 +1240,7 @@ fn (t Tree) assoc(node ast.Assoc) &Node {
obj.add_terse('fields', t.array_node_string(node.fields))
obj.add_terse('exprs', t.array_node_expr(node.exprs))
obj.add_terse('typ', t.type_node(node.typ))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('scope', t.number_node(int(node.scope)))
return obj
}
@@ -1249,7 +1249,7 @@ fn (t Tree) at_expr(node ast.AtExpr) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('AtExpr'))
obj.add_terse('name', t.string_node(node.name))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('kind', t.enum_node(node.kind))
obj.add_terse('val', t.string_node(node.val))
return obj
@@ -1265,7 +1265,7 @@ fn (t Tree) cast_expr(node ast.CastExpr) &Node {
obj.add_terse('arg', t.expr(node.arg))
obj.add_terse('expr_type', t.type_node(node.expr_type))
obj.add_terse('expr', t.expr(node.expr))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1275,7 +1275,7 @@ fn (t Tree) as_cast(node ast.AsCast) &Node {
obj.add_terse('expr', t.expr(node.expr))
obj.add_terse('typ', t.type_node(node.typ))
obj.add_terse('expr_type', t.type_node(node.expr_type))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1283,7 +1283,7 @@ fn (t Tree) type_expr(node ast.TypeNode) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('TypeNode'))
obj.add_terse('typ', t.type_node(node.typ))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1293,7 +1293,7 @@ fn (t Tree) size_of(node ast.SizeOf) &Node {
obj.add_terse('is_type', t.bool_node(node.is_type))
obj.add_terse('typ', t.type_node(node.typ))
obj.add_terse('expr', t.expr(node.expr))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1303,7 +1303,7 @@ fn (t Tree) is_ref_type(node ast.IsRefType) &Node {
obj.add_terse('is_type', t.bool_node(node.is_type))
obj.add_terse('typ', t.type_node(node.typ))
obj.add_terse('expr', t.expr(node.expr))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1315,7 +1315,7 @@ fn (t Tree) prefix_expr(node ast.PrefixExpr) &Node {
obj.add_terse('right_type', t.type_node(node.right_type))
obj.add_terse('or_block', t.or_expr(node.or_block))
obj.add_terse('is_option', t.bool_node(node.is_option))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1334,7 +1334,7 @@ fn (t Tree) infix_expr(node ast.InfixExpr) &Node {
obj.add_terse('ct_left_value', t.comptime_expr_value(node.ct_left_value))
obj.add_terse('ct_right_value_evaled', t.bool_node(node.ct_right_value_evaled))
obj.add_terse('ct_right_value', t.comptime_expr_value(node.ct_right_value))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1347,7 +1347,7 @@ fn (t Tree) index_expr(node ast.IndexExpr) &Node {
obj.add_terse('is_setter', t.bool_node(node.is_setter))
obj.add_terse('is_direct', t.bool_node(node.is_direct))
obj.add_terse('or_expr', t.or_expr(node.or_expr))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1357,7 +1357,7 @@ fn (t Tree) postfix_expr(node ast.PostfixExpr) &Node {
obj.add_terse('op', t.token_node(node.op))
obj.add_terse('expr', t.expr(node.expr))
obj.add('auto_locked', t.string_node(node.auto_locked))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1372,7 +1372,7 @@ fn (t Tree) selector_expr(node ast.SelectorExpr) &Node {
obj.add_terse('gkind_field', t.enum_node(node.gkind_field))
obj.add_terse('from_embed_types', t.array_node_type(node.from_embed_types))
obj.add_terse('next_token', t.token_node(node.next_token))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('scope', t.number_node(int(node.scope)))
return obj
}
@@ -1384,7 +1384,7 @@ fn (t Tree) range_expr(node ast.RangeExpr) &Node {
obj.add_terse('high', t.expr(node.high))
obj.add_terse('has_high', t.bool_node(node.has_high))
obj.add_terse('has_low', t.bool_node(node.has_low))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1398,7 +1398,7 @@ fn (t Tree) if_expr(node ast.IfExpr) &Node {
obj.add_terse('typ', t.type_node(node.typ))
obj.add_terse('has_else', t.bool_node(node.has_else))
obj.add_terse('is_expr', t.bool_node(node.is_expr))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('post_comments', t.array_node_comment(node.post_comments))
return obj
}
@@ -1407,8 +1407,8 @@ fn (t Tree) if_branch(node ast.IfBranch) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('IfBranch'))
obj.add_terse('cond', t.expr(node.cond))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
-obj.add('body_pos', t.position(node.body_pos))
+obj.add('body_pos', t.pos(node.body_pos))
obj.add_terse('pkg_exist', t.bool_node(node.pkg_exist))
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
obj.add('scope', t.number_node(int(node.scope)))
@@ -1427,8 +1427,8 @@ fn (t Tree) ident(node ast.Ident) &Node {
obj.add_terse('tok_kind', t.token_node(node.tok_kind))
obj.add_terse('kind', t.enum_node(node.kind))
obj.add_terse('info', t.ident_info(node.info))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
-obj.add('mut_pos', t.position(node.mut_pos))
+obj.add('mut_pos', t.pos(node.mut_pos))
obj.add('obj', t.scope_object(node.obj))
obj.add('scope', t.number_node(int(node.scope)))
return obj
@@ -1481,11 +1481,11 @@ fn (t Tree) call_expr(node ast.CallExpr) &Node {
obj.add_terse('expected_arg_types', t.array_node_type(node.expected_arg_types))
obj.add_terse('concrete_types', t.array_node_type(node.concrete_types))
obj.add_terse('or_block', t.or_expr(node.or_block))
-obj.add('concrete_list_pos', t.position(node.concrete_list_pos))
+obj.add('concrete_list_pos', t.pos(node.concrete_list_pos))
obj.add_terse('from_embed_types', t.array_node_type(node.from_embed_types))
obj.add('comments', t.array_node_comment(node.comments))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
-obj.add('name_pos', t.position(node.name_pos))
+obj.add('name_pos', t.pos(node.name_pos))
return obj
}
@@ -1497,7 +1497,7 @@ fn (t Tree) call_arg(node ast.CallArg) &Node {
obj.add_terse('share', t.enum_node(node.share))
obj.add_terse('expr', t.expr(node.expr))
obj.add('is_tmp_autofree', t.bool_node(node.is_tmp_autofree))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add('comments', t.array_node_comment(node.comments))
return obj
}
@@ -1507,7 +1507,7 @@ fn (t Tree) or_expr(node ast.OrExpr) &Node {
obj.add_terse('ast_type', t.string_node('OrExpr'))
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
obj.add_terse('kind', t.enum_node(node.kind))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1520,8 +1520,8 @@ fn (t Tree) struct_init(node ast.StructInit) &Node {
obj.add_terse('has_update_expr', t.bool_node(node.has_update_expr))
obj.add_terse('update_expr', t.expr(node.update_expr))
obj.add_terse('update_expr_type', t.type_node(node.update_expr_type))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
-obj.add('name_pos', t.position(node.name_pos))
+obj.add('name_pos', t.pos(node.name_pos))
obj.add('update_expr_comments', t.array_node_comment(node.update_expr_comments))
obj.add_terse('fields', t.array_node_struct_init_field(node.fields))
obj.add_terse('embeds', t.array_node_struct_init_embed(node.embeds))
@@ -1539,8 +1539,8 @@ fn (t Tree) struct_init_field(node ast.StructInitField) &Node {
obj.add_terse('parent_type', t.type_node(node.parent_type))
obj.add('comments', t.array_node_comment(node.comments))
obj.add('next_comments', t.array_node_comment(node.next_comments))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
-obj.add('name_pos', t.position(node.name_pos))
+obj.add('name_pos', t.pos(node.name_pos))
return obj
}
@@ -1553,7 +1553,7 @@ fn (t Tree) struct_init_embed(node ast.StructInitEmbed) &Node {
obj.add_terse('expected_type', t.type_node(node.expected_type))
obj.add('comments', t.array_node_comment(node.comments))
obj.add('next_comments', t.array_node_comment(node.next_comments))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1565,7 +1565,7 @@ fn (t Tree) array_init(node ast.ArrayInit) &Node {
obj.add_terse('exprs', t.array_node_expr(node.exprs))
obj.add('ecmnts', t.two_dimension_comment(node.ecmnts))
obj.add('pre_cmnts', t.array_node_comment(node.pre_cmnts))
-obj.add('elem_type_pos', t.position(node.elem_type_pos))
+obj.add('elem_type_pos', t.pos(node.elem_type_pos))
obj.add_terse('is_fixed', t.bool_node(node.is_fixed))
obj.add_terse('has_val', t.bool_node(node.has_val))
obj.add_terse('mod', t.string_node(node.mod))
@@ -1577,7 +1577,7 @@ fn (t Tree) array_init(node ast.ArrayInit) &Node {
obj.add_terse('has_default', t.bool_node(node.has_default))
obj.add_terse('has_it', t.bool_node(node.has_it))
obj.add_terse('expr_types', t.array_node_type(node.expr_types))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1591,14 +1591,14 @@ fn (t Tree) map_init(node ast.MapInit) &Node {
obj.add_terse('vals', t.array_node_expr(node.vals))
obj.add('comments', t.two_dimension_comment(node.comments))
obj.add('pre_cmnts', t.array_node_comment(node.pre_cmnts))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
fn (t Tree) none_expr(node ast.None) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('None'))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1606,7 +1606,7 @@ fn (t Tree) par_expr(node ast.ParExpr) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('ParExpr'))
obj.add_terse('expr', t.expr(node.expr))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1624,7 +1624,7 @@ fn (t Tree) if_guard_var(node ast.IfGuardVar) &Node {
obj.add_terse('ast_type', t.string_node('IfGuardVar'))
obj.add_terse('name', t.string_node(node.name))
obj.add_terse('is_mut', t.bool_node(node.is_mut))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1638,7 +1638,7 @@ fn (t Tree) match_expr(node ast.MatchExpr) &Node {
obj.add_terse('expected_type', t.type_node(node.expected_type))
obj.add_terse('is_sum_type', t.bool_node(node.is_sum_type))
obj.add_terse('is_expr', t.bool_node(node.is_expr))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('branches', t.array_node_match_branch(node.branches))
obj.add('comments', t.array_node_comment(node.comments))
return obj
@@ -1650,9 +1650,9 @@ fn (t Tree) match_branch(node ast.MatchBranch) &Node {
obj.add('ecmnts', t.two_dimension_comment(node.ecmnts))
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
obj.add_terse('is_else', t.bool_node(node.is_else))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('post_comments', t.array_node_comment(node.post_comments))
-obj.add('branch_pos', t.position(node.branch_pos))
+obj.add('branch_pos', t.pos(node.branch_pos))
obj.add_terse('exprs', t.array_node_expr(node.exprs))
obj.add('scope', t.number_node(int(node.scope)))
return obj
@@ -1663,7 +1663,7 @@ fn (t Tree) concat_expr(node ast.ConcatExpr) &Node {
obj.add_terse('ast_type', t.string_node('ConcatExpr'))
obj.add_terse('vals', t.array_node_expr(node.vals))
obj.add_terse('return_type', t.type_node(node.return_type))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1672,7 +1672,7 @@ fn (t Tree) type_of(node ast.TypeOf) &Node {
obj.add_terse('ast_type', t.string_node('TypeOf'))
obj.add_terse('expr', t.expr(node.expr))
obj.add_terse('expr_type', t.type_node(node.expr_type))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1681,7 +1681,7 @@ fn (t Tree) likely(node ast.Likely) &Node {
obj.add_terse('ast_type', t.string_node('Likely'))
obj.add_terse('expr', t.expr(node.expr))
obj.add_terse('is_likely', t.bool_node(node.is_likely))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1698,7 +1698,7 @@ fn (t Tree) sql_expr(node ast.SqlExpr) &Node {
obj.add_terse('order_expr', t.expr(node.order_expr))
obj.add_terse('has_desc', t.bool_node(node.has_desc))
obj.add_terse('is_array', t.bool_node(node.is_array))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('has_limit', t.bool_node(node.has_limit))
obj.add_terse('limit_expr', t.expr(node.limit_expr))
obj.add_terse('has_offset', t.bool_node(node.has_offset))
@@ -1716,7 +1716,7 @@ fn (t Tree) sql_stmt(node ast.SqlStmt) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('SqlStmt'))
obj.add_terse('db_expr', t.expr(node.db_expr))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('lines', t.array_node_sql_stmt_line(node.lines))
return obj
}
@@ -1731,7 +1731,7 @@ fn (t Tree) sql_stmt_line(node ast.SqlStmtLine) &Node {
obj.add_terse('fields', t.array_node_struct_field(node.fields))
obj.add_terse('updated_columns', t.array_node_string(node.updated_columns))
obj.add_terse('update_exprs', t.array_node_expr(node.update_exprs))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
sub_struct_map := new_object()
for key, val in node.sub_structs {
@@ -1746,7 +1746,7 @@ fn (t Tree) lock_expr(expr ast.LockExpr) &Node {
obj.add_terse('ast_type', t.string_node('LockExpr'))
obj.add_terse('is_expr', t.bool_node(expr.is_expr))
obj.add_terse('typ', t.type_node(expr.typ))
-obj.add('pos', t.position(expr.pos))
+obj.add('pos', t.pos(expr.pos))
obj.add_terse('stmts', t.array_node_stmt(expr.stmts))
obj.add_terse('lockeds', t.array_node_expr(expr.lockeds))
obj.add_terse('r_lock', t.array_node_bool(expr.is_rlock))
@@ -1757,7 +1757,7 @@ fn (t Tree) unsafe_expr(expr ast.UnsafeExpr) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('UnsafeExpr'))
obj.add_terse('expr', t.expr(expr.expr))
-obj.add('pos', t.position(expr.pos))
+obj.add('pos', t.pos(expr.pos))
return obj
}
@@ -1768,7 +1768,7 @@ fn (t Tree) chan_init(expr ast.ChanInit) &Node {
obj.add_terse('cap_expr', t.expr(expr.cap_expr))
obj.add_terse('typ', t.type_node(expr.typ))
obj.add_terse('elem_type', t.type_node(expr.elem_type))
-obj.add('pos', t.position(expr.pos))
+obj.add('pos', t.pos(expr.pos))
return obj
}
@@ -1779,7 +1779,7 @@ fn (t Tree) select_expr(expr ast.SelectExpr) &Node {
obj.add_terse('is_expr', t.bool_node(expr.is_expr))
obj.add_terse('has_exception', t.bool_node(expr.has_exception))
obj.add_terse('expected_type', t.type_node(expr.expected_type))
-obj.add('pos', t.position(expr.pos))
+obj.add('pos', t.pos(expr.pos))
return obj
}
@@ -1788,7 +1788,7 @@ fn (t Tree) select_branch(expr ast.SelectBranch) &Node {
obj.add_terse('ast_type', t.string_node('SelectBranch'))
obj.add_terse('stmt', t.stmt(expr.stmt))
obj.add_terse('stmts', t.array_node_stmt(expr.stmts))
-obj.add('pos', t.position(expr.pos))
+obj.add('pos', t.pos(expr.pos))
obj.add('comment', t.comment(expr.comment))
obj.add_terse('is_else', t.bool_node(expr.is_else))
obj.add_terse('is_timeout', t.bool_node(expr.is_timeout))
@@ -1802,7 +1802,7 @@ fn (t Tree) array_decompose(expr ast.ArrayDecompose) &Node {
obj.add_terse('expr', t.expr(expr.expr))
obj.add_terse('expr_type', t.type_node(expr.expr_type))
obj.add_terse('arg_type', t.type_node(expr.arg_type))
-obj.add('pos', t.position(expr.pos))
+obj.add('pos', t.pos(expr.pos))
return obj
}
@@ -1811,7 +1811,7 @@ fn (t Tree) go_expr(expr ast.GoExpr) &Node {
obj.add_terse('ast_type', t.string_node('GoExpr'))
obj.add_terse('call_expr', t.call_expr(expr.call_expr))
obj.add_terse('is_expr', t.bool_node(expr.is_expr))
-obj.add('pos', t.position(expr.pos))
+obj.add('pos', t.pos(expr.pos))
return obj
}
@@ -1820,7 +1820,7 @@ fn (t Tree) offset_of(expr ast.OffsetOf) &Node {
obj.add_terse('ast_type', t.string_node('OffsetOf'))
obj.add_terse('struct_type', t.type_node(expr.struct_type))
obj.add_terse('field', t.string_node('field'))
-obj.add('pos', t.position(expr.pos))
+obj.add('pos', t.pos(expr.pos))
return obj
}
@@ -1829,7 +1829,7 @@ fn (t Tree) dump_expr(expr ast.DumpExpr) &Node {
obj.add_terse('ast_type', t.string_node('DumpExpr'))
obj.add_terse('expr', t.expr(expr.expr))
obj.add_terse('expr_type', t.type_node(expr.expr_type))
-obj.add('pos', t.position(expr.pos))
+obj.add('pos', t.pos(expr.pos))
return obj
}
@@ -1837,7 +1837,7 @@ fn (t Tree) node_error(expr ast.NodeError) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('NodeError'))
obj.add_terse('idx', t.number_node(expr.idx))
-obj.add('pos', t.position(expr.pos))
+obj.add('pos', t.pos(expr.pos))
return obj
}
@@ -1851,7 +1851,7 @@ fn (t Tree) empty_expr(expr ast.EmptyExpr) &Node {
fn (t Tree) empty_stmt(node ast.EmptyStmt) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('EmptyStmt'))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1864,7 +1864,7 @@ fn (t Tree) asm_stmt(node ast.AsmStmt) &Node {
obj.add_terse('is_goto', t.bool_node(node.is_goto))
obj.add('scope', t.scope(node.scope))
// obj.add('scope', t.number_node(int(node.scope)))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
obj.add_terse('clobbered', t.array_node_asm_clobbered(node.clobbered))
obj.add_terse('templates', t.array_node_asm_template(node.templates))
obj.add_terse('output', t.array_node_asm_io(node.output))
@@ -1891,7 +1891,7 @@ fn (t Tree) asm_template(node ast.AsmTemplate) &Node {
obj.add_terse('is_directive', t.bool_node(node.is_directive))
obj.add_terse('args', t.array_node_asm_arg(node.args))
obj.add('comments', t.array_node_comment(node.comments))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1904,7 +1904,7 @@ fn (t Tree) asm_addressing(node ast.AsmAddressing) &Node {
obj.add_terse('displacement', t.asm_arg(node.displacement))
obj.add_terse('base', t.asm_arg(node.base))
obj.add_terse('index', t.asm_arg(node.index))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1944,7 +1944,7 @@ fn (t Tree) asm_alias(node ast.AsmAlias) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('AsmAlias'))
obj.add_terse('name', t.string_node(node.name))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1952,7 +1952,7 @@ fn (t Tree) asm_disp(node ast.AsmDisp) &Node {
mut obj := new_object()
obj.add_terse('ast_type', t.string_node('AsmDisp'))
obj.add_terse('val', t.string_node(node.val))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -1972,7 +1972,7 @@ fn (t Tree) asm_io(node ast.AsmIO) &Node {
obj.add_terse('expr', t.expr(node.expr))
obj.add_terse('typ', t.type_node(node.typ))
obj.add('comments', t.array_node_comment(node.comments))
-obj.add('pos', t.position(node.pos))
+obj.add('pos', t.pos(node.pos))
return obj
}
@@ -2032,10 +2032,10 @@ fn (t Tree) array_node_string(nodes []string) &Node {
return arr
}
-fn (t Tree) array_node_position(nodes []token.Position) &Node {
+fn (t Tree) array_node_position(nodes []token.Pos) &Node {
mut arr := new_array() mut arr := new_array()
for node in nodes { for node in nodes {
arr.add_item(t.position(node)) arr.add_item(t.pos(node))
} }
return arr return arr
} }

View File

@ -222,7 +222,7 @@ fn (vt &Vet) e2string(err vet.Error) string {
} }
fn (mut vt Vet) error(msg string, line int, fix vet.FixKind) { fn (mut vt Vet) error(msg string, line int, fix vet.FixKind) {
pos := token.Position{ pos := token.Pos{
line_nr: line + 1 line_nr: line + 1
} }
vt.errors << vet.Error{ vt.errors << vet.Error{
@ -236,7 +236,7 @@ fn (mut vt Vet) error(msg string, line int, fix vet.FixKind) {
} }
fn (mut vt Vet) warn(msg string, line int, fix vet.FixKind) { fn (mut vt Vet) warn(msg string, line int, fix vet.FixKind) {
pos := token.Position{ pos := token.Pos{
line_nr: line + 1 line_nr: line + 1
} }
mut w := vet.Error{ mut w := vet.Error{

View File

@ -70,7 +70,7 @@ pub fn (dtt DateTimeType) str() string {
pub struct Comment { pub struct Comment {
pub: pub:
text string text string
pos token.Position pos token.Pos
} }
// str returns the `string` representation of the `Comment` type. // str returns the `string` representation of the `Comment` type.
@ -86,7 +86,7 @@ pub fn (c Comment) str() string {
pub struct Null { pub struct Null {
pub: pub:
text string text string
pos token.Position pos token.Pos
} }
// str returns the `string` representation of the `Null` type // str returns the `string` representation of the `Null` type
@ -100,7 +100,7 @@ pub struct Quoted {
pub mut: pub mut:
text string text string
pub: pub:
pos token.Position pos token.Pos
is_multiline bool is_multiline bool
quote byte quote byte
} }
@ -122,7 +122,7 @@ pub fn (q Quoted) str() string {
pub struct Bare { pub struct Bare {
pub: pub:
text string text string
pos token.Position pos token.Pos
} }
// str returns the `string` representation of the `Bare` type. // str returns the `string` representation of the `Bare` type.
@ -140,7 +140,7 @@ pub fn (b Bare) str() string {
pub struct Bool { pub struct Bool {
pub: pub:
text string text string
pos token.Position pos token.Pos
} }
// str returns the `string` representation of the `Bool` type. // str returns the `string` representation of the `Bool` type.
@ -157,7 +157,7 @@ pub fn (b Bool) str() string {
// Number can be integers, floats, infinite, NaN - they can have exponents (`5e2`) and be sign prefixed (`+2`). // Number can be integers, floats, infinite, NaN - they can have exponents (`5e2`) and be sign prefixed (`+2`).
pub struct Number { pub struct Number {
pub: pub:
pos token.Position pos token.Pos
pub mut: pub mut:
text string text string
} }
@ -197,7 +197,7 @@ pub fn (n Number) f64() f64 {
pub struct Date { pub struct Date {
pub: pub:
text string text string
pos token.Position pos token.Pos
} }
// str returns the `string` representation of the `Date` type. // str returns the `string` representation of the `Date` type.
@ -215,7 +215,7 @@ pub struct Time {
pub: pub:
text string text string
offset int offset int
pos token.Position pos token.Pos
} }
// str returns the `string` representation of the `Time` type. // str returns the `string` representation of the `Time` type.
@ -234,7 +234,7 @@ pub struct DateTime {
pub mut: pub mut:
text string text string
pub: pub:
pos token.Position pos token.Pos
date Date date Date
time Time time Time
} }
@ -253,7 +253,7 @@ pub fn (dt DateTime) str() string {
// EOF is the data representation of the end of the TOML document. // EOF is the data representation of the end of the TOML document.
pub struct EOF { pub struct EOF {
pub: pub:
pos token.Position pos token.Pos
} }
// str returns the `string` representation of the `EOF` type. // str returns the `string` representation of the `EOF` type.

View File

@ -53,7 +53,7 @@ fn (c Checker) visit(value &ast.Value) ? {
} }
// excerpt returns a string of the token's surroundings // excerpt returns a string of the token's surroundings
fn (c Checker) excerpt(tp token.Position) string { fn (c Checker) excerpt(tp token.Pos) string {
return c.scanner.excerpt(tp.pos, 10) return c.scanner.excerpt(tp.pos, 10)
} }
@ -301,7 +301,7 @@ fn (c Checker) check_date_time(dt ast.DateTime) ? {
// Re-use date and time validation code for detailed testing of each part // Re-use date and time validation code for detailed testing of each part
c.check_date(ast.Date{ c.check_date(ast.Date{
text: split[0] text: split[0]
pos: token.Position{ pos: token.Pos{
len: split[0].len len: split[0].len
line_nr: dt.pos.line_nr line_nr: dt.pos.line_nr
pos: dt.pos.pos pos: dt.pos.pos
@ -310,7 +310,7 @@ fn (c Checker) check_date_time(dt ast.DateTime) ? {
}) ? }) ?
c.check_time(ast.Time{ c.check_time(ast.Time{
text: split[1] text: split[1]
pos: token.Position{ pos: token.Pos{
len: split[1].len len: split[1].len
line_nr: dt.pos.line_nr line_nr: dt.pos.line_nr
pos: dt.pos.pos + split[0].len pos: dt.pos.pos + split[0].len
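The two struct literals above only re-base the `pos` and `len` of the date and time halves against the full date-time literal; the line number is carried over unchanged. A standalone sketch of that arithmetic with invented offsets (this `Pos` is a local mirror of the three fields used in the hunk, not the real `toml.token.Pos`):

// Illustrative only: mirrors the `len`/`line_nr`/`pos` fields used above.
struct Pos {
	len     int
	line_nr int
	pos     int
}

fn main() {
	full := Pos{ len: 19, line_nr: 4, pos: 120 } // a `1979-05-27T07:32:00` literal
	date_len := 10 // `1979-05-27`
	time_len := 8 // `07:32:00`
	date_pos := Pos{ len: date_len, line_nr: full.line_nr, pos: full.pos }
	time_pos := Pos{ len: time_len, line_nr: full.line_nr, pos: full.pos + date_len }
	println('date starts at $date_pos.pos, time at $time_pos.pos') // 120 and 130
}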

View File

@ -43,7 +43,7 @@ fn (d Decoder) modify(mut value ast.Value) ? {
} }
// excerpt returns a string of the token's surroundings // excerpt returns a string of the token's surroundings
fn (d Decoder) excerpt(tp token.Position) string { fn (d Decoder) excerpt(tp token.Pos) string {
return d.scanner.excerpt(tp.pos, 10) return d.scanner.excerpt(tp.pos, 10)
} }

View File

@ -1160,7 +1160,7 @@ pub fn (mut p Parser) comment() ast.Comment {
util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed hash comment "#$p.tok.lit"') util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed hash comment "#$p.tok.lit"')
return ast.Comment{ return ast.Comment{
text: p.tok.lit text: p.tok.lit
pos: p.tok.position() pos: p.tok.pos()
} }
} }
@ -1173,7 +1173,7 @@ pub fn (mut p Parser) key() ?ast.Key {
if p.tok.kind == .number { if p.tok.kind == .number {
if p.peek_tok.kind == .minus { if p.peek_tok.kind == .minus {
mut lits := p.tok.lit mut lits := p.tok.lit
pos := p.tok.position() pos := p.tok.pos()
for p.peek_tok.kind != .assign && p.peek_tok.kind != .period && p.peek_tok.kind != .rsbr { for p.peek_tok.kind != .assign && p.peek_tok.kind != .period && p.peek_tok.kind != .rsbr {
p.next() ? p.next() ?
if p.tok.kind !in parser.space_formatting { if p.tok.kind !in parser.space_formatting {
@ -1337,7 +1337,7 @@ pub fn (mut p Parser) number_or_date() ?ast.Value {
// bare parse and returns an `ast.Bare` type. // bare parse and returns an `ast.Bare` type.
pub fn (mut p Parser) bare() ?ast.Bare { pub fn (mut p Parser) bare() ?ast.Bare {
mut lits := p.tok.lit mut lits := p.tok.lit
pos := p.tok.position() pos := p.tok.pos()
for p.peek_tok.kind != .assign && p.peek_tok.kind != .period && p.peek_tok.kind != .rsbr for p.peek_tok.kind != .assign && p.peek_tok.kind != .period && p.peek_tok.kind != .rsbr
&& p.peek_tok.kind !in parser.space_formatting { && p.peek_tok.kind !in parser.space_formatting {
p.next() ? p.next() ?
@ -1373,7 +1373,7 @@ pub fn (mut p Parser) quoted() ast.Quoted {
} }
return ast.Quoted{ return ast.Quoted{
text: lit text: lit
pos: p.tok.position() pos: p.tok.pos()
quote: quote quote: quote
is_multiline: is_multiline is_multiline: is_multiline
} }
@ -1387,7 +1387,7 @@ pub fn (mut p Parser) boolean() ?ast.Bool {
} }
return ast.Bool{ return ast.Bool{
text: p.tok.lit text: p.tok.lit
pos: p.tok.position() pos: p.tok.pos()
} }
} }
@ -1395,7 +1395,7 @@ pub fn (mut p Parser) boolean() ?ast.Bool {
pub fn (mut p Parser) number() ast.Number { pub fn (mut p Parser) number() ast.Number {
return ast.Number{ return ast.Number{
text: p.tok.lit text: p.tok.lit
pos: p.tok.position() pos: p.tok.pos()
} }
} }
@ -1404,7 +1404,7 @@ pub fn (mut p Parser) number() ast.Number {
pub fn (mut p Parser) date_time() ?ast.DateTimeType { pub fn (mut p Parser) date_time() ?ast.DateTimeType {
// Date and/or Time // Date and/or Time
mut lit := '' mut lit := ''
pos := p.tok.position() pos := p.tok.pos()
mut date := ast.Date{} mut date := ast.Date{}
mut time := ast.Time{} mut time := ast.Time{}
@ -1447,7 +1447,7 @@ pub fn (mut p Parser) date_time() ?ast.DateTimeType {
pub fn (mut p Parser) date() ?ast.Date { pub fn (mut p Parser) date() ?ast.Date {
// Date // Date
mut lit := p.tok.lit mut lit := p.tok.lit
pos := p.tok.position() pos := p.tok.pos()
p.check(.number) ? p.check(.number) ?
lit += p.tok.lit lit += p.tok.lit
@ -1470,7 +1470,7 @@ pub fn (mut p Parser) date() ?ast.Date {
pub fn (mut p Parser) time() ?ast.Time { pub fn (mut p Parser) time() ?ast.Time {
// Time // Time
mut lit := p.tok.lit mut lit := p.tok.lit
pos := p.tok.position() pos := p.tok.pos()
if p.is_at(.bare) && (lit.starts_with('T') || lit.starts_with('t')) { if p.is_at(.bare) && (lit.starts_with('T') || lit.starts_with('t')) {
if p.tok.lit.starts_with('T') { if p.tok.lit.starts_with('T') {
@ -1530,6 +1530,6 @@ pub fn (mut p Parser) time() ?ast.Time {
// eof returns an `ast.EOF` type. // eof returns an `ast.EOF` type.
pub fn (mut p Parser) eof() ast.EOF { pub fn (mut p Parser) eof() ast.EOF {
return ast.EOF{ return ast.EOF{
pos: p.tok.position() pos: p.tok.pos()
} }
} }

View File

@ -4,7 +4,7 @@
module token module token
// Position represents a position in a TOML document. // Position represents a position in a TOML document.
pub struct Position { pub struct Pos {
pub: pub:
len int // length of the literal in the source len int // length of the literal in the source
line_nr int // the line number in the source where the token occured line_nr int // the line number in the source where the token occured

View File

@ -42,8 +42,8 @@ pub enum Kind {
} }
[inline] [inline]
pub fn (tok &Token) position() Position { pub fn (tok &Token) pos() Pos {
return Position{ return Pos{
len: tok.len len: tok.len
line_nr: tok.line_nr - 1 line_nr: tok.line_nr - 1
pos: tok.pos pos: tok.pos
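Only the names change in this hunk: `Token.position()` becomes `Token.pos()`, and it still subtracts one from the token's line number when filling in `Pos` (presumably converting the scanner's 1-based count to a 0-based one). A minimal runnable sketch of that conversion, using local `Token`/`Pos` mirrors of just the fields involved rather than the real `toml.token` types, with invented values:

struct Pos {
	len     int
	line_nr int
	pos     int
}

struct Token {
	len     int
	line_nr int // assumed 1-based here, matching the `- 1` in the hunk above
	pos     int // byte offset into the source
}

// mirrors `pub fn (tok &Token) pos() Pos` from the diff
fn (tok &Token) pos() Pos {
	return Pos{
		len: tok.len
		line_nr: tok.line_nr - 1
		pos: tok.pos
	}
}

fn main() {
	tok := Token{ len: 4, line_nr: 3, pos: 27 }
	p := tok.pos()
	println('line ${p.line_nr + 1}, offset $p.pos, len $p.len') // line 3, offset 27, len 4
}

Callers in the parser hunks further down (`pos := p.tok.pos()`) receive the same struct as before, just under the shorter name.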

View File

@ -111,7 +111,7 @@ pub type Node = CallArg
pub struct TypeNode { pub struct TypeNode {
pub: pub:
typ Type typ Type
pos token.Position pos token.Pos
} }
pub struct EmptyExpr { pub struct EmptyExpr {
@ -124,7 +124,7 @@ pub fn empty_expr() Expr {
pub struct EmptyStmt { pub struct EmptyStmt {
pub: pub:
pos token.Position pos token.Pos
} }
pub fn empty_stmt() Stmt { pub fn empty_stmt() Stmt {
@ -144,14 +144,14 @@ pub struct Block {
pub: pub:
stmts []Stmt stmts []Stmt
is_unsafe bool is_unsafe bool
pos token.Position pos token.Pos
} }
// | IncDecStmt k // | IncDecStmt k
// Stand-alone expression in a statement list. // Stand-alone expression in a statement list.
pub struct ExprStmt { pub struct ExprStmt {
pub: pub:
pos token.Position pos token.Pos
comments []Comment comments []Comment
pub mut: pub mut:
expr Expr expr Expr
@ -162,13 +162,13 @@ pub mut:
pub struct IntegerLiteral { pub struct IntegerLiteral {
pub: pub:
val string val string
pos token.Position pos token.Pos
} }
pub struct FloatLiteral { pub struct FloatLiteral {
pub: pub:
val string val string
pos token.Position pos token.Pos
} }
pub struct StringLiteral { pub struct StringLiteral {
@ -176,7 +176,7 @@ pub:
val string val string
is_raw bool is_raw bool
language Language language Language
pos token.Position pos token.Pos
} }
// 'name: $name' // 'name: $name'
@ -188,8 +188,8 @@ pub:
precisions []int precisions []int
pluss []bool pluss []bool
fills []bool fills []bool
fmt_poss []token.Position fmt_poss []token.Pos
pos token.Position pos token.Pos
pub mut: pub mut:
expr_types []Type expr_types []Type
fmts []byte fmts []byte
@ -199,13 +199,13 @@ pub mut:
pub struct CharLiteral { pub struct CharLiteral {
pub: pub:
val string val string
pos token.Position pos token.Pos
} }
pub struct BoolLiteral { pub struct BoolLiteral {
pub: pub:
val bool val bool
pos token.Position pos token.Pos
} }
pub enum GenericKindField { pub enum GenericKindField {
@ -217,10 +217,10 @@ pub enum GenericKindField {
// `foo.bar` // `foo.bar`
pub struct SelectorExpr { pub struct SelectorExpr {
pub: pub:
pos token.Position pos token.Pos
field_name string field_name string
is_mut bool // is used for the case `if mut ident.selector is MyType {`, it indicates if the root ident is mutable is_mut bool // is used for the case `if mut ident.selector is MyType {`, it indicates if the root ident is mutable
mut_pos token.Position mut_pos token.Pos
next_token token.Kind next_token token.Kind
pub mut: pub mut:
expr Expr // expr.field_name expr Expr // expr.field_name
@ -251,15 +251,15 @@ pub:
name string // encoding.base64 name string // encoding.base64
short_name string // base64 short_name string // base64
attrs []Attr attrs []Attr
pos token.Position pos token.Pos
name_pos token.Position // `name` in import name name_pos token.Pos // `name` in import name
is_skipped bool // module main can be skipped in single file programs is_skipped bool // module main can be skipped in single file programs
} }
pub struct StructField { pub struct StructField {
pub: pub:
pos token.Position pos token.Pos
type_pos token.Position type_pos token.Pos
comments []Comment comments []Comment
has_default_expr bool has_default_expr bool
attrs []Attr attrs []Attr
@ -290,7 +290,7 @@ pub:
name string name string
is_pub bool is_pub bool
is_markused bool // an explict `[markused]` tag; the const will NOT be removed by `-skip-unused`, no matter what is_markused bool // an explict `[markused]` tag; the const will NOT be removed by `-skip-unused`, no matter what
pos token.Position pos token.Pos
pub mut: pub mut:
expr Expr // the value expr of field; everything after `=` expr Expr // the value expr of field; everything after `=`
typ Type // the type of the const field, it can be any type in V typ Type // the type of the const field, it can be any type in V
@ -304,7 +304,7 @@ pub mut:
pub struct ConstDecl { pub struct ConstDecl {
pub: pub:
is_pub bool is_pub bool
pos token.Position pos token.Pos
attrs []Attr // tags like `[markused]`, valid for all the consts in the list attrs []Attr // tags like `[markused]`, valid for all the consts in the list
pub mut: pub mut:
fields []ConstField // all the const fields in the `const (...)` block fields []ConstField // all the const fields in the `const (...)` block
@ -314,7 +314,7 @@ pub mut:
pub struct StructDecl { pub struct StructDecl {
pub: pub:
pos token.Position pos token.Pos
name string name string
generic_types []Type generic_types []Type
is_pub bool is_pub bool
@ -336,7 +336,7 @@ pub mut:
pub struct Embed { pub struct Embed {
pub: pub:
typ Type typ Type
pos token.Position pos token.Pos
comments []Comment comments []Comment
} }
@ -344,7 +344,7 @@ pub struct InterfaceEmbedding {
pub: pub:
name string name string
typ Type typ Type
pos token.Position pos token.Pos
comments []Comment comments []Comment
} }
@ -352,12 +352,12 @@ pub struct InterfaceDecl {
pub: pub:
name string name string
typ Type typ Type
name_pos token.Position name_pos token.Pos
language Language language Language
field_names []string field_names []string
is_pub bool is_pub bool
mut_pos int // mut: mut_pos int // mut:
pos token.Position pos token.Pos
pre_comments []Comment pre_comments []Comment
generic_types []Type generic_types []Type
attrs []Attr attrs []Attr
@ -371,8 +371,8 @@ pub mut:
pub struct StructInitField { pub struct StructInitField {
pub: pub:
pos token.Position pos token.Pos
name_pos token.Position name_pos token.Pos
comments []Comment comments []Comment
next_comments []Comment next_comments []Comment
pub mut: pub mut:
@ -385,7 +385,7 @@ pub mut:
pub struct StructInitEmbed { pub struct StructInitEmbed {
pub: pub:
pos token.Position pos token.Pos
comments []Comment comments []Comment
next_comments []Comment next_comments []Comment
pub mut: pub mut:
@ -397,8 +397,8 @@ pub mut:
pub struct StructInit { pub struct StructInit {
pub: pub:
pos token.Position pos token.Pos
name_pos token.Position name_pos token.Pos
is_short bool is_short bool
pub mut: pub mut:
unresolved bool unresolved bool
@ -418,10 +418,10 @@ pub struct Import {
pub: pub:
mod string // the module name of the import mod string // the module name of the import
alias string // the `x` in `import xxx as x` alias string // the `x` in `import xxx as x`
pos token.Position pos token.Pos
mod_pos token.Position mod_pos token.Pos
alias_pos token.Position alias_pos token.Pos
syms_pos token.Position syms_pos token.Pos
pub mut: pub mut:
syms []ImportSymbol // the list of symbols in `import {symbol1, symbol2}` syms []ImportSymbol // the list of symbols in `import {symbol1, symbol2}`
comments []Comment comments []Comment
@ -431,7 +431,7 @@ pub mut:
// import symbol,for import {symbol} syntax // import symbol,for import {symbol} syntax
pub struct ImportSymbol { pub struct ImportSymbol {
pub: pub:
pos token.Position pos token.Pos
name string name string
} }
@ -453,27 +453,27 @@ pub:
is_pub bool is_pub bool
is_variadic bool is_variadic bool
is_anon bool is_anon bool
is_noreturn bool // true, when [noreturn] is used on a fn is_noreturn bool // true, when [noreturn] is used on a fn
is_manualfree bool // true, when [manualfree] is used on a fn is_manualfree bool // true, when [manualfree] is used on a fn
is_main bool // true for `fn main()` is_main bool // true for `fn main()`
is_test bool // true for `fn test_abcde` is_test bool // true for `fn test_abcde`
is_conditional bool // true for `[if abc] fn abc(){}` is_conditional bool // true for `[if abc] fn abc(){}`
is_exported bool // true for `[export: 'exact_C_name']` is_exported bool // true for `[export: 'exact_C_name']`
is_keep_alive bool // passed memory must not be freed (by GC) before function returns is_keep_alive bool // passed memory must not be freed (by GC) before function returns
is_unsafe bool // true, when [unsafe] is used on a fn is_unsafe bool // true, when [unsafe] is used on a fn
is_markused bool // true, when an explict `[markused]` tag was put on a fn; `-skip-unused` will not remove that fn is_markused bool // true, when an explict `[markused]` tag was put on a fn; `-skip-unused` will not remove that fn
receiver StructField // TODO this is not a struct field receiver StructField // TODO this is not a struct field
receiver_pos token.Position // `(u User)` in `fn (u User) name()` position receiver_pos token.Pos // `(u User)` in `fn (u User) name()` position
is_method bool is_method bool
method_type_pos token.Position // `User` in ` fn (u User)` position method_type_pos token.Pos // `User` in ` fn (u User)` position
method_idx int method_idx int
rec_mut bool // is receiver mutable rec_mut bool // is receiver mutable
rec_share ShareType rec_share ShareType
language Language // V, C, JS language Language // V, C, JS
file_mode Language // whether *the file*, where a function was a '.c.v', '.js.v' etc. file_mode Language // whether *the file*, where a function was a '.c.v', '.js.v' etc.
no_body bool // just a definition `fn C.malloc()` no_body bool // just a definition `fn C.malloc()`
is_builtin bool // this function is defined in builtin/strconv is_builtin bool // this function is defined in builtin/strconv
body_pos token.Position // function bodys position body_pos token.Pos // function bodys position
file string file string
generic_names []string generic_names []string
is_direct_arr bool // direct array access is_direct_arr bool // direct array access
@ -484,9 +484,9 @@ pub mut:
stmts []Stmt stmts []Stmt
defer_stmts []DeferStmt defer_stmts []DeferStmt
return_type Type return_type Type
return_type_pos token.Position // `string` in `fn (u User) name() string` position return_type_pos token.Pos // `string` in `fn (u User) name() string` position
has_return bool has_return bool
should_be_skipped bool // true, when -skip-unused could not find any usages of that function, starting from main + other known used functions should_be_skipped bool // true, when -skip-unused could not find any usages of that function, starting from main + other known used functions
ninstances int // 0 for generic functions with no concrete instances ninstances int // 0 for generic functions with no concrete instances
has_await bool // 'true' if this function uses JS.await has_await bool // 'true' if this function uses JS.await
// //
@ -496,7 +496,7 @@ pub mut:
source_file &File = 0 source_file &File = 0
scope &Scope scope &Scope
label_names []string label_names []string
pos token.Position // function declaration position pos token.Pos // function declaration position
} }
// break, continue // break, continue
@ -504,14 +504,14 @@ pub struct BranchStmt {
pub: pub:
kind token.Kind kind token.Kind
label string label string
pos token.Position pos token.Pos
} }
// function or method call expr // function or method call expr
pub struct CallExpr { pub struct CallExpr {
pub: pub:
pos token.Position pos token.Pos
name_pos token.Position name_pos token.Pos
mod string mod string
pub mut: pub mut:
name string // left.name() name string // left.name()
@ -530,7 +530,7 @@ pub mut:
return_type Type return_type Type
should_be_skipped bool // true for calls to `[if someflag?]` functions, when there is no `-d someflag` should_be_skipped bool // true for calls to `[if someflag?]` functions, when there is no `-d someflag`
concrete_types []Type // concrete types, e.g. <int, string> concrete_types []Type // concrete types, e.g. <int, string>
concrete_list_pos token.Position concrete_list_pos token.Pos
free_receiver bool // true if the receiver expression needs to be freed free_receiver bool // true if the receiver expression needs to be freed
scope &Scope scope &Scope
from_embed_types []Type // holds the type of the embed that the method is called from from_embed_types []Type // holds the type of the embed that the method is called from
@ -553,14 +553,14 @@ pub mut:
expr Expr expr Expr
typ Type typ Type
is_tmp_autofree bool // this tells cgen that a tmp variable has to be used for the arg expression in order to free it after the call is_tmp_autofree bool // this tells cgen that a tmp variable has to be used for the arg expression in order to free it after the call
pos token.Position pos token.Pos
// tmp_name string // for autofree // tmp_name string // for autofree
} }
// function return statement // function return statement
pub struct Return { pub struct Return {
pub: pub:
pos token.Position pos token.Pos
comments []Comment comments []Comment
pub mut: pub mut:
exprs []Expr exprs []Expr
@ -598,7 +598,7 @@ pub mut:
// 10 <- original type (orig_type) // 10 <- original type (orig_type)
// [11, 12, 13] <- cast order (smartcasts) // [11, 12, 13] <- cast order (smartcasts)
// 12 <- the current casted type (typ) // 12 <- the current casted type (typ)
pos token.Position pos token.Pos
is_used bool // whether the local variable was used in other expressions is_used bool // whether the local variable was used in other expressions
is_changed bool // to detect mutable vars that are never changed is_changed bool // to detect mutable vars that are never changed
// //
@ -615,7 +615,7 @@ pub struct ScopeStructField {
pub: pub:
struct_type Type // type of struct struct_type Type // type of struct
name string name string
pos token.Position pos token.Pos
typ Type typ Type
smartcasts []Type // nested sum types require nested smart casting, for that a list of types is needed smartcasts []Type // nested sum types require nested smart casting, for that a list of types is needed
orig_type Type // original sumtype type; 0 if it's not a sumtype orig_type Type // original sumtype type; 0 if it's not a sumtype
@ -629,8 +629,8 @@ pub struct GlobalField {
pub: pub:
name string name string
has_expr bool has_expr bool
pos token.Position pos token.Pos
typ_pos token.Position typ_pos token.Pos
is_markused bool // an explict `[markused]` tag; the global will NOT be removed by `-skip-unused` is_markused bool // an explict `[markused]` tag; the global will NOT be removed by `-skip-unused`
pub mut: pub mut:
expr Expr expr Expr
@ -641,7 +641,7 @@ pub mut:
pub struct GlobalDecl { pub struct GlobalDecl {
pub: pub:
mod string mod string
pos token.Position pos token.Pos
is_block bool // __global() block is_block bool // __global() block
attrs []Attr // tags like `[markused]`, valid for all the globals in the list attrs []Attr // tags like `[markused]`, valid for all the globals in the list
pub mut: pub mut:
@ -740,8 +740,8 @@ pub struct Ident {
pub: pub:
language Language language Language
tok_kind token.Kind tok_kind token.Kind
pos token.Position pos token.Pos
mut_pos token.Position mut_pos token.Pos
comptime bool comptime bool
pub mut: pub mut:
scope &Scope scope &Scope
@ -770,7 +770,7 @@ pub fn (i &Ident) var_info() IdentVar {
pub struct InfixExpr { pub struct InfixExpr {
pub: pub:
op token.Kind op token.Kind
pos token.Position pos token.Pos
is_stmt bool is_stmt bool
pub mut: pub mut:
left Expr left Expr
@ -790,7 +790,7 @@ pub mut:
pub struct PostfixExpr { pub struct PostfixExpr {
pub: pub:
op token.Kind op token.Kind
pos token.Position pos token.Pos
pub mut: pub mut:
expr Expr expr Expr
auto_locked string auto_locked string
@ -800,7 +800,7 @@ pub mut:
pub struct PrefixExpr { pub struct PrefixExpr {
pub: pub:
op token.Kind op token.Kind
pos token.Position pos token.Pos
pub mut: pub mut:
right_type Type right_type Type
right Expr right Expr
@ -810,7 +810,7 @@ pub mut:
pub struct IndexExpr { pub struct IndexExpr {
pub: pub:
pos token.Position pos token.Pos
pub mut: pub mut:
index Expr // [0], RangeExpr [start..end] or map[key] index Expr // [0], RangeExpr [start..end] or map[key]
or_expr OrExpr or_expr OrExpr
@ -829,7 +829,7 @@ pub struct IfExpr {
pub: pub:
is_comptime bool is_comptime bool
tok_kind token.Kind tok_kind token.Kind
pos token.Position pos token.Pos
post_comments []Comment post_comments []Comment
pub mut: pub mut:
left Expr // `a` in `a := if ...` left Expr // `a` in `a := if ...`
@ -842,8 +842,8 @@ pub mut:
pub struct IfBranch { pub struct IfBranch {
pub: pub:
pos token.Position pos token.Pos
body_pos token.Position body_pos token.Pos
comments []Comment comments []Comment
pub mut: pub mut:
cond Expr cond Expr
@ -854,7 +854,7 @@ pub mut:
pub struct UnsafeExpr { pub struct UnsafeExpr {
pub: pub:
pos token.Position pos token.Pos
pub mut: pub mut:
expr Expr expr Expr
} }
@ -862,7 +862,7 @@ pub mut:
pub struct LockExpr { pub struct LockExpr {
pub: pub:
is_rlock []bool is_rlock []bool
pos token.Position pos token.Pos
pub mut: pub mut:
stmts []Stmt stmts []Stmt
lockeds []Expr // `x`, `y.z` in `lock x, y.z {` lockeds []Expr // `x`, `y.z` in `lock x, y.z {`
@ -875,7 +875,7 @@ pub mut:
pub struct MatchExpr { pub struct MatchExpr {
pub: pub:
tok_kind token.Kind tok_kind token.Kind
pos token.Position pos token.Pos
comments []Comment // comments before the first branch comments []Comment // comments before the first branch
pub mut: pub mut:
cond Expr cond Expr
@ -890,10 +890,10 @@ pub mut:
pub struct MatchBranch { pub struct MatchBranch {
pub: pub:
ecmnts [][]Comment // inline comments for each left side expr ecmnts [][]Comment // inline comments for each left side expr
pos token.Position pos token.Pos
is_else bool is_else bool
post_comments []Comment // comments below ´... }´ post_comments []Comment // comments below ´... }´
branch_pos token.Position // for checker errors about invalid branches branch_pos token.Pos // for checker errors about invalid branches
pub mut: pub mut:
stmts []Stmt // right side stmts []Stmt // right side
exprs []Expr // left side exprs []Expr // left side
@ -903,7 +903,7 @@ pub mut:
pub struct SelectExpr { pub struct SelectExpr {
pub: pub:
branches []SelectBranch branches []SelectBranch
pos token.Position pos token.Pos
has_exception bool has_exception bool
pub mut: pub mut:
is_expr bool // returns a value is_expr bool // returns a value
@ -912,7 +912,7 @@ pub mut:
pub struct SelectBranch { pub struct SelectBranch {
pub: pub:
pos token.Position pos token.Pos
comment Comment // comment above `select {` comment Comment // comment above `select {`
is_else bool is_else bool
is_timeout bool is_timeout bool
@ -933,8 +933,8 @@ pub:
val_var string val_var string
stmts []Stmt stmts []Stmt
kind ComptimeForKind kind ComptimeForKind
pos token.Position pos token.Pos
typ_pos token.Position typ_pos token.Pos
pub mut: pub mut:
// expr Expr // expr Expr
typ Type typ Type
@ -943,7 +943,7 @@ pub mut:
pub struct ForStmt { pub struct ForStmt {
pub: pub:
is_inf bool // `for {}` is_inf bool // `for {}`
pos token.Position pos token.Pos
pub mut: pub mut:
cond Expr cond Expr
stmts []Stmt stmts []Stmt
@ -959,7 +959,7 @@ pub:
is_range bool is_range bool
high Expr // `10` in `for i in 0..10 {` high Expr // `10` in `for i in 0..10 {`
stmts []Stmt stmts []Stmt
pos token.Position pos token.Pos
val_is_mut bool // `for mut val in vals {` means that modifying `val` will modify the array val_is_mut bool // `for mut val in vals {` means that modifying `val` will modify the array
// and the array cannot be indexed inside the loop // and the array cannot be indexed inside the loop
pub mut: pub mut:
@ -978,7 +978,7 @@ pub:
has_cond bool has_cond bool
has_inc bool has_inc bool
is_multi bool // for a,b := 0,1; a < 10; a,b = a+b, a {...} is_multi bool // for a,b := 0,1; a < 10; a,b = a+b, a {...}
pos token.Position pos token.Pos
pub mut: pub mut:
init Stmt // i := 0; init Stmt // i := 0;
cond Expr // i < 10; cond Expr // i < 10;
@ -992,7 +992,7 @@ pub mut:
pub struct HashStmt { pub struct HashStmt {
pub: pub:
mod string mod string
pos token.Position pos token.Pos
source_file string source_file string
pub mut: pub mut:
val string // example: 'include <openssl/rand.h> # please install openssl // comment' val string // example: 'include <openssl/rand.h> # please install openssl // comment'
@ -1014,7 +1014,7 @@ pub:
pub struct AssignStmt { pub struct AssignStmt {
pub: pub:
op token.Kind // include: =,:=,+=,-=,*=,/= and so on; for a list of all the assign operators, see vlib/token/token.v op token.Kind // include: =,:=,+=,-=,*=,/= and so on; for a list of all the assign operators, see vlib/token/token.v
pos token.Position pos token.Pos
comments []Comment comments []Comment
end_comments []Comment end_comments []Comment
pub mut: pub mut:
@ -1032,7 +1032,7 @@ pub mut:
pub struct AsCast { pub struct AsCast {
pub: pub:
typ Type // to type typ Type // to type
pos token.Position pos token.Pos
pub mut: pub mut:
expr Expr // from expr: `expr` in `expr as Ident` expr Expr // from expr: `expr` in `expr as Ident`
expr_type Type // from type expr_type Type // from type
@ -1044,7 +1044,7 @@ pub:
enum_name string enum_name string
val string val string
mod string // for full path `mod_Enum_val` mod string // for full path `mod_Enum_val`
pos token.Position pos token.Pos
pub mut: pub mut:
typ Type typ Type
} }
@ -1053,7 +1053,7 @@ pub mut:
pub struct EnumField { pub struct EnumField {
pub: pub:
name string name string
pos token.Position pos token.Pos
comments []Comment // comment after Enumfield in the same line comments []Comment // comment after Enumfield in the same line
next_comments []Comment // comments between current EnumField and next EnumField next_comments []Comment // comments between current EnumField and next EnumField
has_expr bool // true, when .expr has a value has_expr bool // true, when .expr has a value
@ -1071,7 +1071,7 @@ pub:
comments []Comment // comments before the first EnumField comments []Comment // comments before the first EnumField
fields []EnumField // all the enum fields fields []EnumField // all the enum fields
attrs []Attr // attributes of enum declaration attrs []Attr // attributes of enum declaration
pos token.Position pos token.Pos
} }
pub struct AliasTypeDecl { pub struct AliasTypeDecl {
@ -1079,8 +1079,8 @@ pub:
name string name string
is_pub bool is_pub bool
parent_type Type parent_type Type
pos token.Position pos token.Pos
type_pos token.Position type_pos token.Pos
comments []Comment comments []Comment
} }
@ -1089,7 +1089,7 @@ pub struct SumTypeDecl {
pub: pub:
name string name string
is_pub bool is_pub bool
pos token.Position pos token.Pos
comments []Comment comments []Comment
typ Type typ Type
generic_types []Type generic_types []Type
@ -1103,8 +1103,8 @@ pub:
name string name string
is_pub bool is_pub bool
typ Type typ Type
pos token.Position pos token.Pos
type_pos token.Position type_pos token.Pos
comments []Comment comments []Comment
} }
@ -1114,7 +1114,7 @@ pub:
pub struct DeferStmt { pub struct DeferStmt {
pub: pub:
stmts []Stmt stmts []Stmt
pos token.Position pos token.Pos
pub mut: pub mut:
defer_vars []Ident defer_vars []Ident
ifdef string ifdef string
@ -1124,14 +1124,14 @@ pub mut:
// `(3+4)` // `(3+4)`
pub struct ParExpr { pub struct ParExpr {
pub: pub:
pos token.Position pos token.Pos
pub mut: pub mut:
expr Expr expr Expr
} }
pub struct GoExpr { pub struct GoExpr {
pub: pub:
pos token.Position pos token.Pos
pub mut: pub mut:
call_expr CallExpr call_expr CallExpr
is_expr bool is_expr bool
@ -1140,20 +1140,20 @@ pub mut:
pub struct GotoLabel { pub struct GotoLabel {
pub: pub:
name string name string
pos token.Position pos token.Pos
} }
pub struct GotoStmt { pub struct GotoStmt {
pub: pub:
name string name string
pos token.Position pos token.Pos
} }
pub struct ArrayInit { pub struct ArrayInit {
pub: pub:
pos token.Position // `[]` in []Type{} position pos token.Pos // `[]` in []Type{} position
elem_type_pos token.Position // `Type` in []Type{} position elem_type_pos token.Pos // `Type` in []Type{} position
ecmnts [][]Comment // optional iembed comments after each expr ecmnts [][]Comment // optional iembed comments after each expr
pre_cmnts []Comment pre_cmnts []Comment
is_fixed bool is_fixed bool
has_val bool // fixed size literal `[expr, expr]!` has_val bool // fixed size literal `[expr, expr]!`
@ -1175,7 +1175,7 @@ pub mut:
pub struct ArrayDecompose { pub struct ArrayDecompose {
pub: pub:
pos token.Position pos token.Pos
pub mut: pub mut:
expr Expr expr Expr
expr_type Type expr_type Type
@ -1184,7 +1184,7 @@ pub mut:
pub struct ChanInit { pub struct ChanInit {
pub: pub:
pos token.Position pos token.Pos
has_cap bool has_cap bool
pub mut: pub mut:
cap_expr Expr cap_expr Expr
@ -1194,7 +1194,7 @@ pub mut:
pub struct MapInit { pub struct MapInit {
pub: pub:
pos token.Position pos token.Pos
comments [][]Comment // comments after key-value pairs comments [][]Comment // comments after key-value pairs
pre_cmnts []Comment // comments before the first key-value pair pre_cmnts []Comment // comments before the first key-value pair
pub mut: pub mut:
@ -1210,7 +1210,7 @@ pub struct RangeExpr {
pub: pub:
has_high bool has_high bool
has_low bool has_low bool
pos token.Position pos token.Pos
is_gated bool // #[] gated array is_gated bool // #[] gated array
pub mut: pub mut:
low Expr low Expr
@ -1225,7 +1225,7 @@ pub mut:
typname string // `&Type` in `&Type(buf)` typname string // `&Type` in `&Type(buf)`
expr_type Type // `byteptr`, the type of the `buf` expression expr_type Type // `byteptr`, the type of the `buf` expression
has_arg bool // true for `string(buf, n)`, false for `&Type(buf)` has_arg bool // true for `string(buf, n)`, false for `&Type(buf)`
pos token.Position pos token.Pos
} }
pub struct AsmStmt { pub struct AsmStmt {
@ -1235,7 +1235,7 @@ pub:
is_volatile bool is_volatile bool
is_goto bool is_goto bool
clobbered []AsmClobbered clobbered []AsmClobbered
pos token.Position pos token.Pos
pub mut: pub mut:
templates []AsmTemplate templates []AsmTemplate
scope &Scope scope &Scope
@ -1252,7 +1252,7 @@ pub mut:
is_directive bool // .globl assembly_function is_directive bool // .globl assembly_function
args []AsmArg args []AsmArg
comments []Comment comments []Comment
pos token.Position pos token.Pos
} }
// [eax+5] | j | displacement literal (e.g. 123 in [rax + 123] ) | eax | true | `a` | 0.594 | 123 | label_name // [eax+5] | j | displacement literal (e.g. 123 in [rax + 123] ) | eax | true | `a` | 0.594 | 123 | label_name
@ -1276,12 +1276,12 @@ pub mut:
pub struct AsmDisp { pub struct AsmDisp {
pub: pub:
val string val string
pos token.Position pos token.Pos
} }
pub struct AsmAlias { pub struct AsmAlias {
pub: pub:
pos token.Position pos token.Pos
pub mut: pub mut:
name string // a name string // a
} }
@ -1290,7 +1290,7 @@ pub struct AsmAddressing {
pub: pub:
scale int = -1 // 1, 2, 4, or 8 literal scale int = -1 // 1, 2, 4, or 8 literal
mode AddressingMode mode AddressingMode
pos token.Position pos token.Pos
pub mut: pub mut:
segment string // fs: segment string // fs:
displacement AsmArg // 8, 16 or 32 bit literal value displacement AsmArg // 8, 16 or 32 bit literal value
@ -1324,7 +1324,7 @@ pub:
expr Expr // (a) expr Expr // (a)
comments []Comment // // this is a comment comments []Comment // // this is a comment
typ Type typ Type
pos token.Position pos token.Pos
} }
pub const ( pub const (
@ -1411,7 +1411,7 @@ pub const (
pub struct AssertStmt { pub struct AssertStmt {
pub: pub:
pos token.Position pos token.Pos
pub mut: pub mut:
expr Expr expr Expr
is_used bool // asserts are used in _test.v files, as well as in non -prod builds of all files is_used bool // asserts are used in _test.v files, as well as in non -prod builds of all files
@ -1421,7 +1421,7 @@ pub struct IfGuardVar {
pub mut: pub mut:
name string name string
is_mut bool is_mut bool
pos token.Position pos token.Pos
} }
// `if x := opt() {` // `if x := opt() {`
@ -1444,7 +1444,7 @@ pub struct OrExpr {
pub: pub:
stmts []Stmt stmts []Stmt
kind OrKind kind OrKind
pos token.Position pos token.Pos
} }
/* /*
@ -1454,7 +1454,7 @@ pub:
call_expr CallExpr call_expr CallExpr
stmts []Stmt // inside `or { }` stmts []Stmt // inside `or { }`
kind OrKind kind OrKind
pos token.Position pos token.Pos
} }
*/ */
@ -1463,7 +1463,7 @@ pub struct Assoc {
pub: pub:
var_name string var_name string
fields []string fields []string
pos token.Position pos token.Pos
pub mut: pub mut:
exprs []Expr exprs []Expr
typ Type typ Type
@ -1473,7 +1473,7 @@ pub mut:
pub struct SizeOf { pub struct SizeOf {
pub: pub:
is_type bool is_type bool
pos token.Position pos token.Pos
pub mut: pub mut:
expr Expr // checker uses this to set typ expr Expr // checker uses this to set typ
typ Type typ Type
@ -1482,7 +1482,7 @@ pub mut:
pub struct IsRefType { pub struct IsRefType {
pub: pub:
is_type bool is_type bool
pos token.Position pos token.Pos
pub mut: pub mut:
expr Expr // checker uses this to set typ expr Expr // checker uses this to set typ
typ Type typ Type
@ -1492,12 +1492,12 @@ pub struct OffsetOf {
pub: pub:
struct_type Type struct_type Type
field string field string
pos token.Position pos token.Pos
} }
pub struct Likely { pub struct Likely {
pub: pub:
pos token.Position pos token.Pos
is_likely bool // false for _unlikely_ is_likely bool // false for _unlikely_
pub mut: pub mut:
expr Expr expr Expr
@ -1505,7 +1505,7 @@ pub mut:
pub struct TypeOf { pub struct TypeOf {
pub: pub:
pos token.Position pos token.Pos
pub mut: pub mut:
expr Expr expr Expr
expr_type Type expr_type Type
@ -1513,7 +1513,7 @@ pub mut:
pub struct DumpExpr { pub struct DumpExpr {
pub: pub:
pos token.Position pos token.Pos
pub mut: pub mut:
expr Expr expr Expr
expr_type Type expr_type Type
@ -1525,13 +1525,13 @@ pub:
text string text string
is_multi bool // true only for /* comment */, that use many lines is_multi bool // true only for /* comment */, that use many lines
is_inline bool // true for all /* comment */ comments is_inline bool // true for all /* comment */ comments
pos token.Position pos token.Pos
} }
pub struct ConcatExpr { pub struct ConcatExpr {
pub: pub:
vals []Expr vals []Expr
pos token.Position pos token.Pos
pub mut: pub mut:
return_type Type return_type Type
} }
@ -1540,7 +1540,7 @@ pub mut:
pub struct AtExpr { pub struct AtExpr {
pub: pub:
name string name string
pos token.Position pos token.Pos
kind token.AtKind kind token.AtKind
pub mut: pub mut:
val string val string
@ -1549,7 +1549,7 @@ pub mut:
pub struct ComptimeSelector { pub struct ComptimeSelector {
pub: pub:
has_parens bool // if $() is used, for vfmt has_parens bool // if $() is used, for vfmt
pos token.Position pos token.Pos
pub mut: pub mut:
left Expr left Expr
left_type Type left_type Type
@ -1559,10 +1559,10 @@ pub mut:
pub struct ComptimeCall { pub struct ComptimeCall {
pub: pub:
pos token.Position pos token.Pos
has_parens bool // if $() is used, for vfmt has_parens bool // if $() is used, for vfmt
method_name string method_name string
method_pos token.Position method_pos token.Pos
scope &Scope scope &Scope
left Expr left Expr
args_var string args_var string
@ -1573,7 +1573,7 @@ pub:
is_embed bool is_embed bool
// //
is_env bool is_env bool
env_pos token.Position env_pos token.Pos
// //
is_pkgconfig bool is_pkgconfig bool
pub mut: pub mut:
@ -1586,7 +1586,7 @@ pub mut:
pub struct None { pub struct None {
pub: pub:
pos token.Position pos token.Pos
} }
pub enum SqlStmtKind { pub enum SqlStmtKind {
@ -1599,7 +1599,7 @@ pub enum SqlStmtKind {
pub struct SqlStmt { pub struct SqlStmt {
pub: pub:
pos token.Position pos token.Pos
db_expr Expr // `db` in `sql db {` db_expr Expr // `db` in `sql db {`
pub mut: pub mut:
lines []SqlStmtLine lines []SqlStmtLine
@ -1608,7 +1608,7 @@ pub mut:
pub struct SqlStmtLine { pub struct SqlStmtLine {
pub: pub:
kind SqlStmtKind kind SqlStmtKind
pos token.Position pos token.Pos
where_expr Expr where_expr Expr
update_exprs []Expr // for `update` update_exprs []Expr // for `update`
pub mut: pub mut:
@ -1629,7 +1629,7 @@ pub:
has_offset bool has_offset bool
has_desc bool has_desc bool
is_array bool is_array bool
pos token.Position pos token.Pos
pub mut: pub mut:
db_expr Expr // `db` in `sql db {` db_expr Expr // `db` in `sql db {`
where_expr Expr where_expr Expr
@ -1644,7 +1644,7 @@ pub mut:
pub struct NodeError { pub struct NodeError {
pub: pub:
idx int // index for referencing the related File error idx int // index for referencing the related File error
pos token.Position pos token.Pos
} }
[inline] [inline]
@ -1655,7 +1655,7 @@ pub fn (expr Expr) is_blank_ident() bool {
} }
} }
pub fn (expr Expr) position() token.Position { pub fn (expr Expr) pos() token.Pos {
// all uncommented have to be implemented // all uncommented have to be implemented
// NB: please do not print here. the language server will hang // NB: please do not print here. the language server will hang
// as it uses STDIO primarly to communicate ~Ned // as it uses STDIO primarly to communicate ~Ned
@ -1664,8 +1664,8 @@ pub fn (expr Expr) position() token.Position {
return expr.decl.pos return expr.decl.pos
} }
CTempVar, EmptyExpr { CTempVar, EmptyExpr {
// println('compiler bug, unhandled EmptyExpr position()') // println('compiler bug, unhandled EmptyExpr pos()')
return token.Position{} return token.Pos{}
} }
NodeError, ArrayDecompose, ArrayInit, AsCast, Assoc, AtExpr, BoolLiteral, CallExpr, NodeError, ArrayDecompose, ArrayInit, AsCast, Assoc, AtExpr, BoolLiteral, CallExpr,
CastExpr, ChanInit, CharLiteral, ConcatExpr, Comment, ComptimeCall, ComptimeSelector, CastExpr, ChanInit, CharLiteral, ConcatExpr, Comment, ComptimeCall, ComptimeSelector,
@ -1682,12 +1682,12 @@ pub fn (expr Expr) position() token.Position {
return expr.pos return expr.pos
} }
IfGuardExpr { IfGuardExpr {
return expr.expr.position() return expr.expr.pos()
} }
InfixExpr { InfixExpr {
left_pos := expr.left.position() left_pos := expr.left.pos()
right_pos := expr.right.position() right_pos := expr.right.pos()
return token.Position{ return token.Pos{
line_nr: expr.pos.line_nr line_nr: expr.pos.line_nr
pos: left_pos.pos pos: left_pos.pos
len: right_pos.pos - left_pos.pos + right_pos.len len: right_pos.pos - left_pos.pos + right_pos.len
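The `InfixExpr` arm above synthesizes a `Pos` that spans both operands: it keeps the line of the expression itself, starts at the left operand's offset, and runs through the end of the right operand. A worked sketch with invented offsets (`Pos` is again a local mirror, not the compiler's `token.Pos`):

struct Pos {
	line_nr int
	pos     int
	len     int
}

fn main() {
	// `a + b`: `a` at offset 10 (len 1), `+` at 12, `b` at offset 14 (len 1)
	left := Pos{ line_nr: 3, pos: 10, len: 1 }
	op := Pos{ line_nr: 3, pos: 12, len: 1 } // the real code takes line_nr from expr.pos
	right := Pos{ line_nr: 3, pos: 14, len: 1 }
	combined := Pos{
		line_nr: op.line_nr
		pos: left.pos
		len: right.pos - left.pos + right.len // 14 - 10 + 1 = 5
	}
	println('span: offset $combined.pos, len $combined.len') // covers all of `a + b`
}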
@ -1793,13 +1793,13 @@ pub mut:
orig Expr // the original expression, which produced the C temp variable; used by x.str() orig Expr // the original expression, which produced the C temp variable; used by x.str()
} }
pub fn (node Node) position() token.Position { pub fn (node Node) pos() token.Pos {
match node { match node {
NodeError { NodeError {
return token.Position{} return token.Pos{}
} }
EmptyNode { EmptyNode {
return token.Position{} return token.Pos{}
} }
Stmt { Stmt {
mut pos := node.pos mut pos := node.pos
@ -1820,15 +1820,15 @@ pub fn (node Node) position() token.Position {
} }
} }
if node is AssignStmt { if node is AssignStmt {
return pos.extend(node.right.last().position()) return pos.extend(node.right.last().pos())
} }
if node is AssertStmt { if node is AssertStmt {
return pos.extend(node.expr.position()) return pos.extend(node.expr.pos())
} }
return pos return pos
} }
Expr { Expr {
return node.position() return node.pos()
} }
StructField { StructField {
return node.pos.extend(node.type_pos) return node.pos.extend(node.type_pos)
@ -1848,7 +1848,7 @@ pub fn (node Node) position() token.Position {
return node.pos return node.pos
} }
AsmRegister { AsmRegister {
return token.Position{ return token.Pos{
len: -1 len: -1
line_nr: -1 line_nr: -1
pos: -1 pos: -1
@ -1859,7 +1859,7 @@ pub fn (node Node) position() token.Position {
} }
} }
File { File {
mut pos := token.Position{} mut pos := token.Pos{}
if node.stmts.len > 0 { if node.stmts.len > 0 {
first_pos := node.stmts.first().pos first_pos := node.stmts.first().pos
last_pos := node.stmts.last().pos last_pos := node.stmts.last().pos

View File

@ -22,7 +22,7 @@ pub:
kind AttrKind kind AttrKind
ct_expr Expr // .kind == comptime_define, for [if !name] ct_expr Expr // .kind == comptime_define, for [if !name]
ct_opt bool // true for [if user_defined_name?] ct_opt bool // true for [if user_defined_name?]
pos token.Position pos token.Pos
pub mut: pub mut:
ct_evaled bool // whether ct_skip has been evaluated already ct_evaled bool // whether ct_skip has been evaluated already
ct_skip bool // is the comptime expr *false*, filled by checker ct_skip bool // is the comptime expr *false*, filled by checker

View File

@ -95,8 +95,8 @@ pub:
mod string mod string
file string file string
file_mode Language file_mode Language
pos token.Position pos token.Pos
return_type_pos token.Position return_type_pos token.Pos
pub mut: pub mut:
return_type Type return_type Type
receiver_type Type // != 0, when .is_method == true receiver_type Type // != 0, when .is_method == true
@ -119,11 +119,11 @@ fn (f &Fn) method_equals(o &Fn) bool {
pub struct Param { pub struct Param {
pub: pub:
pos token.Position pos token.Pos
name string name string
is_mut bool is_mut bool
is_auto_rec bool is_auto_rec bool
type_pos token.Position type_pos token.Pos
is_hidden bool // interface first arg is_hidden bool // interface first arg
pub mut: pub mut:
typ Type typ Type

View File

@ -16,7 +16,7 @@ mut:
} }
fn (mut n NodeByOffset) visit(node &ast.Node) ? { fn (mut n NodeByOffset) visit(node &ast.Node) ? {
node_pos := node.position() node_pos := node.pos()
if n.pos >= node_pos.pos && n.pos <= node_pos.pos + node_pos.len && node !is ast.File { if n.pos >= node_pos.pos && n.pos <= node_pos.pos + node_pos.len && node !is ast.File {
n.node = node n.node = node
return error('') return error('')
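The visitor above keeps the first node whose span contains the requested byte offset and then returns an error, presumably to stop the walk early. The containment test itself is just two comparisons; a standalone sketch with an illustrative `Pos` mirror and invented offsets:

struct Pos {
	pos int
	len int
}

fn contains(node_pos Pos, offset int) bool {
	return offset >= node_pos.pos && offset <= node_pos.pos + node_pos.len
}

fn main() {
	decl := Pos{ pos: 100, len: 40 } // a node spanning offsets 100..140
	println(contains(decl, 120)) // true: the offset falls inside the node
	println(contains(decl, 150)) // false: past the node's end
}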

View File

@ -577,7 +577,7 @@ struct FunctionRedefinition {
f ast.FnDecl f ast.FnDecl
} }
pub fn (b &Builder) error_with_pos(s string, fpath string, pos token.Position) errors.Error { pub fn (b &Builder) error_with_pos(s string, fpath string, pos token.Pos) errors.Error {
if !b.pref.check_only { if !b.pref.check_only {
ferror := util.formatted_error('builder error:', s, fpath, pos) ferror := util.formatted_error('builder error:', s, fpath, pos)
eprintln(ferror) eprintln(ferror)

View File

@ -28,7 +28,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
if right_type_sym.kind == .multi_return { if right_type_sym.kind == .multi_return {
if node.right.len > 1 { if node.right.len > 1 {
c.error('cannot use multi-value $right_type_sym.name in single-value context', c.error('cannot use multi-value $right_type_sym.name in single-value context',
right.position()) right.pos())
} }
node.right_types = right_type_sym.mr_info().types node.right_types = right_type_sym.mr_info().types
right_len = node.right_types.len right_len = node.right_types.len
@ -99,7 +99,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
} else { } else {
if node.right[i] is ast.StructInit { if node.right[i] is ast.StructInit {
c.warn('assigning a struct literal to a map is deprecated - use `map{}` instead', c.warn('assigning a struct literal to a map is deprecated - use `map{}` instead',
node.right[i].position()) node.right[i].pos())
node.right[i] = ast.MapInit{} node.right[i] = ast.MapInit{}
} }
} }
@ -315,7 +315,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
} }
} }
if is_decl { if is_decl {
c.error('non-name `$left` on left side of `:=`', left.position()) c.error('non-name `$left` on left side of `:=`', left.pos())
} }
} }
} }
@ -357,7 +357,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
|| !right_type.is_ptr()) && !left.is_blank_ident() && right.is_lvalue() { || !right_type.is_ptr()) && !left.is_blank_ident() && right.is_lvalue() {
// Do not allow `a = b` // Do not allow `a = b`
c.error('cannot copy map: call `move` or `clone` method (or use a reference)', c.error('cannot copy map: call `move` or `clone` method (or use a reference)',
right.position()) right.pos())
} }
left_is_ptr := left_type.is_ptr() || left_sym.is_pointer() left_is_ptr := left_type.is_ptr() || left_sym.is_pointer()
if left_is_ptr && !left.is_auto_deref_var() { if left_is_ptr && !left.is_auto_deref_var() {
@ -368,7 +368,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
right_is_ptr := right_type.is_ptr() || right_sym.is_pointer() right_is_ptr := right_type.is_ptr() || right_sym.is_pointer()
if !right_is_ptr && node.op == .assign && right_type_unwrapped.is_number() { if !right_is_ptr && node.op == .assign && right_type_unwrapped.is_number() {
c.error('cannot assign to `$left`: ' + c.error('cannot assign to `$left`: ' +
c.expected_msg(right_type_unwrapped, left_type_unwrapped), right.position()) c.expected_msg(right_type_unwrapped, left_type_unwrapped), right.pos())
} }
if (right is ast.StructInit || !right_is_ptr) && !(right_sym.is_number() if (right is ast.StructInit || !right_is_ptr) && !(right_sym.is_number()
|| left_type.has_flag(.shared_f)) { || left_type.has_flag(.shared_f)) {
@ -388,41 +388,41 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
if left_type == ast.string_type { if left_type == ast.string_type {
if node.op != .plus_assign { if node.op != .plus_assign {
c.error('operator `$node.op` not defined on left operand type `$left_sym.name`', c.error('operator `$node.op` not defined on left operand type `$left_sym.name`',
left.position()) left.pos())
} }
if right_type != ast.string_type { if right_type != ast.string_type {
c.error('invalid right operand: $left_sym.name $node.op $right_sym.name', c.error('invalid right operand: $left_sym.name $node.op $right_sym.name',
right.position()) right.pos())
} }
} else if !left_sym.is_number() } else if !left_sym.is_number()
&& left_sym.kind !in [.byteptr, .charptr, .struct_, .alias] { && left_sym.kind !in [.byteptr, .charptr, .struct_, .alias] {
c.error('operator `$node.op` not defined on left operand type `$left_sym.name`', c.error('operator `$node.op` not defined on left operand type `$left_sym.name`',
left.position()) left.pos())
} else if !right_sym.is_number() } else if !right_sym.is_number()
&& left_sym.kind !in [.byteptr, .charptr, .struct_, .alias] { && left_sym.kind !in [.byteptr, .charptr, .struct_, .alias] {
c.error('invalid right operand: $left_sym.name $node.op $right_sym.name', c.error('invalid right operand: $left_sym.name $node.op $right_sym.name',
right.position()) right.pos())
} }
} }
.mult_assign, .div_assign { .mult_assign, .div_assign {
if !left_sym.is_number() && !c.table.final_sym(left_type_unwrapped).is_int() if !left_sym.is_number() && !c.table.final_sym(left_type_unwrapped).is_int()
&& left_sym.kind !in [.struct_, .alias] { && left_sym.kind !in [.struct_, .alias] {
c.error('operator $node.op.str() not defined on left operand type `$left_sym.name`', c.error('operator $node.op.str() not defined on left operand type `$left_sym.name`',
left.position()) left.pos())
} else if !right_sym.is_number() && !c.table.final_sym(left_type_unwrapped).is_int() } else if !right_sym.is_number() && !c.table.final_sym(left_type_unwrapped).is_int()
&& left_sym.kind !in [.struct_, .alias] { && left_sym.kind !in [.struct_, .alias] {
c.error('operator $node.op.str() not defined on right operand type `$right_sym.name`', c.error('operator $node.op.str() not defined on right operand type `$right_sym.name`',
right.position()) right.pos())
} }
} }
.and_assign, .or_assign, .xor_assign, .mod_assign, .left_shift_assign, .and_assign, .or_assign, .xor_assign, .mod_assign, .left_shift_assign,
.right_shift_assign { .right_shift_assign {
if !left_sym.is_int() && !c.table.final_sym(left_type_unwrapped).is_int() { if !left_sym.is_int() && !c.table.final_sym(left_type_unwrapped).is_int() {
c.error('operator $node.op.str() not defined on left operand type `$left_sym.name`', c.error('operator $node.op.str() not defined on left operand type `$left_sym.name`',
left.position()) left.pos())
} else if !right_sym.is_int() && !c.table.final_sym(right_type_unwrapped).is_int() { } else if !right_sym.is_int() && !c.table.final_sym(right_type_unwrapped).is_int() {
c.error('operator $node.op.str() not defined on right operand type `$right_sym.name`', c.error('operator $node.op.str() not defined on right operand type `$right_sym.name`',
right.position()) right.pos())
} }
} }
.unsigned_right_shift_assign { .unsigned_right_shift_assign {
@ -532,12 +532,12 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
node.pos) node.pos)
} }
} else { } else {
c.error('cannot assign to `$left`: $err.msg', right.position()) c.error('cannot assign to `$left`: $err.msg', right.pos())
} }
} }
} }
if left_sym.kind == .interface_ { if left_sym.kind == .interface_ {
if c.type_implements(right_type, left_type, right.position()) { if c.type_implements(right_type, left_type, right.pos()) {
if !right_type.is_ptr() && !right_type.is_pointer() && right_sym.kind != .interface_ if !right_type.is_ptr() && !right_type.is_pointer() && right_sym.kind != .interface_
&& !c.inside_unsafe { && !c.inside_unsafe {
c.mark_as_referenced(mut &node.right[i], true) c.mark_as_referenced(mut &node.right[i], true)
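These assign_stmt hunks only change where the diagnostics point (left.pos() and right.pos()); the operator rules themselves are untouched: on strings only `+=` is defined, and its right operand must also be a string. A minimal sketch of assignments those branches reject, as hypothetical user code that is not part of this diff:

fn main() {
	mut s := 'abc'
	s += 'def' // ok: `+=` is defined on strings
	println(s)
	// rejected by assign_stmt: `-=` is not defined on a string left operand
	// s -= 'def'
	// rejected by assign_stmt: the right operand of `+=` on a string must be a string
	// s += 1
}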


@ -296,14 +296,14 @@ fn (mut c Checker) check_shift(mut node ast.InfixExpr, left_type ast.Type, right
// allow `bool << 2` in translated C code // allow `bool << 2` in translated C code
return ast.int_type return ast.int_type
} }
c.error('invalid operation: shift on type `$left_sym.name`', node.left.position()) c.error('invalid operation: shift on type `$left_sym.name`', node.left.pos())
return ast.void_type return ast.void_type
} }
if !right_type.is_int() && !c.pref.translated { if !right_type.is_int() && !c.pref.translated {
left_sym := c.table.sym(left_type) left_sym := c.table.sym(left_type)
right_sym := c.table.sym(right_type) right_sym := c.table.sym(right_type)
c.error('cannot shift non-integer type `$right_sym.name` into type `$left_sym.name`', c.error('cannot shift non-integer type `$right_sym.name` into type `$left_sym.name`',
node.right.position()) node.right.pos())
return ast.void_type return ast.void_type
} }
// At this point, it is guaranteed that we have a `number1 << number2`, or `number1 >> number2`, or `number1 >>> number2`: // At this point, it is guaranteed that we have a `number1 << number2`, or `number1 >> number2`, or `number1 >>> number2`:
@ -346,13 +346,13 @@ fn (mut c Checker) check_shift(mut node ast.InfixExpr, left_type ast.Type, right
if node.op == .left_shift && left_type_final.is_signed() && !(c.inside_unsafe if node.op == .left_shift && left_type_final.is_signed() && !(c.inside_unsafe
&& c.is_generated) { && c.is_generated) {
c.note('shifting a value from a signed type `$left_sym_final.name` can change the sign', c.note('shifting a value from a signed type `$left_sym_final.name` can change the sign',
node.left.position()) node.left.pos())
} }
if node.ct_right_value_evaled { if node.ct_right_value_evaled {
if node.ct_right_value !is ast.EmptyExpr { if node.ct_right_value !is ast.EmptyExpr {
ival := node.ct_right_value.i64() or { -999 } ival := node.ct_right_value.i64() or { -999 }
if ival < 0 { if ival < 0 {
c.error('invalid negative shift count', node.right.position()) c.error('invalid negative shift count', node.right.pos())
return left_type return left_type
} }
moffset := match left_type_final { moffset := match left_type_final {
@ -371,13 +371,13 @@ fn (mut c Checker) check_shift(mut node ast.InfixExpr, left_type ast.Type, right
} }
if ival > moffset && !c.pref.translated { if ival > moffset && !c.pref.translated {
c.error('shift count for type `$left_sym_final.name` too large (maximum: $moffset bits)', c.error('shift count for type `$left_sym_final.name` too large (maximum: $moffset bits)',
node.right.position()) node.right.pos())
return left_type return left_type
} }
if node.ct_left_value_evaled { if node.ct_left_value_evaled {
if lval := node.ct_left_value.i64() { if lval := node.ct_left_value.i64() {
if lval < 0 { if lval < 0 {
c.error('invalid bitshift of a negative number', node.left.position()) c.error('invalid bitshift of a negative number', node.left.pos())
return left_type return left_type
} }
} }
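The check_shift hunks above keep the existing validation and only switch the reported positions to pos(): a compile-time-known negative shift count, and a count larger than the left operand's type can hold, are still rejected. A minimal sketch of what that rules out, as hypothetical user code that is not part of this diff:

fn main() {
	x := u64(1)
	println(x << 3) // ok: small positive count on an unsigned operand
	// rejected by check_shift: the count is compile-time negative
	// println(x << -1)
	// rejected by check_shift: the count exceeds the bit width of `u64`
	// println(x << 70)
}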
@ -527,7 +527,7 @@ pub fn (mut c Checker) get_default_fmt(ftyp ast.Type, typ ast.Type) byte {
} }
pub fn (mut c Checker) fail_if_unreadable(expr ast.Expr, typ ast.Type, what string) { pub fn (mut c Checker) fail_if_unreadable(expr ast.Expr, typ ast.Type, what string) {
mut pos := token.Position{} mut pos := token.Pos{}
match expr { match expr {
ast.Ident { ast.Ident {
if typ.has_flag(.shared_f) { if typ.has_flag(.shared_f) {
@ -554,11 +554,11 @@ pub fn (mut c Checker) fail_if_unreadable(expr ast.Expr, typ ast.Type, what stri
} }
} }
ast.IndexExpr { ast.IndexExpr {
pos = expr.left.position().extend(expr.pos) pos = expr.left.pos().extend(expr.pos)
c.fail_if_unreadable(expr.left, expr.left_type, what) c.fail_if_unreadable(expr.left, expr.left_type, what)
} }
else { else {
pos = expr.position() pos = expr.pos()
} }
} }
if typ.has_flag(.shared_f) { if typ.has_flag(.shared_f) {
@ -573,10 +573,10 @@ pub fn (mut c Checker) string_inter_lit(mut node ast.StringInterLiteral) ast.Typ
for i, expr in node.exprs { for i, expr in node.exprs {
ftyp := c.expr(expr) ftyp := c.expr(expr)
if ftyp == ast.void_type { if ftyp == ast.void_type {
c.error('expression does not return a value', expr.position()) c.error('expression does not return a value', expr.pos())
} else if ftyp == ast.char_type && ftyp.nr_muls() == 0 { } else if ftyp == ast.char_type && ftyp.nr_muls() == 0 {
c.error('expression returning type `char` cannot be used in string interpolation directly, print its address or cast it to an integer instead', c.error('expression returning type `char` cannot be used in string interpolation directly, print its address or cast it to an integer instead',
expr.position()) expr.pos())
} }
c.fail_if_unreadable(expr, ftyp, 'interpolation object') c.fail_if_unreadable(expr, ftyp, 'interpolation object')
node.expr_types << ftyp node.expr_types << ftyp
@ -621,7 +621,7 @@ pub fn (mut c Checker) string_inter_lit(mut node ast.StringInterLiteral) ast.Typ
// check recursive str // check recursive str
if c.table.cur_fn.is_method && c.table.cur_fn.name == 'str' if c.table.cur_fn.is_method && c.table.cur_fn.name == 'str'
&& c.table.cur_fn.receiver.name == expr.str() { && c.table.cur_fn.receiver.name == expr.str() {
c.error('cannot call `str()` method recursively', expr.position()) c.error('cannot call `str()` method recursively', expr.pos())
} }
} }
c.inside_println_arg = inside_println_arg_save c.inside_println_arg = inside_println_arg_save
@ -637,7 +637,7 @@ pub fn (mut c Checker) string_lit(mut node ast.StringLiteral) ast.Type {
for idx < node.val.len { for idx < node.val.len {
match node.val[idx] { match node.val[idx] {
`\\` { `\\` {
mut start_pos := token.Position{ mut start_pos := token.Pos{
...node.pos ...node.pos
col: node.pos.col + 1 + idx col: node.pos.col + 1 + idx
} }
@ -650,7 +650,7 @@ pub fn (mut c Checker) string_lit(mut node ast.StringLiteral) ast.Type {
mut hex_char_count := 0 mut hex_char_count := 0
for ch.is_hex_digit() { for ch.is_hex_digit() {
hex_char_count++ hex_char_count++
end_pos := token.Position{ end_pos := token.Pos{
...start_pos ...start_pos
len: idx + 1 - start_idx len: idx + 1 - start_idx
} }
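Both string_lit hunks above build the renamed position struct directly. A minimal sketch of the new spelling, assuming the compiler's `v.token` module and the `Pos` fields used throughout this diff (`line_nr`, `col`, `pos`, `len`); it only demonstrates `token.Pos{...}`, the `...` spread, and `.extend()` in isolation:

import v.token

fn main() {
	start := token.Pos{
		line_nr: 3
		col: 1
		pos: 40
		len: 2
	}
	// `...start` copies the remaining fields, as in `token.Pos{ ...node.pos col: ... }` above
	end := token.Pos{
		...start
		col: 9
		pos: 48
		len: 5
	}
	// extend() merges two positions into one span, as in `expr.left.pos().extend(expr.pos)`
	println(start.extend(end))
}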


@ -336,7 +336,7 @@ pub fn (mut c Checker) check_files(ast_files []&ast.File) {
c.add_error_detail('The name of a test function in V, should start with `test_`.') c.add_error_detail('The name of a test function in V, should start with `test_`.')
c.add_error_detail('The test function should take 0 parameters, and no return type. Example:') c.add_error_detail('The test function should take 0 parameters, and no return type. Example:')
c.add_error_detail('fn test_xyz(){ assert 2 + 2 == 4 }') c.add_error_detail('fn test_xyz(){ assert 2 + 2 == 4 }')
c.error('a _test.v file should have *at least* one `test_` function', token.Position{}) c.error('a _test.v file should have *at least* one `test_` function', token.Pos{})
} }
} }
// Make sure fn main is defined in non lib builds // Make sure fn main is defined in non lib builds
@ -354,9 +354,9 @@ pub fn (mut c Checker) check_files(ast_files []&ast.File) {
} }
if !has_main_mod_file { if !has_main_mod_file {
c.error('project must include a `main` module or be a shared library (compile with `v -shared`)', c.error('project must include a `main` module or be a shared library (compile with `v -shared`)',
token.Position{}) token.Pos{})
} else if !has_main_fn { } else if !has_main_fn {
c.error('function `main` must be declared in the main module', token.Position{}) c.error('function `main` must be declared in the main module', token.Pos{})
} }
} }
@ -388,7 +388,7 @@ fn (mut c Checker) file_has_main_fn(file &ast.File) bool {
return has_main_fn return has_main_fn
} }
fn (mut c Checker) check_valid_snake_case(name string, identifier string, pos token.Position) { fn (mut c Checker) check_valid_snake_case(name string, identifier string, pos token.Pos) {
if !c.pref.is_vweb && !c.pref.translated && name.len > 0 if !c.pref.is_vweb && !c.pref.translated && name.len > 0
&& (name[0] == `_` || name.contains('._')) { && (name[0] == `_` || name.contains('._')) {
c.error('$identifier `$name` cannot start with `_`', pos) c.error('$identifier `$name` cannot start with `_`', pos)
@ -404,7 +404,7 @@ fn stripped_name(name string) string {
return name[(idx + 1)..] return name[(idx + 1)..]
} }
fn (mut c Checker) check_valid_pascal_case(name string, identifier string, pos token.Position) { fn (mut c Checker) check_valid_pascal_case(name string, identifier string, pos token.Pos) {
sname := stripped_name(name) sname := stripped_name(name)
if sname.len > 0 && !sname[0].is_capital() && !c.pref.translated { if sname.len > 0 && !sname[0].is_capital() && !c.pref.translated {
c.error('$identifier `$name` must begin with capital letter', pos) c.error('$identifier `$name` must begin with capital letter', pos)
@ -557,8 +557,8 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
right_final := c.table.final_sym(right_type) right_final := c.table.final_sym(right_type)
mut left_sym := c.table.sym(left_type) mut left_sym := c.table.sym(left_type)
left_final := c.table.final_sym(left_type) left_final := c.table.final_sym(left_type)
left_pos := node.left.position() left_pos := node.left.pos()
right_pos := node.right.position() right_pos := node.right.pos()
left_right_pos := left_pos.extend(right_pos) left_right_pos := left_pos.extend(right_pos)
if left_type.is_any_kind_of_pointer() if left_type.is_any_kind_of_pointer()
&& node.op in [.plus, .minus, .mul, .div, .mod, .xor, .amp, .pipe] { && node.op in [.plus, .minus, .mul, .div, .mod, .xor, .amp, .pipe] {
@ -897,7 +897,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
ast.none_type_idx ast.none_type_idx
} }
else { else {
c.error('invalid type `$right_expr`', right_expr.position()) c.error('invalid type `$right_expr`', right_expr.pos())
ast.Type(0) ast.Type(0)
} }
} }
@ -905,7 +905,7 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
typ_sym := c.table.sym(typ) typ_sym := c.table.sym(typ)
op := node.op.str() op := node.op.str()
if typ_sym.kind == .placeholder { if typ_sym.kind == .placeholder {
c.error('$op: type `$typ_sym.name` does not exist', right_expr.position()) c.error('$op: type `$typ_sym.name` does not exist', right_expr.pos())
} }
if left_sym.kind !in [.interface_, .sum_type] { if left_sym.kind !in [.interface_, .sum_type] {
c.error('`$op` can only be used with interfaces and sum types', node.pos) c.error('`$op` can only be used with interfaces and sum types', node.pos)
@ -941,10 +941,10 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
.and, .logical_or { .and, .logical_or {
if !c.pref.translated { if !c.pref.translated {
if node.left_type != ast.bool_type_idx { if node.left_type != ast.bool_type_idx {
c.error('left operand for `$node.op` is not a boolean', node.left.position()) c.error('left operand for `$node.op` is not a boolean', node.left.pos())
} }
if node.right_type != ast.bool_type_idx { if node.right_type != ast.bool_type_idx {
c.error('right operand for `$node.op` is not a boolean', node.right.position()) c.error('right operand for `$node.op` is not a boolean', node.right.pos())
} }
} }
if mut node.left is ast.InfixExpr { if mut node.left is ast.InfixExpr {
@ -1019,9 +1019,9 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
// returns name and position of variable that needs write lock // returns name and position of variable that needs write lock
// also sets `is_changed` to true (TODO update the name to reflect this?) // also sets `is_changed` to true (TODO update the name to reflect this?)
fn (mut c Checker) fail_if_immutable(expr ast.Expr) (string, token.Position) { fn (mut c Checker) fail_if_immutable(expr ast.Expr) (string, token.Pos) {
mut to_lock := '' // name of variable that needs lock mut to_lock := '' // name of variable that needs lock
mut pos := token.Position{} // and its position mut pos := token.Pos{} // and its position
mut explicit_lock_needed := false mut explicit_lock_needed := false
match mut expr { match mut expr {
ast.CastExpr { ast.CastExpr {
@ -1078,7 +1078,7 @@ fn (mut c Checker) fail_if_immutable(expr ast.Expr) (string, token.Position) {
} }
if elem_type.has_flag(.shared_f) { if elem_type.has_flag(.shared_f) {
c.error('you have to create a handle and `lock` it to modify `shared` $kind element', c.error('you have to create a handle and `lock` it to modify `shared` $kind element',
expr.left.position().extend(expr.pos)) expr.left.pos().extend(expr.pos))
} }
to_lock, pos = c.fail_if_immutable(expr.left) to_lock, pos = c.fail_if_immutable(expr.left)
} }
@ -1205,7 +1205,7 @@ fn (mut c Checker) fail_if_immutable(expr ast.Expr) (string, token.Position) {
} }
else { else {
if !expr.is_lit() { if !expr.is_lit() {
c.error('unexpected expression `$expr.type_name()`', expr.position()) c.error('unexpected expression `$expr.type_name()`', expr.pos())
return '', pos return '', pos
} }
} }
@ -1218,7 +1218,7 @@ fn (mut c Checker) fail_if_immutable(expr ast.Expr) (string, token.Position) {
return to_lock, pos return to_lock, pos
} }
fn (mut c Checker) type_implements(typ ast.Type, interface_type ast.Type, pos token.Position) bool { fn (mut c Checker) type_implements(typ ast.Type, interface_type ast.Type, pos token.Pos) bool {
if typ == interface_type { if typ == interface_type {
return true return true
} }
@ -1441,7 +1441,7 @@ pub fn (mut c Checker) check_or_expr(node ast.OrExpr, ret_type ast.Type, expr_re
type_name := c.table.type_to_str(last_stmt.typ) type_name := c.table.type_to_str(last_stmt.typ)
expr_return_type_name := c.table.type_to_str(expr_return_type) expr_return_type_name := c.table.type_to_str(expr_return_type)
c.error('the default expression type in the `or` block should be `$expr_return_type_name`, instead you gave a value of type `$type_name`', c.error('the default expression type in the `or` block should be `$expr_return_type_name`, instead you gave a value of type `$type_name`',
last_stmt.expr.position()) last_stmt.expr.pos())
} }
else {} else {}
} }
@ -1632,7 +1632,7 @@ pub fn (mut c Checker) const_decl(mut node ast.ConstDecl) {
for field in node.fields { for field in node.fields {
// TODO Check const name once the syntax is decided // TODO Check const name once the syntax is decided
if field.name in c.const_names { if field.name in c.const_names {
name_pos := token.Position{ name_pos := token.Pos{
...field.pos ...field.pos
len: util.no_cur_mod(field.name, c.mod).len len: util.no_cur_mod(field.name, c.mod).len
} }
@ -1703,7 +1703,7 @@ pub fn (mut c Checker) enum_decl(mut node ast.EnumDecl) {
continue continue
} }
} }
mut pos := field.expr.position() mut pos := field.expr.pos()
if pos.pos == 0 { if pos.pos == 0 {
pos = field.pos pos = field.pos
} }
@ -1727,7 +1727,7 @@ pub fn (mut c Checker) enum_decl(mut node ast.EnumDecl) {
} }
[inline] [inline]
fn (mut c Checker) check_loop_label(label string, pos token.Position) { fn (mut c Checker) check_loop_label(label string, pos token.Pos) {
if label.len == 0 { if label.len == 0 {
// ignore // ignore
return return
@ -1970,13 +1970,13 @@ fn (mut c Checker) go_expr(mut node ast.GoExpr) ast.Type {
for arg in node.call_expr.args { for arg in node.call_expr.args {
if arg.is_mut && !arg.typ.is_ptr() { if arg.is_mut && !arg.typ.is_ptr() {
c.error('function in `go` statement cannot contain mutable non-reference arguments', c.error('function in `go` statement cannot contain mutable non-reference arguments',
arg.expr.position()) arg.expr.pos())
} }
} }
if node.call_expr.is_method && node.call_expr.receiver_type.is_ptr() if node.call_expr.is_method && node.call_expr.receiver_type.is_ptr()
&& !node.call_expr.left_type.is_ptr() { && !node.call_expr.left_type.is_ptr() {
c.error('method in `go` statement cannot have non-reference mutable receiver', c.error('method in `go` statement cannot have non-reference mutable receiver',
node.call_expr.left.position()) node.call_expr.left.pos())
} }
if c.pref.backend.is_js() { if c.pref.backend.is_js() {
@ -2256,7 +2256,7 @@ fn (mut c Checker) stmts(stmts []ast.Stmt) {
// `x := if cond { stmt1 stmt2 ExprStmt } else { stmt2 stmt3 ExprStmt }`, // `x := if cond { stmt1 stmt2 ExprStmt } else { stmt2 stmt3 ExprStmt }`,
// `x := match expr { Type1 { stmt1 stmt2 ExprStmt } else { stmt2 stmt3 ExprStmt }`. // `x := match expr { Type1 { stmt1 stmt2 ExprStmt } else { stmt2 stmt3 ExprStmt }`.
fn (mut c Checker) stmts_ending_with_expression(stmts []ast.Stmt) { fn (mut c Checker) stmts_ending_with_expression(stmts []ast.Stmt) {
mut unreachable := token.Position{ mut unreachable := token.Pos{
line_nr: -1 line_nr: -1
} }
c.expected_type = ast.void_type c.expected_type = ast.void_type
@ -2270,7 +2270,7 @@ fn (mut c Checker) stmts_ending_with_expression(stmts []ast.Stmt) {
} }
c.stmt(stmt) c.stmt(stmt)
if stmt is ast.GotoLabel { if stmt is ast.GotoLabel {
unreachable = token.Position{ unreachable = token.Pos{
line_nr: -1 line_nr: -1
} }
c.scope_returns = false c.scope_returns = false
@ -2308,13 +2308,13 @@ pub fn (mut c Checker) expr(node ast.Expr) ast.Type {
// c.expr_level set to 150 so that stack overflow does not occur on windows // c.expr_level set to 150 so that stack overflow does not occur on windows
if c.expr_level > 150 { if c.expr_level > 150 {
c.error('checker: too many expr levels: $c.expr_level ', node.position()) c.error('checker: too many expr levels: $c.expr_level ', node.pos())
return ast.void_type return ast.void_type
} }
match mut node { match mut node {
ast.NodeError {} ast.NodeError {}
ast.EmptyExpr { ast.EmptyExpr {
c.error('checker.expr(): unhandled EmptyExpr', token.Position{}) c.error('checker.expr(): unhandled EmptyExpr', token.Pos{})
} }
ast.CTempVar { ast.CTempVar {
return node.typ return node.typ
@ -2326,7 +2326,7 @@ pub fn (mut c Checker) expr(node ast.Expr) ast.Type {
typ := c.expr(node.expr) typ := c.expr(node.expr)
type_sym := c.table.sym(typ) type_sym := c.table.sym(typ)
if type_sym.kind != .array { if type_sym.kind != .array {
c.error('decomposition can only be used on arrays', node.expr.position()) c.error('decomposition can only be used on arrays', node.expr.pos())
return ast.void_type return ast.void_type
} }
array_info := type_sym.info as ast.Array array_info := type_sym.info as ast.Array
@ -2414,10 +2414,10 @@ pub fn (mut c Checker) expr(node ast.Expr) ast.Type {
expr_sym := c.table.sym(expr_type) expr_sym := c.table.sym(expr_type)
if expr_type != ast.string_type { if expr_type != ast.string_type {
c.error('expected `string` instead of `$expr_sym.name` (e.g. `field.name`)', c.error('expected `string` instead of `$expr_sym.name` (e.g. `field.name`)',
node.field_expr.position()) node.field_expr.pos())
} }
if mut node.field_expr is ast.SelectorExpr { if mut node.field_expr is ast.SelectorExpr {
left_pos := node.field_expr.expr.position() left_pos := node.field_expr.expr.pos()
if c.comptime_fields_type.len == 0 { if c.comptime_fields_type.len == 0 {
c.error('compile time field access can only be used when iterating over `T.fields`', c.error('compile time field access can only be used when iterating over `T.fields`',
left_pos) left_pos)
@ -2428,7 +2428,7 @@ pub fn (mut c Checker) expr(node ast.Expr) ast.Type {
} }
c.error('unknown `\$for` variable `$expr_name`', left_pos) c.error('unknown `\$for` variable `$expr_name`', left_pos)
} else { } else {
c.error('expected selector expression e.g. `$(field.name)`', node.field_expr.position()) c.error('expected selector expression e.g. `$(field.name)`', node.field_expr.pos())
} }
return ast.void_type return ast.void_type
} }
@ -2438,7 +2438,7 @@ pub fn (mut c Checker) expr(node ast.Expr) ast.Type {
ast.DumpExpr { ast.DumpExpr {
node.expr_type = c.expr(node.expr) node.expr_type = c.expr(node.expr)
if node.expr_type.idx() == ast.void_type_idx { if node.expr_type.idx() == ast.void_type_idx {
c.error('dump expression can not be void', node.expr.position()) c.error('dump expression can not be void', node.expr.pos())
return ast.void_type return ast.void_type
} }
tsym := c.table.sym(node.expr_type) tsym := c.table.sym(node.expr_type)
@ -2487,7 +2487,7 @@ pub fn (mut c Checker) expr(node ast.Expr) ast.Type {
else {} else {}
} }
if no_opt { if no_opt {
c.error('expression should return an option', node.expr.position()) c.error('expression should return an option', node.expr.pos())
} }
} }
return ast.bool_type return ast.bool_type
@ -2650,7 +2650,7 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
node.pos) node.pos)
} }
if from_type == ast.void_type { if from_type == ast.void_type {
c.error('expression does not return a value so it cannot be cast', node.expr.position()) c.error('expression does not return a value so it cannot be cast', node.expr.pos())
} }
if to_sym.kind == .sum_type { if to_sym.kind == .sum_type {
if from_type in [ast.int_literal_type, ast.float_literal_type] { if from_type in [ast.int_literal_type, ast.float_literal_type] {
@ -3212,10 +3212,10 @@ pub fn (mut c Checker) select_expr(mut node ast.SelectExpr) ast.Type {
if branch.stmt.expr.left !is ast.Ident if branch.stmt.expr.left !is ast.Ident
&& branch.stmt.expr.left !is ast.SelectorExpr && branch.stmt.expr.left !is ast.SelectorExpr
&& branch.stmt.expr.left !is ast.IndexExpr { && branch.stmt.expr.left !is ast.IndexExpr {
c.error('channel in `select` key must be predefined', branch.stmt.expr.left.position()) c.error('channel in `select` key must be predefined', branch.stmt.expr.left.pos())
} }
} else { } else {
c.error('invalid expression for `select` key', branch.stmt.expr.position()) c.error('invalid expression for `select` key', branch.stmt.expr.pos())
} }
} }
} }
@ -3225,7 +3225,7 @@ pub fn (mut c Checker) select_expr(mut node ast.SelectExpr) ast.Type {
ast.PrefixExpr { ast.PrefixExpr {
if expr.right !is ast.Ident && expr.right !is ast.SelectorExpr if expr.right !is ast.Ident && expr.right !is ast.SelectorExpr
&& expr.right !is ast.IndexExpr { && expr.right !is ast.IndexExpr {
c.error('channel in `select` key must be predefined', expr.right.position()) c.error('channel in `select` key must be predefined', expr.right.pos())
} }
if expr.or_block.kind != .absent { if expr.or_block.kind != .absent {
err_prefix := if expr.or_block.kind == .block { err_prefix := if expr.or_block.kind == .block {
@ -3237,7 +3237,7 @@ pub fn (mut c Checker) select_expr(mut node ast.SelectExpr) ast.Type {
} }
} }
else { else {
c.error('`<-` receive expression expected', branch.stmt.right[0].position()) c.error('`<-` receive expression expected', branch.stmt.right[0].pos())
} }
} }
} }
@ -3262,12 +3262,12 @@ pub fn (mut c Checker) lock_expr(mut node ast.LockExpr) ast.Type {
if !e_typ.has_flag(.shared_f) { if !e_typ.has_flag(.shared_f) {
obj_type := if node.lockeds[i] is ast.Ident { 'variable' } else { 'struct element' } obj_type := if node.lockeds[i] is ast.Ident { 'variable' } else { 'struct element' }
c.error('`$id_name` must be declared as `shared` $obj_type to be locked', c.error('`$id_name` must be declared as `shared` $obj_type to be locked',
node.lockeds[i].position()) node.lockeds[i].pos())
} }
if id_name in c.locked_names { if id_name in c.locked_names {
c.error('`$id_name` is already locked', node.lockeds[i].position()) c.error('`$id_name` is already locked', node.lockeds[i].pos())
} else if id_name in c.rlocked_names { } else if id_name in c.rlocked_names {
c.error('`$id_name` is already read-locked', node.lockeds[i].position()) c.error('`$id_name` is already read-locked', node.lockeds[i].pos())
} }
if node.is_rlock[i] { if node.is_rlock[i] {
c.rlocked_names << id_name c.rlocked_names << id_name
@ -3534,7 +3534,7 @@ pub fn (mut c Checker) prefix_expr(mut node ast.PrefixExpr) ast.Type {
return right_type return right_type
} }
fn (mut c Checker) check_index(typ_sym &ast.TypeSymbol, index ast.Expr, index_type ast.Type, pos token.Position, range_index bool, is_gated bool) { fn (mut c Checker) check_index(typ_sym &ast.TypeSymbol, index ast.Expr, index_type ast.Type, pos token.Pos, range_index bool, is_gated bool) {
index_type_sym := c.table.sym(index_type) index_type_sym := c.table.sym(index_type)
// println('index expr left=$typ_sym.name $node.pos.line_nr') // println('index expr left=$typ_sym.name $node.pos.line_nr')
// if typ_sym.kind == .array && (!(ast.type_idx(index_type) in ast.number_type_idxs) && // if typ_sym.kind == .array && (!(ast.type_idx(index_type) in ast.number_type_idxs) &&
@ -3797,12 +3797,12 @@ pub fn (mut c Checker) add_error_detail(s string) {
c.error_details << s c.error_details << s
} }
pub fn (mut c Checker) warn(s string, pos token.Position) { pub fn (mut c Checker) warn(s string, pos token.Pos) {
allow_warnings := !(c.pref.is_prod || c.pref.warns_are_errors) // allow warnings only in dev builds allow_warnings := !(c.pref.is_prod || c.pref.warns_are_errors) // allow warnings only in dev builds
c.warn_or_error(s, pos, allow_warnings) c.warn_or_error(s, pos, allow_warnings)
} }
pub fn (mut c Checker) error(message string, pos token.Position) { pub fn (mut c Checker) error(message string, pos token.Pos) {
$if checker_exit_on_first_error ? { $if checker_exit_on_first_error ? {
eprintln('\n\n>> checker error: $message, pos: $pos') eprintln('\n\n>> checker error: $message, pos: $pos')
print_backtrace() print_backtrace()
@ -3848,7 +3848,7 @@ fn (c &Checker) check_struct_signature(from ast.Struct, to ast.Struct) bool {
return true return true
} }
pub fn (mut c Checker) note(message string, pos token.Position) { pub fn (mut c Checker) note(message string, pos token.Pos) {
if c.pref.message_limit >= 0 && c.nr_notices >= c.pref.message_limit { if c.pref.message_limit >= 0 && c.nr_notices >= c.pref.message_limit {
c.should_abort = true c.should_abort = true
return return
@ -3873,7 +3873,7 @@ pub fn (mut c Checker) note(message string, pos token.Position) {
c.nr_notices++ c.nr_notices++
} }
fn (mut c Checker) warn_or_error(message string, pos token.Position, warn bool) { fn (mut c Checker) warn_or_error(message string, pos token.Pos, warn bool) {
// add backtrace to issue struct, how? // add backtrace to issue struct, how?
// if c.pref.is_verbose { // if c.pref.is_verbose {
// print_backtrace() // print_backtrace()
@ -3950,7 +3950,7 @@ fn (mut c Checker) trace(fbase string, message string) {
} }
} }
fn (mut c Checker) ensure_type_exists(typ ast.Type, pos token.Position) ? { fn (mut c Checker) ensure_type_exists(typ ast.Type, pos token.Pos) ? {
if typ == 0 { if typ == 0 {
c.error('unknown type', pos) c.error('unknown type', pos)
return return


@ -414,7 +414,7 @@ fn (mut c Checker) evaluate_once_comptime_if_attribute(mut node ast.Attr) bool {
// comptime_if_branch checks the condition of a compile-time `if` branch. It returns `true` // comptime_if_branch checks the condition of a compile-time `if` branch. It returns `true`
// if that branch's contents should be skipped (targets a different os for example) // if that branch's contents should be skipped (targets a different os for example)
fn (mut c Checker) comptime_if_branch(cond ast.Expr, pos token.Position) bool { fn (mut c Checker) comptime_if_branch(cond ast.Expr, pos token.Pos) bool {
// TODO: better error messages here // TODO: better error messages here
match cond { match cond {
ast.BoolLiteral { ast.BoolLiteral {
@ -456,7 +456,7 @@ fn (mut c Checker) comptime_if_branch(cond ast.Expr, pos token.Position) bool {
sym := c.table.sym(cond.right.typ) sym := c.table.sym(cond.right.typ)
if sym.kind != .interface_ { if sym.kind != .interface_ {
c.expr(cond.left) c.expr(cond.left)
// c.error('`$sym.name` is not an interface', cond.right.position()) // c.error('`$sym.name` is not an interface', cond.right.pos())
} }
return false return false
} else if cond.left in [ast.SelectorExpr, ast.TypeNode] { } else if cond.left in [ast.SelectorExpr, ast.TypeNode] {
@ -465,7 +465,7 @@ fn (mut c Checker) comptime_if_branch(cond ast.Expr, pos token.Position) bool {
return false return false
} else { } else {
c.error('invalid `\$if` condition: expected a type or a selector expression or an interface check', c.error('invalid `\$if` condition: expected a type or a selector expression or an interface check',
cond.left.position()) cond.left.pos())
} }
} }
.eq, .ne { .eq, .ne {


@ -22,7 +22,7 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
default_typ := c.check_expr_opt_call(default_expr, c.expr(default_expr)) default_typ := c.check_expr_opt_call(default_expr, c.expr(default_expr))
node.default_type = default_typ node.default_type = default_typ
c.check_expected(default_typ, node.elem_type) or { c.check_expected(default_typ, node.elem_type) or {
c.error(err.msg, default_expr.position()) c.error(err.msg, default_expr.pos())
} }
} }
if node.has_len { if node.has_len {
@ -30,7 +30,7 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
elem_type_sym := c.table.sym(node.elem_type) elem_type_sym := c.table.sym(node.elem_type)
if elem_type_sym.kind == .interface_ { if elem_type_sym.kind == .interface_ {
c.error('cannot instantiate an array of interfaces without also giving a default `init:` value', c.error('cannot instantiate an array of interfaces without also giving a default `init:` value',
node.len_expr.position()) node.len_expr.pos())
} }
} }
c.ensure_sumtype_array_has_default_value(node) c.ensure_sumtype_array_has_default_value(node)
@ -87,7 +87,7 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
for i, mut expr in node.exprs { for i, mut expr in node.exprs {
typ := c.check_expr_opt_call(expr, c.expr(expr)) typ := c.check_expr_opt_call(expr, c.expr(expr))
if typ == ast.void_type { if typ == ast.void_type {
c.error('invalid void array element type', expr.position()) c.error('invalid void array element type', expr.pos())
} }
node.expr_types << typ node.expr_types << typ
// The first element's type // The first element's type
@ -95,7 +95,7 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
if i == 0 { if i == 0 {
elem_type = expected_value_type elem_type = expected_value_type
c.expected_type = elem_type c.expected_type = elem_type
c.type_implements(typ, elem_type, expr.position()) c.type_implements(typ, elem_type, expr.pos())
} }
if !typ.is_ptr() && !typ.is_pointer() && !c.inside_unsafe { if !typ.is_ptr() && !typ.is_pointer() && !c.inside_unsafe {
typ_sym := c.table.sym(typ) typ_sym := c.table.sym(typ)
@ -117,7 +117,7 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
} }
if expr !is ast.TypeNode { if expr !is ast.TypeNode {
c.check_expected(typ, elem_type) or { c.check_expected(typ, elem_type) or {
c.error('invalid array element: $err.msg', expr.position()) c.error('invalid array element: $err.msg', expr.pos())
} }
} }
} }
@ -163,12 +163,12 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
} }
} }
else { else {
c.error('fixed array size cannot use non-constant value', init_expr.position()) c.error('fixed array size cannot use non-constant value', init_expr.pos())
} }
} }
if fixed_size <= 0 { if fixed_size <= 0 {
c.error('fixed size cannot be zero or negative (fixed_size: $fixed_size)', c.error('fixed size cannot be zero or negative (fixed_size: $fixed_size)',
init_expr.position()) init_expr.pos())
} }
idx := c.table.find_or_register_array_fixed(node.elem_type, int(fixed_size), init_expr) idx := c.table.find_or_register_array_fixed(node.elem_type, int(fixed_size), init_expr)
if node.elem_type.has_flag(.generic) { if node.elem_type.has_flag(.generic) {
@ -183,7 +183,7 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
return node.typ return node.typ
} }
fn (mut c Checker) check_array_init_para_type(para string, expr ast.Expr, pos token.Position) { fn (mut c Checker) check_array_init_para_type(para string, expr ast.Expr, pos token.Pos) {
sym := c.table.sym(c.expr(expr)) sym := c.table.sym(c.expr(expr))
if sym.kind !in [.int, .int_literal] { if sym.kind !in [.int, .int_literal] {
c.error('array $para needs to be an int', pos) c.error('array $para needs to be an int', pos)
@ -261,13 +261,13 @@ pub fn (mut c Checker) map_init(mut node ast.MapInit) ast.Type {
if !c.check_types(key_type, key0_type) || (i == 0 && key_type.is_number() if !c.check_types(key_type, key0_type) || (i == 0 && key_type.is_number()
&& key0_type.is_number() && key0_type != ast.mktyp(key_type)) { && key0_type.is_number() && key0_type != ast.mktyp(key_type)) {
msg := c.expected_msg(key_type, key0_type) msg := c.expected_msg(key_type, key0_type)
c.error('invalid map key: $msg', key.position()) c.error('invalid map key: $msg', key.pos())
same_key_type = false same_key_type = false
} }
if !c.check_types(val_type, val0_type) || (i == 0 && val_type.is_number() if !c.check_types(val_type, val0_type) || (i == 0 && val_type.is_number()
&& val0_type.is_number() && val0_type != ast.mktyp(val_type)) { && val0_type.is_number() && val0_type != ast.mktyp(val_type)) {
msg := c.expected_msg(val_type, val0_type) msg := c.expected_msg(val_type, val0_type)
c.error('invalid map value: $msg', val.position()) c.error('invalid map value: $msg', val.pos())
} }
} }
if same_key_type { if same_key_type {
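The map_init hunk above only changes how the offending key or value is located (key.pos(), val.pos()); the rule stays the same: every key must match the type of the first key, and every value the type of the first value. A minimal sketch, as hypothetical user code that is not part of this diff:

fn main() {
	// ok: string keys, int values throughout
	ages := {
		'alice': 30
		'bob':   31
	}
	println(ages)
	// rejected with 'invalid map key': the second key is an int while the first is a string
	// bad := {
	// 	'alice': 30
	// 	7:       31
	// }
}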


@ -355,15 +355,15 @@ pub fn (mut c Checker) call_expr(mut node ast.CallExpr) ast.Type {
if !arg.is_mut { if !arg.is_mut {
tok := call_arg.share.str() tok := call_arg.share.str()
c.error('`$node.name` parameter `$arg.name` is not `$tok`, `$tok` is not needed`', c.error('`$node.name` parameter `$arg.name` is not `$tok`, `$tok` is not needed`',
call_arg.expr.position()) call_arg.expr.pos())
} else if arg.typ.share() != call_arg.share { } else if arg.typ.share() != call_arg.share {
c.error('wrong shared type', call_arg.expr.position()) c.error('wrong shared type', call_arg.expr.pos())
} }
} else { } else {
if arg.is_mut && (!call_arg.is_mut || arg.typ.share() != call_arg.share) { if arg.is_mut && (!call_arg.is_mut || arg.typ.share() != call_arg.share) {
tok := call_arg.share.str() tok := call_arg.share.str()
c.error('`$node.name` parameter `$arg.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${i+1}`', c.error('`$node.name` parameter `$arg.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${i+1}`',
call_arg.expr.position()) call_arg.expr.pos())
} }
} }
} }
@ -787,15 +787,15 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
if call_arg.is_mut { if call_arg.is_mut {
to_lock, pos := c.fail_if_immutable(call_arg.expr) to_lock, pos := c.fail_if_immutable(call_arg.expr)
if !call_arg.expr.is_lvalue() { if !call_arg.expr.is_lvalue() {
c.error('cannot pass expression as `mut`', call_arg.expr.position()) c.error('cannot pass expression as `mut`', call_arg.expr.pos())
} }
if !param.is_mut { if !param.is_mut {
tok := call_arg.share.str() tok := call_arg.share.str()
c.error('`$node.name` parameter `$param.name` is not `$tok`, `$tok` is not needed`', c.error('`$node.name` parameter `$param.name` is not `$tok`, `$tok` is not needed`',
call_arg.expr.position()) call_arg.expr.pos())
} else { } else {
if param.typ.share() != call_arg.share { if param.typ.share() != call_arg.share {
c.error('wrong shared type', call_arg.expr.position()) c.error('wrong shared type', call_arg.expr.pos())
} }
if to_lock != '' && !param.typ.has_flag(.shared_f) { if to_lock != '' && !param.typ.has_flag(.shared_f) {
c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`', c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`',
@ -806,7 +806,7 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
if param.is_mut { if param.is_mut {
tok := call_arg.share.str() tok := call_arg.share.str()
c.error('`$node.name` parameter `$param.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${ c.error('`$node.name` parameter `$param.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${
i + 1}`', call_arg.expr.position()) i + 1}`', call_arg.expr.pos())
} else { } else {
c.fail_if_unreadable(call_arg.expr, typ, 'argument') c.fail_if_unreadable(call_arg.expr, typ, 'argument')
} }
@ -828,11 +828,11 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
&& (param.typ == ast.voidptr_type || final_param_sym.idx == ast.voidptr_type_idx) && (param.typ == ast.voidptr_type || final_param_sym.idx == ast.voidptr_type_idx)
&& !call_arg.typ.is_any_kind_of_pointer() && func.language == .v && !call_arg.typ.is_any_kind_of_pointer() && func.language == .v
&& !call_arg.expr.is_lvalue() && func.name != 'json.encode' && !c.pref.translated { && !call_arg.expr.is_lvalue() && func.name != 'json.encode' && !c.pref.translated {
c.error('expression cannot be passed as `voidptr`', call_arg.expr.position()) c.error('expression cannot be passed as `voidptr`', call_arg.expr.pos())
} }
// Handle expected interface // Handle expected interface
if final_param_sym.kind == .interface_ { if final_param_sym.kind == .interface_ {
if c.type_implements(typ, final_param_typ, call_arg.expr.position()) { if c.type_implements(typ, final_param_typ, call_arg.expr.pos()) {
if !typ.is_ptr() && !typ.is_pointer() && !c.inside_unsafe if !typ.is_ptr() && !typ.is_pointer() && !c.inside_unsafe
&& typ_sym.kind != .interface_ { && typ_sym.kind != .interface_ {
c.mark_as_referenced(mut &call_arg.expr, true) c.mark_as_referenced(mut &call_arg.expr, true)
@ -922,7 +922,7 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
utyp := c.unwrap_generic(typ) utyp := c.unwrap_generic(typ)
unwrap_sym := c.table.sym(unwrap_typ) unwrap_sym := c.table.sym(unwrap_typ)
if unwrap_sym.kind == .interface_ { if unwrap_sym.kind == .interface_ {
if c.type_implements(utyp, unwrap_typ, call_arg.expr.position()) { if c.type_implements(utyp, unwrap_typ, call_arg.expr.pos()) {
if !utyp.is_ptr() && !utyp.is_pointer() && !c.inside_unsafe if !utyp.is_ptr() && !utyp.is_pointer() && !c.inside_unsafe
&& c.table.sym(utyp).kind != .interface_ { && c.table.sym(utyp).kind != .interface_ {
c.mark_as_referenced(mut &call_arg.expr, true) c.mark_as_referenced(mut &call_arg.expr, true)
@ -1005,7 +1005,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
'unknown method or field: `${left_sym.name}.$method_name`' 'unknown method or field: `${left_sym.name}.$method_name`'
} }
if left_type.has_flag(.optional) { if left_type.has_flag(.optional) {
c.error('optional type cannot be called directly', node.left.position()) c.error('optional type cannot be called directly', node.left.pos())
return ast.void_type return ast.void_type
} }
if left_sym.kind in [.sum_type, .interface_] { if left_sym.kind in [.sum_type, .interface_] {
@ -1019,7 +1019,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if left_type == ast.void_type { if left_type == ast.void_type {
// No need to print this error, since this means that the variable is unknown, // No need to print this error, since this means that the variable is unknown,
// and there already was an error before. // and there already was an error before.
// c.error('`void` type has no methods', node.left.position()) // c.error('`void` type has no methods', node.left.pos())
return ast.void_type return ast.void_type
} }
mut concrete_types := []ast.Type{} mut concrete_types := []ast.Type{}
@ -1057,7 +1057,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
arg_type := c.expr(node.args[0].expr) arg_type := c.expr(node.args[0].expr)
if arg_type !in [ast.int_type, ast.int_literal_type] { if arg_type !in [ast.int_type, ast.int_literal_type] {
c.error('the first argument of `array.insert()` should be integer', c.error('the first argument of `array.insert()` should be integer',
node.args[0].expr.position()) node.args[0].expr.pos())
return ast.void_type return ast.void_type
} }
} }
@ -1073,7 +1073,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
arg_type := c.expr(arg_expr) arg_type := c.expr(arg_expr)
arg_sym := c.table.sym(arg_type) arg_sym := c.table.sym(arg_type)
if !c.check_types(arg_type, info.elem_type) && !c.check_types(left_type, arg_type) { if !c.check_types(arg_type, info.elem_type) && !c.check_types(left_type, arg_type) {
c.error('cannot $method_name `$arg_sym.name` to `$left_sym.name`', arg_expr.position()) c.error('cannot $method_name `$arg_sym.name` to `$left_sym.name`', arg_expr.pos())
} }
} else if final_left_sym.kind == .array && method_name in ['first', 'last', 'pop'] { } else if final_left_sym.kind == .array && method_name in ['first', 'last', 'pop'] {
if final_left_sym.info is ast.Array { if final_left_sym.info is ast.Array {
@ -1099,7 +1099,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
return info.return_type return info.return_type
} else if left_sym.kind == .char && left_type.nr_muls() == 0 && method_name == 'str' { } else if left_sym.kind == .char && left_type.nr_muls() == 0 && method_name == 'str' {
c.error('calling `.str()` on type `char` is not allowed, use its address or cast it to an integer instead', c.error('calling `.str()` on type `char` is not allowed, use its address or cast it to an integer instead',
node.left.position().extend(node.pos)) node.left.pos().extend(node.pos))
return ast.void_type return ast.void_type
} }
mut method := ast.Fn{} mut method := ast.Fn{}
@ -1164,7 +1164,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if method.params[0].is_mut { if method.params[0].is_mut {
to_lock, pos := c.fail_if_immutable(node.left) to_lock, pos := c.fail_if_immutable(node.left)
if !node.left.is_lvalue() { if !node.left.is_lvalue() {
c.error('cannot pass expression as `mut`', node.left.position()) c.error('cannot pass expression as `mut`', node.left.pos())
} }
// node.is_mut = true // node.is_mut = true
if to_lock != '' && rec_share != .shared_t { if to_lock != '' && rec_share != .shared_t {
@ -1232,7 +1232,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
} }
// Handle expected interface // Handle expected interface
if final_arg_sym.kind == .interface_ { if final_arg_sym.kind == .interface_ {
if c.type_implements(got_arg_typ, final_arg_typ, arg.expr.position()) { if c.type_implements(got_arg_typ, final_arg_typ, arg.expr.pos()) {
if !got_arg_typ.is_ptr() && !got_arg_typ.is_pointer() && !c.inside_unsafe { if !got_arg_typ.is_ptr() && !got_arg_typ.is_pointer() && !c.inside_unsafe {
got_arg_typ_sym := c.table.sym(got_arg_typ) got_arg_typ_sym := c.table.sym(got_arg_typ)
if got_arg_typ_sym.kind != .interface_ { if got_arg_typ_sym.kind != .interface_ {
@ -1293,10 +1293,10 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if !param_is_mut { if !param_is_mut {
tok := arg.share.str() tok := arg.share.str()
c.error('`$node.name` parameter `$param.name` is not `$tok`, `$tok` is not needed`', c.error('`$node.name` parameter `$param.name` is not `$tok`, `$tok` is not needed`',
arg.expr.position()) arg.expr.pos())
} else { } else {
if param_share != arg.share { if param_share != arg.share {
c.error('wrong shared type', arg.expr.position()) c.error('wrong shared type', arg.expr.pos())
} }
if to_lock != '' && param_share != .shared_t { if to_lock != '' && param_share != .shared_t {
c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`', c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`',
@ -1307,7 +1307,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if param_is_mut { if param_is_mut {
tok := arg.share.str() tok := arg.share.str()
c.error('`$node.name` parameter `$param.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${ c.error('`$node.name` parameter `$param.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${
i + 1}`', arg.expr.position()) i + 1}`', arg.expr.pos())
} else { } else {
c.fail_if_unreadable(arg.expr, got_arg_typ, 'argument') c.fail_if_unreadable(arg.expr, got_arg_typ, 'argument')
} }
@ -1728,7 +1728,7 @@ fn (mut c Checker) array_builtin_method_call(mut node ast.CallExpr, left_type as
node.return_type = c.table.find_or_register_array(thread_ret_type) node.return_type = c.table.find_or_register_array(thread_ret_type)
} else { } else {
c.error('`$left_sym.name` has no method `wait()` (only thread handles and arrays of them have)', c.error('`$left_sym.name` has no method `wait()` (only thread handles and arrays of them have)',
node.left.position()) node.left.pos())
} }
} }
// map/filter are supposed to have 1 arg only // map/filter are supposed to have 1 arg only
@ -1783,7 +1783,7 @@ fn (mut c Checker) array_builtin_method_call(mut node ast.CallExpr, left_type as
return node.return_type return node.return_type
} }
fn scope_register_it(mut s ast.Scope, pos token.Position, typ ast.Type) { fn scope_register_it(mut s ast.Scope, pos token.Pos, typ ast.Type) {
s.register(ast.Var{ s.register(ast.Var{
name: 'it' name: 'it'
pos: pos pos: pos
@ -1792,7 +1792,7 @@ fn scope_register_it(mut s ast.Scope, pos token.Position, typ ast.Type) {
}) })
} }
fn scope_register_a_b(mut s ast.Scope, pos token.Position, typ ast.Type) { fn scope_register_a_b(mut s ast.Scope, pos token.Pos, typ ast.Type) {
s.register(ast.Var{ s.register(ast.Var{
name: 'a' name: 'a'
pos: pos pos: pos


@ -35,13 +35,13 @@ fn (mut c Checker) for_in_stmt(mut node ast.ForInStmt) {
high_type := c.expr(node.high) high_type := c.expr(node.high)
high_type_idx := high_type.idx() high_type_idx := high_type.idx()
if typ_idx in ast.integer_type_idxs && high_type_idx !in ast.integer_type_idxs { if typ_idx in ast.integer_type_idxs && high_type_idx !in ast.integer_type_idxs {
c.error('range types do not match', node.cond.position()) c.error('range types do not match', node.cond.pos())
} else if typ_idx in ast.float_type_idxs || high_type_idx in ast.float_type_idxs { } else if typ_idx in ast.float_type_idxs || high_type_idx in ast.float_type_idxs {
c.error('range type can not be float', node.cond.position()) c.error('range type can not be float', node.cond.pos())
} else if typ_idx == ast.bool_type_idx || high_type_idx == ast.bool_type_idx { } else if typ_idx == ast.bool_type_idx || high_type_idx == ast.bool_type_idx {
c.error('range type can not be bool', node.cond.position()) c.error('range type can not be bool', node.cond.pos())
} else if typ_idx == ast.string_type_idx || high_type_idx == ast.string_type_idx { } else if typ_idx == ast.string_type_idx || high_type_idx == ast.string_type_idx {
c.error('range type can not be string', node.cond.position()) c.error('range type can not be string', node.cond.pos())
} }
if high_type in [ast.int_type, ast.int_literal_type] { if high_type in [ast.int_type, ast.int_literal_type] {
node.val_type = typ node.val_type = typ
@ -55,19 +55,19 @@ fn (mut c Checker) for_in_stmt(mut node ast.ForInStmt) {
if sym.kind == .struct_ { if sym.kind == .struct_ {
// iterators // iterators
next_fn := sym.find_method_with_generic_parent('next') or { next_fn := sym.find_method_with_generic_parent('next') or {
c.error('a struct must have a `next()` method to be an iterator', node.cond.position()) c.error('a struct must have a `next()` method to be an iterator', node.cond.pos())
return return
} }
if !next_fn.return_type.has_flag(.optional) { if !next_fn.return_type.has_flag(.optional) {
c.error('iterator method `next()` must return an optional', node.cond.position()) c.error('iterator method `next()` must return an optional', node.cond.pos())
} }
return_sym := c.table.sym(next_fn.return_type) return_sym := c.table.sym(next_fn.return_type)
if return_sym.kind == .multi_return { if return_sym.kind == .multi_return {
c.error('iterator method `next()` must not return multiple values', node.cond.position()) c.error('iterator method `next()` must not return multiple values', node.cond.pos())
} }
// the receiver // the receiver
if next_fn.params.len != 1 { if next_fn.params.len != 1 {
c.error('iterator method `next()` must have 0 parameters', node.cond.position()) c.error('iterator method `next()` must have 0 parameters', node.cond.pos())
} }
mut val_type := next_fn.return_type.clear_flag(.optional) mut val_type := next_fn.return_type.clear_flag(.optional)
if node.val_is_mut { if node.val_is_mut {
@ -96,7 +96,7 @@ fn (mut c Checker) for_in_stmt(mut node ast.ForInStmt) {
mut value_type := c.table.value_type(typ) mut value_type := c.table.value_type(typ)
if value_type == ast.void_type || typ.has_flag(.optional) { if value_type == ast.void_type || typ.has_flag(.optional) {
if typ != ast.void_type { if typ != ast.void_type {
c.error('for in: cannot index `${c.table.type_to_str(typ)}`', node.cond.position()) c.error('for in: cannot index `${c.table.type_to_str(typ)}`', node.cond.pos())
} }
} }
if node.val_is_mut { if node.val_is_mut {
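The for_in_stmt hunks above keep the iterator contract and only move the diagnostics to node.cond.pos(): a struct is iterable when it has a `next()` method that takes no parameters besides the receiver and returns a single optional value. A minimal sketch of a struct that satisfies those checks, as hypothetical user code written with the `?T` optional syntax this checker expects:

struct Countdown {
mut:
	n int
}

// next() returns an optional, takes only the receiver, and yields a single value,
// which is exactly what for_in_stmt verifies above
fn (mut c Countdown) next() ?int {
	if c.n <= 0 {
		return none
	}
	c.n--
	return c.n
}

fn main() {
	it := Countdown{
		n: 3
	}
	for x in it {
		println(x) // prints 2, 1, 0
	}
}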


@ -43,7 +43,7 @@ pub fn (mut c Checker) if_expr(mut node ast.IfExpr) ast.Type {
if (cond_typ.idx() != ast.bool_type_idx || cond_typ.has_flag(.optional)) if (cond_typ.idx() != ast.bool_type_idx || cond_typ.has_flag(.optional))
&& !c.pref.translated { && !c.pref.translated {
c.error('non-bool type `${c.table.type_to_str(cond_typ)}` used as if condition', c.error('non-bool type `${c.table.type_to_str(cond_typ)}` used as if condition',
branch.cond.position()) branch.cond.pos())
} }
} }
} }
@ -53,13 +53,13 @@ pub fn (mut c Checker) if_expr(mut node ast.IfExpr) ast.Type {
if mut branch.cond is ast.InfixExpr { if mut branch.cond is ast.InfixExpr {
if branch.cond.op == .key_is { if branch.cond.op == .key_is {
if branch.cond.right !is ast.TypeNode { if branch.cond.right !is ast.TypeNode {
c.error('invalid `\$if` condition: expected a type', branch.cond.right.position()) c.error('invalid `\$if` condition: expected a type', branch.cond.right.pos())
return 0 return 0
} }
got_type := c.unwrap_generic((branch.cond.right as ast.TypeNode).typ) got_type := c.unwrap_generic((branch.cond.right as ast.TypeNode).typ)
sym := c.table.sym(got_type) sym := c.table.sym(got_type)
if sym.kind == .placeholder || got_type.has_flag(.generic) { if sym.kind == .placeholder || got_type.has_flag(.generic) {
c.error('unknown type `$sym.name`', branch.cond.right.position()) c.error('unknown type `$sym.name`', branch.cond.right.pos())
} }
left := branch.cond.left left := branch.cond.left
if left is ast.SelectorExpr { if left is ast.SelectorExpr {
@ -271,7 +271,7 @@ fn (mut c Checker) smartcast_if_conds(node ast.Expr, mut scope ast.Scope) {
ast.none_type_idx ast.none_type_idx
} }
else { else {
c.error('invalid type `$right_expr`', right_expr.position()) c.error('invalid type `$right_expr`', right_expr.pos())
ast.Type(0) ast.Type(0)
} }
} }


@ -165,7 +165,7 @@ pub fn (mut c Checker) interface_decl(mut node ast.InterfaceDecl) {
} }
} }
fn (mut c Checker) resolve_generic_interface(typ ast.Type, interface_type ast.Type, pos token.Position) ast.Type { fn (mut c Checker) resolve_generic_interface(typ ast.Type, interface_type ast.Type, pos token.Pos) ast.Type {
utyp := c.unwrap_generic(typ) utyp := c.unwrap_generic(typ)
typ_sym := c.table.sym(utyp) typ_sym := c.table.sym(utyp)
mut inter_sym := c.table.sym(interface_type) mut inter_sym := c.table.sym(interface_type)


@ -73,7 +73,7 @@ pub fn (mut c Checker) match_expr(mut node ast.MatchExpr) ast.Type {
is_noreturn := is_noreturn_callexpr(stmt.expr) is_noreturn := is_noreturn_callexpr(stmt.expr)
if !(node.is_expr && ret_sym.kind == .sum_type) && !is_noreturn { if !(node.is_expr && ret_sym.kind == .sum_type) && !is_noreturn {
c.error('return type mismatch, it should be `$ret_sym.name`', c.error('return type mismatch, it should be `$ret_sym.name`',
stmt.expr.position()) stmt.expr.pos())
} }
} }
} }
@ -213,9 +213,9 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
// TODO // TODO
// This generates a memory issue with TCC // This generates a memory issue with TCC
// Needs to be checked later when TCC errors are fixed // Needs to be checked later when TCC errors are fixed
// Current solution is to move expr.position() to its own statement // Current solution is to move expr.pos() to its own statement
// c.type_implements(expr_type, c.expected_type, expr.position()) // c.type_implements(expr_type, c.expected_type, expr.pos())
expr_pos := expr.position() expr_pos := expr.pos()
if c.type_implements(expr_type, c.expected_type, expr_pos) { if c.type_implements(expr_type, c.expected_type, expr_pos) {
if !expr_type.is_ptr() && !expr_type.is_pointer() && !c.inside_unsafe { if !expr_type.is_ptr() && !expr_type.is_pointer() && !c.inside_unsafe {
if expr_type_sym.kind != .interface_ { if expr_type_sym.kind != .interface_ {
@ -227,16 +227,16 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
if expr_type !in cond_type_sym.info.variants { if expr_type !in cond_type_sym.info.variants {
expr_str := c.table.type_to_str(expr_type) expr_str := c.table.type_to_str(expr_type)
expect_str := c.table.type_to_str(node.cond_type) expect_str := c.table.type_to_str(node.cond_type)
c.error('`$expect_str` has no variant `$expr_str`', expr.position()) c.error('`$expect_str` has no variant `$expr_str`', expr.pos())
} }
} else if cond_type_sym.info is ast.Alias && expr_type_sym.info is ast.Struct { } else if cond_type_sym.info is ast.Alias && expr_type_sym.info is ast.Struct {
expr_str := c.table.type_to_str(expr_type) expr_str := c.table.type_to_str(expr_type)
expect_str := c.table.type_to_str(node.cond_type) expect_str := c.table.type_to_str(node.cond_type)
c.error('cannot match alias type `$expect_str` with `$expr_str`', expr.position()) c.error('cannot match alias type `$expect_str` with `$expr_str`', expr.pos())
} else if !c.check_types(expr_type, node.cond_type) { } else if !c.check_types(expr_type, node.cond_type) {
expr_str := c.table.type_to_str(expr_type) expr_str := c.table.type_to_str(expr_type)
expect_str := c.table.type_to_str(node.cond_type) expect_str := c.table.type_to_str(node.cond_type)
c.error('cannot match `$expect_str` with `$expr_str`', expr.position()) c.error('cannot match `$expect_str` with `$expr_str`', expr.pos())
} }
branch_exprs[key] = val + 1 branch_exprs[key] = val + 1
} }


@ -174,7 +174,7 @@ fn (mut c Checker) sql_stmt_line(mut node ast.SqlStmtLine) ast.Type {
return ast.void_type return ast.void_type
} }
fn (mut c Checker) fetch_and_verify_orm_fields(info ast.Struct, pos token.Position, table_name string) []ast.StructField { fn (mut c Checker) fetch_and_verify_orm_fields(info ast.Struct, pos token.Pos, table_name string) []ast.StructField {
fields := info.fields.filter( fields := info.fields.filter(
(it.typ in [ast.string_type, ast.bool_type] || int(it.typ) in ast.number_type_idxs (it.typ in [ast.string_type, ast.bool_type] || int(it.typ) in ast.number_type_idxs
|| c.table.type_symbols[int(it.typ)].kind == .struct_ || c.table.type_symbols[int(it.typ)].kind == .struct_


@ -11,7 +11,7 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
mut expected_type := c.unwrap_generic(c.expected_type) mut expected_type := c.unwrap_generic(c.expected_type)
expected_type_sym := c.table.sym(expected_type) expected_type_sym := c.table.sym(expected_type)
if node.exprs.len > 0 && c.table.cur_fn.return_type == ast.void_type { if node.exprs.len > 0 && c.table.cur_fn.return_type == ast.void_type {
c.error('unexpected argument, current function does not return anything', node.exprs[0].position()) c.error('unexpected argument, current function does not return anything', node.exprs[0].pos())
return return
} else if node.exprs.len == 0 && !(c.expected_type == ast.void_type } else if node.exprs.len == 0 && !(c.expected_type == ast.void_type
|| expected_type_sym.kind == .void) { || expected_type_sym.kind == .void) {
@ -82,7 +82,7 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
got_typ := c.unwrap_generic(got_types[i]) got_typ := c.unwrap_generic(got_types[i])
if got_typ.has_flag(.optional) && (!exp_type.has_flag(.optional) if got_typ.has_flag(.optional) && (!exp_type.has_flag(.optional)
|| c.table.type_to_str(got_typ) != c.table.type_to_str(exp_type)) { || c.table.type_to_str(got_typ) != c.table.type_to_str(exp_type)) {
pos := node.exprs[i].position() pos := node.exprs[i].pos()
c.error('cannot use `${c.table.type_to_str(got_typ)}` as type `${c.table.type_to_str(exp_type)}` in return argument', c.error('cannot use `${c.table.type_to_str(got_typ)}` as type `${c.table.type_to_str(exp_type)}` in return argument',
pos) pos)
} }
@ -98,13 +98,13 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
} }
continue continue
} }
pos := node.exprs[i].position() pos := node.exprs[i].pos()
c.error('cannot use `$got_typ_sym.name` as type `${c.table.type_to_str(exp_type)}` in return argument', c.error('cannot use `$got_typ_sym.name` as type `${c.table.type_to_str(exp_type)}` in return argument',
pos) pos)
} }
if (got_typ.is_ptr() || got_typ.is_pointer()) if (got_typ.is_ptr() || got_typ.is_pointer())
&& (!exp_type.is_ptr() && !exp_type.is_pointer()) { && (!exp_type.is_ptr() && !exp_type.is_pointer()) {
pos := node.exprs[i].position() pos := node.exprs[i].pos()
if node.exprs[i].is_auto_deref_var() { if node.exprs[i].is_auto_deref_var() {
continue continue
} }
@ -114,7 +114,7 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
if (exp_type.is_ptr() || exp_type.is_pointer()) if (exp_type.is_ptr() || exp_type.is_pointer())
&& (!got_typ.is_ptr() && !got_typ.is_pointer()) && got_typ != ast.int_literal_type && (!got_typ.is_ptr() && !got_typ.is_pointer()) && got_typ != ast.int_literal_type
&& !c.pref.translated { && !c.pref.translated {
pos := node.exprs[i].position() pos := node.exprs[i].pos()
if node.exprs[i].is_auto_deref_var() { if node.exprs[i].is_auto_deref_var() {
continue continue
} }


@ -82,7 +82,7 @@ pub fn (mut c Checker) struct_decl(mut node ast.StructDecl) {
} }
} else { } else {
c.error('incompatible initializer for field `$field.name`: $err.msg', c.error('incompatible initializer for field `$field.name`: $err.msg',
field.default_expr.position()) field.default_expr.pos())
} }
} }
// Check for unnecessary inits like ` = 0` and ` = ''` // Check for unnecessary inits like ` = 0` and ` = ''`
@ -394,7 +394,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
node.update_expr_type = update_type node.update_expr_type = update_type
if c.table.type_kind(update_type) != .struct_ { if c.table.type_kind(update_type) != .struct_ {
s := c.table.type_to_str(update_type) s := c.table.type_to_str(update_type)
c.error('expected struct, found `$s`', node.update_expr.position()) c.error('expected struct, found `$s`', node.update_expr.pos())
} else if update_type != node.typ { } else if update_type != node.typ {
from_sym := c.table.sym(update_type) from_sym := c.table.sym(update_type)
to_sym := c.table.sym(node.typ) to_sym := c.table.sym(node.typ)
@ -403,13 +403,13 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
// TODO this check is too strict // TODO this check is too strict
if !c.check_struct_signature(from_info, to_info) { if !c.check_struct_signature(from_info, to_info) {
c.error('struct `$from_sym.name` is not compatible with struct `$to_sym.name`', c.error('struct `$from_sym.name` is not compatible with struct `$to_sym.name`',
node.update_expr.position()) node.update_expr.pos())
} }
} }
if !node.update_expr.is_lvalue() { if !node.update_expr.is_lvalue() {
// cgen will repeat `update_expr` for each field // cgen will repeat `update_expr` for each field
// so enforce an lvalue for efficiency // so enforce an lvalue for efficiency
c.error('expression is not an lvalue', node.update_expr.position()) c.error('expression is not an lvalue', node.update_expr.pos())
} }
} }
return node.typ return node.typ


@ -10,7 +10,7 @@ pub struct DocComment {
pub mut: pub mut:
text string // Raw text content of the comment, excluding the comment token chars ('//, /*, */') text string // Raw text content of the comment, excluding the comment token chars ('//, /*, */')
is_multi bool // Is a block / multi-line comment is_multi bool // Is a block / multi-line comment
pos token.Position pos token.Pos
} }
// is_example returns true if the contents of this comment is a doc example. // is_example returns true if the contents of this comment is a doc example.


@ -126,7 +126,7 @@ pub mut:
name string name string
content string content string
comments []DocComment comments []DocComment
pos token.Position pos token.Pos
file_path string file_path string
kind SymbolKind kind SymbolKind
tags []string tags []string


@ -19,7 +19,7 @@ pub fn ast_comment_to_doc_comment(ast_node ast.Comment) DocComment {
return DocComment{ return DocComment{
text: text text: text
is_multi: ast_node.is_multi is_multi: ast_node.is_multi
pos: token.Position{ pos: token.Pos{
line_nr: ast_node.pos.line_nr line_nr: ast_node.pos.line_nr
col: 0 // ast_node.pos.pos - ast_node.text.len col: 0 // ast_node.pos.pos - ast_node.text.len
len: text.len len: text.len
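
The position value itself is untouched by this PR; only the type name shrinks from token.Position to token.Pos. A minimal hand-built example (all field values below are invented):

import v.token

fn main() {
	// hedged sketch: the renamed struct keeps its fields (line_nr, col, len, pos, ...)
	pos := token.Pos{
		line_nr: 18 // zero-based source line
		col: 0
		len: 9
		pos: 412 // offset of the token in the file
	}
	println('starts on line ${pos.line_nr + 1} and is ${pos.len} characters long')
}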


@ -15,7 +15,7 @@ pub:
message string message string
details string details string
file_path string file_path string
pos token.Position pos token.Pos
backtrace string backtrace string
reporter Reporter reporter Reporter
} }
@ -25,7 +25,7 @@ pub:
message string message string
details string details string
file_path string file_path string
pos token.Position pos token.Pos
reporter Reporter reporter Reporter
} }
@ -34,6 +34,6 @@ pub:
message string message string
details string details string
file_path string file_path string
pos token.Position pos token.Pos
reporter Reporter reporter Reporter
} }
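
For completeness, this is roughly how one of the diagnostic records above is built after the rename; the message, path and numbers are invented, and the fields not shown (details, backtrace, reporter) keep their zero values:

import v.errors
import v.token

fn main() {
	e := errors.Error{
		message: 'unexpected token'
		file_path: 'main.v'
		pos: token.Pos{
			line_nr: 4
			col: 2
			len: 1
			pos: 57
		}
	}
	println('$e.file_path:${e.pos.line_nr + 1}:${e.pos.col + 1}: $e.message')
}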


@ -331,7 +331,7 @@ fn (f Fmt) should_insert_newline_before_node(node ast.Node, prev_node ast.Node)
if f.out.last_n(2) == '\n\n' { if f.out.last_n(2) == '\n\n' {
return false return false
} }
prev_line_nr := prev_node.position().last_line prev_line_nr := prev_node.pos().last_line
// The nodes are Stmts // The nodes are Stmts
if node is ast.Stmt && prev_node is ast.Stmt { if node is ast.Stmt && prev_node is ast.Stmt {
stmt := node stmt := node
@ -372,7 +372,7 @@ fn (f Fmt) should_insert_newline_before_node(node ast.Node, prev_node ast.Node)
} }
} }
// The node shouldn't have a newline before // The node shouldn't have a newline before
if node.position().line_nr - prev_line_nr <= 1 { if node.pos().line_nr - prev_line_nr <= 1 {
return false return false
} }
return true return true
@ -507,7 +507,7 @@ fn stmt_is_single_line(stmt ast.Stmt) bool {
pub fn (mut f Fmt) expr(node ast.Expr) { pub fn (mut f Fmt) expr(node ast.Expr) {
if f.is_debug { if f.is_debug {
eprintln('expr: ${node.position():-42} | node: ${node.type_name():-20} | $node.str()') eprintln('expr: ${node.pos():-42} | node: ${node.type_name():-20} | $node.str()')
} }
match mut node { match mut node {
ast.NodeError {} ast.NodeError {}
@ -1382,7 +1382,7 @@ pub fn (mut f Fmt) array_init(node ast.ArrayInit) {
} }
} else { } else {
next_line := if node.exprs.len > 0 { next_line := if node.exprs.len > 0 {
node.exprs[0].position().line_nr node.exprs[0].pos().line_nr
} else { } else {
node.pos.last_line node.pos.last_line
} }
@ -1402,11 +1402,11 @@ pub fn (mut f Fmt) array_init(node ast.ArrayInit) {
} }
mut set_comma := false mut set_comma := false
for i, expr in node.exprs { for i, expr in node.exprs {
pos := expr.position() pos := expr.pos()
if i == 0 { if i == 0 {
if f.array_init_depth > f.array_init_break.len { if f.array_init_depth > f.array_init_break.len {
f.array_init_break << pos.line_nr > last_line_nr f.array_init_break << pos.line_nr > last_line_nr
|| f.line_len + expr.position().len > fmt.max_len[3] || f.line_len + expr.pos().len > fmt.max_len[3]
} }
} }
line_break := f.array_init_break[f.array_init_depth - 1] line_break := f.array_init_break[f.array_init_depth - 1]
@ -1449,7 +1449,7 @@ pub fn (mut f Fmt) array_init(node ast.ArrayInit) {
mut last_comment_was_inline := false mut last_comment_was_inline := false
mut has_comments := node.ecmnts[i].len > 0 mut has_comments := node.ecmnts[i].len > 0
if i < node.ecmnts.len && has_comments { if i < node.ecmnts.len && has_comments {
expr_pos := expr.position() expr_pos := expr.pos()
for icmt, cmt in node.ecmnts[i] { for icmt, cmt in node.ecmnts[i] {
if !set_comma && cmt.pos.pos > expr_pos.pos + expr_pos.len + 2 { if !set_comma && cmt.pos.pos > expr_pos.pos + expr_pos.len + 2 {
if icmt > 0 { if icmt > 0 {
@ -1464,7 +1464,7 @@ pub fn (mut f Fmt) array_init(node ast.ArrayInit) {
} }
mut next_pos := expr_pos mut next_pos := expr_pos
if i + 1 < node.exprs.len { if i + 1 < node.exprs.len {
next_pos = node.exprs[i + 1].position() next_pos = node.exprs[i + 1].pos()
} }
if cmt.pos.line_nr > expr_pos.last_line { if cmt.pos.line_nr > expr_pos.last_line {
embed := i + 1 < node.exprs.len && next_pos.line_nr == cmt.pos.last_line embed := i + 1 < node.exprs.len && next_pos.line_nr == cmt.pos.last_line
@ -2114,7 +2114,7 @@ pub fn (mut f Fmt) map_init(node ast.MapInit) {
f.write(': ') f.write(': ')
f.write(strings.repeat(` `, max_field_len - key.str().len)) f.write(strings.repeat(` `, max_field_len - key.str().len))
f.expr(node.vals[i]) f.expr(node.vals[i])
f.comments(node.comments[i], prev_line: node.vals[i].position().last_line, has_nl: false) f.comments(node.comments[i], prev_line: node.vals[i].pos().last_line, has_nl: false)
f.writeln('') f.writeln('')
} }
f.indent-- f.indent--
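
The fmt changes above all follow one pattern: read a node's Pos and compare line numbers. A stripped-down sketch of the distance check in should_insert_newline_before_node (assuming two nodes from an already-parsed file):

import v.ast

// hedged sketch: more than one source line between the nodes means a blank line may stay
fn far_apart(prev ast.Node, node ast.Node) bool {
	return node.pos().line_nr - prev.pos().last_line > 1
}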


@ -91,7 +91,7 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl) {
before_last_line = mu.max(before_last_line, node.fields[i - 1].comments.last().pos.last_line) before_last_line = mu.max(before_last_line, node.fields[i - 1].comments.last().pos.last_line)
} }
if node.fields[i - 1].has_default_expr { if node.fields[i - 1].has_default_expr {
before_last_line = mu.max(before_last_line, node.fields[i - 1].default_expr.position().last_line) before_last_line = mu.max(before_last_line, node.fields[i - 1].default_expr.pos().last_line)
} }
mut next_first_line := field.pos.line_nr mut next_first_line := field.pos.line_nr


@ -8,6 +8,6 @@ interface Egg {
milk string | int milk string | int
} }
fn foo(bar string | int) int | string | token.Position { fn foo(bar string | int) int | string | token.Pos {
return 1 return 1
} }


@ -1,6 +1,6 @@
module ast module ast
pub fn (stmt Stmt) position() Position { pub fn (stmt Stmt) pos() Pos {
match stmt { match stmt {
AssertStmt { return stmt.pos } AssertStmt { return stmt.pos }
AssignStmt { return stmt.pos } AssignStmt { return stmt.pos }
@ -52,6 +52,6 @@ pub fn (stmt Stmt) position() Position {
// } // }
*/ */
// //
else { return Position{} } else { return Pos{} }
} }
} }
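
Call sites only need the shorter name; as the match above shows, nodes without a position still yield a zero Pos. A small usage sketch (the report helper is hypothetical):

import v.ast

fn report(stmt ast.Stmt) {
	p := stmt.pos() // formerly stmt.position()
	eprintln('statement spans lines ${p.line_nr + 1}..${p.last_line + 1}')
}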


@ -691,7 +691,7 @@ fn (mut g Gen) gen_cross_tmp_variable(left []ast.Expr, val ast.Expr) {
for lx in left { for lx in left {
if val_.str() == lx.str() { if val_.str() == lx.str() {
g.write('_var_') g.write('_var_')
g.write(lx.position().pos.str()) g.write(lx.pos().pos.str())
has_var = true has_var = true
break break
} }
@ -731,7 +731,7 @@ fn (mut g Gen) gen_cross_tmp_variable(left []ast.Expr, val ast.Expr) {
for lx in left { for lx in left {
if val_.str() == lx.str() { if val_.str() == lx.str() {
g.write('_var_') g.write('_var_')
g.write(lx.position().pos.str()) g.write(lx.pos().pos.str())
has_var = true has_var = true
break break
} }


@ -1502,7 +1502,7 @@ fn (mut g Gen) stmts_with_tmp_var(stmts []ast.Stmt, tmp_var string) {
if stmt is ast.ExprStmt { if stmt is ast.ExprStmt {
// For some reason ExprStmt.pos is 0 when ExprStmt.expr is comp if expr // For some reason ExprStmt.pos is 0 when ExprStmt.expr is comp if expr
// Extract the pos. TODO figure out why and fix. // Extract the pos. TODO figure out why and fix.
stmt_pos = stmt.expr.position() stmt_pos = stmt.expr.pos()
} }
if stmt_pos.pos == 0 { if stmt_pos.pos == 0 {
$if trace_autofree ? { $if trace_autofree ? {
@ -1517,7 +1517,7 @@ fn (mut g Gen) stmts_with_tmp_var(stmts []ast.Stmt, tmp_var string) {
} }
[inline] [inline]
fn (mut g Gen) write_v_source_line_info(pos token.Position) { fn (mut g Gen) write_v_source_line_info(pos token.Pos) {
if g.inside_ternary == 0 && g.pref.is_vlines && g.is_vlines_enabled { if g.inside_ternary == 0 && g.pref.is_vlines && g.is_vlines_enabled {
nline := pos.line_nr + 1 nline := pos.line_nr + 1
lineinfo := '\n#line $nline "$g.vlines_path"' lineinfo := '\n#line $nline "$g.vlines_path"'
@ -1876,7 +1876,7 @@ fn (mut g Gen) stmt(node ast.Stmt) {
if g.is_autofree { if g.is_autofree {
// if node is ast.ExprStmt {&& node.expr is ast.CallExpr { // if node is ast.ExprStmt {&& node.expr is ast.CallExpr {
if node !is ast.FnDecl { if node !is ast.FnDecl {
// p := node.position() // p := node.pos()
// g.autofree_call_postgen(p.pos) // g.autofree_call_postgen(p.pos)
} }
} }
@ -2440,7 +2440,7 @@ fn (mut g Gen) expr_with_cast(expr ast.Expr, got_type_raw ast.Type, expected_typ
got_deref_type := if got_is_ptr { unwrapped_got_type.deref() } else { unwrapped_got_type } got_deref_type := if got_is_ptr { unwrapped_got_type.deref() } else { unwrapped_got_type }
if g.table.sumtype_has_variant(expected_deref_type, got_deref_type, false) { if g.table.sumtype_has_variant(expected_deref_type, got_deref_type, false) {
mut is_already_sum_type := false mut is_already_sum_type := false
scope := g.file.scope.innermost(expr.position().pos) scope := g.file.scope.innermost(expr.pos().pos)
if expr is ast.Ident { if expr is ast.Ident {
if v := scope.find_var(expr.name) { if v := scope.find_var(expr.name) {
if v.smartcasts.len > 0 { if v.smartcasts.len > 0 {
@ -2477,7 +2477,7 @@ fn (mut g Gen) expr_with_cast(expr ast.Expr, got_type_raw ast.Type, expected_typ
if to_shared { if to_shared {
shared_styp := exp_styp[0..exp_styp.len - 1] // `shared` implies ptr, so eat one `*` shared_styp := exp_styp[0..exp_styp.len - 1] // `shared` implies ptr, so eat one `*`
if got_type_raw.is_ptr() { if got_type_raw.is_ptr() {
g.error('cannot convert reference to `shared`', expr.position()) g.error('cannot convert reference to `shared`', expr.pos())
} }
if exp_sym.kind == .array { if exp_sym.kind == .array {
g.writeln('($shared_styp*)__dup_shared_array(&($shared_styp){.mtx = {0}, .val =') g.writeln('($shared_styp*)__dup_shared_array(&($shared_styp){.mtx = {0}, .val =')
@ -3091,7 +3091,7 @@ fn (mut g Gen) expr(node ast.Expr) {
// NB: please keep the type names in the match here in alphabetical order: // NB: please keep the type names in the match here in alphabetical order:
match mut node { match mut node {
ast.EmptyExpr { ast.EmptyExpr {
g.error('g.expr(): unhandled EmptyExpr', token.Position{}) g.error('g.expr(): unhandled EmptyExpr', token.Pos{})
} }
ast.AnonFn { ast.AnonFn {
g.gen_anon_fn(mut node) g.gen_anon_fn(mut node)
@ -4691,7 +4691,7 @@ fn (mut g Gen) if_expr(node ast.IfExpr) {
} else { } else {
mut is_auto_heap := false mut is_auto_heap := false
if branch.stmts.len > 0 { if branch.stmts.len > 0 {
scope := g.file.scope.innermost(ast.Node(branch.stmts[branch.stmts.len - 1]).position().pos) scope := g.file.scope.innermost(ast.Node(branch.stmts[branch.stmts.len - 1]).pos().pos)
if v := scope.find_var(branch.cond.vars[0].name) { if v := scope.find_var(branch.cond.vars[0].name) {
is_auto_heap = v.is_auto_heap is_auto_heap = v.is_auto_heap
} }
@ -5634,13 +5634,13 @@ fn verror(s string) {
} }
[noreturn] [noreturn]
fn (g &Gen) error(s string, pos token.Position) { fn (g &Gen) error(s string, pos token.Pos) {
ferror := util.formatted_error('cgen error:', s, g.file.path, pos) ferror := util.formatted_error('cgen error:', s, g.file.path, pos)
eprintln(ferror) eprintln(ferror)
exit(1) exit(1)
} }
fn (g &Gen) checker_bug(s string, pos token.Position) { fn (g &Gen) checker_bug(s string, pos token.Pos) {
g.error('checker bug; $s', pos) g.error('checker bug; $s', pos)
} }
@ -6943,7 +6943,7 @@ static inline __shared__$interface_name ${shared_fn_name}(__shared__$cctype* x)
return sb.str() return sb.str()
} }
fn (mut g Gen) panic_debug_info(pos token.Position) (int, string, string, string) { fn (mut g Gen) panic_debug_info(pos token.Pos) (int, string, string, string) {
paline := pos.line_nr + 1 paline := pos.line_nr + 1
if isnil(g.fn_decl) { if isnil(g.fn_decl) {
return paline, '', 'main', 'C._vinit' return paline, '', 'main', 'C._vinit'
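
As the write_v_source_line_info hunk above shows, cgen maps a Pos onto a C #line directive. A condensed sketch of that mapping (the path argument is illustrative):

import v.token

// hedged sketch: Pos.line_nr is zero-based, C's #line directive is one-based
fn line_directive(pos token.Pos, vlines_path string) string {
	nline := pos.line_nr + 1
	return '\n#line $nline "$vlines_path"'
}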


@ -580,7 +580,7 @@ fn (mut g JsGen) stmts(stmts []ast.Stmt) {
} }
[inline] [inline]
fn (mut g JsGen) write_v_source_line_info(pos token.Position) { fn (mut g JsGen) write_v_source_line_info(pos token.Pos) {
// g.inside_ternary == 0 && // g.inside_ternary == 0 &&
if g.pref.sourcemap { if g.pref.sourcemap {
g.ns.sourcemap_helper << SourcemapHelper{ g.ns.sourcemap_helper << SourcemapHelper{


@ -1176,7 +1176,7 @@ g.v_error('oops', node.pos)
} }
else { else {
// dump(node) // dump(node)
g.v_error('unhandled assign_stmt expression: $right.type_name()', right.position()) g.v_error('unhandled assign_stmt expression: $right.type_name()', right.pos())
} }
} }
// } // }


@ -762,7 +762,7 @@ pub fn (mut g Gen) n_error(s string) {
util.verror('native error', s) util.verror('native error', s)
} }
pub fn (mut g Gen) warning(s string, pos token.Position) { pub fn (mut g Gen) warning(s string, pos token.Pos) {
if g.pref.output_mode == .stdout { if g.pref.output_mode == .stdout {
werror := util.formatted_error('warning', s, g.pref.path, pos) werror := util.formatted_error('warning', s, g.pref.path, pos)
eprintln(werror) eprintln(werror)
@ -776,7 +776,7 @@ pub fn (mut g Gen) warning(s string, pos token.Position) {
} }
} }
pub fn (mut g Gen) v_error(s string, pos token.Position) { pub fn (mut g Gen) v_error(s string, pos token.Pos) {
// TODO: store a file index in the Position too, // TODO: store a file index in the Position too,
// so that the file path can be retrieved from the pos, instead // so that the file path can be retrieved from the pos, instead
// of guessed from the pref.path ... // of guessed from the pref.path ...
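
The TODO above describes a possible future direction rather than anything in this PR. Purely to illustrate the idea (none of these names exist in the compiler), a position could carry a file index that resolves against a path table:

// hypothetical types, only to illustrate the TODO; not part of v.token
struct PosWithFile {
	line_nr  int
	col      int
	pos      int
	file_idx int // index into a compiler-wide list of file paths
}

fn path_of(p PosWithFile, file_paths []string) string {
	return file_paths[p.file_idx]
}

fn main() {
	paths := ['main.v', 'parser.v'] // illustrative
	p := PosWithFile{
		line_nr: 3
		file_idx: 1
	}
	println(path_of(p, paths))
}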


@ -133,7 +133,7 @@ fn (mut p Parser) check_cross_variables(exprs []ast.Expr, val ast.Expr) bool {
fn (mut p Parser) partial_assign_stmt(left []ast.Expr, left_comments []ast.Comment) ast.Stmt { fn (mut p Parser) partial_assign_stmt(left []ast.Expr, left_comments []ast.Comment) ast.Stmt {
p.is_stmt_ident = false p.is_stmt_ident = false
op := p.tok.kind op := p.tok.kind
mut pos := p.tok.position() mut pos := p.tok.pos()
p.next() p.next()
mut comments := []ast.Comment{cap: 2 * left_comments.len + 1} mut comments := []ast.Comment{cap: 2 * left_comments.len + 1}
comments << left_comments comments << left_comments
@ -210,7 +210,7 @@ fn (mut p Parser) partial_assign_stmt(left []ast.Expr, left_comments []ast.Comme
} }
else { else {
// TODO: parexpr ( check vars) // TODO: parexpr ( check vars)
// else { p.error_with_pos('unexpected `${typeof(lx)}`', lx.position()) } // else { p.error_with_pos('unexpected `${typeof(lx)}`', lx.pos()) }
} }
} }
} }


@ -14,7 +14,7 @@ const (
// // #include, #flag, #v // // #include, #flag, #v
fn (mut p Parser) hash() ast.HashStmt { fn (mut p Parser) hash() ast.HashStmt {
pos := p.tok.position() pos := p.tok.pos()
val := p.tok.lit val := p.tok.lit
kind := val.all_before(' ') kind := val.all_before(' ')
p.next() p.next()
@ -44,7 +44,7 @@ fn (mut p Parser) comptime_call() ast.ComptimeCall {
scope: 0 scope: 0
} }
p.check(.dollar) p.check(.dollar)
start_pos := p.prev_tok.position() start_pos := p.prev_tok.pos()
error_msg := 'only `\$tmpl()`, `\$env()`, `\$embed_file()`, `\$pkgconfig()` and `\$vweb.html()` comptime functions are supported right now' error_msg := 'only `\$tmpl()`, `\$env()`, `\$embed_file()`, `\$pkgconfig()` and `\$vweb.html()` comptime functions are supported right now'
if p.peek_tok.kind == .dot { if p.peek_tok.kind == .dot {
name := p.check_name() // skip `vweb.html()` TODO name := p.check_name() // skip `vweb.html()` TODO
@ -63,7 +63,7 @@ fn (mut p Parser) comptime_call() ast.ComptimeCall {
is_html := method_name == 'html' is_html := method_name == 'html'
// $env('ENV_VAR_NAME') // $env('ENV_VAR_NAME')
p.check(.lpar) p.check(.lpar)
spos := p.tok.position() spos := p.tok.pos()
if method_name == 'env' { if method_name == 'env' {
s := p.tok.lit s := p.tok.lit
p.check(.string) p.check(.string)
@ -74,7 +74,7 @@ fn (mut p Parser) comptime_call() ast.ComptimeCall {
args_var: s args_var: s
is_env: true is_env: true
env_pos: spos env_pos: spos
pos: spos.extend(p.prev_tok.position()) pos: spos.extend(p.prev_tok.pos())
} }
} }
if method_name == 'pkgconfig' { if method_name == 'pkgconfig' {
@ -87,7 +87,7 @@ fn (mut p Parser) comptime_call() ast.ComptimeCall {
args_var: s args_var: s
is_pkgconfig: true is_pkgconfig: true
env_pos: spos env_pos: spos
pos: spos.extend(p.prev_tok.position()) pos: spos.extend(p.prev_tok.pos())
} }
} }
literal_string_param := if is_html { '' } else { p.tok.lit } literal_string_param := if is_html { '' } else { p.tok.lit }
@ -146,7 +146,7 @@ fn (mut p Parser) comptime_call() ast.ComptimeCall {
apath: epath apath: epath
compression_type: embed_compression_type compression_type: embed_compression_type
} }
pos: start_pos.extend(p.prev_tok.position()) pos: start_pos.extend(p.prev_tok.pos())
} }
} }
// Compile vweb html template to V code, parse that V code and embed the resulting V function // Compile vweb html template to V code, parse that V code and embed the resulting V function
@ -180,7 +180,7 @@ fn (mut p Parser) comptime_call() ast.ComptimeCall {
is_vweb: true is_vweb: true
method_name: method_name method_name: method_name
args_var: literal_string_param args_var: literal_string_param
pos: start_pos.extend(p.prev_tok.position()) pos: start_pos.extend(p.prev_tok.pos())
} }
} }
if is_html { if is_html {
@ -245,7 +245,7 @@ fn (mut p Parser) comptime_call() ast.ComptimeCall {
vweb_tmpl: file vweb_tmpl: file
method_name: method_name method_name: method_name
args_var: literal_string_param args_var: literal_string_param
pos: start_pos.extend(p.prev_tok.position()) pos: start_pos.extend(p.prev_tok.pos())
} }
} }
@ -255,13 +255,13 @@ fn (mut p Parser) comptime_for() ast.ComptimeFor {
// $for field in App(fields) { // $for field in App(fields) {
p.next() p.next()
p.check(.key_for) p.check(.key_for)
var_pos := p.tok.position() var_pos := p.tok.pos()
val_var := p.check_name() val_var := p.check_name()
p.check(.key_in) p.check(.key_in)
mut typ_pos := p.tok.position() mut typ_pos := p.tok.pos()
lang := p.parse_language() lang := p.parse_language()
typ := p.parse_any_type(lang, false, false) typ := p.parse_any_type(lang, false, false)
typ_pos = typ_pos.extend(p.prev_tok.position()) typ_pos = typ_pos.extend(p.prev_tok.pos())
p.check(.dot) p.check(.dot)
for_val := p.check_name() for_val := p.check_name()
mut kind := ast.ComptimeForKind.methods mut kind := ast.ComptimeForKind.methods
@ -288,10 +288,10 @@ fn (mut p Parser) comptime_for() ast.ComptimeFor {
kind = .attributes kind = .attributes
} else { } else {
p.error_with_pos('unknown kind `$for_val`, available are: `methods`, `fields` or `attributes`', p.error_with_pos('unknown kind `$for_val`, available are: `methods`, `fields` or `attributes`',
p.prev_tok.position()) p.prev_tok.pos())
return ast.ComptimeFor{} return ast.ComptimeFor{}
} }
spos := p.tok.position() spos := p.tok.pos()
stmts := p.parse_block() stmts := p.parse_block()
p.close_scope() p.close_scope()
return ast.ComptimeFor{ return ast.ComptimeFor{
@ -300,7 +300,7 @@ fn (mut p Parser) comptime_for() ast.ComptimeFor {
kind: kind kind: kind
typ: typ typ: typ
typ_pos: typ_pos typ_pos: typ_pos
pos: spos.extend(p.tok.position()) pos: spos.extend(p.tok.pos())
} }
} }
@ -326,16 +326,16 @@ fn (mut p Parser) at() ast.AtExpr {
p.next() p.next()
return ast.AtExpr{ return ast.AtExpr{
name: name name: name
pos: p.tok.position() pos: p.tok.pos()
kind: kind kind: kind
} }
} }
fn (mut p Parser) comptime_selector(left ast.Expr) ast.Expr { fn (mut p Parser) comptime_selector(left ast.Expr) ast.Expr {
p.check(.dollar) p.check(.dollar)
start_pos := p.prev_tok.position() start_pos := p.prev_tok.pos()
if p.peek_tok.kind == .lpar { if p.peek_tok.kind == .lpar {
method_pos := p.tok.position() method_pos := p.tok.pos()
method_name := p.check_name() method_name := p.check_name()
p.mark_var_as_used(method_name) p.mark_var_as_used(method_name)
// `app.$action()` (`action` is a string) // `app.$action()` (`action` is a string)
@ -353,7 +353,7 @@ fn (mut p Parser) comptime_selector(left ast.Expr) ast.Expr {
scope: p.scope scope: p.scope
args_var: '' args_var: ''
args: args args: args
pos: start_pos.extend(p.prev_tok.position()) pos: start_pos.extend(p.prev_tok.pos())
} }
} }
mut has_parens := false mut has_parens := false
@ -361,7 +361,7 @@ fn (mut p Parser) comptime_selector(left ast.Expr) ast.Expr {
p.check(.lpar) p.check(.lpar)
has_parens = true has_parens = true
} else { } else {
p.warn_with_pos('use brackets instead e.g. `s.$(field.name)` - run vfmt', p.tok.position()) p.warn_with_pos('use brackets instead e.g. `s.$(field.name)` - run vfmt', p.tok.pos())
} }
expr := p.expr(0) expr := p.expr(0)
if has_parens { if has_parens {
@ -371,6 +371,6 @@ fn (mut p Parser) comptime_selector(left ast.Expr) ast.Expr {
has_parens: has_parens has_parens: has_parens
left: left left: left
field_expr: expr field_expr: expr
pos: start_pos.extend(p.prev_tok.position()) pos: start_pos.extend(p.prev_tok.pos())
} }
} }
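
Nearly every parser change above uses the same span-tracking idiom: capture the Pos of the first token, then extend it with the Pos of the last consumed token once parsing finishes. Outside a running parser that looks roughly like this (field values are invented; extend keeps the start point and stretches the span to the end position):

import v.token

fn main() {
	start := token.Pos{
		line_nr: 2
		col: 0
		len: 3
		pos: 30
	}
	end := token.Pos{
		line_nr: 2
		col: 10
		len: 1
		pos: 40
		last_line: 2
	}
	full := start.extend(end) // one Pos covering both tokens
	println(full.len)
}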


@ -6,8 +6,8 @@ module parser
import v.ast import v.ast
fn (mut p Parser) array_init() ast.ArrayInit { fn (mut p Parser) array_init() ast.ArrayInit {
first_pos := p.tok.position() first_pos := p.tok.pos()
mut last_pos := p.tok.position() mut last_pos := p.tok.pos()
p.check(.lsbr) p.check(.lsbr)
// p.warn('array_init() exp=$p.expected_type') // p.warn('array_init() exp=$p.expected_type')
mut array_type := ast.void_type mut array_type := ast.void_type
@ -23,13 +23,13 @@ fn (mut p Parser) array_init() ast.ArrayInit {
mut has_it := false mut has_it := false
mut default_expr := ast.empty_expr() mut default_expr := ast.empty_expr()
if p.tok.kind == .rsbr { if p.tok.kind == .rsbr {
last_pos = p.tok.position() last_pos = p.tok.pos()
// []typ => `[]` and `typ` must be on the same line // []typ => `[]` and `typ` must be on the same line
line_nr := p.tok.line_nr line_nr := p.tok.line_nr
p.next() p.next()
// []string // []string
if p.tok.kind in [.name, .amp, .lsbr, .key_shared] && p.tok.line_nr == line_nr { if p.tok.kind in [.name, .amp, .lsbr, .key_shared] && p.tok.line_nr == line_nr {
elem_type_pos = p.tok.position() elem_type_pos = p.tok.pos()
elem_type = p.parse_type() elem_type = p.parse_type()
// this is set here because it's a known type, others could be the // this is set here because it's a known type, others could be the
// result of expr so we do those in checker // result of expr so we do those in checker
@ -41,7 +41,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
} }
has_type = true has_type = true
} }
last_pos = p.tok.position() last_pos = p.tok.pos()
} else { } else {
// [1,2,3] or [const]byte // [1,2,3] or [const]byte
old_inside_array_lit := p.inside_array_lit old_inside_array_lit := p.inside_array_lit
@ -63,17 +63,17 @@ fn (mut p Parser) array_init() ast.ArrayInit {
tcc_stack_bug := 12345 tcc_stack_bug := 12345
_ = tcc_stack_bug _ = tcc_stack_bug
} }
last_pos = p.tok.position() last_pos = p.tok.pos()
p.check(.rsbr) p.check(.rsbr)
if exprs.len == 1 && p.tok.kind in [.name, .amp, .lsbr] && p.tok.line_nr == line_nr { if exprs.len == 1 && p.tok.kind in [.name, .amp, .lsbr] && p.tok.line_nr == line_nr {
// [100]byte // [100]byte
elem_type = p.parse_type() elem_type = p.parse_type()
last_pos = p.tok.position() last_pos = p.tok.pos()
is_fixed = true is_fixed = true
if p.tok.kind == .lcbr { if p.tok.kind == .lcbr {
p.next() p.next()
if p.tok.kind != .rcbr { if p.tok.kind != .rcbr {
pos := p.tok.position() pos := p.tok.pos()
n := p.check_name() n := p.check_name()
if n != 'init' { if n != 'init' {
p.error_with_pos('expected `init:`, not `$n`', pos) p.error_with_pos('expected `init:`, not `$n`', pos)
@ -94,7 +94,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
} }
p.close_scope() p.close_scope()
} }
last_pos = p.tok.position() last_pos = p.tok.pos()
p.check(.rcbr) p.check(.rcbr)
} else { } else {
p.warn_with_pos('use e.g. `x := [1]Type{}` instead of `x := [1]Type`', p.warn_with_pos('use e.g. `x := [1]Type{}` instead of `x := [1]Type`',
@ -102,13 +102,13 @@ fn (mut p Parser) array_init() ast.ArrayInit {
} }
} else { } else {
if p.tok.kind == .not { // && p.tok.line_nr == p.prev_tok.line_nr { if p.tok.kind == .not { // && p.tok.line_nr == p.prev_tok.line_nr {
last_pos = p.tok.position() last_pos = p.tok.pos()
is_fixed = true is_fixed = true
has_val = true has_val = true
p.next() p.next()
} }
if p.tok.kind == .not && p.tok.line_nr == p.prev_tok.line_nr { if p.tok.kind == .not && p.tok.line_nr == p.prev_tok.line_nr {
last_pos = p.tok.position() last_pos = p.tok.pos()
p.error_with_pos('use e.g. `[1, 2, 3]!` instead of `[1, 2, 3]!!`', last_pos) p.error_with_pos('use e.g. `[1, 2, 3]!` instead of `[1, 2, 3]!!`', last_pos)
p.next() p.next()
} }
@ -188,7 +188,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
// parse tokens between braces // parse tokens between braces
fn (mut p Parser) map_init() ast.MapInit { fn (mut p Parser) map_init() ast.MapInit {
first_pos := p.prev_tok.position() first_pos := p.prev_tok.pos()
mut keys := []ast.Expr{} mut keys := []ast.Expr{}
mut vals := []ast.Expr{} mut vals := []ast.Expr{}
mut comments := [][]ast.Comment{} mut comments := [][]ast.Comment{}
@ -207,7 +207,7 @@ fn (mut p Parser) map_init() ast.MapInit {
return ast.MapInit{ return ast.MapInit{
keys: keys keys: keys
vals: vals vals: vals
pos: first_pos.extend_with_last_line(p.tok.position(), p.tok.line_nr) pos: first_pos.extend_with_last_line(p.tok.pos(), p.tok.line_nr)
comments: comments comments: comments
pre_cmnts: pre_cmnts pre_cmnts: pre_cmnts
} }
@ -216,7 +216,7 @@ fn (mut p Parser) map_init() ast.MapInit {
fn (mut p Parser) scope_register_it_as_index() { fn (mut p Parser) scope_register_it_as_index() {
p.scope.objects['it'] = ast.Var{ // override it variable if it already exist, else create it variable p.scope.objects['it'] = ast.Var{ // override it variable if it already exist, else create it variable
name: 'it' name: 'it'
pos: p.tok.position() pos: p.tok.pos()
typ: ast.int_type typ: ast.int_type
is_mut: false is_mut: false
is_used: false is_used: false


@ -9,7 +9,7 @@ import v.token
pub fn (mut p Parser) expr(precedence int) ast.Expr { pub fn (mut p Parser) expr(precedence int) ast.Expr {
return p.check_expr(precedence) or { return p.check_expr(precedence) or {
p.error_with_pos('invalid expression: unexpected $p.tok', p.tok.position()) p.error_with_pos('invalid expression: unexpected $p.tok', p.tok.pos())
} }
} }
@ -47,7 +47,7 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
} else if p.tok.lit == 'map' && p.peek_tok.kind == .lcbr && !(p.builtin_mod } else if p.tok.lit == 'map' && p.peek_tok.kind == .lcbr && !(p.builtin_mod
&& p.file_base in ['map.v', 'map_d_gcboehm_opt.v']) { && p.file_base in ['map.v', 'map_d_gcboehm_opt.v']) {
p.warn_with_pos("deprecated map syntax, use syntax like `{'age': 20}`", p.warn_with_pos("deprecated map syntax, use syntax like `{'age': 20}`",
p.tok.position()) p.tok.pos())
p.next() // `map` p.next() // `map`
p.next() // `{` p.next() // `{`
node = p.map_init() node = p.map_init()
@ -85,14 +85,14 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
return p.if_expr(true) return p.if_expr(true)
} }
else { else {
return p.error_with_pos('unexpected `$`', p.peek_tok.position()) return p.error_with_pos('unexpected `$`', p.peek_tok.pos())
} }
} }
} }
.chartoken { .chartoken {
node = ast.CharLiteral{ node = ast.CharLiteral{
val: p.tok.lit val: p.tok.lit
pos: p.tok.position() pos: p.tok.pos()
} }
p.next() p.next()
} }
@ -116,7 +116,7 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
.key_true, .key_false { .key_true, .key_false {
node = ast.BoolLiteral{ node = ast.BoolLiteral{
val: p.tok.kind == .key_true val: p.tok.kind == .key_true
pos: p.tok.position() pos: p.tok.pos()
} }
p.next() p.next()
} }
@ -130,13 +130,13 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
node = p.parse_number_literal() node = p.parse_number_literal()
} }
.lpar { .lpar {
mut pos := p.tok.position() mut pos := p.tok.pos()
p.check(.lpar) p.check(.lpar)
node = p.expr(0) node = p.expr(0)
p.check(.rpar) p.check(.rpar)
node = ast.ParExpr{ node = ast.ParExpr{
expr: node expr: node
pos: pos.extend(p.prev_tok.position()) pos: pos.extend(p.prev_tok.pos())
} }
} }
.key_if { .key_if {
@ -144,7 +144,7 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
} }
.key_unsafe { .key_unsafe {
// unsafe { // unsafe {
mut pos := p.tok.position() mut pos := p.tok.pos()
p.next() p.next()
if p.inside_unsafe { if p.inside_unsafe {
return p.error_with_pos('already inside `unsafe` block', pos) return p.error_with_pos('already inside `unsafe` block', pos)
@ -168,7 +168,7 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
// parse json.decode type (`json.decode([]User, s)`) // parse json.decode type (`json.decode([]User, s)`)
node = p.name_expr() node = p.name_expr()
} else if p.is_amp && p.peek_tok.kind == .rsbr && p.peek_token(3).kind != .lcbr { } else if p.is_amp && p.peek_tok.kind == .rsbr && p.peek_token(3).kind != .lcbr {
pos := p.tok.position() pos := p.tok.pos()
typ := p.parse_type() typ := p.parse_type()
typname := p.table.sym(typ).name typname := p.table.sym(typ).name
p.check(.lpar) p.check(.lpar)
@ -185,7 +185,7 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
} }
} }
.key_none { .key_none {
pos := p.tok.position() pos := p.tok.pos()
p.next() p.next()
node = ast.None{ node = ast.None{
pos: pos pos: pos
@ -195,7 +195,7 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
is_reftype := p.tok.kind == .key_isreftype is_reftype := p.tok.kind == .key_isreftype
p.next() // sizeof p.next() // sizeof
p.check(.lpar) p.check(.lpar)
pos := p.tok.position() pos := p.tok.pos()
is_known_var := p.mark_var_as_used(p.tok.lit) is_known_var := p.mark_var_as_used(p.tok.lit)
|| p.table.global_scope.known_const(p.mod + '.' + p.tok.lit) || p.table.global_scope.known_const(p.mod + '.' + p.tok.lit)
//|| p.table.known_fn(p.mod + '.' + p.tok.lit) //|| p.table.known_fn(p.mod + '.' + p.tok.lit)
@ -242,7 +242,7 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
p.check(.rpar) p.check(.rpar)
} }
.key_typeof { .key_typeof {
spos := p.tok.position() spos := p.tok.pos()
p.next() p.next()
p.check(.lpar) p.check(.lpar)
expr := p.expr(0) expr := p.expr(0)
@ -253,29 +253,29 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
} }
node = ast.TypeOf{ node = ast.TypeOf{
expr: expr expr: expr
pos: spos.extend(p.tok.position()) pos: spos.extend(p.tok.pos())
} }
} }
.key_dump { .key_dump {
spos := p.tok.position() spos := p.tok.pos()
p.next() p.next()
p.check(.lpar) p.check(.lpar)
expr := p.expr(0) expr := p.expr(0)
p.check(.rpar) p.check(.rpar)
node = ast.DumpExpr{ node = ast.DumpExpr{
expr: expr expr: expr
pos: spos.extend(p.tok.position()) pos: spos.extend(p.tok.pos())
} }
} }
.key_offsetof { .key_offsetof {
pos := p.tok.position() pos := p.tok.pos()
p.next() // __offsetof p.next() // __offsetof
p.check(.lpar) p.check(.lpar)
st := p.parse_type() st := p.parse_type()
p.check(.comma) p.check(.comma)
if p.tok.kind != .name { if p.tok.kind != .name {
return p.error_with_pos('unexpected `$p.tok.lit`, expecting struct field', return p.error_with_pos('unexpected `$p.tok.lit`, expecting struct field',
p.tok.position()) p.tok.pos())
} }
field := p.tok.lit field := p.tok.lit
p.next() p.next()
@ -290,7 +290,7 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
is_likely := p.tok.kind == .key_likely is_likely := p.tok.kind == .key_likely
p.next() p.next()
p.check(.lpar) p.check(.lpar)
lpos := p.tok.position() lpos := p.tok.pos()
expr := p.expr(0) expr := p.expr(0)
p.check(.rpar) p.check(.rpar)
node = ast.Likely{ node = ast.Likely{
@ -308,10 +308,10 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
.key_fn { .key_fn {
if p.expecting_type { if p.expecting_type {
// Anonymous function type // Anonymous function type
start_pos := p.tok.position() start_pos := p.tok.pos()
return ast.TypeNode{ return ast.TypeNode{
typ: p.parse_type() typ: p.parse_type()
pos: start_pos.extend(p.prev_tok.position()) pos: start_pos.extend(p.prev_tok.pos())
} }
} else { } else {
// Anonymous function // Anonymous function
@ -322,7 +322,7 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
// but this would take a bit of modification // but this would take a bit of modification
if p.tok.kind == .lpar { if p.tok.kind == .lpar {
p.next() p.next()
pos := p.tok.position() pos := p.tok.pos()
args := p.call_args() args := p.call_args()
p.check(.rpar) p.check(.rpar)
node = ast.CallExpr{ node = ast.CallExpr{
@ -340,7 +340,7 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
if p.tok.kind != .eof && !(p.tok.kind == .rsbr && p.inside_asm) { if p.tok.kind != .eof && !(p.tok.kind == .rsbr && p.inside_asm) {
// eof should be handled where it happens // eof should be handled where it happens
return none return none
// return p.error_with_pos('invalid expression: unexpected $p.tok', p.tok.position()) // return p.error_with_pos('invalid expression: unexpected $p.tok', p.tok.pos())
} }
} }
} }
@ -355,17 +355,17 @@ pub fn (mut p Parser) check_expr(precedence int) ?ast.Expr {
pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_ident bool) ast.Expr { pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_ident bool) ast.Expr {
mut node := left mut node := left
if p.inside_asm && p.prev_tok.position().line_nr < p.tok.position().line_nr { if p.inside_asm && p.prev_tok.pos().line_nr < p.tok.pos().line_nr {
return node return node
} }
// Infix // Infix
for precedence < p.tok.precedence() { for precedence < p.tok.precedence() {
if p.tok.kind == .dot { //&& (p.tok.line_nr == p.prev_tok.line_nr if p.tok.kind == .dot { //&& (p.tok.line_nr == p.prev_tok.line_nr
// TODO fix a bug with prev_tok.last_line // TODO fix a bug with prev_tok.last_line
//|| p.prev_tok.position().last_line == p.tok.line_nr) { //|| p.prev_tok.pos().last_line == p.tok.line_nr) {
// if p.fileis('vcache.v') { // if p.fileis('vcache.v') {
// p.warn('tok.line_nr = $p.tok.line_nr; prev_tok.line_nr=$p.prev_tok.line_nr; // p.warn('tok.line_nr = $p.tok.line_nr; prev_tok.line_nr=$p.prev_tok.line_nr;
// prev_tok.last_line=$p.prev_tok.position().last_line') // prev_tok.last_line=$p.prev_tok.pos().last_line')
//} //}
node = p.dot_expr(node) node = p.dot_expr(node)
if p.name_error { if p.name_error {
@ -384,7 +384,7 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
p.is_stmt_ident = is_stmt_ident p.is_stmt_ident = is_stmt_ident
if p.tok.kind == .lpar && p.tok.line_nr == p.prev_tok.line_nr && node is ast.IndexExpr { if p.tok.kind == .lpar && p.tok.line_nr == p.prev_tok.line_nr && node is ast.IndexExpr {
p.next() p.next()
pos := p.tok.position() pos := p.tok.pos()
args := p.call_args() args := p.call_args()
p.check(.rpar) p.check(.rpar)
node = ast.CallExpr{ node = ast.CallExpr{
@ -398,7 +398,7 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
} else if p.tok.kind == .key_as { } else if p.tok.kind == .key_as {
// sum type as cast `x := SumType as Variant` // sum type as cast `x := SumType as Variant`
if !p.inside_asm { if !p.inside_asm {
pos := p.tok.position() pos := p.tok.pos()
p.next() p.next()
typ := p.parse_type() typ := p.parse_type()
node = ast.AsCast{ node = ast.AsCast{
@ -412,7 +412,7 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
} else if p.tok.kind == .left_shift && p.is_stmt_ident { } else if p.tok.kind == .left_shift && p.is_stmt_ident {
// arr << elem // arr << elem
tok := p.tok tok := p.tok
mut pos := tok.position() mut pos := tok.pos()
p.next() p.next()
right := p.expr(precedence - 1) right := p.expr(precedence - 1)
pos.update_last_line(p.prev_tok.line_nr) pos.update_last_line(p.prev_tok.line_nr)
@ -434,7 +434,7 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
} }
// added 10/2020: LATER this will be parsed as PrefixExpr instead // added 10/2020: LATER this will be parsed as PrefixExpr instead
p.warn_with_pos('move infix `$p.tok.kind` operator before new line (if infix intended) or use brackets for a prefix expression', p.warn_with_pos('move infix `$p.tok.kind` operator before new line (if infix intended) or use brackets for a prefix expression',
p.tok.position()) p.tok.pos())
} }
// continue on infix expr // continue on infix expr
node = p.infix_expr(node) node = p.infix_expr(node)
@ -448,12 +448,12 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
if p.peek_tok.kind in [.rpar, .rsbr] { if p.peek_tok.kind in [.rpar, .rsbr] {
if !p.inside_ct_if_expr { if !p.inside_ct_if_expr {
p.warn_with_pos('`$p.tok.kind` operator can only be used as a statement', p.warn_with_pos('`$p.tok.kind` operator can only be used as a statement',
p.peek_tok.position()) p.peek_tok.pos())
} }
} }
if p.tok.kind in [.inc, .dec] && p.prev_tok.line_nr != p.tok.line_nr { if p.tok.kind in [.inc, .dec] && p.prev_tok.line_nr != p.tok.line_nr {
p.error_with_pos('$p.tok must be on the same line as the previous token', p.error_with_pos('$p.tok must be on the same line as the previous token',
p.tok.position()) p.tok.pos())
} }
if mut node is ast.IndexExpr { if mut node is ast.IndexExpr {
node.recursive_mapset_is_setter(true) node.recursive_mapset_is_setter(true)
@ -461,7 +461,7 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
node = ast.PostfixExpr{ node = ast.PostfixExpr{
op: p.tok.kind op: p.tok.kind
expr: node expr: node
pos: p.tok.position() pos: p.tok.pos()
} }
p.next() p.next()
// return node // TODO bring back, only allow ++/-- in exprs in translated code // return node // TODO bring back, only allow ++/-- in exprs in translated code
@ -479,7 +479,7 @@ fn (mut p Parser) infix_expr(left ast.Expr) ast.Expr {
p.register_auto_import('sync') p.register_auto_import('sync')
} }
precedence := p.tok.precedence() precedence := p.tok.precedence()
mut pos := p.tok.position() mut pos := p.tok.pos()
p.next() p.next()
mut right := ast.empty_expr() mut right := ast.empty_expr()
prev_expecting_type := p.expecting_type prev_expecting_type := p.expecting_type
@ -502,7 +502,7 @@ fn (mut p Parser) infix_expr(left ast.Expr) ast.Expr {
} }
mut or_stmts := []ast.Stmt{} mut or_stmts := []ast.Stmt{}
mut or_kind := ast.OrKind.absent mut or_kind := ast.OrKind.absent
mut or_pos := p.tok.position() mut or_pos := p.tok.pos()
// allow `x := <-ch or {...}` to handle closed channel // allow `x := <-ch or {...}` to handle closed channel
if op == .arrow { if op == .arrow {
if p.tok.kind == .key_orelse { if p.tok.kind == .key_orelse {
@ -513,13 +513,13 @@ fn (mut p Parser) infix_expr(left ast.Expr) ast.Expr {
p.scope.register(ast.Var{ p.scope.register(ast.Var{
name: 'err' name: 'err'
typ: ast.error_type typ: ast.error_type
pos: p.tok.position() pos: p.tok.pos()
is_used: true is_used: true
is_stack_obj: true is_stack_obj: true
}) })
or_kind = .block or_kind = .block
or_stmts = p.parse_block_no_scope(false) or_stmts = p.parse_block_no_scope(false)
or_pos = or_pos.extend(p.prev_tok.position()) or_pos = or_pos.extend(p.prev_tok.pos())
p.close_scope() p.close_scope()
p.inside_or_expr = was_inside_or_expr p.inside_or_expr = was_inside_or_expr
} }
@ -546,17 +546,17 @@ fn (mut p Parser) infix_expr(left ast.Expr) ast.Expr {
fn (mut p Parser) go_expr() ast.GoExpr { fn (mut p Parser) go_expr() ast.GoExpr {
p.next() p.next()
spos := p.tok.position() spos := p.tok.pos()
expr := p.expr(0) expr := p.expr(0)
call_expr := if expr is ast.CallExpr { call_expr := if expr is ast.CallExpr {
expr expr
} else { } else {
p.error_with_pos('expression in `go` must be a function call', expr.position()) p.error_with_pos('expression in `go` must be a function call', expr.pos())
ast.CallExpr{ ast.CallExpr{
scope: p.scope scope: p.scope
} }
} }
pos := spos.extend(p.prev_tok.position()) pos := spos.extend(p.prev_tok.pos())
p.register_auto_import('sync.threads') p.register_auto_import('sync.threads')
p.table.gostmts++ p.table.gostmts++
return ast.GoExpr{ return ast.GoExpr{
@ -570,7 +570,7 @@ fn (p &Parser) fileis(s string) bool {
} }
fn (mut p Parser) prefix_expr() ast.Expr { fn (mut p Parser) prefix_expr() ast.Expr {
mut pos := p.tok.position() mut pos := p.tok.pos()
op := p.tok.kind op := p.tok.kind
if op == .amp { if op == .amp {
p.is_amp = true p.is_amp = true
@ -608,7 +608,7 @@ fn (mut p Parser) prefix_expr() ast.Expr {
} }
mut or_stmts := []ast.Stmt{} mut or_stmts := []ast.Stmt{}
mut or_kind := ast.OrKind.absent mut or_kind := ast.OrKind.absent
mut or_pos := p.tok.position() mut or_pos := p.tok.pos()
// allow `x := <-ch or {...}` to handle closed channel // allow `x := <-ch or {...}` to handle closed channel
if op == .arrow { if op == .arrow {
if p.tok.kind == .key_orelse { if p.tok.kind == .key_orelse {
@ -619,13 +619,13 @@ fn (mut p Parser) prefix_expr() ast.Expr {
p.scope.register(ast.Var{ p.scope.register(ast.Var{
name: 'err' name: 'err'
typ: ast.error_type typ: ast.error_type
pos: p.tok.position() pos: p.tok.pos()
is_used: true is_used: true
is_stack_obj: true is_stack_obj: true
}) })
or_kind = .block or_kind = .block
or_stmts = p.parse_block_no_scope(false) or_stmts = p.parse_block_no_scope(false)
or_pos = or_pos.extend(p.prev_tok.position()) or_pos = or_pos.extend(p.prev_tok.pos())
p.close_scope() p.close_scope()
p.inside_or_expr = was_inside_or_expr p.inside_or_expr = was_inside_or_expr
} }
@ -648,7 +648,7 @@ fn (mut p Parser) prefix_expr() ast.Expr {
} }
} }
fn (mut p Parser) recast_as_pointer(mut cast_expr ast.CastExpr, pos token.Position) { fn (mut p Parser) recast_as_pointer(mut cast_expr ast.CastExpr, pos token.Pos) {
cast_expr.typ = cast_expr.typ.ref() cast_expr.typ = cast_expr.typ.ref()
cast_expr.typname = p.table.sym(cast_expr.typ).name cast_expr.typname = p.table.sym(cast_expr.typ).name
cast_expr.pos = pos.extend(cast_expr.pos) cast_expr.pos = pos.extend(cast_expr.pos)
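
One more shape that repeats in the or-block handling above: an err variable is registered in scope with the position of the current token. A standalone sketch of such a Var value (the position numbers are invented):

import v.ast
import v.token

fn main() {
	err_var := ast.Var{
		name: 'err'
		typ: ast.error_type
		pos: token.Pos{
			line_nr: 11
			col: 20
		}
		is_used: true
	}
	println('${err_var.name} registered at line ${err_var.pos.line_nr + 1}')
}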


@ -8,7 +8,7 @@ import v.token
import v.util import v.util
pub fn (mut p Parser) call_expr(language ast.Language, mod string) ast.CallExpr { pub fn (mut p Parser) call_expr(language ast.Language, mod string) ast.CallExpr {
first_pos := p.tok.position() first_pos := p.tok.pos()
mut fn_name := if language == .c { mut fn_name := if language == .c {
'C.$p.check_name()' 'C.$p.check_name()'
} else if language == .js { } else if language == .js {
@ -34,16 +34,16 @@ pub fn (mut p Parser) call_expr(language ast.Language, mod string) ast.CallExpr
p.expr_mod = '' p.expr_mod = ''
mut concrete_types := []ast.Type{} mut concrete_types := []ast.Type{}
mut concrete_list_pos := p.tok.position() mut concrete_list_pos := p.tok.pos()
if p.tok.kind == .lt { if p.tok.kind == .lt {
// `foo<int>(10)` // `foo<int>(10)`
p.expr_mod = '' p.expr_mod = ''
concrete_types = p.parse_concrete_types() concrete_types = p.parse_concrete_types()
concrete_list_pos = concrete_list_pos.extend(p.prev_tok.position()) concrete_list_pos = concrete_list_pos.extend(p.prev_tok.pos())
} }
p.check(.lpar) p.check(.lpar)
args := p.call_args() args := p.call_args()
last_pos := p.tok.position() last_pos := p.tok.pos()
p.check(.rpar) p.check(.rpar)
// ! in mutable methods // ! in mutable methods
if p.tok.kind == .not { if p.tok.kind == .not {
@ -51,7 +51,7 @@ pub fn (mut p Parser) call_expr(language ast.Language, mod string) ast.CallExpr
} }
mut pos := first_pos.extend(last_pos) mut pos := first_pos.extend(last_pos)
mut or_stmts := []ast.Stmt{} // TODO remove unnecessary allocations by just using .absent mut or_stmts := []ast.Stmt{} // TODO remove unnecessary allocations by just using .absent
mut or_pos := p.tok.position() mut or_pos := p.tok.pos()
if p.tok.kind == .key_orelse { if p.tok.kind == .key_orelse {
// `foo() or {}`` // `foo() or {}``
was_inside_or_expr := p.inside_or_expr was_inside_or_expr := p.inside_or_expr
@ -61,12 +61,12 @@ pub fn (mut p Parser) call_expr(language ast.Language, mod string) ast.CallExpr
p.scope.register(ast.Var{ p.scope.register(ast.Var{
name: 'err' name: 'err'
typ: ast.error_type typ: ast.error_type
pos: p.tok.position() pos: p.tok.pos()
is_used: true is_used: true
}) })
or_kind = .block or_kind = .block
or_stmts = p.parse_block_no_scope(false) or_stmts = p.parse_block_no_scope(false)
or_pos = or_pos.extend(p.prev_tok.position()) or_pos = or_pos.extend(p.prev_tok.pos())
p.close_scope() p.close_scope()
p.inside_or_expr = was_inside_or_expr p.inside_or_expr = was_inside_or_expr
} }
@ -74,7 +74,7 @@ pub fn (mut p Parser) call_expr(language ast.Language, mod string) ast.CallExpr
// `foo()?` // `foo()?`
p.next() p.next()
if p.inside_defer { if p.inside_defer {
p.error_with_pos('error propagation not allowed inside `defer` blocks', p.prev_tok.position()) p.error_with_pos('error propagation not allowed inside `defer` blocks', p.prev_tok.pos())
} }
or_kind = .propagate or_kind = .propagate
} }
@ -104,7 +104,7 @@ pub fn (mut p Parser) call_expr(language ast.Language, mod string) ast.CallExpr
pub fn (mut p Parser) call_args() []ast.CallArg { pub fn (mut p Parser) call_args() []ast.CallArg {
mut args := []ast.CallArg{} mut args := []ast.CallArg{}
start_pos := p.tok.position() start_pos := p.tok.pos()
for p.tok.kind != .rpar { for p.tok.kind != .rpar {
if p.tok.kind == .eof { if p.tok.kind == .eof {
p.error_with_pos('unexpected eof reached, while parsing call argument', start_pos) p.error_with_pos('unexpected eof reached, while parsing call argument', start_pos)
@ -117,7 +117,7 @@ pub fn (mut p Parser) call_args() []ast.CallArg {
p.next() p.next()
} }
mut comments := p.eat_comments() mut comments := p.eat_comments()
arg_start_pos := p.tok.position() arg_start_pos := p.tok.pos()
mut array_decompose := false mut array_decompose := false
if p.tok.kind == .ellipsis { if p.tok.kind == .ellipsis {
p.next() p.next()
@ -133,14 +133,14 @@ pub fn (mut p Parser) call_args() []ast.CallArg {
if array_decompose { if array_decompose {
expr = ast.ArrayDecompose{ expr = ast.ArrayDecompose{
expr: expr expr: expr
pos: p.tok.position() pos: p.tok.pos()
} }
} }
if mut expr is ast.StructInit { if mut expr is ast.StructInit {
expr.pre_comments << comments expr.pre_comments << comments
comments = []ast.Comment{} comments = []ast.Comment{}
} }
pos := arg_start_pos.extend(p.prev_tok.position()) pos := arg_start_pos.extend(p.prev_tok.pos())
comments << p.eat_comments() comments << p.eat_comments()
args << ast.CallArg{ args << ast.CallArg{
is_mut: is_mut is_mut: is_mut
@ -159,16 +159,16 @@ pub fn (mut p Parser) call_args() []ast.CallArg {
struct ReceiverParsingInfo { struct ReceiverParsingInfo {
mut: mut:
name string name string
pos token.Position pos token.Pos
typ ast.Type typ ast.Type
type_pos token.Position type_pos token.Pos
is_mut bool is_mut bool
language ast.Language language ast.Language
} }
fn (mut p Parser) fn_decl() ast.FnDecl { fn (mut p Parser) fn_decl() ast.FnDecl {
p.top_level_statement_start() p.top_level_statement_start()
start_pos := p.tok.position() start_pos := p.tok.pos()
mut is_manualfree := p.is_manualfree mut is_manualfree := p.is_manualfree
mut is_deprecated := false mut is_deprecated := false
@ -207,7 +207,7 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
p.open_scope() p.open_scope()
// C. || JS. // C. || JS.
mut language := ast.Language.v mut language := ast.Language.v
language_tok_pos := p.tok.position() language_tok_pos := p.tok.pos()
if p.tok.kind == .name && p.tok.lit == 'C' { if p.tok.kind == .name && p.tok.lit == 'C' {
is_unsafe = !is_trusted is_unsafe = !is_trusted
language = .c language = .c
@ -251,7 +251,7 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
p.fn_language = language p.fn_language = language
} }
mut name := '' mut name := ''
name_pos := p.tok.position() name_pos := p.tok.pos()
if p.tok.kind == .name { if p.tok.kind == .name {
// TODO high order fn // TODO high order fn
name = if language == .js { p.check_js_name() } else { p.check_name() } name = if language == .js { p.check_js_name() } else { p.check_name() }
@ -297,14 +297,14 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
name = p.tok.kind.str() // op_to_fn_name() name = p.tok.kind.str() // op_to_fn_name()
if rec.typ == ast.void_type { if rec.typ == ast.void_type {
p.error_with_pos('cannot use operator overloading with normal functions', p.error_with_pos('cannot use operator overloading with normal functions',
p.tok.position()) p.tok.pos())
} }
p.next() p.next()
} else if p.tok.kind in [.ne, .gt, .ge, .le] && p.peek_tok.kind == .lpar { } else if p.tok.kind in [.ne, .gt, .ge, .le] && p.peek_tok.kind == .lpar {
p.error_with_pos('cannot overload `!=`, `>`, `<=` and `>=` as they are auto generated from `==` and`<`', p.error_with_pos('cannot overload `!=`, `>`, `<=` and `>=` as they are auto generated from `==` and`<`',
p.tok.position()) p.tok.pos())
} else { } else {
p.error_with_pos('expecting method name', p.tok.position()) p.error_with_pos('expecting method name', p.tok.pos())
return ast.FnDecl{ return ast.FnDecl{
scope: 0 scope: 0
} }
@ -351,18 +351,18 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
} }
} }
// Return type // Return type
mut return_type_pos := p.tok.position() mut return_type_pos := p.tok.pos()
mut return_type := ast.void_type mut return_type := ast.void_type
// don't confuse token on the next line: fn decl, [attribute] // don't confuse token on the next line: fn decl, [attribute]
same_line := p.tok.line_nr == p.prev_tok.line_nr same_line := p.tok.line_nr == p.prev_tok.line_nr
if (p.tok.kind.is_start_of_type() && (same_line || p.tok.kind != .lsbr)) if (p.tok.kind.is_start_of_type() && (same_line || p.tok.kind != .lsbr))
|| (same_line && p.tok.kind == .key_fn) { || (same_line && p.tok.kind == .key_fn) {
return_type = p.parse_type() return_type = p.parse_type()
return_type_pos = return_type_pos.extend(p.prev_tok.position()) return_type_pos = return_type_pos.extend(p.prev_tok.pos())
} }
mut type_sym_method_idx := 0 mut type_sym_method_idx := 0
no_body := p.tok.kind != .lcbr no_body := p.tok.kind != .lcbr
end_pos := p.prev_tok.position() end_pos := p.prev_tok.pos()
short_fn_name := name short_fn_name := name
is_main := short_fn_name == 'main' && p.mod == 'main' is_main := short_fn_name == 'main' && p.mod == 'main'
mut is_test := (short_fn_name.starts_with('test_') || short_fn_name.starts_with('testsuite_')) mut is_test := (short_fn_name.starts_with('test_') || short_fn_name.starts_with('testsuite_'))
@ -470,10 +470,10 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
// Body // Body
p.cur_fn_name = name p.cur_fn_name = name
mut stmts := []ast.Stmt{} mut stmts := []ast.Stmt{}
body_start_pos := p.peek_tok.position() body_start_pos := p.peek_tok.pos()
if p.tok.kind == .lcbr { if p.tok.kind == .lcbr {
if language != .v && language != .js { if language != .v && language != .js {
p.error_with_pos('interop functions cannot have a body', p.tok.position()) p.error_with_pos('interop functions cannot have a body', p.tok.pos())
} }
p.inside_fn = true p.inside_fn = true
p.inside_unsafe_fn = is_unsafe p.inside_unsafe_fn = is_unsafe
@ -546,7 +546,7 @@ fn (mut p Parser) fn_receiver(mut params []ast.Param, mut rec ReceiverParsingInf
defer { defer {
p.inside_receiver_param = false p.inside_receiver_param = false
} }
lpar_pos := p.tok.position() lpar_pos := p.tok.pos()
p.next() // ( p.next() // (
is_shared := p.tok.kind == .key_shared is_shared := p.tok.kind == .key_shared
is_atomic := p.tok.kind == .key_atomic is_atomic := p.tok.kind == .key_atomic
@ -554,20 +554,20 @@ fn (mut p Parser) fn_receiver(mut params []ast.Param, mut rec ReceiverParsingInf
if rec.is_mut { if rec.is_mut {
p.next() // `mut` p.next() // `mut`
} }
rec_start_pos := p.tok.position() rec_start_pos := p.tok.pos()
rec.name = p.check_name() rec.name = p.check_name()
if !rec.is_mut { if !rec.is_mut {
rec.is_mut = p.tok.kind == .key_mut rec.is_mut = p.tok.kind == .key_mut
if rec.is_mut { if rec.is_mut {
ptoken2 := p.peek_token(2) // needed to prevent codegen bug, where .position() expects &Token ptoken2 := p.peek_token(2) // needed to prevent codegen bug, where .pos() expects &Token
p.warn_with_pos('use `(mut f Foo)` instead of `(f mut Foo)`', lpar_pos.extend(ptoken2.position())) p.warn_with_pos('use `(mut f Foo)` instead of `(f mut Foo)`', lpar_pos.extend(ptoken2.pos()))
} }
} }
if p.tok.kind == .key_shared { if p.tok.kind == .key_shared {
ptoken2 := p.peek_token(2) // needed to prevent codegen bug, where .position() expects &Token ptoken2 := p.peek_token(2) // needed to prevent codegen bug, where .pos() expects &Token
p.error_with_pos('use `(shared f Foo)` instead of `(f shared Foo)`', lpar_pos.extend(ptoken2.position())) p.error_with_pos('use `(shared f Foo)` instead of `(f shared Foo)`', lpar_pos.extend(ptoken2.pos()))
} }
rec.pos = rec_start_pos.extend(p.tok.position()) rec.pos = rec_start_pos.extend(p.tok.pos())
is_amp := p.tok.kind == .amp is_amp := p.tok.kind == .amp
if p.tok.kind == .name && p.tok.lit == 'JS' { if p.tok.kind == .name && p.tok.lit == 'JS' {
rec.language = ast.Language.js rec.language = ast.Language.js
@ -577,16 +577,16 @@ fn (mut p Parser) fn_receiver(mut params []ast.Param, mut rec ReceiverParsingInf
// } // }
// TODO: talk to alex, should mut be parsed with the type like this? // TODO: talk to alex, should mut be parsed with the type like this?
// or should it be a property of the arg, like this ptr/mut becomes indistinguishable // or should it be a property of the arg, like this ptr/mut becomes indistinguishable
rec.type_pos = p.tok.position() rec.type_pos = p.tok.pos()
rec.typ = p.parse_type_with_mut(rec.is_mut) rec.typ = p.parse_type_with_mut(rec.is_mut)
if rec.typ.idx() == 0 { if rec.typ.idx() == 0 {
// error is set in parse_type // error is set in parse_type
return error('void receiver type') return error('void receiver type')
} }
rec.type_pos = rec.type_pos.extend(p.prev_tok.position()) rec.type_pos = rec.type_pos.extend(p.prev_tok.pos())
if is_amp && rec.is_mut { if is_amp && rec.is_mut {
p.error_with_pos('use `(mut f Foo)` or `(f &Foo)` instead of `(mut f &Foo)`', p.error_with_pos('use `(mut f Foo)` or `(f &Foo)` instead of `(mut f &Foo)`',
lpar_pos.extend(p.tok.position())) lpar_pos.extend(p.tok.pos()))
return error('invalid `mut f &Foo`') return error('invalid `mut f &Foo`')
} }
if is_shared { if is_shared {
@ -624,11 +624,11 @@ fn (mut p Parser) fn_receiver(mut params []ast.Param, mut rec ReceiverParsingInf
} }
fn (mut p Parser) anon_fn() ast.AnonFn { fn (mut p Parser) anon_fn() ast.AnonFn {
pos := p.tok.position() pos := p.tok.pos()
p.check(.key_fn) p.check(.key_fn)
if p.pref.is_script && p.tok.kind == .name { if p.pref.is_script && p.tok.kind == .name {
p.error_with_pos('function declarations in script mode should be before all script statements', p.error_with_pos('function declarations in script mode should be before all script statements',
p.tok.position()) p.tok.pos())
return ast.AnonFn{} return ast.AnonFn{}
} }
old_inside_defer := p.inside_defer old_inside_defer := p.inside_defer
@ -658,16 +658,16 @@ fn (mut p Parser) anon_fn() ast.AnonFn {
} }
mut same_line := p.tok.line_nr == p.prev_tok.line_nr mut same_line := p.tok.line_nr == p.prev_tok.line_nr
mut return_type := ast.void_type mut return_type := ast.void_type
mut return_type_pos := p.tok.position() mut return_type_pos := p.tok.pos()
// lpar: multiple return types // lpar: multiple return types
if same_line { if same_line {
if (p.tok.kind.is_start_of_type() && (same_line || p.tok.kind != .lsbr)) if (p.tok.kind.is_start_of_type() && (same_line || p.tok.kind != .lsbr))
|| (same_line && p.tok.kind == .key_fn) { || (same_line && p.tok.kind == .key_fn) {
return_type = p.parse_type() return_type = p.parse_type()
return_type_pos = return_type_pos.extend(p.tok.position()) return_type_pos = return_type_pos.extend(p.tok.pos())
} else if p.tok.kind != .lcbr { } else if p.tok.kind != .lcbr {
p.error_with_pos('expected return type, not $p.tok for anonymous function', p.error_with_pos('expected return type, not $p.tok for anonymous function',
p.tok.position()) p.tok.pos())
} }
} }
mut stmts := []ast.Stmt{} mut stmts := []ast.Stmt{}
@ -675,7 +675,7 @@ fn (mut p Parser) anon_fn() ast.AnonFn {
same_line = p.tok.line_nr == p.prev_tok.line_nr same_line = p.tok.line_nr == p.prev_tok.line_nr
if no_body && same_line { if no_body && same_line {
p.error_with_pos('unexpected $p.tok after anonymous function signature, expecting `{`', p.error_with_pos('unexpected $p.tok after anonymous function signature, expecting `{`',
p.tok.position()) p.tok.pos())
} }
mut label_names := []string{} mut label_names := []string{}
mut func := ast.Fn{ mut func := ast.Fn{
@ -712,7 +712,7 @@ fn (mut p Parser) anon_fn() ast.AnonFn {
is_method: false is_method: false
is_anon: true is_anon: true
no_body: no_body no_body: no_body
pos: pos.extend(p.prev_tok.position()) pos: pos.extend(p.prev_tok.pos())
file: p.file_name file: p.file_name
scope: p.scope scope: p.scope
label_names: label_names label_names: label_names
@ -743,7 +743,7 @@ fn (mut p Parser) fn_args() ([]ast.Param, bool, bool) {
mut arg_no := 1 mut arg_no := 1
for p.tok.kind != .rpar { for p.tok.kind != .rpar {
if p.tok.kind == .eof { if p.tok.kind == .eof {
p.error_with_pos('expecting `)`', p.tok.position()) p.error_with_pos('expecting `)`', p.tok.pos())
return []ast.Param{}, false, false return []ast.Param{}, false, false
} }
is_shared := p.tok.kind == .key_shared is_shared := p.tok.kind == .key_shared
@ -756,7 +756,7 @@ fn (mut p Parser) fn_args() ([]ast.Param, bool, bool) {
p.next() p.next()
is_variadic = true is_variadic = true
} }
pos := p.tok.position() pos := p.tok.pos()
mut arg_type := p.parse_type() mut arg_type := p.parse_type()
if arg_type == 0 { if arg_type == 0 {
// error is added in parse_type // error is added in parse_type
@ -791,7 +791,7 @@ fn (mut p Parser) fn_args() ([]ast.Param, bool, bool) {
arg_type = ast.new_type(p.table.find_or_register_array(arg_type)).set_flag(.variadic) arg_type = ast.new_type(p.table.find_or_register_array(arg_type)).set_flag(.variadic)
} }
if p.tok.kind == .eof { if p.tok.kind == .eof {
p.error_with_pos('expecting `)`', p.prev_tok.position()) p.error_with_pos('expecting `)`', p.prev_tok.pos())
return []ast.Param{}, false, false return []ast.Param{}, false, false
} }
if p.tok.kind == .comma { if p.tok.kind == .comma {
@ -822,7 +822,7 @@ fn (mut p Parser) fn_args() ([]ast.Param, bool, bool) {
} else { } else {
for p.tok.kind != .rpar { for p.tok.kind != .rpar {
if p.tok.kind == .eof { if p.tok.kind == .eof {
p.error_with_pos('expecting `)`', p.tok.position()) p.error_with_pos('expecting `)`', p.tok.pos())
return []ast.Param{}, false, false return []ast.Param{}, false, false
} }
is_shared := p.tok.kind == .key_shared is_shared := p.tok.kind == .key_shared
@ -831,9 +831,9 @@ fn (mut p Parser) fn_args() ([]ast.Param, bool, bool) {
if is_mut { if is_mut {
p.next() p.next()
} }
mut arg_pos := [p.tok.position()] mut arg_pos := [p.tok.pos()]
mut arg_names := [p.check_name()] mut arg_names := [p.check_name()]
mut type_pos := [p.tok.position()] mut type_pos := [p.tok.pos()]
// `a, b, c int` // `a, b, c int`
for p.tok.kind == .comma { for p.tok.kind == .comma {
if !p.pref.is_fmt { if !p.pref.is_fmt {
@ -842,25 +842,25 @@ fn (mut p Parser) fn_args() ([]ast.Param, bool, bool) {
'Use `fn f(x Type, y Type)` instead. You can run `v fmt -w "$p.scanner.file_path"` to automatically fix your code.') 'Use `fn f(x Type, y Type)` instead. You can run `v fmt -w "$p.scanner.file_path"` to automatically fix your code.')
} }
p.next() p.next()
arg_pos << p.tok.position() arg_pos << p.tok.pos()
arg_names << p.check_name() arg_names << p.check_name()
type_pos << p.tok.position() type_pos << p.tok.pos()
} }
if p.tok.kind == .key_mut { if p.tok.kind == .key_mut {
// TODO remove old syntax // TODO remove old syntax
if !p.pref.is_fmt { if !p.pref.is_fmt {
p.warn_with_pos('use `mut f Foo` instead of `f mut Foo`', p.tok.position()) p.warn_with_pos('use `mut f Foo` instead of `f mut Foo`', p.tok.pos())
} }
is_mut = true is_mut = true
} }
if p.tok.kind == .key_shared { if p.tok.kind == .key_shared {
p.error_with_pos('use `shared f Foo` instead of `f shared Foo`', p.tok.position()) p.error_with_pos('use `shared f Foo` instead of `f shared Foo`', p.tok.pos())
} }
if p.tok.kind == .ellipsis { if p.tok.kind == .ellipsis {
p.next() p.next()
is_variadic = true is_variadic = true
} }
pos := p.tok.position() pos := p.tok.pos()
mut typ := p.parse_type() mut typ := p.parse_type()
if typ == 0 { if typ == 0 {
// error is added in parse_type // error is added in parse_type
@ -911,7 +911,7 @@ fn (mut p Parser) fn_args() ([]ast.Param, bool, bool) {
} }
} }
if p.tok.kind == .eof { if p.tok.kind == .eof {
p.error_with_pos('expecting `)`', p.prev_tok.position()) p.error_with_pos('expecting `)`', p.prev_tok.pos())
return []ast.Param{}, false, false return []ast.Param{}, false, false
} }
if p.tok.kind != .rpar { if p.tok.kind != .rpar {
@ -934,11 +934,11 @@ fn (mut p Parser) closure_vars() []ast.Param {
if is_mut { if is_mut {
p.next() p.next()
} }
var_pos := p.tok.position() var_pos := p.tok.pos()
p.check(.name) p.check(.name)
var_name := p.prev_tok.lit var_name := p.prev_tok.lit
mut var := p.scope.parent.find_var(var_name) or { mut var := p.scope.parent.find_var(var_name) or {
p.error_with_pos('undefined ident: `$var_name`', p.prev_tok.position()) p.error_with_pos('undefined ident: `$var_name`', p.prev_tok.pos())
continue continue
} }
var.is_used = true var.is_used = true
@ -967,7 +967,7 @@ fn (mut p Parser) closure_vars() []ast.Param {
return vars return vars
} }
fn (mut p Parser) check_fn_mutable_arguments(typ ast.Type, pos token.Position) { fn (mut p Parser) check_fn_mutable_arguments(typ ast.Type, pos token.Pos) {
sym := p.table.sym(typ) sym := p.table.sym(typ)
if sym.kind in [.array, .array_fixed, .interface_, .map, .placeholder, .struct_, .generic_inst, if sym.kind in [.array, .array_fixed, .interface_, .map, .placeholder, .struct_, .generic_inst,
.sum_type] { .sum_type] {
@ -990,7 +990,7 @@ fn (mut p Parser) check_fn_mutable_arguments(typ ast.Type, pos token.Position) {
pos) pos)
} }
fn (mut p Parser) check_fn_shared_arguments(typ ast.Type, pos token.Position) { fn (mut p Parser) check_fn_shared_arguments(typ ast.Type, pos token.Pos) {
sym := p.table.sym(typ) sym := p.table.sym(typ)
if sym.kind !in [.array, .struct_, .map, .placeholder] && !typ.is_ptr() { if sym.kind !in [.array, .struct_, .map, .placeholder] && !typ.is_ptr() {
p.error_with_pos('shared arguments are only allowed for arrays, maps, and structs\n', p.error_with_pos('shared arguments are only allowed for arrays, maps, and structs\n',
@ -998,7 +998,7 @@ fn (mut p Parser) check_fn_shared_arguments(typ ast.Type, pos token.Position) {
} }
} }
fn (mut p Parser) check_fn_atomic_arguments(typ ast.Type, pos token.Position) { fn (mut p Parser) check_fn_atomic_arguments(typ ast.Type, pos token.Pos) {
sym := p.table.sym(typ) sym := p.table.sym(typ)
if sym.kind !in [.u32, .int, .u64] { if sym.kind !in [.u32, .int, .u64] {
p.error_with_pos('atomic arguments are only allowed for 32/64 bit integers\n' + p.error_with_pos('atomic arguments are only allowed for 32/64 bit integers\n' +


@ -7,7 +7,7 @@ import v.ast
fn (mut p Parser) for_stmt() ast.Stmt { fn (mut p Parser) for_stmt() ast.Stmt {
p.check(.key_for) p.check(.key_for)
mut pos := p.tok.position() mut pos := p.tok.pos()
p.open_scope() p.open_scope()
p.inside_for = true p.inside_for = true
if p.tok.kind == .key_match { if p.tok.kind == .key_match {
@ -86,12 +86,12 @@ fn (mut p Parser) for_stmt() ast.Stmt {
|| (p.tok.kind == .key_mut && p.peek_token(2).kind in [.key_in, .comma]) { || (p.tok.kind == .key_mut && p.peek_token(2).kind in [.key_in, .comma]) {
// `for i in vals`, `for i in start .. end`, `for mut user in users`, `for i, mut user in users` // `for i in vals`, `for i in start .. end`, `for mut user in users`, `for i, mut user in users`
mut val_is_mut := p.tok.kind == .key_mut mut val_is_mut := p.tok.kind == .key_mut
mut_pos := p.tok.position() mut_pos := p.tok.pos()
if val_is_mut { if val_is_mut {
p.next() p.next()
} }
key_var_pos := p.tok.position() key_var_pos := p.tok.pos()
mut val_var_pos := p.tok.position() mut val_var_pos := p.tok.pos()
mut key_var_name := '' mut key_var_name := ''
mut val_var_name := p.check_name() mut val_var_name := p.check_name()
if p.tok.kind == .comma { if p.tok.kind == .comma {
@ -105,7 +105,7 @@ fn (mut p Parser) for_stmt() ast.Stmt {
val_is_mut = true val_is_mut = true
} }
key_var_name = val_var_name key_var_name = val_var_name
val_var_pos = p.tok.position() val_var_pos = p.tok.pos()
val_var_name = p.check_name() val_var_name = p.check_name()
if key_var_name == val_var_name && key_var_name != '_' { if key_var_name == val_var_name && key_var_name != '_' {
return p.error_with_pos('key and value in a for loop cannot be the same', return p.error_with_pos('key and value in a for loop cannot be the same',


@ -15,11 +15,11 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
} }
p.inside_if_expr = true p.inside_if_expr = true
is_expr := p.prev_tok.kind == .key_return is_expr := p.prev_tok.kind == .key_return
mut pos := p.tok.position() mut pos := p.tok.pos()
if is_comptime { if is_comptime {
p.inside_ct_if_expr = true p.inside_ct_if_expr = true
p.next() // `$` p.next() // `$`
pos = p.prev_tok.position().extend(p.tok.position()) pos = p.prev_tok.pos().extend(p.tok.pos())
} }
mut branches := []ast.IfBranch{} mut branches := []ast.IfBranch{}
mut has_else := false mut has_else := false
@ -27,11 +27,7 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
mut prev_guard := false mut prev_guard := false
for p.tok.kind in [.key_if, .key_else] { for p.tok.kind in [.key_if, .key_else] {
p.inside_if = true p.inside_if = true
start_pos := if is_comptime { start_pos := if is_comptime { p.prev_tok.pos().extend(p.tok.pos()) } else { p.tok.pos() }
p.prev_tok.position().extend(p.tok.position())
} else {
p.tok.position()
}
if p.tok.kind == .key_else { if p.tok.kind == .key_else {
comments << p.eat_comments() comments << p.eat_comments()
p.check(.key_else) p.check(.key_else)
@ -44,15 +40,15 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
// else { // else {
has_else = true has_else = true
p.inside_if = false p.inside_if = false
end_pos := p.prev_tok.position() end_pos := p.prev_tok.pos()
body_pos := p.tok.position() body_pos := p.tok.pos()
p.open_scope() p.open_scope()
// only declare `err` if previous branch was an `if` guard // only declare `err` if previous branch was an `if` guard
if prev_guard { if prev_guard {
p.scope.register(ast.Var{ p.scope.register(ast.Var{
name: 'err' name: 'err'
typ: ast.error_type typ: ast.error_type
pos: p.tok.position() pos: p.tok.pos()
is_used: true is_used: true
is_stack_obj: true is_stack_obj: true
}) })
@ -60,7 +56,7 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
branches << ast.IfBranch{ branches << ast.IfBranch{
stmts: p.parse_block_no_scope(false) stmts: p.parse_block_no_scope(false)
pos: start_pos.extend(end_pos) pos: start_pos.extend(end_pos)
body_pos: body_pos.extend(p.tok.position()) body_pos: body_pos.extend(p.tok.pos())
comments: comments comments: comments
scope: p.scope scope: p.scope
} }
@ -95,7 +91,7 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
p.next() p.next()
} }
var.is_mut = is_mut var.is_mut = is_mut
var.pos = p.tok.position() var.pos = p.tok.pos()
var.name = p.check_name() var.name = p.check_name()
if p.scope.known_var(var.name) { if p.scope.known_var(var.name) {
@ -132,8 +128,8 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
p.comptime_if_cond = false p.comptime_if_cond = false
} }
comments << p.eat_comments() comments << p.eat_comments()
end_pos := p.prev_tok.position() end_pos := p.prev_tok.pos()
body_pos := p.tok.position() body_pos := p.tok.pos()
p.inside_if = false p.inside_if = false
p.open_scope() p.open_scope()
stmts := p.parse_block_no_scope(false) stmts := p.parse_block_no_scope(false)
@ -141,7 +137,7 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
cond: cond cond: cond
stmts: stmts stmts: stmts
pos: start_pos.extend(end_pos) pos: start_pos.extend(end_pos)
body_pos: body_pos.extend(p.prev_tok.position()) body_pos: body_pos.extend(p.prev_tok.pos())
comments: comments comments: comments
scope: p.scope scope: p.scope
} }
@ -178,7 +174,7 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
} }
fn (mut p Parser) match_expr() ast.MatchExpr { fn (mut p Parser) match_expr() ast.MatchExpr {
match_first_pos := p.tok.position() match_first_pos := p.tok.pos()
p.inside_match = true p.inside_match = true
p.check(.key_match) p.check(.key_match)
mut is_sum_type := false mut is_sum_type := false
@ -191,7 +187,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
comments := p.eat_comments() // comments before the first branch comments := p.eat_comments() // comments before the first branch
mut branches := []ast.MatchBranch{} mut branches := []ast.MatchBranch{}
for p.tok.kind != .eof { for p.tok.kind != .eof {
branch_first_pos := p.tok.position() branch_first_pos := p.tok.pos()
mut exprs := []ast.Expr{} mut exprs := []ast.Expr{}
mut ecmnts := [][]ast.Comment{} mut ecmnts := [][]ast.Comment{}
p.open_scope() p.open_scope()
@ -212,7 +208,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
types << parsed_type types << parsed_type
exprs << ast.TypeNode{ exprs << ast.TypeNode{
typ: parsed_type typ: parsed_type
pos: p.prev_tok.position() pos: p.prev_tok.pos()
} }
if p.tok.kind != .comma { if p.tok.kind != .comma {
break break
@ -237,7 +233,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
p.inside_match_case = false p.inside_match_case = false
if p.tok.kind == .dotdot { if p.tok.kind == .dotdot {
p.error_with_pos('match only supports inclusive (`...`) ranges, not exclusive (`..`)', p.error_with_pos('match only supports inclusive (`...`) ranges, not exclusive (`..`)',
p.tok.position()) p.tok.pos())
return ast.MatchExpr{} return ast.MatchExpr{}
} else if p.tok.kind == .ellipsis { } else if p.tok.kind == .ellipsis {
p.next() p.next()
@ -247,7 +243,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
high: expr2 high: expr2
has_low: true has_low: true
has_high: true has_high: true
pos: p.tok.position() pos: p.tok.pos()
} }
} else { } else {
exprs << expr exprs << expr
@ -267,7 +263,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
} }
} }
} }
branch_last_pos := p.prev_tok.position() branch_last_pos := p.prev_tok.pos()
// p.warn('match block') // p.warn('match block')
p.inside_match_body = true p.inside_match_body = true
stmts := p.parse_block_no_scope(false) stmts := p.parse_block_no_scope(false)
@ -275,7 +271,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
p.close_scope() p.close_scope()
p.inside_match_body = false p.inside_match_body = false
pos := branch_first_pos.extend_with_last_line(branch_last_pos, p.prev_tok.line_nr) pos := branch_first_pos.extend_with_last_line(branch_last_pos, p.prev_tok.line_nr)
branch_pos := branch_first_pos.extend_with_last_line(p.tok.position(), p.tok.line_nr) branch_pos := branch_first_pos.extend_with_last_line(p.tok.pos(), p.tok.line_nr)
post_comments := p.eat_comments() post_comments := p.eat_comments()
branches << ast.MatchBranch{ branches << ast.MatchBranch{
exprs: exprs exprs: exprs
@ -294,8 +290,8 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
break break
} }
} }
match_last_pos := p.tok.position() match_last_pos := p.tok.pos()
mut pos := token.Position{ mut pos := token.Pos{
line_nr: match_first_pos.line_nr line_nr: match_first_pos.line_nr
pos: match_first_pos.pos pos: match_first_pos.pos
len: match_last_pos.pos - match_first_pos.pos + match_last_pos.len len: match_last_pos.pos - match_first_pos.pos + match_last_pos.len
@ -316,7 +312,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
} }
fn (mut p Parser) select_expr() ast.SelectExpr { fn (mut p Parser) select_expr() ast.SelectExpr {
match_first_pos := p.tok.position() match_first_pos := p.tok.pos()
p.check(.key_select) p.check(.key_select)
no_lcbr := p.tok.kind != .lcbr no_lcbr := p.tok.kind != .lcbr
if !no_lcbr { if !no_lcbr {
@ -326,7 +322,7 @@ fn (mut p Parser) select_expr() ast.SelectExpr {
mut has_else := false mut has_else := false
mut has_timeout := false mut has_timeout := false
for { for {
branch_first_pos := p.tok.position() branch_first_pos := p.tok.pos()
comment := p.check_comment() // comment before {} comment := p.check_comment() // comment before {}
p.open_scope() p.open_scope()
// final else // final else
@ -336,12 +332,12 @@ fn (mut p Parser) select_expr() ast.SelectExpr {
if p.tok.kind == .key_else { if p.tok.kind == .key_else {
if has_timeout { if has_timeout {
p.error_with_pos('timeout `> t` and `else` are mutually exclusive `select` keys', p.error_with_pos('timeout `> t` and `else` are mutually exclusive `select` keys',
p.tok.position()) p.tok.pos())
return ast.SelectExpr{} return ast.SelectExpr{}
} }
if has_else { if has_else {
p.error_with_pos('at most one `else` branch allowed in `select` block', p.error_with_pos('at most one `else` branch allowed in `select` block',
p.tok.position()) p.tok.pos())
return ast.SelectExpr{} return ast.SelectExpr{}
} }
is_else = true is_else = true
@ -352,7 +348,7 @@ fn (mut p Parser) select_expr() ast.SelectExpr {
if p.tok.kind == .gt { if p.tok.kind == .gt {
is_gt = true is_gt = true
p.note_with_pos('`>` is deprecated and will soon be forbidden - just state the timeout in nanoseconds', p.note_with_pos('`>` is deprecated and will soon be forbidden - just state the timeout in nanoseconds',
p.tok.position()) p.tok.pos())
p.next() p.next()
} }
p.inside_match = true p.inside_match = true
@ -367,7 +363,7 @@ fn (mut p Parser) select_expr() ast.SelectExpr {
} else { } else {
stmt = ast.ExprStmt{ stmt = ast.ExprStmt{
expr: exprs[0] expr: exprs[0]
pos: exprs[0].position() pos: exprs[0].pos()
comments: [comment] comments: [comment]
is_expr: true is_expr: true
} }
@ -422,7 +418,7 @@ fn (mut p Parser) select_expr() ast.SelectExpr {
} }
else { else {
p.error_with_pos('select key: receive expression expected', p.error_with_pos('select key: receive expression expected',
stmt.right[0].position()) stmt.right[0].pos())
return ast.SelectExpr{} return ast.SelectExpr{}
} }
} }
@ -434,12 +430,12 @@ fn (mut p Parser) select_expr() ast.SelectExpr {
} }
} }
} }
branch_last_pos := p.tok.position() branch_last_pos := p.tok.pos()
p.inside_match_body = true p.inside_match_body = true
stmts := p.parse_block_no_scope(false) stmts := p.parse_block_no_scope(false)
p.close_scope() p.close_scope()
p.inside_match_body = false p.inside_match_body = false
mut pos := token.Position{ mut pos := token.Pos{
line_nr: branch_first_pos.line_nr line_nr: branch_first_pos.line_nr
pos: branch_first_pos.pos pos: branch_first_pos.pos
len: branch_last_pos.pos - branch_first_pos.pos + branch_last_pos.len len: branch_last_pos.pos - branch_first_pos.pos + branch_last_pos.len
@ -463,8 +459,8 @@ fn (mut p Parser) select_expr() ast.SelectExpr {
break break
} }
} }
match_last_pos := p.tok.position() match_last_pos := p.tok.pos()
pos := token.Position{ pos := token.Pos{
line_nr: match_first_pos.line_nr line_nr: match_first_pos.line_nr
pos: match_first_pos.pos pos: match_first_pos.pos
len: match_last_pos.pos - match_first_pos.pos + match_last_pos.len len: match_last_pos.pos - match_first_pos.pos + match_last_pos.len
@ -475,7 +471,7 @@ fn (mut p Parser) select_expr() ast.SelectExpr {
} }
return ast.SelectExpr{ return ast.SelectExpr{
branches: branches branches: branches
pos: pos.extend_with_last_line(p.prev_tok.position(), p.prev_tok.line_nr) pos: pos.extend_with_last_line(p.prev_tok.pos(), p.prev_tok.line_nr)
has_exception: has_else || has_timeout has_exception: has_else || has_timeout
} }
} }
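
Both match_expr() and select_expr() above assemble their overall span the same way: a token.Pos that starts at the first token, whose len is the distance between the first and last token offsets plus the last token's length, and which is later stretched to the closing line with extend_with_last_line(). A minimal illustrative sketch of that arithmetic, compiled against vlib's v.token; the concrete numbers are invented, only the field names and the formula come from the hunks above:

import v.token

fn main() {
	first := token.Pos{ line_nr: 10, pos: 120, len: 5 } // e.g. at the `match`/`select` keyword
	last := token.Pos{ line_nr: 14, pos: 200, len: 1 } // e.g. at the last token of the final branch
	span := token.Pos{
		line_nr: first.line_nr
		pos: first.pos
		len: last.pos - first.pos + last.len // 200 - 120 + 1 = 81 bytes covered
	}
	println(span.len) // 81
}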


@ -6,12 +6,12 @@ import v.ast
// parse `x` or `x.y.z` - no index, no struct literals (`{` starts lock block) // parse `x` or `x.y.z` - no index, no struct literals (`{` starts lock block)
fn (mut p Parser) lockable() ast.Expr { fn (mut p Parser) lockable() ast.Expr {
mut names := []string{} mut names := []string{}
mut positions := []token.Position{} mut positions := []token.Pos{}
mut pos := p.tok.position() mut pos := p.tok.pos()
for { for {
if p.tok.kind != .name { if p.tok.kind != .name {
p.error_with_pos('unexpected `$p.tok.lit` (field/variable name expected)', p.error_with_pos('unexpected `$p.tok.lit` (field/variable name expected)',
p.tok.position()) p.tok.pos())
} }
names << p.tok.lit names << p.tok.lit
positions << pos positions << pos
@ -20,7 +20,7 @@ fn (mut p Parser) lockable() ast.Expr {
break break
} }
p.next() p.next()
pos.extend(p.tok.position()) pos.extend(p.tok.pos())
} }
mut expr := ast.Expr(ast.Ident{ mut expr := ast.Expr(ast.Ident{
language: ast.Language.v language: ast.Language.v
@ -67,14 +67,14 @@ fn (mut p Parser) lock_expr() ast.LockExpr {
// TODO Handle aliasing sync // TODO Handle aliasing sync
p.register_auto_import('sync') p.register_auto_import('sync')
p.open_scope() p.open_scope()
mut pos := p.tok.position() mut pos := p.tok.pos()
mut lockeds := []ast.Expr{} mut lockeds := []ast.Expr{}
mut comments := []ast.Comment{} mut comments := []ast.Comment{}
mut is_rlocked := []bool{} mut is_rlocked := []bool{}
for { for {
is_rlock := p.tok.kind == .key_rlock is_rlock := p.tok.kind == .key_rlock
if !is_rlock && p.tok.kind != .key_lock { if !is_rlock && p.tok.kind != .key_lock {
p.error_with_pos('unexpected `$p.tok`, expected `lock` or `rlock`', p.tok.position()) p.error_with_pos('unexpected `$p.tok`, expected `lock` or `rlock`', p.tok.pos())
} }
p.next() p.next()
if p.tok.kind == .lcbr { if p.tok.kind == .lcbr {
@ -85,7 +85,7 @@ fn (mut p Parser) lock_expr() ast.LockExpr {
for e in exprs { for e in exprs {
if !e.is_lockable() { if !e.is_lockable() {
p.error_with_pos('`$e` cannot be locked - only `x` or `x.y` are supported', p.error_with_pos('`$e` cannot be locked - only `x` or `x.y` are supported',
e.position()) e.pos())
} }
lockeds << e lockeds << e
is_rlocked << is_rlock is_rlocked << is_rlock


@ -36,7 +36,7 @@ fn (mut p Parser) register_auto_import(alias string) {
p.imports[alias] = alias p.imports[alias] = alias
p.table.imports << alias p.table.imports << alias
node := ast.Import{ node := ast.Import{
pos: p.tok.position() pos: p.tok.pos()
mod: alias mod: alias
alias: alias alias: alias
} }


@ -60,7 +60,7 @@ pub fn (mut p Parser) parse_array_type(expecting token.Kind) ast.Type {
} }
else { else {
p.error_with_pos('fixed array size cannot use non-constant value', p.error_with_pos('fixed array size cannot use non-constant value',
size_expr.position()) size_expr.pos())
} }
} }
} }
@ -71,7 +71,7 @@ pub fn (mut p Parser) parse_array_type(expecting token.Kind) ast.Type {
return 0 return 0
} }
if fixed_size <= 0 { if fixed_size <= 0 {
p.error_with_pos('fixed size cannot be zero or negative', size_expr.position()) p.error_with_pos('fixed size cannot be zero or negative', size_expr.pos())
} }
// sym := p.table.sym(elem_type) // sym := p.table.sym(elem_type)
idx := p.table.find_or_register_array_fixed(elem_type, fixed_size, size_expr) idx := p.table.find_or_register_array_fixed(elem_type, fixed_size, size_expr)
@ -128,7 +128,7 @@ pub fn (mut p Parser) parse_map_type() ast.Type {
} }
s := p.table.type_to_str(key_type) s := p.table.type_to_str(key_type)
p.error_with_pos('maps only support string, integer, float, rune, enum or voidptr keys for now (not `$s`)', p.error_with_pos('maps only support string, integer, float, rune, enum or voidptr keys for now (not `$s`)',
p.tok.position()) p.tok.pos())
return 0 return 0
} }
p.check(.rsbr) p.check(.rsbr)
@ -138,7 +138,7 @@ pub fn (mut p Parser) parse_map_type() ast.Type {
return 0 return 0
} }
if value_type.idx() == ast.void_type_idx { if value_type.idx() == ast.void_type_idx {
p.error_with_pos('map value type cannot be void', p.tok.position()) p.error_with_pos('map value type cannot be void', p.tok.pos())
return 0 return 0
} }
idx := p.table.find_or_register_map(key_type, value_type) idx := p.table.find_or_register_map(key_type, value_type)
@ -238,14 +238,14 @@ pub fn (mut p Parser) parse_fn_type(name string) ast.Type {
} }
} }
mut return_type := ast.void_type mut return_type := ast.void_type
mut return_type_pos := token.Position{} mut return_type_pos := token.Pos{}
if p.tok.line_nr == line_nr && p.tok.kind.is_start_of_type() && !p.is_attributes() { if p.tok.line_nr == line_nr && p.tok.kind.is_start_of_type() && !p.is_attributes() {
return_type_pos = p.tok.position() return_type_pos = p.tok.pos()
return_type = p.parse_type() return_type = p.parse_type()
if return_type.has_flag(.generic) { if return_type.has_flag(.generic) {
has_generic = true has_generic = true
} }
return_type_pos = return_type_pos.extend(p.prev_tok.position()) return_type_pos = return_type_pos.extend(p.prev_tok.pos())
} }
func := ast.Fn{ func := ast.Fn{
name: name name: name
@ -335,11 +335,11 @@ pub fn (mut p Parser) parse_sum_type_variants() []ast.TypeNode {
} }
mut types := []ast.TypeNode{} mut types := []ast.TypeNode{}
for { for {
type_start_pos := p.tok.position() type_start_pos := p.tok.pos()
typ := p.parse_type() typ := p.parse_type()
// TODO: needs to be its own var, otherwise TCC fails because of a known stack error // TODO: needs to be its own var, otherwise TCC fails because of a known stack error
prev_tok := p.prev_tok prev_tok := p.prev_tok
type_end_pos := prev_tok.position() type_end_pos := prev_tok.pos()
type_pos := type_start_pos.extend(type_end_pos) type_pos := type_start_pos.extend(type_end_pos)
types << ast.TypeNode{ types << ast.TypeNode{
typ: typ typ: typ
@ -356,7 +356,7 @@ pub fn (mut p Parser) parse_sum_type_variants() []ast.TypeNode {
pub fn (mut p Parser) parse_type() ast.Type { pub fn (mut p Parser) parse_type() ast.Type {
// optional // optional
mut is_optional := false mut is_optional := false
optional_pos := p.tok.position() optional_pos := p.tok.pos()
if p.tok.kind == .question { if p.tok.kind == .question {
line_nr := p.tok.line_nr line_nr := p.tok.line_nr
p.next() p.next()
@ -394,7 +394,7 @@ pub fn (mut p Parser) parse_type() ast.Type {
mut typ := ast.void_type mut typ := ast.void_type
is_array := p.tok.kind == .lsbr is_array := p.tok.kind == .lsbr
if p.tok.kind != .lcbr { if p.tok.kind != .lcbr {
pos := p.tok.position() pos := p.tok.pos()
typ = p.parse_any_type(language, nr_muls > 0, true) typ = p.parse_any_type(language, nr_muls > 0, true)
if typ.idx() == 0 { if typ.idx() == 0 {
// error is set in parse_type // error is set in parse_type
@ -406,7 +406,7 @@ pub fn (mut p Parser) parse_type() ast.Type {
} }
sym := p.table.sym(typ) sym := p.table.sym(typ)
if is_optional && sym.info is ast.SumType && (sym.info as ast.SumType).is_anon { if is_optional && sym.info is ast.SumType && (sym.info as ast.SumType).is_anon {
p.error_with_pos('an inline sum type cannot be optional', optional_pos.extend(p.prev_tok.position())) p.error_with_pos('an inline sum type cannot be optional', optional_pos.extend(p.prev_tok.pos()))
} }
} }
if is_optional { if is_optional {
@ -439,12 +439,12 @@ pub fn (mut p Parser) parse_any_type(language ast.Language, is_ptr bool, check_d
} else if p.peek_tok.kind == .dot && check_dot { } else if p.peek_tok.kind == .dot && check_dot {
// `module.Type` // `module.Type`
mut mod := name mut mod := name
mut mod_pos := p.tok.position() mut mod_pos := p.tok.pos()
p.next() p.next()
p.check(.dot) p.check(.dot)
mut mod_last_part := mod mut mod_last_part := mod
for p.peek_tok.kind == .dot { for p.peek_tok.kind == .dot {
mod_pos = mod_pos.extend(p.tok.position()) mod_pos = mod_pos.extend(p.tok.pos())
mod_last_part = p.tok.lit mod_last_part = p.tok.lit
mod += '.$mod_last_part' mod += '.$mod_last_part'
p.next() p.next()
@ -615,7 +615,7 @@ pub fn (mut p Parser) parse_generic_type(name string) ast.Type {
pub fn (mut p Parser) parse_generic_inst_type(name string) ast.Type { pub fn (mut p Parser) parse_generic_inst_type(name string) ast.Type {
mut bs_name := name mut bs_name := name
mut bs_cname := name mut bs_cname := name
start_pos := p.tok.position() start_pos := p.tok.pos()
p.next() p.next()
p.inside_generic_params = true p.inside_generic_params = true
bs_name += '<' bs_name += '<'
@ -638,7 +638,7 @@ pub fn (mut p Parser) parse_generic_inst_type(name string) ast.Type {
bs_name += ', ' bs_name += ', '
bs_cname += '_' bs_cname += '_'
} }
concrete_types_pos := start_pos.extend(p.tok.position()) concrete_types_pos := start_pos.extend(p.tok.pos())
p.check(.gt) p.check(.gt)
p.inside_generic_params = false p.inside_generic_params = false
bs_name += '>' bs_name += '>'

File diff suppressed because it is too large


@ -7,11 +7,11 @@ import v.ast
fn (mut p Parser) sql_expr() ast.Expr { fn (mut p Parser) sql_expr() ast.Expr {
// `sql db {` // `sql db {`
pos := p.tok.position() pos := p.tok.pos()
p.check_name() p.check_name()
db_expr := p.check_expr(0) or { db_expr := p.check_expr(0) or {
p.error_with_pos('invalid expression: unexpected $p.tok, expecting database', p.error_with_pos('invalid expression: unexpected $p.tok, expecting database',
p.tok.position()) p.tok.pos())
} }
p.check(.lcbr) p.check(.lcbr)
p.check(.key_select) p.check(.key_select)
@ -22,7 +22,7 @@ fn (mut p Parser) sql_expr() ast.Expr {
p.check_name() // from p.check_name() // from
typ = ast.int_type typ = ast.int_type
} }
table_pos := p.tok.position() table_pos := p.tok.pos()
table_type := p.parse_type() // `User` table_type := p.parse_type() // `User`
mut where_expr := ast.empty_expr() mut where_expr := ast.empty_expr()
has_where := p.tok.kind == .name && p.tok.lit == 'where' has_where := p.tok.kind == .name && p.tok.lit == 'where'
@ -49,7 +49,7 @@ fn (mut p Parser) sql_expr() ast.Expr {
mut has_desc := false mut has_desc := false
if p.tok.kind == .name && p.tok.lit == 'order' { if p.tok.kind == .name && p.tok.lit == 'order' {
p.check_name() // `order` p.check_name() // `order`
order_pos := p.tok.position() order_pos := p.tok.pos()
if p.tok.kind == .name && p.tok.lit == 'by' { if p.tok.kind == .name && p.tok.lit == 'by' {
p.check_name() // `by` p.check_name() // `by`
} else { } else {
@ -100,7 +100,7 @@ fn (mut p Parser) sql_expr() ast.Expr {
order_expr: order_expr order_expr: order_expr
has_desc: has_desc has_desc: has_desc
is_array: !query_one is_array: !query_one
pos: pos.extend(p.prev_tok.position()) pos: pos.extend(p.prev_tok.pos())
table_expr: ast.TypeNode{ table_expr: ast.TypeNode{
typ: table_type typ: table_type
pos: table_pos pos: table_pos
@ -111,7 +111,7 @@ fn (mut p Parser) sql_expr() ast.Expr {
// insert user into User // insert user into User
// update User set nr_orders=nr_orders+1 where id == user_id // update User set nr_orders=nr_orders+1 where id == user_id
fn (mut p Parser) sql_stmt() ast.SqlStmt { fn (mut p Parser) sql_stmt() ast.SqlStmt {
mut pos := p.tok.position() mut pos := p.tok.pos()
p.inside_match = true p.inside_match = true
defer { defer {
p.inside_match = false p.inside_match = false
@ -120,7 +120,7 @@ fn (mut p Parser) sql_stmt() ast.SqlStmt {
p.check_name() p.check_name()
db_expr := p.check_expr(0) or { db_expr := p.check_expr(0) or {
p.error_with_pos('invalid expression: unexpected $p.tok, expecting database', p.error_with_pos('invalid expression: unexpected $p.tok, expecting database',
p.tok.position()) p.tok.pos())
} }
// println(typeof(db_expr)) // println(typeof(db_expr))
p.check(.lcbr) p.check(.lcbr)
@ -134,7 +134,7 @@ fn (mut p Parser) sql_stmt() ast.SqlStmt {
p.next() p.next()
pos.last_line = p.prev_tok.line_nr pos.last_line = p.prev_tok.line_nr
return ast.SqlStmt{ return ast.SqlStmt{
pos: pos.extend(p.prev_tok.position()) pos: pos.extend(p.prev_tok.pos())
db_expr: db_expr db_expr: db_expr
lines: lines lines: lines
} }
@ -142,7 +142,7 @@ fn (mut p Parser) sql_stmt() ast.SqlStmt {
fn (mut p Parser) parse_sql_stmt_line() ast.SqlStmtLine { fn (mut p Parser) parse_sql_stmt_line() ast.SqlStmtLine {
mut n := p.check_name() // insert mut n := p.check_name() // insert
pos := p.tok.position() pos := p.tok.pos()
mut kind := ast.SqlStmtKind.insert mut kind := ast.SqlStmtKind.insert
if n == 'delete' { if n == 'delete' {
kind = .delete kind = .delete
@ -156,10 +156,10 @@ fn (mut p Parser) parse_sql_stmt_line() ast.SqlStmtLine {
return ast.SqlStmtLine{} return ast.SqlStmtLine{}
} }
typ := p.parse_type() typ := p.parse_type()
typ_pos := p.tok.position() typ_pos := p.tok.pos()
return ast.SqlStmtLine{ return ast.SqlStmtLine{
kind: kind kind: kind
pos: pos.extend(p.prev_tok.position()) pos: pos.extend(p.prev_tok.pos())
table_expr: ast.TypeNode{ table_expr: ast.TypeNode{
typ: typ typ: typ
pos: typ_pos pos: typ_pos
@ -173,10 +173,10 @@ fn (mut p Parser) parse_sql_stmt_line() ast.SqlStmtLine {
return ast.SqlStmtLine{} return ast.SqlStmtLine{}
} }
typ := p.parse_type() typ := p.parse_type()
typ_pos := p.tok.position() typ_pos := p.tok.pos()
return ast.SqlStmtLine{ return ast.SqlStmtLine{
kind: kind kind: kind
pos: pos.extend(p.prev_tok.position()) pos: pos.extend(p.prev_tok.pos())
table_expr: ast.TypeNode{ table_expr: ast.TypeNode{
typ: typ typ: typ
pos: typ_pos pos: typ_pos
@ -225,16 +225,16 @@ fn (mut p Parser) parse_sql_stmt_line() ast.SqlStmtLine {
return ast.SqlStmtLine{} return ast.SqlStmtLine{}
} }
mut table_pos := p.tok.position() mut table_pos := p.tok.pos()
mut where_expr := ast.empty_expr() mut where_expr := ast.empty_expr()
if kind == .insert { if kind == .insert {
table_pos = p.tok.position() table_pos = p.tok.pos()
table_type = p.parse_type() table_type = p.parse_type()
} else if kind == .update { } else if kind == .update {
p.check_sql_keyword('where') or { return ast.SqlStmtLine{} } p.check_sql_keyword('where') or { return ast.SqlStmtLine{} }
where_expr = p.expr(0) where_expr = p.expr(0)
} else if kind == .delete { } else if kind == .delete {
table_pos = p.tok.position() table_pos = p.tok.pos()
table_type = p.parse_type() table_type = p.parse_type()
p.check_sql_keyword('where') or { return ast.SqlStmtLine{} } p.check_sql_keyword('where') or { return ast.SqlStmtLine{} }
where_expr = p.expr(0) where_expr = p.expr(0)


@ -12,7 +12,7 @@ fn (mut p Parser) struct_decl() ast.StructDecl {
// save attributes, they will be changed later in fields // save attributes, they will be changed later in fields
attrs := p.attrs attrs := p.attrs
p.attrs = [] p.attrs = []
start_pos := p.tok.position() start_pos := p.tok.pos()
is_pub := p.tok.kind == .key_pub is_pub := p.tok.kind == .key_pub
if is_pub { if is_pub {
p.next() p.next()
@ -34,7 +34,7 @@ fn (mut p Parser) struct_decl() ast.StructDecl {
p.next() // C || JS p.next() // C || JS
p.next() // . p.next() // .
} }
name_pos := p.tok.position() name_pos := p.tok.pos()
p.check_for_impure_v(language, name_pos) p.check_for_impure_v(language, name_pos)
mut name := p.check_name() mut name := p.check_name()
// defer { // defer {
@ -90,7 +90,7 @@ fn (mut p Parser) struct_decl() ast.StructDecl {
mut is_field_mut := false mut is_field_mut := false
mut is_field_pub := false mut is_field_pub := false
mut is_field_global := false mut is_field_global := false
mut last_line := p.prev_tok.position().line_nr + 1 mut last_line := p.prev_tok.pos().line_nr + 1
mut end_comments := []ast.Comment{} mut end_comments := []ast.Comment{}
if !no_body { if !no_body {
p.check(.lcbr) p.check(.lcbr)
@ -169,7 +169,7 @@ fn (mut p Parser) struct_decl() ast.StructDecl {
break break
} }
} }
field_start_pos := p.tok.position() field_start_pos := p.tok.pos()
mut is_field_volatile := false mut is_field_volatile := false
if p.tok.kind == .key_volatile { if p.tok.kind == .key_volatile {
p.next() p.next()
@ -180,14 +180,14 @@ fn (mut p Parser) struct_decl() ast.StructDecl {
is_on_top := ast_fields.len == 0 && !(is_field_mut || is_field_global) is_on_top := ast_fields.len == 0 && !(is_field_mut || is_field_global)
mut field_name := '' mut field_name := ''
mut typ := ast.Type(0) mut typ := ast.Type(0)
mut type_pos := token.Position{} mut type_pos := token.Pos{}
mut field_pos := token.Position{} mut field_pos := token.Pos{}
if is_embed { if is_embed {
// struct embedding // struct embedding
type_pos = p.tok.position() type_pos = p.tok.pos()
typ = p.parse_type() typ = p.parse_type()
comments << p.eat_comments() comments << p.eat_comments()
type_pos = type_pos.extend(p.prev_tok.position()) type_pos = type_pos.extend(p.prev_tok.pos())
if !is_on_top { if !is_on_top {
p.error_with_pos('struct embedding must be declared at the beginning of the struct body', p.error_with_pos('struct embedding must be declared at the beginning of the struct body',
type_pos) type_pos)
@ -224,7 +224,7 @@ fn (mut p Parser) struct_decl() ast.StructDecl {
// error is set in parse_type // error is set in parse_type
return ast.StructDecl{} return ast.StructDecl{}
} }
type_pos = p.prev_tok.position() type_pos = p.prev_tok.pos()
field_pos = field_start_pos.extend(type_pos) field_pos = field_start_pos.extend(type_pos)
} }
// Comments after type (same line) // Comments after type (same line)
@ -336,7 +336,7 @@ fn (mut p Parser) struct_decl() ast.StructDecl {
} }
fn (mut p Parser) struct_init(typ_str string, short_syntax bool) ast.StructInit { fn (mut p Parser) struct_init(typ_str string, short_syntax bool) ast.StructInit {
first_pos := (if short_syntax && p.prev_tok.kind == .lcbr { p.prev_tok } else { p.tok }).position() first_pos := (if short_syntax && p.prev_tok.kind == .lcbr { p.prev_tok } else { p.tok }).pos()
typ := if short_syntax { ast.void_type } else { p.parse_type() } typ := if short_syntax { ast.void_type } else { p.parse_type() }
p.expr_mod = '' p.expr_mod = ''
// sym := p.table.sym(typ) // sym := p.table.sym(typ)
@ -356,15 +356,15 @@ fn (mut p Parser) struct_init(typ_str string, short_syntax bool) ast.StructInit
for p.tok.kind !in [.rcbr, .rpar, .eof] { for p.tok.kind !in [.rcbr, .rpar, .eof] {
mut field_name := '' mut field_name := ''
mut expr := ast.empty_expr() mut expr := ast.empty_expr()
mut field_pos := token.Position{} mut field_pos := token.Pos{}
mut first_field_pos := token.Position{} mut first_field_pos := token.Pos{}
mut comments := []ast.Comment{} mut comments := []ast.Comment{}
mut nline_comments := []ast.Comment{} mut nline_comments := []ast.Comment{}
is_update_expr := fields.len == 0 && p.tok.kind == .ellipsis is_update_expr := fields.len == 0 && p.tok.kind == .ellipsis
if no_keys { if no_keys {
// name will be set later in checker // name will be set later in checker
expr = p.expr(0) expr = p.expr(0)
field_pos = expr.position() field_pos = expr.pos()
first_field_pos = field_pos first_field_pos = field_pos
comments = p.eat_comments(same_line: true) comments = p.eat_comments(same_line: true)
} else if is_update_expr { } else if is_update_expr {
@ -374,18 +374,18 @@ fn (mut p Parser) struct_init(typ_str string, short_syntax bool) ast.StructInit
update_expr_comments << p.eat_comments(same_line: true) update_expr_comments << p.eat_comments(same_line: true)
has_update_expr = true has_update_expr = true
} else { } else {
first_field_pos = p.tok.position() first_field_pos = p.tok.pos()
field_name = p.check_name() field_name = p.check_name()
p.check(.colon) p.check(.colon)
expr = p.expr(0) expr = p.expr(0)
comments = p.eat_comments(same_line: true) comments = p.eat_comments(same_line: true)
last_field_pos := expr.position() last_field_pos := expr.pos()
field_len := if last_field_pos.len > 0 { field_len := if last_field_pos.len > 0 {
last_field_pos.pos - first_field_pos.pos + last_field_pos.len last_field_pos.pos - first_field_pos.pos + last_field_pos.len
} else { } else {
first_field_pos.len + 1 first_field_pos.len + 1
} }
field_pos = token.Position{ field_pos = token.Pos{
line_nr: first_field_pos.line_nr line_nr: first_field_pos.line_nr
pos: first_field_pos.pos pos: first_field_pos.pos
len: field_len len: field_len
@ -423,7 +423,7 @@ fn (mut p Parser) struct_init(typ_str string, short_syntax bool) ast.StructInit
update_expr_comments: update_expr_comments update_expr_comments: update_expr_comments
has_update_expr: has_update_expr has_update_expr: has_update_expr
name_pos: first_pos name_pos: first_pos
pos: first_pos.extend(if short_syntax { p.tok.position() } else { p.prev_tok.position() }) pos: first_pos.extend(if short_syntax { p.tok.pos() } else { p.prev_tok.pos() })
is_short: no_keys is_short: no_keys
pre_comments: pre_comments pre_comments: pre_comments
} }
@ -431,7 +431,7 @@ fn (mut p Parser) struct_init(typ_str string, short_syntax bool) ast.StructInit
fn (mut p Parser) interface_decl() ast.InterfaceDecl { fn (mut p Parser) interface_decl() ast.InterfaceDecl {
p.top_level_statement_start() p.top_level_statement_start()
mut pos := p.tok.position() mut pos := p.tok.pos()
attrs := p.attrs attrs := p.attrs
is_pub := p.tok.kind == .key_pub is_pub := p.tok.kind == .key_pub
if is_pub { if is_pub {
@ -449,7 +449,7 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
p.next() // C || JS p.next() // C || JS
p.next() // . p.next() // .
} }
name_pos := p.tok.position() name_pos := p.tok.pos()
p.check_for_impure_v(language, name_pos) p.check_for_impure_v(language, name_pos)
modless_name := p.check_name() modless_name := p.check_name()
mut interface_name := '' mut interface_name := ''
@ -499,7 +499,7 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
mut ifaces := []ast.InterfaceEmbedding{} mut ifaces := []ast.InterfaceEmbedding{}
for p.tok.kind != .rcbr && p.tok.kind != .eof { for p.tok.kind != .rcbr && p.tok.kind != .eof {
if p.tok.kind == .name && p.tok.lit.len > 0 && p.tok.lit[0].is_capital() { if p.tok.kind == .name && p.tok.lit.len > 0 && p.tok.lit[0].is_capital() {
iface_pos := p.tok.position() iface_pos := p.tok.pos()
mut iface_name := p.tok.lit mut iface_name := p.tok.lit
iface_type := p.parse_type() iface_type := p.parse_type()
if iface_name == 'JS' { if iface_name == 'JS' {
@ -519,7 +519,7 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
} }
if p.tok.kind == .key_mut { if p.tok.kind == .key_mut {
if is_mut { if is_mut {
p.error_with_pos('redefinition of `mut` section', p.tok.position()) p.error_with_pos('redefinition of `mut` section', p.tok.pos())
return ast.InterfaceDecl{} return ast.InterfaceDecl{}
} }
p.next() p.next()
@ -528,7 +528,7 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
mut_pos = fields.len mut_pos = fields.len
} }
if p.peek_tok.kind == .lpar { if p.peek_tok.kind == .lpar {
method_start_pos := p.tok.position() method_start_pos := p.tok.pos()
line_nr := p.tok.line_nr line_nr := p.tok.line_nr
name := p.check_name() name := p.check_name()
@ -563,13 +563,13 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
return_type: ast.void_type return_type: ast.void_type
is_variadic: is_variadic is_variadic: is_variadic
is_pub: true is_pub: true
pos: method_start_pos.extend(p.prev_tok.position()) pos: method_start_pos.extend(p.prev_tok.pos())
scope: p.scope scope: p.scope
} }
if p.tok.kind.is_start_of_type() && p.tok.line_nr == line_nr { if p.tok.kind.is_start_of_type() && p.tok.line_nr == line_nr {
method.return_type_pos = p.tok.position() method.return_type_pos = p.tok.pos()
method.return_type = p.parse_type() method.return_type = p.parse_type()
method.return_type_pos = method.return_type_pos.extend(p.tok.position()) method.return_type_pos = method.return_type_pos.extend(p.tok.pos())
method.pos = method.pos.extend(method.return_type_pos) method.pos = method.pos.extend(method.return_type_pos)
} }
mcomments := p.eat_comments(same_line: true) mcomments := p.eat_comments(same_line: true)
@ -592,11 +592,11 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
info.methods << tmethod info.methods << tmethod
} else { } else {
// interface fields // interface fields
field_pos := p.tok.position() field_pos := p.tok.pos()
field_name := p.check_name() field_name := p.check_name()
mut type_pos := p.tok.position() mut type_pos := p.tok.pos()
field_typ := p.parse_type() field_typ := p.parse_type()
type_pos = type_pos.extend(p.prev_tok.position()) type_pos = type_pos.extend(p.prev_tok.pos())
mut comments := []ast.Comment{} mut comments := []ast.Comment{}
for p.tok.kind == .comment { for p.tok.kind == .comment {
comments << p.comment() comments << p.comment()
@ -624,7 +624,7 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
ts.info = info ts.info = info
p.top_level_statement_end() p.top_level_statement_end()
p.check(.rcbr) p.check(.rcbr)
pos = pos.extend_with_last_line(p.prev_tok.position(), p.prev_tok.line_nr) pos = pos.extend_with_last_line(p.prev_tok.pos(), p.prev_tok.line_nr)
res := ast.InterfaceDecl{ res := ast.InterfaceDecl{
name: interface_name name: interface_name
language: language language: language


@ -1,21 +1,21 @@
vlib/v/parser/tests/inline_sum_type_return_type_too_many_variants.vv:4:6: warning: an inline sum type expects a maximum of 3 types (5 were given) vlib/v/parser/tests/inline_sum_type_return_type_too_many_variants.vv:4:6: warning: an inline sum type expects a maximum of 3 types (5 were given)
2 | 2 |
3 | struct Foo { 3 | struct Foo {
4 | bar int | string | token.Position | bool | u32 4 | bar int | string | token.Pos | bool | u32
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 | } 5 | }
6 | 6 |
vlib/v/parser/tests/inline_sum_type_return_type_too_many_variants.vv:7:12: warning: an inline sum type expects a maximum of 3 types (5 were given) vlib/v/parser/tests/inline_sum_type_return_type_too_many_variants.vv:7:12: warning: an inline sum type expects a maximum of 3 types (5 were given)
5 | } 5 | }
6 | 6 |
7 | fn foo(arg int | string | token.Position | bool | u32) int | string | token.Position | bool | u32 { 7 | fn foo(arg int | string | token.Pos | bool | u32) int | string | token.Pos | bool | u32 {
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
8 | return 1 8 | return 1
9 | } 9 | }
vlib/v/parser/tests/inline_sum_type_return_type_too_many_variants.vv:7:56: warning: an inline sum type expects a maximum of 3 types (5 were given) vlib/v/parser/tests/inline_sum_type_return_type_too_many_variants.vv:7:51: warning: an inline sum type expects a maximum of 3 types (5 were given)
5 | } 5 | }
6 | 6 |
7 | fn foo(arg int | string | token.Position | bool | u32) int | string | token.Position | bool | u32 { 7 | fn foo(arg int | string | token.Pos | bool | u32) int | string | token.Pos | bool | u32 {
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
8 | return 1 8 | return 1
9 | } 9 | }


@ -1,9 +1,9 @@
import v.token import v.token
struct Foo { struct Foo {
bar int | string | token.Position | bool | u32 bar int | string | token.Pos | bool | u32
} }
fn foo(arg int | string | token.Position | bool | u32) int | string | token.Position | bool | u32 { fn foo(arg int | string | token.Pos | bool | u32) int | string | token.Pos | bool | u32 {
return 1 return 1
} }


@ -133,7 +133,7 @@ fn vweb_tmpl_${fn_name}() string {
p.error_with_error(errors.Error{ p.error_with_error(errors.Error{
message: "Please use @include 'header' instead of @header (deprecated)" message: "Please use @include 'header' instead of @header (deprecated)"
file_path: template_file file_path: template_file
pos: token.Position{ pos: token.Pos{
len: '@header'.len len: '@header'.len
line_nr: tline_number line_nr: tline_number
pos: start_of_line_pos + position pos: start_of_line_pos + position
@ -148,7 +148,7 @@ fn vweb_tmpl_${fn_name}() string {
p.error_with_error(errors.Error{ p.error_with_error(errors.Error{
message: "Please use @include 'footer' instead of @footer (deprecated)" message: "Please use @include 'footer' instead of @footer (deprecated)"
file_path: template_file file_path: template_file
pos: token.Position{ pos: token.Pos{
len: '@footer'.len len: '@footer'.len
line_nr: tline_number line_nr: tline_number
pos: start_of_line_pos + position pos: start_of_line_pos + position
@ -182,7 +182,7 @@ fn vweb_tmpl_${fn_name}() string {
message: 'Reading file $file_name from path: $file_path failed' message: 'Reading file $file_name from path: $file_path failed'
details: "Failed to @include '$file_name'" details: "Failed to @include '$file_name'"
file_path: template_file file_path: template_file
pos: token.Position{ pos: token.Pos{
len: '@include '.len + file_name.len len: '@include '.len + file_name.len
line_nr: tline_number line_nr: tline_number
pos: start_of_line_pos + position pos: start_of_line_pos + position


@ -1143,7 +1143,7 @@ fn (s &Scanner) count_symbol_before(p int, sym byte) int {
[direct_array_access] [direct_array_access]
fn (mut s Scanner) ident_string() string { fn (mut s Scanner) ident_string() string {
lspos := token.Position{ lspos := token.Pos{
line_nr: s.line_nr line_nr: s.line_nr
pos: s.pos pos: s.pos
col: s.pos - s.last_nl_pos - 1 col: s.pos - s.last_nl_pos - 1
@ -1375,7 +1375,7 @@ fn trim_slash_line_break(s string) string {
/// escaped utf8 runes in hex like `\xe2\x98\x85` => (★) /// escaped utf8 runes in hex like `\xe2\x98\x85` => (★)
/// escaped utf8 runes in octal like `\342\230\205` => (★) /// escaped utf8 runes in octal like `\342\230\205` => (★)
fn (mut s Scanner) ident_char() string { fn (mut s Scanner) ident_char() string {
lspos := token.Position{ lspos := token.Pos{
line_nr: s.line_nr line_nr: s.line_nr
pos: s.pos pos: s.pos
col: s.pos - s.last_nl_pos - 1 col: s.pos - s.last_nl_pos - 1
@ -1498,7 +1498,7 @@ fn (mut s Scanner) inc_line_number() {
} }
pub fn (mut s Scanner) note(msg string) { pub fn (mut s Scanner) note(msg string) {
pos := token.Position{ pos := token.Pos{
line_nr: s.line_nr line_nr: s.line_nr
pos: s.pos pos: s.pos
} }
@ -1519,7 +1519,7 @@ pub fn (mut s Scanner) add_error_detail(msg string) {
s.error_details << msg s.error_details << msg
} }
pub fn (mut s Scanner) add_error_detail_with_pos(msg string, pos token.Position) { pub fn (mut s Scanner) add_error_detail_with_pos(msg string, pos token.Pos) {
details := util.formatted_error('details:', msg, s.file_path, pos) details := util.formatted_error('details:', msg, s.file_path, pos)
s.add_error_detail(details) s.add_error_detail(details)
} }
@ -1538,7 +1538,7 @@ pub fn (mut s Scanner) warn(msg string) {
s.error(msg) s.error(msg)
return return
} }
pos := token.Position{ pos := token.Pos{
line_nr: s.line_nr line_nr: s.line_nr
pos: s.pos pos: s.pos
col: s.current_column() - 1 col: s.current_column() - 1
@ -1565,7 +1565,7 @@ pub fn (mut s Scanner) warn(msg string) {
} }
pub fn (mut s Scanner) error(msg string) { pub fn (mut s Scanner) error(msg string) {
pos := token.Position{ pos := token.Pos{
line_nr: s.line_nr line_nr: s.line_nr
pos: s.pos pos: s.pos
col: s.current_column() - 1 col: s.current_column() - 1
@ -1599,7 +1599,7 @@ fn (mut s Scanner) vet_error(msg string, fix vet.FixKind) {
ve := vet.Error{ ve := vet.Error{
message: msg message: msg
file_path: s.file_path file_path: s.file_path
pos: token.Position{ pos: token.Pos{
line_nr: s.line_nr line_nr: s.line_nr
col: s.current_column() - 1 col: s.current_column() - 1
} }
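
The scanner hunks above all hand-build a token.Pos at the current scan point and derive the column from the offset of the last newline (s.pos - s.last_nl_pos - 1, or s.current_column() - 1). A small sketch of that relationship; the variable names scan_offset and last_nl stand in for the scanner's s.pos and s.last_nl_pos, and the numbers are made up:

import v.token

fn main() {
	scan_offset := 57 // stand-in for s.pos: current byte offset in the source
	last_nl := 40 // stand-in for s.last_nl_pos: offset of the most recent newline
	pos := token.Pos{
		line_nr: 3
		pos: scan_offset
		col: scan_offset - last_nl - 1 // 57 - 40 - 1 = 16, as in ident_string()/ident_char()
	}
	println(pos.col) // 16
}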


@ -160,7 +160,7 @@ fn test_embed_method_generic() {
type Piece = King | Queen type Piece = King | Queen
struct Position { struct Pos {
x byte x byte
y byte y byte
} }
@ -171,11 +171,11 @@ enum TeamEnum {
} }
struct PieceCommonFields { struct PieceCommonFields {
pos Position pos Pos
team TeamEnum team TeamEnum
} }
fn (p PieceCommonFields) get_pos() Position { fn (p PieceCommonFields) get_pos() Pos {
return p.pos return p.pos
} }
@ -187,16 +187,16 @@ struct Queen {
PieceCommonFields PieceCommonFields
} }
fn (piece Piece) position() Position { fn (piece Piece) pos() Pos {
mut pos := Position{} mut pos := Pos{}
match piece { match piece {
King, Queen { pos = piece.pos } King, Queen { pos = piece.pos }
} }
return pos return pos
} }
fn (piece Piece) get_position() Position { fn (piece Piece) get_pos() Pos {
mut pos := Position{} mut pos := Pos{}
match piece { match piece {
King, Queen { pos = piece.get_pos() } King, Queen { pos = piece.get_pos() }
} }
@ -205,20 +205,20 @@ fn (piece Piece) get_position() Position {
fn test_match_aggregate_field() { fn test_match_aggregate_field() {
piece := Piece(King{ piece := Piece(King{
pos: Position{1, 8} pos: Pos{1, 8}
team: .black team: .black
}) })
pos := piece.position() pos := piece.pos()
assert pos.x == 1 assert pos.x == 1
assert pos.y == 8 assert pos.y == 8
} }
fn test_match_aggregate_method() { fn test_match_aggregate_method() {
piece := Piece(King{ piece := Piece(King{
pos: Position{1, 8} pos: Pos{1, 8}
team: .black team: .black
}) })
pos := piece.get_position() pos := piece.get_pos()
assert pos.x == 1 assert pos.x == 1
assert pos.y == 8 assert pos.y == 8
} }


@ -3,7 +3,7 @@
// that can be found in the LICENSE file. // that can be found in the LICENSE file.
module token module token
pub struct Position { pub struct Pos {
pub: pub:
len int // length of the literal in the source len int // length of the literal in the source
line_nr int // the line number in the source where the token occurred line_nr int // the line number in the source where the token occurred
@ -14,23 +14,23 @@ pub mut:
} }
[unsafe] [unsafe]
pub fn (mut p Position) free() { pub fn (mut p Pos) free() {
} }
pub fn (p Position) line_str() string { pub fn (p Pos) line_str() string {
return '{l: ${p.line_nr + 1:5}, c: ${p.col:3}, p: ${p.pos:5}, ll: ${p.last_line + 1:5}}' return '{l: ${p.line_nr + 1:5}, c: ${p.col:3}, p: ${p.pos:5}, ll: ${p.last_line + 1:5}}'
} }
pub fn (pos Position) extend(end Position) Position { pub fn (pos Pos) extend(end Pos) Pos {
return Position{ return Pos{
...pos ...pos
len: end.pos - pos.pos + end.len len: end.pos - pos.pos + end.len
last_line: end.last_line last_line: end.last_line
} }
} }
pub fn (pos Position) extend_with_last_line(end Position, last_line int) Position { pub fn (pos Pos) extend_with_last_line(end Pos, last_line int) Pos {
return Position{ return Pos{
len: end.pos - pos.pos + end.len len: end.pos - pos.pos + end.len
line_nr: pos.line_nr line_nr: pos.line_nr
pos: pos.pos pos: pos.pos
@ -39,13 +39,13 @@ pub fn (pos Position) extend_with_last_line(end Position, last_line int) Positio
} }
} }
pub fn (mut pos Position) update_last_line(last_line int) { pub fn (mut pos Pos) update_last_line(last_line int) {
pos.last_line = last_line - 1 pos.last_line = last_line - 1
} }
[inline] [inline]
pub fn (tok &Token) position() Position { pub fn (tok &Token) pos() Pos {
return Position{ return Pos{
len: tok.len len: tok.len
line_nr: tok.line_nr - 1 line_nr: tok.line_nr - 1
pos: tok.pos pos: tok.pos
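
For reference, a minimal usage sketch of the renamed API defined above: extend() keeps the receiver's start offset and grows len to end.pos - pos.pos + end.len, while taking last_line from the end span. The two Pos values below are written out by hand purely for illustration, shaped like what tok.pos() returns:

import v.token

fn main() {
	start := token.Pos{ line_nr: 0, pos: 10, len: 2 }
	end := token.Pos{ line_nr: 2, pos: 30, len: 4, last_line: 2 }
	full := start.extend(end)
	println(full.len) // 30 - 10 + 4 = 24
	println(full.last_line) // 2, taken from `end`
}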


@ -657,9 +657,9 @@ pub fn (mut t Transformer) infix_expr(mut node ast.InfixExpr) ast.Expr {
node.left = t.expr(mut node.left) node.left = t.expr(mut node.left)
node.right = t.expr(mut node.right) node.right = t.expr(mut node.right)
mut pos := node.left.position() mut pos := node.left.pos()
pos.extend(node.pos) pos.extend(node.pos)
pos.extend(node.right.position()) pos.extend(node.right.pos())
if t.pref.is_debug { if t.pref.is_debug {
return node return node


@ -69,7 +69,7 @@ fn color(kind string, msg string) string {
} }
// formatted_error - `kind` may be 'error' or 'warn' // formatted_error - `kind` may be 'error' or 'warn'
pub fn formatted_error(kind string, omsg string, filepath string, pos token.Position) string { pub fn formatted_error(kind string, omsg string, filepath string, pos token.Pos) string {
emsg := omsg.replace('main.', '') emsg := omsg.replace('main.', '')
mut path := filepath mut path := filepath
verror_paths_override := os.getenv('VERROR_PATHS') verror_paths_override := os.getenv('VERROR_PATHS')
@ -120,7 +120,7 @@ pub fn cached_file2sourcelines(path string) []string {
return res return res
} }
pub fn source_file_context(kind string, filepath string, pos token.Position) []string { pub fn source_file_context(kind string, filepath string, pos token.Pos) []string {
mut clines := []string{} mut clines := []string{}
source_lines := unsafe { cached_file2sourcelines(filepath) } source_lines := unsafe { cached_file2sourcelines(filepath) }
if source_lines.len == 0 { if source_lines.len == 0 {


@ -27,10 +27,10 @@ pub mut:
kind ErrorKind [required] kind ErrorKind [required]
pub: pub:
// General message // General message
message string [required] message string [required]
details string // Details about how to resolve or fix the situation details string // Details about how to resolve or fix the situation
file_path string // file where the error has its origin file_path string // file where the error has its origin
pos token.Position // position in the file pos token.Pos // position in the file
fix FixKind [required] fix FixKind [required]
typ ErrorType [required] typ ErrorType [required]
} }