run vfmt on parser.v

pull/3130/head
Alexander Medvednikov 2019-12-18 04:34:50 +03:00
parent ad211a86a6
commit a46a2e4715
2 changed files with 809 additions and 780 deletions
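For orientation, a minimal illustrative sketch (not part of the commit) of the map-literal layout this vfmt pass settles on, as seen in the reserved_types hunk below: one entry per line inside the braces. The reserved_types name and keys are borrowed from that hunk; the surrounding main function is only scaffolding so the snippet runs on its own.

	fn main() {
		// one map entry per line, vfmt-style; keys mirror the reserved_types hunk
		reserved_types := {
			'i8':  true
			'i16': true
			'int': true
		}
		println(reserved_types['int']) // prints: true
	}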


@ -1,7 +1,6 @@
// Copyright (c) 2019 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module compiler
import (
@ -83,10 +82,21 @@ pub mut:
const (
max_module_depth = 5
reserved_types = {
'i8' : true, 'i16' : true, 'int' : true, 'i64' : true, 'i128' : true,
'byte' : true, 'u16' : true, 'u32' : true, 'u64' : true, 'u128' : true,
'f32' : true, 'f64' : true,
'rune' : true, 'byteptr' : true, 'voidptr' : true
'i8': true,
'i16': true,
'int': true,
'i64': true,
'i128': true,
'byte': true,
'u16': true,
'u32': true,
'u64': true,
'u128': true,
'f32': true,
'f64': true,
'rune': true,
'byteptr': true,
'voidptr': true
}
)
@ -129,8 +139,7 @@ fn (v mut V) new_parser_from_file(path string) Parser {
// println('new_parser("$path")')
mut path_pcguard := ''
mut path_platform := '.v'
for path_ending in ['_lin.v', '_mac.v', '_win.v', '_nix.v', '_linux.v',
'_darwin.v', '_windows.v'] {
for path_ending in ['_lin.v', '_mac.v', '_win.v', '_nix.v', '_linux.v', '_darwin.v', '_windows.v'] {
if path.ends_with(path_ending) {
if path_ending == '_mac.v' {
p := path_ending.replace('_mac.v', '_darwin.v')
@ -149,10 +158,9 @@ fn (v mut V) new_parser_from_file(path string) Parser {
break
}
}
mut p := v.new_parser(new_scanner_file(path))
p = { p|
file_path: path,
p = {
p|file_path:path,
file_name:path.all_after(os.path_separator),
file_platform:path_platform,
file_pcguard:path_pcguard,
@ -188,12 +196,12 @@ fn (v mut V) new_parser(scanner &Scanner) Parser {
pref: v.pref
os: v.os
vroot: v.vroot
local_vars: [Var{}].repeat(MaxLocalVars)
local_vars: [Var{
}].repeat(MaxLocalVars)
import_table: new_import_table()
}
$if js {
p.is_js = true
}
p.is_js=true}
if p.pref.is_repl {
p.scanner.should_print_line_on_error = false
p.scanner.should_print_errors_in_color = false
@ -228,7 +236,6 @@ fn (p mut Parser) next() {
// (only when vfmt compile time flag is enabled, otherwise this function
// is not even generated)
p.fnext()
p.prev_tok2 = p.prev_tok
p.prev_tok = p.tok
p.scanner.prev_tok = p.tok
@ -261,18 +268,26 @@ fn (p &Parser) peek() TokenKind {
}
return .eof
*/
}
// TODO remove dups
[inline] fn (p &Parser) prev_token() Token {
[inline]
fn (p &Parser) prev_token() Token {
return p.tokens[p.token_idx - 2]
}
[inline] fn (p &Parser) cur_tok() Token {
[inline]
fn (p &Parser) cur_tok() Token {
return p.tokens[p.token_idx - 1]
}
[inline] fn (p &Parser) peek_token() Token {
[inline]
fn (p &Parser) peek_token() Token {
if p.token_idx >= p.tokens.len - 2 {
return Token{ tok:.eof }
return Token{
tok: .eof
}
}
return p.tokens[p.token_idx]
}
@ -285,6 +300,7 @@ fn (p &Parser) log(s string) {
}
println(s)
*/
}
pub fn (p &Parser) save_state() ParserState {
@ -363,7 +379,8 @@ fn (p mut Parser) statements_from_text(text string, rcbr bool) {
p.next()
if rcbr {
p.statements()
} else {
}
else {
p.statements_no_rcbr()
}
p.restore_state(saved_state, true, false)
@ -384,7 +401,8 @@ fn (p mut Parser) parse(pass Pass) {
p.next()
p.fspace()
p.mod = p.check_name()
} else {
}
else {
p.mod = 'main'
}
}
@ -395,7 +413,6 @@ fn (p mut Parser) parse(pass Pass) {
}
//
p.fgenln('\n')
p.cgen.nogen = false
if p.pref.build_mode == .build_module && p.mod != p.v.mod {
// println('skipping $p.mod (v.mod = $p.v.mod)')
@ -407,16 +424,11 @@ fn (p mut Parser) parse(pass Pass) {
p.can_chash = p.mod in['ui','darwin','clipboard','webview'] // TODO tmp remove
// Import pass - the first and the smallest pass that only analyzes imports
// if we are a building module get the full module name from v.mod
fq_mod := if p.pref.build_mode == .build_module && p.v.mod.ends_with(p.mod) {
p.v.mod
}
fq_mod := if p.pref.build_mode == .build_module && p.v.mod.ends_with(p.mod) {p.v.mod}
// fully qualify the module name, eg base64 to encoding.base64
else {
p.table.qualify_module(p.mod, p.file_path)
}
else {p.table.qualify_module(p.mod, p.file_path)}
p.table.register_module(fq_mod)
p.mod = fq_mod
if p.pass == .imports {
for p.tok == .key_import && p.peek() != .key_const {
p.imports()
@ -449,17 +461,24 @@ fn (p mut Parser) parse(pass Pass) {
.key_pub {
next := p.peek()
match next {
.key_fn { p.fn_decl() }
.key_const { p.const_decl() }
.key_struct,
.key_union,
.key_interface { p.struct_decl() }
.key_enum { p.enum_decl(false) }
.key_type { p.type_decl() }
.key_fn {
p.fn_decl()
}
.key_const {
p.const_decl()
}
.key_struct,.key_union,.key_interface {
p.struct_decl()
}
.key_enum {
p.enum_decl(false)
}
.key_type {
p.type_decl()
}
else {
p.error('wrong pub keyword usage')
}
}
}}
}
.key_fn {
p.fn_decl()
@ -488,13 +507,8 @@ fn (p mut Parser) parse(pass Pass) {
p.comp_time()
}
.key_global {
if !p.pref.translated && !p.pref.is_live &&
!p.builtin_mod && !p.pref.building_v &&
p.mod != 'ui' && !os.getwd().contains('/volt') &&
!p.pref.enable_globals
{
if !p.pref.translated && !p.pref.is_live && !p.builtin_mod && !p.pref.building_v && p.mod != 'ui' && !os.getwd().contains('/volt') && !p.pref.enable_globals {
p.error('use `v --enable-globals ...` to enable globals')
// p.error('__global is only allowed in translated code')
}
p.next()
@ -570,8 +584,7 @@ fn (p mut Parser) parse(pass Pass) {
else {
p.error('unexpected token `${p.strtok()}`')
}
}
}
}}
}
}
@ -677,14 +690,15 @@ fn (p mut Parser) const_decl() {
p.cgen.nogen = true
typ = p.expression()
p.cgen.nogen = false
} else {
}
else {
typ = p.get_type()
}
p.table.register_const(name, typ, p.mod, is_pub)
p.cgen.consts << ('extern ' +
p.table.cgen_name_type_pair(name, typ)) + ';'
p.cgen.consts << ('extern ' + p.table.cgen_name_type_pair(name, typ)) + ';'
continue // Don't generate C code when building a .vh file
} else {
}
else {
p.check_space(.assign)
typ = p.expression()
}
@ -709,8 +723,7 @@ fn (p mut Parser) const_decl() {
// We are building module `ui`, but are parsing `gx` right now
// (because of nogen). We need to import gx constants with `extern`.
// println('extern const mod=$p.mod name=$name')
p.cgen.consts << ('extern ' +
p.table.cgen_name_type_pair(name, typ)) + ';'
p.cgen.consts << ('extern ' + p.table.cgen_name_type_pair(name, typ)) + ';'
}
if p.pass == .main && !p.cgen.nogen {
// TODO hack
@ -725,8 +738,7 @@ fn (p mut Parser) const_decl() {
continue
}
if typ.starts_with('[') {
p.cgen.consts << p.table.cgen_name_type_pair(name, typ) +
' = $p.cgen.cur_line;'
p.cgen.consts << p.table.cgen_name_type_pair(name, typ) + ' = $p.cgen.cur_line;'
}
else {
p.cgen.consts << p.table.cgen_name_type_pair(name, typ) + ';'
@ -762,12 +774,7 @@ fn (p mut Parser) type_decl() {
// TODO dirty C typedef hacks for DOOM
// Unknown type probably means it's a struct, and it's used before the struct is defined,
// so specify "struct"
_struct := if parent.cat != .array && parent.cat != .func &&
!p.table.known_type(parent.name) {
'struct'
} else {
''
}
_struct := if parent.cat != .array && parent.cat != .func && !p.table.known_type(parent.name) {'struct'}else {''}
p.gen_typedef('typedef $_struct $nt_pair; //type alias name="$name" parent=`$parent.name`')
p.table.register_type(Type{
name: name
@ -793,7 +800,8 @@ fn (p mut Parser) interface_method(field_name, receiver string) &Fn {
// No type on the same line, this method doesn't return a type, process next
if prev_tok.line_nr != cur_tok.line_nr {
method.typ = 'void'
} else {
}
else {
method.typ = p.get_type() // method return type
// p.fspace()
p.fgen_nl()
@ -803,11 +811,17 @@ fn (p mut Parser) interface_method(field_name, receiver string) &Fn {
fn key_to_type_cat(tok TokenKind) TypeCategory {
match tok {
.key_interface { return .interface_ }
.key_struct { return .struct_ }
.key_union { return .union_ }
else {}
.key_interface {
return .interface_
}
.key_struct {
return .struct_
}
.key_union {
return .union_
}
else {
}}
verror('Unknown token: $tok')
return .builtin
}
@ -827,7 +841,7 @@ fn (p mut Parser) check_string() string {
fn (p mut Parser) check_not_reserved() {
if reserved_types[p.lit] {
p.error('`$p.lit` can\'t be used as name')
p.error("`$p.lit` can\'t be used as name")
}
}
@ -844,7 +858,8 @@ fn (p &Parser) strtok() string {
if p.tok == .str {
if p.lit.contains("'") {
return '"$p.lit"'
} else {
}
else {
return "'$p.lit'"
}
}
@ -886,15 +901,14 @@ fn (p mut Parser) check(expected TokenKind) {
p.fmt_inc()
}
*/
p.next()
p.next()
// if p.scanner.line_comment != '' {
// p.fgenln('// ! "$p.scanner.line_comment"')
// p.scanner.line_comment = ''
// }
}
[inline]
fn (p &Parser) first_pass() bool {
return p.pass == .decl
@ -927,7 +941,10 @@ fn (p mut Parser) get_type() string {
}
// fn type
if p.tok == .key_fn {
mut f := Fn{name: '_', mod: p.mod}
mut f := Fn{
name: '_',
mod: p.mod
}
p.next()
line_nr := p.scanner.line_nr
p.fn_args(mut f)
@ -947,6 +964,7 @@ fn (p mut Parser) get_type() string {
// Register anon fn type
fn_typ := Type{
name: f.typ_str() // 'fn (int, int) string'
mod: p.mod
func: f
}
@ -985,7 +1003,6 @@ fn (p mut Parser) get_type() string {
p.register_map(typ)
return typ
}
// ptr/ref
mut warn := false
for p.tok == .mul {
@ -1005,7 +1022,8 @@ fn (p mut Parser) get_type() string {
ti := p.cur_fn.dispatch_of.inst
if p.lit in ti.keys() {
typ += ti[p.lit]
} else {
}
else {
typ += p.lit
}
// C.Struct import
@ -1035,8 +1053,7 @@ fn (p mut Parser) get_type() string {
// "typ" not found? try "mod__typ"
if t.name == '' && !p.builtin_mod {
// && !p.first_pass() {
if !typ.contains('array_') && p.mod != 'main' && !typ.contains('__') &&
!typ.starts_with('[') {
if !typ.contains('array_') && p.mod != 'main' && !typ.contains('__') && !typ.starts_with('[') {
typ = p.prepend_mod(typ)
}
t = p.table.find_type(typ)
@ -1093,6 +1110,7 @@ fn (p mut Parser) get_type() string {
p.error('2 __ in gettype(): typ="$typ"')
}
*/
return typ
}
@ -1123,7 +1141,6 @@ fn (p mut Parser) statements() string {
fn (p mut Parser) statements_no_rcbr() string {
p.open_scope()
if !p.inside_if_expr {
// p.genln('')
}
@ -1147,7 +1164,6 @@ fn (p mut Parser) statements_no_rcbr() string {
// p.next()
p.check(.rcbr)
// p.fmt_dec()
p.close_scope()
return last_st_typ
}
@ -1160,12 +1176,14 @@ fn (p mut Parser) close_scope() {
mut i := p.var_idx - 1
for ; i >= 0; i-- {
v := p.local_vars[i]
if p.pref.autofree && (v.is_alloc || (v.is_arg && v.typ == 'string')) { // && !p.pref.is_test {
if p.pref.autofree && (v.is_alloc || (v.is_arg && v.typ == 'string')) {
// && !p.pref.is_test {
p.free_var(v)
}
// if p.fileis('mem.v') {
// println(v.name + ' $v.is_arg scope=$v.scope_level cur=$p.cur_fn.scope_level')}
if v.scope_level != p.cur_fn.scope_level {// && !v.is_arg {
if v.scope_level != p.cur_fn.scope_level {
// && !v.is_arg {
break
}
}
@ -1189,21 +1207,25 @@ fn (p mut Parser) free_var(v Var) {
mut free_fn := 'free'
if v.typ.starts_with('array_') {
free_fn = 'v_array_free'
} else if v.typ == 'string' {
}
else if v.typ == 'string' {
free_fn = 'v_string_free'
// if p.fileis('str.v') {
// println('freeing str $v.name')
// }
// continue
} else if v.ptr || v.typ.ends_with('*') {
}
else if v.ptr || v.typ.ends_with('*') {
free_fn = 'v_ptr_free'
// continue
} else {
}
else {
return
}
if p.returns {
// Don't free a variable that's being returned
if !v.is_returned && v.typ != 'FILE*' { //!v.is_c {
if !v.is_returned && v.typ != 'FILE*' {
// !v.is_c {
// p.cgen.cur_line = '/* free */' + p.cgen.cur_line
// p.cgen.set_placeholder(0, '/*free2*/')
prev_line := p.cgen.lines[p.cgen.lines.len - 1]
@ -1211,7 +1233,9 @@ fn (p mut Parser) free_var(v Var) {
p.cgen.lines[p.cgen.lines.len - 1] = free + '\n' + prev_line
// '$free_fn ($v.name); /* :) close_scope free $v.typ */\n' + prev_line
}
} else if p.mod != 'strings' { //&& p.mod != 'builtin' {
}
else if p.mod != 'strings' {
// && p.mod != 'builtin' {
/*
prev_line := p.cgen.lines[p.cgen.lines.len-1]
free := '$free_fn ($v.name); /* :) close_scope free $v.typ */'
@ -1325,7 +1349,8 @@ fn (p mut Parser) statement(add_semi bool) string {
.key_return {
p.return_st()
}
.lcbr {// {} block
.lcbr {
// {} block
// Do not allow {} block to start on the same line
// to avoid e.g. `foo() {` instead of `if foo() {`
if p.prev_token().line_nr == p.scanner.line_nr {
@ -1372,8 +1397,7 @@ fn (p mut Parser) statement(add_semi bool) string {
p.genln('; ')
}
return typ
}
}
}}
// ? : uses , as statement separators
if p.inside_if_expr && p.tok != .rcbr {
p.gen(', ')
@ -1384,7 +1408,6 @@ fn (p mut Parser) statement(add_semi bool) string {
return q
// p.cgen.end_statement()
}
// is_map: are we in map assignment? (m[key] = val) if yes, dont generate '='
// this can be `user = ...` or `user.field = ...`, in both cases `v` is `user`
fn (p mut Parser) assign_statement(v Var,ph int,is_map bool) {
@ -1431,8 +1454,7 @@ fn ($v.name mut $v.typ) ${p.cur_fn.name}(...) {
}
else {
p.gen(' ' + p.tok.str() + ' ')
}
}
}}
p.fspace()
p.next()
p.fspace()
@ -1448,16 +1470,13 @@ fn ($v.name mut $v.typ) ${p.cur_fn.name}(...) {
p.error_with_token_index('${fn_name}() $err_used_as_value', p.token_idx - 2)
}
// Allow `num = 4` where `num` is an `?int`
if p.assigned_type.starts_with('Option_') &&
expr_type == p.assigned_type['Option_'.len..] {
if p.assigned_type.starts_with('Option_') && expr_type == p.assigned_type['Option_'.len..] {
expr := p.cgen.cur_line[pos..]
left := p.cgen.cur_line[..pos]
typ := expr_type.replace('Option_', '')
p.cgen.resetln(left + 'opt_ok(($typ[]){ $expr }, sizeof($typ))')
}
else if expr_type.starts_with('Option_') &&
p.assigned_type == expr_type['Option_'.len..] && p.tok == .key_orelse
{
else if expr_type.starts_with('Option_') && p.assigned_type == expr_type['Option_'.len..] && p.tok == .key_orelse {
line := p.cgen.cur_line
vname := line[..pos].replace('=', '') // TODO cgen line hack
if idx:=line.index('='){
@ -1514,11 +1533,9 @@ fn (p mut Parser) var_decl() {
p.check(.key_static)
p.fspace()
}
mut var_token_idxs := [p.cur_tok_index()]
mut var_mut := [is_mut] // add first var mut
mut var_names := [p.check_name()] // add first variable
p.scanner.validate_var_name(var_names[0])
mut new_vars := 0
if var_names[0] != '_' && !p.known_var(var_names[0]) {
@ -1530,7 +1547,8 @@ fn (p mut Parser) var_decl() {
if p.tok == .key_mut {
p.check(.key_mut)
var_mut << true
} else {
}
else {
var_mut << false
}
var_token_idxs << p.cur_tok_index()
@ -1545,9 +1563,11 @@ fn (p mut Parser) var_decl() {
is_decl_assign := p.tok == .decl_assign
if is_assign {
p.check_space(.assign) // =
} else if is_decl_assign {
}
else if is_decl_assign {
p.check_space(.decl_assign) // :=
} else {
}
else {
p.error('expected `=` or `:=`')
}
// all vars on left of `:=` already defined (or `_`)
@ -1659,7 +1679,9 @@ fn (p mut Parser) get_struct_type(name_ string, is_c bool, is_ptr bool) string {
}
fn (p mut Parser) get_var_type(name string,is_ptr bool,deref_nr int) string {
v := p.find_var_check_new_var(name) or { return "" }
v := p.find_var_check_new_var(name) or{
return ''
}
if is_ptr {
p.gen('&')
}
@ -1675,6 +1697,7 @@ fn (p mut Parser) get_var_type(name string, is_ptr bool, deref_nr int) string {
p.mark_arg_moved(v)
}
*/
mut typ := p.var_expr(v)
// *var
if deref_nr > 0 {
@ -1691,7 +1714,6 @@ fn (p mut Parser) get_var_type(name string, is_ptr bool, deref_nr int) string {
typ = typ.replace_once('ptr', '') // TODO
typ = typ.replace_once('*', '') // TODO
}
}
// &var
else if is_ptr {
@ -1708,10 +1730,13 @@ fn (p mut Parser) get_var_type(name string, is_ptr bool, deref_nr int) string {
}
fn (p mut Parser) get_const_type(name string,is_ptr bool) string {
c := p.table.find_const(name) or { return "" }
c := p.table.find_const(name) or{
return ''
}
if is_ptr && !c.is_global {
p.error('cannot take the address of constant `$c.name`')
} else if is_ptr && c.is_global {
}
else if is_ptr && c.is_global {
// c.ptr = true
p.gen('& /*const*/ ')
}
@ -1739,8 +1764,7 @@ fn (p mut Parser) get_c_func_type(name string) string {
// without declaring `foo`?
// Do not allow it.
if !name.starts_with('gl') && !name.starts_with('glad') {
p.error('undefined C function `$f.name`\n' +
'define it with `fn C.${name}([args]) [return_type]`')
p.error('undefined C function `$f.name`\n' + 'define it with `fn C.${name}([args]) [return_type]`')
}
return 'void*'
}
@ -1758,7 +1782,8 @@ fn (p mut Parser) undefined_error(name string, orig_name string) {
// If orig_name is a mod, then printing undefined: `mod` tells us nothing
if p.table.known_mod(orig_name) || p.import_table.known_alias(orig_name) {
p.error('undefined: `$name_dotted` (in module `$orig_name`)')
} else if orig_name in reserved_type_param_names {
}
else if orig_name in reserved_type_param_names {
p.error('the letter `$orig_name` is reserved for type parameters')
}
p.error('undefined: `$orig_name`')
@ -1800,14 +1825,14 @@ fn (p mut Parser) var_expr(v Var) string {
p.next()
return p.select_query(fn_ph)
}
if typ == 'pg__DB' && !p.fileis('pg.v') && p.peek() == .name
{
if typ == 'pg__DB' && !p.fileis('pg.v') && p.peek() == .name {
name := p.tokens[p.token_idx].lit
if !name.contains('exec') && !name.starts_with('q_') {
p.next()
if name == 'insert' {
p.insert_query(fn_ph)
} else if name == 'update' {
}
else if name == 'update' {
p.update_query(fn_ph)
}
return 'void'
@ -1918,7 +1943,8 @@ fn (p mut Parser) dot(str_typ_ string, method_ph int) string {
p.error_with_token_index('type `$typ.name` has no field or method `$field_name`', fname_tidx)
}
mut dot := '.'
if str_typ.ends_with('*') || str_typ == 'FT_Face' { // TODO fix C ptr typedefs
if str_typ.ends_with('*') || str_typ == 'FT_Face' {
// TODO fix C ptr typedefs
dot = dot_ptr
}
// field
@ -1934,13 +1960,10 @@ fn (p mut Parser) dot(str_typ_ string, method_ph int) string {
}
// Is the next token `=`, `+=` etc? (Are we modifying the field?)
next := p.peek()
modifying := next.is_assign() || next == .inc || next == .dec ||
(field.typ.starts_with('array_') && next == .left_shift)
if !p.builtin_mod && !p.pref.translated && modifying &&
p.has_immutable_field {
modifying := next.is_assign() || next == .inc || next == .dec || (field.typ.starts_with('array_') && next == .left_shift)
if !p.builtin_mod && !p.pref.translated && modifying && p.has_immutable_field {
f := p.first_immutable_field
p.error_with_token_index('cannot modify immutable field `$f.name` (type `$f.parent_fn`)\n' +
'declare the field with `mut:`
p.error_with_token_index('cannot modify immutable field `$f.name` (type `$f.parent_fn`)\n' + 'declare the field with `mut:`
struct $f.parent_fn {
mut:
$f.name $f.typ
@ -1951,15 +1974,13 @@ struct $f.parent_fn {
if field.access_mod == .private && !p.builtin_mod && !p.pref.translated && p.mod != typ.mod && !p.is_vgen {
// println('$typ.name :: $field.name ')
// println(field.access_mod)
p.error_with_token_index('cannot refer to unexported field `$struct_field` (type `$typ.name`)\n' +
'declare the field with `pub:`
p.error_with_token_index('cannot refer to unexported field `$struct_field` (type `$typ.name`)\n' + 'declare the field with `pub:`
struct $typ.name {
pub:
$struct_field $field.typ
}
', fname_tidx)
}
if p.base_type(field.typ).starts_with('fn ') && p.peek() == .lpar {
tmp_typ := p.table.find_type(field.typ)
mut f := tmp_typ.func
@ -1970,7 +1991,6 @@ struct $typ.name {
p.gen(')')
return f.typ
}
p.gen(dot + struct_field)
p.next()
return field.typ
@ -1983,6 +2003,9 @@ struct $typ.name {
p.fn_call(mut method, method_ph, '', str_typ)
// optional method call `a.method() or {}`, no return assignment
is_or_else := p.tok == .key_orelse
if is_or_else {
p.fspace()
}
if p.tok == .question {
// `files := os.ls('.')?`
return p.gen_handle_question_suffix(method, method_ph)
@ -1990,8 +2013,7 @@ struct $typ.name {
else if !p.is_var_decl && is_or_else {
method.typ = p.gen_handle_option_or_else(method.typ, '', method_ph)
}
else if !p.is_var_decl && !is_or_else && !p.inside_return_expr &&
method.typ.starts_with('Option_') {
else if !p.is_var_decl && !is_or_else && !p.inside_return_expr && method.typ.starts_with('Option_') {
opt_type := method.typ[7..]
p.error('unhandled option type: `?$opt_type`')
}
@ -2024,13 +2046,21 @@ enum IndexType {
}
fn get_index_type(typ string) IndexType {
if typ.starts_with('map_') { return .map }
if typ == 'string' { return .str }
if typ.starts_with('array_') || typ == 'array' { return .array }
if typ.starts_with('map_') {
return .map
}
if typ == 'string' {
return .str
}
if typ.starts_with('array_') || typ == 'array' {
return .array
}
if typ == 'byte*' || typ == 'byteptr' || typ.contains('*') {
return .ptr
}
if typ[0] == `[` { return .fixed_array }
if typ[0] == `[` {
return .fixed_array
}
return .noindex
}
@ -2071,7 +2101,9 @@ fn (p mut Parser) index_expr(typ_ string, fn_ph int) string {
p.gen(', ')
}
}
if is_variadic_arg { typ = typ[5..] }
if is_variadic_arg {
typ = typ[5..]
}
if is_fixed_arr {
// `[10]int` => `int`, `[10][3]int` => `[3]int`
if typ.contains('][') {
@ -2131,9 +2163,11 @@ fn (p mut Parser) index_expr(typ_ string, fn_ph int) string {
if p.tok == .dotdot {
if is_arr {
typ = 'array_' + typ
} else if is_str {
}
else if is_str {
typ = 'string'
} else {
}
else {
p.error('slicing is supported by arrays and strings only')
}
is_slice = true
@ -2334,11 +2368,10 @@ fn (p mut Parser) assoc() string {
}
fn (p mut Parser) char_expr() {
p.gen('\'$p.lit\'')
p.gen("\'$p.lit\'")
p.next()
}
fn format_str(_str string) string {
// TODO don't call replace 3 times for every string, do this in scanner.v
mut str := _str.replace('"', '\\"')
@ -2384,8 +2417,7 @@ fn (p mut Parser) map_init() string {
}
p.fgen_nl()
}
p.gen('new_map_init($i, sizeof($val_type), ' +
'(string[$i]){ $keys_gen }, ($val_type [$i]){ $vals_gen } )')
p.gen('new_map_init($i, sizeof($val_type), ' + '(string[$i]){ $keys_gen }, ($val_type [$i]){ $vals_gen } )')
typ := 'map_$val_type'
p.register_map(typ)
return typ
@ -2407,8 +2439,7 @@ fn (p mut Parser) map_init() string {
if p.tok == .lcbr {
p.check(.lcbr)
p.check(.rcbr)
println('warning: $p.file_name:$p.scanner.line_nr ' +
'initializaing maps no longer requires `{}`')
println('warning: $p.file_name:$p.scanner.line_nr ' + 'initializaing maps no longer requires `{}`')
}
return typ
}
@ -2431,10 +2462,12 @@ fn (p mut Parser) array_init() string {
// p.error('unknown const `$p.lit`')
exit(1)
}
if c.typ == 'int' && p.peek() == .rsbr { //&& !p.inside_const {
if c.typ == 'int' && p.peek() == .rsbr {
// && !p.inside_const {
is_integer = true
is_const_len = true
} else {
}
else {
p.error('bad fixed size array const `$p.lit`')
}
}
@ -2449,8 +2482,7 @@ fn (p mut Parser) array_init() string {
if i == 0 {
typ = val_typ
// fixed width array initialization? (`arr := [20]byte`)
if is_integer && p.tok == .rsbr && p.peek() == .name &&
p.cur_tok().line_nr == p.peek_token().line_nr {
if is_integer && p.tok == .rsbr && p.peek() == .name && p.cur_tok().line_nr == p.peek_token().line_nr {
// there is no space between `[10]` and `byte`
// if p.cur_tok().col + p.peek_token().lit.len == p.peek_token().col {
if p.cur_tok().pos + p.peek_token().lit.len == p.peek_token().pos {
@ -2466,7 +2498,8 @@ fn (p mut Parser) array_init() string {
return '[${mod_gen_name(p.mod)}__$lit]$array_elem_typ'
}
return '[$lit]$array_elem_typ'
} else {
}
else {
p.check(.rsbr)
typ = p.get_type()
p.error('no space allowed between [$lit] and $typ')
@ -2505,14 +2538,15 @@ fn (p mut Parser) array_init() string {
if p.tok != .name && p.tok != .mul && p.tok != .lsbr && i == 0 && !exp_array {
p.error('specify array type: `[]typ` instead of `[]`')
}
if i == 0 && (p.tok == .name || p.tok == .mul) &&
p.tokens[p.token_idx-2].line_nr == p.tokens[p.token_idx-1].line_nr { // TODO
if i == 0 && (p.tok == .name || p.tok == .mul) && p.tokens[p.token_idx - 2].line_nr == p.tokens[p.token_idx - 1].line_nr {
// TODO
// vals.len == 0 {
if exp_array {
p.error('no need to specify the full array type here, use `[]` instead of `[]${p.expected_type[6..]}`')
}
typ = p.get_type().replace('*', '_ptr')
} else if exp_array && i == 0 {
}
else if exp_array && i == 0 {
// allow `known_array = []`
typ = p.expected_type[6..]
}
@ -2521,7 +2555,6 @@ fn (p mut Parser) array_init() string {
if no_alloc {
p.next()
}
// [1,2,3]!! => [3]int{1,2,3}
is_fixed_size := p.tok == .not
if is_fixed_size {
@ -2542,7 +2575,6 @@ fn (p mut Parser) array_init() string {
// if ptr {
// typ += '_ptr"
// }
real := typ.replace('_ptr', '*')
p.gen_array_init(real, no_alloc, new_arr_ph, i)
typ = 'array_$typ'
@ -2552,7 +2584,6 @@ fn (p mut Parser) array_init() string {
// `f32(3)`
// tok is `f32` or `)` if `(*int)(ptr)`
fn (p mut Parser) get_tmp() string {
p.tmp_cnt++
return 'tmp$p.tmp_cnt'
@ -2563,7 +2594,6 @@ fn (p mut Parser) get_tmp_counter() int {
return p.tmp_cnt
}
fn (p mut Parser) assert_statement() {
if p.first_pass() {
return
@ -2577,8 +2607,7 @@ fn (p mut Parser) assert_statement() {
// TODO print "expected: got" for failed tests
filename := cescaped_path(p.file_path)
cfname := p.cur_fn.name.replace('main__', '')
sourceline := p.scanner.line( nline - 1 ).replace('"', '\'')
sourceline := p.scanner.line(nline - 1).replace('"', "\'")
if !p.pref.is_test {
// an assert used in a normal v program. no fancy formatting
p.genln(';\n
@ -2595,7 +2624,6 @@ if (!$tmp) {
')
return
}
p.genln(';\n
if (!$tmp) {
g_test_fails++;
@ -2663,8 +2691,7 @@ fn (p mut Parser) return_st() {
// Automatically wrap an object inside an option if the function
// returns an option:
// `return val` => `return opt_ok(val)`
if p.cur_fn.typ.ends_with(expr_type) && !is_none &&
p.cur_fn.typ.starts_with('Option_') {
if p.cur_fn.typ.ends_with(expr_type) && !is_none && p.cur_fn.typ.starts_with('Option_') {
tmp := p.get_tmp()
ret := p.cgen.cur_line[ph..]
typ := expr_type.replace('Option_', '')
@ -2674,15 +2701,16 @@ fn (p mut Parser) return_st() {
}
else {
ret := p.cgen.cur_line[ph..]
if deferred_text == '' || expr_type == 'void*' {
// no defer{} necessary?
if expr_type == '${p.cur_fn.typ}*' {
p.cgen.resetln('return *$ret')
} else {
}
else {
p.cgen.resetln('return $ret')
}
} else {
}
else {
tmp := p.get_tmp()
p.cgen.resetln('$expr_type $tmp = $ret;\n')
p.genln(deferred_text)
@ -2696,7 +2724,6 @@ fn (p mut Parser) return_st() {
if p.tok == .name || p.tok == .number || p.tok == .str {
p.error_with_token_index('function `$p.cur_fn.name` should not return a value', p.cur_fn.fn_name_token_idx)
}
p.genln(deferred_text)
if p.cur_fn.name == 'main' {
p.gen('return 0')
@ -2841,7 +2868,8 @@ fn (p mut Parser) attribute() {
// [if vfmt]
p.next()
p.attr = 'if ' + p.check_name()
} else {
}
else {
p.attr = p.check_name()
}
attr_token_idx := p.cur_tok_index()
@ -2872,27 +2900,24 @@ fn (p mut Parser) attribute() {
fn (p mut Parser) defer_st() {
p.check(.key_defer)
p.check(.lcbr)
pos := p.cgen.lines.len
// Save everything inside the defer block to `defer_text`.
// It will be inserted before every `return`
// Emily: TODO: all variables that are used in this defer statement need to be evaluated when the block
// is defined otherwise they could change over the course of the function
// (make temps out of them)
p.genln('{')
p.statements()
p.cur_fn.defer_text.last() = p.cgen.lines[pos..].join('\n') + p.cur_fn.defer_text.last()
// Rollback p.cgen.lines
p.cgen.lines = p.cgen.lines[..pos]
p.cgen.resetln('')
}
fn (p mut Parser) check_and_register_used_imported_type(typ_name string) {
us_idx := typ_name.index('__') or { return }
us_idx := typ_name.index('__') or{
return
}
arg_mod := typ_name[..us_idx]
if p.import_table.known_alias(arg_mod) {
p.import_table.register_used_import(arg_mod)
@ -2911,7 +2936,9 @@ fn (p mut Parser) check_unused_imports() {
output += '\n * $mod_alias'
}
}
if output == '' { return }
if output == '' {
return
}
// the imports are usually at the start of the file
p.production_error_with_token_index('the following imports were never used: $output', 0)
}
@ -2921,8 +2948,7 @@ fn (p &Parser) is_expr_fn_call(start_tok_idx int) (bool, string) {
mut is_fn_call := p.tokens[start_tok_idx].tok == .lpar
if !is_fn_call {
mut i := start_tok_idx
for (p.tokens[i].tok == .dot || p.tokens[i].tok == .name) &&
p.tokens[i].lit != '_' && i < p.tokens.len {
for (p.tokens[i].tok == .dot || p.tokens[i].tok == .name) && p.tokens[i].lit != '_' && i < p.tokens.len {
expr += p.tokens[i].str()
i++
}


@ -10,10 +10,12 @@ import os
[if vfmt]
fn (scanner mut Scanner) fgen(s_ string) {
mut s := s_
if s != ' ' {
//s = s.trim_space()
}
if scanner.fmt_line_empty {
s = strings.repeat(`\t`, scanner.fmt_indent) + s.trim_left(' ')
}
scanner.fmt_lines << s
//scanner.fmt_out << s
//scanner.fmt_out.write(s)
@ -22,7 +24,7 @@ fn (scanner mut Scanner) fgen(s_ string) {
[if vfmt]
fn (scanner mut Scanner) fgenln(s_ string) {
mut s := s_
mut s := s_//.trim_space()
if scanner.fmt_line_empty && scanner.fmt_indent > 0 {
s = strings.repeat(`\t`, scanner.fmt_indent) + s
}
@ -227,7 +229,8 @@ fn (p &Parser) gen_fmt() {
}
//s := p.scanner.fmt_out.str().replace('\n\n\n', '\n').trim_space()
//s := p.scanner.fmt_out.str().trim_space()
s := p.scanner.fmt_lines.join('').trim_space()
s := p.scanner.fmt_lines.join('').trim_space().replace('\n\n\n\n', '\n\n')
.replace(' \n', '\n')
if s == '' {
return
}