more vfmt fixes

pull/2723/head
Alexander Medvednikov 2019-11-10 03:08:53 +03:00
parent b9728c7af0
commit c7f3413d70
14 changed files with 76 additions and 74 deletions

View File

@@ -10,6 +10,7 @@ fn (p mut Parser) enum_decl(no_name bool) {
 		p.next()
 	}
 	p.check(.key_enum)
+	p.fspace()
 	mut enum_name := p.check_name()
 	is_c := enum_name == 'C' && p.tok == .dot
 	if is_c {
@@ -24,6 +25,7 @@ fn (p mut Parser) enum_decl(no_name bool) {
 	if !no_name && !p.first_pass() {
 		p.cgen.typedefs << 'typedef int $enum_name;'
 	}
+	p.fspace()
 	p.check(.lcbr)
 	mut val := 0
 	mut fields := []string
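
With the two p.fspace() calls above, vfmt keeps a single space between the enum keyword and the name, and between the name and the opening brace, so a declaration is expected to come out like this illustrative, made-up snippet:

enum Color {
	red
	green
	blue
}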

View File

@@ -657,12 +657,10 @@ fn (p mut Parser) factor() string {
 	.key_false {
 		typ = 'bool'
 		p.gen('0')
-		p.fgen('false')
 	}
 	.key_true {
 		typ = 'bool'
 		p.gen('1')
-		p.fgen('true')
 	}
 	.lsbr {
 		// `[1,2,3]` or `[]` or `[20]byte`

View File

@@ -179,7 +179,7 @@ fn (p mut Parser) clear_vars() {
 // Function signatures are added to the top of the .c file in the first run.
 fn (p mut Parser) fn_decl() {
 	p.clear_vars() // clear local vars every time a new fn is started
-	defer { p.fgenln2('\n') }
+	defer { p.fgenln('\n') }
 	// If we are in the first pass, create a new function.
 	// In the second pass fetch the one we created.
 	/*
@@ -909,6 +909,7 @@ fn (p mut Parser) fn_call_args(f mut Fn) {
 		if p.tok == .comma {
 			p.gen(', ')
 			p.check(.comma)
+			p.fspace()
 		}
 	}
 	p.check(.rpar)
@@ -1132,6 +1133,7 @@ fn (p mut Parser) fn_call_args(f mut Fn) {
 		}
 		if p.tok == .comma && (!f.is_variadic || (f.is_variadic && i < f.args.len-2 )) {
 			p.check(.comma)
+			p.fspace()
			p.gen(',')
 		}
 	}

View File

@@ -11,6 +11,7 @@ import (
 // Returns typ if used as expression
 fn (p mut Parser) match_statement(is_expr bool) string {
 	p.check(.key_match)
+	p.fspace()
 	p.cgen.start_tmp()
 	typ := p.bool_expression()
 	if typ.starts_with('array_') {
@@ -22,6 +23,7 @@ fn (p mut Parser) match_statement(is_expr bool) string {
 	tmp_var := p.get_tmp()
 	p.cgen.insert_before('$typ $tmp_var = $expr;')
+	p.fspace()
 	p.check(.lcbr)
 	mut i := 0
 	mut all_cases_return := true
@@ -37,8 +39,7 @@ fn (p mut Parser) match_statement(is_expr bool) string {
 		if p.tok == .key_else {
 			p.check(.key_else)
 			if p.tok == .arrow {
-				p.warn(warn_match_arrow)
-				p.check(.arrow)
+				p.error(warn_match_arrow)
 			}
 			// unwrap match if there is only else
@@ -201,6 +202,7 @@ fn (p mut Parser) match_statement(is_expr bool) string {
 			// p.gen(')')
 		}
 		i++
+		p.fgenln('')
 	}
 	if is_expr {
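
Taken together, the changes above make vfmt print a space after the match keyword and before the opening brace, emit a newline after every branch, and turn the old => arrow syntax from a warning into a hard error. For reference, the brace-style match these hooks format looks like this hand-written, illustrative snippet:

match color {
	.red { println('red') }
	.green { println('green') }
	else { println('other') }
}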

View File

@@ -73,6 +73,7 @@ mut:
 	sql_params []string // ("select * from users where id = $1", ***"100"***)
 	sql_types []string // int, string and so on; see sql_params
 	is_vh bool // parsing .vh file (for example `const (a int)` is allowed)
+	fmt_dollar bool
 pub:
 	mod string
 }
@@ -279,7 +280,7 @@ fn (p mut Parser) parse(pass Pass) {
 		p.mod = p.check_name()
 	}
 	//
-	p.fgenln2('\n')
+	p.fgenln('\n')
 	p.cgen.nogen = false
 	if p.pref.build_mode == .build_module && p.mod != p.v.mod {
@@ -315,22 +316,11 @@ fn (p mut Parser) parse(pass Pass) {
 	for {
 		match p.tok {
 		.key_import {
-			if p.peek() == .key_const {
-				p.const_decl()
-			}
-			else {
-				// TODO remove imported consts from the language
-				p.imports()
-				if p.tok != .key_import {
-					p.fgenln('')
-				}
-			}
+			p.imports()
 		}
 		.key_enum {
 			next := p.peek()
 			if next == .name {
-				p.fgen('enum ')
-				p.fgen(' ')
 				p.enum_decl(false)
 			}
 			else if next == .lcbr && p.pref.translated {
@@ -410,10 +400,10 @@ fn (p mut Parser) parse(pass Pass) {
 			if !p.cgen.nogen {
 				p.cgen.consts << g
 			}
-			p.fgenln2('')
+			p.fgenln('')
 			if p.tok != .key_global {
 				// An extra empty line to separate a block of globals
-				p.fgenln2('')
+				p.fgenln('')
 			}
 		}
 		.eof {
@@ -469,7 +459,6 @@ fn (p mut Parser) parse(pass Pass) {
 }
 fn (p mut Parser) imports() {
-	p.fgenln2('\n')
 	p.check(.key_import)
 	// `import ()`
 	if p.tok == .lpar {
@@ -482,7 +471,10 @@ fn (p mut Parser) imports() {
 	}
 	// `import foo`
 	p.import_statement()
-	p.fgenln2('')
+	p.fgenln('')
+	if p.tok != .key_import {
+		p.fgenln('')
+	}
 }
 fn (p mut Parser) import_statement() {
@@ -515,7 +507,6 @@ fn (p mut Parser) import_statement() {
 	}
 	// add import to file scope import table
 	p.register_import_alias(mod_alias, mod, import_tok_idx)
-	p.fgenln2('')
 	// Make sure there are no duplicate imports
 	if mod in p.table.imports {
 		return
@@ -675,7 +666,7 @@ fn (p mut Parser) interface_method(field_name, receiver string) &Fn {
 		method.typ = 'void'
 	} else {
 		method.typ = p.get_type()// method return type
-		p.fspace()
+		//p.fspace()
 		p.fgenln('')
 	}
 	return method
@@ -878,7 +869,6 @@ fn (p mut Parser) get_type() string {
 		nr_muls++
 		p.check(.amp)
 	}
 	// Generic type check
 	ti := p.cur_fn.dispatch_of.inst
 	if p.lit in ti.keys() {
@@ -887,12 +877,6 @@ fn (p mut Parser) get_type() string {
 	} else {
 		typ += p.lit
 	}
-	if !p.is_struct_init {
-		// Otherwise we get `foo := FooFoo{` because `Foo` was already
-		// generated in name_expr()
-		p.fgen(p.lit)
-	}
 	// C.Struct import
 	if p.lit == 'C' && p.peek() == .dot {
 		p.next()
@@ -1017,7 +1001,7 @@ fn (p mut Parser) statements_no_rcbr() string {
 		// println('last st typ=$last_st_typ')
 		if !p.inside_if_expr {
 			p.genln('')// // end st tok= ${p.strtok()}')
-			p.fgenln2('')
+			p.fgenln('')
 		}
 		i++
 		if i > 50000 {
@@ -1138,7 +1122,7 @@ fn (p mut Parser) statement(add_semi bool) string {
 	}
 	.key_goto {
 		p.check(.key_goto)
-		p.fgen(' ')
+		p.fspace()
 		label := p.check_name()
 		p.genln('goto $label;')
 		return ''
@@ -1683,7 +1667,6 @@ fn (p mut Parser) dot(str_typ_ string, method_ph int) string {
 	}
 	fname_tidx := p.cur_tok_index()
-	p.fgen(field_name)
 	//p.log('dot() field_name=$field_name typ=$str_typ')
 	//if p.fileis('main.v') {
 	//println('dot() field_name=$field_name typ=$str_typ prev_tok=${prev_tok.str()}')
@@ -1845,7 +1828,6 @@ fn (p mut Parser) index_expr(typ_ string, fn_ph int) string {
 	// Get element type (set `typ` to it)
 	if is_str {
 		typ = 'byte'
-		p.fgen('[')
 		// Direct faster access to .str[i] in builtin modules
 		if p.builtin_mod {
 			p.gen('.str[')
@@ -2028,7 +2010,7 @@ fn (p mut Parser) indot_expr() string {
 	// `a in [1, 2, 3]`
 	// `key in map`
 	if p.tok == .key_in {
-		p.fgen(' ')
+		p.fspace()
 		p.check(.key_in)
 		p.expected_type = typ // this allows `foo in [.val1, .val2, .val3]`
 		if p.tok == .lsbr {
@@ -2037,7 +2019,7 @@ fn (p mut Parser) indot_expr() string {
 			p.in_optimization(typ, ph)
 			return 'bool'
 		}
-		p.fgen(' ')
+		p.fspace()
 		p.gen('), ')
 		arr_typ := p.expression()
 		is_map := arr_typ.starts_with('map_')
@@ -2121,7 +2103,6 @@ fn (p mut Parser) string_expr() {
 	str := p.lit
 	// No ${}, just return a simple string
 	if p.peek() != .dollar || is_raw {
-		p.fgen("'$str'")
 		f := if is_raw { cescaped_path(str) } else { format_str(str) }
 		// `C.puts('hi')` => `puts("hi");`
 		/*
@@ -2149,11 +2130,9 @@ fn (p mut Parser) string_expr() {
 	p.is_alloc = true // $ interpolation means there's allocation
 	mut args := '"'
 	mut format := '"'
-	p.fgen('\'')
 	mut complex_inter := false // for vfmt
 	for p.tok == .str {
 		// Add the string between %d's
-		p.fgen(p.lit)
 		p.lit = p.lit.replace('%', '%%')
 		format += format_str(p.lit)
 		p.next()// skip $
@@ -2238,7 +2217,7 @@ fn (p mut Parser) string_expr() {
 	if complex_inter {
 		p.fgen('}')
 	}
-	p.fgen('\'')
+	//p.fgen('\'')
 	// println("hello %d", num) optimization.
 	if p.cgen.nogen {
 		return
@@ -2545,7 +2524,7 @@ fn (p mut Parser) if_st(is_expr bool, elif_depth int) string {
 	p.returns = false
 	if p.tok == .key_else {
 		if !p.inside_if_expr {
-			p.fgenln2('')
+			p.fgenln('')
 		}
 		p.check(.key_else)
 		p.fspace()
@@ -2733,7 +2712,7 @@ fn (p mut Parser) return_st() {
 			p.gen('return')
 		}
 	}
-	p.fgenln('//ret')
+	//p.fgenln('//ret')
 	p.returns = true
 }
@@ -2762,6 +2741,7 @@ fn (p &Parser) prepend_mod(name string) string {
 fn (p mut Parser) go_statement() {
 	p.check(.key_go)
+	p.fspace()
 	mut gotoken_idx := p.cur_tok_index()
 	// TODO copypasta of name_expr() ?
 	if p.peek() == .dot {
@@ -2880,7 +2860,7 @@ fn (p mut Parser) attribute() {
 		p.attr = p.attr + ':' + p.check_name()
 	}
 	p.check(.rsbr)
-	p.fgenln2('')
+	p.fgenln('')
 	if p.tok == .key_fn || (p.tok == .key_pub && p.peek() == .key_fn) {
 		p.fn_decl()
 		p.attr = ''

View File

@@ -97,11 +97,6 @@ fn (p mut Parser) get_type2() Type {
 		p.check(.amp)
 	}
 	typ += p.lit
-	if !p.is_struct_init {
-		// Otherwise we get `foo := FooFoo{` because `Foo` was already
-		// generated in name_expr()
-		p.fgen(p.lit)
-	}
 	// C.Struct import
 	if p.lit == 'C' && p.peek() == .dot {
 		p.next()

View File

@@ -140,7 +140,7 @@ fn (p mut Parser) struct_decl() {
 			p.check(.colon)
 		}
 		p.fmt_inc()
-		p.fgenln2('')
+		p.fgenln('')
 	}
 	if p.tok == .key_mut {
 		if is_mut {
@@ -153,7 +153,7 @@ fn (p mut Parser) struct_decl() {
 			p.check(.colon)
 		}
 		p.fmt_inc()
-		p.fgenln2('')
+		p.fgenln('')
 	}
 	// if is_pub {
 	// }
@@ -221,13 +221,13 @@ fn (p mut Parser) struct_decl() {
 		if p.first_pass() {
 			p.table.add_field(typ.name, field_name, field_type, is_mut, attr, access_mod)
 		}
-		p.fgenln2('') // newline between struct fields
+		p.fgenln('') // newline between struct fields
 	}
 	p.check(.rcbr)
 	if !is_c && !did_gen_something && p.first_pass() {
 		p.table.add_field(typ.name, '', 'EMPTY_STRUCT_DECLARATION', false, '', .private)
 	}
-	p.fgenln2('\n')
+	p.fgenln('\n')
 }
 // `User{ foo: bar }`
@@ -273,7 +273,7 @@ fn (p mut Parser) struct_init(typ string) string {
 		}
 		p.fspace()
 		did_gen_something = true
-		p.fgenln2('') // newline between struct fields
+		p.fgenln('') // newline between struct fields
 	}
 	// If we already set some fields, need to prepend a comma
 	if t.fields.len != inited_fields.len && inited_fields.len > 0 {
@@ -310,7 +310,6 @@ fn (p mut Parser) struct_init(typ string) string {
 			p.gen(',')
 		}
 		did_gen_something = true
-		p.fgenln2('') // newline between struct fields
 		}
 	}
 }

View File

@@ -11,4 +11,4 @@ fn get_st() MyStruct {
 fn main() {
 	s := get_st()
 	println(s)
 }

View File

@@ -26,4 +26,4 @@ fn test_all_v_prod_files() {
 		bmark.stop()
 		println( bmark.total_message('total time spent running PROD files') )
 	}
 }

View File

@@ -66,4 +66,4 @@ fn test_shift_operators() {
 		assert e == b
 		e >>= u64(i)
 		assert e == a
 	}

View File

@@ -30,9 +30,6 @@ fn (scanner mut Scanner) fgenln(s_ string) {
 [if vfmt]
 fn (p mut Parser) fgen(s string) {
-}
-[if vfmt]
-fn (p mut Parser) fgen2(s string) {
 	if p.pass != .main {
 		return
 	}
@@ -44,16 +41,12 @@ fn (p mut Parser) fspace() {
 	if p.first_pass() {
 		return
 	}
-	p.fgen2(' ')
+	p.fgen(' ')
 }
 [if vfmt]
 fn (p mut Parser) fgenln(s string) {
-}
-[if vfmt]
-fn (p mut Parser) fgenln2(s string) {
 	if p.pass != .main {
 		return
 	}
@@ -94,18 +87,30 @@ fn (p mut Parser) fnext() {
 	if p.tok == .eof {
 		return
 	}
-	if p.tok == .rcbr && !p.inside_if_expr {
+	if p.tok == .rcbr && !p.inside_if_expr && p.prev_tok != .lcbr {
 		p.fmt_dec()
 	}
-	p.fgen2(p.strtok())
+	mut s := p.strtok()
+	// Need to reconstruct an interpolated string from multiple string and
+	// dollar tokens.
+	// 'abc $name zxc' => ['abc', $, name, 'zxc'] => 'abc'$name'zxc'
+	// need to remove the extra '
+	if p.tok == .str && p.peek() == .dollar {
+		s = s[..s.len - 1]
+		p.fmt_dollar = true
+	}
+	else if p.tok == .str && p.fmt_dollar {
+		s = s[1..]
+		p.fmt_dollar = false
+	}
+	p.fgen(s)
 	// vfmt: increase indentation on `{` unless it's `{}`
-	if p.tok == .lcbr && !p.inside_if_expr { //&& p.scanner.pos + 1 < p.scanner.text.len && p.scanner.text[p.scanner.pos + 1] != `}` {
-		p.fgenln2('')
+	if p.tok == .lcbr && !p.inside_if_expr && p.peek() != .rcbr {
+		p.fgenln('')
 		p.fmt_inc()
 	}
 }
 [if vfmt]
 fn (p mut Parser) gen_fmt() {
 	if p.pass != .main {
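
The new fmt_dollar flag lets fnext() rebuild an interpolated string literal that the scanner delivers as separate string and dollar tokens, by trimming the extra quote on each side of the interpolation. Below is a standalone, hypothetical sketch of the same quote-stripping idea (not part of the commit; the helper name and the token representation are assumptions for illustration only):

// join_interpolated mimics the fmt_dollar logic on a plain token list:
// drop a string piece's closing quote when a $ follows it, and drop the
// opening quote of the piece that continues the interpolation.
fn join_interpolated(tokens []string) string {
	mut out := ''
	mut after_dollar := false
	for i, t in tokens {
		mut s := t
		if t == r'$' {
			out += r'$'
			after_dollar = true
			continue
		}
		is_str := s.starts_with("'")
		if is_str && i + 1 < tokens.len && tokens[i + 1] == r'$' {
			s = s[..s.len - 1] // remove the extra closing '
		}
		if is_str && after_dollar {
			s = s[1..] // remove the extra opening '
			after_dollar = false
		}
		out += s
	}
	return out
}

For the example in the comment above, the pieces ['abc ', $, name, ' zxc'] (with the string pieces still carrying their quotes) are rejoined into the single literal 'abc $name zxc' instead of 'abc '$name' zxc'.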

View File

@@ -284,4 +284,4 @@ pub fn exec(cmd string) ?Result {
 		output: read_data
 		exit_code: exit_code
 	}
 }

View File

@@ -40,4 +40,4 @@ mut:
 	previous_lines []ustring
 	search_index int
 	is_tty bool
 }

View File

@@ -1,5 +1,5 @@
 import strings
 fn test_sb() {
 	mut sb := strings.Builder{}
 	sb.write('hi')
@@ -13,3 +13,22 @@ fn test_sb() {
 	assert sb.str() == 'ab'
 }
+const (
+	n = 100000
+)
+fn test_big_sb() {
+	mut sb := strings.new_builder(100)
+	for i in 0..n {
+		sb.writeln(i.str())
+	}
+	s := sb.str()
+	lines := s.split_into_lines()
+	assert lines.len == n
+	assert lines[0] == '0'
+	assert lines[1] == '1'
+	assert lines[777] == '777'
+	assert lines[98765] == '98765'
+}