lots of vfmt fixes

pull/3130/head
Alexander Medvednikov 2019-12-17 17:28:25 +03:00
parent 67cf7f18e6
commit 53b334145b
8 changed files with 100 additions and 49 deletions

View File

@@ -51,6 +51,9 @@ fn (p mut Parser) error_with_position(s string, sp ScannerPos) {
 }
 
 fn (p mut Parser) warn_with_position(s string, sp ScannerPos) {
+	if p.scanner.is_fmt {
+		return
+	}
 	// on a warning, restore the scanner state after printing the warning:
 	cpos := p.scanner.get_scanner_pos()
 	e := normalized_error( s )

View File

@@ -195,7 +195,7 @@ fn (p mut Parser) clear_vars() {
 // Function signatures are added to the top of the .c file in the first run.
 fn (p mut Parser) fn_decl() {
 	p.clear_vars() // clear local vars every time a new fn is started
-	defer { p.fgenln('\n') }
+	defer { p.fgen_nl() p.fgen_nl() }
 	fn_start_idx := p.cur_tok_index()
 	// If we are in the first pass, create a new function.
 	// In the second pass fetch the one we created.
@@ -477,7 +477,7 @@ fn (p mut Parser) fn_decl() {
 		p.fgen_nl()
 	}
 	if is_c {
-		p.fgenln('\n')
+		p.fgen_nl()
 	}
 	// Register the method
 	if receiver_typ != '' {

View File

@@ -6,9 +6,11 @@ module compiler
 fn (p mut Parser) for_st() {
 	p.check(.key_for)
-	p.fspace()
 	p.for_expr_cnt++
 	next_tok := p.peek()
+	if p.tok != .lcbr {
+		p.fspace()
+	}
 	//debug := p.scanner.file_path.contains('r_draw')
 	p.open_scope()
 	mut label := 0
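
Note: this hunk makes the space after `for` conditional, presumably to avoid printing a doubled space before `{` in a bare `for {` loop while keeping a single space before a condition. A minimal standalone V example of the two loop forms involved (illustration only, not compiler code):

fn main() {
	mut i := 0
	// bare loop: `for` is followed directly by `{`, so no space token is emitted after the keyword
	for {
		i++
		if i > 3 {
			break
		}
	}
	// conditional loop: a single space separates `for` from its condition
	for i < 10 {
		i++
	}
	println(i)
}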

View File

@@ -20,7 +20,6 @@ struct Parser {
 	file_pcguard string
 	v &V
 	pref &Preferences // Preferences shared from V struct
-	kek string
 mut:
 	scanner &Scanner
 	tokens []Token
@@ -44,7 +43,7 @@ mut:
 	tmp_cnt int
 	builtin_mod bool
 	inside_if_expr bool
-	inside_unwrapping_match_statement bool
+	//inside_unwrapping_match bool
 	inside_return_expr bool
 	inside_unsafe bool
 	is_struct_init bool
@@ -60,7 +59,7 @@ mut:
 	returns bool
 	vroot string
 	is_c_struct_init bool
-	is_empty_c_struct_init bool
+	is_empty_c_struct_init bool // for `foo := C.Foo{}` => `Foo foo;`
 	is_c_fn_call bool
 	can_chash bool
 	attr string
@@ -112,9 +111,11 @@ struct ParserState {
 // new parser from string. unique id specified in `id`.
 // tip: use a hashing function to auto generate `id` from `text` eg. sha1.hexhash(text)
 fn (v mut V) new_parser_from_string(text string) Parser {
+	// line comment 1
 	mut p := v.new_parser(new_scanner(text))
-	p.scan_tokens()
+	p.scan_tokens() // same line comment
 	return p
+	// final comment
 }
 
 fn (v mut V) reset_cgen_file_line_parameters(){
@@ -277,6 +278,7 @@ fn (p &Parser) peek() TokenKind {
 }
 
 fn (p &Parser) log(s string) {
+	123 // vfmt
 /*
 	if !p.pref.is_verbose {
 		return
@@ -738,6 +740,8 @@ fn (p mut Parser) const_decl() {
 	p.fmt_dec()
 	p.check(.rpar)
 	p.inside_const = false
+	p.fgen_nl()
+	p.fgen_nl()
 }
 
 // `type myint int`
@@ -1131,6 +1135,7 @@ fn (p mut Parser) statements_no_rcbr() string {
 		// println('last st typ=$last_st_typ')
 		if !p.inside_if_expr {
 			//p.genln('')// // end st tok= ${p.strtok()}')
+			//p.fgenln('// ST')
 			p.fgen_nl()
 		}
 		i++

View File

@@ -6,7 +6,7 @@ module compiler
 import (
 	os
-	strings
+	//strings
 )
 
 const (
@@ -29,10 +29,13 @@ mut:
 	debug bool
 	line_comment string
 	started bool
-	// vfmt fields
-	fmt_out strings.Builder
+	// vfmt fields TODO move to a separate struct
+	//fmt_out strings.Builder
+	fmt_lines []string
+	//fmt_line string
 	fmt_indent int
 	fmt_line_empty bool
+	//fmt_needs_nl bool
 	prev_tok TokenKind
 	fn_name string // needed for @FN
 	should_print_line_on_error bool
@@ -79,7 +82,7 @@ fn new_scanner_file(file_path string) &Scanner {
 fn new_scanner(text string) &Scanner {
 	return &Scanner {
 		text: text
-		fmt_out: strings.new_builder(1000)
+		//fmt_out: strings.new_builder(1000)
 		should_print_line_on_error: true
 		should_print_errors_in_color: true
 		should_print_relative_paths_on_error: true
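
Note: the scanner's write-only `strings.Builder` output (`fmt_out`) is being replaced by a plain `fmt_lines []string` buffer, which the vfmt code can inspect later (see the `fmt_lines.last() == '\n'` check further down) and join once at the end. A rough standalone sketch of that buffering pattern, using hypothetical names and present-day V syntax:

fn main() {
	mut fmt_lines := []string{}
	fmt_lines << 'fn main() {'
	fmt_lines << '\n'
	fmt_lines << '\tprintln(1)'
	fmt_lines << '\n'
	fmt_lines << '}'
	fmt_lines << '\n'
	// unlike a Builder, a []string buffer can still be examined or trimmed here,
	// and the final output is assembled in one place, as gen_fmt does further down
	s := fmt_lines.join('').trim_space()
	println(s)
}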

View File

@@ -4,6 +4,10 @@
 module compiler
 
+import (
+	strings
+)
+
 // also unions and interfaces
 fn (p mut Parser) struct_decl() {
 	is_pub := p.tok == .key_pub
@@ -113,16 +117,7 @@ fn (p mut Parser) struct_decl() {
 	//mut is_pub_field := false
 	//mut is_mut := false
 	mut names := []string// to avoid dup names TODO alloc perf
-	mut fmt_max_len := 0
-	// TODO why is typ.fields == 0?
-	if p.scanner.is_fmt && p.pass == .main {
-		for field in typ.fields {
-			println(field.name)
-			if field.name.len > fmt_max_len {
-				fmt_max_len = field.name.len
-			}
-		}
-	}
+	mut fmt_max_len := p.table.max_field_len[name]
 	//println('fmt max len = $max_len nrfields=$typ.fields.len pass=$p.pass')
 	if !is_ph && p.first_pass() {
 		p.table.register_type(typ)
@@ -135,8 +130,10 @@ fn (p mut Parser) struct_decl() {
 		i++
 		mut new_access_mod := access_mod
 		if p.tok == .key_pub {
+			p.fmt_dec()
 			p.check(.key_pub)
 			if p.tok == .key_mut {
+				p.fspace()
 				new_access_mod = .public_mut
 				p.next() // skip `mut`
 			} else {
@@ -145,7 +142,6 @@ fn (p mut Parser) struct_decl() {
 			if new_access_mod in used {
 				p.error('structs can only have one `pub:`/`pub mut:`, all public fields have to be grouped')
 			}
-			p.fmt_dec()
 			p.check(.colon)
 			p.fmt_inc()
 			p.fgen_nl()
@@ -183,15 +179,16 @@ fn (p mut Parser) struct_decl() {
 		// Check if reserved name
 		field_name_token_idx := p.cur_tok_index()
 		field_name := if name != 'Option' && !is_interface { p.table.var_cgen_name(p.check_name()) } else { p.check_name() }
-		/*
-		if !p.first_pass() {
+		if p.pass == .main {
 			p.fgen(strings.repeat(` `, fmt_max_len - field_name.len))
 		}
-		*/
 		// Check dups
 		if field_name in names {
 			p.error('duplicate field `$field_name`')
 		}
+		if p.scanner.is_fmt && p.pass == .decl && field_name.len > fmt_max_len {
+			fmt_max_len = field_name.len
+		}
 		if !is_c && p.mod != 'os' && contains_capital(field_name) {
 			p.error('struct fields cannot contain uppercase letters, use snake_case instead')
 		}
@@ -265,11 +262,17 @@ fn (p mut Parser) struct_decl() {
 		}
 		p.fgen_nl() // newline between struct fields
 	}
+	if p.scanner.is_fmt && p.pass == .decl {
+		p.table.max_field_len[typ.name] = fmt_max_len
+	}
+	//p.fgen_require_nl()
 	p.check(.rcbr)
 	if !is_c && !did_gen_something && p.first_pass() {
 		p.table.add_field(typ.name, '', 'EMPTY_STRUCT_DECLARATION', false, '', .private)
 	}
-	p.fgenln('\n')
+	p.fgen_nl()
+	p.fgen_nl()
+	//p.fgenln('//kek')
 }
 
 // `User{ foo: bar }`
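
Note: field alignment is now computed across compiler passes. During the `.decl` pass, `struct_decl` records the longest field name per struct in `p.table.max_field_len`, and during the `.main` pass each field name is padded with `fmt_max_len - field_name.len` spaces so the types line up. A self-contained sketch of the same two-step idea, with hypothetical `Field`/`align_fields` helpers in present-day V syntax (illustration only, not the compiler's code):

struct Field {
	name string
	typ  string
}

// step 1: find the longest field name, as the .decl pass does for each struct
fn max_name_len(fields []Field) int {
	mut longest := 0
	for f in fields {
		if f.name.len > longest {
			longest = f.name.len
		}
	}
	return longest
}

// step 2: pad every name up to that width before printing its type,
// which is what the strings.repeat() call in the .main pass does above
fn align_fields(fields []Field) string {
	longest := max_name_len(fields)
	mut out := ''
	for f in fields {
		out += '\t' + f.name + ' '.repeat(longest - f.name.len + 1) + f.typ + '\n'
	}
	return out
}

fn main() {
	fields := [
		Field{name: 'v', typ: '&V'},
		Field{name: 'file_pcguard', typ: 'string'},
	]
	println('struct Parser {\n' + align_fields(fields) + '}')
}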

View File

@@ -20,6 +20,7 @@ pub mut:
 	varg_access []VargAccess
 	//enum_vals map[string][]string
 	//names []Name
+	max_field_len map[string]int // for vfmt: max_field_len['Parser'] == 12
 }
 
 struct VargAccess {
@@ -120,6 +121,7 @@ pub mut:
 	is_placeholder bool
 	gen_str bool // needs `.str()` method generation
 	is_flag bool // enum bitfield flag
+	//max_field_len int
 }
 
 struct TypeNode {

View File

@@ -11,11 +11,12 @@ import os
 fn (scanner mut Scanner) fgen(s_ string) {
 	mut s := s_
 	if scanner.fmt_line_empty {
-		s = strings.repeat(`\t`, scanner.fmt_indent) + s
+		s = strings.repeat(`\t`, scanner.fmt_indent) + s.trim_left(' ')
 	}
+	scanner.fmt_lines << s
 	//scanner.fmt_out << s
-	scanner.fmt_out.write(s)
+	//scanner.fmt_out.write(s)
 	scanner.fmt_line_empty = false
 }
@@ -25,19 +26,13 @@ fn (scanner mut Scanner) fgenln(s_ string) {
 	if scanner.fmt_line_empty && scanner.fmt_indent > 0 {
 		s = strings.repeat(`\t`, scanner.fmt_indent) + s
 	}
-	//scanner.fmt_out << s
-	//scanner.fmt_out << '\n'
-	scanner.fmt_out.writeln(s)
+	scanner.fmt_lines << s
+	//scanner.fmt_lines << '//!'
+	scanner.fmt_lines << '\n'
+	//scanner.fmt_out.writeln(s)
 	scanner.fmt_line_empty = true
 }
 
-[if vfmt]
-fn (scanner mut Scanner) fgen_nl() {
-	scanner.fmt_out.writeln('')
-	scanner.fmt_line_empty = true
-}
-
 [if vfmt]
 fn (p mut Parser) fgen(s string) {
 	if p.pass != .main {
@@ -68,13 +63,41 @@ fn (p mut Parser) fgen_nl() {
 	if p.pass != .main {
 		return
 	}
-	println(p.tok)
-	if p.prev_tok == .line_comment {
+	//println(p.tok)
+	// Don't insert a newline after a comment
+	/*
+	if p.token_idx>0 && p.tokens[p.token_idx-1].tok == .line_comment &&
+		p.tokens[p.token_idx].tok != .line_comment {
+		p.scanner.fgenln('notin')
 		return
 	}
+	*/
+	///if p.token_idx > 0 && p.token_idx < p.tokens.len &&
+	// Previous token is a comment, and NL has already been generated?
+	// Don't generate a second NL.
+	if p.scanner.fmt_lines.len > 0 && p.scanner.fmt_lines.last() == '\n' &&
+		p.tokens[p.token_idx-2].tok == .line_comment
+	{
+		//if p.fileis('parser.v') {
+		//println(p.scanner.line_nr.str() + ' ' +p.tokens[p.token_idx-2].str())
+		//}
+		return
+	}
 	p.scanner.fgen_nl()
 }
 
+[if vfmt]
+fn (scanner mut Scanner) fgen_nl() {
+	//scanner.fmt_lines << ' fgen_nl'
+	//scanner.fmt_lines << '//fgen_nl\n'
+	scanner.fmt_lines << '\n'
+	//scanner.fmt_out.writeln('')
+	scanner.fmt_line_empty = true
+}
+
 /*
 fn (p mut Parser) peek() TokenKind {
 	for {
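
Note: `fgen_nl` now checks the tail of `scanner.fmt_lines` before writing: when the previous token was a `//` comment and a newline has already been emitted, the second newline is skipped, which keeps blank lines from piling up after trailing comments. A reduced standalone sketch of that guard, with hypothetical names in present-day V syntax:

struct Gen {
mut:
	fmt_lines        []string
	prev_was_comment bool
}

fn (mut g Gen) gen_nl() {
	// the comment handling already emitted this newline, so do not add another one
	if g.prev_was_comment && g.fmt_lines.len > 0 && g.fmt_lines.last() == '\n' {
		return
	}
	g.fmt_lines << '\n'
}

fn main() {
	mut g := Gen{}
	g.fmt_lines << 'x := 1 // trailing comment'
	g.fmt_lines << '\n'
	g.prev_was_comment = true
	g.gen_nl() // skipped: a newline is already there after the comment
	g.prev_was_comment = false
	g.fmt_lines << 'println(x)'
	g.fmt_lines << '\n'
	print(g.fmt_lines.join(''))
}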
@@ -146,16 +169,22 @@ fn (p mut Parser) fnext() {
 		if tok != .line_comment && tok != .mline_comment {
 			break
 		}
-		comment_token := p.tokens[p.token_idx]
-		next := p.tokens[p.token_idx+1]
-		comment_on_new_line := p.token_idx == 0 ||
-			comment_token.line_nr > p.tokens[p.token_idx - 1].line_nr
+		comment_token := p.tokens[i]
+		next := p.tokens[i+1]
+		comment_on_new_line := i == 0 ||
+			comment_token.line_nr > p.tokens[i-1].line_nr
 		//prev_token := p.tokens[p.token_idx - 1]
 		comment := comment_token.lit
+		// Newline before the comment, but not between two // comments,
+		// and not right after `{`, there's already a newline there
 		if i > 0 && p.tokens[i-1].tok != .line_comment &&
+			p.tokens[i-1].tok != .lcbr &&
 			comment_token.line_nr > p.tokens[i-1].line_nr {
 			p.fgen_nl()
 		}
+		if i > 0 && p.tokens[i-1].tok == .rcbr && p.scanner.fmt_indent == 0 {
+			p.fgen_nl()
+		}
 		if tok == .line_comment {
 			if !comment_on_new_line { //prev_token.line_nr < comment_token.line_nr {
 				p.fgen(' ')
@@ -170,9 +199,12 @@ fn (p mut Parser) fnext() {
 			*/
 		} else {
+			// /**/ comment
 			p.fgen(comment)
 		}
-		if next.tok == .line_comment && comment_token.line_nr < next.line_nr {
+		//if next.tok == .line_comment && comment_token.line_nr < next.line_nr {
+		if comment_token.line_nr < next.line_nr {
+			//p.fgenln('nextcm')
 			p.fgen_nl()
 		}
 		p.token_idx++
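
Note: the comment handling in `fnext` now places newlines as follows: a newline goes before a comment that starts its own source line, but not between two consecutive `//` comments and not directly after `{` (a newline already exists there), and a blank line is emitted before a comment that follows a top-level `}`. Illustrative V input showing the layouts these rules are meant to preserve (plain example code, not compiler code):

// top-level comment before a declaration keeps its own line
fn main() {
	// comment directly after `{`: no extra newline is added before it
	// second consecutive // comment: no blank line between the two
	x := 1 // trailing comment stays on the statement's line
	println(x)
}

// a comment after a top-level `}` gets a blank line before it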
@@ -186,7 +218,7 @@ fn (p mut Parser) fnext() {
 [if vfmt]
-fn (p mut Parser) gen_fmt() {
+fn (p &Parser) gen_fmt() {
 	if p.pass != .main {
 		return
 	}
@@ -194,12 +226,13 @@ fn (p mut Parser) gen_fmt() {
 		return
 	}
 	//s := p.scanner.fmt_out.str().replace('\n\n\n', '\n').trim_space()
-	s := p.scanner.fmt_out.str().trim_space()
-	//s := p.scanner.fmt_out.join('').trim_space()
+	//s := p.scanner.fmt_out.str().trim_space()
+	s := p.scanner.fmt_lines.join('').trim_space()
 	if s == '' {
 		return
 	}
-	println('generating ${p.file_name}.v')
+	//if !p.file_name.contains('parser.v') {return}
+	println('generating ${p.file_name}')
 	mut out := os.create('/var/tmp/fmt/' + p.file_name) or {
 		verror('failed to create fmt.v')
 		return