fmt: comments

pull/3883/head
Alexander Medvednikov 2020-02-29 17:51:35 +01:00
parent 5a5f4ce99a
commit d5faf36aa9
9 changed files with 110 additions and 41 deletions

View File

@@ -153,7 +153,7 @@ fn (foptions &FormatOptions) format_file(file string) {
 		eprintln('vfmt2 running fmt.fmt over file: $file')
 	}
 	table := table.new_table()
-	file_ast := parser.parse_file(file, table)
+	file_ast := parser.parse_file(file, table, .parse_comments)
 	formatted_content := fmt.fmt(file_ast, table)
 	file_name := filepath.filename(file)
 	vfmt_output_path := filepath.join(os.tmpdir(), 'vfmt_' + file_name)
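
For context, the call-site pattern this commit establishes looks roughly like the sketch below. Only the new_table, parse_file and fmt.fmt calls come from the hunks in this diff; the import paths and the wrapper function are illustrative assumptions, not code from this commit.

import v.table
import v.parser
import v.fmt

// Illustrative wrapper: format one file, keeping comments in the AST
// so the formatter can re-emit them.
fn format_source_file(path string) string {
	tbl := table.new_table()
	// .parse_comments: the scanner returns comment tokens and the parser keeps them
	// .skip_comments: the previous behaviour, comments are dropped while scanning
	file_ast := parser.parse_file(path, tbl, .parse_comments)
	return fmt.fmt(file_ast, tbl)
}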

View File

@@ -22,9 +22,10 @@ struct Parser {
 	// the #include directives in the parsed .v file
 	file_pcguard string
 	v &V
-	pref &pref.Preferences // Preferences shared from V struct
+	pref &pref.Preferences
 mut:
 	scanner &Scanner
+	// Preferences shared from V struct
 	tokens []Token
 	token_idx int
 	prev_stuck_token_idx int
@@ -261,6 +262,7 @@ fn (p mut Parser) next() {
 	// (only when vfmt compile time flag is enabled, otherwise this function
 	// is not even generated)
 	p.fnext()
+	//
 	p.prev_tok2 = p.prev_tok
 	p.prev_tok = p.tok
 	p.scanner.prev_tok = p.tok

View File

@@ -14,6 +14,9 @@ import (

 pub fn (node &FnDecl) str(t &table.Table) string {
 	mut f := strings.new_builder(30)
+	if node.is_pub {
+		f.write('pub ')
+	}
 	mut receiver := ''
 	if node.is_method {
 		sym := t.get_type_symbol(node.receiver.typ)
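
The same builder pattern, pulled out of the compiler as a tiny standalone illustration of what the new is_pub branch produces. The strings.Builder calls mirror the ones in the hunk above, which use the V API of this era; on current V, write_string would be used instead of write.

import strings

fn decl_str(is_pub bool, name string) string {
	mut f := strings.new_builder(30)
	if is_pub {
		f.write('pub ') // same call as in FnDecl.str above
	}
	f.write('fn ${name}()')
	return f.str()
}

// decl_str(true, 'foo') == 'pub fn foo()'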

View File

@@ -45,7 +45,7 @@ pub fn doc(mod string, table &table.Table) string {
 		if file.ends_with('_test.v') || file.ends_with('_windows.v') || file.ends_with('_macos.v') {
 			continue
 		}
-		file_ast := parser.parse_file(filepath.join(path,file), table)
+		file_ast := parser.parse_file(filepath.join(path,file), table, .skip_comments)
 		d.stmts << file_ast.stmts
 	}
 	d.print_fns()

View File

@@ -195,6 +195,9 @@ fn (f mut Fmt) stmt(node ast.Stmt) {
 			f.stmts(it.stmts)
 			f.writeln('}')
 		}
+		ast.LineComment {
+			f.writeln('// $it.text')
+		}
 		ast.Return {
 			f.write('return')
 			// multiple returns
@@ -244,6 +247,9 @@ fn (f mut Fmt) stmt(node ast.Stmt) {
 }

 fn (f mut Fmt) struct_decl(node ast.StructDecl) {
+	if node.is_pub {
+		f.write('pub ')
+	}
 	f.writeln('struct $node.name {')
 	mut max := 0
 	for field in node.fields {
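
Combined with the parser and scanner changes further down, the formatter can now round-trip source like the following illustrative snippet (not a test file from this commit): top-level // comments are kept as ast.LineComment nodes and re-emitted, and pub on structs and functions is preserved.

// a top-level comment like this one now survives vfmt
pub struct Point {
	x int
	y int
}

// comments in statement position are handled via p.line_comment() as well
pub fn (p Point) sum() int {
	// intermediate note
	return p.x + p.y
}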

View File

@@ -23,7 +23,9 @@ fn test_fmt() {
 	}
 	vroot := filepath.dir(vexe)
 	tmpfolder := os.tmpdir()
-	diff_cmd := find_working_diff_command() or { '' }
+	diff_cmd := find_working_diff_command() or {
+		''
+	}
 	mut fmt_bench := benchmark.new_benchmark()
 	// Lookup the existing test _input.vv files:
 	input_files := os.walk_ext('$vroot/vlib/v/fmt/tests', '_input.vv')
@@ -44,7 +46,7 @@ fn test_fmt() {
 			continue
 		}
 		table := table.new_table()
-		file_ast := parser.parse_file(ipath, table)
+		file_ast := parser.parse_file(ipath, table, .skip_comments)
 		result_ocontent := fmt.fmt(file_ast, table)
 		if expected_ocontent != result_ocontent {
 			fmt_bench.fail()

View File

@@ -46,11 +46,12 @@ mut:
 	expected_type table.Type
 	scope &ast.Scope
 	imports map[string]string
+	ast_imports []ast.Import
 }

 // for tests
 pub fn parse_stmt(text string, table &table.Table, scope &ast.Scope) ast.Stmt {
-	s := scanner.new_scanner(text)
+	s := scanner.new_scanner(text, .skip_comments)
 	mut p := Parser{
 		scanner: s
 		table: table
@@ -64,10 +65,15 @@ pub fn parse_stmt(text string, table &table.Table, scope &ast.Scope) ast.Stmt {
 	return p.stmt()
 }

-pub fn parse_file(path string, table &table.Table) ast.File {
+pub fn parse_file(path string, table &table.Table, comments_mode scanner.CommentsMode) ast.File {
+	// println('parse_file("$path")')
+	text := os.read_file(path) or {
+		panic(err)
+	}
 	mut stmts := []ast.Stmt
 	mut p := Parser{
-		scanner: scanner.new_scanner_file(path)
+		// scanner: scanner.new_scanner(text, comments_mode)
+		scanner: scanner.new_scanner_file(path, comments_mode)
 		table: table
 		file_name: path
 		pref: &pref.Preferences{}
@@ -75,6 +81,8 @@ pub fn parse_file(path string, table &table.Table) ast.File {
 			start_pos: 0
 			parent: 0
 		}
+		// comments_mode: comments_mode
 	}
 	p.read_first_token()
 	// p.scope = &ast.Scope{start_pos: p.tok.position(), parent: 0}
@@ -84,10 +92,12 @@ pub fn parse_file(path string, table &table.Table) ast.File {
 	p.mod = module_decl.name
 	p.builtin_mod = p.mod == 'builtin'
 	// imports
+	/*
 	mut imports := []ast.Import
 	for p.tok.kind == .key_import {
 		imports << p.import_stmt()
 	}
+	*/
 	// TODO: import only mode
 	for {
 		// res := s.scan()
@@ -104,7 +114,7 @@ pub fn parse_file(path string, table &table.Table) ast.File {
 	return ast.File{
 		path: path
 		mod: module_decl
-		imports: imports
+		imports: p.ast_imports
 		stmts: stmts
 		scope: p.scope
 	}
@@ -113,7 +123,7 @@ pub fn parse_file(path string, table &table.Table) ast.File {
 pub fn parse_files(paths []string, table &table.Table) []ast.File {
 	mut files := []ast.File
 	for path in paths {
-		files << parse_file(path, table)
+		files << parse_file(path, table, .skip_comments)
 	}
 	return files
 }
@@ -164,12 +174,20 @@ pub fn (p mut Parser) parse_block() []ast.Stmt {
 }

 fn (p mut Parser) next() {
+	// for {
 	p.tok = p.peek_tok
 	p.peek_tok = p.scanner.scan()
+	// if !(p.tok.kind in [.line_comment, .mline_comment]) {
+	// break
+	// }
+	// }
 	// println(p.tok.str())
 }

 fn (p mut Parser) check(expected token.Kind) {
+	// for p.tok.kind in [.line_comment, .mline_comment] {
+	// p.next()
+	// }
 	if p.tok.kind != expected {
 		s := 'syntax error: unexpected `${p.tok.kind.str()}`, expecting `${expected.str()}`'
 		p.error(s)
@@ -211,6 +229,14 @@ pub fn (p mut Parser) top_stmt() ast.Stmt {
 		.lsbr {
 			return p.attr()
 		}
+		.key_module {
+			return p.module_decl()
+		}
+		.key_import {
+			node := p.import_stmt()
+			p.ast_imports << node
+			return node[0]
+		}
 		.key_global {
 			return p.global_decl()
 		}
@@ -239,10 +265,7 @@ pub fn (p mut Parser) top_stmt() ast.Stmt {
 			return p.struct_decl()
 		}
 		.line_comment {
-			// p.next()
-			return ast.LineComment{
-				text: p.scanner.line_comment
-			}
+			return p.line_comment()
 		}
 		.mline_comment {
 			// p.next()
@@ -258,6 +281,14 @@ pub fn (p mut Parser) top_stmt() ast.Stmt {
 	}
 }

+pub fn (p mut Parser) line_comment() ast.LineComment {
+	text := p.tok.lit
+	p.next()
+	return ast.LineComment{
+		text: text
+	}
+}
+
 pub fn (p mut Parser) stmt() ast.Stmt {
 	match p.tok.kind {
 		.key_assert {
@@ -273,6 +304,9 @@ pub fn (p mut Parser) stmt() ast.Stmt {
 		.key_for {
 			return p.for_statement()
 		}
+		.line_comment {
+			return p.line_comment()
+		}
 		.key_return {
 			return p.return_stmt()
 		}
@@ -553,7 +587,8 @@ pub fn (p mut Parser) name_expr() ast.Expr {
 	if p.peek_tok.kind == .dot && (is_c || p.known_import(p.tok.lit) || p.mod.all_after('.') == p.tok.lit) {
 		if is_c {
 			mod = 'C'
-		} else {
+		}
+		else {
 			// prepend the full import
 			mod = p.imports[p.tok.lit]
 		}
@@ -1472,7 +1507,8 @@ fn (p mut Parser) struct_decl() ast.StructDecl {
 	p.check(.rcbr)
 	if is_c {
 		name = 'C.$name'
-	} else {
+	}
+	else {
 		name = p.prepend_mod(name)
 	}
 	t := table.TypeSymbol{
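
To summarize the new comment path end to end: the scanner (next file) can emit .line_comment tokens, p.line_comment() turns the token literal into an ast.LineComment node, and the fmt change above writes it back out as "// text". A minimal standalone model of that flow, using stand-in types rather than the compiler's own, might look like this:

// Stand-in for ast.LineComment; illustrative only.
struct LineComment {
	text string
}

// Roughly what p.line_comment() does: take the token literal and build a node.
fn line_comment_node(tok_lit string) LineComment {
	return LineComment{
		text: tok_lit
	}
}

// Roughly what the formatter does with the node: re-emit it as a comment line.
fn emit(c LineComment) string {
	return '// $c.text'
}

fn main() {
	node := line_comment_node('TODO: handle multi-line comments')
	println(emit(node)) // prints: // TODO: handle multi-line comments
}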

View File

@@ -41,9 +41,16 @@ mut:
 	is_vh bool // Keep newlines
 	is_fmt bool // Used only for skipping ${} in strings, since we need literal
 	// string values when generating formatted code.
+	comments_mode CommentsMode
 }
+
+pub enum CommentsMode {
+	skip_comments
+	parse_comments
+}
+
 // new scanner from file.
-pub fn new_scanner_file(file_path string) &Scanner {
+pub fn new_scanner_file(file_path string, comments_mode CommentsMode) &Scanner {
 	if !os.exists(file_path) {
 		verror("$file_path doesn't exist")
 	}
@@ -60,7 +67,7 @@ pub fn new_scanner_file(file_path string) &Scanner {
 			raw_text = tos(c_text[offset_from_begin], vstrlen(c_text) - offset_from_begin)
 		}
 	}
-	mut s := new_scanner(raw_text)
+	mut s := new_scanner(raw_text, comments_mode) // .skip_comments)
 	// s.init_fmt()
 	s.file_path = file_path
 	return s
@@ -70,13 +77,14 @@ const (
 	is_fmt = os.getenv('VEXE').contains('vfmt')
 )

 // new scanner from string.
-pub fn new_scanner(text string) &Scanner {
+pub fn new_scanner(text string, comments_mode CommentsMode) &Scanner {
 	return &Scanner{
 		text: text
 		print_line_on_error: true
 		print_colored_error: true
 		print_rel_paths_on_error: true
 		is_fmt: is_fmt
+		comments_mode: comments_mode
 	}
 }
@@ -332,6 +340,9 @@ fn (s mut Scanner) end_of_file() token.Token {
 }

 pub fn (s mut Scanner) scan() token.Token {
+	// if s.comments_mode == .parse_comments {
+	// println('\nscan()')
+	// }
 	// if s.line_comment != '' {
 	// s.fgenln('// LC "$s.line_comment"')
 	// s.line_comment = ''
@@ -712,11 +723,19 @@ pub fn (s mut Scanner) scan() token.Token {
 		start := s.pos + 1
 		s.ignore_line()
 		s.line_comment = s.text[start + 1..s.pos]
-		s.line_comment = s.line_comment.trim_space()
-		if s.is_fmt {
-			s.pos-- // fix line_nr, \n was read, and the comment is marked on the next line
-			s.line_nr--
-			return s.scan_res(.line_comment, s.line_comment)
+		// if s.comments_mode == .parse_comments {
+		// println('line c $s.line_comment')
+		// }
+		comment := s.line_comment.trim_space()
+		// s.line_comment = comment
+		if s.comments_mode == .parse_comments {
+			// println('line c "$comment" z=')
+			// fix line_nr, \n was read, and the comment is marked
+			// on the next line
+			s.pos--
+			// println("'" + s.text[s.pos].str() + "'")
+			// s.line_nr--
+			return s.scan_res(.line_comment, comment)
 		}
 		// s.fgenln('// ${s.prev_tok.str()} "$s.line_comment"')
 		// Skip the comment (return the next token)
@@ -748,7 +767,8 @@ pub fn (s mut Scanner) scan() token.Token {
 		s.pos++
 		end := s.pos + 1
 		comment := s.text[start..end]
-		if s.is_fmt {
+		// if s.is_fmt {
+		if false && s.comments_mode == .parse_comments {
 			s.line_comment = comment
 			return s.scan_res(.mline_comment, s.line_comment)
 		}
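
As a standalone illustration of the CommentsMode switch introduced above (a toy line-based model written in the same era's V syntax, not the compiler's scanner): in .skip_comments mode a // line is dropped, in .parse_comments mode it is returned as a token so later stages can re-emit it.

enum CommentsMode {
	skip_comments
	parse_comments
}

// Toy tokenizer: splits on lines and either drops or keeps // comments.
fn tokenize(text string, mode CommentsMode) []string {
	mut toks := []string
	for line in text.split('\n') {
		trimmed := line.trim_space()
		if trimmed == '' {
			continue
		}
		if trimmed.starts_with('//') {
			if mode == .parse_comments {
				toks << trimmed // kept, like scan_res(.line_comment, comment)
			}
			continue
		}
		toks << trimmed
	}
	return toks
}

fn main() {
	src := 'fn main() {\n\t// say hi\n\tprintln(1)\n}'
	println(tokenize(src, .skip_comments).len) // 3: the comment is dropped
	println(tokenize(src, .parse_comments).len) // 4: the comment is a token
}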