fmt: comments
parent 5a5f4ce99a
commit d5faf36aa9
@@ -153,7 +153,7 @@ fn (foptions &FormatOptions) format_file(file string) {
         eprintln('vfmt2 running fmt.fmt over file: $file')
     }
     table := table.new_table()
-    file_ast := parser.parse_file(file, table)
+    file_ast := parser.parse_file(file, table, .parse_comments)
     formatted_content := fmt.fmt(file_ast, table)
     file_name := filepath.filename(file)
     vfmt_output_path := filepath.join(os.tmpdir(), 'vfmt_' + file_name)
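
Note: parse_file gains a third argument that tells the scanner what to do with comments. The vfmt path above is the only caller that needs comments preserved, so it passes .parse_comments; every other call site touched by this commit passes .skip_comments. A minimal sketch of the two call styles, assuming only the functions visible in this diff:

    table := table.new_table()
    // formatting: keep comments as tokens so they can be re-emitted
    fmt_ast := parser.parse_file(file, table, .parse_comments)
    // compilation, doc generation, tests: drop comments while scanning
    plain_ast := parser.parse_file(file, table, .skip_comments)
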
@@ -22,9 +22,10 @@ struct Parser {
     // the #include directives in the parsed .v file
     file_pcguard string
     v &V
-    pref &pref.Preferences // Preferences shared from V struct
+    pref &pref.Preferences
 mut:
     scanner &Scanner
+    // Preferences shared from V struct
     tokens []Token
     token_idx int
     prev_stuck_token_idx int
@@ -261,6 +262,7 @@ fn (p mut Parser) next() {
     // (only when vfmt compile time flag is enabled, otherwise this function
     // is not even generated)
     p.fnext()
+    //
     p.prev_tok2 = p.prev_tok
     p.prev_tok = p.tok
     p.scanner.prev_tok = p.tok
@@ -14,6 +14,9 @@ import (
 
 pub fn (node &FnDecl) str(t &table.Table) string {
     mut f := strings.new_builder(30)
+    if node.is_pub {
+        f.write('pub ')
+    }
     mut receiver := ''
     if node.is_method {
         sym := t.get_type_symbol(node.receiver.typ)
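
Note: FnDecl.str() is the stringifier the formatter uses for function signatures; with the is_pub check it now keeps the visibility modifier instead of dropping it:

    // before this change the rendered signature lost the modifier:
    //   pub fn foo() {}   ->   fn foo() {}
    // with the is_pub branch it round-trips:
    //   pub fn foo() {}   ->   pub fn foo() {}
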
@@ -19,7 +19,7 @@ mut:
     stmts []ast.Stmt // all module statements from all files
 }
 
-type FilterFn fn(node ast.FnDecl) bool
+type FilterFn fn(node ast.FnDecl)bool
 
 pub fn doc(mod string, table &table.Table) string {
     mut d := Doc{
@@ -45,7 +45,7 @@ pub fn doc(mod string, table &table.Table) string {
         if file.ends_with('_test.v') || file.ends_with('_windows.v') || file.ends_with('_macos.v') {
             continue
         }
-        file_ast := parser.parse_file(filepath.join(path,file), table)
+        file_ast := parser.parse_file(filepath.join(path,file), table, .skip_comments)
         d.stmts << file_ast.stmts
     }
     d.print_fns()
@@ -195,6 +195,9 @@ fn (f mut Fmt) stmt(node ast.Stmt) {
             f.stmts(it.stmts)
             f.writeln('}')
         }
+        ast.LineComment {
+            f.writeln('// $it.text')
+        }
         ast.Return {
             f.write('return')
             // multiple returns
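
Note: the formatter re-emits comment statements verbatim. Judging only from the fields used in this diff (text: p.tok.lit in the parser, $it.text here), the ast.LineComment node is presumably a thin wrapper around the comment text, roughly:

    // hypothetical shape, inferred from usage in this diff
    pub struct LineComment {
    pub:
        text string
    }
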
@@ -244,6 +247,9 @@ fn (f mut Fmt) stmt(node ast.Stmt) {
 }
 
 fn (f mut Fmt) struct_decl(node ast.StructDecl) {
+    if node.is_pub {
+        f.write('pub ')
+    }
     f.writeln('struct $node.name {')
     mut max := 0
     for field in node.fields {
@@ -15,7 +15,7 @@ const (
 
 fn test_fmt() {
     fmt_message := 'vfmt tests'
-    eprintln(term.header(fmt_message,'-'))
+    eprintln(term.header(fmt_message, '-'))
     vexe := os.getenv('VEXE')
     if vexe.len == 0 || !os.exists(vexe) {
         eprintln('VEXE must be set')
@@ -23,11 +23,13 @@ fn test_fmt() {
     }
     vroot := filepath.dir(vexe)
     tmpfolder := os.tmpdir()
-    diff_cmd := find_working_diff_command() or { '' }
+    diff_cmd := find_working_diff_command() or {
+        ''
+    }
     mut fmt_bench := benchmark.new_benchmark()
     // Lookup the existing test _input.vv files:
     input_files := os.walk_ext('$vroot/vlib/v/fmt/tests', '_input.vv')
-    fmt_bench.set_total_expected_steps( input_files.len )
+    fmt_bench.set_total_expected_steps(input_files.len)
     for istep, ipath in input_files {
         fmt_bench.cstep = istep
         fmt_bench.step()
@@ -44,7 +46,7 @@ fn test_fmt() {
             continue
         }
         table := table.new_table()
-        file_ast := parser.parse_file(ipath, table)
+        file_ast := parser.parse_file(ipath, table, .skip_comments)
        result_ocontent := fmt.fmt(file_ast, table)
         if expected_ocontent != result_ocontent {
             fmt_bench.fail()
@@ -46,11 +46,12 @@ mut:
     expected_type table.Type
     scope &ast.Scope
     imports map[string]string
+    ast_imports []ast.Import
 }
 
 // for tests
 pub fn parse_stmt(text string, table &table.Table, scope &ast.Scope) ast.Stmt {
-    s := scanner.new_scanner(text)
+    s := scanner.new_scanner(text, .skip_comments)
     mut p := Parser{
         scanner: s
         table: table
@@ -64,10 +65,15 @@ pub fn parse_stmt(text string, table &table.Table, scope &ast.Scope) ast.Stmt {
     return p.stmt()
 }
 
-pub fn parse_file(path string, table &table.Table) ast.File {
+pub fn parse_file(path string, table &table.Table, comments_mode scanner.CommentsMode) ast.File {
+    // println('parse_file("$path")')
+    text := os.read_file(path) or {
+        panic(err)
+    }
     mut stmts := []ast.Stmt
     mut p := Parser{
-        scanner: scanner.new_scanner_file(path)
+        // scanner: scanner.new_scanner(text, comments_mode)
+        scanner: scanner.new_scanner_file(path, comments_mode)
         table: table
         file_name: path
         pref: &pref.Preferences{}
@@ -75,6 +81,8 @@ pub fn parse_file(path string, table &table.Table) ast.File {
             start_pos: 0
             parent: 0
         }
+        // comments_mode: comments_mode
+
     }
     p.read_first_token()
     // p.scope = &ast.Scope{start_pos: p.tok.position(), parent: 0}
@@ -84,10 +92,12 @@ pub fn parse_file(path string, table &table.Table) ast.File {
     p.mod = module_decl.name
     p.builtin_mod = p.mod == 'builtin'
     // imports
+    /*
     mut imports := []ast.Import
     for p.tok.kind == .key_import {
         imports << p.import_stmt()
     }
+    */
     // TODO: import only mode
     for {
         // res := s.scan()
@@ -104,7 +114,7 @@ pub fn parse_file(path string, table &table.Table) ast.File {
     return ast.File{
         path: path
         mod: module_decl
-        imports: imports
+        imports: p.ast_imports
         stmts: stmts
         scope: p.scope
     }
@@ -113,7 +123,7 @@ pub fn parse_file(path string, table &table.Table) ast.File {
 pub fn parse_files(paths []string, table &table.Table) []ast.File {
     mut files := []ast.File
     for path in paths {
-        files << parse_file(path, table)
+        files << parse_file(path, table, .skip_comments)
     }
     return files
 }
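
Note: every caller other than vfmt opts out of comment tokens, so the checker, the doc generator and the parser/fmt tests keep their previous behaviour. The pattern in this commit, using only the functions visible here:

    // bulk compilation path: parse_files keeps its old signature
    files := parser.parse_files(paths, table)
    // single-file callers now state the mode explicitly
    file_ast := parser.parse_file(path, table, .skip_comments)
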
@@ -164,12 +174,20 @@ pub fn (p mut Parser) parse_block() []ast.Stmt {
 }
 
 fn (p mut Parser) next() {
+    // for {
     p.tok = p.peek_tok
     p.peek_tok = p.scanner.scan()
+    // if !(p.tok.kind in [.line_comment, .mline_comment]) {
+    // break
+    // }
+    // }
     // println(p.tok.str())
 }
 
 fn (p mut Parser) check(expected token.Kind) {
+    // for p.tok.kind in [.line_comment, .mline_comment] {
+    // p.next()
+    // }
     if p.tok.kind != expected {
         s := 'syntax error: unexpected `${p.tok.kind.str()}`, expecting `${expected.str()}`'
         p.error(s)
@@ -211,6 +229,14 @@ pub fn (p mut Parser) top_stmt() ast.Stmt {
         .lsbr {
             return p.attr()
         }
+        .key_module {
+            return p.module_decl()
+        }
+        .key_import {
+            node := p.import_stmt()
+            p.ast_imports << node
+            return node[0]
+        }
         .key_global {
             return p.global_decl()
         }
@@ -239,10 +265,7 @@ pub fn (p mut Parser) top_stmt() ast.Stmt {
             return p.struct_decl()
         }
         .line_comment {
-            // p.next()
-            return ast.LineComment{
-                text: p.scanner.line_comment
-            }
+            return p.line_comment()
         }
         .mline_comment {
             // p.next()
@@ -258,6 +281,14 @@ pub fn (p mut Parser) top_stmt() ast.Stmt {
     }
 }
 
+pub fn (p mut Parser) line_comment() ast.LineComment {
+    text := p.tok.lit
+    p.next()
+    return ast.LineComment{
+        text: text
+    }
+}
+
 pub fn (p mut Parser) stmt() ast.Stmt {
     match p.tok.kind {
         .key_assert {
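
Note: this is the piece that turns a .line_comment token into an AST node. Under .parse_comments the round trip for a line comment is: the scanner returns the trimmed text as a token, line_comment() wraps it in ast.LineComment, and fmt writes it back as '// $it.text'. A rough trace, assuming token.lit carries the comment body as it does here:

    // input            token                         node                        fmt output
    // '//  hello '  -> .line_comment, lit 'hello' -> ast.LineComment{ 'hello' } -> '// hello'
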
@@ -273,6 +304,9 @@ pub fn (p mut Parser) stmt() ast.Stmt {
         .key_for {
             return p.for_statement()
         }
+        .line_comment {
+            return p.line_comment()
+        }
         .key_return {
             return p.return_stmt()
         }
@@ -553,7 +587,8 @@ pub fn (p mut Parser) name_expr() ast.Expr {
     if p.peek_tok.kind == .dot && (is_c || p.known_import(p.tok.lit) || p.mod.all_after('.') == p.tok.lit) {
         if is_c {
             mod = 'C'
-        } else {
+        }
+        else {
             // prepend the full import
             mod = p.imports[p.tok.lit]
         }
@@ -1472,7 +1507,8 @@ fn (p mut Parser) struct_decl() ast.StructDecl {
     p.check(.rcbr)
     if is_c {
         name = 'C.$name'
-    } else {
+    }
+    else {
         name = p.prepend_mod(name)
     }
     t := table.TypeSymbol{
@@ -41,9 +41,16 @@ mut:
     is_vh bool // Keep newlines
     is_fmt bool // Used only for skipping ${} in strings, since we need literal
     // string values when generating formatted code.
+    comments_mode CommentsMode
 }
+
+pub enum CommentsMode {
+    skip_comments
+    parse_comments
+}
+
 // new scanner from file.
-pub fn new_scanner_file(file_path string) &Scanner {
+pub fn new_scanner_file(file_path string, comments_mode CommentsMode) &Scanner {
     if !os.exists(file_path) {
         verror("$file_path doesn't exist")
     }
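
Note: CommentsMode is the switch threaded from parse_file down to the scanner. .skip_comments keeps the old behaviour (comments are consumed inside scan() and never surface); .parse_comments makes scan() hand them back as .line_comment tokens so vfmt can reprint them. A hedged construction sketch using only the constructors changed here:

    mut s1 := scanner.new_scanner('a := 1 // trailing', .skip_comments) // comment never becomes a token
    mut s2 := scanner.new_scanner_file(path, .parse_comments) // comments arrive as .line_comment tokens
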
@@ -60,7 +67,7 @@ pub fn new_scanner_file(file_path string) &Scanner {
         raw_text = tos(c_text[offset_from_begin], vstrlen(c_text) - offset_from_begin)
         }
     }
-    mut s := new_scanner(raw_text)
+    mut s := new_scanner(raw_text, comments_mode) // .skip_comments)
     // s.init_fmt()
     s.file_path = file_path
     return s
@@ -70,13 +77,14 @@ const (
     is_fmt = os.getenv('VEXE').contains('vfmt')
 )
 // new scanner from string.
-pub fn new_scanner(text string) &Scanner {
+pub fn new_scanner(text string, comments_mode CommentsMode) &Scanner {
     return &Scanner{
         text: text
         print_line_on_error: true
         print_colored_error: true
         print_rel_paths_on_error: true
         is_fmt: is_fmt
+        comments_mode: comments_mode
     }
 }
 
@@ -105,7 +113,7 @@ const (
 )
 
 fn filter_num_sep(txt byteptr, start int, end int) string {
-    unsafe {
+    unsafe{
         mut b := malloc(end - start + 1) // add a byte for the endstring 0
         mut i := start
         mut i1 := 0
@@ -332,6 +340,9 @@ fn (s mut Scanner) end_of_file() token.Token {
 }
 
 pub fn (s mut Scanner) scan() token.Token {
+    // if s.comments_mode == .parse_comments {
+    // println('\nscan()')
+    // }
     // if s.line_comment != '' {
     // s.fgenln('// LC "$s.line_comment"')
     // s.line_comment = ''
@@ -712,11 +723,19 @@ pub fn (s mut Scanner) scan() token.Token {
             start := s.pos + 1
             s.ignore_line()
             s.line_comment = s.text[start + 1..s.pos]
-            s.line_comment = s.line_comment.trim_space()
-            if s.is_fmt {
-                s.pos-- // fix line_nr, \n was read, and the comment is marked on the next line
-                s.line_nr--
-                return s.scan_res(.line_comment, s.line_comment)
+            // if s.comments_mode == .parse_comments {
+            // println('line c $s.line_comment')
+            // }
+            comment := s.line_comment.trim_space()
+            // s.line_comment = comment
+            if s.comments_mode == .parse_comments {
+                // println('line c "$comment" z=')
+                // fix line_nr, \n was read, and the comment is marked
+                // on the next line
+                s.pos--
+                // println("'" + s.text[s.pos].str() + "'")
+                // s.line_nr--
+                return s.scan_res(.line_comment, comment)
             }
             // s.fgenln('// ${s.prev_tok.str()} "$s.line_comment"')
             // Skip the comment (return the next token)
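
Note: this is where the mode actually changes what scan() returns. In .parse_comments mode a '//' comment becomes a .line_comment token whose literal is the trimmed body, and s.pos is stepped back one character so the token is attributed to the line the comment sits on rather than the next one. In .skip_comments mode execution falls through to the old path and the comment is swallowed. The observable difference, with scan_res assumed to build a token from a kind and a literal as elsewhere in this file:

    // .parse_comments: '// hello  ' -> token kind .line_comment, lit 'hello'
    // .skip_comments:  the same comment produces no token; scan() returns whatever follows it
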
@@ -748,7 +767,8 @@ pub fn (s mut Scanner) scan() token.Token {
             s.pos++
             end := s.pos + 1
             comment := s.text[start..end]
-            if s.is_fmt {
+            // if s.is_fmt {
+            if false && s.comments_mode == .parse_comments {
                 s.line_comment = comment
                 return s.scan_res(.mline_comment, s.line_comment)
             }
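
Note: multi-line /* */ comments stay disabled for now: the `if false &&` guard means .mline_comment tokens are never produced even under .parse_comments, so only line comments survive a vfmt round trip in this commit.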