vfmt: comments and other fixes

pull/4243/head
Alexander Medvednikov 2020-04-05 02:08:10 +02:00
parent 45fdbc4df7
commit 97db4c0e9a
8 changed files with 196 additions and 96 deletions

View File

@@ -54,7 +54,7 @@ fn main() {
 is_2: '-2' in args
 is_c: '-c' in args
 is_l: '-l' in args
-is_w: '-w' in args
+is_w: '-ww' in args
 is_diff: '-diff' in args
 is_verbose: '-verbose' in args || '--verbose' in args
 is_all: '-all' in args || '--all' in args
@@ -153,6 +153,7 @@ fn main() {
 }
 exit(1)
 }
+println('vfmt done')
 }
 fn (foptions &FormatOptions) format_file(file string) {

View File

@@ -19,7 +19,7 @@ Type | AsCast | TypeOf | StringInterLiteral
 pub type Stmt = GlobalDecl | FnDecl | Return | Module | Import | ExprStmt |
 ForStmt | StructDecl | ForCStmt | ForInStmt | CompIf | ConstDecl | Attr | BranchStmt |
 HashStmt | AssignStmt | EnumDecl | TypeDecl | DeferStmt | GotoLabel | GotoStmt |
-LineComment | MultiLineComment | AssertStmt | UnsafeStmt | GoStmt | Block | InterfaceDecl
+Comment | AssertStmt | UnsafeStmt | GoStmt | Block | InterfaceDecl
 pub type ScopeObject = ConstField | GlobalDecl | Var
@@ -102,12 +102,21 @@ pub:
 expr Expr
 }
+pub struct StructField {
+pub:
+name string
+pos token.Position
+comment Comment
+default_expr Expr
+mut:
+typ table.Type
+}
 pub struct Field {
 pub:
 name string
 // type_idx int
 pos token.Position
+already_reported bool
 mut:
 typ table.Type
 // typ2 Type
@@ -134,13 +143,12 @@ pub struct StructDecl {
 pub:
 pos token.Position
 name string
-fields []Field
+fields []StructField
 is_pub bool
 mut_pos int // mut:
 pub_pos int // pub:
 pub_mut_pos int // pub mut:
 is_c bool
-default_exprs []Expr
 }
 pub struct InterfaceDecl {
@@ -268,6 +276,7 @@ pub:
 scope &Scope
 // TODO: consider parent instead of field
 global_scope &Scope
+//comments []Comment
 }
 pub struct IdentFn {
@@ -661,14 +670,11 @@ mut:
 expr_type table.Type
 }
-pub struct LineComment {
-pub:
-text string
-}
-pub struct MultiLineComment {
+pub struct Comment {
 pub:
 text string
+is_multi bool
+line_nr int
 }
 pub struct ConcatExpr {
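
The two comment nodes collapse into a single Comment node. A minimal sketch, not part of the commit, of what the unified node carries (it assumes the compiler's ast module is importable as `v.ast`; the values are invented):

import v.ast

fn main() {
	// a `//` comment: is_multi stays false
	c1 := ast.Comment{
		text: 'TODO: refactor'
		line_nr: 12
	}
	// a `/* */` comment: is_multi and line_nr exist to carry block style and position
	c2 := ast.Comment{
		text: 'first line\nsecond line'
		is_multi: true
		line_nr: 20
	}
	println('$c1.line_nr $c1.text')
	println(c2.is_multi)
}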

View File

@@ -24,6 +24,7 @@ mut:
 line_len int
 single_line_if bool
 cur_mod string
+file ast.File
 }
 pub fn fmt(file ast.File, table &table.Table) string {
@@ -31,15 +32,32 @@ pub fn fmt(file ast.File, table &table.Table) string {
 out: strings.new_builder(1000)
 table: table
 indent: 0
+file: file
 }
 f.mod(file.mod)
 f.imports(file.imports)
 for stmt in file.stmts {
 f.stmt(stmt)
 }
+/*
+for comment in file.comments {
+println('$comment.line_nr $comment.text')
+}
+*/
 return f.out.str().trim_space() + '\n'
 }
+/*
+fn (f mut Fmt) find_comment(line_nr int) {
+for comment in f.file.comments {
+if comment.line_nr == line_nr {
+f.writeln('// FFF $comment.line_nr $comment.text')
+return
+}
+}
+}
+*/
 pub fn (f mut Fmt) write(s string) {
 if f.indent > 0 && f.empty_line {
 f.out.write(tabs[f.indent])
@@ -170,6 +188,8 @@ fn (f mut Fmt) stmt(node ast.Stmt) {
 }
 }
 ast.FnDecl {
+//println('$it.name find_comment($it.pos.line_nr)')
+//f.find_comment(it.pos.line_nr)
 s := it.str(f.table)
 // f.write(it.str(f.table))
 f.write(s.replace(f.cur_mod + '.', '')) // `Expr` instead of `ast.Expr` in mod ast
@@ -215,13 +235,8 @@ fn (f mut Fmt) stmt(node ast.Stmt) {
 ast.GotoStmt {
 f.writeln('goto $it.name')
 }
-ast.LineComment {
-f.writeln('// $it.text')
-}
-ast.MultiLineComment {
-f.writeln('/*')
-f.writeln(it.text)
-f.writeln('*/')
+ast.Comment {
+f.comment(it)
 }
 ast.Return {
 f.write('return')
@@ -251,8 +266,8 @@ fn (f mut Fmt) stmt(node ast.Stmt) {
 f.writeln('}')
 }
 ast.Import {}
-// already handled in f.imports
 ast.TypeDecl {
+// already handled in f.imports
 f.type_decl(it)
 }
 ast.AssertStmt {
@@ -308,7 +323,8 @@ fn (f mut Fmt) struct_decl(node ast.StructDecl) {
 if node.is_pub {
 f.write('pub ')
 }
-f.writeln('struct $node.name {')
+name := node.name.after('.')
+f.writeln('struct $name {')
 mut max := 0
 for field in node.fields {
 if field.name.len > max {
@@ -325,6 +341,10 @@ fn (f mut Fmt) struct_decl(node ast.StructDecl) {
 else if i == node.pub_mut_pos {
 f.writeln('pub mut:')
 }
+if field.comment.text != '' {
+f.write('\t')
+f.comment(field.comment)
+}
 f.write('\t$field.name ')
 f.write(strings.repeat(` `, max - field.name.len))
 f.writeln(f.type_to_str(field.typ))
@@ -596,6 +616,10 @@ fn (f mut Fmt) expr(node ast.Expr) {
 f.write('}')
 }
 }
+ast.Type {
+f.writeln(f.type_to_str(it.typ))
+}
 ast.TypeOf {
 f.write('typeof(')
 f.expr(it.expr)
@@ -603,6 +627,9 @@ fn (f mut Fmt) expr(node ast.Expr) {
 }
 else {
 eprintln('fmt expr: unhandled node ' + typeof(node))
+if typeof(node) != 'unknown v.ast.Expr' {
+exit(1)
+}
 }
 }
 }
@@ -636,3 +663,16 @@ fn (f mut Fmt) or_expr(or_block ast.OrExpr) {
 f.write('}')
 }
 }
+fn (f mut Fmt) comment(node ast.Comment) {
+if !node.text.contains('\n') {
+f.writeln('// $node.text')
+return
+}
+lines := node.text.split_into_lines()
+f.writeln('/*')
+for line in lines {
+f.writeln(line)
+}
+f.writeln('*/')
+}
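
For reference, a standalone sketch (not part of the commit) that mirrors the rendering rule of the new comment() method above: single-line text becomes a `//` comment, text containing a newline is wrapped in a `/* */` block, one line per row.

import strings

// Same rule as Fmt.comment(), but as a free function on a local builder.
fn render_comment(text string) string {
	mut sb := strings.new_builder(100)
	if !text.contains('\n') {
		sb.writeln('// $text')
		return sb.str()
	}
	sb.writeln('/*')
	for line in text.split_into_lines() {
		sb.writeln(line)
	}
	sb.writeln('*/')
	return sb.str()
}

fn main() {
	print(render_comment('TODO: clean this up'))
	print(render_comment('first line\nsecond line'))
}

Running it prints '// TODO: clean this up' followed by the two-line block form, which matches what Fmt.comment() writes for ast.Comment statements.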

View File

@@ -1,3 +1,19 @@
+import os
+fn foo(a []os.File) {
+}
+struct IfExpr {
+}
+struct MatchExpr {
+}
+type Expr = IfExpr | MatchExpr
+fn sum_types(a []Expr) {
+}
 fn main() {
 x := 0xdeadbeef
 u := 9978654321

View File

@@ -12,12 +12,12 @@ import (
 )
 const (
-c_reserved = ['delete', 'exit', 'unix',
-// 'print',
-// 'ok',
-'error', 'calloc', 'malloc', 'free', 'panic',
 // Full list of C reserved words, from: https://en.cppreference.com/w/c/keyword
-'auto', 'char', 'default', 'do', 'double', 'extern', 'float', 'inline', 'int', 'long', 'register', 'restrict', 'short', 'signed', 'sizeof', 'static', 'switch', 'typedef', 'union', 'unsigned', 'void', 'volatile', 'while', ]
+c_reserved = ['delete', 'exit', 'unix',
+'error', 'calloc', 'malloc', 'free', 'panic',
+'auto', 'char', 'default', 'do', 'double', 'extern', 'float', 'inline', 'int', 'long', 'register',
+'restrict', 'short', 'signed', 'sizeof', 'static', 'switch', 'typedef', 'union', 'unsigned', 'void',
+'volatile', 'while', ]
 )
 struct Gen {
@@ -59,6 +59,9 @@ const (
 '\t\t\t\t\t\t\t\t']
 )
+fn foo(file []ast.File) {}
+fn foo2(file []int) {}
 pub fn cgen(files []ast.File, table &table.Table, pref &pref.Preferences) string {
 // println('start cgen2')
 mut g := Gen{
@@ -365,8 +368,9 @@ fn (g mut Gen) stmt(node ast.Stmt) {
 g.expr(it.expr)
 expr := it.expr
 match expr {
-// no ; after an if expression
-ast.IfExpr {}
+ast.IfExpr {
+// no ; after an if expression
+}
 else {
 if !g.inside_ternary {
 g.writeln(';')
@@ -491,8 +495,8 @@ fn (g mut Gen) for_in(it ast.ForInStmt) {
 g.stmts(it.stmts)
 g.writeln('}')
 }
-// TODO:
 else if it.kind == .array {
+// TODO:
 // `for num in nums {`
 g.writeln('// FOR IN')
 i := if it.key_var == '' { g.new_tmp_var() } else { it.key_var }
@@ -658,8 +662,8 @@ fn (g mut Gen) gen_assign_stmt(assign_stmt ast.AssignStmt) {
 }
 }
 }
-// `a := 1` | `a,b := 1,2`
 else {
+// `a := 1` | `a,b := 1,2`
 for i, ident in assign_stmt.left {
 val := assign_stmt.right[i]
 ident_var_info := ident.var_info()
@@ -1128,20 +1132,6 @@ fn (g mut Gen) expr(node ast.Expr) {
 g.expr(it.right)
 g.is_amp = false
 }
-/*
-ast.UnaryExpr {
-// probably not :D
-if it.op in [.inc, .dec] {
-g.expr(it.left)
-g.write(it.op.str())
-}
-else {
-g.write(it.op.str())
-g.expr(it.left)
-}
-}
-*/
 ast.SizeOf {
 if it.type_name != '' {
 g.write('sizeof($it.type_name)')
@@ -1172,8 +1162,8 @@ fn (g mut Gen) expr(node ast.Expr) {
 ast.StringInterLiteral {
 g.string_inter_literal(it)
 }
-// `user := User{name: 'Bob'}`
 ast.StructInit {
+// `user := User{name: 'Bob'}`
 g.struct_init(it)
 }
 ast.SelectorExpr {
@@ -1395,8 +1385,8 @@ fn (g mut Gen) infix_expr(node ast.InfixExpr) {
 g.write(')')
 }
 }
-// arr << val
 else if node.op == .left_shift && g.table.get_type_symbol(node.left_type).kind == .array {
+// arr << val
 tmp := g.new_tmp_var()
 sym := g.table.get_type_symbol(node.left_type)
 info := sym.info as table.Array
@@ -1840,8 +1830,8 @@ fn (g mut Gen) return_statement(node ast.Return) {
 g.write(' }, sizeof($styp))')
 }
 }
-// normal return
 else if node.exprs.len == 1 {
+// normal return
 g.write(' ')
 // `return opt_ok(expr)` for functions that expect an optional
 if fn_return_is_optional && !table.type_is(node.types[0], .optional) {
@@ -2187,8 +2177,9 @@ fn (g mut Gen) write_types(types []table.TypeSymbol) {
 //
 g.definitions.writeln('};\n')
 }
-// table.Alias, table.SumType { TODO
-table.Alias {}
+table.Alias {
+// table.Alias, table.SumType { TODO
+}
 table.SumType {
 g.definitions.writeln('// Sum type')
 g.definitions.writeln('
@@ -2393,10 +2384,9 @@ fn (g mut Gen) method_call(node ast.CallExpr) {
 g.gen_filter(node)
 return
 }
-if typ_sym.kind == .array && node.name in
 // TODO performance, detect `array` method differently
+if typ_sym.kind == .array && node.name in
 ['repeat', 'sort_with_compare', 'free', 'push_many', 'trim',
-//
 'first', 'last', 'clone', 'reverse', 'slice'] {
 // && rec_sym.name == 'array' {
 // && rec_sym.name == 'array' && receiver_name.starts_with('array') {

View File

@@ -12,9 +12,6 @@ import (
 v.util
 term
 os
-// runtime
-// sync
-// time
 )
 struct Parser {
@@ -22,13 +19,11 @@ struct Parser {
 file_name string
 mut:
 tok token.Token
-peek_tok token.Token
-// vars []string
+peek_tok token.Token // sdfsdf
 table &table.Table
 is_c bool
-// prefix_parse_fns []PrefixParseFn
 inside_if bool
-pref &pref.Preferences // Preferences shared from V struct
+pref &pref.Preferences
 builtin_mod bool
 mod string
 attr string
@@ -39,23 +34,37 @@ mut:
 ast_imports []ast.Import
 is_amp bool
 returns bool
-inside_match_case bool // to separate `match_expr { }` from `Struct{}`
+inside_match_case bool
+comments []ast.Comment
+// sdfsdfd
 }
+//inside_match_case bool // to separate `match_expr { }` from `Struct{}`
+// prefix_parse_fns []PrefixParseFn
+// vars []string
 // for tests
 pub fn parse_stmt(text string, table &table.Table, scope &ast.Scope) ast.Stmt {
 s := scanner.new_scanner(text, .skip_comments)
+a := 324
 mut p := Parser{
 scanner: s
 table: table
 pref: &pref.Preferences{}
 scope: scope
-// scope: &ast.Scope{start_pos: 0, parent: 0}
 global_scope: &ast.Scope{
 start_pos: 0
 parent: 0
 }
 }
+// scope: &ast.Scope{start_pos: 0, parent: 0}
 p.init_parse_fns()
 p.read_first_token()
 return p.stmt()
@@ -115,6 +124,7 @@ pub fn parse_file(path string, table &table.Table, comments_mode scanner.Comment
 stmts: stmts
 scope: p.scope
 global_scope: p.global_scope
+//comments: p.comments
 }
 }
@@ -222,15 +232,23 @@ pub fn (p mut Parser) parse_block_no_scope() []ast.Stmt {
 return stmts
 }
-fn (p mut Parser) next() {
-// for {
+/*
+fn (p mut Parser) next_with_comment() {
 p.tok = p.peek_tok
 p.peek_tok = p.scanner.scan()
-// if !(p.tok.kind in [.line_comment, .mline_comment]) {
-// break
-// }
-// }
-// println(p.tok.str())
+}
+*/
+fn (p mut Parser) next() {
+p.tok = p.peek_tok
+p.peek_tok = p.scanner.scan()
+/*
+if p.tok.kind==.comment {
+p.comments << ast.Comment{text:p.tok.lit, line_nr:p.tok.line_nr}
+p.next()
+}
+*/
 }
 fn (p mut Parser) check(expected token.Kind) {
@@ -319,15 +337,8 @@ pub fn (p mut Parser) top_stmt() ast.Stmt {
 .key_union {
 return p.struct_decl()
 }
-.line_comment {
-return p.line_comment()
-}
-.mline_comment {
-comment := p.tok.lit
-p.next()
-return ast.MultiLineComment{
-text: comment
-}
+.comment {
+return p.comment()
 }
 else {
 // #printf("");
@@ -337,10 +348,18 @@ pub fn (p mut Parser) top_stmt() ast.Stmt {
 }
 }
-pub fn (p mut Parser) line_comment() ast.LineComment {
+// TODO [if vfmt]
+pub fn (p mut Parser) check_comment() ast.Comment {
+if p.tok.kind == .comment {
+return p.comment()
+}
+}
+pub fn (p mut Parser) comment() ast.Comment {
 text := p.tok.lit
 p.next()
-return ast.LineComment{
+//p.next_with_comment()
+return ast.Comment{
 text: text
 }
 }
@@ -367,8 +386,8 @@ pub fn (p mut Parser) stmt() ast.Stmt {
 .key_for {
 return p.for_statement()
 }
-.line_comment {
-return p.line_comment()
+.comment {
+return p.comment()
 }
 .key_return {
 return p.return_stmt()
@@ -555,6 +574,9 @@ fn (p mut Parser) struct_init(short_syntax bool) ast.StructInit {
 is_short_syntax := !(p.peek_tok.kind == .colon || p.tok.kind == .rcbr) // `Vec{a,b,c}`
 // p.warn(is_short_syntax.str())
 for p.tok.kind != .rcbr {
+if p.tok.kind == .comment {
+p.comment()
+}
 mut field_name := ''
 if is_short_syntax {
 expr := p.expr(0)
@@ -573,6 +595,9 @@ fn (p mut Parser) struct_init(short_syntax bool) ast.StructInit {
 if p.tok.kind == .comma {
 p.check(.comma)
 }
+if p.tok.kind == .comment {
+p.comment()
+}
 }
 node := ast.StructInit{
 typ: typ
@@ -709,6 +734,11 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
 // println('\n\nparser.expr()')
 mut typ := table.void_type
 mut node := ast.Expr{}
+//defer {
+//if p.tok.kind == .comment {
+//p.comment()
+//}
+//}
 // Prefix
 match p.tok.kind {
 .name {
@@ -813,7 +843,10 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
 p.check(.rcbr)
 }
 else {
-p.error('expr(): bad token `$p.tok.str()`')
+if p.tok.kind == .comment {
+println(p.tok.lit)
+}
+p.error('expr(): bad token `$p.tok.kind.str()`')
 }
 }
 // Infix
@@ -1317,6 +1350,9 @@ fn (p mut Parser) array_init() ast.ArrayInit {
 if p.tok.kind == .comma {
 p.check(.comma)
 }
+if p.tok.kind == .comment {
+p.comment()
+}
 }
 line_nr := p.tok.line_nr
 p.check(.rsbr)
@@ -1421,6 +1457,9 @@ fn (p mut Parser) import_stmt() []ast.Import {
 p.check(.lpar)
 for p.tok.kind != .rpar {
 imports << p.parse_import()
+if p.tok.kind == .comment {
+p.comment()
+}
 }
 p.check(.rpar)
 }
@@ -1440,6 +1479,9 @@ fn (p mut Parser) const_decl() ast.ConstDecl {
 p.check(.lpar)
 mut fields := []ast.ConstField
 for p.tok.kind != .rpar {
+if p.tok.kind == .comment {
+p.comment()
+}
 name := p.prepend_mod(p.check_name())
 // name := p.check_name()
 // println('!!const: $name')
@@ -1481,15 +1523,19 @@ fn (p mut Parser) struct_decl() ast.StructDecl {
 }
 is_typedef := p.attr == 'typedef'
 mut name := p.check_name()
-mut default_exprs := []ast.Expr
 // println('struct decl $name')
 p.check(.lcbr)
-mut ast_fields := []ast.Field
+mut ast_fields := []ast.StructField
 mut fields := []table.Field
 mut mut_pos := -1
 mut pub_pos := -1
 mut pub_mut_pos := -1
 for p.tok.kind != .rcbr {
+mut comment := ast.Comment{}
+if p.tok.kind == .comment {
+comment = p.comment()
+}
 if p.tok.kind == .key_pub {
 p.check(.key_pub)
 if p.tok.kind == .key_mut {
@@ -1520,16 +1566,20 @@ fn (p mut Parser) struct_decl() ast.StructDecl {
 println('XXXX' + s.str())
 }
 */
+mut default_expr := ast.Expr{}
+// Default value
 if p.tok.kind == .assign {
-// Default value
 p.next()
-default_exprs << p.expr(0)
+default_expr = p.expr(0)
 }
-ast_fields << ast.Field{
+if p.tok.kind == .comment {
+comment = p.comment()
+}
+ast_fields << ast.StructField{
 name: field_name
 pos: field_pos
 typ: typ
+comment: comment
 }
 fields << table.Field{
 name: field_name
@@ -1574,7 +1624,6 @@ fn (p mut Parser) struct_decl() ast.StructDecl {
 pub_pos: pub_pos
 pub_mut_pos: pub_mut_pos
 is_c: is_c
-default_exprs: default_exprs
 }
 }
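
Taken together, the parser changes swallow .comment tokens (emitted when the scanner runs in .parse_comments mode, i.e. under vfmt) in places where they previously caused parse errors: struct declarations, struct inits, array literals, const blocks and import blocks. Only the comment attached to a struct field is actually stored, on the new ast.StructField; the other call sites still discard the returned node. A hypothetical input exercising three of these paths (all names are invented):

struct Config {
	// maximum number of retries
	retries int
	name    string
}

fn main() {
	nums := [1,
		// a comment after a comma in an array literal
		2, 3]
	cfg := Config{
		name: 'demo'
		// a comment between fields in a struct init
		retries: 3
	}
	println(nums.len)
	println(cfg.name)
}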

View File

@@ -13,7 +13,8 @@ import (
 const (
 single_quote = `\'`
 double_quote = `"`
-is_fmt = os.getenv('VEXE').contains('vfmt')
+//is_fmt = os.getenv('VEXE').contains('vfmt')
+is_fmt = os.executable().contains('vfmt')
 num_sep = `_` // char used as number separator
 )
@@ -742,8 +743,8 @@ pub fn (s mut Scanner) scan() token.Token {
 // on the next line
 s.pos--
 // println("'" + s.text[s.pos].str() + "'")
-// s.line_nr--
-return s.new_token(.line_comment, comment)
+s.line_nr--
+return s.new_token(.comment, comment)
 }
 // s.fgenln('// ${s.prev_tok.str()} "$s.line_comment"')
 // Skip the comment (return the next token)
@@ -775,7 +776,7 @@ pub fn (s mut Scanner) scan() token.Token {
 s.pos++
 if s.comments_mode == .parse_comments {
 comment := s.text[start..(s.pos - 1)].trim_space()
-return s.new_token(.mline_comment, comment)
+return s.new_token(.comment, comment)
 }
 // Skip if not in fmt mode
 return s.scan()

View File

@@ -70,9 +70,7 @@ pub enum Kind {
 lt
 ge
 le
-// comments
-line_comment
-mline_comment
+comment
 nl
 dot
 dotdot
@@ -199,8 +197,7 @@ fn build_token_str() []string {
 s[Kind.question] = '?'
 s[Kind.left_shift] = '<<'
 s[Kind.right_shift] = '>>'
-s[Kind.line_comment] = '// line comment'
-s[Kind.mline_comment] = '/* mline comment */'
+s[Kind.comment] = '// comment'
 s[Kind.nl] = 'NLL'
 s[Kind.dollar] = '$'
 s[Kind.str_dollar] = '$2'