fmt: fix multiple things and format most of the compiler (#6631)

Format expressions inside string interpolation like all other expressions (they used to be rendered as a+b instead of a + b; the reason for the discrepancy is unclear)
Fix formatting of match branches when there was only one statement inside (it was incorrectly inlined)
Fix parsing and formatting of some comment edge cases on struct field initialization. You should check out this test, because the result is slightly different from before. I personally find it more logical, but I would understand if the former format were to stay
Fix formatting of void-returning function signatures
pull/6621/head^2
Enzo 2020-10-15 22:12:59 +02:00 committed by GitHub
parent 23644d92a9
commit b083f4014b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
52 changed files with 414 additions and 456 deletions

View File

@ -18,37 +18,42 @@ jobs:
./v vet vlib/v ./v vet vlib/v
- name: v fmt -verify - name: v fmt -verify
run: | run: |
./v fmt -verify vlib/v/scanner/scanner.v
./v fmt -verify vlib/v/parser/parser.v
./v fmt -verify vlib/v/parser/fn.v
./v fmt -verify vlib/v/checker/checker.v
./v fmt -verify vlib/v/gen/cgen.v
./v fmt -verify vlib/v/gen/fn.v
./v fmt -verify vlib/v/gen/x64/gen.v
./v fmt -verify vlib/v/table/table.v
./v fmt -verify vlib/v/fmt/fmt.v
./v fmt -verify vlib/builtin/array.v ./v fmt -verify vlib/builtin/array.v
./v fmt -verify vlib/os/file.v ./v fmt -verify vlib/os/file.v
./v fmt -verify vlib/v/util/errors.v
./v fmt -verify vlib/v/util/suggestions.v
./v fmt -verify vlib/v/util/util.v
./v fmt -verify vlib/v/builder/builder.v
./v fmt -verify vlib/v/builder/cc.v
./v fmt -verify vlib/v/builder/compile.v
./v fmt -verify vlib/v/builder/msvc.v
./v fmt -verify vlib/math/bits/bits.v ./v fmt -verify vlib/math/bits/bits.v
./v fmt -verify vlib/time/time.v ./v fmt -verify vlib/time/time.v
./v fmt -verify vlib/term/colors.v ./v fmt -verify vlib/term/colors.v
./v fmt -verify vlib/term/term.v ./v fmt -verify vlib/term/term.v
./v fmt -verify vlib/v/ast/scope.v ./v fmt -verify vlib/v/ast/
./v fmt -verify vlib/v/checker/check_types.v ./v fmt -verify vlib/v/builder/
./v fmt -verify vlib/v/table/atypes.v ./v fmt -verify vlib/v/cflag/
./v fmt -verify vlib/v/cflag/cflags.v ./v fmt -verify vlib/v/checker/
./v fmt -verify vlib/v/table/cflags.v ./v fmt -verify vlib/v/depgraph/
./v fmt -verify vlib/v/doc/
./v fmt -verify vlib/v/errors/
./v fmt -verify vlib/v/eval/
./v fmt -verify vlib/v/fmt/
./v fmt -verify vlib/v/gen/auto_str_methods.v ./v fmt -verify vlib/v/gen/auto_str_methods.v
./v fmt -verify vlib/v/parser/parse_type.v ./v fmt -verify vlib/v/gen/cgen.v
./v fmt -verify vlib/v/gen/cgen_test.v
./v fmt -verify vlib/v/gen/cmain.v
./v fmt -verify vlib/v/gen/comptime.v
./v fmt -verify vlib/v/gen/fn.v
./v fmt -verify vlib/v/gen/json.v ./v fmt -verify vlib/v/gen/json.v
./v fmt -verify vlib/v/gen/live.v
./v fmt -verify vlib/v/gen/profile.v
./v fmt -verify vlib/v/gen/sql.v
./v fmt -verify vlib/v/gen/str.v
./v fmt -verify vlib/v/gen/x64/elf.v
./v fmt -verify vlib/v/gen/x64/elf_obj.v
./v fmt -verify vlib/v/gen/x64/gen.v
./v fmt -verify vlib/v/parser/
./v fmt -verify vlib/v/pref/
./v fmt -verify vlib/v/scanner/
./v fmt -verify vlib/v/table/
./v fmt -verify vlib/v/util/
./v fmt -verify vlib/v/vet/
./v fmt -verify vlib/v/vmod/
- name: v test-fmt - name: v test-fmt
run: ./v -silent test-fmt run: ./v -silent test-fmt

View File

@ -240,13 +240,13 @@ pub fn (t Time) relative_short() string {
return '1m' return '1m'
} }
if secs < 3600 { if secs < 3600 {
return '${secs/60}m' return '${secs / 60}m'
} }
if secs < 3600 * 24 { if secs < 3600 * 24 {
return '${secs/3600}h' return '${secs / 3600}h'
} }
if secs < 3600 * 24 * 5 { if secs < 3600 * 24 * 5 {
return '${secs/3600/24}d' return '${secs / 3600 / 24}d'
} }
if secs > 3600 * 24 * 10000 { if secs > 3600 * 24 * 10000 {
return '' return ''

View File

@ -179,11 +179,11 @@ pub:
pub struct InterfaceDecl { pub struct InterfaceDecl {
pub: pub:
name string name string
field_names []string field_names []string
is_pub bool is_pub bool
methods []FnDecl methods []FnDecl
pos token.Position pos token.Position
pre_comments []Comment pre_comments []Comment
} }
@ -602,8 +602,8 @@ pub:
stmts []Stmt stmts []Stmt
pos token.Position pos token.Position
val_is_mut bool // `for mut val in vals {` means that modifying `val` will modify the array val_is_mut bool // `for mut val in vals {` means that modifying `val` will modify the array
pub mut:
// and the array cannot be indexed inside the loop // and the array cannot be indexed inside the loop
pub mut:
key_type table.Type key_type table.Type
val_type table.Type val_type table.Type
cond_type table.Type cond_type table.Type
@ -625,10 +625,10 @@ pub:
// #include etc // #include etc
pub struct HashStmt { pub struct HashStmt {
pub: pub:
mod string mod string
pos token.Position pos token.Position
pub mut: pub mut:
val string val string
kind string kind string
} }

View File

@ -58,7 +58,9 @@ pub fn (mut b Builder) compile_c() {
// println(files) // println(files)
} }
$if windows { $if windows {
b.find_win_cc() or { verror(no_compiler_error) } b.find_win_cc() or {
verror(no_compiler_error)
}
// TODO Probably extend this to other OS's? // TODO Probably extend this to other OS's?
} }
// v1 compiler files // v1 compiler files
@ -83,7 +85,8 @@ pub fn (mut b Builder) compile_c() {
bundle_id := if b.pref.bundle_id != '' { b.pref.bundle_id } else { 'app.vlang.$bundle_name' } bundle_id := if b.pref.bundle_id != '' { b.pref.bundle_id } else { 'app.vlang.$bundle_name' }
display_name := if b.pref.display_name != '' { b.pref.display_name } else { bundle_name } display_name := if b.pref.display_name != '' { b.pref.display_name } else { bundle_name }
os.mkdir('$display_name\.app') os.mkdir('$display_name\.app')
os.write_file('$display_name\.app/Info.plist', make_ios_plist(display_name, bundle_id, bundle_name, 1)) os.write_file('$display_name\.app/Info.plist', make_ios_plist(display_name, bundle_id,
bundle_name, 1))
} }
b.cc() b.cc()
} }

View File

@ -761,7 +761,7 @@ fn (mut v Builder) build_thirdparty_obj_file(path string, moduleflags []cflag.CF
return return
} }
println('$obj_path not found, building it...') println('$obj_path not found, building it...')
cfile := '${path[..path.len-2]}.c' cfile := '${path[..path.len - 2]}.c'
btarget := moduleflags.c_options_before_target() btarget := moduleflags.c_options_before_target()
atarget := moduleflags.c_options_after_target() atarget := moduleflags.c_options_after_target()
cppoptions := if v.pref.ccompiler.contains('++') { ' -fpermissive -w ' } else { '' } cppoptions := if v.pref.ccompiler.contains('++') { ' -fpermissive -w ' } else { '' }

View File

@ -5,7 +5,7 @@ import v.table
// generic struct instantiations to concrete types // generic struct instantiations to concrete types
pub fn (b &Builder) generic_struct_insts_to_concrete() { pub fn (b &Builder) generic_struct_insts_to_concrete() {
for idx, _ in b.table.types { for idx, _ in b.table.types {
mut typ := unsafe { &b.table.types[idx] } mut typ := unsafe {&b.table.types[idx]}
if typ.kind == .generic_struct_inst { if typ.kind == .generic_struct_inst {
info := typ.info as table.GenericStructInst info := typ.info as table.GenericStructInst
parent := b.table.types[info.parent_idx] parent := b.table.types[info.parent_idx]

View File

@ -36,4 +36,4 @@ fn make_ios_plist(display_name string, bundle_id string, bundle_name string, bun
</array> </array>
</dict> </dict>
</plist>' </plist>'
} }

View File

@ -331,7 +331,7 @@ fn (mut v Builder) build_thirdparty_obj_file_with_msvc(path string, moduleflags
return return
} }
println('$obj_path not found, building it (with msvc)...') println('$obj_path not found, building it (with msvc)...')
cfiles := '${path[..path.len-2]}.c' cfiles := '${path[..path.len - 2]}.c'
flags := msvc_string_flags(moduleflags) flags := msvc_string_flags(moduleflags)
inc_dirs := flags.inc_paths.join(' ') inc_dirs := flags.inc_paths.join(' ')
defines := flags.defines.join(' ') defines := flags.defines.join(' ')

View File

@ -1168,8 +1168,8 @@ pub fn (mut c Checker) call_method(mut call_expr ast.CallExpr) table.Type {
} }
} }
if got_arg_typ != table.void_type { if got_arg_typ != table.void_type {
c.error('cannot use type `$got_arg_sym.source_name` as type `$exp_arg_sym.source_name` in argument ${i+1} to `${left_type_sym.source_name}.$method_name`', c.error('cannot use type `$got_arg_sym.source_name` as type `$exp_arg_sym.source_name` in argument ${i +
call_expr.pos) 1} to `${left_type_sym.source_name}.$method_name`', call_expr.pos)
} }
} }
param := if method.is_variadic && i >= method.params.len - 1 { method.params[method.params.len - param := if method.is_variadic && i >= method.params.len - 1 { method.params[method.params.len -
@ -1186,8 +1186,8 @@ pub fn (mut c Checker) call_method(mut call_expr ast.CallExpr) table.Type {
} else { } else {
if param.is_mut && (!arg.is_mut || param.typ.share() != arg.share) { if param.is_mut && (!arg.is_mut || param.typ.share() != arg.share) {
tok := arg.share.str() tok := arg.share.str()
c.error('`$call_expr.name` parameter `$param.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${i+1}`', c.error('`$call_expr.name` parameter `$param.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${i +
arg.expr.position()) 1}`', arg.expr.position())
} }
} }
} }
@ -1460,8 +1460,8 @@ pub fn (mut c Checker) call_fn(mut call_expr ast.CallExpr) table.Type {
} else { } else {
if arg.is_mut && (!call_arg.is_mut || arg.typ.share() != call_arg.share) { if arg.is_mut && (!call_arg.is_mut || arg.typ.share() != call_arg.share) {
tok := call_arg.share.str() tok := call_arg.share.str()
c.error('`$call_expr.name` parameter `$arg.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${i+1}`', c.error('`$call_expr.name` parameter `$arg.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${i +
call_arg.expr.position()) 1}`', call_arg.expr.position())
} }
} }
// Handle expected interface // Handle expected interface
@ -1492,11 +1492,11 @@ pub fn (mut c Checker) call_fn(mut call_expr ast.CallExpr) table.Type {
} }
if typ_sym.kind == .function && arg_typ_sym.kind == .function { if typ_sym.kind == .function && arg_typ_sym.kind == .function {
candidate_fn_name := if typ_sym.source_name.starts_with('anon_') { 'anonymous function' } else { 'fn `$typ_sym.source_name`' } candidate_fn_name := if typ_sym.source_name.starts_with('anon_') { 'anonymous function' } else { 'fn `$typ_sym.source_name`' }
c.error('cannot use $candidate_fn_name as function type `$arg_typ_sym.str()` in argument ${i+1} to `$fn_name`', c.error('cannot use $candidate_fn_name as function type `$arg_typ_sym.str()` in argument ${i +
call_expr.pos) 1} to `$fn_name`', call_expr.pos)
} else { } else {
c.error('cannot use type `$typ_sym.source_name` as type `$arg_typ_sym.source_name` in argument ${i+1} to `$fn_name`', c.error('cannot use type `$typ_sym.source_name` as type `$arg_typ_sym.source_name` in argument ${i +
call_expr.pos) 1} to `$fn_name`', call_expr.pos)
} }
} }
} }
@ -3197,21 +3197,27 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, type_sym table.TypeSymbol
mut is_exhaustive := true mut is_exhaustive := true
mut unhandled := []string{} mut unhandled := []string{}
match type_sym.info as info { match type_sym.info as info {
table.SumType { for v in info.variants { table.SumType {
for v in info.variants {
v_str := c.table.type_to_str(v) v_str := c.table.type_to_str(v)
if v_str !in branch_exprs { if v_str !in branch_exprs {
is_exhaustive = false is_exhaustive = false
unhandled << '`$v_str`' unhandled << '`$v_str`'
} }
} } }
}
// //
table.Enum { for v in info.vals { table.Enum {
for v in info.vals {
if v !in branch_exprs { if v !in branch_exprs {
is_exhaustive = false is_exhaustive = false
unhandled << '`.$v`' unhandled << '`.$v`'
} }
} } }
else { is_exhaustive = false } }
else {
is_exhaustive = false
}
} }
mut else_branch := node.branches[node.branches.len - 1] mut else_branch := node.branches[node.branches.len - 1]
mut has_else := else_branch.is_else mut has_else := else_branch.is_else

View File

@ -1,6 +1,6 @@
vlib/v/checker/tests/modules/overload_return_type/main.v:8:11: error: cannot assign to `two`: expected `Point`, not `int` vlib/v/checker/tests/modules/overload_return_type/main.v:14:8: error: cannot assign to `two`: expected `Point`, not `int`
6 | one := Point {x:1, y:2} 12 | y: 1
7 | mut two := Point {x:5, y:1} 13 | }
8 | two = one + two 14 | two = one + two
| ~~~~~~~~~ | ~~~~~~~~~
9 | } 15 | }

View File

@ -3,7 +3,13 @@ module main
import point { Point } import point { Point }
fn main() { fn main() {
one := Point {x:1, y:2} one := Point{
mut two := Point {x:5, y:1} x: 1
two = one + two y: 2
}
mut two := Point{
x: 5
y: 1
}
two = one + two
} }

View File

@ -1,11 +1,11 @@
module point module point
pub struct Point { pub struct Point {
mut: mut:
x int x int
y int y int
} }
pub fn (a Point) +(b Point) int { pub fn (a Point) +(b Point) int {
return a.x + b.x return a.x + b.x
} }

View File

@ -35,8 +35,8 @@ pub fn (mut o OrderedDepMap) add(name string, deps []string) {
for dep in deps { for dep in deps {
if dep !in d { if dep !in d {
d << dep d << dep
} else {
} }
else{}
} }
o.set(name, d) o.set(name, d)
} }
@ -135,7 +135,7 @@ pub fn (graph &DepGraph) display() string {
} }
pub fn (graph &DepGraph) display_cycles() string { pub fn (graph &DepGraph) display_cycles() string {
mut node_names := map[string]DepGraphNode mut node_names := map[string]DepGraphNode{}
for node in graph.nodes { for node in graph.nodes {
node_names[node.name] = node node_names[node.name] = node
} }

View File

@ -56,7 +56,7 @@ pub mut:
pub fn merge_comments(comments []ast.Comment) string { pub fn merge_comments(comments []ast.Comment) string {
mut res := []string{} mut res := []string{}
for comment in comments { for comment in comments {
res << comment.text.trim_left('|') res << comment.text.trim_left('\x01')
} }
return res.join('\n') return res.join('\n')
} }
@ -74,7 +74,7 @@ pub fn get_comment_block_right_before(comments []ast.Comment) string {
// located right above the top level statement. // located right above the top level statement.
// break // break
} }
mut cmt_content := cmt.text.trim_left('|') mut cmt_content := cmt.text.trim_left('\x01')
if cmt_content.len == cmt.text.len || cmt.is_multi { if cmt_content.len == cmt.text.len || cmt.is_multi {
// ignore /* */ style comments for now // ignore /* */ style comments for now
continue continue

View File

@ -1,10 +1,9 @@
//import v.table // import v.table
//import v.doc // import v.doc
//import v.pref // import v.pref
// fn test_vdoc() { // fn test_vdoc() {
// mut prefs := &pref.Preferences{} // mut prefs := &pref.Preferences{}
// prefs.fill_with_defaults() // prefs.fill_with_defaults()
// table := table.new_table() // table := table.new_table()
// println(doc.doc('net', table, prefs)) // println(doc.doc('net', table, prefs))
// } // }

View File

@ -12,7 +12,7 @@ pub enum Reporter {
pub struct Error { pub struct Error {
pub: pub:
message string message string
details string details string
file_path string file_path string
pos token.Position pos token.Position
backtrace string backtrace string

View File

@ -34,14 +34,14 @@ pub fn (mut e Eval) eval(file ast.File, table &table.Table) string {
fn print_object(o Object) { fn print_object(o Object) {
match o { match o {
int { println(it) } int { println(o) }
else { println('unknown object') } else { println('unknown object') }
} }
} }
pub fn (o Object) str() string { pub fn (o Object) str() string {
match o { match o {
int { return it.str() } int { return o.str() }
else { println('unknown object') } else { println('unknown object') }
} }
return '' return ''
@ -53,18 +53,18 @@ fn (mut e Eval) stmt(node ast.Stmt) string {
// TODO; replaced VarDecl // TODO; replaced VarDecl
} }
ast.ExprStmt { ast.ExprStmt {
o := e.expr(it.expr) o := e.expr(node.expr)
print('out: ') print('out: ')
print_object(o) print_object(o)
return o.str() return o.str()
} }
// ast.StructDecl { // ast.StructDecl {
// println('s decl') // println('s decl')
// } // }
// ast.VarDecl { // ast.VarDecl {
// e.vars[it.name] = Var{ // e.vars[it.name] = Var{
// value: e.expr(it.expr) // value: e.expr(it.expr)
// } // }
// } // }
else {} else {}
} }
@ -74,20 +74,20 @@ fn (mut e Eval) stmt(node ast.Stmt) string {
fn (mut e Eval) expr(node ast.Expr) Object { fn (mut e Eval) expr(node ast.Expr) Object {
match node { match node {
ast.IntegerLiteral { ast.IntegerLiteral {
return it.val return node.val
} }
ast.Ident { ast.Ident {
print_object(it.value) print_object(node.value)
// Find the variable // Find the variable
v := e.vars[it.name] v := e.vars[node.name]
return v.value return v.value
} }
ast.InfixExpr { ast.InfixExpr {
e.checker.infix_expr(mut it) e.checker.infix_expr(mut node)
// println('bin $it.op') // println('bin $it.op')
left := e.expr(it.left) as int left := e.expr(node.left) as int
right := e.expr(it.right) as int right := e.expr(node.right) as int
match it.op { match node.op {
.plus { return left + right } .plus { return left + right }
.mul { return left * right } .mul { return left * right }
else {} else {}

View File

@ -38,7 +38,6 @@ pub mut:
file ast.File file ast.File
did_imports bool did_imports bool
is_assign bool is_assign bool
is_inside_interp bool
auto_imports []string // automatically inserted imports that the user forgot to specify auto_imports []string // automatically inserted imports that the user forgot to specify
import_pos int // position of the imports in the resulting string for later autoimports insertion import_pos int // position of the imports in the resulting string for later autoimports insertion
used_imports []string // to remove unused imports used_imports []string // to remove unused imports
@ -609,55 +608,48 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl) {
} }
end_pos := field.pos.pos + field.pos.len end_pos := field.pos.pos + field.pos.len
comments := field.comments comments := field.comments
if comments.len == 0 {
f.write('\t$field.name ')
f.write(strings.repeat(` `, max - field.name.len))
f.write(field_types[i])
if field.attrs.len > 0 && field.attrs[0].name != 'ref_only' { // TODO a bug with [ref_only] attr being added to fields, fix it
f.write(strings.repeat(` `, max_type - field_types[i].len))
f.inline_attrs(field.attrs)
}
if field.has_default_expr {
f.write(' = ')
f.prefix_expr_cast_expr(field.default_expr)
}
f.write('\n')
continue
}
// Handle comments before field // Handle comments before field
mut j := 0 mut comm_idx := 0
for j < comments.len && comments[j].pos.pos < field.pos.pos { for comm_idx < comments.len && comments[comm_idx].pos.pos < field.pos.pos {
f.indent++ f.indent++
f.empty_line = true f.empty_line = true
f.comment(comments[j], { f.comment(comments[comm_idx], {})
inline: true
})
f.writeln('') f.writeln('')
f.indent-- f.indent--
j++ comm_idx++
} }
f.write('\t$field.name ') f.write('\t$field.name ')
// Handle comments between field name and type // Handle comments between field name and type
mut comments_len := 0 mut comments_len := 0
for j < comments.len && comments[j].pos.pos < end_pos { for comm_idx < comments.len && comments[comm_idx].pos.pos < end_pos {
comment := '/* ${comments[j].text} */ ' // TODO: handle in a function comment_text := '/* ${comments[comm_idx].text} */ ' // TODO handle in a function
comments_len += comment.len comments_len += comment_text.len
f.write(comment) f.write(comment_text)
j++ comm_idx++
} }
f.write(strings.repeat(` `, max - field.name.len - comments_len)) f.write(strings.repeat(` `, max - field.name.len - comments_len))
f.write(field_types[i]) f.write(field_types[i])
f.inline_attrs(field.attrs) if field.attrs.len > 0 && field.attrs[0].name != 'ref_only' { // TODO a bug with [ref_only] attr being added to fields, fix it
f.write(strings.repeat(` `, max_type - field_types[i].len))
f.inline_attrs(field.attrs)
}
if field.has_default_expr { if field.has_default_expr {
f.write(' = ') f.write(' = ')
f.prefix_expr_cast_expr(field.default_expr) f.prefix_expr_cast_expr(field.default_expr)
} }
// Handle comments after field type (same line) // Handle comments after field type (same line)
for j < comments.len && field.pos.line_nr == comments[j].pos.line_nr { if comm_idx < comments.len {
f.write(' // ${comments[j].text}') // TODO: handle in a function if comments[comm_idx].pos.line_nr > field.pos.line_nr {
j++ f.writeln('')
} else {
f.write(' ')
}
f.comments(comments[comm_idx..], {
level: .indent
})
} else {
f.writeln('')
} }
f.write('\n')
} }
f.comments_after_last_field(node.end_comments) f.comments_after_last_field(node.end_comments)
f.writeln('}\n') f.writeln('}\n')
@ -1011,7 +1003,6 @@ pub fn (mut f Fmt) expr(node ast.Expr) {
} else { } else {
f.write("'") f.write("'")
} }
f.is_inside_interp = true
for i, val in node.vals { for i, val in node.vals {
f.write(val) f.write(val)
if i >= node.exprs.len { if i >= node.exprs.len {
@ -1028,7 +1019,6 @@ pub fn (mut f Fmt) expr(node ast.Expr) {
f.expr(node.exprs[i]) f.expr(node.exprs[i])
} }
} }
f.is_inside_interp = false
if contains_single_quote { if contains_single_quote {
f.write('"') f.write('"')
} else { } else {
@ -1093,11 +1083,7 @@ pub fn (mut f Fmt) call_args(args []ast.CallArg) {
} }
f.expr(arg.expr) f.expr(arg.expr)
if i < args.len - 1 { if i < args.len - 1 {
if f.is_inside_interp { f.write(', ')
f.write(',')
} else {
f.write(', ')
}
} }
} }
} }
@ -1145,16 +1131,20 @@ enum CommentsLevel {
indent indent
} }
// CommentsOptions defines the way comments are going to be written
// - has_nl: adds an newline at the end of the list of comments
// - inline: single-line comments will be on the same line as the last statement
// - level: either .keep (don't indent), or .indent (increment indentation)
struct CommentsOptions { struct CommentsOptions {
has_nl bool = true has_nl bool = true
inline bool inline bool
level CommentsLevel = .keep level CommentsLevel
} }
pub fn (mut f Fmt) comment(node ast.Comment, options CommentsOptions) { pub fn (mut f Fmt) comment(node ast.Comment, options CommentsOptions) {
if !node.text.contains('\n') { if !node.text.contains('\n') {
is_separate_line := !options.inline || node.text.starts_with('|') is_separate_line := !options.inline || node.text.starts_with('\x01')
mut s := if node.text.starts_with('|') { node.text[1..] } else { node.text } mut s := if node.text.starts_with('\x01') { node.text[1..] } else { node.text }
if s == '' { if s == '' {
s = '//' s = '//'
} else { } else {
@ -1258,73 +1248,67 @@ pub fn (mut f Fmt) lock_expr(lex ast.LockExpr) {
} }
pub fn (mut f Fmt) infix_expr(node ast.InfixExpr) { pub fn (mut f Fmt) infix_expr(node ast.InfixExpr) {
if f.is_inside_interp { buffering_save := f.buffering
f.expr(node.left) if !f.buffering {
f.write('$node.op.str()') f.out_save = f.out
f.expr(node.right)
} else {
buffering_save := f.buffering
if !f.buffering {
f.out_save = f.out
f.out = strings.new_builder(60)
f.buffering = true
}
f.expr(node.left)
is_one_val_array_init := node.op in [.key_in, .not_in] &&
node.right is ast.ArrayInit && (node.right as ast.ArrayInit).exprs.len == 1
if is_one_val_array_init {
// `var in [val]` => `var == val`
f.write(if node.op == .key_in {
' == '
} else {
' != '
})
} else {
f.write(' $node.op.str() ')
}
f.expr_bufs << f.out.str()
mut penalty := 3
match node.left as left {
ast.InfixExpr {
if int(token.precedences[left.op]) > int(token.precedences[node.op]) {
penalty--
}
}
ast.ParExpr {
penalty = 1
}
else {}
}
match node.right as right {
ast.InfixExpr { penalty-- }
ast.ParExpr { penalty = 1 }
else {}
}
f.penalties << penalty
// combine parentheses level with operator precedence to form effective precedence
f.precedences << int(token.precedences[node.op]) | (f.par_level << 16)
f.out = strings.new_builder(60) f.out = strings.new_builder(60)
f.buffering = true f.buffering = true
if is_one_val_array_init { }
// `var in [val]` => `var == val` f.expr(node.left)
f.expr((node.right as ast.ArrayInit).exprs[0]) is_one_val_array_init := node.op in [.key_in, .not_in] &&
node.right is ast.ArrayInit && (node.right as ast.ArrayInit).exprs.len == 1
if is_one_val_array_init {
// `var in [val]` => `var == val`
f.write(if node.op == .key_in {
' == '
} else { } else {
f.expr(node.right) ' != '
} })
if !buffering_save && f.buffering { // now decide if and where to break } else {
f.expr_bufs << f.out.str() f.write(' $node.op.str() ')
f.out = f.out_save }
f.buffering = false f.expr_bufs << f.out.str()
f.adjust_complete_line() mut penalty := 3
for i, p in f.penalties { match node.left as left {
f.write(f.expr_bufs[i]) ast.InfixExpr {
f.wrap_long_line(p, true) if int(token.precedences[left.op]) > int(token.precedences[node.op]) {
penalty--
} }
f.write(f.expr_bufs[f.expr_bufs.len - 1])
f.expr_bufs = []string{}
f.penalties = []int{}
f.precedences = []int{}
} }
ast.ParExpr {
penalty = 1
}
else {}
}
match node.right as right {
ast.InfixExpr { penalty-- }
ast.ParExpr { penalty = 1 }
else {}
}
f.penalties << penalty
// combine parentheses level with operator precedence to form effective precedence
f.precedences << int(token.precedences[node.op]) | (f.par_level << 16)
f.out = strings.new_builder(60)
f.buffering = true
if is_one_val_array_init {
// `var in [val]` => `var == val`
f.expr((node.right as ast.ArrayInit).exprs[0])
} else {
f.expr(node.right)
}
if !buffering_save && f.buffering { // now decide if and where to break
f.expr_bufs << f.out.str()
f.out = f.out_save
f.buffering = false
f.adjust_complete_line()
for i, p in f.penalties {
f.write(f.expr_bufs[i])
f.wrap_long_line(p, true)
}
f.write(f.expr_bufs[f.expr_bufs.len - 1])
f.expr_bufs = []string{}
f.penalties = []int{}
f.precedences = []int{}
} }
} }
@ -1499,13 +1483,9 @@ pub fn (mut f Fmt) match_expr(it ast.MatchExpr) {
if branch.stmts.len == 0 { if branch.stmts.len == 0 {
continue continue
} }
stmt := branch.stmts[0] if !stmt_is_single_line(branch.stmts[0]) {
if stmt is ast.ExprStmt { single_line = false
// If expressions inside match branches can't be one a single line break
if !expr_is_single_line(stmt.expr) {
single_line = false
break
}
} }
} }
for branch in it.branches { for branch in it.branches {
@ -1595,6 +1575,15 @@ fn (mut f Fmt) write_language_prefix(lang table.Language) {
} }
} }
fn stmt_is_single_line(stmt ast.Stmt) bool {
match stmt {
ast.ExprStmt { return expr_is_single_line(stmt.expr) }
ast.Return { return true }
ast.AssignStmt { return true }
else { return false }
}
}
fn expr_is_single_line(expr ast.Expr) bool { fn expr_is_single_line(expr ast.Expr) bool {
match expr { match expr {
ast.IfExpr { return false } ast.IfExpr { return false }

View File

@ -47,31 +47,31 @@ fn test_fmt() {
opath := ipath opath := ipath
expected_ocontent := os.read_file(opath) or { expected_ocontent := os.read_file(opath) or {
fmt_bench.fail() fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('cannot read from ${vrelpath}')) eprintln(fmt_bench.step_message_fail('cannot read from $vrelpath'))
continue continue
} }
table := table.new_table() table := table.new_table()
file_ast := parser.parse_file(ipath, table, .parse_comments, &pref.Preferences{ file_ast := parser.parse_file(ipath, table, .parse_comments, &pref.Preferences{
is_fmt: true, is_fmt: true
ccompiler: 'gcc' ccompiler: 'gcc'
}, &ast.Scope{ }, &ast.Scope{
parent: 0 parent: 0
}) })
result_ocontent := fmt.fmt(file_ast, table, false) result_ocontent := fmt.fmt(file_ast, table, false)
if expected_ocontent != result_ocontent { if expected_ocontent != result_ocontent {
fmt_bench.fail() fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('file ${vrelpath} after formatting, does not look as expected.')) eprintln(fmt_bench.step_message_fail('file $vrelpath after formatting, does not look as expected.'))
if diff_cmd == '' { if diff_cmd == '' {
eprintln('>> sorry, but no working "diff" CLI command can be found') eprintln('>> sorry, but no working "diff" CLI command can be found')
continue continue
} }
vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_${ifilename}') vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_$ifilename')
os.write_file(vfmt_result_file, result_ocontent) os.write_file(vfmt_result_file, result_ocontent)
eprintln(util.color_compare_files(diff_cmd, opath, vfmt_result_file)) eprintln(util.color_compare_files(diff_cmd, opath, vfmt_result_file))
continue continue
} }
fmt_bench.ok() fmt_bench.ok()
eprintln(fmt_bench.step_message_ok('${vrelpath}')) eprintln(fmt_bench.step_message_ok('$vrelpath'))
} }
fmt_bench.stop() fmt_bench.stop()
eprintln(term.h_divider('-')) eprintln(term.h_divider('-'))

View File

@ -23,7 +23,9 @@ fn test_fmt() {
} }
vroot := os.dir(vexe) vroot := os.dir(vexe)
tmpfolder := os.temp_dir() tmpfolder := os.temp_dir()
diff_cmd := util.find_working_diff_command() or { '' } diff_cmd := util.find_working_diff_command() or {
''
}
mut fmt_bench := benchmark.new_benchmark() mut fmt_bench := benchmark.new_benchmark()
// Lookup the existing test _input.vv files: // Lookup the existing test _input.vv files:
input_files := os.walk_ext('$vroot/vlib/v/fmt/tests', '_input.vv') input_files := os.walk_ext('$vroot/vlib/v/fmt/tests', '_input.vv')
@ -35,35 +37,35 @@ fn test_fmt() {
opath := ipath.replace('_input.vv', '_expected.vv') opath := ipath.replace('_input.vv', '_expected.vv')
if !os.exists(opath) { if !os.exists(opath) {
fmt_bench.fail() fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('missing file ${opath}')) eprintln(fmt_bench.step_message_fail('missing file $opath'))
continue continue
} }
expected_ocontent := os.read_file(opath) or { expected_ocontent := os.read_file(opath) or {
fmt_bench.fail() fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('cannot read from ${opath}')) eprintln(fmt_bench.step_message_fail('cannot read from $opath'))
continue continue
} }
table := table.new_table() table := table.new_table()
file_ast := parser.parse_file(ipath, table, .parse_comments, &pref.Preferences{ file_ast := parser.parse_file(ipath, table, .parse_comments, &pref.Preferences{
is_fmt: true is_fmt: true
}, &ast.Scope{ }, &ast.Scope{
parent: 0 parent: 0
}) })
result_ocontent := fmt.fmt(file_ast, table, false) result_ocontent := fmt.fmt(file_ast, table, false)
if expected_ocontent != result_ocontent { if expected_ocontent != result_ocontent {
fmt_bench.fail() fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('file ${ipath} after formatting, does not look as expected.')) eprintln(fmt_bench.step_message_fail('file $ipath after formatting, does not look as expected.'))
if diff_cmd == '' { if diff_cmd == '' {
eprintln('>> sorry, but no working "diff" CLI command can be found') eprintln('>> sorry, but no working "diff" CLI command can be found')
continue continue
} }
vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_${ifilename}') vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_$ifilename')
os.write_file(vfmt_result_file, result_ocontent) os.write_file(vfmt_result_file, result_ocontent)
eprintln(util.color_compare_files(diff_cmd, opath, vfmt_result_file)) eprintln(util.color_compare_files(diff_cmd, opath, vfmt_result_file))
continue continue
} }
fmt_bench.ok() fmt_bench.ok()
eprintln(fmt_bench.step_message_ok('${ipath}')) eprintln(fmt_bench.step_message_ok('$ipath'))
} }
fmt_bench.stop() fmt_bench.stop()
eprintln(term.h_divider('-')) eprintln(term.h_divider('-'))

View File

@ -40,30 +40,30 @@ fn test_vlib_fmt() {
opath := ipath opath := ipath
expected_ocontent := os.read_file(opath) or { expected_ocontent := os.read_file(opath) or {
fmt_bench.fail() fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('cannot read from ${opath}')) eprintln(fmt_bench.step_message_fail('cannot read from $opath'))
continue continue
} }
table := table.new_table() table := table.new_table()
file_ast := parser.parse_file(ipath, table, .parse_comments, &pref.Preferences{ file_ast := parser.parse_file(ipath, table, .parse_comments, &pref.Preferences{
is_fmt: true is_fmt: true
}, &ast.Scope{ }, &ast.Scope{
parent: 0 parent: 0
}) })
result_ocontent := fmt.fmt(file_ast, table, false) result_ocontent := fmt.fmt(file_ast, table, false)
if expected_ocontent != result_ocontent { if expected_ocontent != result_ocontent {
fmt_bench.fail() fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('file ${ipath} after formatting, does not look as expected.')) eprintln(fmt_bench.step_message_fail('file $ipath after formatting, does not look as expected.'))
if diff_cmd == '' { if diff_cmd == '' {
eprintln('>> sorry, but no working "diff" CLI command can be found') eprintln('>> sorry, but no working "diff" CLI command can be found')
continue continue
} }
vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_${ifilename}') vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_$ifilename')
os.write_file(vfmt_result_file, result_ocontent) os.write_file(vfmt_result_file, result_ocontent)
eprintln(util.color_compare_files(diff_cmd, opath, vfmt_result_file)) eprintln(util.color_compare_files(diff_cmd, opath, vfmt_result_file))
continue continue
} }
fmt_bench.ok() fmt_bench.ok()
eprintln(fmt_bench.step_message_ok('${ipath}')) eprintln(fmt_bench.step_message_ok('$ipath'))
} }
fmt_bench.stop() fmt_bench.stop()
eprintln(term.h_divider('-')) eprintln(term.h_divider('-'))

View File

@ -1,3 +1,7 @@
pub fn (a []int) reduce(iter fn (int, int) int, accum_start int) int { pub fn (a []int) reduce(iter fn (int, int) int, accum_start int) int {
iter(accum_start) iter(accum_start)
} }
pub fn test_anon_fn_void(func fn ()) int {
return 0
}

View File

@ -27,6 +27,6 @@ fn main() {
println('$st.a.xy${ar.a[2].xy}$aa.xy$z') println('$st.a.xy${ar.a[2].xy}$aa.xy$z')
println('${st.a.xy}ya ${ar.a[2].xy}X2 ${aa.xy}.b ${z}3') println('${st.a.xy}ya ${ar.a[2].xy}X2 ${aa.xy}.b ${z}3')
println('${z:-5} ${z:+5.3} ${z:+09.3f} ${z:-7.2} ${z:+09} ${z:08.3f}') println('${z:-5} ${z:+5.3} ${z:+09.3f} ${z:-7.2} ${z:+09} ${z:08.3f}')
println('$ar.f() ${ar.g(1,2)} ${ar.a}() ${z}(') println('$ar.f() ${ar.g(1, 2)} ${ar.a}() ${z}(')
println('${z>12.3*z-3} ${@VEXE} ${4*5}') println('${z > 12.3 * z - 3} ${@VEXE} ${4 * 5}')
} }

View File

@ -28,7 +28,9 @@ mut:
// 1 // 1
// 2 // 2
// 3 // 3
somefield /* 4 */ /* 5 */ int // 6 // 7 // 8 somefield /* 4 */ /* 5 */ int // 6
// 7
// 8
/* /*
9 9
10 10

View File

@ -170,7 +170,7 @@ fn (mut g Gen) gen_str_for_array_fixed(info table.ArrayFixed, styp string, str_f
g.auto_str_funcs.writeln('\t\tstrings__Builder_write(&sb, ${elem_str_fn_name}(*a[i]));') g.auto_str_funcs.writeln('\t\tstrings__Builder_write(&sb, ${elem_str_fn_name}(*a[i]));')
} }
} }
g.auto_str_funcs.writeln('\t\tif (i < ${info.size-1}) {') g.auto_str_funcs.writeln('\t\tif (i < ${info.size - 1}) {')
g.auto_str_funcs.writeln('\t\t\tstrings__Builder_write(&sb, tos_lit(", "));') g.auto_str_funcs.writeln('\t\t\tstrings__Builder_write(&sb, tos_lit(", "));')
g.auto_str_funcs.writeln('\t\t}') g.auto_str_funcs.writeln('\t\t}')
g.auto_str_funcs.writeln('\t}') g.auto_str_funcs.writeln('\t}')
@ -337,7 +337,7 @@ fn (mut g Gen) gen_str_for_struct(info table.Struct, styp string, str_fn_name st
} }
g.auto_str_funcs.writeln('\t\t"%.*s\\000 $field.name: $fmt\\n"') g.auto_str_funcs.writeln('\t\t"%.*s\\000 $field.name: $fmt\\n"')
} }
g.auto_str_funcs.write('\t\t"%.*s\\000}", ${2*(info.fields.len+1)}') g.auto_str_funcs.write('\t\t"%.*s\\000}", ${2 * (info.fields.len + 1)}')
if info.fields.len > 0 { if info.fields.len > 0 {
g.auto_str_funcs.write(',\n\t\t') g.auto_str_funcs.write(',\n\t\t')
for i, field in info.fields { for i, field in info.fields {

View File

@ -2601,7 +2601,7 @@ fn (mut g Gen) infix_expr(node ast.InfixExpr) {
node.op in [.eq, .ne, .gt, .lt, .ge, .le] { node.op in [.eq, .ne, .gt, .lt, .ge, .le] {
bitsize := if unaliased_left.idx() == table.u32_type_idx && bitsize := if unaliased_left.idx() == table.u32_type_idx &&
unaliased_right.idx() != table.i64_type_idx { 32 } else { 64 } unaliased_right.idx() != table.i64_type_idx { 32 } else { 64 }
g.write('_us${bitsize}_${cmp_str[int(node.op)-int(token.Kind.eq)]}(') g.write('_us${bitsize}_${cmp_str[int(node.op) - int(token.Kind.eq)]}(')
g.expr(node.left) g.expr(node.left)
g.write(',') g.write(',')
g.expr(node.right) g.expr(node.right)
@ -2610,7 +2610,7 @@ fn (mut g Gen) infix_expr(node ast.InfixExpr) {
node.op in [.eq, .ne, .gt, .lt, .ge, .le] { node.op in [.eq, .ne, .gt, .lt, .ge, .le] {
bitsize := if unaliased_right.idx() == table.u32_type_idx && bitsize := if unaliased_right.idx() == table.u32_type_idx &&
unaliased_left.idx() != table.i64_type_idx { 32 } else { 64 } unaliased_left.idx() != table.i64_type_idx { 32 } else { 64 }
g.write('_us${bitsize}_${cmp_rev[int(node.op)-int(token.Kind.eq)]}(') g.write('_us${bitsize}_${cmp_rev[int(node.op) - int(token.Kind.eq)]}(')
g.expr(node.right) g.expr(node.right)
g.write(',') g.write(',')
g.expr(node.left) g.expr(node.left)
@ -4016,7 +4016,7 @@ fn verror(s string) {
} }
fn (g &Gen) error(s string, pos token.Position) { fn (g &Gen) error(s string, pos token.Position) {
p := if pos.line_nr == 0 { '?' } else { '${pos.line_nr+1}' } p := if pos.line_nr == 0 { '?' } else { '${pos.line_nr + 1}' }
util.verror('$g.file.path:$p: cgen error', s) util.verror('$g.file.path:$p: cgen error', s)
} }
@ -4151,7 +4151,7 @@ fn (mut g Gen) write_types(types []table.TypeSymbol) {
styp, base := g.optional_type_name(field.typ) styp, base := g.optional_type_name(field.typ)
g.optionals << styp g.optionals << styp
g.typedefs2.writeln('typedef struct $styp $styp;') g.typedefs2.writeln('typedef struct $styp $styp;')
g.type_definitions.writeln('${g.optional_type_text(styp,base)};') g.type_definitions.writeln('${g.optional_type_text(styp, base)};')
g.type_definitions.write(last_text) g.type_definitions.write(last_text)
} }
type_name := g.typ(field.typ) type_name := g.typ(field.typ)
@ -5031,7 +5031,7 @@ fn (mut g Gen) go_stmt(node ast.GoStmt) {
g.writeln(';') g.writeln(';')
} }
for i, arg in expr.args { for i, arg in expr.args {
g.write('$arg_tmp_var->arg${i+1} = ') g.write('$arg_tmp_var->arg${i + 1} = ')
g.expr(arg.expr) g.expr(arg.expr)
g.writeln(';') g.writeln(';')
} }
@ -5056,7 +5056,7 @@ fn (mut g Gen) go_stmt(node ast.GoStmt) {
} else { } else {
for i, arg in expr.args { for i, arg in expr.args {
styp := g.typ(arg.typ) styp := g.typ(arg.typ)
g.type_definitions.writeln('\t$styp arg${i+1};') g.type_definitions.writeln('\t$styp arg${i + 1};')
} }
} }
g.type_definitions.writeln('} $wrapper_struct_name;') g.type_definitions.writeln('} $wrapper_struct_name;')
@ -5070,7 +5070,7 @@ fn (mut g Gen) go_stmt(node ast.GoStmt) {
} }
} }
for i in 0 .. expr.args.len { for i in 0 .. expr.args.len {
g.gowrappers.write('arg->arg${i+1}') g.gowrappers.write('arg->arg${i + 1}')
if i < expr.args.len - 1 { if i < expr.args.len - 1 {
g.gowrappers.write(', ') g.gowrappers.write(', ')
} }

View File

@ -32,14 +32,14 @@ fn test_c_files() {
res = res[..pos] + res[end + 15..] res = res[..pos] + res[end + 15..]
} }
if compare_texts(res, ctext, path) { if compare_texts(res, ctext, path) {
println('${term_ok} ${i}') println('$term_ok $i')
} else { } else {
assert false assert false
} }
} }
} }
fn compare_texts(a, b, path string) bool { fn compare_texts(a string, b string, path string) bool {
lines_a_ := a.trim_space().split_into_lines() lines_a_ := a.trim_space().split_into_lines()
lines_b_ := b.trim_space().split_into_lines() lines_b_ := b.trim_space().split_into_lines()
lines_a := lines_a_.filter(it != '') lines_a := lines_a_.filter(it != '')
@ -60,8 +60,8 @@ fn compare_texts(a, b, path string) bool {
} }
line_b := lines_b[i] line_b := lines_b[i]
if line_a.trim_space() != line_b.trim_space() { if line_a.trim_space() != line_b.trim_space() {
println('${path}: Got\n$a') println('$path: Got\n$a')
println('${path}:${i}: ${term_fail}') println('$path:$i: $term_fail')
println(term.bold(term.bright_yellow('actual : ')) + line_a) println(term.bold(term.bright_yellow('actual : ')) + line_a)
println(term.green('expected: ') + line_b) println(term.green('expected: ') + line_b)
println(lines_b[i + 1]) println(lines_b[i + 1])
@ -72,8 +72,3 @@ fn compare_texts(a, b, path string) bool {
} }
return true return true
} }
fn test_nested_if() {
a := if true { if true { 'a' } else { 'b' } } else { 'c' }
assert a == 'a'
}

View File

@ -35,7 +35,7 @@ fn (mut g Gen) gen_vlines_reset() {
g.vlines_path = util.vlines_escape_path(g.pref.out_name_c, g.pref.ccompiler) g.vlines_path = util.vlines_escape_path(g.pref.out_name_c, g.pref.ccompiler)
g.writeln('') g.writeln('')
g.writeln('\n// Reset the file/line numbers') g.writeln('\n// Reset the file/line numbers')
g.writeln('\n#line $lines_so_far "${g.vlines_path}"') g.writeln('\n#line $lines_so_far "$g.vlines_path"')
g.writeln('') g.writeln('')
} }
} }
@ -111,7 +111,6 @@ void (_vsokol_cleanup_userdata_cb)(void* user_data) {
} }
') ')
} }
g.writeln('// The sokol_main entry point on Android g.writeln('// The sokol_main entry point on Android
sapp_desc sokol_main(int argc, char* argv[]) { sapp_desc sokol_main(int argc, char* argv[]) {
(void)argc; (void)argv; (void)argc; (void)argv;

View File

@ -53,9 +53,9 @@ fn (mut g Gen) comptime_call(node ast.ComptimeCall) {
if m.params[i].typ.is_int() || m.params[i].typ.idx() == table.bool_type_idx { if m.params[i].typ.is_int() || m.params[i].typ.idx() == table.bool_type_idx {
// Gets the type name and cast the string to the type with the string_<type> function // Gets the type name and cast the string to the type with the string_<type> function
type_name := g.table.types[int(m.params[i].typ)].str() type_name := g.table.types[int(m.params[i].typ)].str()
g.write('string_${type_name}(((string*)${node.args_var}.data) [${i-1}])') g.write('string_${type_name}(((string*)${node.args_var}.data) [${i - 1}])')
} else { } else {
g.write('((string*)${node.args_var}.data) [${i-1}] ') g.write('((string*)${node.args_var}.data) [${i - 1}] ')
} }
if i < m.params.len - 1 { if i < m.params.len - 1 {
g.write(', ') g.write(', ')
@ -106,7 +106,9 @@ fn (mut g Gen) comp_if(node ast.IfExpr) {
stmt_str := g.go_before_stmt(0) stmt_str := g.go_before_stmt(0)
g.write(tabs[g.indent]) g.write(tabs[g.indent])
stmt_str.trim_space() stmt_str.trim_space()
} else { '' } } else {
''
}
for i, branch in node.branches { for i, branch in node.branches {
start_pos := g.out.len start_pos := g.out.len
if i == node.branches.len - 1 && node.has_else { if i == node.branches.len - 1 && node.has_else {
@ -132,7 +134,7 @@ fn (mut g Gen) comp_if(node ast.IfExpr) {
g.indent++ g.indent++
g.writeln('$styp $tmp;') g.writeln('$styp $tmp;')
g.writeln('{') g.writeln('{')
g.stmts(branch.stmts[0 .. len - 1]) g.stmts(branch.stmts[0..len - 1])
g.write('\t$tmp = ') g.write('\t$tmp = ')
g.stmt(last) g.stmt(last)
g.writeln('}') g.writeln('}')
@ -146,13 +148,21 @@ fn (mut g Gen) comp_if(node ast.IfExpr) {
} else { } else {
// Only wrap the contents in {} if we're inside a function, not on the top level scope // Only wrap the contents in {} if we're inside a function, not on the top level scope
should_create_scope := g.fn_decl != 0 should_create_scope := g.fn_decl != 0
if should_create_scope { g.writeln('{') } if should_create_scope {
g.writeln('{')
}
g.stmts(branch.stmts) g.stmts(branch.stmts)
if should_create_scope { g.writeln('}') } if should_create_scope {
g.writeln('}')
}
} }
g.defer_ifdef = '' g.defer_ifdef = ''
} }
if node.is_expr { g.write('#endif') } else { g.writeln('#endif') } if node.is_expr {
g.write('#endif')
} else {
g.writeln('#endif')
}
} }
fn (mut g Gen) comp_if_expr(cond ast.Expr) { fn (mut g Gen) comp_if_expr(cond ast.Expr) {
@ -161,13 +171,16 @@ fn (mut g Gen) comp_if_expr(cond ast.Expr) {
g.write('(') g.write('(')
g.comp_if_expr(cond.expr) g.comp_if_expr(cond.expr)
g.write(')') g.write(')')
} ast.PrefixExpr { }
ast.PrefixExpr {
g.write(cond.op.str()) g.write(cond.op.str())
g.comp_if_expr(cond.right) g.comp_if_expr(cond.right)
} ast.PostfixExpr { }
ast.PostfixExpr {
ifdef := g.comp_if_to_ifdef((cond.expr as ast.Ident).name, true) ifdef := g.comp_if_to_ifdef((cond.expr as ast.Ident).name, true)
g.write('defined($ifdef)') g.write('defined($ifdef)')
} ast.InfixExpr { }
ast.InfixExpr {
match cond.op { match cond.op {
.and, .logical_or { .and, .logical_or {
g.comp_if_expr(cond.left) g.comp_if_expr(cond.left)
@ -180,14 +193,18 @@ fn (mut g Gen) comp_if_expr(cond ast.Expr) {
exp_type := g.comptime_var_type_map[name] exp_type := g.comptime_var_type_map[name]
got_type := (cond.right as ast.Type).typ got_type := (cond.right as ast.Type).typ
g.write('$exp_type == $got_type') g.write('$exp_type == $got_type')
} .eq, .ne { }
.eq, .ne {
// TODO Implement `$if method.args.len == 1` // TODO Implement `$if method.args.len == 1`
} else {} }
else {}
} }
} ast.Ident { }
ast.Ident {
ifdef := g.comp_if_to_ifdef(cond.name, false) ifdef := g.comp_if_to_ifdef(cond.name, false)
g.write('defined($ifdef)') g.write('defined($ifdef)')
} else {} }
else {}
} }
} }

View File

@ -48,7 +48,7 @@ fn (mut g Gen) sql_stmt(node ast.SqlStmt) {
if field.name == 'id' { if field.name == 'id' {
continue continue
} }
g.write('?${i+0}') g.write('?${i + 0}')
if i < node.fields.len - 1 { if i < node.fields.len - 1 {
g.write(', ') g.write(', ')
} }
@ -78,9 +78,9 @@ fn (mut g Gen) sql_stmt(node ast.SqlStmt) {
} }
x := '${node.object_var_name}.$field.name' x := '${node.object_var_name}.$field.name'
if field.typ == table.string_type { if field.typ == table.string_type {
g.writeln('sqlite3_bind_text($g.sql_stmt_name, ${i+0}, ${x}.str, ${x}.len, 0);') g.writeln('sqlite3_bind_text($g.sql_stmt_name, ${i + 0}, ${x}.str, ${x}.len, 0);')
} else { } else {
g.writeln('sqlite3_bind_int($g.sql_stmt_name, ${i+0}, $x); // stmt') g.writeln('sqlite3_bind_int($g.sql_stmt_name, ${i + 0}, $x); // stmt')
} }
} }
} }

View File

@ -34,11 +34,7 @@ const (
) )
pub fn (mut g Gen) generate_elf_header() { pub fn (mut g Gen) generate_elf_header() {
g.buf << [byte(mag0), g.buf << [byte(mag0), mag1, mag2, mag3]
mag1,
mag2,
mag3
]
g.buf << elfclass64 // file class g.buf << elfclass64 // file class
g.buf << elfdata2lsb // data encoding g.buf << elfdata2lsb // data encoding
g.buf << ev_current // file version g.buf << ev_current // file version

View File

@ -32,7 +32,7 @@ fn test_x64() {
bench.step() bench.step()
full_test_path := os.real_path(test) full_test_path := os.real_path(test)
println('x.v: $wrkdir/x.v') println('x.v: $wrkdir/x.v')
os.system('cp ${dir}/${test} $wrkdir/x.v') // cant run .vv file os.system('cp $dir/$test $wrkdir/x.v') // cant run .vv file
os.exec('$vexe -o exe -x64 $wrkdir/x.v') or { os.exec('$vexe -o exe -x64 $wrkdir/x.v') or {
bench.fail() bench.fail()
eprintln(bench.step_message_fail('x64 $test failed')) eprintln(bench.step_message_fail('x64 $test failed'))

View File

@ -63,9 +63,15 @@ fn (mut p Parser) check_cross_variables(exprs []ast.Expr, val ast.Expr) bool {
} }
} }
} }
ast.InfixExpr { return p.check_cross_variables(exprs, val_.left) || p.check_cross_variables(exprs, val_.right) } ast.InfixExpr {
ast.PrefixExpr { return p.check_cross_variables(exprs, val_.right) } return p.check_cross_variables(exprs, val_.left) || p.check_cross_variables(exprs, val_.right)
ast.PostfixExpr { return p.check_cross_variables(exprs, val_.expr) } }
ast.PrefixExpr {
return p.check_cross_variables(exprs, val_.right)
}
ast.PostfixExpr {
return p.check_cross_variables(exprs, val_.expr)
}
ast.SelectorExpr { ast.SelectorExpr {
for expr in exprs { for expr in exprs {
if expr.str() == val.str() { if expr.str() == val.str() {
@ -119,7 +125,8 @@ fn (mut p Parser) partial_assign_stmt(left []ast.Expr, left_comments []ast.Comme
share = iv.share share = iv.share
if iv.is_static { if iv.is_static {
if !p.pref.translated { if !p.pref.translated {
p.error_with_pos('static variables are supported only in -translated mode', lx.pos) p.error_with_pos('static variables are supported only in -translated mode',
lx.pos)
} }
is_static = true is_static = true
} }

View File

@ -22,7 +22,7 @@ fn (mut p Parser) hash() ast.HashStmt {
val := p.tok.lit val := p.tok.lit
kind := val.all_before(' ') kind := val.all_before(' ')
p.next() p.next()
//p.trace('a.v', 'kind: ${kind:-10s} | pos: ${pos:-45s} | hash: $val') // p.trace('a.v', 'kind: ${kind:-10s} | pos: ${pos:-45s} | hash: $val')
return ast.HashStmt{ return ast.HashStmt{
mod: p.mod mod: p.mod
val: val val: val

View File

@ -72,7 +72,8 @@ fn (mut p Parser) array_init() ast.ArrayInit {
last_pos = p.tok.position() last_pos = p.tok.position()
p.check(.rcbr) p.check(.rcbr)
} else { } else {
p.warn_with_pos('use e.g. `x := [1]Type{}` instead of `x := [1]Type`', last_pos) p.warn_with_pos('use e.g. `x := [1]Type{}` instead of `x := [1]Type`',
last_pos)
} }
} else { } else {
if p.tok.kind == .not { if p.tok.kind == .not {

View File

@ -13,8 +13,6 @@ fn (mut p Parser) lock_expr() ast.LockExpr {
for p.tok.kind == .name { for p.tok.kind == .name {
lockeds << ast.Ident{ lockeds << ast.Ident{
language: table.Language.v language: table.Language.v
// kind is set in checker once ident is processed
// kind: .variable
pos: p.tok.position() pos: p.tok.position()
mod: p.mod mod: p.mod
name: p.tok.lit name: p.tok.lit

View File

@ -36,8 +36,8 @@ fn (mut p Parser) register_auto_import(alias string) {
p.imports[alias] = alias p.imports[alias] = alias
p.table.imports << alias p.table.imports << alias
node := ast.Import{ node := ast.Import{
pos: p.tok.position() pos: p.tok.position()
mod: alias mod: alias
alias: alias alias: alias
} }
p.ast_imports << node p.ast_imports << node

View File

@ -21,7 +21,8 @@ pub struct Parser {
pref &pref.Preferences pref &pref.Preferences
mut: mut:
scanner &scanner.Scanner scanner &scanner.Scanner
comments_mode scanner.CommentsMode = .skip_comments // see comment in parse_file comments_mode scanner.CommentsMode = .skip_comments
// see comment in parse_file
tok token.Token tok token.Token
prev_tok token.Token prev_tok token.Token
peek_tok token.Token peek_tok token.Token
@ -832,7 +833,7 @@ pub fn (mut p Parser) warn_with_pos(s string, pos token.Position) {
} }
pub fn (mut p Parser) vet_error(s string, line int) { pub fn (mut p Parser) vet_error(s string, line int) {
p.vet_errors << '$p.scanner.file_path:${line+1}: $s' p.vet_errors << '$p.scanner.file_path:${line + 1}: $s'
} }
fn (mut p Parser) parse_multi_expr(is_top_level bool) ast.Stmt { fn (mut p Parser) parse_multi_expr(is_top_level bool) ast.Stmt {

View File

@ -47,13 +47,9 @@ pub fn (mut p Parser) expr(precedence int) ast.Expr {
} }
.dollar { .dollar {
match p.peek_tok.kind { match p.peek_tok.kind {
.name { .name { return p.vweb() }
return p.vweb() .key_if { return p.if_expr(true) }
} .key_if { else { p.error('unexpected $') }
return p.if_expr(true)
} else {
p.error('unexpected $')
}
} }
} }
.chartoken { .chartoken {
@ -275,7 +271,8 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
return node return node
} }
// added 10/2020: LATER this will be parsed as PrefixExpr instead // added 10/2020: LATER this will be parsed as PrefixExpr instead
p.warn_with_pos('move infix `$p.tok.kind` operator before new line (if infix intended) or use brackets for a prefix expression', p.tok.position()) p.warn_with_pos('move infix `$p.tok.kind` operator before new line (if infix intended) or use brackets for a prefix expression',
p.tok.position())
} }
// continue on infix expr // continue on infix expr
node = p.infix_expr(node) node = p.infix_expr(node)
@ -286,9 +283,9 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
} else if p.tok.kind in [.inc, .dec] || (p.tok.kind == .question && p.inside_ct_if_expr) { } else if p.tok.kind in [.inc, .dec] || (p.tok.kind == .question && p.inside_ct_if_expr) {
// Postfix // Postfix
// detect `f(x++)`, `a[x++]` // detect `f(x++)`, `a[x++]`
if p.peek_tok.kind in [.rpar, .rsbr] && if p.peek_tok.kind in [.rpar, .rsbr] && p.mod !in ['builtin', 'regex', 'strconv'] { // temp
p.mod !in ['builtin', 'regex', 'strconv'] { // temp p.warn_with_pos('`$p.tok.kind` operator can only be used as a statement',
p.warn_with_pos('`$p.tok.kind` operator can only be used as a statement', p.peek_tok.position()) p.peek_tok.position())
} }
node = ast.PostfixExpr{ node = ast.PostfixExpr{
op: p.tok.kind op: p.tok.kind
@ -321,8 +318,8 @@ fn (mut p Parser) infix_expr(left ast.Expr) ast.Expr {
} }
right = p.expr(precedence) right = p.expr(precedence)
p.expecting_type = prev_expecting_type p.expecting_type = prev_expecting_type
if p.pref.is_vet && op in [.key_in, .not_in] && if p.pref.is_vet && op in [.key_in, .not_in] && right is ast.ArrayInit && (right as ast.ArrayInit).exprs.len ==
right is ast.ArrayInit && (right as ast.ArrayInit).exprs.len == 1 { 1 {
p.vet_error('Use `var == value` instead of `var in [value]`', pos.line_nr) p.vet_error('Use `var == value` instead of `var in [value]`', pos.line_nr)
} }
return ast.InfixExpr{ return ast.InfixExpr{

View File

@ -154,22 +154,8 @@ fn (mut p Parser) struct_decl() ast.StructDecl {
typ := p.parse_type() typ := p.parse_type()
type_pos := p.prev_tok.position() type_pos := p.prev_tok.position()
field_pos := field_start_pos.extend(type_pos) field_pos := field_start_pos.extend(type_pos)
// if name == '_net_module_s' {
// if name.contains('App') {
// s := p.table.get_type_symbol(typ)
// println('struct decl field type ' + s.str())
// }
// Comments after type (same line) // Comments after type (same line)
line_pos := field_pos.line_nr comments << p.eat_comments()
for p.tok.kind == .comment && line_pos + 1 == p.tok.line_nr {
if p.tok.lit.contains('\n') {
break
}
comments << p.comment()
if p.tok.kind == .rcbr {
break
}
}
if p.tok.kind == .lsbr { if p.tok.kind == .lsbr {
// attrs are stored in `p.attrs` // attrs are stored in `p.attrs`
p.attributes() p.attributes()
@ -284,38 +270,38 @@ fn (mut p Parser) struct_init(short_syntax bool) ast.StructInit {
p.is_amp = false p.is_amp = false
for p.tok.kind != .rcbr && p.tok.kind != .rpar { for p.tok.kind != .rcbr && p.tok.kind != .rpar {
mut field_name := '' mut field_name := ''
mut expr := ast.Expr{}
mut field_pos := token.Position{}
mut comments := []ast.Comment{}
if no_keys { if no_keys {
expr := p.expr(0)
comments := p.eat_comments()
// name will be set later in checker // name will be set later in checker
fields << ast.StructInitField{ expr = p.expr(0)
expr: expr field_pos = expr.position()
pos: expr.position() comments = p.eat_comments()
comments: comments
}
} else { } else {
first_field_pos := p.tok.position() first_field_pos := p.tok.position()
field_name = p.check_name() field_name = p.check_name()
p.check(.colon) p.check(.colon)
expr := p.expr(0) expr = p.expr(0)
comments := p.eat_comments() comments = p.eat_comments()
last_field_pos := expr.position() last_field_pos := expr.position()
field_pos := token.Position{ field_pos = token.Position{
line_nr: first_field_pos.line_nr line_nr: first_field_pos.line_nr
pos: first_field_pos.pos pos: first_field_pos.pos
len: last_field_pos.pos - first_field_pos.pos + last_field_pos.len len: last_field_pos.pos - first_field_pos.pos + last_field_pos.len
} }
fields << ast.StructInitField{
name: field_name
expr: expr
pos: field_pos
comments: comments
}
} }
i++ i++
if p.tok.kind == .comma { if p.tok.kind == .comma {
p.next() p.next()
} }
comments << p.eat_comments()
fields << ast.StructInitField{
name: field_name
expr: expr
pos: field_pos
comments: comments
}
} }
last_pos := p.tok.position() last_pos := p.tok.position()
if !short_syntax { if !short_syntax {

View File

@ -125,14 +125,15 @@ fn test_parse_expr() {
if true { if true {
return return
} }
input := ['1 == 1', '234234', '2 * 8 + 3', 'a := 3', 'a++', 'b := 4 + 2', 'neg := -a', input := ['1 == 1', '234234', '2 * 8 + 3', 'a := 3', 'a++', 'b := 4 + 2', 'neg := -a', 'a + a',
'a + a', 'bo := 2 + 3 == 5', '2 + 1', 'q := 1', 'q + 777', '2 + 3', '2+2*4', 'x := 10', 'bo := 2 + 3 == 5', '2 + 1', 'q := 1', 'q + 777', '2 + 3', '2+2*4', 'x := 10', 'mut aa := 12',
'mut aa := 12', 'ab := 10 + 3 * 9', 's := "hi"', 'x = 11', 'a += 10', '1.2 + 3.4', '4 + 4', 'ab := 10 + 3 * 9', 's := "hi"', 'x = 11', 'a += 10', '1.2 + 3.4', '4 + 4', '1 + 2 * 5', '-a+1',
'1 + 2 * 5', '-a+1', '2+2'] '2+2',
expecting := ['1 == 1;', '234234;', '2 * 8 + 3;', 'int a = 3;', 'a++;', 'int b = 4 + 2;', ]
'int neg = -a;', 'a + a;', 'bool bo = 2 + 3 == 5;', '2 + 1;', 'int q = 1;', 'q + 777;', expecting := ['1 == 1;', '234234;', '2 * 8 + 3;', 'int a = 3;', 'a++;', 'int b = 4 + 2;', 'int neg = -a;',
'2 + 3;', '2 + 2 * 4;', 'int x = 10;', 'int aa = 12;', 'int ab = 10 + 3 * 9;', 'string s = tos3("hi");', 'a + a;', 'bool bo = 2 + 3 == 5;', '2 + 1;', 'int q = 1;', 'q + 777;', '2 + 3;', '2 + 2 * 4;',
'x = 11;', 'a += 10;', '1.2 + 3.4;', '4 + 4;', '1 + 2 * 5;', '-a + 1;', '2 + 2;'] 'int x = 10;', 'int aa = 12;', 'int ab = 10 + 3 * 9;', 'string s = tos3("hi");', 'x = 11;', 'a += 10;',
'1.2 + 3.4;', '4 + 4;', '1 + 2 * 5;', '-a + 1;', '2 + 2;']
mut e := []ast.Stmt{} mut e := []ast.Stmt{}
table := table.new_table() table := table.new_table()
vpref := &pref.Preferences{} vpref := &pref.Preferences{}

View File

@ -22,95 +22,39 @@ pub enum OS {
// Helper function to convert string names to OS enum // Helper function to convert string names to OS enum
pub fn os_from_string(os_str string) ?OS { pub fn os_from_string(os_str string) ?OS {
match os_str { match os_str {
'linux' { 'linux' { return .linux }
return .linux 'windows' { return .windows }
} 'ios' { return .ios }
'windows' { 'macos' { return .macos }
return .windows 'freebsd' { return .freebsd }
} 'openbsd' { return .openbsd }
'ios' { 'netbsd' { return .netbsd }
return .ios 'dragonfly' { return .dragonfly }
} 'js' { return .js }
'macos' { 'solaris' { return .solaris }
return .macos 'android' { return .android }
} 'haiku' { return .haiku }
'freebsd' { 'linux_or_macos' { return .linux }
return .freebsd '' { return ._auto }
} else { return error('bad OS $os_str') }
'openbsd' {
return .openbsd
}
'netbsd' {
return .netbsd
}
'dragonfly' {
return .dragonfly
}
'js' {
return .js
}
'solaris' {
return .solaris
}
'android' {
return .android
}
'haiku' {
return .haiku
}
'linux_or_macos' {
return .linux
}
'' {
return ._auto
}
else {
return error('bad OS $os_str')
}
} }
} }
pub fn (o OS) str() string { pub fn (o OS) str() string {
match o { match o {
._auto { ._auto { return 'RESERVED: AUTO' }
return 'RESERVED: AUTO' .ios { return 'iOS' }
} .macos { return 'MacOS' }
.ios { .linux { return 'Linux' }
return 'iOS' .windows { return 'Windows' }
} .freebsd { return 'FreeBSD' }
.macos { .openbsd { return 'OpenBSD' }
return 'MacOS' .netbsd { return 'NetBSD' }
} .dragonfly { return 'Dragonfly' }
.linux { .js { return 'JavaScript' }
return 'Linux' .android { return 'Android' }
} .solaris { return 'Solaris' }
.windows { .haiku { return 'Haiku' }
return 'Windows'
}
.freebsd {
return 'FreeBSD'
}
.openbsd {
return 'OpenBSD'
}
.netbsd {
return 'NetBSD'
}
.dragonfly {
return 'Dragonfly'
}
.js {
return 'JavaScript'
}
.android {
return 'Android'
}
.solaris {
return 'Solaris'
}
.haiku {
return 'Haiku'
}
} }
} }

View File

@ -1104,7 +1104,7 @@ fn (mut s Scanner) text_scan() token.Token {
} }
} }
if is_separate_line_comment { if is_separate_line_comment {
comment = '|' + comment comment = '\x01' + comment
} }
return s.new_token(.comment, comment, comment.len + 2) return s.new_token(.comment, comment, comment.len + 2)
} }

View File

@ -3,7 +3,7 @@ module scanner
import os import os
struct TestStruct { struct TestStruct {
test string test string
} }
fn (mut t TestStruct) test_struct() { fn (mut t TestStruct) test_struct() {
@ -15,18 +15,19 @@ fn (mut t TestStruct) test_struct_w_return() string {
return t.test return t.test
} }
fn (mut t TestStruct) test_struct_w_high_order(cb fn(int)string) string { fn (mut t TestStruct) test_struct_w_high_order(cb fn (int) string) string {
assert @STRUCT == 'TestStruct' assert @STRUCT == 'TestStruct'
return 'test'+cb(2) return 'test' + cb(2)
} }
struct TestFn { } struct TestFn {
}
fn (mut t TestFn) tst_1() { fn (mut t TestFn) tst_1() {
assert @FN == 'tst_1' assert @FN == 'tst_1'
} }
fn (mut t TestFn) tst_2(cb fn(int)) { fn (mut t TestFn) tst_2(cb fn (int)) {
assert @FN == 'tst_2' assert @FN == 'tst_2'
cb(1) cb(1)
} }
@ -35,7 +36,7 @@ fn fn_name_mod_level() {
assert @FN == 'fn_name_mod_level' assert @FN == 'fn_name_mod_level'
} }
fn fn_name_mod_level_high_order(cb fn(int)) { fn fn_name_mod_level_high_order(cb fn (int)) {
assert @FN == 'fn_name_mod_level_high_order' assert @FN == 'fn_name_mod_level_high_order'
cb(1) cb(1)
} }
@ -49,16 +50,14 @@ fn test_at_file() {
fn test_at_fn() { fn test_at_fn() {
// Test @FN // Test @FN
assert @FN == 'test_at_fn' assert @FN == 'test_at_fn'
fn_name_mod_level() fn_name_mod_level()
fn_name_mod_level_high_order(fn(i int){ fn_name_mod_level_high_order(fn (i int) {
t := i + 1 t := i + 1
assert t == 2 assert t == 2
}) })
mut tfn := TestFn{} mut tfn := TestFn{}
tfn.tst_1() tfn.tst_1()
tfn.tst_2(fn(i int){ tfn.tst_2(fn (i int) {
t := i + 1 t := i + 1
assert t == 2 assert t == 2
}) })
@ -72,10 +71,12 @@ fn test_at_mod() {
fn test_at_struct() { fn test_at_struct() {
// Test @STRUCT // Test @STRUCT
assert @STRUCT == '' assert @STRUCT == ''
mut ts := TestStruct { test: "test" } mut ts := TestStruct{
test: 'test'
}
ts.test_struct() ts.test_struct()
r1 := ts.test_struct_w_return() r1 := ts.test_struct_w_return()
r2 := ts.test_struct_w_high_order(fn(i int)string{ r2 := ts.test_struct_w_high_order(fn (i int) string {
assert @STRUCT == '' assert @STRUCT == ''
return i.str() return i.str()
}) })

View File

@ -59,14 +59,12 @@ fn test_float_without_fraction() {
assert result[0] == .name assert result[0] == .name
assert result[1] == .decl_assign assert result[1] == .decl_assign
assert result[2] == .number assert result[2] == .number
result = scan_kinds('return 3., 4.') result = scan_kinds('return 3., 4.')
assert result.len == 4 assert result.len == 4
assert result[0] == .key_return assert result[0] == .key_return
assert result[1] == .number assert result[1] == .number
assert result[2] == .comma assert result[2] == .comma
assert result[3] == .number assert result[3] == .number
result = scan_kinds('fun(5.)') result = scan_kinds('fun(5.)')
assert result.len == 4 assert result.len == 4
assert result[0] == .name assert result[0] == .name

View File

@ -6,10 +6,10 @@ module table
// e.g. `[unsafe]` // e.g. `[unsafe]`
pub struct Attr { pub struct Attr {
pub: pub:
name string // [name] name string // [name]
is_string bool // ['name'] is_string bool // ['name']
is_ctdefine bool // [if name] is_ctdefine bool // [if name]
arg string // [name: arg] arg string // [name: arg]
is_string_arg bool // [name: 'arg'] is_string_arg bool // [name: 'arg']
} }
@ -21,8 +21,7 @@ pub fn (attr Attr) str() string {
} }
if attr.is_string { if attr.is_string {
s += "'$attr.name'" s += "'$attr.name'"
} } else {
else {
s += attr.name s += attr.name
if attr.arg.len > 0 { if attr.arg.len > 0 {
s += ': ' s += ': '
@ -31,8 +30,7 @@ pub fn (attr Attr) str() string {
// FIXME: other escapes e.g. \r\n // FIXME: other escapes e.g. \r\n
a = a.replace("'", "\\'") a = a.replace("'", "\\'")
s += "'$a'" s += "'$a'"
} } else {
else {
s += attr.arg s += attr.arg
} }
} }

View File

@ -64,7 +64,7 @@ fn assert_parse_invalid_flag(mut t table.Table, flag string) {
assert false assert false
} }
fn make_flag(os, name, value string) cflag.CFlag { fn make_flag(os string, name string, value string) cflag.CFlag {
return cflag.CFlag{ return cflag.CFlag{
mod: module_name mod: module_name
os: os os: os

View File

@ -117,7 +117,10 @@ pub fn (f &Fn) source_signature() string {
sig += ', ' sig += ', '
} }
} }
sig += ') $f.return_type_source_name' sig += ')'
if f.return_type != void_type {
sig += ' $f.return_type_source_name'
}
return sig return sig
} }

View File

@ -56,7 +56,7 @@ fn opendiff_exists() bool {
pub fn color_compare_files(diff_cmd string, file1 string, file2 string) string { pub fn color_compare_files(diff_cmd string, file1 string, file2 string) string {
if diff_cmd != '' { if diff_cmd != '' {
full_cmd := '$diff_cmd --minimal --text --unified=2 ' + ' --show-function-line="fn " "$file1" "$file2" ' full_cmd := '$diff_cmd --minimal --text --unified=2 --show-function-line="fn " "$file1" "$file2" '
x := os.exec(full_cmd) or { x := os.exec(full_cmd) or {
return 'comparison command: `$full_cmd` failed' return 'comparison command: `$full_cmd` failed'
} }

View File

@ -91,7 +91,7 @@ pub fn formatted_error(kind string, omsg string, filepath string, pos token.Posi
} }
} }
column := imax(0, pos.pos - p - 1) column := imax(0, pos.pos - p - 1)
position := '$path:${pos.line_nr+1}:${imax(1,column+1)}:' position := '$path:${pos.line_nr + 1}:${imax(1, column + 1)}:'
scontext := source_context(kind, source, column, pos).join('\n') scontext := source_context(kind, source, column, pos).join('\n')
final_position := bold(position) final_position := bold(position)
final_kind := bold(color(kind, kind)) final_kind := bold(color(kind, kind))
@ -116,7 +116,7 @@ pub fn source_context(kind string, source string, column int, pos token.Position
end_column := imax(0, imin(column + imax(0, pos.len), sline.len)) end_column := imax(0, imin(column + imax(0, pos.len), sline.len))
cline := if iline == pos.line_nr { sline[..start_column] + color(kind, sline[start_column..end_column]) + cline := if iline == pos.line_nr { sline[..start_column] + color(kind, sline[start_column..end_column]) +
sline[end_column..] } else { sline } sline[end_column..] } else { sline }
clines << '${iline+1:5d} | ' + cline.replace('\t', tab_spaces) clines << '${iline + 1:5d} | ' + cline.replace('\t', tab_spaces)
// //
if iline == pos.line_nr { if iline == pos.line_nr {
// The pointerline should have the same spaces/tabs as the offending // The pointerline should have the same spaces/tabs as the offending

View File

@ -21,7 +21,7 @@ fn get_tests_in_dir(dir string) []string {
return tests return tests
} }
fn check_path(vexe, dir string, tests []string) int { fn check_path(vexe string, dir string, tests []string) int {
mut nb_fail := 0 mut nb_fail := 0
paths := vtest.filter_vtest_only(tests, { paths := vtest.filter_vtest_only(tests, {
basepath: dir basepath: dir

View File

@ -166,7 +166,7 @@ fn get_array_content(tokens []Token, st_idx int) ?([]string, int) {
.str { .str {
vals << tok.val vals << tok.val
if tokens[idx + 1].typ !in [.comma, .rabr] { if tokens[idx + 1].typ !in [.comma, .rabr] {
return error('vmod: invalid separator "${tokens[idx+1].val}"') return error('vmod: invalid separator "${tokens[idx + 1].val}"')
} }
idx += if tokens[idx + 1].typ == .comma { 2 } else { 1 } idx += if tokens[idx + 1].typ == .comma { 2 } else { 1 }
} }
@ -233,14 +233,14 @@ fn (mut p Parser) parse() ?Manifest {
mn.author = field_value mn.author = field_value
} }
'dependencies' { 'dependencies' {
deps, idx := get_array_content(tokens, i + 1)? deps, idx := get_array_content(tokens, i + 1) ?
mn.dependencies = deps mn.dependencies = deps
i = idx i = idx
continue continue
} }
else { else {
if tokens[i + 1].typ == .labr { if tokens[i + 1].typ == .labr {
vals, idx := get_array_content(tokens, i + 1)? vals, idx := get_array_content(tokens, i + 1) ?
mn.unknown[field_name] = vals mn.unknown[field_name] = vals
i = idx i = idx
continue continue