vfmt: fix `match x { 10 /* ... */ {} }` and `a := [1/* x */, /* y */2, 3]`
parent e6cc967ac8
commit f42a44abec
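For context, a minimal V sketch of the two constructs named in the commit title (assembled from the title and the new test files further down, not copied verbatim from the repository). With this fix, `v fmt` keeps the embedded `/* ... */` comments in place instead of turning the rest of the line into a `//` comment:

// Hedged sketch: assumes a V compiler that includes this commit.
fn main() {
    x := 10
    match x {
        10 /* embedded comment after the branch pattern */ { println('ten') }
        else { println('something else') }
    }
    a := [1 /* y */, /* x */ 2, 3] // embedded comments between array elements
    println(a)
}
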
@@ -4,19 +4,24 @@ import os
 import testing
 import v.util

-// os.v - // embeded comments, mib := [1/* CTL_KERN */, 14/* KERN_PROC */, 12/* KERN_PROC_PATHNAME */, -1] => comment the rest of the line
 const (
     known_failing_exceptions = [
-        'vlib/crypto/aes/const.v', // multiple narrow columns of []string turned to 1 long single column, otherwise works
-        'vlib/vweb/vweb.v', // $for method in T.methods { => $for method in T(methods) { , `return // xx` => parse expr error
-        'vlib/v/gen/js/tests/life.v', // error: unexpected `,`, expecting ), on JS.setInterval(fn () { show(game) game = step(game) }, 500)
-        'vlib/builtin/js/builtin.v', // JS.console.error(s) => JS.error(s), JS.process.exit(c) => JS.exit(c)
+        'vlib/crypto/aes/const.v',
+        /* multiple narrow columns of []string turned to 1 long single column, otherwise works */
+        'vlib/vweb/vweb.v',
+        /* $for method in T.methods { => $for method in T(methods) { , `return // xx` => parse expr error */
+        'vlib/v/gen/js/tests/life.v',
+        /* error: unexpected `,`, expecting ), on JS.setInterval(fn () { show(game) game = step(game) }, 500) */
+        'vlib/builtin/js/builtin.v',
+        /* JS.console.error(s) => JS.error(s), JS.process.exit(c) => JS.exit(c) */
         'vlib/builtin/js/jsfns_node.js.v',
         'vlib/builtin/js/jsfns.js.v',
         'vlib/builtin/js/jsfns_browser.js.v',
-        'vlib/builtin/bare/linuxsys_bare.v', // error: expr(): bad token `asm`, on `asm {}`
-        'vlib/picoev/picoev.v', // the fn args are removed, then `cb fn (picohttpparser.Request, mut picohttpparser.Response)` can not be reparsed
-        'vlib/os/os.v',
+        'vlib/builtin/bare/linuxsys_bare.v',
+        /* error: expr(): bad token `asm`, on `asm {}` */
+        'vlib/picoev/picoev.v',
+        /* the fn args are removed, then `cb fn (picohttpparser.Request, mut picohttpparser.Response)` can not be reparsed */
+        'vlib/os/os.v' /* os.v - `a := [ c'/bin/sh', c'-c', byteptr(cmd.str), 0 ]` */,
     ]
 )

@@ -5,19 +5,19 @@ import testing
 import v.util

 const (
-    vet_known_failing_exceptions = [
+    vet_known_failing_exceptions = [
         'nonexistent',
     ]
-    vet_folders = [
+    vet_folders = [
         'vlib/sqlite',
         'vlib/v',
         'cmd/v',
         'cmd/tools',
     ]
     verify_known_failing_exceptions = [
-        'nonexistant'
+        'nonexistant',
     ]
-    verify_list = [
+    verify_list = [
         'vlib/builtin/array.v',
         'vlib/os/file.v',
         'vlib/math/bits/bits.v',

@@ -78,7 +78,8 @@ fn tsession(vargs string, tool_source string, tool_cmd string, tool_args string,

 fn v_test_vetting(vargs string) {
     vet_session := tsession(vargs, 'vvet.v', 'v vet', 'vet', vet_folders, vet_known_failing_exceptions)
-    verify_session := tsession(vargs, 'vfmt.v', 'v fmt -verify', 'fmt -verify', verify_list, verify_known_failing_exceptions)
+    verify_session := tsession(vargs, 'vfmt.v', 'v fmt -verify', 'fmt -verify', verify_list,
+        verify_known_failing_exceptions)
     //
     if vet_session.benchmark.nfail > 0 || verify_session.benchmark.nfail > 0 {
         eprintln('\n')

@@ -574,6 +574,7 @@ pub mut:
 pub struct MatchBranch {
 pub:
     exprs []Expr // left side
+    ecmnts [][]Comment // inline comments for each left side expr
     stmts []Stmt // right side
     pos token.Position
     comments []Comment // comment above `xxx {`

@@ -805,6 +806,7 @@ pub:
     pos token.Position
     elem_type_pos token.Position
     exprs []Expr // `[expr, expr]` or `[expr]Type{}` for fixed array
+    ecmnts [][]Comment // optional iembed comments after each expr
     is_fixed bool
     has_val bool // fixed size literal `[expr, expr]!!`
     mod string

@@ -808,9 +808,15 @@ pub fn (mut f Fmt) expr(node ast.Expr) {
             f.write('`$node.val`')
         }
         ast.Comment {
-            f.comment(node, {
-                inline: true
-            })
+            if f.array_init_depth > 0 {
+                f.comment(node, {
+                    iembed: true
+                })
+            } else {
+                f.comment(node, {
+                    inline: true
+                })
+            }
         }
         ast.ComptimeCall {
             if node.is_vweb {

@@ -1188,14 +1194,21 @@ enum CommentsLevel {
 // CommentsOptions defines the way comments are going to be written
 // - has_nl: adds an newline at the end of the list of comments
 // - inline: single-line comments will be on the same line as the last statement
-// - level: either .keep (don't indent), or .indent (increment indentation)
+// - iembed: a /* ... */ embedded comment; used in expressions; // comments the whole line
+// - level: either .keep (don't indent), or .indent (increment indentation)
 struct CommentsOptions {
     has_nl bool = true
     inline bool
     level CommentsLevel
+    iembed bool
 }

 pub fn (mut f Fmt) comment(node ast.Comment, options CommentsOptions) {
+    if options.iembed {
+        x := node.text.replace('\n', ' ')
+        f.write('/* $x */')
+        return
+    }
     if !node.text.contains('\n') {
         is_separate_line := !options.inline || node.text.starts_with('\x01')
         mut s := if node.text.starts_with('\x01') { node.text[1..] } else { node.text }

@@ -1575,6 +1588,14 @@ pub fn (mut f Fmt) match_expr(it ast.MatchExpr) {
         f.is_mbranch_expr = true
         for j, expr in branch.exprs {
             f.expr(expr)
+            if j < branch.ecmnts.len && branch.ecmnts[j].len > 0 {
+                f.write(' ')
+                for cmnt in branch.ecmnts[j] {
+                    f.comment(cmnt, {
+                        iembed: true
+                    })
+                }
+            }
             if j < branch.exprs.len - 1 {
                 f.write(', ')
             }

@@ -1777,6 +1798,14 @@ pub fn (mut f Fmt) array_init(it ast.ArrayInit) {
             f.write(' ')
         }
         f.expr(expr)
+        if i < it.ecmnts.len && it.ecmnts[i].len > 0 {
+            f.write(' ')
+            for cmt in it.ecmnts[i] {
+                f.comment(cmt, {
+                    iembed: true
+                })
+            }
+        }
         if i == it.exprs.len - 1 {
             if is_new_line {
                 if expr !is ast.Comment {

@@ -0,0 +1,2 @@
+a := [1 /* y */, /* x */ 2, 3]
+println(a)
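
A quick aside on this new two-line test input: it is a complete V program on its own, and assuming V's default array printing (an inference, not something this diff states), running it with `v run` prints:

[1, 2, 3]

What the test actually exercises, though, is that `v fmt` now leaves both embedded comments where they are instead of folding them into a line comment.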

@@ -1,9 +1,9 @@
 fn main() {
     arr := [
-        // test 0
+        /* test 0 */
         1,
-        // test 1
+        /* test 1 */
         2,
-        // test 2
+        /* test 2 */
     ]
 }

@@ -16,6 +16,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
     mut elem_type := table.void_type
     mut elem_type_pos := first_pos
     mut exprs := []ast.Expr{}
+    mut ecmnts := [][]ast.Comment{}
     mut is_fixed := false
     mut has_val := false
     mut has_type := false

@@ -40,6 +41,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
         // [1,2,3] or [const]byte
         for i := 0; p.tok.kind != .rsbr; i++ {
             exprs << p.expr(0)
+            ecmnts << p.eat_comments()
             if p.tok.kind == .comma {
                 p.next()
             }

@@ -136,6 +138,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
         elem_type: elem_type
         typ: array_type
         exprs: exprs
+        ecmnts: ecmnts
         pos: pos
         elem_type_pos: elem_type_pos
         has_len: has_len

@@ -205,6 +205,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
         branch_first_pos := p.tok.position()
         comments := p.eat_comments() // comments before {}
         mut exprs := []ast.Expr{}
+        mut ecmnts := [][]ast.Comment{}
         p.open_scope()
         // final else
         mut is_else := false

@@ -232,6 +233,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
             for {
                 // Sum type match
                 parsed_type := p.parse_type()
+                ecmnts << p.eat_comments()
                 types << parsed_type
                 exprs << ast.Type{
                     typ: parsed_type

@@ -294,6 +296,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
             for {
                 p.inside_match_case = true
                 expr := p.expr(0)
+                ecmnts << p.eat_comments()
                 p.inside_match_case = false
                 if p.tok.kind == .dotdot {
                     p.error_with_pos('match only supports inclusive (`...`) ranges, not exclusive (`..`)',

@@ -331,6 +334,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
         post_comments := p.eat_comments()
         branches << ast.MatchBranch{
             exprs: exprs
+            ecmnts: ecmnts
             stmts: stmts
             pos: pos
             comments: comments

@@ -71,7 +71,8 @@ pub fn (mut p Preferences) fill_with_defaults() {
     // Prepare the cache manager. All options that can affect the generated cached .c files
     // should go into res.cache_manager.vopts, which is used as a salt for the cache hash.
     p.cache_manager = vcache.new_cache_manager([
-        @VHASH, // ensure that different v versions use separate build artefacts
+        @VHASH,
+        /* ensure that different v versions use separate build artefacts */
         '$p.backend | $p.os | $p.ccompiler',
         p.cflags.trim_space(),
         p.third_party_option.trim_space(),