vfmt: fix `match x { 10 /* ... */ {} }` and `a := [1/* x */, /* y */2, 3]`
parent e6cc967ac8
commit f42a44abec
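The commit title refers to V source like the sketch below, where `/* ... */` comments are embedded inside an array literal or between a match value and its block. The `mib` line is taken from the note this commit removes from the exceptions list in the first hunk; the rest is an illustrative assumption, not code from the diff. Before this change, vfmt rewrote such embedded comments as `//` comments, which commented out the remainder of the line:

    x := 10
    mib := [1 /* CTL_KERN */, 14 /* KERN_PROC */, 12 /* KERN_PROC_PATHNAME */, -1]
    match x {
        10 /* matched value */ {}
        else {}
    }
    println(mib)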
@@ -4,19 +4,24 @@ import os
 import testing
 import v.util
 
-// os.v - // embeded comments, mib := [1/* CTL_KERN */, 14/* KERN_PROC */, 12/* KERN_PROC_PATHNAME */, -1] => comment the rest of the line
 const (
 	known_failing_exceptions = [
-		'vlib/crypto/aes/const.v', // multiple narrow columns of []string turned to 1 long single column, otherwise works
-		'vlib/vweb/vweb.v', // $for method in T.methods { => $for method in T(methods) { , `return // xx` => parse expr error
-		'vlib/v/gen/js/tests/life.v', // error: unexpected `,`, expecting ), on JS.setInterval(fn () { show(game) game = step(game) }, 500)
-		'vlib/builtin/js/builtin.v', // JS.console.error(s) => JS.error(s), JS.process.exit(c) => JS.exit(c)
+		'vlib/crypto/aes/const.v',
+		/* multiple narrow columns of []string turned to 1 long single column, otherwise works */
+		'vlib/vweb/vweb.v',
+		/* $for method in T.methods { => $for method in T(methods) { , `return // xx` => parse expr error */
+		'vlib/v/gen/js/tests/life.v',
+		/* error: unexpected `,`, expecting ), on JS.setInterval(fn () { show(game) game = step(game) }, 500) */
+		'vlib/builtin/js/builtin.v',
+		/* JS.console.error(s) => JS.error(s), JS.process.exit(c) => JS.exit(c) */
 		'vlib/builtin/js/jsfns_node.js.v',
 		'vlib/builtin/js/jsfns.js.v',
 		'vlib/builtin/js/jsfns_browser.js.v',
-		'vlib/builtin/bare/linuxsys_bare.v', // error: expr(): bad token `asm`, on `asm {}`
-		'vlib/picoev/picoev.v', // the fn args are removed, then `cb fn (picohttpparser.Request, mut picohttpparser.Response)` can not be reparsed
-		'vlib/os/os.v',
+		'vlib/builtin/bare/linuxsys_bare.v',
+		/* error: expr(): bad token `asm`, on `asm {}` */
+		'vlib/picoev/picoev.v',
+		/* the fn args are removed, then `cb fn (picohttpparser.Request, mut picohttpparser.Response)` can not be reparsed */
+		'vlib/os/os.v' /* os.v - `a := [ c'/bin/sh', c'-c', byteptr(cmd.str), 0 ]` */,
 	]
 )
 
@@ -5,19 +5,19 @@ import testing
 import v.util
 
 const (
 	vet_known_failing_exceptions = [
 		'nonexistent',
 	]
 	vet_folders = [
 		'vlib/sqlite',
 		'vlib/v',
 		'cmd/v',
 		'cmd/tools',
 	]
 	verify_known_failing_exceptions = [
-		'nonexistant'
+		'nonexistant',
 	]
 	verify_list = [
 		'vlib/builtin/array.v',
 		'vlib/os/file.v',
 		'vlib/math/bits/bits.v',
@@ -78,7 +78,8 @@ fn tsession(vargs string, tool_source string, tool_cmd string, tool_args string,
 
 fn v_test_vetting(vargs string) {
 	vet_session := tsession(vargs, 'vvet.v', 'v vet', 'vet', vet_folders, vet_known_failing_exceptions)
-	verify_session := tsession(vargs, 'vfmt.v', 'v fmt -verify', 'fmt -verify', verify_list, verify_known_failing_exceptions)
+	verify_session := tsession(vargs, 'vfmt.v', 'v fmt -verify', 'fmt -verify', verify_list,
+		verify_known_failing_exceptions)
 	//
 	if vet_session.benchmark.nfail > 0 || verify_session.benchmark.nfail > 0 {
 		eprintln('\n')
@@ -574,6 +574,7 @@ pub mut:
 pub struct MatchBranch {
 pub:
 	exprs []Expr // left side
+	ecmnts [][]Comment // inline comments for each left side expr
 	stmts []Stmt // right side
 	pos token.Position
 	comments []Comment // comment above `xxx {`
@@ -805,6 +806,7 @@ pub:
 	pos token.Position
 	elem_type_pos token.Position
 	exprs []Expr // `[expr, expr]` or `[expr]Type{}` for fixed array
+	ecmnts [][]Comment // optional iembed comments after each expr
 	is_fixed bool
 	has_val bool // fixed size literal `[expr, expr]!!`
 	mod string
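To make the new `ecmnts` fields concrete: they hold one (possibly empty) list of comments per left-side expression, collected right after that expression is parsed. A rough sketch of the resulting data, assuming only trailing embedded comments are captured this way (the literal comes from the comment removed in the first hunk; the rest is illustrative):

    // input:  mib := [1 /* CTL_KERN */, 14 /* KERN_PROC */, -1]
    // exprs:  [1, 14, -1]
    // ecmnts: [[/* CTL_KERN */], [/* KERN_PROC */], []] // one entry per element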
@@ -808,9 +808,15 @@ pub fn (mut f Fmt) expr(node ast.Expr) {
 			f.write('`$node.val`')
 		}
 		ast.Comment {
-			f.comment(node, {
-				inline: true
-			})
+			if f.array_init_depth > 0 {
+				f.comment(node, {
+					iembed: true
+				})
+			} else {
+				f.comment(node, {
+					inline: true
+				})
+			}
 		}
 		ast.ComptimeCall {
 			if node.is_vweb {
@@ -1188,14 +1194,21 @@ enum CommentsLevel {
 // CommentsOptions defines the way comments are going to be written
 // - has_nl: adds an newline at the end of the list of comments
 // - inline: single-line comments will be on the same line as the last statement
-// - level: either .keep (don't indent), or .indent (increment indentation)
+// - iembed: a /* ... */ embedded comment; used in expressions; // comments the whole line
+// - level: either .keep (don't indent), or .indent (increment indentation)
 struct CommentsOptions {
 	has_nl bool = true
 	inline bool
 	level CommentsLevel
+	iembed bool
 }
 
 pub fn (mut f Fmt) comment(node ast.Comment, options CommentsOptions) {
+	if options.iembed {
+		x := node.text.replace('\n', ' ')
+		f.write('/* $x */')
+		return
+	}
 	if !node.text.contains('\n') {
 		is_separate_line := !options.inline || node.text.starts_with('\x01')
 		mut s := if node.text.starts_with('\x01') { node.text[1..] } else { node.text }
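In short, `iembed: true` makes the writer emit the comment inline as `/* ... */`, flattening any newlines in the comment text to spaces, so it can sit inside an expression instead of commenting out the rest of the line. A hedged usage sketch (the node value is made up):

    // assuming node.text == 'CTL_KERN'
    f.comment(node, { iembed: true }) // writes: /* CTL_KERN */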
@@ -1575,6 +1588,14 @@ pub fn (mut f Fmt) match_expr(it ast.MatchExpr) {
 		f.is_mbranch_expr = true
 		for j, expr in branch.exprs {
 			f.expr(expr)
+			if j < branch.ecmnts.len && branch.ecmnts[j].len > 0 {
+				f.write(' ')
+				for cmnt in branch.ecmnts[j] {
+					f.comment(cmnt, {
+						iembed: true
+					})
+				}
+			}
 			if j < branch.exprs.len - 1 {
 				f.write(', ')
 			}
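With the branch comments threaded through, a match arm like the one in the commit title should keep its embedded comment between the value and the block. A minimal sketch of the intended formatting (illustrative, not a file from this diff):

    match x {
        10 /* some note */ {}
        else {}
    }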
@@ -1777,6 +1798,14 @@ pub fn (mut f Fmt) array_init(it ast.ArrayInit) {
 			f.write(' ')
 		}
 		f.expr(expr)
+		if i < it.ecmnts.len && it.ecmnts[i].len > 0 {
+			f.write(' ')
+			for cmt in it.ecmnts[i] {
+				f.comment(cmt, {
+					iembed: true
+				})
+			}
+		}
 		if i == it.exprs.len - 1 {
 			if is_new_line {
 				if expr !is ast.Comment {
@@ -0,0 +1,2 @@
+a := [1 /* y */, /* x */ 2, 3]
+println(a)
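This new two-line file looks like the vfmt expectation for the array case in the commit title; presumably a denser input such as the one sketched in the comments below is expected to be rewritten with the spacing shown above (the paired input file is not part of this excerpt):

    // hypothetical unformatted input:
    //   a := [1/* y */, /* x */2, 3]
    //   println(a)
    // expected output after `v fmt`, matching the new file above:
    a := [1 /* y */, /* x */ 2, 3]
    println(a)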
@@ -1,9 +1,9 @@
 fn main() {
 	arr := [
-		// test 0
+		/* test 0 */
 		1,
-		// test 1
+		/* test 1 */
 		2,
-		// test 2
+		/* test 2 */
 	]
 }
@@ -16,6 +16,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
 	mut elem_type := table.void_type
 	mut elem_type_pos := first_pos
 	mut exprs := []ast.Expr{}
+	mut ecmnts := [][]ast.Comment{}
 	mut is_fixed := false
 	mut has_val := false
 	mut has_type := false
@@ -40,6 +41,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
 	// [1,2,3] or [const]byte
 	for i := 0; p.tok.kind != .rsbr; i++ {
 		exprs << p.expr(0)
+		ecmnts << p.eat_comments()
 		if p.tok.kind == .comma {
 			p.next()
 		}
@@ -136,6 +138,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
 		elem_type: elem_type
 		typ: array_type
 		exprs: exprs
+		ecmnts: ecmnts
 		pos: pos
 		elem_type_pos: elem_type_pos
 		has_len: has_len
@@ -205,6 +205,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
 	branch_first_pos := p.tok.position()
 	comments := p.eat_comments() // comments before {}
 	mut exprs := []ast.Expr{}
+	mut ecmnts := [][]ast.Comment{}
 	p.open_scope()
 	// final else
 	mut is_else := false
@@ -232,6 +233,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
 		for {
 			// Sum type match
 			parsed_type := p.parse_type()
+			ecmnts << p.eat_comments()
 			types << parsed_type
 			exprs << ast.Type{
 				typ: parsed_type
@@ -294,6 +296,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
 		for {
 			p.inside_match_case = true
 			expr := p.expr(0)
+			ecmnts << p.eat_comments()
 			p.inside_match_case = false
 			if p.tok.kind == .dotdot {
 				p.error_with_pos('match only supports inclusive (`...`) ranges, not exclusive (`..`)',
@@ -331,6 +334,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
 	post_comments := p.eat_comments()
 	branches << ast.MatchBranch{
 		exprs: exprs
+		ecmnts: ecmnts
 		stmts: stmts
 		pos: pos
 		comments: comments
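Mirroring the array case, each match branch now records per-value comments collected right after the value is parsed. A rough sketch of the resulting branch data for an arm with two values (illustrative, not from the diff):

    // arm:    10 /* ten */, 20 { ... }
    // exprs:  [10, 20]
    // ecmnts: [[/* ten */], []]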
@@ -71,7 +71,8 @@ pub fn (mut p Preferences) fill_with_defaults() {
 	// Prepare the cache manager. All options that can affect the generated cached .c files
 	// should go into res.cache_manager.vopts, which is used as a salt for the cache hash.
 	p.cache_manager = vcache.new_cache_manager([
-		@VHASH, // ensure that different v versions use separate build artefacts
+		@VHASH,
+		/* ensure that different v versions use separate build artefacts */
 		'$p.backend | $p.os | $p.ccompiler',
 		p.cflags.trim_space(),
 		p.third_party_option.trim_space(),