fmt: keep single empty lines (#8189)
parent 103901a5cb
commit 129eee346b
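As a rough illustration of the new behaviour (a hypothetical `demo` snippet, not part of this commit — the real cases are the vlib/v/fmt/tests hunks below), source written as

fn demo() {
	println('start')



	println('end')
}

is now formatted with the run of blank lines squashed to a single empty line between the two calls, while an existing single empty line is kept as-is and leading/trailing empty lines inside a block are dropped.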
@@ -1210,6 +1210,7 @@ pub fn (expr Expr) position() token.Position {
 				line_nr: expr.pos.line_nr
 				pos: left_pos.pos
 				len: right_pos.pos - left_pos.pos + right_pos.len
+				last_line: right_pos.last_line
 			}
 		}
 		CTempVar {

@@ -4,9 +4,9 @@ vlib/v/checker/tests/prefix_expr_decl_assign_err.vv:2:5: error: non-name on the
   | ^
 3 | (*d) := 14
 4 | }
-vlib/v/checker/tests/prefix_expr_decl_assign_err.vv:3:10: error: non-name `(*d)` on left side of `:=`
+vlib/v/checker/tests/prefix_expr_decl_assign_err.vv:3:5: error: non-name `(*d)` on left side of `:=`
 1 | fn main() {
 2 | &a := 12
 3 | (*d) := 14
-  | ~~
+  | ~~~~
 4 | }

@@ -339,8 +339,18 @@ pub fn (f Fmt) imp_stmt_str(imp ast.Import) string {
 
 pub fn (mut f Fmt) stmts(stmts []ast.Stmt) {
 	f.indent++
+	mut prev_line_nr := 0
+	if stmts.len >= 1 {
+		prev_pos := stmts[0].position()
+		prev_line_nr = util.imax(prev_pos.line_nr, prev_pos.last_line)
+	}
 	for stmt in stmts {
+		if stmt.position().line_nr - prev_line_nr > 1 {
+			f.out.writeln('')
+		}
 		f.stmt(stmt)
+		prev_pos := stmt.position()
+		prev_line_nr = util.imax(prev_pos.line_nr, prev_pos.last_line)
 	}
 	f.indent--
 }

@@ -0,0 +1,21 @@
+fn squash_multiple_empty_lines() {
+	println('a')
+
+	println('b')
+
+	c := 0
+
+	d := 0
+}
+
+fn remove_leading_and_trailing_empty_lines() {
+	println('a')
+
+	println('b')
+
+	if test {
+		c := 0
+	} else {
+		d := 0
+	}
+}

@@ -0,0 +1,30 @@
+fn squash_multiple_empty_lines() {
+	println('a')
+
+
+	println('b')
+
+
+	c := 0
+
+
+	d := 0
+}
+
+fn remove_leading_and_trailing_empty_lines() {
+
+	println('a')
+
+	println('b')
+
+	if test {
+
+		c := 0
+
+	} else {
+
+		d := 0
+
+	}
+
+}

@@ -0,0 +1,34 @@
+fn keep_single_empty_line() {
+	println('a')
+	println('b')
+
+	println('c')
+
+	d := 0
+
+	if true {
+		println('e')
+	}
+
+	f := 0
+}
+
+fn prevent_empty_line_after_multi_line_statements() {
+	// line1
+	/*
+	block1
+	*/
+	/*
+	block2
+	*/
+	if test {
+		println('a')
+	}
+	println('b')
+	for test {
+		println('c')
+	}
+	c := fn (s string) {
+		println('s')
+	}
+}

@@ -35,6 +35,7 @@ fn string_inter_lit(mut c checker.Checker, mut node ast.StringInterLiteral) tabl
 		}
 		node.need_fmts[i] = fmt != c.get_default_fmt(ftyp, typ)
 	}
+
 	return table.string_type
 }
 

@@ -9,5 +9,6 @@ fn my_thread_with_params(s string) {
 fn my_fn_calling_threads() {
	go my_thread()
	go my_thread_with_params('yay')
+
	go my_thread_with_params('nono')
 }

@@ -88,7 +88,7 @@ fn (mut p Parser) check_cross_variables(exprs []ast.Expr, val ast.Expr) bool {
 fn (mut p Parser) partial_assign_stmt(left []ast.Expr, left_comments []ast.Comment) ast.Stmt {
 	p.is_stmt_ident = false
 	op := p.tok.kind
-	pos := p.tok.position()
+	mut pos := p.tok.position()
 	p.next()
 	mut comments := []ast.Comment{cap: 2 * left_comments.len + 1}
 	comments << left_comments

@@ -195,6 +195,7 @@ fn (mut p Parser) partial_assign_stmt(left []ast.Expr, left_comments []ast.Comme
 			}
 		}
 	}
+	pos.update_last_line(p.prev_tok.line_nr)
 	return ast.AssignStmt{
 		op: op
 		left: left

@@ -130,7 +130,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
 		}
 		p.check(.rcbr)
 	}
-	pos := first_pos.extend(last_pos)
+	pos := first_pos.extend_with_last_line(last_pos, p.prev_tok.line_nr)
 	return ast.ArrayInit{
 		is_fixed: is_fixed
 		has_val: has_val

@@ -151,7 +151,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
 }
 
 fn (mut p Parser) map_init() ast.MapInit {
-	pos := p.tok.position()
+	mut pos := p.tok.position()
 	mut keys := []ast.Expr{}
 	mut vals := []ast.Expr{}
 	for p.tok.kind != .rcbr && p.tok.kind != .eof {

@@ -167,6 +167,7 @@ fn (mut p Parser) map_init() ast.MapInit {
 			p.next()
 		}
 	}
+	pos.update_last_line(p.tok.line_nr)
 	return ast.MapInit{
 		keys: keys
 		vals: vals

@@ -57,7 +57,7 @@ pub fn (mut p Parser) call_expr(language table.Language, mod string) ast.CallExp
 	if p.tok.kind == .not {
 		p.next()
 	}
-	pos := first_pos.extend(last_pos)
+	mut pos := first_pos.extend(last_pos)
 	mut or_stmts := []ast.Stmt{} // TODO remove unnecessary allocations by just using .absent
 	mut or_pos := p.tok.position()
 	if p.tok.kind == .key_orelse {

@@ -93,6 +93,7 @@ pub fn (mut p Parser) call_expr(language table.Language, mod string) ast.CallExp
 		fn_name = p.imported_symbols[fn_name]
 	}
 	comments := p.eat_line_end_comments()
+	pos.update_last_line(p.prev_tok.line_nr)
 	return ast.CallExpr{
 		name: fn_name
 		args: args

@@ -435,7 +436,7 @@ fn (mut p Parser) fn_decl() ast.FnDecl {
 }
 
 fn (mut p Parser) anon_fn() ast.AnonFn {
-	pos := p.tok.position()
+	mut pos := p.tok.position()
 	p.check(.key_fn)
 	if p.pref.is_script && p.tok.kind == .name {
 		p.error_with_pos('function declarations in script mode should be before all script statements',

@@ -487,6 +488,7 @@ fn (mut p Parser) anon_fn() ast.AnonFn {
 	idx := p.table.find_or_register_fn_type(p.mod, func, true, false)
 	typ := table.new_type(idx)
 	// name := p.table.get_type_name(typ)
+	pos.update_last_line(p.prev_tok.line_nr)
 	return ast.AnonFn{
 		decl: ast.FnDecl{
 			name: name

@@ -20,7 +20,7 @@ fn (mut p Parser) for_stmt() ast.Stmt {
 	if p.tok.kind == .lcbr {
 		p.inside_for = false
 		stmts := p.parse_block_no_scope(false)
-		pos.last_line = p.prev_tok.line_nr - 1
+		pos.update_last_line(p.prev_tok.line_nr)
 		for_stmt := ast.ForStmt{
 			stmts: stmts
 			pos: pos

@@ -64,7 +64,7 @@ fn (mut p Parser) for_stmt() ast.Stmt {
 		}
 		p.inside_for = false
 		stmts := p.parse_block_no_scope(false)
-		pos.last_line = p.prev_tok.line_nr - 1
+		pos.update_last_line(p.prev_tok.line_nr)
 		for_c_stmt := ast.ForCStmt{
 			stmts: stmts
 			has_init: has_init

@@ -159,7 +159,7 @@ fn (mut p Parser) for_stmt() ast.Stmt {
 		}
 		p.inside_for = false
 		stmts := p.parse_block_no_scope(false)
-		pos.last_line = p.prev_tok.line_nr - 1
+		pos.update_last_line(p.prev_tok.line_nr)
 		// println('nr stmts=$stmts.len')
 		for_in_stmt := ast.ForInStmt{
 			stmts: stmts

@@ -181,7 +181,7 @@ fn (mut p Parser) for_stmt() ast.Stmt {
 	// extra scope for the body
 	p.open_scope()
 	stmts := p.parse_block_no_scope(false)
-	pos.last_line = p.prev_tok.line_nr - 1
+	pos.update_last_line(p.prev_tok.line_nr)
 	for_stmt := ast.ForStmt{
 		cond: cond
 		stmts: stmts

@@ -15,7 +15,7 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
 		p.inside_ct_if_expr = was_inside_ct_if_expr
 	}
 	p.inside_if_expr = true
-	pos := if is_comptime {
+	mut pos := if is_comptime {
 		p.inside_ct_if_expr = true
 		p.next() // `$`
 		p.prev_tok.position().extend(p.tok.position())

@@ -143,6 +143,10 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
 			break
 		}
 	}
+	pos.update_last_line(p.prev_tok.line_nr)
+	if comments.len > 0 {
+		pos.last_line = comments.last().pos.last_line
+	}
 	return ast.IfExpr{
 		is_comptime: is_comptime
 		branches: branches

@@ -235,8 +239,12 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
 		branch_scope := p.scope
 		p.close_scope()
 		p.inside_match_body = false
-		pos := branch_first_pos.extend(branch_last_pos)
+		mut pos := branch_first_pos.extend(branch_last_pos)
 		post_comments := p.eat_comments()
+		pos.update_last_line(p.prev_tok.line_nr)
+		if post_comments.len > 0 {
+			pos.last_line = post_comments.last().pos.last_line
+		}
 		branches << ast.MatchBranch{
 			exprs: exprs
 			ecmnts: ecmnts

@@ -255,7 +263,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
 		}
 	}
 	match_last_pos := p.tok.position()
-	pos := token.Position{
+	mut pos := token.Position{
 		line_nr: match_first_pos.line_nr
 		pos: match_first_pos.pos
 		len: match_last_pos.pos - match_first_pos.pos + match_last_pos.len

@@ -264,6 +272,7 @@ fn (mut p Parser) match_expr() ast.MatchExpr {
 		p.check(.rcbr)
 	}
 	// return ast.StructInit{}
+	pos.update_last_line(p.prev_tok.line_nr)
 	return ast.MatchExpr{
 		branches: branches
 		cond: cond

@@ -397,12 +406,16 @@ fn (mut p Parser) select_expr() ast.SelectExpr {
 		stmts := p.parse_block_no_scope(false)
 		p.close_scope()
 		p.inside_match_body = false
-		pos := token.Position{
+		mut pos := token.Position{
 			line_nr: branch_first_pos.line_nr
 			pos: branch_first_pos.pos
 			len: branch_last_pos.pos - branch_first_pos.pos + branch_last_pos.len
 		}
 		post_comments := p.eat_comments()
+		pos.update_last_line(p.prev_tok.line_nr)
+		if post_comments.len > 0 {
+			pos.last_line = post_comments.last().pos.last_line
+		}
 		branches << ast.SelectBranch{
 			stmt: stmt
 			stmts: stmts

@@ -427,7 +440,7 @@ fn (mut p Parser) select_expr() ast.SelectExpr {
 	}
 	return ast.SelectExpr{
 		branches: branches
-		pos: pos
+		pos: pos.extend_with_last_line(p.prev_tok.position(), p.prev_tok.line_nr)
 		has_exception: has_else || has_timeout
 	}
 }

@@ -6,7 +6,7 @@ import v.table
 fn (mut p Parser) lock_expr() ast.LockExpr {
 	// TODO Handle aliasing sync
 	p.register_auto_import('sync')
-	pos := p.tok.position()
+	mut pos := p.tok.position()
 	is_rlock := p.tok.kind == .key_rlock
 	p.next()
 	mut lockeds := []ast.Ident{}

@@ -27,6 +27,7 @@ fn (mut p Parser) lock_expr() ast.LockExpr {
 		p.check(.comma)
 	}
 	stmts := p.parse_block()
+	pos.update_last_line(p.prev_tok.line_nr)
 	return ast.LockExpr{
 		lockeds: lockeds
 		stmts: stmts

@@ -569,8 +569,9 @@ pub fn (mut p Parser) check_comment() ast.Comment {
 }
 
 pub fn (mut p Parser) comment() ast.Comment {
-	pos := p.tok.position()
+	mut pos := p.tok.position()
 	text := p.tok.lit
+	pos.last_line = pos.line_nr + text.count('\n')
 	p.next()
 	// p.next_with_comment()
 	return ast.Comment{

@@ -629,11 +630,12 @@ pub fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
 		}
 		.key_assert {
 			p.next()
-			assert_pos := p.tok.position()
+			mut pos := p.tok.position()
 			expr := p.expr(0)
+			pos.update_last_line(p.prev_tok.line_nr)
 			return ast.AssertStmt{
 				expr: expr
-				pos: assert_pos
+				pos: pos
 			}
 		}
 		.key_for {

@@ -692,16 +694,24 @@ pub fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
 		.dollar {
 			match p.peek_tok.kind {
 				.key_if {
+					mut pos := p.tok.position()
+					expr := p.if_expr(true)
+					pos.update_last_line(p.prev_tok.line_nr)
 					return ast.ExprStmt{
-						expr: p.if_expr(true)
+						expr: expr
+						pos: pos
 					}
 				}
 				.key_for {
 					return p.comp_for()
 				}
 				.name {
+					mut pos := p.tok.position()
+					expr := p.comp_call()
+					pos.update_last_line(p.prev_tok.line_nr)
 					return ast.ExprStmt{
-						expr: p.comp_call()
+						expr: expr
+						pos: pos
 					}
 				}
 				else {

@@ -985,6 +995,7 @@ fn (mut p Parser) parse_multi_expr(is_top_level bool) ast.Stmt {
 	// a, mut b ... :=/= // multi-assign
 	// collect things upto hard boundaries
 	tok := p.tok
+	mut pos := tok.position()
 	left, left_comments := p.expr_list()
 	left0 := left[0]
 	if tok.kind == .key_mut && p.tok.kind != .decl_assign {

@@ -1002,6 +1013,7 @@ fn (mut p Parser) parse_multi_expr(is_top_level bool) ast.Stmt {
 		p.error_with_pos('expression evaluated but not used', left0.position())
 		return ast.Stmt{}
 	}
+	pos.update_last_line(p.prev_tok.line_nr)
 	if left.len == 1 {
 		return ast.ExprStmt{
 			expr: left0

@@ -1015,7 +1027,7 @@ fn (mut p Parser) parse_multi_expr(is_top_level bool) ast.Stmt {
 			vals: left
 			pos: tok.position()
 		}
-		pos: tok.position()
+		pos: pos
 		comments: left_comments
 	}
 }

@@ -2417,7 +2429,7 @@ fn (mut p Parser) unsafe_stmt() ast.Stmt {
 	}
 	if p.tok.kind == .rcbr {
 		// `unsafe {}`
-		pos.last_line = p.tok.line_nr - 1
+		pos.update_last_line(p.tok.line_nr)
 		p.next()
 		return ast.Block{
 			is_unsafe: true

@@ -2436,7 +2448,7 @@ fn (mut p Parser) unsafe_stmt() ast.Stmt {
 			// `unsafe {expr}`
 			if stmt.expr.is_expr() {
 				p.next()
-				pos.last_line = p.prev_tok.line_nr - 1
+				pos.update_last_line(p.prev_tok.line_nr)
 				ue := ast.UnsafeExpr{
 					expr: stmt.expr
 					pos: pos

@@ -2456,6 +2468,7 @@ fn (mut p Parser) unsafe_stmt() ast.Stmt {
 		stmts << p.stmt(false)
 	}
 	p.next()
+	pos.update_last_line(p.tok.line_nr)
 	return ast.Block{
 		stmts: stmts
 		is_unsafe: true

@@ -95,12 +95,13 @@ pub fn (mut p Parser) expr(precedence int) ast.Expr {
 			node = p.parse_number_literal()
 		}
 		.lpar {
+			mut pos := p.tok.position()
 			p.check(.lpar)
 			node = p.expr(0)
 			p.check(.rpar)
 			node = ast.ParExpr{
 				expr: node
-				pos: p.tok.position()
+				pos: pos.extend(p.prev_tok.position())
 			}
 		}
 		.key_if {

@@ -118,7 +119,7 @@ pub fn (mut p Parser) expr(precedence int) ast.Expr {
 			p.check(.lcbr)
 			e := p.expr(0)
 			p.check(.rcbr)
-			pos.last_line = p.prev_tok.line_nr - 1
+			pos.update_last_line(p.prev_tok.line_nr)
 			node = ast.UnsafeExpr{
 				expr: e
 				pos: pos

@@ -315,9 +316,10 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
 		} else if p.tok.kind == .left_shift && p.is_stmt_ident {
 			// arr << elem
 			tok := p.tok
-			pos := tok.position()
+			mut pos := tok.position()
 			p.next()
 			right := p.expr(precedence - 1)
+			pos.update_last_line(p.prev_tok.line_nr)
 			node = ast.InfixExpr{
 				left: node
 				right: right

@@ -370,7 +372,13 @@ fn (mut p Parser) infix_expr(left ast.Expr) ast.Expr {
 	// mut typ := p.
 	// println('infix op=$op.str()')
 	precedence := p.tok.precedence()
-	pos := p.tok.position()
+	mut pos := p.tok.position()
+	if left.position().line_nr < pos.line_nr {
+		pos = {
+			pos |
+			line_nr: left.position().line_nr
+		}
+	}
 	p.next()
 	mut right := ast.Expr{}
 	prev_expecting_type := p.expecting_type

@@ -415,6 +423,7 @@ fn (mut p Parser) infix_expr(left ast.Expr) ast.Expr {
 		}
 		p.or_is_handled = false
 	}
+	pos.update_last_line(p.prev_tok.line_nr)
 	return ast.InfixExpr{
 		left: left
 		right: right

@@ -429,7 +438,7 @@ fn (mut p Parser) infix_expr(left ast.Expr) ast.Expr {
 }
 
 fn (mut p Parser) prefix_expr() ast.PrefixExpr {
-	pos := p.tok.position()
+	mut pos := p.tok.position()
 	op := p.tok.kind
 	if op == .amp {
 		p.is_amp = true

@@ -482,6 +491,7 @@ fn (mut p Parser) prefix_expr() ast.PrefixExpr {
 		}
 		p.or_is_handled = false
 	}
+	pos.update_last_line(p.prev_tok.line_nr)
 	return ast.PrefixExpr{
 		op: op
 		right: right

@@ -107,7 +107,7 @@ fn (mut p Parser) sql_expr() ast.Expr {
 // insert user into User
 // update User set nr_oders=nr_orders+1 where id == user_id
 fn (mut p Parser) sql_stmt() ast.SqlStmt {
-	pos := p.tok.position()
+	mut pos := p.tok.position()
 	p.inside_match = true
 	defer {
 		p.inside_match = false

@@ -194,6 +194,7 @@ fn (mut p Parser) sql_stmt() ast.SqlStmt {
 		where_expr = p.expr(0)
 	}
 	p.check(.rcbr)
+	pos.last_line = p.prev_tok.line_nr
 	return ast.SqlStmt{
 		db_expr: db_expr
 		table_name: table_name

@@ -429,7 +429,7 @@ fn (mut p Parser) struct_init(short_syntax bool) ast.StructInit {
 
 fn (mut p Parser) interface_decl() ast.InterfaceDecl {
 	p.top_level_statement_start()
-	mut start_pos := p.tok.position()
+	mut pos := p.tok.position()
 	is_pub := p.tok.kind == .key_pub
 	if is_pub {
 		p.next()

@@ -506,12 +506,12 @@ fn (mut p Parser) interface_decl() ast.InterfaceDecl {
 	}
 	p.top_level_statement_end()
 	p.check(.rcbr)
-	start_pos.last_line = p.prev_tok.line_nr - 1
+	pos.update_last_line(p.prev_tok.line_nr)
 	return ast.InterfaceDecl{
 		name: interface_name
 		methods: methods
 		is_pub: is_pub
-		pos: start_pos
+		pos: pos
 		pre_comments: pre_comments
 	}
 }

@@ -161,6 +161,20 @@ fn (mut s Scanner) new_token(tok_kind token.Kind, lit string, len int) token.Tok
 	}
 }
 
+[inline]
+fn (mut s Scanner) new_mulitline_token(tok_kind token.Kind, lit string, len int, start_line int) token.Token {
+	cidx := s.tidx
+	s.tidx++
+	return token.Token{
+		kind: tok_kind
+		lit: lit
+		line_nr: start_line + 1
+		pos: s.pos - len + 1
+		len: len
+		tidx: cidx
+	}
+}
+
 [inline]
 fn (mut s Scanner) ident_name() string {
 	start := s.pos

@@ -938,6 +952,7 @@ fn (mut s Scanner) text_scan() token.Token {
 		// Multiline comments
 		if nextc == `*` {
 			start := s.pos + 2
+			start_line := s.line_nr
 			mut nest_count := 1
 			// Skip comment
 			for nest_count > 0 && s.pos < s.text.len - 1 {

@@ -964,7 +979,8 @@ fn (mut s Scanner) text_scan() token.Token {
 			if !comment.contains('\n') {
 				comment = '\x01' + comment
 			}
-			return s.new_token(.comment, comment, comment.len + 4)
+			return s.new_mulitline_token(.comment, comment, comment.len + 4,
+				start_line)
 		}
 		// Skip if not in fmt mode
 		continue

@@ -20,6 +20,7 @@ pub fn (pos Position) extend(end Position) Position {
 	return {
 		pos |
 		len: end.pos - pos.pos + end.len
+		last_line: end.last_line
 	}
 }
 

@@ -32,11 +33,16 @@ pub fn (pos Position) extend_with_last_line(end Position, last_line int) Positio
 	}
 }
 
+pub fn (mut pos Position) update_last_line(last_line int) {
+	pos.last_line = last_line - 1
+}
+
 [inline]
 pub fn (tok &Token) position() Position {
 	return Position{
 		len: tok.len
 		line_nr: tok.line_nr - 1
 		pos: tok.pos
+		last_line: tok.line_nr - 1
 	}
 }