vfmt: fix struct init indent and wrapped lines
parent 2fbed2f880
commit d54150cd22

@@ -235,7 +235,7 @@ fn (f mut Fmt) stmt(node ast.Stmt) {
 			f.expr(it.cond)
 			f.write('; ')
 			f.expr(it.inc)
-			f.writeln('{ ')
+			f.writeln(' {')
 			f.stmts(it.stmts)
 			f.writeln('}')
 		}
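
Note: the only content change in this hunk is the brace placement for C-style for loops: the old code emitted the opening brace with a trailing space (f.writeln('{ ')), the new code emits a space before the brace (f.writeln(' {')). As an illustration, using the loop header from the array_init hunk further down (old output first, new output second):

	for i := 0; p.tok.kind != .rsbr; i++{
	for i := 0; p.tok.kind != .rsbr; i++ {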

@@ -439,7 +439,7 @@ fn (f mut Fmt) expr(node ast.Expr) {
 			f.writeln('\t$it.var_name |')
 			// TODO StructInit copy pasta
 			for i, field in it.fields {
-				f.write('\t$field: ')
+				f.write('$field: ')
 				f.expr(it.exprs[i])
 				f.writeln('')
 			}

@@ -625,11 +625,13 @@ fn (f mut Fmt) expr(node ast.Expr) {
 				f.write('$name{}')
 			} else {
 				f.writeln('$name{')
+				f.indent++
 				for i, field in it.fields {
-					f.write('\t$field: ')
+					f.write('$field: ')
 					f.expr(it.exprs[i])
 					f.writeln('')
 				}
+				f.indent--
 				f.write('}')
 			}
 		}
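
Note: the added f.indent++ / f.indent-- pair is what drives the re-indentation of nested struct initializers in the parser hunks below. A rough sketch based on the parse_stmt hunk (exact tab counts depend on the surrounding nesting): the old formatter left nested fields at the outer level and pulled the closing brace too far left,

	global_scope: &ast.Scope{
	start_pos: 0
	parent: 0
}

while the new one indents the nested fields one level deeper and aligns the closing brace with the field that opened the initializer:

	global_scope: &ast.Scope{
		start_pos: 0
		parent: 0
	}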

@@ -652,6 +654,9 @@ fn (f mut Fmt) expr(node ast.Expr) {
 
 fn (f mut Fmt) wrap_long_line() {
 	if f.line_len > max_len {
+		if f.out.buf[f.out.buf.len - 1] == ' ' {
+			f.out.go_back(1)
+		}
 		f.write('\n' + tabs[f.indent + 1])
 		f.line_len = 0
 	}
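
Note: the new guard trims a single trailing space from the output buffer before the wrap is inserted, so a line broken right after an operator no longer ends in whitespace. Roughly, with illustrative buffer contents only:

	// buffer before wrapping:  '... it.branches[0].stmts.len == 1 && '
	// old: the newline was appended as-is, leaving '&& ' plus the break
	// new: f.out.go_back(1) drops the space, then the break and tabs[f.indent + 1] follow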

@@ -713,8 +718,8 @@ fn short_module(name string) string {
 }
 
 fn (f mut Fmt) if_expr(it ast.IfExpr) {
-	single_line := it.branches.len == 2 && it.has_else && it.branches[0].stmts.len == 1 && it.branches[1].stmts.len ==
-		1 && (it.is_expr || f.is_assign)
+	single_line := it.branches.len == 2 && it.has_else && it.branches[0].stmts.len == 1 &&
+		it.branches[1].stmts.len == 1 && (it.is_expr || f.is_assign)
 	f.single_line_if = single_line
 	for i, branch in it.branches {
 		if branch.comment.text != '' {
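
Note: the rewrapped condition above still describes the same shape of if expression: exactly two branches, an else, one statement per branch, used as a value. A minimal example that satisfies it (hypothetical code, shown only to illustrate the condition):

	max := if a > b { a } else { b }

which f.single_line_if presumably lets the formatter keep on a single line.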

@@ -46,9 +46,9 @@ pub fn parse_stmt(text string, table &table.Table, scope &ast.Scope) ast.Stmt {
 		pref: &pref.Preferences{}
 		scope: scope
 		global_scope: &ast.Scope{
-		start_pos: 0
-		parent: 0
-	}
+			start_pos: 0
+			parent: 0
+		}
 	}
 	p.init_parse_fns()
 	p.read_first_token()

@@ -67,9 +67,9 @@ pub fn parse_file(path string, table &table.Table, comments_mode scanner.Comment
 		file_name: path
 		pref: pref
 		scope: &ast.Scope{
-		start_pos: 0
-		parent: 0
-	}
+			start_pos: 0
+			parent: 0
+		}
 		global_scope: global_scope
 	}
 	// comments_mode: comments_mode

@@ -661,9 +661,9 @@ pub fn (p mut Parser) name_expr() ast.Expr {
 			x := p.call_expr(is_c, mod) // TODO `node,typ :=` should work
 			node = x
 		}
-	} else if p.peek_tok.kind == .lcbr && !p.inside_match_case &&
-		( is_c || p.tok.lit[0].is_capital() || (p.builtin_mod && p.tok.lit in table.builtin_type_names) ) &&
-		( p.tok.lit.len in [1, 2, 3] || !p.tok.lit[p.tok.lit.len - 1].is_capital() || p.table.known_type(p.tok.lit) ) {
+	} else if p.peek_tok.kind == .lcbr && !p.inside_match_case && (is_c || p.tok.lit[0].is_capital() ||
+		(p.builtin_mod && p.tok.lit in table.builtin_type_names)) && (p.tok.lit.len in [1, 2, 3] ||
+		!p.tok.lit[p.tok.lit.len - 1].is_capital() || p.table.known_type(p.tok.lit)) {
 		// short_syntax: false
 		return p.struct_init(false)
 	} else if p.peek_tok.kind == .dot && (p.tok.lit[0].is_capital() && !known_var) {

@@ -885,10 +885,10 @@ fn (p mut Parser) index_expr(left ast.Expr) ast.IndexExpr {
 			left: left
 			pos: p.tok.position()
 			index: ast.RangeExpr{
-			low: ast.Expr{}
-			high: high
-			has_high: true
-		}
+				low: ast.Expr{}
+				high: high
+				has_high: true
+			}
 		}
 	}
 	expr := p.expr(0) // `[expr]` or `[expr..]`

@@ -906,11 +906,11 @@ fn (p mut Parser) index_expr(left ast.Expr) ast.IndexExpr {
 			left: left
 			pos: p.tok.position()
 			index: ast.RangeExpr{
-			low: expr
-			high: high
-			has_high: has_high
-			has_low: has_low
-		}
+				low: expr
+				high: high
+				has_high: has_high
+				has_low: has_low
+			}
 		}
 	}
 	// [expr]

@@ -1287,7 +1287,7 @@ fn (p mut Parser) array_init() ast.ArrayInit {
 		}
 	} else {
 		// [1,2,3]
-		for i := 0; p.tok.kind != .rsbr; i++{
+		for i := 0; p.tok.kind != .rsbr; i++ {
 			expr := p.expr(0)
 			exprs << expr
 			if p.tok.kind == .comma {

@@ -1533,9 +1533,9 @@ fn (p mut Parser) struct_decl() ast.StructDecl {
 		kind: .struct_
 		name: name
 		info: table.Struct{
-		fields: fields
-		is_typedef: is_typedef
-	}
+			fields: fields
+			is_typedef: is_typedef
+		}
 	}
 	mut ret := 0
 	if p.builtin_mod && t.name in table.builtin_type_names {

@@ -1868,8 +1868,8 @@ fn (p mut Parser) enum_decl() ast.EnumDecl {
 		kind: .enum_
 		name: name
 		info: table.Enum{
-		vals: vals
-	}
+			vals: vals
+		}
 	})
 	return ast.EnumDecl{
 		name: name

@@ -1902,8 +1902,8 @@ fn (p mut Parser) type_decl() ast.TypeDecl {
 		kind: .sum_type
 		name: p.prepend_mod(name)
 		info: table.SumType{
-		variants: sum_variants
-	}
+			variants: sum_variants
+		}
 	})
 	return ast.SumTypeDecl{
 		name: name

@@ -1929,8 +1929,8 @@ fn (p mut Parser) type_decl() ast.TypeDecl {
 		name: p.prepend_mod(name)
 		parent_idx: pid
 		info: table.Alias{
-		foo: ''
-	}
+			foo: ''
+		}
 	})
 	return ast.AliasTypeDecl{
 		name: name