parser,fmt: refactor array comment handling to fully rely on the ecmnts field (#8929)

Branch: pull/8951/head
Author: Lukas Neubert, 2021-02-24 19:33:59 +01:00 (committed by GitHub)
Parent: b18d091e18
Commit: 8e69d3629f
2 changed files with 43 additions and 43 deletions
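
The commit changes how comments written inside array literals travel from the parser to vfmt: they are now carried in the per-element `ecmnts` field instead of being special-cased in the formatter. A minimal sketch of the kind of source this concerns, with made-up names and values (illustrative only, not taken from the test suite):

values := [
	1, // recorded in the ecmnts entry of the first element
	2, // recorded in the ecmnts entry of the second element
	3,
]
println(values)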

Changed file 1 of 2: the fmt module

@@ -914,11 +914,7 @@ pub fn (mut f Fmt) expr(node ast.Expr) {
 			f.write('`$node.val`')
 		}
 		ast.Comment {
-			if f.array_init_depth > 0 {
-				f.comment(node, iembed: true)
-			} else {
-				f.comment(node, inline: true)
-			}
+			f.comment(node, inline: true)
 		}
 		ast.ComptimeCall {
 			f.comptime_call(node)
@@ -1937,29 +1933,29 @@ fn should_decrease_arr_penalty(e ast.Expr) bool {
 	return false
 }
 
-pub fn (mut f Fmt) array_init(it ast.ArrayInit) {
-	if it.exprs.len == 0 && it.typ != 0 && it.typ != table.void_type {
+pub fn (mut f Fmt) array_init(node ast.ArrayInit) {
+	if node.exprs.len == 0 && node.typ != 0 && node.typ != table.void_type {
 		// `x := []string{}`
-		f.mark_types_import_as_used(it.typ)
-		f.write(f.table.type_to_str_using_aliases(it.typ, f.mod2alias))
+		f.mark_types_import_as_used(node.typ)
+		f.write(f.table.type_to_str_using_aliases(node.typ, f.mod2alias))
 		f.write('{')
-		if it.has_len {
+		if node.has_len {
 			f.write('len: ')
-			f.expr(it.len_expr)
-			if it.has_cap || it.has_default {
+			f.expr(node.len_expr)
+			if node.has_cap || node.has_default {
 				f.write(', ')
 			}
 		}
-		if it.has_cap {
+		if node.has_cap {
 			f.write('cap: ')
-			f.expr(it.cap_expr)
-			if it.has_default {
+			f.expr(node.cap_expr)
+			if node.has_default {
 				f.write(', ')
 			}
 		}
-		if it.has_default {
+		if node.has_default {
 			f.write('init: ')
-			f.expr(it.default_expr)
+			f.expr(node.default_expr)
 		}
 		f.write('}')
 		return
@@ -1967,9 +1963,9 @@ pub fn (mut f Fmt) array_init(it ast.ArrayInit) {
 	// `[1,2,3]`
 	f.write('[')
 	mut inc_indent := false
-	mut last_line_nr := it.pos.line_nr // to have the same newlines between array elements
+	mut last_line_nr := node.pos.line_nr // to have the same newlines between array elements
 	f.array_init_depth++
-	for i, c in it.pre_cmnts {
+	for i, c in node.pre_cmnts {
 		if c.pos.line_nr > last_line_nr {
 			f.writeln('')
 		} else if i > 0 {
@@ -1978,15 +1974,16 @@ pub fn (mut f Fmt) array_init(it ast.ArrayInit) {
 		f.comment(c, level: .indent, iembed: true)
 		last_line_nr = c.pos.last_line
 	}
-	if it.pre_cmnts.len > 0 {
-		same_line := it.pre_cmnts[0].pos.line_nr == it.pos.line_nr
-		if same_line && it.exprs.len > 0 {
+	if node.pre_cmnts.len > 0 {
+		same_line := node.pre_cmnts[0].pos.line_nr == node.pos.line_nr
+		if same_line && node.exprs.len > 0 {
 			f.write(' ')
-		} else if !same_line && it.exprs.len == 0 {
+		} else if !same_line && node.exprs.len == 0 {
 			f.writeln('')
 		}
 	}
-	for i, expr in it.exprs {
+	mut set_comma := false
+	for i, expr in node.exprs {
 		line_nr := expr.position().line_nr
 		if i == 0 {
 			if f.array_init_depth > f.array_init_break.len {
@@ -1994,11 +1991,11 @@ pub fn (mut f Fmt) array_init(it ast.ArrayInit) {
 			}
 		}
 		is_same_line_comment := i > 0
-			&& (expr is ast.Comment && line_nr == it.exprs[i - 1].position().line_nr)
+			&& (expr is ast.Comment && line_nr == node.exprs[i - 1].position().line_nr)
 		line_break := f.array_init_break[f.array_init_depth - 1]
 		mut penalty := if line_break && !is_same_line_comment { 0 } else { 4 }
 		if penalty > 0 {
-			if i == 0 || should_decrease_arr_penalty(it.exprs[i - 1]) {
+			if i == 0 || should_decrease_arr_penalty(node.exprs[i - 1]) {
 				penalty--
 			}
 			if should_decrease_arr_penalty(expr) {
@@ -2014,33 +2011,35 @@ pub fn (mut f Fmt) array_init(it ast.ArrayInit) {
 			f.write(' ')
 		}
 		f.expr(expr)
-		if i < it.ecmnts.len && it.ecmnts[i].len > 0 {
-			mut last_cmt := it.ecmnts[i][0]
-			if last_cmt.pos.line_nr > expr.position().last_line {
-				f.writeln('')
-			} else {
-				f.write(' ')
-			}
-			for cmt in it.ecmnts[i] {
-				if cmt.pos.line_nr > last_cmt.pos.last_line {
+		if i < node.ecmnts.len && node.ecmnts[i].len > 0 {
+			expr_pos := expr.position()
+			for cmt in node.ecmnts[i] {
+				if !set_comma && cmt.pos.pos > expr_pos.pos + expr_pos.len + 2 {
+					f.write(',')
+					set_comma = true
+				}
+				if cmt.pos.line_nr > expr_pos.last_line {
 					f.writeln('')
+				} else {
+					f.write(' ')
 				}
 				f.comment(cmt, iembed: true)
 			}
 		}
-		if i == it.exprs.len - 1 {
+		if i == node.exprs.len - 1 {
 			if is_new_line {
-				if expr !is ast.Comment {
+				if !set_comma && expr !is ast.Comment {
 					f.write(',')
 				}
 				f.writeln('')
 			} else if is_same_line_comment {
 				f.writeln('')
 			}
-		} else if expr !is ast.Comment {
+		} else if !set_comma && expr !is ast.Comment {
 			f.write(',')
 		}
 		last_line_nr = line_nr
+		set_comma = false
 	}
 	f.array_init_depth--
 	if f.array_init_depth == 0 {
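
The new `set_comma` flag decides whether the comma is printed before or after an element's trailing comments: when a comment in `ecmnts[i]` starts more than two characters past the end of element `i` in the source, the comma is written ahead of the comments, and the later `!set_comma` checks skip the comma that would otherwise follow them. A worked example of that position check, with made-up byte offsets:

// the element starts at byte 10 and is 1 byte long, so the threshold is 10 + 1 + 2 = 13
expr_start := 10
expr_len := 1
// the comment starts at byte 14 in the source
cmt_start := 14
// 14 > 13, so the comma is written before this comment and set_comma is set,
// which suppresses the comma that would otherwise follow the element's comments
println(cmt_start > expr_start + expr_len + 2) // true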
@@ -2051,15 +2050,15 @@ pub fn (mut f Fmt) array_init(it ast.ArrayInit) {
 	}
 	f.write(']')
 	// `[100]byte`
-	if it.is_fixed {
-		if it.has_val {
+	if node.is_fixed {
+		if node.has_val {
 			f.write('!')
 			return
 		}
-		f.write(f.table.type_to_str(it.elem_type))
-		if it.has_default {
+		f.write(f.table.type_to_str(node.elem_type))
+		if node.has_default {
 			f.write('{init: ')
-			f.expr(it.default_expr)
+			f.expr(node.default_expr)
 			f.write('}')
 		} else {
 			f.write('{}')

Changed file 2 of 2: the parser module

@@ -47,6 +47,7 @@ fn (mut p Parser) array_init() ast.ArrayInit {
 		if p.tok.kind == .comma {
 			p.next()
 		}
+		ecmnts.last() << p.eat_comments({})
 	}
 	line_nr := p.tok.line_nr
 	$if tinyc {
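
The single parser change appends any comments found after an element's trailing comma to that element's `ecmnts` entry via `p.eat_comments({})`. A sketch of the input shape this covers, with made-up values:

colors := [
	'red', // follows the comma, so it is appended to the ecmnts entry for 'red'
	'blue', // likewise for 'blue'
]
println(colors)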