compiler: blank ident - consolidate duplicated code into single function

pull/2460/head
joe-conigliaro 2019-10-20 20:24:12 +11:00 committed by Alexander Medvednikov
parent 8a31ee4b53
commit f7c00b8180
4 changed files with 20 additions and 25 deletions
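
For orientation, a minimal sketch of the blank-identifier cases this parser code distinguishes. The snippet below is illustrative only and not part of the commit; foo, arr and some_value are hypothetical names. Discarding the result of a function call or an index expression is accepted, while assigning a plain value to `_` triggers the redundancy error seen in the hunks below.

_ = foo()        // accepted: the right-hand side is a function call, its result is deliberately discarded
_ = arr[0]       // accepted: is_indexer is true because the next token is .lsbr
_ = some_value   // rejected: "assigning `some_value` to `_` is redundant"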

@@ -185,7 +185,7 @@ fn (p mut Parser) chash() {
 	flag = flag.replace('@VROOT', p.vroot)
 	flag = flag.replace('@VMOD', v_modules_path)
 	//p.log('adding flag "$flag"')
-	_p := p.table.parse_cflag(flag, p.mod) or {
+	_ = p.table.parse_cflag(flag, p.mod) or {
 		p.error_with_token_index(err, p.cur_tok_index()-1)
 		return
 	}

@@ -93,21 +93,11 @@ fn (p mut Parser) gen_blank_identifier_assign() {
 	p.check_name()
 	p.check_space(.assign)
 	is_indexer := p.peek() == .lsbr
-	mut expr := p.lit
-	mut is_fn_call := p.peek() == .lpar
-	if !is_fn_call {
-		mut i := p.token_idx+1
-		for (p.tokens[i].tok == .dot || p.tokens[i].tok == .name) &&
-			p.tokens[i].lit != '_' {
-			expr += if p.tokens[i].tok == .dot { '.' } else { p.tokens[i].lit }
-			i++
-		}
-		is_fn_call = p.tokens[i].tok == .lpar
-	}
+	is_fn_call, next_expr := p.is_next_expr_fn_call()
 	pos := p.cgen.add_placeholder()
 	mut typ := p.bool_expression()
 	if !is_indexer && !is_fn_call {
-		p.error_with_token_index('assigning `$expr` to `_` is redundant', assign_error_tok_idx)
+		p.error_with_token_index('assigning `$next_expr` to `_` is redundant', assign_error_tok_idx)
 	}
 	tmp := p.get_tmp()
 	// handle or

@@ -39,20 +39,10 @@ fn (p mut Parser) gen_blank_identifier_assign() {
 	p.check_name()
 	p.check_space(.assign)
 	is_indexer := p.peek() == .lsbr
-	mut expr := p.lit
-	mut is_fn_call := p.peek() == .lpar
-	if !is_fn_call {
-		mut i := p.token_idx+1
-		for (p.tokens[i].tok == .dot || p.tokens[i].tok == .name) &&
-			p.tokens[i].lit != '_' {
-			expr += if p.tokens[i].tok == .dot { '.' } else { p.tokens[i].lit }
-			i++
-		}
-		is_fn_call = p.tokens[i].tok == .lpar
-	}
+	is_fn_call, next_expr := p.is_next_expr_fn_call()
 	p.bool_expression()
 	if !is_indexer && !is_fn_call {
-		p.error_with_token_index('assigning `$expr` to `_` is redundant', assign_error_tok_idx)
+		p.error_with_token_index('assigning `$next_expr` to `_` is redundant', assign_error_tok_idx)
 	}
 	or_else := p.tok == .key_orelse
 	//tmp := p.get_tmp()

@@ -4209,3 +4209,18 @@ fn (p mut Parser) check_unused_imports() {
 	// the imports are usually at the start of the file
 	p.production_error_with_token_index( 'the following imports were never used: $output', 0 )
 }
+
+fn (p mut Parser) is_next_expr_fn_call() (bool, string) {
+	mut next_expr := p.lit
+	mut is_fn_call := p.peek() == .lpar
+	if !is_fn_call {
+		mut i := p.token_idx+1
+		for i < p.tokens.len && (p.tokens[i].tok == .dot || p.tokens[i].tok == .name) &&
+			p.tokens[i].lit != '_' {
+			next_expr += if p.tokens[i].tok == .dot { '.' } else { p.tokens[i].lit }
+			i++
+		}
+		is_fn_call = p.tokens[i].tok == .lpar
+	}
+	return is_fn_call, next_expr
+}
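
A rough walk-through of the consolidated helper under assumed inputs (foo, bar and mod.func are hypothetical names, chosen only to illustrate the token walk):

// _ = foo()       peek() is .lpar, so is_fn_call is true immediately and the loop never runs
// _ = bar         peek() is not .lpar and no .dot/.name tokens follow, so is_fn_call stays false
//                 -> gen_blank_identifier_assign() reports "assigning `bar` to `_` is redundant"
// _ = mod.func()  the loop collects the .dot/.name segments into next_expr and stops at the first
//                 other token; that token is .lpar, so the assignment counts as a call and is allowed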