compiler: more precise 'declared and not used' error positioning
* compiler: extract and clean up error-handling functionality into its own file, compiler/compile_errors.v
* compiler: implement p.error_with_token_index and p.warn_with_token_index and use them. Fix tests.
* tools/performance_compare: add a 'Source lines in compiler/' line
* MSVC does not have STDOUT_FILENO nor STDERR_FILENO
parent
6d483c0a56
commit
e72fe25224
|
@ -0,0 +1,245 @@
|
|||
module main
|
||||
|
||||
import (
|
||||
os
|
||||
term
|
||||
)
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// NB: The code in this file is organized in layers (between the ///// lines).
|
||||
// This allows for easier keeping in sync of error/warn functions.
|
||||
// The functions in each of the layers, call the functions from the layers *below*.
|
||||
// The functions in each of the layers, also have more details about the warn/error situation,
|
||||
// so they can display more informative message, so please call the lowest level variant you can.
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// TLDR: If you have a token index, call:
|
||||
// p.error_with_token_index(msg, token_index)
|
||||
// ... not just :
|
||||
// p.error(msg)
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// error reports a parser error when no positioning information is
// available. It blames the most recently consumed token.
fn (p mut Parser) error(s string) {
	// assume that the last token was the culprit:
	last_idx := p.token_idx - 1
	p.error_with_token_index(s, last_idx)
}
|
||||
|
||||
// warn reports a parser warning when no positioning information is
// available. It blames the most recently consumed token.
fn (p mut Parser) warn(s string) {
	last_idx := p.token_idx - 1
	p.warn_with_token_index(s, last_idx)
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// production_error_with_token_index is a hard error in production
// builds (-prod), but only a warning otherwise.
fn (p mut Parser) production_error_with_token_index(e string, tokenindex int) {
	if !p.pref.is_prod {
		p.warn_with_token_index(e, tokenindex)
		return
	}
	p.error_with_token_index(e, tokenindex)
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// error_with_token_index reports an error positioned at the given token.
fn (p mut Parser) error_with_token_index(s string, tokenindex int) {
	// Resolve the token index to an exact scanner position, then report there:
	pos := p.scanner.get_scanner_pos_of_token(p.tokens[tokenindex])
	p.error_with_position(s, pos)
}
|
||||
|
||||
// warn_with_token_index reports a warning positioned at the given token.
fn (p mut Parser) warn_with_token_index(s string, tokenindex int) {
	pos := p.scanner.get_scanner_pos_of_token(p.tokens[tokenindex])
	p.warn_with_position(s, pos)
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// error_with_position prints the general error context (debug dumps,
// `v up` hint, ...), then reports the error at the exact position sp.
fn (p mut Parser) error_with_position(s string, sp ScannerPos) {
	p.print_error_context()
	// Move the scanner to the error location so error_with_col can show
	// the offending line. error_with_col exits the process, so the
	// scanner state does not need to be restored afterwards.
	p.scanner.goto_scanner_position( sp )
	p.scanner.error_with_col(normalized_error( s ), sp.pos - sp.last_nl_pos)
}
|
||||
|
||||
// warn_with_position reports a warning at the exact position sp.
// Unlike errors, warnings do not stop compilation, so the scanner state
// is saved before and restored after printing the message.
fn (p mut Parser) warn_with_position(s string, sp ScannerPos) {
	saved := p.scanner.get_scanner_pos()
	e := normalized_error( s )
	p.scanner.goto_scanner_position( sp )
	p.scanner.warn_with_col(e, sp.pos - sp.last_nl_pos)
	p.scanner.goto_scanner_position( saved )
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// error reports a scanner error when no column information is
// available; column 0 is used as a placeholder.
fn (s &Scanner) error(msg string) {
	s.error_with_col(msg, 0)
}
|
||||
|
||||
// warn reports a scanner warning when no column information is
// available; column 0 is used as a placeholder.
fn (s &Scanner) warn(msg string) {
	s.warn_with_col(msg, 0)
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// warn_with_col prints a single-line warning to stderr, in the
// `warning: filepath:line:col: message` format, colorized when possible.
fn (s &Scanner) warn_with_col(msg string, col int) {
	fullpath := os.realpath( s.file_path )
	mut final_message := msg
	if s.is_color_output_on() {
		final_message = term.bold(term.bright_blue( msg ))
	}
	eprintln('warning: ${fullpath}:${s.line_nr+1}:${col}: $final_message')
}
|
||||
|
||||
// error_with_col prints an error message to stderr, followed (when
// enabled) by several lines of source context and a ^ pointer line,
// then terminates the compiler with exit(1).
fn (s &Scanner) error_with_col(msg string, col int) {
	fullpath := os.realpath( s.file_path )
	color_on := s.is_color_output_on()
	mut final_message := msg
	if color_on {
		final_message = term.red( term.bold( msg ) )
	}
	// The filepath:line:col: format is the default C compiler error
	// output format. It allows editors and IDE's like emacs to quickly
	// find the errors in the output and jump to their source with a
	// keyboard shortcut. Using only the filename leads to inability of
	// IDE/editors to find the source file, when it is in another folder.
	eprintln('${fullpath}:${s.line_nr + 1}:${col}: $final_message')
	if s.should_print_line_on_error && s.file_lines.len > 0 {
		context_start_line := imax(0, (s.line_nr - error_context_before + 1 ))
		context_end_line := imin(s.file_lines.len, (s.line_nr + error_context_after + 1 ))
		for cline := context_start_line; cline < context_end_line; cline++ {
			line := '${(cline+1):5d}| ' + s.file_lines[ cline ]
			coloredline := if cline == s.line_nr && color_on { term.red(line) } else { line }
			eprintln( coloredline )
			if cline != s.line_nr {
				continue
			}
			// The pointerline reuses the offending line's own spaces/tabs,
			// so that the ^ character is printed on exactly the *same spot*
			// where it is needed. That is the reason we can not just use
			// strings.repeat(` `, col) to form it.
			mut pointerline := []string
			for i , c in line {
				if i < col {
					x := if c.is_space() { c } else { ` ` }
					pointerline << x.str()
					continue
				}
				pointerline << if color_on { term.bold( term.blue('^') ) } else { '^' }
				break
			}
			eprintln( ' ' + pointerline.join('') )
		}
	}
	exit(1)
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/// Misc error helper functions, can be called by any of the functions above
|
||||
|
||||
// cur_tok_index returns the index of the most recently consumed token.
[inline] fn (p &Parser) cur_tok_index() int {
	return p.token_idx - 1
}
|
||||
// imax returns the larger of two integers.
[inline] fn imax(a,b int) int {
	return if b > a { b } else { a }
}
|
||||
// imin returns the smaller of two integers.
[inline] fn imin(a,b int) int {
	return if b < a { b } else { a }
}
|
||||
|
||||
// is_color_output_on reports whether colorized messages should be
// produced: colors must be enabled AND stderr must support them
// (errors/warnings are written to stderr).
fn (s &Scanner) is_color_output_on() bool {
	if !s.should_print_errors_in_color {
		return false
	}
	return term.can_show_color_on_stderr()
}
|
||||
|
||||
// print_error_context prints general diagnostic context before an
// error message: debug dumps, the current pass/function, and a
// `v up` hint when the failing file looks like part of V itself.
fn (p mut Parser) print_error_context(){
	// Dump all vars and types for debugging
	if p.pref.is_debug {
		// os.write_to_file('/var/tmp/lang.types', '')//pes(p.table.types))
		os.write_file('fns.txt', p.table.debug_fns())
	}
	if p.pref.is_verbose || p.pref.is_debug {
		println('pass=$p.pass fn=`$p.cur_fn.name`\n')
	}
	p.cgen.save()
	// V up hint: V changes frequently, so if the error is inside the
	// compiler sources themselves, an outdated checkout is the most
	// likely cause.
	cur_path := os.getwd()
	building_v := p.file_path.contains('v/compiler') || cur_path.contains('v/compiler')
	if !p.pref.is_repl && !p.pref.is_test && building_v {
		println('\n=========================')
		println('It looks like you are building V. It is being frequently updated every day.')
		println('If you didn\'t modify V\'s code, most likely there was a change that ')
		println('lead to this error.')
		println('\nRun `v up`, that will most likely fix it.')
		//println('\nIf this doesn\'t help, re-install V from source or download a precompiled' + ' binary from\nhttps://vlang.io.')
		println('\nIf this doesn\'t help, please create a GitHub issue.')
		println('=========================\n')
	}
	if p.pref.is_debug {
		print_backtrace()
	}
	// p.scanner.debug_tokens()
}
|
||||
|
||||
// normalized_error converts internal mangled type/module names into
// user-facing V syntax, e.g. `array_int` -> `[]int`,
// `Option_Foo` -> `?Foo`, `mod__fn` -> `mod.fn`, and strips the
// implicit `main.` module prefix.
fn normalized_error( s string ) string {
	mut e := s.replace('array_', '[]')
	e = e.replace('__', '.')
	e = e.replace('Option_', '?')
	e = e.replace('main.', '')
	return e
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// The goal of ScannerPos is to track the current scanning position,
|
||||
// so that if there is an error found later, v could show a more accurate
|
||||
// position about where the error initially was.
|
||||
// NB: The fields of ScannerPos *should be kept synchronized* with the
|
||||
// corresponding fields in Scanner.
|
||||
|
||||
struct ScannerPos {
mut:
	pos         int // byte offset into the source text
	line_nr     int // 0-based line number (printed as line_nr + 1)
	last_nl_pos int // byte offset of the most recent newline (used for column calc)
}
|
||||
|
||||
// str returns a fixed-width textual dump of the position; the 5d
// padding keeps dumps of many positions column-aligned.
fn (s ScannerPos) str() string {
	return 'ScannerPos{ ${s.pos:5d} , ${s.line_nr:5d} , ${s.last_nl_pos:5d} }'
}
|
||||
|
||||
// get_scanner_pos snapshots the scanner's current position fields.
fn (s &Scanner) get_scanner_pos() ScannerPos {
	return ScannerPos{
		pos: s.pos
		line_nr: s.line_nr
		last_nl_pos: s.last_nl_pos
	}
}
|
||||
|
||||
// goto_scanner_position restores the scanner's position fields from a
// previously taken ScannerPos snapshot.
fn (s mut Scanner) goto_scanner_position(scp ScannerPos) {
	s.pos = scp.pos
	s.line_nr = scp.line_nr
	s.last_nl_pos = scp.last_nl_pos
}
|
||||
|
||||
// get_scanner_pos_of_token rescans *the whole source* till it reaches {t.line_nr, t.col} .
fn (s mut Scanner) get_scanner_pos_of_token(t &Tok) ScannerPos {
	// This rescanning is done just once on error, so it is fine for now.
	// Be careful for the performance implications, if you want to do it
	// more frequently. The alternative would be to store the scanpos
	// (12 bytes) for each token, and there are potentially many tokens.
	tline := t.line_nr
	// NOTE(review): the +1/-1 column adjustment for line 0 vs other
	// lines mirrors how token columns are recorded — confirm against
	// the tokenizer before changing.
	tcol := if t.line_nr == 0 { t.col + 1 } else { t.col - 1 }
	// save the current scanner position, it will be restored later
	cpos := s.get_scanner_pos()
	mut sptoken := ScannerPos{}
	// Starting from the start, scan the source lines till the desired
	// tline is reached, then s.pos + tcol would be the proper position
	// of the token. Continue scanning for some more lines of context too
	// (up to 10 lines past the target).
	s.goto_scanner_position(ScannerPos{})
	s.file_lines = []string
	mut prevlinepos := 0
	for {
		prevlinepos = s.pos
		if s.pos >= s.text.len { break }
		if s.line_nr > tline + 10 { break }
		if tline == s.line_nr {
			sptoken = s.get_scanner_pos()
			sptoken.pos += tcol
		}
		s.ignore_line() s.eat_single_newline()
		sline := s.text.substr( prevlinepos, s.pos ).trim_right('\r\n')
		s.file_lines << sline
	}
	// Restore the position the scanner had before the rescan.
	s.goto_scanner_position(cpos)
	return sptoken
}
|
||||
|
||||
// eat_single_newline consumes at most one line terminator at the
// current position: \r\n (two bytes), \n, or \r.
fn (s mut Scanner) eat_single_newline(){
	if s.pos >= s.text.len {
		return
	}
	if s.expect('\r\n', s.pos) {
		s.pos += 2
		return
	}
	c := s.text[ s.pos ]
	if c == `\n` || c == `\r` {
		s.pos ++
	}
}
|
|
@ -33,7 +33,7 @@ mut:
|
|||
is_decl bool // type myfn fn(int, int)
|
||||
defer_text []string
|
||||
//gen_types []string
|
||||
fn_name_token Tok
|
||||
fn_name_token_idx int // used by error reporting
|
||||
}
|
||||
|
||||
fn (p &Parser) find_var(name string) ?Var {
|
||||
|
@ -110,8 +110,8 @@ fn (p mut Parser) known_var(name string) bool {
|
|||
fn (p mut Parser) register_var(v Var) {
|
||||
mut new_var := {v | idx: p.var_idx, scope_level: p.cur_fn.scope_level}
|
||||
if v.line_nr == 0 {
|
||||
new_var.token = p.cur_tok()
|
||||
new_var.line_nr = new_var.token.line_nr
|
||||
new_var.token_idx = p.cur_tok_index()
|
||||
new_var.line_nr = p.cur_tok().line_nr
|
||||
}
|
||||
// Expand the array
|
||||
if p.var_idx >= p.local_vars.len {
|
||||
|
@ -213,7 +213,7 @@ fn (p mut Parser) fn_decl() {
|
|||
ref: is_amp
|
||||
ptr: is_mut
|
||||
line_nr: p.scanner.line_nr
|
||||
token: p.cur_tok()
|
||||
token_idx: p.cur_tok_index()
|
||||
}
|
||||
f.args << receiver
|
||||
p.register_var(receiver)
|
||||
|
@ -226,7 +226,7 @@ fn (p mut Parser) fn_decl() {
|
|||
else {
|
||||
f.name = p.check_name()
|
||||
}
|
||||
f.fn_name_token = p.cur_tok()
|
||||
f.fn_name_token_idx = p.cur_tok_index()
|
||||
// C function header def? (fn C.NSMakeRect(int,int,int,int))
|
||||
is_c := f.name == 'C' && p.tok == .dot
|
||||
// Just fn signature? only builtin.v + default build mode
|
||||
|
@ -334,7 +334,7 @@ fn (p mut Parser) fn_decl() {
|
|||
// Special case for main() args
|
||||
if f.name == 'main__main' && !has_receiver {
|
||||
if str_args != '' || typ != 'void' {
|
||||
p.error_with_tok('fn main must have no arguments and no return values', f.fn_name_token)
|
||||
p.error_with_token_index('fn main must have no arguments and no return values', f.fn_name_token_idx)
|
||||
}
|
||||
}
|
||||
dll_export_linkage := if p.os == .msvc && p.attr == 'live' && p.pref.is_so {
|
||||
|
@ -439,7 +439,7 @@ fn (p mut Parser) fn_decl() {
|
|||
|
||||
if f.name == 'main__main' || f.name == 'main' || f.name == 'WinMain' {
|
||||
if p.pref.is_test && !p.scanner.file_path.contains('/volt') {
|
||||
p.error_with_tok('tests cannot have function `main`', f.fn_name_token)
|
||||
p.error_with_token_index('tests cannot have function `main`', f.fn_name_token_idx)
|
||||
}
|
||||
}
|
||||
// println('is_c=$is_c name=$f.name')
|
||||
|
@ -472,7 +472,7 @@ fn (p mut Parser) fn_decl() {
|
|||
}
|
||||
}
|
||||
if typ != 'void' && !p.returns {
|
||||
p.error_with_tok('$f.name must return "$typ"', f.fn_name_token)
|
||||
p.error_with_token_index('$f.name must return "$typ"', f.fn_name_token_idx)
|
||||
}
|
||||
if p.attr == 'live' && p.pref.is_so {
|
||||
//p.genln('// live_function body end')
|
||||
|
@ -535,10 +535,10 @@ fn (p mut Parser) check_unused_variables() {
|
|||
break
|
||||
}
|
||||
if !var.is_used && !p.pref.is_repl && !var.is_arg && !p.pref.translated {
|
||||
p.production_error_with_token('`$var.name` declared and not used', var.token )
|
||||
p.production_error_with_token_index('`$var.name` declared and not used', var.token_idx )
|
||||
}
|
||||
if !var.is_changed && var.is_mut && !p.pref.is_repl && !p.pref.translated {
|
||||
p.error_with_tok( '`$var.name` is declared as mutable, but it was never changed', var.token )
|
||||
p.error_with_token_index( '`$var.name` is declared as mutable, but it was never changed', var.token_idx )
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -698,7 +698,7 @@ fn (p mut Parser) fn_args(f mut Fn) {
|
|||
if f.is_interface {
|
||||
int_arg := Var {
|
||||
typ: f.receiver_typ
|
||||
token: p.cur_tok()
|
||||
token_idx: p.cur_tok_index()
|
||||
}
|
||||
f.args << int_arg
|
||||
}
|
||||
|
@ -714,7 +714,7 @@ fn (p mut Parser) fn_args(f mut Fn) {
|
|||
is_arg: true
|
||||
// is_mut: is_mut
|
||||
line_nr: p.scanner.line_nr
|
||||
token: p.cur_tok()
|
||||
token_idx: p.cur_tok_index()
|
||||
}
|
||||
// f.register_var(v)
|
||||
f.args << v
|
||||
|
@ -757,7 +757,7 @@ fn (p mut Parser) fn_args(f mut Fn) {
|
|||
is_mut: is_mut
|
||||
ptr: is_mut
|
||||
line_nr: p.scanner.line_nr
|
||||
token: p.cur_tok()
|
||||
token_idx: p.cur_tok_index()
|
||||
}
|
||||
p.register_var(v)
|
||||
f.args << v
|
||||
|
|
|
@ -207,20 +207,18 @@ fn (p & Parser) peek() Token {
|
|||
}
|
||||
|
||||
// TODO remove dups
|
||||
fn (p &Parser) prev_token() Tok {
|
||||
[inline] fn (p &Parser) prev_token() Tok {
|
||||
return p.tokens[p.token_idx - 2]
|
||||
}
|
||||
|
||||
fn (p &Parser) cur_tok() Tok {
|
||||
[inline] fn (p &Parser) cur_tok() Tok {
|
||||
return p.tokens[p.token_idx - 1]
|
||||
}
|
||||
|
||||
fn (p &Parser) peek_token() Tok {
|
||||
[inline] fn (p &Parser) peek_token() Tok {
|
||||
if p.token_idx >= p.tokens.len - 2 {
|
||||
return Tok{ tok:Token.eof }
|
||||
}
|
||||
tok := p.tokens[p.token_idx]
|
||||
return tok
|
||||
return p.tokens[p.token_idx]
|
||||
}
|
||||
|
||||
fn (p &Parser) log(s string) {
|
||||
|
@ -887,84 +885,6 @@ if p.scanner.line_comment != '' {
|
|||
}
|
||||
}
|
||||
|
||||
/////////////////////////////////////////////////////////////////
|
||||
fn (p &Parser) warn(s string) {
|
||||
e := normalized_error( s )
|
||||
println('warning: $p.scanner.file_path:${p.scanner.line_nr+1}: $e')
|
||||
}
|
||||
|
||||
fn (p mut Parser) print_error_context(){
|
||||
// Dump all vars and types for debugging
|
||||
if p.pref.is_debug {
|
||||
// os.write_to_file('/var/tmp/lang.types', '')//pes(p.table.types))
|
||||
os.write_file('fns.txt', p.table.debug_fns())
|
||||
}
|
||||
if p.pref.is_verbose || p.pref.is_debug {
|
||||
println('pass=$p.pass fn=`$p.cur_fn.name`\n')
|
||||
}
|
||||
p.cgen.save()
|
||||
// V up hint
|
||||
cur_path := os.getwd()
|
||||
if !p.pref.is_repl && !p.pref.is_test && ( p.file_path.contains('v/compiler') || cur_path.contains('v/compiler') ){
|
||||
println('\n=========================')
|
||||
println('It looks like you are building V. It is being frequently updated every day.')
|
||||
println('If you didn\'t modify V\'s code, most likely there was a change that ')
|
||||
println('lead to this error.')
|
||||
println('\nRun `v up`, that will most likely fix it.')
|
||||
//println('\nIf this doesn\'t help, re-install V from source or download a precompiled' + ' binary from\nhttps://vlang.io.')
|
||||
println('\nIf this doesn\'t help, please create a GitHub issue.')
|
||||
println('=========================\n')
|
||||
}
|
||||
if p.pref.is_debug {
|
||||
print_backtrace()
|
||||
}
|
||||
// p.scanner.debug_tokens()
|
||||
}
|
||||
|
||||
fn normalized_error( s string ) string {
|
||||
// Print `[]int` instead of `array_int` in errors
|
||||
return s.replace('array_', '[]')
|
||||
.replace('__', '.')
|
||||
.replace('Option_', '?')
|
||||
.replace('main.', '')
|
||||
}
|
||||
|
||||
fn (p mut Parser) error_with_position(s string, sp ScannerPos) {
|
||||
p.print_error_context()
|
||||
e := normalized_error( s )
|
||||
p.scanner.goto_scanner_position( sp )
|
||||
p.scanner.error_with_col(e, sp.pos - sp.last_nl_pos)
|
||||
}
|
||||
|
||||
fn (p mut Parser) warn_with_position(e string, sp ScannerPos) {
|
||||
// on a warning, restore the scanner state after printing the warning:
|
||||
cpos := p.scanner.get_scanner_pos()
|
||||
p.scanner.goto_scanner_position( sp )
|
||||
p.warn(e)
|
||||
p.scanner.goto_scanner_position( cpos )
|
||||
}
|
||||
|
||||
fn (p mut Parser) production_error_with_token(e string, tok Tok) {
|
||||
if p.pref.is_prod {
|
||||
p.error_with_tok( e, tok )
|
||||
}else {
|
||||
p.warn_with_token( e, tok )
|
||||
}
|
||||
}
|
||||
|
||||
fn (p &Parser) warn_with_token(s string, tok Tok) {
|
||||
e := normalized_error( s )
|
||||
println('warning: $p.scanner.file_path:${tok.line_nr+1}:${tok.col}: $e')
|
||||
}
|
||||
fn (p mut Parser) error_with_tok(s string, tok Tok) {
|
||||
p.error_with_position(s, p.scanner.get_scanner_pos_of_token(tok) )
|
||||
}
|
||||
|
||||
fn (p mut Parser) error(s string) {
|
||||
// no positioning info, so just assume that the last token was the culprit:
|
||||
p.error_with_tok(s, p.tokens[p.token_idx-1] )
|
||||
}
|
||||
/////////////////////////////////////////////////////////////////
|
||||
|
||||
fn (p &Parser) first_pass() bool {
|
||||
return p.pass == .decl
|
||||
|
@ -1405,7 +1325,7 @@ fn (p mut Parser) statement(add_semi bool) string {
|
|||
// is_map: are we in map assignment? (m[key] = val) if yes, dont generate '='
|
||||
// this can be `user = ...` or `user.field = ...`, in both cases `v` is `user`
|
||||
fn (p mut Parser) assign_statement(v Var, ph int, is_map bool) {
|
||||
errtok := p.cur_tok()
|
||||
errtok := p.cur_tok_index()
|
||||
//p.log('assign_statement() name=$v.name tok=')
|
||||
is_vid := p.fileis('vid') // TODO remove
|
||||
tok := p.tok
|
||||
|
@ -1460,7 +1380,7 @@ fn ($v.name mut $v.typ) $p.cur_fn.name (...) {
|
|||
p.cgen.resetln(left + 'opt_ok($expr, sizeof($typ))')
|
||||
}
|
||||
else if !p.builtin_mod && !p.check_types_no_throw(expr_type, p.assigned_type) {
|
||||
p.error_with_tok( 'cannot use type `$expr_type` as type `$p.assigned_type` in assignment', errtok)
|
||||
p.error_with_token_index( 'cannot use type `$expr_type` as type `$p.assigned_type` in assignment', errtok)
|
||||
}
|
||||
if (is_str || is_ustr) && tok == .plus_assign && !p.is_js {
|
||||
p.gen(')')
|
||||
|
@ -1486,10 +1406,14 @@ fn (p mut Parser) var_decl() {
|
|||
}
|
||||
|
||||
mut names := []string
|
||||
mut vtoken_idxs := []int
|
||||
|
||||
vtoken_idxs << p.cur_tok_index()
|
||||
names << p.check_name()
|
||||
p.scanner.validate_var_name(names[0])
|
||||
for p.tok == .comma {
|
||||
p.check(.comma)
|
||||
vtoken_idxs << p.cur_tok_index()
|
||||
names << p.check_name()
|
||||
}
|
||||
mr_var_name := if names.len > 1 { '__ret_'+names.join('_') } else { names[0] }
|
||||
|
@ -1504,30 +1428,28 @@ fn (p mut Parser) var_decl() {
|
|||
types = t.replace('_V_MulRet_', '').replace('_PTR_', '*').split('_V_')
|
||||
}
|
||||
for i, name in names {
|
||||
if name == '_' {
|
||||
if names.len == 1 {
|
||||
p.error('no new variables on left side of `:=`')
|
||||
}
|
||||
var_token_idx := vtoken_idxs[i]
|
||||
if name == '_' && names.len == 1 {
|
||||
p.error_with_token_index('no new variables on left side of `:=`', var_token_idx)
|
||||
continue
|
||||
}
|
||||
typ := types[i]
|
||||
// println('var decl tok=${p.strtok()} ismut=$is_mut')
|
||||
var_token := p.cur_tok()
|
||||
// name := p.check_name()
|
||||
// p.var_decl_name = name
|
||||
// Don't allow declaring a variable with the same name. Even in a child scope
|
||||
// (shadowing is not allowed)
|
||||
if !p.builtin_mod && p.known_var(name) {
|
||||
// v := p.cur_fn.find_var(name)
|
||||
p.error('redefinition of `$name`')
|
||||
p.error_with_token_index('redefinition of `$name`', var_token_idx)
|
||||
}
|
||||
if name.len > 1 && contains_capital(name) {
|
||||
p.error('variable names cannot contain uppercase letters, use snake_case instead')
|
||||
p.error_with_token_index('variable names cannot contain uppercase letters, use snake_case instead', var_token_idx)
|
||||
}
|
||||
if names.len > 1 {
|
||||
if names.len != types.len {
|
||||
mr_fn := p.cgen.cur_line.find_between('=', '(').trim_space()
|
||||
p.error('assignment mismatch: ${names.len} variables but `$mr_fn` returns $types.len values')
|
||||
p.error_with_token_index('assignment mismatch: ${names.len} variables but `$mr_fn` returns $types.len values', var_token_idx)
|
||||
}
|
||||
p.gen(';\n')
|
||||
p.gen('$typ $name = ${mr_var_name}.var_$i')
|
||||
|
@ -1539,8 +1461,8 @@ fn (p mut Parser) var_decl() {
|
|||
typ: typ
|
||||
is_mut: is_mut
|
||||
is_alloc: p.is_alloc || typ.starts_with('array_')
|
||||
line_nr: var_token.line_nr
|
||||
token: var_token
|
||||
line_nr: p.tokens[ var_token_idx ].line_nr
|
||||
token_idx: var_token_idx
|
||||
})
|
||||
//if p.fileis('str.v') {
|
||||
//if p.is_alloc { println('REG VAR IS ALLOC $name') }
|
||||
|
@ -3795,7 +3717,7 @@ fn (p mut Parser) return_st() {
|
|||
else {
|
||||
// Don't allow `return val` in functions that don't return anything
|
||||
if !p.is_vweb && (p.tok == .name || p.tok == .number || p.tok == .str) {
|
||||
p.error('function `$p.cur_fn.name` should not return a value')
|
||||
p.error_with_token_index('function `$p.cur_fn.name` should not return a value', p.cur_fn.fn_name_token_idx)
|
||||
}
|
||||
|
||||
if p.cur_fn.name == 'main' {
|
||||
|
@ -3818,7 +3740,7 @@ fn (p &Parser) prepend_mod(name string) string {
|
|||
|
||||
fn (p mut Parser) go_statement() {
|
||||
p.check(.key_go)
|
||||
mut gotoken := p.cur_tok()
|
||||
mut gotoken_idx := p.cur_tok_index()
|
||||
// TODO copypasta of name_expr() ?
|
||||
if p.peek() == .dot {
|
||||
// Method
|
||||
|
@ -3827,12 +3749,12 @@ fn (p mut Parser) go_statement() {
|
|||
return
|
||||
}
|
||||
p.mark_var_used(v)
|
||||
gotoken = p.cur_tok()
|
||||
gotoken_idx = p.cur_tok_index()
|
||||
p.next()
|
||||
p.check(.dot)
|
||||
typ := p.table.find_type(v.typ)
|
||||
method := p.table.find_method(typ, p.lit) or {
|
||||
p.error_with_tok('go method missing $var_name', gotoken)
|
||||
p.error_with_token_index('go method missing $var_name', gotoken_idx)
|
||||
return
|
||||
}
|
||||
p.async_fn_call(method, 0, var_name, v.typ)
|
||||
|
@ -3842,11 +3764,11 @@ fn (p mut Parser) go_statement() {
|
|||
// Normal function
|
||||
f := p.table.find_fn(p.prepend_mod(f_name)) or {
|
||||
println( p.table.debug_fns() )
|
||||
p.error_with_tok('can not find function $f_name', gotoken)
|
||||
p.error_with_token_index('can not find function $f_name', gotoken_idx)
|
||||
return
|
||||
}
|
||||
if f.name == 'println' || f.name == 'print' {
|
||||
p.error_with_tok('`go` cannot be used with `println`', gotoken)
|
||||
p.error_with_token_index('`go` cannot be used with `println`', gotoken_idx)
|
||||
}
|
||||
p.async_fn_call(f, 0, '', '')
|
||||
}
|
||||
|
@ -3868,6 +3790,7 @@ fn (p mut Parser) js_decode() string {
|
|||
p.check(.name)// json
|
||||
p.check(.dot)
|
||||
op := p.check_name()
|
||||
op_token_idx := p.cur_tok_index()
|
||||
if op == 'decode' {
|
||||
// User tmp2; tmp2.foo = 0; tmp2.bar = 0;// I forgot to zero vals before => huge bug
|
||||
// Option_User tmp3 = jsdecode_User(json_parse( s), &tmp2); ;
|
||||
|
@ -3915,7 +3838,7 @@ fn (p mut Parser) js_decode() string {
|
|||
return 'string'
|
||||
}
|
||||
else {
|
||||
p.error('bad json op "$op"')
|
||||
p.error_with_token_index('bad json op "$op"', op_token_idx)
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
@ -3923,6 +3846,7 @@ fn (p mut Parser) js_decode() string {
|
|||
fn (p mut Parser) attribute() {
|
||||
p.check(.lsbr)
|
||||
p.attr = p.check_name()
|
||||
attr_token_idx := p.cur_tok_index()
|
||||
if p.tok == .colon {
|
||||
p.check(.colon)
|
||||
p.attr = p.attr + ':' + p.check_name()
|
||||
|
@ -3938,7 +3862,7 @@ fn (p mut Parser) attribute() {
|
|||
p.attr = ''
|
||||
return
|
||||
}
|
||||
p.error('bad attribute usage')
|
||||
p.error_with_token_index('bad attribute usage', attr_token_idx)
|
||||
}
|
||||
|
||||
fn (p mut Parser) defer_st() {
|
||||
|
@ -3986,10 +3910,6 @@ fn (p mut Parser) check_unused_imports() {
|
|||
}
|
||||
}
|
||||
if output == '' { return }
|
||||
output = '$p.file_path: the following imports were never used:$output'
|
||||
if p.pref.is_prod {
|
||||
verror(output)
|
||||
} else {
|
||||
println('warning: $output')
|
||||
}
|
||||
// the imports are usually at the start of the file
|
||||
p.production_error_with_token_index( 'the following imports were never used: $output', 0 )
|
||||
}
|
||||
|
|
|
@ -7,7 +7,6 @@ module main
|
|||
import (
|
||||
os
|
||||
strings
|
||||
term
|
||||
)
|
||||
|
||||
const (
|
||||
|
@ -81,72 +80,6 @@ fn new_scanner(text string) &Scanner {
|
|||
}
|
||||
|
||||
|
||||
// The goal of ScannerPos is to track the current scanning position,
|
||||
// so that if there is an error found later, v could show a more accurate
|
||||
// position about where the error initially was.
|
||||
// NB: The fields of ScannerPos *should be kept synchronized* with the
|
||||
// corresponding fields in Scanner.
|
||||
struct ScannerPos {
|
||||
mut:
|
||||
pos int
|
||||
line_nr int
|
||||
last_nl_pos int
|
||||
}
|
||||
fn (s ScannerPos) str() string {
|
||||
return 'ScannerPos{ ${s.pos:5d} , ${s.line_nr:5d} , ${s.last_nl_pos:5d} }'
|
||||
}
|
||||
fn (s &Scanner) get_scanner_pos() ScannerPos {
|
||||
return ScannerPos{ pos: s.pos line_nr: s.line_nr last_nl_pos: s.last_nl_pos }
|
||||
}
|
||||
fn (s mut Scanner) goto_scanner_position(scp ScannerPos) {
|
||||
s.pos = scp.pos
|
||||
s.line_nr = scp.line_nr
|
||||
s.last_nl_pos = scp.last_nl_pos
|
||||
}
|
||||
|
||||
// get_scanner_pos_of_token rescans *the whole source* till it reaches {t.line_nr, t.col} .
|
||||
fn (s mut Scanner) get_scanner_pos_of_token(t Tok) ScannerPos {
|
||||
// This rescanning is done just once on error, so it is fine for now.
|
||||
// Be careful for the performance implications, if you want to
|
||||
// do it more frequently. The alternative would be to store
|
||||
// the scanpos (12 bytes) for each token, and there are potentially many tokens.
|
||||
tline := t.line_nr
|
||||
tcol := if t.line_nr == 0 { t.col + 1 } else { t.col - 1 }
|
||||
// save the current scanner position, it will be restored later
|
||||
cpos := s.get_scanner_pos()
|
||||
mut sptoken := ScannerPos{}
|
||||
// Starting from the start, scan the source lines
|
||||
// till the desired tline is reached, then
|
||||
// s.pos + tcol would be the proper position
|
||||
// of the token. Continue scanning for some more lines of context too.
|
||||
s.goto_scanner_position(ScannerPos{})
|
||||
s.file_lines = []string
|
||||
mut prevlinepos := 0
|
||||
for {
|
||||
prevlinepos = s.pos
|
||||
if s.pos >= s.text.len { break }
|
||||
if s.line_nr > tline + 10 { break }
|
||||
////////////////////////////////////////
|
||||
if tline == s.line_nr {
|
||||
sptoken = s.get_scanner_pos()
|
||||
sptoken.pos += tcol
|
||||
}
|
||||
s.ignore_line() s.eat_single_newline()
|
||||
sline := s.text.substr( prevlinepos, s.pos ).trim_right('\r\n')
|
||||
s.file_lines << sline
|
||||
}
|
||||
//////////////////////////////////////////////////
|
||||
s.goto_scanner_position(cpos)
|
||||
return sptoken
|
||||
}
|
||||
fn (s mut Scanner) eat_single_newline(){
|
||||
if s.pos >= s.text.len { return }
|
||||
if s.expect('\r\n', s.pos) { s.pos += 2 return }
|
||||
if s.text[ s.pos ] == `\n` { s.pos ++ return }
|
||||
if s.text[ s.pos ] == `\r` { s.pos ++ return }
|
||||
}
|
||||
|
||||
|
||||
// TODO remove once multiple return values are implemented
|
||||
struct ScanRes {
|
||||
tok Token
|
||||
|
@ -652,52 +585,6 @@ fn (s &Scanner) current_column() int {
|
|||
return s.pos - s.last_nl_pos
|
||||
}
|
||||
|
||||
fn imax(a,b int) int { return if a > b { a } else { b } }
|
||||
fn imin(a,b int) int { return if a < b { a } else { b } }
|
||||
fn (s &Scanner) error(msg string) {
|
||||
s.error_with_col(msg, 0)
|
||||
}
|
||||
|
||||
fn (s &Scanner) error_with_col(msg string, col int) {
|
||||
fullpath := os.realpath( s.file_path )
|
||||
color_on := s.should_print_errors_in_color && term.can_show_color()
|
||||
final_message := if color_on { term.red( term.bold( msg ) ) } else { msg }
|
||||
// The filepath:line:col: format is the default C compiler
|
||||
// error output format. It allows editors and IDE's like
|
||||
// emacs to quickly find the errors in the output
|
||||
// and jump to their source with a keyboard shortcut.
|
||||
// Using only the filename leads to inability of IDE/editors
|
||||
// to find the source file, when it is in another folder.
|
||||
eprintln('${fullpath}:${s.line_nr + 1}:${col}: $final_message')
|
||||
|
||||
if s.should_print_line_on_error && s.file_lines.len > 0 {
|
||||
context_start_line := imax(0, (s.line_nr - error_context_before + 1 ))
|
||||
context_end_line := imin(s.file_lines.len, (s.line_nr + error_context_after + 1 ))
|
||||
for cline := context_start_line; cline < context_end_line; cline++ {
|
||||
line := '${(cline+1):5d}| ' + s.file_lines[ cline ]
|
||||
coloredline := if cline == s.line_nr && color_on { term.red(line) } else { line }
|
||||
println( coloredline )
|
||||
if cline != s.line_nr { continue }
|
||||
// The pointerline should have the same spaces/tabs as the offending
|
||||
// line, so that it prints the ^ character exactly on the *same spot*
|
||||
// where it is needed. That is the reason we can not just
|
||||
// use strings.repeat(` `, col) to form it.
|
||||
mut pointerline := []string
|
||||
for i , c in line {
|
||||
if i < col {
|
||||
x := if c.is_space() { c } else { ` ` }
|
||||
pointerline << x.str()
|
||||
continue
|
||||
}
|
||||
pointerline << if color_on { term.bold( term.blue('^') ) } else { '^' }
|
||||
break
|
||||
}
|
||||
println( ' ' + pointerline.join('') )
|
||||
}
|
||||
}
|
||||
exit(1)
|
||||
}
|
||||
|
||||
fn (s Scanner) count_symbol_before(p int, sym byte) int {
|
||||
mut count := 0
|
||||
for i:=p; i>=0; i-- {
|
||||
|
|
|
@ -97,7 +97,7 @@ mut:
|
|||
is_c bool // todo remove once `typ` is `Type`, not string
|
||||
is_moved bool
|
||||
line_nr int
|
||||
token Tok // TODO: use only var.token.line_nr, remove var.line_nr
|
||||
token_idx int // this is a token index, which will be used by error reporting
|
||||
}
|
||||
|
||||
struct Type {
|
||||
|
|
|
@ -94,6 +94,7 @@ fn (c &Context) prepare_v( cdir string, commit string ) {
|
|||
show_sizes_of_files(["$cdir/v", "$cdir/v_stripped", "$cdir/v_stripped_upxed"])
|
||||
show_sizes_of_files(["$cdir/vprod", "$cdir/vprod_stripped", "$cdir/vprod_stripped_upxed"])
|
||||
println("V version is: " + run("$cdir/v --version") + " , local source commit: " + run("git rev-parse --short --verify HEAD") )
|
||||
println('Source lines in compiler/ ' + run('wc compiler/*.v | tail -n -1') )
|
||||
}
|
||||
|
||||
|
||||
|
@ -119,7 +120,7 @@ fn validate_commit_exists( commit string ){
|
|||
}
|
||||
|
||||
fn main(){
|
||||
used_tools_must_exist(['cp','rm','strip','make','git','upx','cc','hyperfine'])
|
||||
used_tools_must_exist(['cp','rm','strip','make','git','upx','cc','wc','tail','hyperfine'])
|
||||
mut context := new_context()
|
||||
mut fp := flag.new_flag_parser(os.args)
|
||||
fp.application(os.filename(os.executable()))
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
module term
|
||||
|
||||
pub fn can_show_color_on_stdout() bool {
|
||||
return can_show_color_on_fd(1)
|
||||
}
|
||||
|
||||
pub fn can_show_color_on_stderr() bool {
|
||||
return can_show_color_on_fd(2)
|
||||
}
|
|
@ -4,8 +4,8 @@ import os
|
|||
|
||||
fn C.isatty(int) int
|
||||
|
||||
pub fn can_show_color() bool {
|
||||
pub fn can_show_color_on_fd(fd int) bool {
|
||||
if os.getenv('TERM') == 'dumb' { return false }
|
||||
if C.isatty(1) != 0 { return true }
|
||||
if C.isatty(fd) != 0 { return true }
|
||||
return false
|
||||
}
|
||||
|
|
|
@ -4,7 +4,7 @@ import os
|
|||
|
||||
// TODO: implement proper checking on windows too.
|
||||
// For now, just return false by default
|
||||
pub fn can_show_color() bool {
|
||||
pub fn can_show_color_on_fd(fd int) bool {
|
||||
if os.getenv('TERM') == 'dumb' { return false }
|
||||
return false
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue