compiler: use token.pos instead of token.col, calculate col when needed

pull/2856/head
joe-conigliaro 2019-11-24 03:31:28 +11:00 committed by Alexander Medvednikov
parent f42be0622e
commit 666509623e
3 changed files with 15 additions and 37 deletions

View File

@@ -208,44 +208,21 @@ fn (s mut Scanner) goto_scanner_position(scp ScannerPos) {
 	s.last_nl_pos = scp.last_nl_pos
 }
-// get_scanner_pos_of_token rescans *the whole source* till it reaches {t.line_nr, t.col} .
-fn (s mut Scanner) get_scanner_pos_of_token(t &Token) ScannerPos {
-	// This rescanning is done just once on error, so it is fine for now.
-	// Be careful for the performance implications, if you want to
-	// do it more frequently. The alternative would be to store
-	// the scanpos (12 bytes) for each token, and there are potentially many tokens.
-	tline := t.line_nr
-	tcol := if t.line_nr == 0 { t.col + 1 } else { t.col - 1 }
-	// save the current scanner position, it will be restored later
-	cpos := s.get_scanner_pos()
-	mut sptoken := ScannerPos{}
-	// Starting from the start, scan the source lines
-	// till the desired tline is reached, then
-	// s.pos + tcol would be the proper position
-	// of the token.
-	s.goto_scanner_position(ScannerPos{})
-	maxline := imin( s.nlines, tline + 2 * error_context_after)
-	for {
-		if s.pos >= s.text.len { break }
-		if s.line_nr > maxline { break }
-		////////////////////////////////////////
-		if tline == s.line_nr {
-			sptoken = s.get_scanner_pos()
-			sptoken.pos += tcol
+fn (s mut Scanner) get_last_nl_from_pos(pos int) int {
+	for i := pos; i >= 0; i-- {
+		if s.text[i] == `\n` {
+			return i
 		}
-		s.ignore_line() s.eat_single_newline()
 	}
-	//////////////////////////////////////////////////
-	s.goto_scanner_position(cpos)
-	return sptoken
+	return 0
 }
-fn (s mut Scanner) eat_single_newline(){
-	if s.pos >= s.text.len { return }
-	if s.expect('\r\n', s.pos) { s.pos += 2 return }
-	if s.text[ s.pos ] == `\n` { s.pos ++ return }
-	if s.text[ s.pos ] == `\r` { s.pos ++ return }
+fn (s mut Scanner) get_scanner_pos_of_token(tok &Token) ScannerPos {
+	return ScannerPos{
+		pos: tok.pos
+		line_nr: tok.line_nr
+		last_nl_pos: s.get_last_nl_from_pos(tok.pos)
+	}
 }
-///////////////////////////////
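Not part of this diff: a minimal sketch of how a column could now be computed on demand from the stored position, mirroring the old col: p.scanner.pos - p.scanner.last_nl_pos and reusing the get_last_nl_from_pos helper added above. The method name col_of_token is hypothetical.

fn (s mut Scanner) col_of_token(tok &Token) int {
	// column = distance from the last newline before the token's position
	return tok.pos - s.get_last_nl_from_pos(tok.pos)
}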

View File

@@ -188,7 +188,7 @@ fn (p mut Parser) scan_tokens() {
 			tok: res.tok
 			lit: res.lit
 			line_nr: p.scanner.line_nr
-			col: p.scanner.pos - p.scanner.last_nl_pos
+			pos: p.scanner.pos
 		}
 		if res.tok == .eof {
 			break
@@ -2384,7 +2384,8 @@ fn (p mut Parser) array_init() string {
 	if is_integer && p.tok == .rsbr && p.peek() == .name &&
 		p.cur_tok().line_nr == p.peek_token().line_nr {
 		// there is no space between `[10]` and `byte`
-		if p.cur_tok().col + p.peek_token().lit.len == p.peek_token().col {
+		// if p.cur_tok().col + p.peek_token().lit.len == p.peek_token().col {
+		if p.cur_tok().pos + p.peek_token().lit.len == p.peek_token().pos {
 			p.check(.rsbr)
 			array_elem_typ := p.get_type()
 			if !p.table.known_type(array_elem_typ) {
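For context, not part of the commit: the adjacency check above works the same with end offsets as it did with end columns, since both tokens sit on one line. A small illustrative sketch, assuming (as the old col comment suggests) that pos marks where a token ends; the function name adjacent and the hand-picked offsets are made up for the example.

// For the source `[10]byte`: `]` ends at offset 4 and `byte` (lit.len == 4) ends at offset 8,
// so 4 + 4 == 8 and the two tokens are treated as adjacent.
fn adjacent(end_pos_a int, lit_b string, end_pos_b int) bool {
	return end_pos_a + lit_b.len == end_pos_b
}

fn main() {
	println(adjacent(4, 'byte', 8)) // `[10]byte`  -> true, no space
	println(adjacent(4, 'byte', 9)) // `[10] byte` -> false, there is a space
}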

View File

@@ -9,7 +9,7 @@ struct Token {
 	lit string // literal representation of the token
 	line_nr int // the line number in the source where the token occured
 	name_idx int // name table index for O(1) lookup
-	col int // the column where the token ends
+	pos int // the position of the token in scanner text
 }