compiler: make vgen use strings.Builder & allow parser creation from string

pull/2130/head^2
joe-conigliaro 2019-09-27 21:02:01 +10:00 committed by Alexander Medvednikov
parent 8cbfd7a9da
commit ad99b82930
6 changed files with 100 additions and 69 deletions
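At a glance: the compiler previously streamed generated str() methods into a temporary file (vgen.tmp) and reopened it with a file-based parser; after this commit it accumulates them in an in-memory strings.Builder and parses the resulting string directly. A condensed sketch of the new flow, pieced together from the hunks below (simplified, not the full new_v()/compile() bodies):

// in new_v(): set up the in-memory buffer for generated V code
mut vgen_buf := strings.new_builder(1000)
vgen_buf.writeln('module main\nimport strings')
// ... gen_array_str()/gen_struct_str() later append str() methods via vgen_buf.writeln() ...
// in compile(): parse the generated code under the parser id 'vgen', then free the buffer
mut vgen_parser := v.new_parser_string_id(v.vgen_buf.str(), 'vgen')
v.vgen_buf.free()
vgen_parser.parse(.main)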

View File

@ -117,7 +117,7 @@ fn (p mut Parser) comp_time() {
// Parse the function and embed resulting C code in current function so that
// all variables are available.
pos := p.cgen.lines.len - 1
mut pp := p.v.new_parser('.vwebtmpl.v')
mut pp := p.v.new_parser_file('.vwebtmpl.v')
if !p.pref.is_debug {
os.rm('.vwebtmpl.v')
}
@ -245,7 +245,7 @@ fn (p mut Parser) gen_array_str(typ Type) {
!p.table.type_has_method(elm_type2, 'str') {
p.error('cant print ${elm_type}[], unhandled print of ${elm_type}')
}
p.v.vgen_file.writeln('
p.v.vgen_buf.writeln('
fn (a $typ.name) str() string {
mut sb := strings.new_builder(a.len * 3)
sb.write("[")
@ -281,7 +281,7 @@ fn (p mut Parser) gen_struct_str(typ Type) {
}
sb.writeln("\n}'")
sb.writeln('}')
p.v.vgen_file.writeln(sb.str())
p.v.vgen_buf.writeln(sb.str())
// Need to manually add the definition to `fns` so that it stays
// at the top of the file.
// This function will get parsed by V after the main pass.
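For context, what gen_array_str() writes into vgen_buf is ordinary V source. For a hypothetical array_int receiver the template above would expand to roughly the following (the element-formatting loop is not part of this hunk and is approximated here):

fn (a array_int) str() string {
	mut sb := strings.new_builder(a.len * 3)
	sb.write('[')
	for i, elm in a {
		sb.write(elm.str())
		if i < a.len - 1 {
			sb.write(', ')
		}
	}
	sb.write(']')
	return sb.str()
}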

View File

@ -178,7 +178,7 @@ fn (p mut Parser) fn_decl() {
}
// Don't allow modifying types from a different module
if !p.first_pass() && !p.builtin_mod && T.mod != p.mod &&
!p.fileis(vgen_file_name) { // allow .str() on builtin arrays
p.id != 'vgen' { // allow .str() on builtin arrays
println('T.mod=$T.mod')
println('p.mod=$p.mod')
p.error('cannot define new methods on non-local type `$receiver_typ`')

View File

@ -74,7 +74,7 @@ mut:
vroot string
mod string // module being built with -lib
parsers []Parser
vgen_file os.File
vgen_buf strings.Builder
}
struct Preferences {
@ -213,7 +213,7 @@ fn main() {
fn (v mut V) add_parser(parser Parser) {
for p in v.parsers {
if p.file_path == parser.file_path {
if p.id == parser.id {
return
}
}
@ -334,9 +334,10 @@ fn (v mut V) compile() {
// new vfmt is not ready yet
}
}
// Close the file with generated V code (str() methods etc) and parse it
v.vgen_file.close()
mut vgen_parser := v.new_parser(vgen_file_name)
// parse generated V code (str() methods etc)
mut vgen_parser := v.new_parser_string_id(v.vgen_buf.str(), 'vgen')
// free the string builder which held the generated methods
v.vgen_buf.free()
vgen_parser.parse(.main)
v.log('Done parsing.')
// Write everything
@ -590,13 +591,13 @@ fn (v mut V) add_v_files_to_compile() {
}
// Parse builtin imports
for file in v.files {
mut p := v.new_parser(file)
mut p := v.new_parser_file(file)
p.parse(.imports)
//if p.pref.autofree { p.scanner.text.free() free(p.scanner) }
}
// Parse user imports
for file in user_files {
mut p := v.new_parser(file)
mut p := v.new_parser_file(file)
p.parse(.imports)
//if p.pref.autofree { p.scanner.text.free() free(p.scanner) }
}
@ -615,7 +616,7 @@ fn (v mut V) add_v_files_to_compile() {
}
// Add all imports referenced by these libs
for file in vfiles {
mut p := v.new_parser(file, Pass.imports)
mut p := v.new_parser_file(file, Pass.imports)
p.parse()
if p.pref.autofree { p.scanner.text.free() free(p.scanner) }
@ -635,7 +636,7 @@ fn (v mut V) add_v_files_to_compile() {
}
// Add all imports referenced by these libs
for file in vfiles {
mut p := v.new_parser(file)
mut p := v.new_parser_file(file)
p.parse(.imports)
//if p.pref.autofree { p.scanner.text.free() free(p.scanner) }
}
@ -737,9 +738,8 @@ fn (v &V) log(s string) {
}
fn new_v(args[]string) &V {
os.rm(vgen_file_name)
vgen_file := os.open_append(vgen_file_name) or { panic(err) }
vgen_file.writeln('module main\nimport strings')
mut vgen_buf := strings.new_builder(1000)
vgen_buf.writeln('module main\nimport strings')
joined_args := args.join(' ')
target_os := get_arg(joined_args, 'os', '')
@ -932,7 +932,7 @@ fn new_v(args[]string) &V {
vroot: vroot
pref: pref
mod: mod
vgen_file: vgen_file
vgen_buf: vgen_buf
}
}

View File

@ -7,10 +7,7 @@ module main
import (
os
strings
crypto.sha1
)
const (
vgen_file_name = 'vgen.tmp'
)
// TODO rename to Token
@ -24,6 +21,7 @@ struct Tok {
}
struct Parser {
id string // unique id. if parsing file will be same as file_path
file_path string // "/home/user/hello.v"
file_name string // "hello.v"
file_platform string // ".v", "_win.v", "_nix.v", "_mac.v", "_lin.v" ...
@ -97,7 +95,22 @@ const (
MaxModuleDepth = 4
)
fn (v mut V) new_parser(path string) Parser {
// new parser from string. the parser id will be the sha1 hash of `text`
fn (v mut V) new_parser_string(text string) Parser {
return v.new_parser_string_id(text, sha1.hexhash(text))
}
// new parser from string, with the id specified in `id`
fn (v mut V) new_parser_string_id(text string, id string) Parser {
mut p := v.new_parser(new_scanner(text), id)
p.import_table = v.table.get_file_import_table(id)
p.scan_tokens()
v.add_parser(p)
return p
}
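The two string constructors differ only in how the parser id is chosen: new_parser_string derives it from a sha1 hash of the source, while new_parser_string_id takes it explicitly (main.v above passes 'vgen'). A usage sketch with an illustrative source string:

code := 'fn (u User) str() string { return u.name }' // illustrative source text
mut p1 := v.new_parser_string(code)            // parser id == sha1.hexhash(code)
mut p2 := v.new_parser_string_id(code, 'vgen') // parser id chosen by the caller
p1.parse(.main)
p2.parse(.main)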
// new parser from file.
fn (v mut V) new_parser_file(path string) Parser {
//println('new_parser("$path")')
mut path_pcguard := ''
mut path_platform := '.v'
@ -108,21 +121,33 @@ fn (v mut V) new_parser(path string) Parser {
break
}
}
//vgen_file := os.open_append(vgen_file_name) or { panic(err) }
mut p := v.new_parser(new_scanner_file(path), path)
p = { p|
file_path: path,
file_name: path.all_after('/'),
file_platform: path_platform,
file_pcguard: path_pcguard,
import_table: v.table.get_file_import_table(path),
is_script: (v.pref.is_script && path == v.dir)
}
v.cgen.file = path
p.scan_tokens()
//p.scanner.debug_tokens()
v.add_parser(p)
return p
}
fn (v mut V) new_parser(scanner &Scanner, id string) Parser {
mut p := Parser {
id: id
scanner: scanner
v: v
file_path: path
file_name: path.all_after('/')
file_platform: path_platform
file_pcguard: path_pcguard
scanner: new_scanner(path)
table: v.table
import_table: v.table.get_file_import_table(path)
cur_fn: EmptyFn
cgen: v.cgen
is_script: (v.pref.is_script && path == v.dir)
is_script: false
pref: v.pref
os: v.os
vroot: v.vroot
@ -135,24 +160,25 @@ fn (v mut V) new_parser(path string) Parser {
p.scanner.should_print_line_on_error = false
}
v.cgen.line_directives = v.pref.is_debuggable
v.cgen.file = path
for {
res := p.scanner.scan()
p.tokens << Tok {
tok: res.tok
lit: res.lit
line_nr: p.scanner.line_nr
col: p.scanner.pos - p.scanner.last_nl_pos
}
if res.tok == .eof {
break
}
}
v.add_parser(p)
//p.scanner.debug_tokens()
// v.cgen.file = path
return p
}
fn (p mut Parser) scan_tokens() {
for {
res := p.scanner.scan()
p.tokens << Tok {
tok: res.tok
lit: res.lit
line_nr: p.scanner.line_nr
col: p.scanner.pos - p.scanner.last_nl_pos
}
if res.tok == .eof {
break
}
}
}
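After this refactor new_parser(scanner, id) is the shared core; the file and string variants differ only in how they build the scanner and pick the id, and both tokenize up front with scan_tokens(). A condensed sketch of the layering (text, id and path stand for the callers' arguments):

// string source: the id is supplied by the caller (or derived via sha1)
mut sp := v.new_parser(new_scanner(text), id)
// file source: the id is simply the file path
mut fp := v.new_parser(new_scanner_file(path), path)
// both variants then scan all tokens and register themselves with V
sp.scan_tokens()
v.add_parser(sp)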
fn (p mut Parser) set_current_fn(f Fn) {
p.cur_fn = f
//p.cur_fn = p.table.fns[f.name]
@ -164,9 +190,9 @@ fn (p mut Parser) next() {
p.prev_tok = p.tok
p.scanner.prev_tok = p.tok
if p.token_idx >= p.tokens.len {
p.tok = Token.eof
p.lit = ''
return
p.tok = Token.eof
p.lit = ''
return
}
res := p.tokens[p.token_idx]
p.token_idx++
@ -238,7 +264,7 @@ fn (p mut Parser) parse(pass Pass) {
p.error('module `builtin` cannot be imported')
}
// save file import table
p.table.file_imports[p.file_path] = p.import_table
p.table.file_imports[p.id] = p.import_table
return
}
// Go through every top level token or throw a compilation error if a non-top level token is met
@ -1736,7 +1762,7 @@ fn (p mut Parser) name_expr() string {
// struct initialization
else if p.peek() == .lcbr {
if ptr {
name += '*' // `&User{}` => type `User*`
name += '*' // `&User{}` => type `User*`
}
if name == 'T' {
name = p.cur_gen_type
@ -2134,7 +2160,7 @@ fn (p mut Parser) index_expr(typ_ string, fn_ph int) string {
if is_arr {
if is_arr0 {
typ = typ.right(6)
}
}
p.gen_array_at(typ, is_arr0, fn_ph)
}
// map is tricky
@ -3885,9 +3911,9 @@ fn (p mut Parser) check_and_register_used_imported_type(typ_name string) {
fn (p mut Parser) check_unused_imports() {
// Don't run in the generated V file with `.str()`
if p.fileis(vgen_file_name) {
if p.id == 'vgen' {
return
}
}
mut output := ''
for alias, mod in p.import_table.imports {
if !p.import_table.is_used_import(alias) {

View File

@ -37,7 +37,8 @@ mut:
quote byte // which quote is used to denote current string: ' or "
}
fn new_scanner(file_path string) &Scanner {
// new scanner from file.
fn new_scanner_file(file_path string) &Scanner {
if !os.file_exists(file_path) {
verror("$file_path doesn't exist")
}
@ -46,7 +47,7 @@ fn new_scanner(file_path string) &Scanner {
verror('scanner: failed to open $file_path')
return 0
}
// BOM check
if raw_text.len >= 3 {
c_text := raw_text.str
@ -58,15 +59,21 @@ fn new_scanner(file_path string) &Scanner {
}
}
mut s := new_scanner(raw_text)
s.file_path = file_path
return s
}
// new scanner from string.
fn new_scanner(text string) &Scanner {
return &Scanner {
file_path: file_path
text: raw_text
text: text
fmt_out: strings.new_builder(1000)
should_print_line_on_error: true
}
}
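new_scanner now builds a Scanner directly from source text, while new_scanner_file keeps the file-specific work (existence check, BOM removal, setting file_path) and delegates to it. A usage sketch (the file name is illustrative):

s1 := new_scanner('fn main() {}') // in-memory source, file_path stays empty
s2 := new_scanner_file('hello.v') // reads the file, then sets s2.file_path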
struct ScannerPos {
mut:
pos int
@ -682,10 +689,10 @@ fn (s &Scanner) error_with_col(msg string, col int) {
fn (s Scanner) count_symbol_before(p int, sym byte) int {
mut count := 0
for i:=p; i>=0; i-- {
if s.text[i] != sym {
break
}
count++
if s.text[i] != sym {
break
}
count++
}
return count
}
@ -863,5 +870,3 @@ fn good_type_name(s string) bool {
}
return true
}

View File

@ -855,14 +855,14 @@ fn (table &Table) qualify_module(mod string, file_path string) string {
return mod
}
fn (table &Table) get_file_import_table(file_path string) FileImportTable {
fn (table &Table) get_file_import_table(id string) FileImportTable {
// if file_path.clone() in table.file_imports {
// return table.file_imports[file_path.clone()]
// }
// just get imports. memory error when recycling import table
mut fit := new_file_import_table(file_path)
if file_path in table.file_imports {
fit.imports = table.file_imports[file_path].imports
mut fit := new_file_import_table(id)
if id in table.file_imports {
fit.imports = table.file_imports[id].imports
}
return fit
}