compiler: make vgen use strings.Builder & allow parser creation from string

pull/2130/head^2
joe-conigliaro 2019-09-27 21:02:01 +10:00 committed by Alexander Medvednikov
parent 8cbfd7a9da
commit ad99b82930
6 changed files with 100 additions and 69 deletions
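
In short: instead of writing generated str() methods to a temporary vgen.tmp file and re-parsing that file, the compiler now collects the generated V code in a strings.Builder (vgen_buf) and hands it to new string-based parser constructors; parsers are tracked by an `id` (the file path for file parsers, 'vgen' or a sha1 hash for string parsers). A minimal sketch of the new flow, using only names that appear in the diff below; `v` stands for the compiler's V instance and the example string is made up:

    // accumulate generated V code in memory instead of a vgen.tmp file
    mut vgen_buf := strings.new_builder(1000)
    vgen_buf.writeln('module main\nimport strings')
    // ... gen_array_str()/gen_struct_str() append generated str() methods here ...

    // parse the generated code straight from the string, under the fixed id 'vgen'
    mut vgen_parser := v.new_parser_string_id(vgen_buf.str(), 'vgen')
    vgen_buf.free()
    vgen_parser.parse(.main)

    // new_parser_string derives the parser id from a sha1 hash of the text instead
    mut p := v.new_parser_string('fn answer() int { return 42 }')
    p.parse(.main)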

View File

@@ -117,7 +117,7 @@ fn (p mut Parser) comp_time() {
 	// Parse the function and embed resulting C code in current function so that
 	// all variables are available.
 	pos := p.cgen.lines.len - 1
-	mut pp := p.v.new_parser('.vwebtmpl.v')
+	mut pp := p.v.new_parser_file('.vwebtmpl.v')
 	if !p.pref.is_debug {
 		os.rm('.vwebtmpl.v')
 	}
@@ -245,7 +245,7 @@ fn (p mut Parser) gen_array_str(typ Type) {
 		!p.table.type_has_method(elm_type2, 'str') {
 		p.error('cant print ${elm_type}[], unhandled print of ${elm_type}')
 	}
-	p.v.vgen_file.writeln('
+	p.v.vgen_buf.writeln('
 fn (a $typ.name) str() string {
 	mut sb := strings.new_builder(a.len * 3)
 	sb.write("[")
@@ -281,7 +281,7 @@ fn (p mut Parser) gen_struct_str(typ Type) {
 	}
 	sb.writeln("\n}'")
 	sb.writeln('}')
-	p.v.vgen_file.writeln(sb.str())
+	p.v.vgen_buf.writeln(sb.str())
 	// Need to manually add the definition to `fns` so that it stays
 	// at the top of the file.
 	// This function will get parsee by V after the main pass.

View File

@@ -178,7 +178,7 @@ fn (p mut Parser) fn_decl() {
 	}
 	// Don't allow modifying types from a different module
 	if !p.first_pass() && !p.builtin_mod && T.mod != p.mod &&
-		!p.fileis(vgen_file_name) { // allow .str() on builtin arrays
+		p.id != 'vgen' { // allow .str() on builtin arrays
 		println('T.mod=$T.mod')
 		println('p.mod=$p.mod')
 		p.error('cannot define new methods on non-local type `$receiver_typ`')

View File

@@ -74,7 +74,7 @@ mut:
 	vroot string
 	mod string // module being built with -lib
 	parsers []Parser
-	vgen_file os.File
+	vgen_buf strings.Builder
 }
 
 struct Preferences {
@@ -213,7 +213,7 @@ fn main() {
 fn (v mut V) add_parser(parser Parser) {
 	for p in v.parsers {
-		if p.file_path == parser.file_path {
+		if p.id == parser.id {
 			return
 		}
 	}
@@ -334,9 +334,10 @@ fn (v mut V) compile() {
 		// new vfmt is not ready yet
 		}
 	}
-	// Close the file with generated V code (str() methods etc) and parse it
-	v.vgen_file.close()
-	mut vgen_parser := v.new_parser(vgen_file_name)
+	// parse generated V code (str() methods etc)
+	mut vgen_parser := v.new_parser_string_id(v.vgen_buf.str(), 'vgen')
+	// free the string builder which held the generated methods
+	v.vgen_buf.free()
 	vgen_parser.parse(.main)
 	v.log('Done parsing.')
 	// Write everything
@@ -590,13 +591,13 @@ fn (v mut V) add_v_files_to_compile() {
 	}
 	// Parse builtin imports
 	for file in v.files {
-		mut p := v.new_parser(file)
+		mut p := v.new_parser_file(file)
 		p.parse(.imports)
 		//if p.pref.autofree { p.scanner.text.free() free(p.scanner) }
 	}
 	// Parse user imports
 	for file in user_files {
-		mut p := v.new_parser(file)
+		mut p := v.new_parser_file(file)
 		p.parse(.imports)
 		//if p.pref.autofree { p.scanner.text.free() free(p.scanner) }
 	}
@@ -615,7 +616,7 @@ fn (v mut V) add_v_files_to_compile() {
 	}
 	// Add all imports referenced by these libs
 	for file in vfiles {
-		mut p := v.new_parser(file, Pass.imports)
+		mut p := v.new_parser_file(file, Pass.imports)
 		p.parse()
 		if p.pref.autofree { p.scanner.text.free() free(p.scanner) }
@@ -635,7 +636,7 @@ fn (v mut V) add_v_files_to_compile() {
 	}
 	// Add all imports referenced by these libs
 	for file in vfiles {
-		mut p := v.new_parser(file)
+		mut p := v.new_parser_file(file)
 		p.parse(.imports)
 		//if p.pref.autofree { p.scanner.text.free() free(p.scanner) }
 	}
@@ -737,9 +738,8 @@ fn (v &V) log(s string) {
 }
 
 fn new_v(args[]string) &V {
-	os.rm(vgen_file_name)
-	vgen_file := os.open_append(vgen_file_name) or { panic(err) }
-	vgen_file.writeln('module main\nimport strings')
+	mut vgen_buf := strings.new_builder(1000)
+	vgen_buf.writeln('module main\nimport strings')
 
 	joined_args := args.join(' ')
 	target_os := get_arg(joined_args, 'os', '')
@@ -932,7 +932,7 @@ fn new_v(args[]string) &V {
 		vroot: vroot
 		pref: pref
 		mod: mod
-		vgen_file: vgen_file
+		vgen_buf: vgen_buf
 	}
 }

View File

@@ -7,10 +7,7 @@ module main
 import (
 	os
 	strings
+	crypto.sha1
 )
-
-const (
-	vgen_file_name = 'vgen.tmp'
-)
 
 // TODO rename to Token
@@ -24,6 +21,7 @@ struct Tok {
 }
 
 struct Parser {
+	id string // unique id. if parsing file will be same as file_path
 	file_path string // "/home/user/hello.v"
 	file_name string // "hello.v"
 	file_platform string // ".v", "_win.v", "_nix.v", "_mac.v", "_lin.v" ...
@@ -97,7 +95,22 @@ const (
 	MaxModuleDepth = 4
 )
 
-fn (v mut V) new_parser(path string) Parser {
+// new parser from string. parser id will be hash of s
+fn (v mut V) new_parser_string(text string) Parser {
+	return v.new_parser_string_id(text, sha1.hexhash(text))
+}
+
+// new parser from string. with id specified in `id`
+fn (v mut V) new_parser_string_id(text string, id string) Parser {
+	mut p := v.new_parser(new_scanner(text), id)
+	p.import_table = v.table.get_file_import_table(id)
+	p.scan_tokens()
+	v.add_parser(p)
+	return p
+}
+
+// new parser from file.
+fn (v mut V) new_parser_file(path string) Parser {
 	//println('new_parser("$path")')
 	mut path_pcguard := ''
 	mut path_platform := '.v'
@@ -109,20 +122,32 @@ fn (v mut V) new_parser(path string) Parser {
 		}
 	}
 
-	//vgen_file := os.open_append(vgen_file_name) or { panic(err) }
-
+	mut p := v.new_parser(new_scanner_file(path), path)
+	p = { p|
+		file_path: path,
+		file_name: path.all_after('/'),
+		file_platform: path_platform,
+		file_pcguard: path_pcguard,
+		import_table: v.table.get_file_import_table(path),
+		is_script: (v.pref.is_script && path == v.dir)
+	}
+	v.cgen.file = path
+	p.scan_tokens()
+	//p.scanner.debug_tokens()
+	v.add_parser(p)
+	return p
+}
+
+fn (v mut V) new_parser(scanner &Scanner, id string) Parser {
 	mut p := Parser {
+		id: id
+		scanner: scanner
 		v: v
-		file_path: path
-		file_name: path.all_after('/')
-		file_platform: path_platform
-		file_pcguard: path_pcguard
-		scanner: new_scanner(path)
 		table: v.table
-		import_table: v.table.get_file_import_table(path)
 		cur_fn: EmptyFn
 		cgen: v.cgen
-		is_script: (v.pref.is_script && path == v.dir)
+		is_script: false
 		pref: v.pref
 		os: v.os
 		vroot: v.vroot
@@ -135,24 +160,25 @@ fn (v mut V) new_parser(path string) Parser {
 		p.scanner.should_print_line_on_error = false
 	}
 	v.cgen.line_directives = v.pref.is_debuggable
-	v.cgen.file = path
-	for {
-		res := p.scanner.scan()
-		p.tokens << Tok {
-			tok: res.tok
-			lit: res.lit
-			line_nr: p.scanner.line_nr
-			col: p.scanner.pos - p.scanner.last_nl_pos
-		}
-		if res.tok == .eof {
-			break
-		}
-	}
-	v.add_parser(p)
-	//p.scanner.debug_tokens()
+	// v.cgen.file = path
 	return p
 }
 
+fn (p mut Parser) scan_tokens() {
+	for {
+		res := p.scanner.scan()
+		p.tokens << Tok {
+			tok: res.tok
+			lit: res.lit
+			line_nr: p.scanner.line_nr
+			col: p.scanner.pos - p.scanner.last_nl_pos
+		}
+		if res.tok == .eof {
+			break
+		}
+	}
+}
+
 fn (p mut Parser) set_current_fn(f Fn) {
 	p.cur_fn = f
 	//p.cur_fn = p.table.fns[f.name]
@@ -164,9 +190,9 @@ fn (p mut Parser) next() {
 	p.prev_tok = p.tok
 	p.scanner.prev_tok = p.tok
 	if p.token_idx >= p.tokens.len {
 		p.tok = Token.eof
 		p.lit = ''
 		return
 	}
 	res := p.tokens[p.token_idx]
 	p.token_idx++
@@ -238,7 +264,7 @@ fn (p mut Parser) parse(pass Pass) {
 		p.error('module `builtin` cannot be imported')
 	}
 	// save file import table
-	p.table.file_imports[p.file_path] = p.import_table
+	p.table.file_imports[p.id] = p.import_table
 	return
 }
 	// Go through every top level token or throw a compilation error if a non-top level token is met
@@ -1736,7 +1762,7 @@ fn (p mut Parser) name_expr() string {
 	// struct initialization
 	else if p.peek() == .lcbr {
 		if ptr {
 			name += '*' // `&User{}` => type `User*`
 		}
 		if name == 'T' {
 			name = p.cur_gen_type
@@ -2134,7 +2160,7 @@ fn (p mut Parser) index_expr(typ_ string, fn_ph int) string {
 	if is_arr {
 		if is_arr0 {
 			typ = typ.right(6)
 		}
 		p.gen_array_at(typ, is_arr0, fn_ph)
 	}
 	// map is tricky
@@ -3885,7 +3911,7 @@ fn (p mut Parser) check_and_register_used_imported_type(typ_name string) {
 fn (p mut Parser) check_unused_imports() {
 	// Don't run in the generated V file with `.str()`
-	if p.fileis(vgen_file_name) {
+	if p.id == 'vgen' {
 		return
 	}
 	mut output := ''

View File

@@ -37,7 +37,8 @@ mut:
 	quote byte // which quote is used to denote current string: ' or "
 }
 
-fn new_scanner(file_path string) &Scanner {
+// new scanner from file.
+fn new_scanner_file(file_path string) &Scanner {
 	if !os.file_exists(file_path) {
 		verror("$file_path doesn't exist")
 	}
@@ -58,15 +59,21 @@ fn new_scanner(file_path string) &Scanner {
 		}
 	}
 
+	mut s := new_scanner(raw_text)
+	s.file_path = file_path
+	return s
+}
+
+// new scanner from string.
+fn new_scanner(text string) &Scanner {
 	return &Scanner {
-		file_path: file_path
-		text: raw_text
+		text: text
 		fmt_out: strings.new_builder(1000)
 		should_print_line_on_error: true
 	}
 }
 
 struct ScannerPos {
 mut:
 	pos int
@@ -682,10 +689,10 @@ fn (s &Scanner) error_with_col(msg string, col int) {
 fn (s Scanner) count_symbol_before(p int, sym byte) int {
 	mut count := 0
 	for i:=p; i>=0; i-- {
 		if s.text[i] != sym {
 			break
 		}
 		count++
 	}
 	return count
 }
@@ -863,5 +870,3 @@ fn good_type_name(s string) bool {
 	}
 	return true
 }
-
-

View File

@@ -855,14 +855,14 @@ fn (table &Table) qualify_module(mod string, file_path string) string {
 	return mod
 }
 
-fn (table &Table) get_file_import_table(file_path string) FileImportTable {
+fn (table &Table) get_file_import_table(id string) FileImportTable {
 	// if file_path.clone() in table.file_imports {
 	// 	return table.file_imports[file_path.clone()]
 	// }
 	// just get imports. memory error when recycling import table
-	mut fit := new_file_import_table(file_path)
-	if file_path in table.file_imports {
-		fit.imports = table.file_imports[file_path].imports
+	mut fit := new_file_import_table(id)
+	if id in table.file_imports {
+		fit.imports = table.file_imports[id].imports
 	}
 	return fit
 }