ast: mark ast.File{} with `[heap]`, and use []&ast.File consistently

pull/10176/head
Delyan Angelov 2021-05-22 18:59:17 +03:00
parent e512caf8f5
commit bc01de2181
13 changed files with 39 additions and 39 deletions
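For readers skimming the diff: in V, the `[heap]` attribute forces every instance of a struct to be allocated on the heap, so `&` references to it stay valid when they are returned from functions or stored in arrays such as `[]&ast.File`. The sketch below is illustrative only and not part of this commit; `Node` and `make_node` are invented names.

```v
[heap]
struct Node {
	name string
}

// Returning a reference is safe here: because Node is marked [heap],
// the instance lives on the heap rather than in this function's stack frame.
fn make_node(name string) &Node {
	return &Node{
		name: name
	}
}

fn main() {
	mut nodes := []&Node{} // an array of references, analogous to []&ast.File
	for name in ['a.v', 'b.v'] {
		nodes << make_node(name)
	}
	for n in nodes {
		println(n.name)
	}
}
```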


@@ -552,16 +552,17 @@ pub:
// Each V source file is represented by one File structure.
// When the V compiler runs, the parser will fill an []File.
// That array is then passed to V's checker.
+[heap]
pub struct File {
pub:
-path string // absolute path of the source file - '/projects/v/file.v'
-path_base string // file name - 'file.v' (useful for tracing)
nr_lines int // number of source code lines in the file (including newlines and comments)
nr_bytes int // number of processed source code bytes
mod Module // the module of the source file (from `module xyz` at the top)
global_scope &Scope
is_test bool // true for _test.v files
pub mut:
+path string // absolute path of the source file - '/projects/v/file.v'
+path_base string // file name - 'file.v' (useful for tracing)
scope &Scope
stmts []Stmt // all the statements in the source file
imports []Import // all the imports


@@ -3,7 +3,7 @@ import v.ast.walker
import v.parser
import v.pref
-fn parse_text(text string) ast.File {
+fn parse_text(text string) &ast.File {
tbl := ast.new_table()
prefs := pref.new_preferences()
scope := &ast.Scope{
@@ -49,7 +49,7 @@ fn test_inspect() {
module main
'
file := parse_text(source)
-walker.inspect(&file, voidptr(0), fn (node ast.Node, data voidptr) bool {
+walker.inspect(file, voidptr(0), fn (node ast.Node, data voidptr) bool {
// Second visit must be ast.Stmt
if node is ast.Stmt {
if node !is ast.Module {
@@ -61,7 +61,6 @@ module main
assert mod.name == 'main'
return false
}
// First visit must be ast.File
assert node is ast.File
// True means that the inspector must now
// inspect the ast.File's children


@@ -17,7 +17,7 @@ pub:
module_path string
mut:
pref &pref.Preferences
-checker checker.Checker
+checker &checker.Checker
global_scope &ast.Scope
out_name_c string
out_name_js string
@@ -26,7 +26,7 @@ mut:
stats_bytes int // size of backend generated source code in bytes
pub mut:
module_search_paths []string
-parsed_files []ast.File
+parsed_files []&ast.File
cached_msvc MsvcResult
table &ast.Table
ccoptions CcompilerOptions
@@ -192,7 +192,7 @@ pub fn (mut b Builder) resolve_deps() {
eprintln(mods.str())
eprintln('-------------------------------')
}
-mut reordered_parsed_files := []ast.File{}
+mut reordered_parsed_files := []&ast.File{}
for m in mods {
for pf in b.parsed_files {
if m == pf.mod.name {


@@ -48,7 +48,7 @@ pub fn compile(command string, pref &pref.Preferences) {
compilation_time_micros := 1 + sw.elapsed().microseconds()
scompilation_time_ms := util.bold('${f64(compilation_time_micros) / 1000.0:6.3f}')
mut all_v_source_lines, mut all_v_source_bytes := 0, 0
-for mut pf in b.parsed_files {
+for pf in b.parsed_files {
all_v_source_lines += pf.nr_lines
all_v_source_bytes += pf.nr_bytes
}


@@ -34,6 +34,7 @@ const (
vroot_is_deprecated_message = '@VROOT is deprecated, use @VMODROOT or @VEXEROOT instead'
)
+[heap]
pub struct Checker {
pref &pref.Preferences // Preferences shared from V struct
pub mut:
@@ -92,12 +93,12 @@ mut:
is_c_call bool // remove once C.c_call("string") deprecation is removed
}
-pub fn new_checker(table &ast.Table, pref &pref.Preferences) Checker {
+pub fn new_checker(table &ast.Table, pref &pref.Preferences) &Checker {
mut timers_should_print := false
$if time_checking ? {
timers_should_print = true
}
-return Checker{
+return &Checker{
table: table
pref: pref
timers: util.new_timers(timers_should_print)
@@ -160,13 +161,13 @@ pub fn (mut c Checker) change_current_file(file &ast.File) {
c.mod = file.mod.name
}
-pub fn (mut c Checker) check_files(ast_files []ast.File) {
+pub fn (mut c Checker) check_files(ast_files []&ast.File) {
// c.files = ast_files
mut has_main_mod_file := false
mut has_main_fn := false
mut files_from_main_module := []&ast.File{}
for i in 0 .. ast_files.len {
-file := unsafe { &ast_files[i] }
+file := unsafe { ast_files[i] }
c.timers.start('checker_check $file.path')
c.check(file)
if file.mod.name == 'main' {
@@ -204,7 +205,7 @@ pub fn (mut c Checker) check_files(ast_files []ast.File) {
for {
for file in ast_files {
if file.generic_fns.len > 0 {
-c.change_current_file(&file)
+c.change_current_file(file)
c.post_process_generic_fns()
}
}
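The `check_files` changes above show the main payoff of switching to arrays of references: elements of a `[]ast.File` are values, so code that wanted a handle to one had to take the address of an array element inside `unsafe`; with `[]&ast.File` every element is already a heap reference and plain indexing hands one back. A minimal standalone sketch of that pattern, with an invented `Item` struct and `collect` function (not part of the V compiler):

```v
[heap]
struct Item {
	id int
}

fn collect(n int) []&Item {
	mut items := []&Item{}
	for i in 0 .. n {
		// each Item is heap allocated, so the stored reference stays valid
		// even if `items` later grows and reallocates its backing memory
		items << &Item{
			id: i
		}
	}
	return items
}

fn main() {
	items := collect(3)
	first := items[0] // no `unsafe { &items[0] }` needed: it is already a reference
	println(first.id)
}
```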


@@ -64,7 +64,7 @@ mut:
json_forward_decls strings.Builder // json type forward decls
enum_typedefs strings.Builder // enum types
sql_buf strings.Builder // for writing exprs to args via `sqlite3_bind_int()` etc
-file ast.File
+file &ast.File
fn_decl &ast.FnDecl // pointer to the FnDecl we are currently inside otherwise 0
last_fn_c_name string
tmp_count int // counter for unique tmp vars (_tmp1, tmp2 etc)
@@ -175,7 +175,7 @@ mut:
// main_fn_decl_node ast.FnDecl
}
-pub fn gen(files []ast.File, table &ast.Table, pref &pref.Preferences) string {
+pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) string {
// println('start cgen2')
mut module_built := ''
if pref.build_mode == .build_module {
@@ -192,6 +192,7 @@ pub fn gen(files []ast.File, table &ast.Table, pref &pref.Preferences) string {
timers_should_print = true
}
mut g := Gen{
+file: 0
out: strings.new_builder(512000)
cheaders: strings.new_builder(15000)
includes: strings.new_builder(100)


@@ -42,7 +42,7 @@ mut:
namespaces map[string]&Namespace
doc &JsDoc
enable_doc bool
-file ast.File
+file &ast.File
tmp_count int
inside_ternary bool
inside_loop bool
@@ -62,7 +62,7 @@ mut:
call_stack []ast.CallExpr
}
-pub fn gen(files []ast.File, table &ast.Table, pref &pref.Preferences) string {
+pub fn gen(files []&ast.File, table &ast.Table, pref &pref.Preferences) string {
mut g := &JsGen{
definitions: strings.new_builder(100)
table: table
@@ -72,6 +72,7 @@ pub fn gen(files []ast.File, table &ast.Table, pref &pref.Preferences) string {
doc: 0
ns: 0
enable_doc: true
+file: 0
}
g.doc = new_jsdoc(g)
// TODO: Add '[-no]-jsdoc' flag
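Both generators above gain a `file: 0` initializer because their `file` field is now a reference (`&ast.File`), and V requires reference fields to be given a value explicitly when the struct is created; at the time of this commit a literal `0` was the usual way to start with a null reference (later V prefers `unsafe { nil }`). An illustrative sketch with invented `SourceFile`/`Emitter` names, assuming the 2021-era zero-init idiom used in this diff:

```v
[heap]
struct SourceFile {
	path string
}

struct Emitter {
mut:
	file &SourceFile // reference fields have no implicit default, so they must be set at init
}

fn main() {
	mut e := Emitter{
		file: 0 // start with a null reference, mirroring the `file: 0` lines in this commit
	}
	e.file = &SourceFile{
		path: 'main.v'
	}
	println(e.file.path)
}
```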


@@ -67,7 +67,7 @@ fn (g &Gen) get_backend() ?CodeGen {
return error('unsupported architecture')
}
-pub fn gen(files []ast.File, table &ast.Table, out_name string, pref &pref.Preferences) (int, int) {
+pub fn gen(files []&ast.File, table &ast.Table, out_name string, pref &pref.Preferences) (int, int) {
mut g := &Gen{
table: table
sect_header_name_pos: 0


@@ -7,7 +7,7 @@ import v.util
import v.pref
// mark_used walks the AST, starting at main() and marks all used fns transitively
-pub fn mark_used(mut table ast.Table, pref &pref.Preferences, ast_files []ast.File) {
+pub fn mark_used(mut table ast.Table, pref &pref.Preferences, ast_files []&ast.File) {
mut all_fns, all_consts := all_fn_and_const(ast_files)
util.timing_start(@METHOD)
defer {
@@ -267,7 +267,7 @@ pub fn mark_used(mut table ast.Table, pref &pref.Preferences, ast_files []ast.Fi
}
}
-fn all_fn_and_const(ast_files []ast.File) (map[string]ast.FnDecl, map[string]ast.ConstField) {
+fn all_fn_and_const(ast_files []&ast.File) (map[string]ast.FnDecl, map[string]ast.ConstField) {
util.timing_start(@METHOD)
defer {
util.timing_measure(@METHOD)
@@ -275,7 +275,7 @@ fn all_fn_and_const(ast_files []ast.File) (map[string]ast.FnDecl, map[string]ast
mut all_fns := map[string]ast.FnDecl{}
mut all_consts := map[string]ast.ConstField{}
for i in 0 .. ast_files.len {
-file := unsafe { &ast_files[i] }
+file := ast_files[i]
for node in file.stmts {
match node {
ast.FnDecl {


@@ -14,7 +14,7 @@ pub mut:
n_maps int
n_asserts int
mut:
-files []ast.File
+files []&ast.File
all_fns map[string]ast.FnDecl
all_consts map[string]ast.ConstField
}


@@ -187,10 +187,7 @@ fn (mut p Parser) comp_call() ast.ComptimeCall {
println('')
}
mut file := parse_comptime(v_code, p.table, p.pref, scope, p.global_scope)
-file = ast.File{
-...file
-path: tmpl_path
-}
+file.path = tmpl_path
// copy vars from current fn scope into vweb_tmpl scope
for stmt in file.stmts {
if stmt is ast.FnDecl {
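The `comp_call` simplification above is enabled by moving `path` into `File`'s `pub mut:` section: while the field was read-only, updating it meant rebuilding the whole value with V's `...` struct-update syntax; once it is mutable, a plain assignment is enough. A small standalone sketch of the two forms, using an invented `Config` struct:

```v
struct Config {
mut:
	path string
	mode int
}

fn main() {
	mut c := Config{
		path: 'a.v'
		mode: 1
	}
	// struct-update syntax: builds a new value, copying `c` and overriding `path`
	d := Config{
		...c
		path: 'b.v'
	}
	// direct assignment: possible only because `path` sits in a `mut:` section
	c.path = 'c.v'
	println(d.path)
	println(c.path)
}
```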


@@ -101,7 +101,7 @@ pub fn parse_stmt(text string, table &ast.Table, scope &ast.Scope) ast.Stmt {
return p.stmt(false)
}
-pub fn parse_comptime(text string, table &ast.Table, pref &pref.Preferences, scope &ast.Scope, global_scope &ast.Scope) ast.File {
+pub fn parse_comptime(text string, table &ast.Table, pref &pref.Preferences, scope &ast.Scope, global_scope &ast.Scope) &ast.File {
mut p := Parser{
scanner: scanner.new_scanner(text, .skip_comments, pref)
table: table
@@ -114,7 +114,7 @@ pub fn parse_comptime(text string, table &ast.Table, pref &pref.Preferences, sco
return p.parse()
}
-pub fn parse_text(text string, path string, table &ast.Table, comments_mode scanner.CommentsMode, pref &pref.Preferences, global_scope &ast.Scope) ast.File {
+pub fn parse_text(text string, path string, table &ast.Table, comments_mode scanner.CommentsMode, pref &pref.Preferences, global_scope &ast.Scope) &ast.File {
mut p := Parser{
scanner: scanner.new_scanner(text, comments_mode, pref)
comments_mode: comments_mode
@@ -173,7 +173,7 @@ pub fn (mut p Parser) set_path(path string) {
}
}
-pub fn parse_file(path string, table &ast.Table, comments_mode scanner.CommentsMode, pref &pref.Preferences, global_scope &ast.Scope) ast.File {
+pub fn parse_file(path string, table &ast.Table, comments_mode scanner.CommentsMode, pref &pref.Preferences, global_scope &ast.Scope) &ast.File {
// NB: when comments_mode == .toplevel_comments,
// the parser gives feedback to the scanner about toplevel statements, so that the scanner can skip
// all the tricky inner comments. This is needed because we do not have a good general solution
@@ -199,7 +199,7 @@ pub fn parse_file(path string, table &ast.Table, comments_mode scanner.CommentsM
return p.parse()
}
-pub fn parse_vet_file(path string, table_ &ast.Table, pref &pref.Preferences) (ast.File, []vet.Error) {
+pub fn parse_vet_file(path string, table_ &ast.Table, pref &pref.Preferences) (&ast.File, []vet.Error) {
global_scope := &ast.Scope{
parent: 0
}
@@ -235,7 +235,7 @@ pub fn parse_vet_file(path string, table_ &ast.Table, pref &pref.Preferences) (a
return file, p.vet_errors
}
-pub fn (mut p Parser) parse() ast.File {
+pub fn (mut p Parser) parse() &ast.File {
util.timing_start('PARSE')
defer {
util.timing_measure_cumulative('PARSE')
@@ -283,7 +283,7 @@ pub fn (mut p Parser) parse() ast.File {
// println(stmts[0])
p.scope.end_pos = p.tok.pos
//
-return ast.File{
+return &ast.File{
path: p.file_name
path_base: p.file_base
is_test: p.inside_test_file
@@ -310,7 +310,7 @@
mu2 &sync.Mutex
paths []string
table &ast.Table
-parsed_ast_files []ast.File
+parsed_ast_files []&ast.File
pref &pref.Preferences
global_scope &ast.Scope
}
@@ -335,7 +335,7 @@ fn (mut q Queue) run() {
}
}
*/
-pub fn parse_files(paths []string, table &ast.Table, pref &pref.Preferences, global_scope &ast.Scope) []ast.File {
+pub fn parse_files(paths []string, table &ast.Table, pref &pref.Preferences, global_scope &ast.Scope) []&ast.File {
mut timers := util.new_timers(false)
$if time_parsing ? {
timers.should_print = true
@@ -365,7 +365,7 @@ pub fn parse_files(paths []string, table &ast.Table, pref &pref.Preferences, glo
*/
}
// ///////////////
-mut files := []ast.File{}
+mut files := []&ast.File{}
for path in paths {
// println('parse_files $path')
timers.start('parse_file $path')


@@ -43,7 +43,7 @@ fn test_eval() {
for input in inputs {
stmts << parse_stmt(input, table, scope)
}
-file := ast.File{
+file := &ast.File{
stmts: stmts
scope: scope
}
@@ -105,7 +105,7 @@ fn test_one() {
for line in input {
e << parse_stmt(line, table, scope)
}
-program := ast.File{
+program := &ast.File{
stmts: e
scope: scope
global_scope: scope
@@ -147,7 +147,7 @@ fn test_parse_expr() {
println('\n\nst="$s"')
e << parse_stmt(s, table, scope)
}
-program := ast.File{
+program := &ast.File{
stmts: e
scope: scope
global_scope: scope