tools: add cmd/tools/measure/scanner_speed.v and cmd/tools/measure/parser_speed.v
parent d20eae2d34
commit 7ca1d2a93c
cmd/tools/measure/parser_speed.v
@@ -0,0 +1,67 @@
import os
import time
import v.ast
import v.pref
import v.parser
import v.errors
import v.scanner

fn main() {
	files := os.args#[1..]
	if files.len > 0 && files[0].starts_with('@') {
		lst_path := files[0].all_after('@')
		listed_files := os.read_file(lst_path)?.split('\n')
		process_files(listed_files)?
		return
	}
	process_files(files)?
}

fn process_files(files []string) ? {
	mut table := ast.new_table()
	mut pref := pref.new_preferences()
	pref.is_fmt = true
	pref.skip_warnings = true
	pref.output_mode = .silent
	mut sw := time.new_stopwatch()
	mut total_us := i64(0)
	mut total_bytes := i64(0)
	mut total_tokens := i64(0)
	for f in files {
		if f == '' {
			continue
		}
		if f.ends_with('_test.v') {
			continue
		}
		// do not measure the scanning, but only the parsing:
		mut p := new_parser(f, .skip_comments, table, pref)
		///
		sw.restart()
		_ := p.parse()
		f_us := sw.elapsed().microseconds()
		///
		total_us += f_us
		total_bytes += p.scanner.text.len
		total_tokens += p.scanner.all_tokens.len
		println('${f_us:10}us ${p.scanner.all_tokens.len:10} ${p.scanner.text.len:10} ${(f64(p.scanner.text.len) / p.scanner.all_tokens.len):7.3} ${p.errors.len:4} $f')
	}
	println('${total_us:10}us ${total_tokens:10} ${total_bytes:10} ${(f64(total_tokens) / total_bytes):7.3} | speed: ${(f64(total_bytes) / total_us):2.5f} MB/s')
}

fn new_parser(path string, comments_mode scanner.CommentsMode, table &ast.Table, pref &pref.Preferences) &parser.Parser {
	mut p := &parser.Parser{
		scanner: scanner.new_scanner_file(path, comments_mode, pref) or { panic(err) }
		comments_mode: comments_mode
		table: table
		pref: pref
		scope: &ast.Scope{
			start_pos: 0
			parent: table.global_scope
		}
		errors: []errors.Error{}
		warnings: []errors.Warning{}
	}
	p.set_path(path)
	return p
}
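Both tools accept either individual .v file paths or a single `@some.lst` argument, in which case `some.lst` is read and each of its lines is treated as a file path. A minimal sketch of a helper that produces such a list (this helper is not part of the commit; the `v_files.lst` name and the default `vlib` root are arbitrary choices, only `os.walk_ext` and `os.write_file` are standard library calls):

import os

fn main() {
	// collect every .v file under the given root (default: vlib/) and write the
	// paths, one per line, into a list file usable as `@v_files.lst`
	root := if os.args.len > 1 { os.args[1] } else { 'vlib' }
	files := os.walk_ext(root, '.v')
	os.write_file('v_files.lst', files.join('\n')) or { panic(err) }
	println('wrote ${files.len} paths to v_files.lst')
}

The generated list can then be passed as the single argument `@v_files.lst` when running the measuring tool (for example via `v run cmd/tools/measure/parser_speed.v @v_files.lst` from the repository root; the exact invocation is illustrative).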
cmd/tools/measure/scanner_speed.v
@@ -0,0 +1,42 @@
import os
import time
import v.scanner
import v.pref

fn main() {
	files := os.args#[1..]
	if files.len > 0 && files[0].starts_with('@') {
		lst_path := files[0].all_after('@')
		listed_files := os.read_file(lst_path)?.split('\n')
		process_files(listed_files)?
		return
	}
	process_files(files)?
}

fn process_files(files []string) ? {
	mut pref := pref.new_preferences()
	pref.is_fmt = true
	pref.skip_warnings = true
	pref.output_mode = .silent
	mut sw := time.new_stopwatch()
	mut total_us := i64(0)
	mut total_bytes := i64(0)
	mut total_tokens := i64(0)
	for f in files {
		if f == '' {
			continue
		}
		if f.ends_with('_test.v') {
			continue
		}
		sw.restart()
		s := scanner.new_scanner_file(f, .skip_comments, pref)?
		f_us := sw.elapsed().microseconds()
		total_us += f_us
		total_bytes += s.text.len
		total_tokens += s.all_tokens.len
		println('${f_us:10}us ${s.all_tokens.len:10} ${s.text.len:10} ${(f64(s.text.len) / s.all_tokens.len):7.3f} $f')
	}
	println('${total_us:10}us ${total_tokens:10} ${total_bytes:10} ${(f64(total_tokens) / total_bytes):7.3f} | speed: ${(f64(total_bytes) / total_us):2.5f} MB/s')
}
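A note on the units in the summary line: `total_bytes` is in bytes and `total_us` in microseconds, so their ratio is bytes per microsecond, which is numerically identical to MB/s (10^6 bytes per 10^6 µs). A tiny self-contained check of that arithmetic, using invented numbers and the same format expression as the tools above:

fn main() {
	total_bytes := i64(5_000_000) // pretend 5 MB of source was processed
	total_us := i64(250_000) // in 0.25 s
	// mirrors the summary format used above; prints `speed: 20.00000 MB/s`
	println('speed: ${(f64(total_bytes) / total_us):2.5f} MB/s')
}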
vlib/v/parser/parser.v
@@ -23,7 +23,6 @@ mut:
	file_name_dir     string       // "/home/user"
	unique_prefix     string       // a hash of p.file_name, used for making anon fn generation unique
	file_backend_mode ast.Language // .c for .c.v|.c.vv|.c.vsh files; .js for .js.v files, .amd64/.rv32/other arches for .amd64.v/.rv32.v/etc. files, .v otherwise.
-	scanner           &scanner.Scanner
	comments_mode     scanner.CommentsMode = .skip_comments
	// see comment in parse_file
	tok                       token.Token
@@ -78,10 +77,6 @@ mut:
	returns                   bool
	is_stmt_ident             bool // true while the beginning of a statement is an ident/selector
	expecting_type            bool // `is Type`, expecting type
-	errors                    []errors.Error
-	warnings                  []errors.Warning
-	notices                   []errors.Notice
-	vet_errors                []vet.Error
	cur_fn_name               string
	label_names               []string
	name_error                bool // indicates if the token is not a name or the name is on another line
@@ -95,6 +90,12 @@ mut:
	if_cond_comments          []ast.Comment
	script_mode               bool
	script_mode_start_token   token.Token
+pub mut:
+	scanner &scanner.Scanner
+	errors                    []errors.Error
+	warnings                  []errors.Warning
+	notices                   []errors.Notice
+	vet_errors                []vet.Error
}

__global codegen_files = []&ast.File{}
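The parser change above is what makes the two standalone tools possible: `scanner`, `errors`, `warnings`, `notices` and `vet_errors` move from the private `mut:` section of `Parser` into a `pub mut:` section, so that code outside the `v.parser` module (such as `parser_speed.v`, which builds a `parser.Parser` directly and then reads `p.scanner.text`, `p.scanner.all_tokens` and `p.errors`) can initialize and inspect those fields.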