new vh generation + simpler match statement
parent 14c273f273
commit 4574039e4d
@@ -3,6 +3,7 @@ module main
 import os
 import flag
 import compiler
+import strings

 const (
 	tool_version = '0.0.1'
@@ -24,14 +25,15 @@ fn f_to_string(fmod string, f compiler.Fn) ?string {

 fn analyze_v_file(file string) {
 	println('')
-	println('###################### $file ######################')
+	hash := strings.repeat(`#`, (76 - file.len) / 2)
+	println('$hash $file $hash')

 	// main work:
 	mut v := compiler.new_v_compiler_with_args([file])
 	v.add_v_files_to_compile()
 	for f in v.files { v.parse(f, .decl) }
 	fi := v.get_file_parser_index( file ) or { panic(err) }
 	fmod := v.parsers[fi].mod

 	// output:
 	mut fns :=[]string
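For a sense of what the new banner code prints, here is a minimal, self-contained sketch that reuses the same strings.repeat call as the patch; the path is a hypothetical example:

import strings

fn main() {
	file := 'vlib/builtin/int.v' // hypothetical 18-character path
	hash := strings.repeat(`#`, (76 - file.len) / 2) // 29 '#' characters
	println('$hash $file $hash') // 29 + 1 + 18 + 1 + 29 = 78 columns
}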
@@ -1425,3 +1425,12 @@ fn (fns []Fn) contains(f Fn) bool {
 	}
 	return false
 }
+
+pub fn (f Fn) v_fn_module() string {
+	return f.mod
+}
+
+pub fn (f Fn) v_fn_name() string {
+	return f.name.replace('${f.mod}__', '')
+}
+
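These two helpers are moved here from the table code (their removal appears further down). A minimal, self-contained sketch of what they do, using a stand-in struct instead of the real compiler.Fn and a hypothetical mangled name of the `mod__name` form implied by the replace() call:

struct MyFn {
	name string
	mod  string
}

fn (f MyFn) v_fn_module() string {
	return f.mod
}

fn (f MyFn) v_fn_name() string {
	return f.name.replace('${f.mod}__', '')
}

fn main() {
	f := MyFn{name: 'os__read_file', mod: 'os'}
	println(f.v_fn_module()) // os
	println(f.v_fn_name())   // read_file
}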
@@ -219,7 +219,7 @@ pub fn (v mut V) compile() {
 		cgen.genln('#define V_COMMIT_HASH "$v_hash"')
 		cgen.genln('#endif')
 	}

 	q := cgen.nogen // TODO hack
 	cgen.nogen = false
 	$if js {
@@ -269,7 +269,7 @@ pub fn (v mut V) compile() {
 	}
 	// Generate .vh if we are building a module
 	if v.pref.build_mode == .build_module {
-		v.generate_vh()
+		generate_vh(v.dir)
 	}

 	// parse generated V code (str() methods etc)
@@ -585,7 +585,6 @@ pub fn (v mut V) add_v_files_to_compile() {
 	v.table.file_imports[p.file_path_id] = p.import_table
 		p.table.imports << 'os'
 		p.table.register_module('os')
 		println('got v script')
 	}
 	//if p.pref.autofree { p.scanner.text.free() free(p.scanner) }
 	v.add_parser(p)
@@ -117,7 +117,81 @@ fn v_type_str(typ_ string) string {
 	return typ
 }

-fn (v &V) generate_vh() {
+// `mod` == "vlib/os"
+fn generate_vh(mod string) {
+	println('\n\n\n\nGenerating a V header file for module `$mod`')
+	vexe := os.executable()
+	full_mod_path := os.dir(vexe) + '/' + mod
+
+	mod_path := mod.replace('.', os.path_separator)
+	dir := if mod.starts_with('vlib') {
+		'$compiler.v_modules_path${os.path_separator}$mod'
+	} else {
+		'$compiler.v_modules_path${os.path_separator}$mod'
+	}
+	path := dir + '.vh'
+	pdir := dir.all_before_last(os.path_separator)
+	if !os.dir_exists(pdir) {
+		os.mkdir_all(pdir)
+		// os.mkdir(os.realpath(dir))
+	}
+	out := os.create(path) or { panic(err) }
+	// Consts
+	println(full_mod_path)
+	mut vfiles := os.walk_ext(full_mod_path, '.v')
+	filtered := vfiles.filter(!it.ends_with('test.v') && !it.ends_with('_win.v')) // TODO merge once filter allows it
+	println(filtered)
+	mut v := new_v(['foo.v'])
+	//v.pref.generating_vh = true
+	for file in filtered {
+		mut p := v.new_parser_from_file(file)
+		p.parse(.decl)
+		for i, tok in p.tokens {
+			if !p.tok.is_decl() {
+				continue
+			}
+			match tok.tok {
+				TokenKind.key_fn => { generate_fn(out, p.tokens, i) }
+				TokenKind.key_const => { generate_const(out, p.tokens, i) }
+			}
+		}
+	}
+}
+
+fn generate_fn(file os.File, tokens []Token, i int) {
+	mut out := strings.new_builder(100)
+	mut next := tokens[i+1]
+	if tokens[i-1].tok != .key_pub {
+		// Skip private fns
+		return
+	}
+
+	if next.tok == .name && next.lit == 'C' {
+		println('skipping C')
+		return
+	}
+	//out.write('pub ')
+	mut tok := tokens[i]
+	for i < tokens.len && tok.tok != .lcbr {
+		next = tokens[i+1]
+
+		out.write(tok.str())
+		if tok.tok != .lpar && !(next.tok in [.comma, .rpar]) {
+			// No space after (), [], etc
+			out.write(' ')
+		}
+		i++
+		tok = tokens[i]
+	}
+	file.writeln(out.str())
+}
+
+fn generate_const(file os.File, tokens []Token, i int) {
+	//mut out := strings.new_builder(100)
+
+}
+
+fn (v &V) generate_vh_old() {
 	println('\n\n\n\nGenerating a V header file for module `$v.mod`')
 	mod_path := v.mod.replace('.', os.path_separator)
 	dir := if v.dir.starts_with('vlib') {
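Only the file-collection step of generate_vh uses plain vlib calls; everything else depends on compiler internals. A small runnable sketch of just that step, with a hypothetical module directory:

import os

fn main() {
	// hypothetical module location, resolved next to the V executable as above
	full_mod_path := os.dir(os.executable()) + '/vlib/strings'
	vfiles := os.walk_ext(full_mod_path, '.v')
	// same filter as generate_vh: drop tests and Windows-only files
	filtered := vfiles.filter(!it.ends_with('test.v') && !it.ends_with('_win.v'))
	println(filtered)
}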
@@ -321,7 +321,7 @@ fn (p mut Parser) parse(pass Pass) {
 			p.check(.name)
 		}
 	case TokenKind.key_pub:
-		if p.peek() == .func {
+		if p.peek() == .key_fn {
 			p.fn_decl()
 		} else if p.peek() == .key_struct {
 			p.error('structs can\'t be declared public *yet*')
@@ -329,7 +329,7 @@ fn (p mut Parser) parse(pass Pass) {
 		} else {
 			p.error('wrong pub keyword usage')
 		}
-	case TokenKind.func:
+	case TokenKind.key_fn:
 		p.fn_decl()
 	case TokenKind.key_type:
 		p.type_decl()
@@ -816,8 +816,8 @@ fn (p mut Parser) struct_decl() {
 	p.check(.colon)
 	mut val := ''
 	match p.tok {
 		.name => { val = p.check_name() }
 		.str => { val = p.check_string() }
 		else => {
 			p.error('attribute value should be either name or string')
 		}
@@ -942,7 +942,7 @@ fn (p mut Parser) get_type() string {
 		return typ
 	}
 	// fn type
-	if p.tok == .func {
+	if p.tok == .key_fn {
 		mut f := Fn{name: '_', mod: p.mod}
 		p.next()
 		line_nr := p.scanner.line_nr
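For reference, the `fn` types this branch (and the identical branch in get_type2 further down) parses look like this in user code; a small self-contained sketch with made-up names:

fn double(x int) int {
	return x * 2
}

// `f fn (int) int` is a parameter whose type is a function type
fn apply(x int, f fn (int) int) int {
	return f(x)
}

fn main() {
	println(apply(21, double)) // 42
}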
@@ -2949,6 +2949,7 @@ fn (p mut Parser) string_expr() {
 	}
 	// '$age'! means the user wants this to be a tmp string (uses global buffer, no allocation,
 	// won't be used again)
 	// TODO remove this hack, do this automatically
 	if p.tok == .not {
 		p.check(.not)
 		p.gen('_STR_TMP($format$args)')
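The form the comment describes is an interpolated string followed by `!`. A hedged sketch of how it reads in the V of this commit (the generated _STR_TMP call reuses a global buffer, so the value must be consumed immediately and never stored):

fn main() {
	age := 25
	s := 'age: $age'      // ordinary interpolation, allocates a new string
	println(s)
	println('age: $age'!) // tmp-string form: no allocation, used and discarded right away
}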
@@ -3689,7 +3690,18 @@ fn (p mut Parser) match_statement(is_expr bool) string {
 	for p.tok != .rcbr {
 		if p.tok == .key_else {
 			p.check(.key_else)
-			p.check(.arrow)
+			if p.tok == .arrow {
+				/*
+				p.warn('=> is no longer needed in match statements, use\n' +
+				'match foo {
+					1 { bar }
+					2 { baz }
+					else { ... }
+				}')
+				*/
+
+				p.check(.arrow)
+			}

 			// unwrap match if there is only else
 			if i == 0 {
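This is the "simpler match statement" half of the commit: the `=>` is now consumed only when it is actually present, so the old spelling still parses while the commented-out warning above sketches the intended new syntax. A small self-contained sketch of that new form (names are placeholders):

fn describe(x int) string {
	mut s := ''
	// new, simpler form; the old `1 => { s = 'one' }` spelling still parses after this patch
	match x {
		1 { s = 'one' }
		2 { s = 'two' }
		else { s = 'other' }
	}
	return s
}

fn main() {
	println(describe(1)) // one
	println(describe(7)) // other
}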
@@ -4112,7 +4124,7 @@ fn (p mut Parser) attribute() {
 		p.attr = p.attr + ':' + p.check_name()
 	}
 	p.check(.rsbr)
-	if p.tok == .func || (p.tok == .key_pub && p.peek() == .func) {
+	if p.tok == .key_fn || (p.tok == .key_pub && p.peek() == .key_fn) {
 		p.fn_decl()
 		p.attr = ''
 		return
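The check above requires a fn (or pub fn) declaration right after the attribute's closing `]`; a minimal sketch using the existing [inline] attribute:

[inline]
fn square(x int) int {
	return x * x
}

fn main() {
	println(square(6)) // 36
}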
@@ -8,7 +8,7 @@ fn (p mut Parser) get_type2() Type {
 	mut typ := ''
 	mut cat := TypeCategory.struct_
 	// fn type
-	if p.tok == .func {
+	if p.tok == .key_fn {
 		mut f := Fn{name: '_', mod: p.mod}
 		p.next()
 		line_nr := p.scanner.line_nr
@@ -200,13 +200,6 @@ fn (f Fn) str() string {
 	return '$f.name($str_args) $f.typ'
 }

-pub fn (f Fn) v_fn_module() string {
-	return f.mod
-}
-pub fn (f Fn) v_fn_name() string {
-	return f.name.replace('${f.mod}__', '')
-}
-
 pub fn (t &Table) debug_fns() string {
 	mut s := strings.new_builder(1000)
 	for _, f in t.fns {
@@ -84,7 +84,7 @@ enum TokenKind {
 	key_enum
 	key_false
 	key_for
-	func
+	key_fn
 	key_global
 	key_go
 	key_goto
@@ -204,7 +204,7 @@ fn build_token_str() []string {
 	s[TokenKind.key_for] = 'for'
 	s[TokenKind.key_switch] = 'switch'
 	s[TokenKind.key_case] = 'case'
-	s[TokenKind.func] = 'fn'
+	s[TokenKind.key_fn] = 'fn'
 	s[TokenKind.key_true] = 'true'
 	s[TokenKind.key_false] = 'false'
 	s[TokenKind.key_continue] = 'continue'
@@ -252,12 +252,8 @@ fn (t TokenKind) str() string {
 }

 fn (t TokenKind) is_decl() bool {
-	// TODO i
-	//return t in [.key_enum, .key_interface, .func, .typ, .key_const,
-	//.key_import_const, .key_struct, .key_pub, .eof]
-	return t == .key_enum || t == .key_interface || t == .func ||
-		t == .key_struct || t == .key_type ||
-		t == .key_const || t == .key_import_const || t == .key_pub || t == .eof
+	return t in [TokenKind.key_enum, .key_interface, .key_fn,
+		.key_struct ,.key_type, .key_const, .key_import_const, .key_pub, .eof]
 }

 const (
@@ -284,3 +280,10 @@ fn (t []TokenKind) contains(val TokenKind) bool {
 	return false
 }
+
+fn (t Token) str() string {
+	if t.tok < .plus {
+		return t.lit // string, number etc
+	}
+	return t.tok.str()
+}
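The rewritten is_decl() above leans on the `in` operator over a TokenKind array (backed by the contains method shown in this hunk's context). A self-contained sketch of the same pattern with a stand-in enum:

enum Kind {
	decl_fn
	decl_struct
	decl_const
	op_plus
}

fn (k Kind) is_decl() bool {
	// same shape as the rewritten TokenKind.is_decl()
	return k in [Kind.decl_fn, .decl_struct, .decl_const]
}

fn main() {
	k := Kind.decl_fn
	p := Kind.op_plus
	println(k.is_decl()) // true
	println(p.is_decl()) // false
}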