compiler: update to get vweb working
parent 475807566d
commit 7158a012c3
@@ -163,29 +163,14 @@ fn (p mut Parser) comp_time() {
 	p.check(.lpar)
 	p.check(.rpar)
 	v_code := tmpl.compile_template(path)
-	if os.file_exists('.vwebtmpl.v') {
-		os.rm('.vwebtmpl.v')
+	is_strings_imorted := p.import_table.known_import('strings')
+	if !is_strings_imorted {
+		p.register_import('strings', 0) // used by v_code
 	}
-	os.write_file('.vwebtmpl.v', v_code.clone()) // TODO don't need clone, compiler bug
-	p.genln('')
-	// Parse the function and embed resulting C code in current function so that
-	// all variables are available.
-	pos := p.cgen.lines.len - 1
-	mut pp := p.v.new_parser_from_file('.vwebtmpl.v')
-	if !p.pref.is_debug {
-		os.rm('.vwebtmpl.v')
-	}
-	pp.is_vweb = true
-	pp.set_current_fn( p.cur_fn ) // give access too all variables in current function
-	pp.parse(.main)
-	pp.v.add_parser(pp)
-	tmpl_fn_body := p.cgen.lines.slice(pos + 2, p.cgen.lines.len).join('\n').clone()
-	end_pos := tmpl_fn_body.last_index('Builder_str( sb )') + 19 // TODO
-	p.cgen.lines = p.cgen.lines[..pos]
+	p.import_table.register_used_import('strings')
 	p.genln('/////////////////// tmpl start')
-	p.genln(tmpl_fn_body[..end_pos])
+	p.statements_from_text(v_code, false)
 	p.genln('/////////////////// tmpl end')
-	// `app.vweb.html(index_view())`
 	receiver := p.cur_fn.args[0]
 	dot := if receiver.is_mut { '->' } else { '.' }
 	p.genln('vweb__Context_html($receiver.name $dot vweb, tmpl_res)')
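Note (not part of the commit): for orientation, a minimal vweb handler of the era whose $vweb.html() call is what drives the comp_time() path changed above. The App/index names and the neighbouring index.html template are illustrative assumptions, not taken from this diff.

// illustrative sketch only, not from this commit
import vweb

struct App {
pub mut:
	vweb vweb.Context // receiver field used by the generated vweb__Context_html() call
}

fn (app mut App) index() {
	// $vweb.html() is expanded at compile time: compile_template() turns the
	// neighbouring index.html into V statements, which are now embedded here
	// via statements_from_text() instead of a temporary .vwebtmpl.v file.
	$vweb.html()
}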
@@ -89,6 +89,16 @@ const (
 	}
 )
 
+struct ParserState {
+	scanner_text string
+	tokens       []Token
+	token_idx    int
+	tok          TokenKind
+	prev_tok     TokenKind
+	prev_tok2    TokenKind
+	lit          string
+}
+
 // new parser from string. unique id specified in `id`.
 // tip: use a hashing function to auto generate `id` from `text` eg. sha1.hexhash(text)
 fn (v mut V) new_parser_from_string(text string) Parser {
@@ -265,6 +275,34 @@ fn (p &Parser) log(s string) {
 	*/
 }
 
+pub fn (p mut Parser) save_state() ParserState {
+	return ParserState{
+		scanner_text : p.scanner.text
+		tokens       : p.tokens
+		token_idx    : p.token_idx
+		tok          : p.tok
+		prev_tok     : p.prev_tok
+		prev_tok2    : p.prev_tok2
+		lit          : p.lit
+	}
+}
+
+pub fn (p mut Parser) restore_state(state ParserState) {
+	p.scanner.text = state.scanner_text
+	p.tokens = state.tokens
+	p.token_idx = state.token_idx
+	p.tok = state.tok
+	p.prev_tok = state.prev_tok
+	p.prev_tok2 = state.prev_tok2
+	p.lit = state.lit
+}
+
+fn (p mut Parser) clear_state() {
+	p.tokens = []
+	p.token_idx = 0
+	p.scanner.text = ''
+}
+
 pub fn (p mut Parser) add_text(text string) {
 	if p.tokens.len > 1 && p.tokens[p.tokens.len-1].tok == .eof {
 		p.tokens.delete(p.tokens.len-1)
@@ -273,6 +311,18 @@ pub fn (p mut Parser) add_text(text string) {
 	p.scan_tokens()
 }
 
+fn (p mut Parser) statements_from_text(text string, rcbr bool) {
+	saved_state := p.save_state()
+	p.clear_state()
+	p.add_text(text)
+	if rcbr {
+		p.statements()
+	} else {
+		p.statements_no_rcbr()
+	}
+	p.restore_state(saved_state)
+}
+
 fn (p mut Parser) parse(pass Pass) {
 	p.cgen.line = 0
 	p.cgen.file = cescaped_path(os.realpath(p.file_path))
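Note (not part of the commit): statements_from_text() above is the composition point for the new ParserState helpers: snapshot the parser, point it at the generated text, parse that as statements, then restore the snapshot so parsing of the original file continues where it left off. A self-contained toy sketch of this save/clear/parse/restore pattern, using hypothetical MiniParser/MiniState types rather than the compiler's real ones:

// toy types for illustration, not the compiler's Parser/ParserState
struct MiniState {
	text string
	pos  int
}

struct MiniParser {
mut:
	text string
	pos  int
}

fn (p &MiniParser) save_state() MiniState {
	return MiniState{ text: p.text, pos: p.pos }
}

fn (p mut MiniParser) restore_state(s MiniState) {
	p.text = s.text
	p.pos = s.pos
}

// Mirrors statements_from_text(): snapshot, swap in the generated text,
// parse it, then put the original source and position back.
fn (p mut MiniParser) parse_from_text(text string) {
	saved := p.save_state()
	p.text = text
	p.pos = 0
	// ... parse p.text here ...
	p.restore_state(saved)
}

fn main() {
	mut p := MiniParser{ text: 'original source', pos: 7 }
	p.parse_from_text('generated code')
	println(p.text) // 'original source' again
}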
@@ -28,7 +28,7 @@ pub fn compile_template(path string) string {
 	lines := html.split_into_lines()
 	mut s := strings.new_builder(1000)
 	base := path.all_after('/').replace('.html', '')
-	s.writeln('module main import strings fn ${base}_view() string { // this line will get removed becase only function body is embedded
+	s.writeln('
 mut sb := strings.new_builder(${lines.len * 30})
 header := \'$header\'
 _ = header
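Note (not part of the commit): with the 'module main import strings fn ${base}_view() ...' header line dropped, compile_template() now emits a bare statement list, which is why comp_time() registers the strings import itself and can hand v_code straight to statements_from_text(). For a one-line template the generated code is assumed to look roughly like the sketch below; the per-line writeln call and the final tmpl_res assignment are inferred from the surrounding hunks, not shown in this one.

// illustrative guess at compile_template() output for a template containing only '<h1>hello</h1>'
mut sb := strings.new_builder(30)
header := '' // contents of an optional header.html, empty here
_ = header
sb.writeln('<h1>hello</h1>')
tmpl_res := sb.str() // consumed by the generated vweb__Context_html() call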