// Copyright (c) 2019 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module compiler

import (
	os
	strings
	filepath
	//compiler.x64
	// time
)

struct Parser {
	file_path string // "/home/user/hello.v"
	file_name string // "hello.v"
	file_platform string // ".v", "_windows.v", "_nix.v", "_darwin.v", "_linux.v" ...
	// When p.file_pcguard != '', it contains a
	// C ifdef guard clause that must be put before
	// the #include directives in the parsed .v file
	file_pcguard string
	v &V
	pref &Preferences // Preferences shared from V struct
mut:
	scanner &Scanner
	tokens []Token
	token_idx int
	tok TokenKind
	prev_tok TokenKind
	prev_tok2 TokenKind // TODO remove these once the tokens are cached
	lit string
	cgen &CGen
	//x64 &x64.Gen
	table &Table
	import_table ImportTable // Holds imports for just the file being parsed
	pass Pass
	os OS
	inside_const bool
	expr_var Var
	has_immutable_field bool
	first_immutable_field Var
	assigned_type string // non-empty if we are in an assignment expression
	expected_type string
	tmp_cnt int
	builtin_mod bool
	inside_if_expr bool
	// inside_unwrapping_match bool
	inside_return_expr bool
	inside_unsafe bool
	is_struct_init bool
	is_var_decl bool
	if_expr_cnt int
	for_expr_cnt int // to detect whether `continue` can be used
	ptr_cast bool
	calling_c bool
	cur_fn Fn
	local_vars []Var // local function variables
	global_vars []Var // only for "script" programs without "fn main"
	var_idx int
	returns bool
	vroot string
	is_c_struct_init bool
	is_empty_c_struct_init bool // for `foo := C.Foo{}` => `Foo foo;`
	is_c_fn_call bool
	can_chash bool
	attr string
	v_script bool // "V bash", import all os functions into global space
	var_decl_name string // To allow declaring the variable so that it can be used in the struct initialization
	is_alloc bool // Whether current expression resulted in an allocation
	is_const_literal bool // `1`, `2.0` etc, so that `u64_var == 0` works
	in_dispatch bool // dispatching generic instance?
	is_vgen bool
	is_sql bool
	is_js bool
	sql_i int // $1 $2 $3
	sql_params []string // ("select * from users where id = $1", ***"100"***)
	sql_types []string // int, string and so on; see sql_params
	is_vh bool // parsing .vh file (for example `const (a int)` is allowed)
	generic_dispatch TypeInst
pub mut:
	mod string
}

const (
	max_module_depth = 5
	reserved_types = {
		'i8': true,
		'i16': true,
		'int': true,
		'i64': true,
		'i128': true,
		'byte': true,
		'u16': true,
		'u32': true,
		'u64': true,
		'u128': true,
		'f32': true,
		'f64': true,
		'rune': true,
		'byteptr': true,
		'voidptr': true
	}
)

struct ParserState {
	scanner_file_path string
	scanner_line_nr int
	scanner_text string
	scanner_pos int
	scanner_line_ends []int
	scanner_nlines int
	cgen_lines []string
	cgen_cur_line string
	cgen_tmp_line string
	cgen_is_tmp bool
	tokens []Token
	token_idx int
	tok TokenKind
	prev_tok TokenKind
	prev_tok2 TokenKind
	lit string
}

// new_parser_from_string creates a new parser from a string of V source code.
fn (v mut V) new_parser_from_string(text string) Parser {
	// line comment 1
	mut p := v.new_parser(new_scanner(text))
	p.scan_tokens() // same line comment
	return p
	// final comment
}

fn (v mut V) reset_cgen_file_line_parameters() {
	v.cgen.line = 0
	v.cgen.file = ''
	v.cgen.line_directives = v.pref.is_vlines
}

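// new_parser_from_file creates a parser for the .v file at `path`: it detects
// platform suffixes (_windows.v, _nix.v, ...) and `-d` define suffixes
// (_d_<define>.v), sets the matching C ifdef guard, and scans all tokens up front.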
fn (v mut V) new_parser_from_file(path string) Parser {
	v.reset_cgen_file_line_parameters()
	// println('new_parser("$path")')
	mut path_pcguard := ''
	mut path_platform := '.v'
	for path_ending in ['_lin.v', '_mac.v', '_win.v', '_nix.v', '_linux.v', '_darwin.v', '_windows.v'] {
		if path.ends_with(path_ending) {
			if path_ending == '_mac.v' {
				p := path_ending.replace('_mac.v', '_darwin.v')
				println('warning: use "$p" file name instead of "$path"')
			}
			if path_ending == '_lin.v' {
				p := path_ending.replace('_lin.v', '_linux.v')
				println('warning: use "$p" file name instead of "$path"')
			}
			if path_ending == '_win.v' {
				p := path_ending.replace('_win.v', '_windows.v')
				println('warning: use "$p" file name instead of "$path"')
			}
			path_platform = path_ending
			path_pcguard = v.platform_postfix_to_ifdefguard(path_ending)
			break
		}
	}

	if v.compile_defines.len > 0 {
		for cdefine in v.compile_defines {
			custom_path_ending := '_d_${cdefine}.v'
			if path.ends_with(custom_path_ending) {
				path_platform = custom_path_ending
				path_pcguard = v.platform_postfix_to_ifdefguard('custom $cdefine')
				break
			}
		}
	}

	mut p := v.new_parser(new_scanner_file(path))
	p = {
		p |
		file_path:path,
		file_name:path.all_after(os.path_separator),
		file_platform:path_platform,
		file_pcguard:path_pcguard,
		is_vh:path.ends_with('.vh'),
		v_script:path.ends_with('.vsh')
	}
	if p.v_script {
		println('new_parser: V script')
	}
	if p.pref.building_v {
		p.scanner.print_rel_paths_on_error = true
	}
	// if p.pref.generating_vh {
	// Keep newlines
	// p.scanner.is_vh = true
	// }
	p.scan_tokens()
	// p.scanner.debug_tokens()
	return p
}

// creates a new parser. most likely you will want to use
// `new_parser_from_file` or `new_parser_from_string` instead.
fn (v mut V) new_parser(scanner &Scanner) Parser {
	v.reset_cgen_file_line_parameters()
	mut p := Parser{
		scanner: scanner
		v: v
		table: v.table
		cur_fn: EmptyFn
		cgen: v.cgen
		//x64: v.x64
		pref: v.pref
		os: v.os
		vroot: v.vroot
		local_vars: [Var{}].repeat(MaxLocalVars)
		import_table: new_import_table()
	}
	$if js {
		p.is_js = true
	}
	if p.pref.is_repl {
		p.scanner.print_line_on_error = false
		p.scanner.print_colored_error = false
		p.scanner.print_rel_paths_on_error = true
	}
	return p
}

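// scan_tokens runs the scanner over the whole source and caches every token
// (kind, literal, line and position) in p.tokens, stopping at .eof.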
// __global scan_time i64
fn (p mut Parser) scan_tokens() {
	// t := time.ticks()
	for {
		res := p.scanner.scan()
		p.tokens << Token{
			tok: res.tok
			lit: res.lit
			line_nr: p.scanner.line_nr
			pos: p.scanner.pos
		}
		if res.tok == .eof {
			break
		}
	}
	// scan_time += time.ticks() - t
	// println('scan tokens $p.file_name $scan_time ')
}

fn (p mut Parser) set_current_fn(f Fn) {
	p.cur_fn = f
	// p.cur_fn = p.table.fns[f.name]
	p.scanner.fn_name = '${f.mod}.${f.name}'
}

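// next advances the parser to the next cached token, updating prev_tok and
// prev_tok2 and keeping the scanner and cgen line numbers in sync.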
fn (p mut Parser) next() {
	// Generate a formatted version of this token
	// (only when vfmt compile time flag is enabled, otherwise this function
	// is not even generated)
	p.fnext()
	p.prev_tok2 = p.prev_tok
	p.prev_tok = p.tok
	p.scanner.prev_tok = p.tok
	if p.token_idx >= p.tokens.len {
		p.tok = .eof
		p.lit = ''
		return
	}
	res := p.tokens[p.token_idx]
	p.token_idx++
	p.tok = res.tok
	p.lit = res.lit
	p.scanner.line_nr = res.line_nr
	p.cgen.line = res.line_nr
}

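// peek returns the kind of the upcoming token without consuming it,
// or .eof when fewer than two tokens remain.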
fn (p &Parser) peek() TokenKind {
	if p.token_idx >= p.tokens.len - 2 {
		return .eof
	}
	return p.tokens[p.token_idx].tok
	/*
	mut i := p.token_idx
	for i < p.tokens.len {
		tok := p.tokens[i]
		if tok.tok != .mline_comment && tok.tok != .line_comment {
			return tok.tok
		}
		i++
	}
	return .eof
	*/
}

// TODO remove dups
[inline]
fn (p &Parser) prev_token() Token {
	return p.tokens[p.token_idx - 2]
}

[inline]
fn (p &Parser) cur_tok() Token {
	return p.tokens[p.token_idx - 1]
}

[inline]
fn (p &Parser) peek_token() Token {
	if p.token_idx >= p.tokens.len - 2 {
		return Token{
			tok: .eof
		}
	}
	return p.tokens[p.token_idx]
}

fn (p &Parser) log(s string) {
}
/*
	if !p.pref.is_verbose {
		return
	}
	println(s)
*/

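// save_state snapshots the scanner, cgen and token state of the parser, so
// that a temporary parse of another text can later be undone with restore_state().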
pub fn (p &Parser) save_state() ParserState {
	return ParserState{
		scanner_file_path: p.scanner.file_path
		scanner_line_nr: p.scanner.line_nr
		scanner_text: p.scanner.text
		scanner_pos: p.scanner.pos
		scanner_line_ends: p.scanner.line_ends
		scanner_nlines: p.scanner.nlines
		cgen_lines: p.cgen.lines
		cgen_cur_line: p.cgen.cur_line
		cgen_tmp_line: p.cgen.tmp_line
		cgen_is_tmp: p.cgen.is_tmp
		tokens: p.tokens
		token_idx: p.token_idx
		tok: p.tok
		prev_tok: p.prev_tok
		prev_tok2: p.prev_tok2
		lit: p.lit
	}
}

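// restore_state restores a snapshot produced by save_state(); the scanner and
// cgen parts are only restored when the corresponding flag is true.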
pub fn (p mut Parser) restore_state(state ParserState, scanner bool, cgen bool) {
	if scanner {
		p.scanner.file_path = state.scanner_file_path
		p.scanner.line_nr = state.scanner_line_nr
		p.scanner.text = state.scanner_text
		p.scanner.pos = state.scanner_pos
		p.scanner.line_ends = state.scanner_line_ends
		p.scanner.nlines = state.scanner_nlines
	}
	if cgen {
		p.cgen.lines = state.cgen_lines
		p.cgen.cur_line = state.cgen_cur_line
		p.cgen.tmp_line = state.cgen_tmp_line
		p.cgen.is_tmp = state.cgen_is_tmp
	}
	p.tokens = state.tokens
	p.token_idx = state.token_idx
	p.tok = state.tok
	p.prev_tok = state.prev_tok
	p.prev_tok2 = state.prev_tok2
	p.lit = state.lit
}

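// clear_state resets the scanner and/or cgen to an empty state, so that fresh
// text can be scanned without leftovers from the current file.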
fn (p mut Parser) clear_state(scanner bool, cgen bool) {
	if scanner {
		p.scanner.line_nr = 0
		p.scanner.text = ''
		p.scanner.pos = 0
		p.scanner.line_ends = []
		p.scanner.nlines = 0
	}
	if cgen {
		p.cgen.lines = []
		p.cgen.cur_line = ''
		p.cgen.tmp_line = ''
		p.cgen.is_tmp = false
	}
	p.tokens = []
	p.token_idx = 0
	p.lit = ''
}

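// add_text appends more source text to the scanner (dropping a trailing .eof
// token first) and scans the new tokens, e.g. for statements_from_text().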
pub fn (p mut Parser) add_text(text string) {
	if p.tokens.len > 1 && p.tokens[p.tokens.len - 1].tok == .eof {
		p.tokens.delete(p.tokens.len - 1)
	}
	p.scanner.text = p.scanner.text + '\n' + text
	p.scan_tokens()
}

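// statements_from_text parses `text` as a block of statements, saving and
// restoring the current scanner state around it so the main parse can continue.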
fn (p mut Parser) statements_from_text(text string, rcbr bool, fpath string) {
	saved_state := p.save_state()
	p.clear_state(true, false)
	if fpath != '' {
		p.scanner.file_path = fpath
	}
	p.add_text(text)
	p.next()
	if rcbr {
		p.statements()
	}
	else {
		p.statements_no_rcbr()
	}
	p.restore_state(saved_state, true, false)
}

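// parse is the main entry point for a single file and pass: it handles the
// `module` clause, runs the import-only pass if requested, and then loops
// over all top level declarations until .eof.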
fn (p mut Parser) parse(pass Pass) {
	p.cgen.line = 0
	p.cgen.file = cescaped_path(os.realpath(p.file_path))
	// ///////////////////////////////////
	p.pass = pass
	p.token_idx = 0
	p.next()
	// p.log('\nparse() run=$p.pass file=$p.file_name tok=${p.strtok()}')// , "script_file=", script_file)
	// `module main` is not required if it's a single file program
	if p.pref.is_script || p.pref.is_test {
		// User may still specify `module main`
		if p.tok == .key_module {
			p.next()
			p.fspace()
			p.mod = p.check_name()
		}
		else {
			p.mod = 'main'
		}
	}
	else {
		p.check(.key_module)
		p.fspace()
		p.mod = p.check_name()
	}
	//
	p.fgen_nl()
	p.cgen.nogen = false
	if p.pref.build_mode == .build_module && p.mod != p.v.mod {
		// println('skipping $p.mod (v.mod = $p.v.mod)')
		p.cgen.nogen = true
		// defer { p.cgen.nogen = false }
	}
	p.fgen_nl()
	p.builtin_mod = p.mod == 'builtin'
	p.can_chash = p.mod in ['ui', 'uiold', 'darwin', 'clipboard', 'webview'] // TODO tmp remove
	// Import pass - the first and the smallest pass that only analyzes imports
	// if we are building a module, get the full module name from v.mod
	fq_mod := if p.pref.build_mode == .build_module && p.v.mod.ends_with(p.mod) { p.v.mod }
	// fully qualify the module name, eg base64 to encoding.base64
	else { p.table.qualify_module(p.mod, p.file_path) }
	p.table.register_module(fq_mod)
	p.mod = fq_mod
	if p.pass == .imports {
		for p.tok == .key_import && p.peek() != .key_const {
			p.imports()
		}
		if 'builtin' in p.table.imports {
			p.error('module `builtin` cannot be imported')
		}
		return
	}
	// Go through every top level token or throw a compilation error if a non-top level token is met
	for {
		match p.tok {
			.key_import {
				p.imports()
			}
			.key_enum {
				next := p.peek()
				if next == .name {
					p.enum_decl(false)
				}
				else if next == .lcbr && p.pref.translated {
					// enum without a name, only allowed in code,
					// translated from C. it's a very bad practice
					// in C as well, but is used unfortunately
					// (for example, by DOOM). such fields are
					// basically int consts
					p.enum_decl(true)
				}
			}
			.key_pub {
				next := p.peek()
				match next {
					.key_fn {
						p.fn_decl()
					}
					.key_const {
						p.const_decl()
					}
					.key_struct, .key_union, .key_interface {
						p.struct_decl([])
					}
					.key_enum {
						p.enum_decl(false)
					}
					.key_type {
						p.type_decl()
					}
					else {
						p.error('wrong pub keyword usage')
					}}
			}
			.key_fn {
				p.fn_decl()
			}
			.key_type {
				p.type_decl()
			}
			.lsbr {
				// `[` can only mean an [attribute] before a function
				// or a struct definition
				p.attribute()
			}
			.key_struct, .key_interface, .key_union, .lsbr {
				p.struct_decl([])
			}
			.key_const {
				p.const_decl()
			}
			.hash {
				// insert C code (only for ui module)
				// # puts("hello");
				p.chash()
			}
			.dollar {
				// $if, $else
				p.comp_time()
			}
			.key_global {
				if !p.pref.translated && !p.pref.is_live && !p.builtin_mod && !p.pref.building_v &&
					p.mod != 'ui' && p.mod != 'uiold' && !os.getwd().contains('/volt') &&
					!p.pref.enable_globals {
					p.error('use `v --enable-globals ...` to enable globals')
					// p.error('__global is only allowed in translated code')
				}
				p.next()
				p.fspace()
				name := p.check_name()
				p.fspace()
				typ := p.get_type()
				p.register_global(name, typ)
				// p.genln(p.table.cgen_name_type_pair(name, typ))
				mut g := p.table.cgen_name_type_pair(name, typ)
				if p.tok == .assign {
					p.next()
					g += ' = '
					_,expr := p.tmp_expr()
					g += expr
				}
				// p.genln('; // global')
				g += '; // global'
				if !p.cgen.nogen {
					p.cgen.consts << g
				}
				p.fgen_nl()
				if p.tok != .key_global {
					// An extra empty line to separate a block of globals
					p.fgen_nl()
				}
			}
			.eof {
				// p.log('end of parse()')
				// TODO: check why this was added? everything seems to work
				// without it, and it's already happening in fn_decl
				// if p.pref.is_script && !p.pref.is_test {
				// p.set_current_fn( MainFn )
				// p.check_unused_variables()
				// }
				if !p.first_pass() && !p.pref.is_repl {
					p.check_unused_imports()
				}
				p.gen_fmt() // not generated unless `-d vfmt` is provided
				return
			}
			else {
				// no `fn main`, add this "global" statement to cgen.fn_main
				if p.pref.is_script && !p.pref.is_test {
					// cur_fn is empty since there was no fn main declared
					// we need to set it to save and find variables
					if p.cur_fn.name == '' {
						p.set_current_fn(MainFn)
						if p.pref.is_repl {
							if p.first_pass() {
								return
							}
							p.clear_vars()
						}
					}
					start := p.cgen.lines.len
					p.statement(true)
					// if start > 0 && p.cgen.lines[start - 1] != '' &&
					// p.cgen.fn_main != '' {
					// start--
					// }
					p.genln('')
					end := p.cgen.lines.len
					lines := p.cgen.lines[start..end]
					// println('adding "' + lines.join('\n') + '"\n')
					// mut line := p.cgen.fn_main + lines.join('\n')
					// line = line.trim_space()
					p.cgen.fn_main += lines.join('\n')
					p.cgen.resetln('')
					for i := start; i < end; i++ {
						p.cgen.lines[i] = ''
					}
					p.fgen_nl()
				}
				else {
					p.error('unexpected token `${p.strtok()}`')
				}
			}}
	}
}

fn (p mut Parser) imports() {
	p.check(.key_import)
	// `import ()`
	if p.tok == .lpar {
		p.fspace()
		p.check(.lpar)
		p.fmt_inc()
		p.fgen_nl()
		for p.tok != .rpar && p.tok != .eof {
			p.import_statement()
			p.fgen_nl()
		}
		p.fmt_dec()
		p.check(.rpar)
		p.fgenln('\n')
		return
	}
	// `import foo`
	p.import_statement()
	p.fgen_nl()
	if p.tok != .key_import {
		p.fgen_nl()
	}
}

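// import_statement parses a single `import foo.bar [as baz]`, registers the
// alias in the file's import table and the module in the global table.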
fn (p mut Parser) import_statement() {
	p.fspace()
	if p.tok != .name {
		p.error('bad import format')
	}
	if p.peek() == .number {
		p.error('bad import format: module/submodule names cannot begin with a number')
	}
	import_tok_idx := p.token_idx - 1
	mut mod := p.check_name().trim_space()
	mut mod_alias := mod
	// submodule support
	mut depth := 1
	for p.tok == .dot {
		p.check(.dot)
		submodule := p.check_name()
		mod_alias = submodule
		mod += '.' + submodule
		depth++
		if depth > max_module_depth {
			p.error('module depth of $max_module_depth exceeded: $mod')
		}
	}
	// aliasing (import encoding.base64 as b64)
	if p.tok == .key_as && p.peek() == .name {
		p.fspace()
		p.check(.key_as)
		p.fspace()
		mod_alias = p.check_name()
	}
	// add import to file scope import table
	p.register_import_alias(mod_alias, mod, import_tok_idx)
	// Make sure there are no duplicate imports
	if mod in p.table.imports {
		return
	}
	// p.log('adding import $mod')
	p.table.imports << mod
	p.table.register_module(mod)
}

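// const_decl parses a `const ( ... )` block: each constant is registered in
// the table and emitted to cgen either as a #define (for compile-time
// literals) or as a global C constant initialized via consts_init.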
fn (p mut Parser) const_decl() {
	// println('const decl $p.file_path')
	is_pub := p.tok == .key_pub
	if is_pub {
		p.next()
		p.fspace()
	}
	p.inside_const = true
	p.check(.key_const)
	p.fspace()
	p.check(.lpar)
	p.fgen_nl()
	p.fmt_inc()
	for p.tok == .name {
		if p.lit == '_' && p.peek() == .assign && !p.cgen.nogen {
			p.gen_blank_identifier_assign()
			// if !p.cgen.nogen {
			p.cgen.consts_init << p.cgen.cur_line.trim_space()
			p.cgen.resetln('')
			// }
			continue
		}
		mut name := p.check_name() // `Age = 20`
		// if !p.pref.building_v && p.mod != 'os' && contains_capital(name) {
		// p.warn('const names cannot contain uppercase letters, use snake_case instead')
		// }
		name = p.prepend_mod(name)
		mut typ := ''
		if p.is_vh {
			// println('CONST VH $p.file_path')
			// .vh files may not have const values, just types: `const (a int)`
			if p.tok == .assign {
				p.next()
				// Otherwise parse the expression to get its type,
				// but don't generate it. Const's value is generated
				// in "module.o".
				p.cgen.nogen = true
				typ = p.expression()
				p.cgen.nogen = false
			}
			else {
				typ = p.get_type()
			}
			p.table.register_const(name, typ, p.mod, is_pub)
			p.cgen.consts << ('extern ' + p.table.cgen_name_type_pair(name, typ)) + ';'
			continue // Don't generate C code when building a .vh file
		}
		else {
			p.check_space(.assign)
			typ = p.expression()
		}
		if p.first_pass() && p.table.known_const(name) {
			p.error('redefinition of `$name`')
		}
		if p.first_pass() {
			p.table.register_const(name, typ, p.mod, is_pub)
		}
		// Check to see if this constant exists, and is void. If so, try and get the type again:
		if my_const := p.v.table.find_const(name) {
			if my_const.typ == 'void' {
				for i, v in p.v.table.consts {
					if v.name == name {
						p.v.table.consts[i].typ = typ
						break
					}
				}
			}
		}
		if p.pass == .main && p.cgen.nogen && p.pref.build_mode == .build_module {
			// We are building module `ui`, but are parsing `gx` right now
			// (because of nogen). We need to import gx constants with `extern`.
			// println('extern const mod=$p.mod name=$name')
			p.cgen.consts << ('extern ' + p.table.cgen_name_type_pair(name, typ)) + ';'
		}
		if p.pass == .main && !p.cgen.nogen {
			// TODO hack
			// cur_line has const's value right now. if it's just a number, then optimize generation:
			// output a #define so that we don't pollute the binary with unnecessary global vars
			// Do not do this when building a module, otherwise the consts
			// will not be accessible.
			if p.pref.build_mode != .build_module && is_compile_time_const(p.cgen.cur_line) {
				p.cgen.const_defines << '#define $name $p.cgen.cur_line'
				p.cgen.resetln('')
				p.fgen_nl()
				continue
			}
			if typ.starts_with('[') {
				p.cgen.consts << p.table.cgen_name_type_pair(name, typ) + ' = $p.cgen.cur_line;'
			}
			else {
				p.cgen.consts << p.table.cgen_name_type_pair(name, typ) + ';'
				// println('adding to init "$name"')
				p.cgen.consts_init << '$name = $p.cgen.cur_line;'
			}
			p.cgen.resetln('')
		}
		p.fgen_nl()
	}
	p.fmt_dec()
	p.check(.rpar)
	p.inside_const = false
	p.fgen_nl()
	p.fgen_nl()
}

// `type myint int`
// `type onclickfn fn(voidptr) int`
fn (p mut Parser) type_decl() {
	is_pub := p.tok == .key_pub
	if is_pub {
		p.next()
		p.fspace()
	}
	p.check(.key_type)
	p.fspace()
	mut name := p.check_name()
	p.fspace()
	// V used to have 'type Foo struct', many Go users might use this syntax
	if p.tok == .key_struct {
		p.error('use `struct $name {` instead of `type $name struct {`')
	}
	is_sum := p.tok == .assign
	if is_sum {
		p.next()
		p.fspace()
	}
	mut parent := Type{}
	// Sum type
	//is_sum := p.tok == .pipe
	if is_sum {
		if !p.builtin_mod && p.mod != 'main' {
			name = p.prepend_mod(name)
		}
		// Register the first child (name we already parsed)
		/*
		p.table.register_type(Type{
			parent: name
			name: parent.name // yeah it's not a parent here
			mod: p.mod
			is_public: is_pub
		})
		*/
		// Register the rest of them
		mut idx := 0
		mut done := false
		for {//p.tok == .pipe {
			idx++
			//p.next()
			child_type_name := p.check_name()
			//println('$idx $child_type_name')
			if p.tok != .pipe {
				done = true
			}
			if p.pass == .main {
				// Update the type's parent
				//println('child=$child_type_name parent=$name')
				mut t := p.find_type(child_type_name)
				if t.name == '' {
					p.error('unknown type `$child_type_name`')
				}
				t.parent = name
				p.table.rewrite_type(t)
				p.cgen.consts << '#define SumType_$child_type_name $idx // DEF2'
			}
			if done {
				break
			}
			p.fspace()
			p.check(.pipe)
			p.fspace()
			if p.tokens[p.token_idx - 2].line_nr < p.tokens[p.token_idx - 1].line_nr {
				p.fgenln('\t')
				//p.fgen_nl()
			}
		}
		if p.pass == .decl {
			p.table.sum_types << name
			// println(p.table.sum_types)
		}
		// Register the actual sum type
		// println('registering sum $name')
		p.table.register_type(Type{
			name: name
			mod: p.mod
			cat: .alias
			is_public: is_pub
		})
		p.gen_typedef('typedef struct {
void* obj;
int typ;
} $name;
')
	}
	else {
		parent = p.get_type2()
	}
	nt_pair := p.table.cgen_name_type_pair(name, parent.name)
	// TODO dirty C typedef hacks for DOOM
	// Unknown type probably means it's a struct, and it's used before the struct is defined,
	// so specify "struct"
	_struct := if parent.cat != .array && parent.cat != .func && !p.table.known_type(parent.name) { 'struct' } else { '' }
	if !is_sum {
		p.gen_typedef('typedef $_struct $nt_pair; //type alias name="$name" parent=`$parent.name`')
		p.table.register_type(Type{
			name: name
			parent: parent.name
			mod: p.mod
			cat: .alias
			is_public: is_pub
		})
	}
	//if p.tok != .key_type {
	p.fgen_nl()
	p.fgen_nl()
	//}
}

// current token is `(`
fn (p mut Parser) interface_method(field_name, receiver string) &Fn {
	mut method := &Fn{
		name: field_name
		is_interface: true
		is_method: true
		receiver_typ: receiver
	}
	// p.log('is interface. field=$field_name run=$p.pass')
	p.fn_args(mut method)
	prev_tok := p.prev_token()
	cur_tok := p.cur_tok()
	// No type on the same line, this method doesn't return a type, process next
	if prev_tok.line_nr != cur_tok.line_nr {
		method.typ = 'void'
	}
	else {
		method.typ = p.get_type() // method return type
		// p.fspace()
		p.fgen_nl()
	}
	return method
}

fn key_to_type_cat(tok TokenKind) TypeCategory {
	match tok {
		.key_interface {
			return .interface_
		}
		.key_struct {
			return .struct_
		}
		.key_union {
			return .union_
		}
		else {
		}}
	verror('Unknown token: $tok')
	return .builtin
}

// check_name checks for a name token and returns its literal
fn (p mut Parser) check_name() string {
	name := p.lit
	p.check(.name)
	return name
}

fn (p mut Parser) check_string() string {
	s := p.lit
	p.check(.str)
	return s
}

fn (p mut Parser) check_not_reserved() {
	if reserved_types[p.lit] {
		p.error("`$p.lit` can\'t be used as name")
	}
}

fn (p &Parser) strtok() string {
	if p.tok == .name {
		return p.lit
	}
	if p.tok == .number {
		return p.lit
	}
	if p.tok == .chartoken {
		if p.lit == '`' {
			return '`\\$p.lit`'
		}
		return '`$p.lit`'
	}
	if p.tok == .str {
		if p.lit.contains("'") && !p.lit.contains('"') {
			return '"$p.lit"'
		}
		else {
			return "'$p.lit'"
		}
	}
	if p.tok == .hash {
		return '#' + p.lit
	}
	res := p.tok.str()
	if res == '' {
		n := int(p.tok)
		return n.str()
	}
	return res
}

// same as check(), but adds a space to the formatter output
// TODO bad name
fn (p mut Parser) check_space(expected TokenKind) {
	p.fspace()
	p.check(expected)
	p.fspace()
}

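// check verifies that the current token is `expected` and advances to the
// next one; otherwise it reports a syntax error.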
fn (p mut Parser) check(expected TokenKind) {
	if p.tok != expected {
		// println('check()')
		s := 'syntax error: unexpected `${p.strtok()}`, expecting `${expected.str()}`'
		p.next()
		println('next token = `${p.strtok()}`')
		if p.pref.is_debug {
			print_backtrace()
		}
		p.error(s)
	}
	/*
	if expected == .rcbr {
		p.fmt_dec()
	}
	p.fgen(p.strtok())
	// vfmt: increase indentation on `{` unless it's `{}`
	if expected == .lcbr { //&& p.scanner.pos + 1 < p.scanner.text.len && p.scanner.text[p.scanner.pos + 1] != `}` {
		p.fgen_nl()
		p.fmt_inc()
	}
	*/
	p.next()
	// if p.scanner.line_comment != '' {
	// p.fgenln('// ! "$p.scanner.line_comment"')
	// p.scanner.line_comment = ''
	// }
}

[inline]
fn (p &Parser) first_pass() bool {
	return p.pass == .decl
}

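// get_type parses a type expression (multi-return tuples, fn types, arrays,
// maps, pointers, generics, Option types) and returns its internal string
// name, registering any derived types it encounters along the way.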
// TODO return Type instead of string?
fn (p mut Parser) get_type() string {
	mut mul := false
	mut nr_muls := 0
	mut typ := ''
	// multiple returns
	if p.tok == .lpar {
		// p.warn('`()` are no longer necessary in multiple returns' +
		// '\nuse `fn foo() int, int {` instead of `fn foo() (int, int) {`')
		// if p.inside_tuple {p.error('unexpected (')}
		// p.inside_tuple = true
		p.check(.lpar)
		mut types := []string
		for {
			types << p.get_type()
			if p.tok != .comma {
				break
			}
			p.check(.comma)
		}
		p.check(.rpar)
		// p.inside_tuple = false
		typ = p.register_multi_return_stuct(types)
		return typ
	}
	// fn type
	if p.tok == .key_fn {
		mut f := Fn{
			name: '_'
			mod: p.mod
		}
		p.next()
		line_nr := p.scanner.line_nr
		p.fn_args(mut f)
		// Same line, it's a return type
		if p.scanner.line_nr == line_nr {
			if p.tok in [.name, .mul, .amp, .lsbr, .question, .lpar] {
				f.typ = p.get_type()
			}
			else {
				f.typ = 'void'
			}
			// println('fn return typ=$f.typ')
		}
		else {
			f.typ = 'void'
		}
		// Register anon fn type
		fn_typ := Type{
			name: f.typ_str() // 'fn (int, int) string'
			mod: p.mod
			func: f
		}
		p.table.register_type(fn_typ)
		return f.typ_str()
	}
	// arrays ([]int)
	mut arr_level := 0
	is_question := p.tok == .question
	if is_question {
		p.check(.question)
	}
	for p.tok == .lsbr {
		p.check(.lsbr)
		// [10]int
		if p.tok == .number {
			typ += '[$p.lit]'
			p.next()
		}
		else {
			arr_level++
		}
		p.check(.rsbr)
	}
	// map[string]int
	if !p.builtin_mod && p.tok == .name && p.lit == 'map' {
		p.next()
		p.check(.lsbr)
		key_type := p.check_name()
		if key_type != 'string' {
			p.error('maps only support string keys for now')
		}
		p.check(.rsbr)
		val_type := p.get_type() // p.check_name()
		typ = 'map_${stringify_pointer(val_type)}'
		p.register_map(typ)
		return typ
	}
	// ptr/ref
	mut warn := false
	for p.tok == .mul {
		if p.first_pass() {
			warn = true
		}
		mul = true
		nr_muls++
		p.check(.mul)
	}
	if p.tok == .amp {
		mul = true
		nr_muls++
		p.check(.amp)
	}
	// generic type check
	ti := p.generic_dispatch.inst
	if p.lit in ti.keys() {
		typ += ti[p.lit]
	}
	else {
		typ += p.lit
	}
	// C.Struct import
	if p.lit == 'C' && p.peek() == .dot {
		p.next()
		p.check(.dot)
		typ = p.lit
	}
	else {
		if warn && p.mod != 'ui' {
			p.warn('use `&Foo` instead of `*Foo`')
		}
		// Module specified? (e.g. gx.Image)
		if p.peek() == .dot {
			// try resolve full submodule
			if !p.builtin_mod && p.import_table.known_alias(typ) {
				mod := p.import_table.resolve_alias(typ)
				if mod.contains('.') {
					typ = mod_gen_name(mod)
				}
			}
			p.next()
			p.check(.dot)
			typ += '__$p.lit'
		}
		mut t := p.table.find_type(typ)
		// "typ" not found? try "mod__typ"
		if t.name == '' && !p.builtin_mod {
			// && !p.first_pass() {
			if !typ.contains('array_') && p.mod != 'main' && !typ.contains('__') && !typ.starts_with('[') {
				typ = p.prepend_mod(typ)
			}
			t = p.table.find_type(typ)
			if t.name == '' && !p.pref.translated && !p.first_pass() && !typ.starts_with('[') {
				// println('get_type() bad type')
				// println('all registered types:')
				// for q in p.table.types {
				// println(q.name)
				// }
				mut t_suggest,tc_suggest := p.table.find_misspelled_type(typ, p, 0.50)
				if t_suggest.len > 0 {
					t_suggest = '. did you mean: ($tc_suggest) `$t_suggest`'
				}
				econtext := if p.pref.is_debug { '('+@FILE+':'+@LINE+')' } else {''}
				p.error('unknown type `$typ`$t_suggest $econtext')
			}
		}
		else if !t.is_public && t.mod != p.mod && !p.is_vgen && t.name != '' && !p.first_pass() {
			p.error('type `$t.name` is private')
		}
	}
	// generic struct init
	if p.peek() == .lt {
		p.next()
		p.check(.lt)
		typ = '${typ}_T'
		for {
			type_param := p.check_name()
			if type_param in p.generic_dispatch.inst {
				typ = '${typ}_' + p.generic_dispatch.inst[type_param]
			}
			if p.tok != .comma {
				break
			}
			p.check(.comma)
		}
		p.check(.gt)
		return typ
	}
	if typ == 'void' {
		p.error('unknown type `$typ`')
	}
	if mul {
		typ += strings.repeat(`*`, nr_muls)
	}
	// Register an []array type
	if arr_level > 0 {
		// p.log('ARR TYPE="$typ" run=$p.pass')
		// We come across "[]User" etc ?
		for i := 0; i < arr_level; i++ {
			typ = 'array_${stringify_pointer(typ)}'
		}
		p.register_array(typ)
	}
	p.next()
	if is_question {
		typ = 'Option_${stringify_pointer(typ)}'
		p.table.register_type_with_parent(typ, 'Option')
	}
	// Because the code uses * to see if it's a pointer
	if typ == 'byteptr' {
		return 'byte*'
	}
	if typ == 'voidptr' {
		// if !p.builtin_mod && p.mod != 'os' && p.mod != 'gx' && p.mod != 'gg' && !p.pref.translated {
		// p.error('voidptr can only be used in unsafe code')
		// }
		return 'void*'
	}
	/*
	TODO this is not needed?
	if typ.last_index('__') > typ.index('__') {
		p.error('2 __ in gettype(): typ="$typ"')
	}
	*/
	return typ
}

fn (p &Parser) print_tok() {
	if p.tok == .name {
		println(p.lit)
		return
	}
	if p.tok == .str {
		println('"$p.lit"')
		return
	}
	println(p.tok.str())
}

// statements() returns the type of the last statement
fn (p mut Parser) statements() string {
	// p.log('statements()')
	typ := p.statements_no_rcbr()
	if !p.inside_if_expr {
		p.genln('}')
	}
	// if p.fileis('if_expr') {
	// println('statements() ret=$typ line=$p.scanner.line_nr')
	// }
	return typ
}

fn (p mut Parser) statements_no_rcbr() string {
	p.open_scope()
	//if !p.inside_if_expr {
	// p.genln('')
	//}
	mut i := 0
	mut last_st_typ := ''
	for p.tok != .rcbr && p.tok != .eof {
		// println('stm: '+p.tok.str()+', next: '+p.peek().str())
		last_st_typ = p.statement(true)
		// println('last st typ=$last_st_typ')
		if !p.inside_if_expr {
			// p.genln('')// // end st tok= ${p.strtok()}')
			// p.fgenln('// ST')
			p.fgen_nl()
		}
		i++
		if i > 50000 {
			p.cgen.save()
			p.error('more than 50 000 statements in function `$p.cur_fn.name`')
		}
	}
	// p.next()
	if p.inside_if_expr {
		p.fspace()
	}
	p.check(.rcbr)
	// p.fmt_dec()
	p.close_scope()
	return last_st_typ
}

fn (p mut Parser) close_scope() {
|
|
|
|
// println('close_scope level=$f.scope_level var_idx=$f.var_idx')
|
2019-09-08 17:53:40 +02:00
|
|
|
// Move back `var_idx` (pointer to the end of the array) till we reach
|
|
|
|
// the previous scope level. This effectively deletes (closes) the current
|
|
|
|
// scope.
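// Rough illustration with hypothetical values: if local_vars holds
// [a (level 1), b (level 2), c (level 2)] and var_idx is 3, closing
// level 2 walks i back until it reaches `a`, then sets var_idx to 1,
// so `b` and `c` are no longer visible.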
|
2019-09-23 19:34:08 +02:00
|
|
|
mut i := p.var_idx - 1
|
2019-08-02 05:20:18 +02:00
|
|
|
for ; i >= 0; i-- {
|
2019-09-23 19:34:08 +02:00
|
|
|
v := p.local_vars[i]
|
2019-12-18 02:34:50 +01:00
|
|
|
if p.pref.autofree && (v.is_alloc || (v.is_arg && v.typ == 'string')) {
|
|
|
|
// && !p.pref.is_test {
|
2019-11-25 06:38:00 +01:00
|
|
|
p.free_var(v)
|
2019-07-21 12:43:47 +02:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
// if p.fileis('mem.v') {
|
|
|
|
// println(v.name + ' $v.is_arg scope=$v.scope_level cur=$p.cur_fn.scope_level')}
|
|
|
|
if v.scope_level != p.cur_fn.scope_level {
|
|
|
|
// && !v.is_arg {
|
2019-11-25 06:38:00 +01:00
|
|
|
break
|
2019-09-13 15:27:01 +02:00
|
|
|
}
|
2019-08-07 17:51:21 +02:00
|
|
|
}
|
|
|
|
if p.cur_fn.defer_text.last() != '' {
|
|
|
|
p.genln(p.cur_fn.defer_text.last())
|
2019-12-18 02:34:50 +01:00
|
|
|
// p.cur_fn.defer_text[f] = ''
|
2019-07-21 12:43:47 +02:00
|
|
|
}
|
2019-09-09 15:22:39 +02:00
|
|
|
p.cur_fn.scope_level--
|
2019-10-27 08:03:15 +01:00
|
|
|
p.cur_fn.defer_text = p.cur_fn.defer_text[..p.cur_fn.scope_level + 1]
|
2019-09-23 19:34:08 +02:00
|
|
|
p.var_idx = i + 1
|
2019-07-21 12:43:47 +02:00
|
|
|
// println('close_scope new var_idx=$f.var_idx\n')
|
2019-08-07 17:51:21 +02:00
|
|
|
}
|
2019-07-21 12:43:47 +02:00
|
|
|
|
2019-11-25 06:38:00 +01:00
|
|
|
fn (p mut Parser) free_var(v Var) {
|
|
|
|
// Clean up memory, only do this if -autofree was passed for now
|
2019-12-18 02:34:50 +01:00
|
|
|
// if p.fileis('mem.v') {println('free_var() $v.name')}
|
|
|
|
// println(p.cur_fn.name)
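// Sketch of the effect under -autofree (names are illustrative): a local
// `s string` that was allocated in this scope gets a
// `v_string_free(s); // close_scope free` line emitted into the generated C
// when the scope closes; arrays use v_array_free and plain pointers use
// v_ptr_free, as selected below.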
|
2019-12-18 05:52:14 +01:00
|
|
|
if p.cur_fn.name in ['add', 'clone', 'free'] {
|
2019-12-12 02:09:31 +01:00
|
|
|
return
|
|
|
|
}
|
2019-11-25 06:38:00 +01:00
|
|
|
mut free_fn := 'free'
|
|
|
|
if v.typ.starts_with('array_') {
|
|
|
|
free_fn = 'v_array_free'
|
2019-12-18 02:34:50 +01:00
|
|
|
}
|
|
|
|
else if v.typ == 'string' {
|
2019-11-25 06:38:00 +01:00
|
|
|
free_fn = 'v_string_free'
|
2019-12-18 02:34:50 +01:00
|
|
|
// if p.fileis('str.v') {
|
|
|
|
// println('freeing str $v.name')
|
|
|
|
// }
|
|
|
|
// continue
|
|
|
|
}
|
|
|
|
else if v.ptr || v.typ.ends_with('*') {
|
2019-11-25 06:38:00 +01:00
|
|
|
free_fn = 'v_ptr_free'
|
2019-12-18 02:34:50 +01:00
|
|
|
// continue
|
|
|
|
}
|
|
|
|
else {
|
2019-11-25 06:38:00 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
if p.returns {
|
|
|
|
// Don't free a variable that's being returned
|
2019-12-18 02:34:50 +01:00
|
|
|
if !v.is_returned && v.typ != 'FILE*' {
|
|
|
|
// !v.is_c {
|
|
|
|
// p.cgen.cur_line = '/* free */' + p.cgen.cur_line
|
|
|
|
// p.cgen.set_placeholder(0, '/*free2*/')
|
|
|
|
prev_line := p.cgen.lines[p.cgen.lines.len - 1]
|
2019-12-12 02:09:31 +01:00
|
|
|
free := '$free_fn ($v.name); /* :) close_scope free $v.typ */'
|
2019-12-18 02:34:50 +01:00
|
|
|
p.cgen.lines[p.cgen.lines.len - 1] = free + '\n' + prev_line
|
|
|
|
// '$free_fn ($v.name); /* :) close_scope free $v.typ */\n' + prev_line
|
2019-11-25 06:38:00 +01:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
}
|
|
|
|
else if p.mod != 'strings' {
|
|
|
|
// && p.mod != 'builtin' {
|
2019-12-12 02:09:31 +01:00
|
|
|
/*
|
|
|
|
prev_line := p.cgen.lines[p.cgen.lines.len-1]
|
|
|
|
free := '$free_fn ($v.name); /* :) close_scope free $v.typ */'
|
|
|
|
p.cgen.lines[p.cgen.lines.len-1] = free + '\n' + prev_line
|
|
|
|
*/
|
2019-12-18 02:34:50 +01:00
|
|
|
// if p.fileis('mem.v') {println(v.name)}
|
2019-12-08 20:22:47 +01:00
|
|
|
p.genln('$free_fn ($v.name); // close_scope free')
|
2019-11-25 06:38:00 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-22 20:20:28 +02:00
|
|
|
fn (p mut Parser) genln(s string) {
|
|
|
|
p.cgen.genln(s)
|
|
|
|
}
|
|
|
|
|
|
|
|
fn (p mut Parser) gen(s string) {
|
|
|
|
p.cgen.gen(s)
|
|
|
|
}
|
|
|
|
|
|
|
|
// statement() parses a single statement and returns its type;
// when `add_semi` is set, a `;` is appended to the generated C.
|
|
|
|
fn (p mut Parser) statement(add_semi bool) string {
|
2019-10-24 14:41:29 +02:00
|
|
|
p.expected_type = ''
|
2019-11-26 07:23:11 +01:00
|
|
|
if p.returns {
|
2019-08-10 10:26:42 +02:00
|
|
|
p.error('unreachable code')
|
|
|
|
}
|
2019-10-20 09:19:37 +02:00
|
|
|
// if !p.in_dispatch {
|
2019-12-18 02:34:50 +01:00
|
|
|
p.cgen.is_tmp = false
|
2019-10-20 09:19:37 +02:00
|
|
|
// }
|
2019-06-22 20:20:28 +02:00
|
|
|
tok := p.tok
|
|
|
|
mut q := ''
|
2019-10-24 15:02:24 +02:00
|
|
|
match tok {
|
2019-12-18 02:34:50 +01:00
|
|
|
.name {
|
|
|
|
next := p.peek()
|
|
|
|
// if p.pref.is_verbose {
|
|
|
|
// println(next.str())
|
|
|
|
// }
|
|
|
|
// goto_label:
|
|
|
|
if p.peek() == .colon {
|
|
|
|
p.fmt_dec()
|
|
|
|
label := p.check_name()
|
|
|
|
p.fmt_inc()
|
|
|
|
p.genln(label + ': ;')
|
|
|
|
p.check(.colon)
|
|
|
|
return ''
|
|
|
|
}
|
|
|
|
// `a := 777`
|
|
|
|
else if p.peek() == .decl_assign || p.peek() == .comma {
|
|
|
|
p.check_not_reserved()
|
|
|
|
// p.log('var decl')
|
|
|
|
p.var_decl()
|
|
|
|
}
|
|
|
|
// `_ = 777`
|
|
|
|
else if p.lit == '_' && p.peek() == .assign {
|
|
|
|
p.gen_blank_identifier_assign()
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
// panic and exit count as returns since they stop the function
|
|
|
|
is_panic := p.lit == 'panic' || p.lit == 'exit'
|
|
|
|
if is_panic {
|
|
|
|
p.returns = true
|
|
|
|
}
|
|
|
|
// `a + 3`, `a(7)`, or just `a`
|
|
|
|
q = p.bool_expression()
|
|
|
|
// Fix "control reaches end of non-void function" error
|
|
|
|
if is_panic && p.cur_fn.typ == 'bool' {
|
|
|
|
p.genln(';\nreturn false;')
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
.key_goto {
|
|
|
|
p.check(.key_goto)
|
|
|
|
p.fspace()
|
2019-06-22 20:20:28 +02:00
|
|
|
label := p.check_name()
|
2019-12-18 02:34:50 +01:00
|
|
|
p.genln('goto $label;')
|
|
|
|
return ''
|
|
|
|
}
|
|
|
|
.key_defer {
|
|
|
|
p.defer_st()
|
|
|
|
return ''
|
|
|
|
}
|
|
|
|
.hash {
|
|
|
|
p.chash()
|
2019-06-22 20:20:28 +02:00
|
|
|
return ''
|
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
.key_unsafe {
|
|
|
|
p.next()
|
|
|
|
p.inside_unsafe = true
|
|
|
|
p.check(.lcbr)
|
|
|
|
p.genln('{')
|
|
|
|
p.statements()
|
|
|
|
p.inside_unsafe = false
|
|
|
|
// p.check(.rcbr)
|
|
|
|
}
|
|
|
|
.dollar {
|
|
|
|
p.comp_time()
|
|
|
|
}
|
|
|
|
.key_if {
|
|
|
|
p.if_statement(false, 0)
|
|
|
|
}
|
|
|
|
.key_for {
|
|
|
|
p.for_st()
|
|
|
|
}
|
|
|
|
.key_switch {
|
|
|
|
p.switch_statement()
|
|
|
|
}
|
|
|
|
.key_match {
|
|
|
|
p.match_statement(false)
|
|
|
|
}
|
2019-12-19 22:29:37 +01:00
|
|
|
.key_mut, .key_static {
|
2019-06-22 20:20:28 +02:00
|
|
|
p.var_decl()
|
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
.key_return {
|
|
|
|
p.return_st()
|
2019-09-25 13:53:44 +02:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
.lcbr {
|
|
|
|
// {} block
|
|
|
|
// Do not allow {} block to start on the same line
|
|
|
|
// to avoid e.g. `foo() {` instead of `if foo() {`
|
|
|
|
if p.prev_token().line_nr == p.scanner.line_nr {
|
|
|
|
p.genln('')
|
|
|
|
p.error('{} block has to start on a new line')
|
2019-08-09 08:17:31 +02:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
p.check(.lcbr)
|
|
|
|
if p.tok == .rcbr {
|
|
|
|
p.error('empty statements block')
|
2019-12-04 22:14:23 +01:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
p.genln('{')
|
|
|
|
p.statements()
|
|
|
|
return ''
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
.key_continue {
|
|
|
|
if p.for_expr_cnt == 0 {
|
|
|
|
p.error('`continue` statement outside `for`')
|
|
|
|
}
|
|
|
|
p.genln('continue')
|
|
|
|
p.check(.key_continue)
|
2019-12-12 18:22:11 +01:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
.key_break {
|
|
|
|
if p.for_expr_cnt == 0 {
|
|
|
|
p.error('`break` statement outside `for`')
|
|
|
|
}
|
|
|
|
p.genln('break')
|
|
|
|
p.check(.key_break)
|
2019-11-29 09:11:53 +01:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
.key_go {
|
|
|
|
p.go_statement()
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
.key_assert {
|
|
|
|
p.assert_statement()
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
.key_asm {
|
|
|
|
p.inline_asm()
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
else {
|
2019-12-18 02:34:50 +01:00
|
|
|
// An expression as a statement
|
|
|
|
typ := p.expression()
|
|
|
|
if p.inside_if_expr {
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
p.genln('; ')
|
|
|
|
}
|
|
|
|
return typ
|
|
|
|
}}
|
2019-06-22 20:20:28 +02:00
|
|
|
// ? : uses , as statement separators
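// e.g. (sketch) `x := if cond { a } else { b }` is generated as a C ternary,
// so consecutive statements inside the branches are joined with `,` here
// instead of being terminated with `;`.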
|
2019-07-07 22:30:15 +02:00
|
|
|
if p.inside_if_expr && p.tok != .rcbr {
|
2019-06-22 20:20:28 +02:00
|
|
|
p.gen(', ')
|
|
|
|
}
|
|
|
|
if add_semi && !p.inside_if_expr {
|
|
|
|
p.genln(';')
|
|
|
|
}
|
|
|
|
return q
|
|
|
|
// p.cgen.end_statement()
|
|
|
|
}
|
|
|
|
// is_map: are we in map assignment? (m[key] = val) if yes, don't generate '='
|
|
|
|
// this can be `user = ...` or `user.field = ...`, in both cases `v` is `user`
|
2019-12-18 08:17:07 +01:00
|
|
|
fn (p mut Parser) assign_statement(v Var, ph int, is_map bool) {
|
2019-09-29 19:37:39 +02:00
|
|
|
errtok := p.cur_tok_index()
|
2019-08-17 21:19:37 +02:00
|
|
|
is_vid := p.fileis('vid') // TODO remove
|
2019-06-22 20:20:28 +02:00
|
|
|
tok := p.tok
|
2019-12-18 02:34:50 +01:00
|
|
|
// if !v.is_mut && !v.is_arg && !p.pref.translated && !v.is_global{
|
2019-08-07 13:00:19 +02:00
|
|
|
if !v.is_mut && !p.pref.translated && !v.is_global && !is_vid {
|
2019-08-08 06:55:08 +02:00
|
|
|
if v.is_arg {
|
|
|
|
if p.cur_fn.args.len > 0 && p.cur_fn.args[0].name == v.name {
|
|
|
|
println('make the receiver `$v.name` mutable:
|
2019-12-12 02:09:31 +01:00
|
|
|
fn ($v.name mut $v.typ) ${p.cur_fn.name}(...) {
|
2019-08-17 21:19:37 +02:00
|
|
|
')
|
|
|
|
}
|
|
|
|
}
|
2019-09-25 16:59:50 +02:00
|
|
|
p.error('`$v.name` is immutable')
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-07-25 13:16:17 +02:00
|
|
|
if !v.is_changed {
|
2019-09-09 15:22:39 +02:00
|
|
|
p.mark_var_changed(v)
|
2019-08-17 21:19:37 +02:00
|
|
|
}
|
2019-12-12 02:09:31 +01:00
|
|
|
is_str := p.expected_type == 'string'
|
|
|
|
is_ustr := p.expected_type == 'ustring'
|
2019-10-24 15:02:24 +02:00
|
|
|
match tok {
|
2019-12-18 02:34:50 +01:00
|
|
|
.assign {
|
|
|
|
if !is_map && !p.is_empty_c_struct_init {
|
|
|
|
p.gen(' = ')
|
|
|
|
}
|
2019-09-20 18:07:38 +02:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
.plus_assign {
|
|
|
|
if is_str && !p.is_js {
|
|
|
|
expr := p.cgen.cur_line
|
|
|
|
p.gen('= string_add($expr, ')
|
|
|
|
}
|
|
|
|
else if is_ustr {
|
|
|
|
p.gen('= ustring_add($v.name, ')
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
next := p.peek_token()
|
|
|
|
if next.tok == .number && next.lit == '1' {
|
|
|
|
p.error('use `++` instead of `+= 1`')
|
|
|
|
}
|
|
|
|
p.gen(' += ')
|
2019-12-08 12:11:19 +01:00
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
else {
|
|
|
|
p.gen(' ' + p.tok.str() + ' ')
|
2020-01-12 01:46:25 +01:00
|
|
|
}
|
|
|
|
}
|
2019-07-03 13:20:43 +02:00
|
|
|
p.fspace()
|
2019-06-22 20:20:28 +02:00
|
|
|
p.next()
|
2019-11-09 20:05:44 +01:00
|
|
|
p.fspace()
|
2019-06-22 20:20:28 +02:00
|
|
|
pos := p.cgen.cur_line.len
|
2020-01-12 01:46:25 +01:00
|
|
|
expr_tok := p.cur_tok_index()
|
2019-11-26 07:07:35 +01:00
|
|
|
p.is_var_decl = true
|
2019-06-22 20:20:28 +02:00
|
|
|
expr_type := p.bool_expression()
|
2019-11-26 07:07:35 +01:00
|
|
|
p.is_var_decl = false
|
2019-12-18 02:34:50 +01:00
|
|
|
// if p.expected_type.starts_with('array_') {
|
|
|
|
// p.warn('expecting array got $expr_type')
|
|
|
|
// }
|
2019-11-04 12:35:10 +01:00
|
|
|
if expr_type == 'void' {
|
2020-01-12 01:46:25 +01:00
|
|
|
_,fn_name := p.is_expr_fn_call(expr_tok+1)
|
|
|
|
p.error_with_token_index('${fn_name}() $err_used_as_value', expr_tok)
|
2019-11-04 12:35:10 +01:00
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
// Allow `num = 4` where `num` is an `?int`
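// e.g. (sketch) for `num = 4` where `num` is `?int`, the right-hand side is
// rewritten below to `opt_ok((int[]){ 4 }, sizeof(int))` in the generated C.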
|
2019-12-21 23:44:16 +01:00
|
|
|
if p.assigned_type.starts_with('Option_') && expr_type == parse_pointer(p.assigned_type['Option_'.len..]) {
|
2019-10-27 08:03:15 +01:00
|
|
|
expr := p.cgen.cur_line[pos..]
|
|
|
|
left := p.cgen.cur_line[..pos]
|
2019-12-21 23:44:16 +01:00
|
|
|
typ := parse_pointer(expr_type.replace('Option_', ''))
|
2019-12-16 22:36:28 +01:00
|
|
|
p.cgen.resetln(left + 'opt_ok(($typ[]){ $expr }, sizeof($typ))')
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-12-21 23:44:16 +01:00
|
|
|
else if expr_type.starts_with('Option_') && p.assigned_type == parse_pointer(expr_type['Option_'.len..]) && p.tok == .key_orelse {
|
2019-11-26 07:07:35 +01:00
|
|
|
line := p.cgen.cur_line
|
2019-12-18 02:34:50 +01:00
|
|
|
vname := line[..pos].replace('=', '') // TODO cgen line hack
|
|
|
|
if idx := line.index('=') {
|
|
|
|
p.cgen.resetln(line.replace(line[..idx + 1], ''))
|
2019-11-30 11:09:05 +01:00
|
|
|
p.gen_handle_option_or_else(expr_type, vname, ph)
|
|
|
|
}
|
2019-11-26 07:07:35 +01:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
else if expr_type[0] == `[` {
|
2019-11-29 09:11:53 +01:00
|
|
|
// assignment to a fixed_array `mut a:=[3]int a=[1,2,3]!!`
|
2019-11-26 07:07:35 +01:00
|
|
|
expr := p.cgen.cur_line[pos..].all_after('{').all_before('}') // TODO cgen line hack
|
2019-10-27 08:03:15 +01:00
|
|
|
left := p.cgen.cur_line[..pos].all_before('=')
|
|
|
|
cline_pos := p.cgen.cur_line[pos..]
|
2019-10-06 02:07:02 +02:00
|
|
|
etype := cline_pos.all_before(' {')
|
|
|
|
if p.assigned_type != p.expected_type {
|
2019-12-18 02:34:50 +01:00
|
|
|
p.error_with_token_index('incompatible types: $p.assigned_type != $p.expected_type', errtok)
|
2019-10-06 02:07:02 +02:00
|
|
|
}
|
2019-10-21 13:14:28 +02:00
|
|
|
p.cgen.resetln('memcpy( (& $left), ($etype{$expr}), sizeof( $left ) );')
|
2019-11-06 04:16:12 +01:00
|
|
|
}
|
2019-11-09 19:57:36 +01:00
|
|
|
// check type for <<= >>= %= ^= &= |=
|
2019-12-18 05:52:14 +01:00
|
|
|
else if tok in [.left_shift_assign, .righ_shift_assign, .mod_assign, .xor_assign, .and_assign, .or_assign] {
|
2019-11-06 01:37:32 +01:00
|
|
|
if !is_integer_type(p.assigned_type) {
|
2019-12-18 02:34:50 +01:00
|
|
|
p.error_with_token_index('cannot use ${tok.str()} assignment operator on non-integer type `$p.assigned_type`', errtok)
|
2019-11-06 01:37:32 +01:00
|
|
|
}
|
|
|
|
if !is_integer_type(expr_type) {
|
2019-12-18 02:34:50 +01:00
|
|
|
p.error_with_token_index('cannot use non-integer type `$expr_type` as ${tok.str()} argument', errtok)
|
2019-11-07 15:49:40 +01:00
|
|
|
}
|
|
|
|
}
|
2019-08-05 09:49:52 +02:00
|
|
|
else if !p.builtin_mod && !p.check_types_no_throw(expr_type, p.assigned_type) {
|
2019-12-18 02:34:50 +01:00
|
|
|
t := p.table.find_type(p.assigned_type)
|
2019-12-10 04:16:47 +01:00
|
|
|
if t.cat == .enum_ && t.is_flag {
|
|
|
|
p.error_with_token_index(err_modify_bitfield, errtok)
|
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
p.error_with_token_index('cannot use type `$expr_type` as type `$p.assigned_type` in assignment', errtok)
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-09-20 18:07:38 +02:00
|
|
|
if (is_str || is_ustr) && tok == .plus_assign && !p.is_js {
|
2019-06-22 20:20:28 +02:00
|
|
|
p.gen(')')
|
|
|
|
}
|
|
|
|
// p.assigned_var = ''
|
|
|
|
p.assigned_type = ''
|
|
|
|
if !v.is_used {
|
2019-09-09 15:22:39 +02:00
|
|
|
p.mark_var_used(v)
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn (p mut Parser) var_decl() {
|
2019-08-17 21:19:37 +02:00
|
|
|
p.is_alloc = false
|
2019-07-07 22:30:15 +02:00
|
|
|
is_mut := p.tok == .key_mut || p.prev_tok == .key_for
|
|
|
|
is_static := p.tok == .key_static
|
|
|
|
if p.tok == .key_mut {
|
|
|
|
p.check(.key_mut)
|
2019-06-22 20:20:28 +02:00
|
|
|
p.fspace()
|
|
|
|
}
|
2019-07-07 22:30:15 +02:00
|
|
|
if p.tok == .key_static {
|
|
|
|
p.check(.key_static)
|
2019-06-22 20:20:28 +02:00
|
|
|
p.fspace()
|
|
|
|
}
|
2019-10-07 12:19:44 +02:00
|
|
|
mut var_token_idxs := [p.cur_tok_index()]
|
|
|
|
mut var_mut := [is_mut] // add first var mut
|
|
|
|
mut var_names := [p.check_name()] // add first variable
|
|
|
|
p.scanner.validate_var_name(var_names[0])
|
2019-10-08 18:26:07 +02:00
|
|
|
mut new_vars := 0
|
|
|
|
if var_names[0] != '_' && !p.known_var(var_names[0]) {
|
|
|
|
new_vars++
|
|
|
|
}
|
2019-10-05 18:17:09 +02:00
|
|
|
// more than one var (multiple return values)
|
2019-09-23 04:45:19 +02:00
|
|
|
for p.tok == .comma {
|
|
|
|
p.check(.comma)
|
2019-10-07 12:19:44 +02:00
|
|
|
if p.tok == .key_mut {
|
|
|
|
p.check(.key_mut)
|
2019-12-20 01:02:16 +01:00
|
|
|
p.fspace()
|
2019-10-07 12:19:44 +02:00
|
|
|
var_mut << true
|
2019-12-18 02:34:50 +01:00
|
|
|
}
|
|
|
|
else {
|
2019-10-07 12:19:44 +02:00
|
|
|
var_mut << false
|
|
|
|
}
|
|
|
|
var_token_idxs << p.cur_tok_index()
|
|
|
|
var_name := p.check_name()
|
|
|
|
p.scanner.validate_var_name(var_name)
|
2019-10-08 18:26:07 +02:00
|
|
|
if var_name != '_' && !p.known_var(var_name) {
|
|
|
|
new_vars++
|
|
|
|
}
|
2019-10-07 12:19:44 +02:00
|
|
|
var_names << var_name
|
2019-10-05 18:17:09 +02:00
|
|
|
}
|
|
|
|
is_assign := p.tok == .assign
|
|
|
|
is_decl_assign := p.tok == .decl_assign
|
|
|
|
if is_assign {
|
|
|
|
p.check_space(.assign) // =
|
2019-12-18 02:34:50 +01:00
|
|
|
}
|
|
|
|
else if is_decl_assign {
|
2019-10-05 18:17:09 +02:00
|
|
|
p.check_space(.decl_assign) // :=
|
2019-12-18 02:34:50 +01:00
|
|
|
}
|
|
|
|
else {
|
2019-10-05 18:17:09 +02:00
|
|
|
p.error('expected `=` or `:=`')
|
2019-07-15 22:44:26 +02:00
|
|
|
}
|
2020-01-12 01:46:25 +01:00
|
|
|
expr_tok := p.cur_tok_index()
|
2019-10-08 18:26:07 +02:00
|
|
|
// all vars on left of `:=` already defined (or `_`)
|
2019-10-09 19:54:52 +02:00
|
|
|
if is_decl_assign && var_names.len == 1 && var_names[0] == '_' {
|
|
|
|
p.error_with_token_index('use `=` instead of `:=`', var_token_idxs.last())
|
2019-10-08 18:26:07 +02:00
|
|
|
}
|
2019-12-19 03:41:12 +01:00
|
|
|
p.var_decl_name = if var_names.len > 1 { '_V_mret_${p.token_idx}_' + var_names.join('_') } else { var_names[0] }
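// e.g. (sketch) `a, b := foo()` first assigns the call result to a temporary
// such as `_V_mret_<tok>_a_b`; further down each name is then generated as
// `a = _V_mret_<tok>_a_b.var_0`, `b = _V_mret_<tok>_a_b.var_1`, and so on.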
|
2019-10-05 18:17:09 +02:00
|
|
|
t := p.gen_var_decl(p.var_decl_name, is_static)
|
2019-11-04 12:35:10 +01:00
|
|
|
if t == 'void' {
|
2020-01-12 01:46:25 +01:00
|
|
|
_,fn_name := p.is_expr_fn_call(expr_tok+1)
|
|
|
|
p.error_with_token_index('${fn_name}() $err_used_as_value', expr_tok)
|
2019-11-04 12:35:10 +01:00
|
|
|
}
|
2019-10-07 12:19:44 +02:00
|
|
|
mut var_types := [t]
|
2019-10-05 18:17:09 +02:00
|
|
|
// multiple returns types
|
2019-10-07 12:19:44 +02:00
|
|
|
if var_names.len > 1 {
|
|
|
|
var_types = t.replace('_V_MulRet_', '').replace('_PTR_', '*').split('_V_')
|
|
|
|
}
|
|
|
|
// mismatched number of return & assignment vars
|
|
|
|
if var_names.len != var_types.len {
|
|
|
|
mr_fn := p.cgen.cur_line.find_between('=', '(').trim_space()
|
|
|
|
p.error_with_token_index('assignment mismatch: ${var_names.len} variables but `$mr_fn` returns $var_types.len values', var_token_idxs.last())
|
2019-09-23 04:45:19 +02:00
|
|
|
}
|
2019-12-18 08:17:07 +01:00
|
|
|
for i, var_name in var_names {
|
2019-10-07 12:19:44 +02:00
|
|
|
var_token_idx := var_token_idxs[i]
|
|
|
|
var_is_mut := var_mut[i]
|
|
|
|
var_type := var_types[i]
|
|
|
|
known_var := p.known_var(var_name)
|
2019-10-08 18:26:07 +02:00
|
|
|
if var_name == '_' {
|
|
|
|
if var_is_mut {
|
2019-12-18 02:34:50 +01:00
|
|
|
p.error_with_token_index('`mut` has no effect here', var_token_idx - 1)
|
2019-10-08 18:26:07 +02:00
|
|
|
}
|
|
|
|
continue
|
|
|
|
}
|
2019-10-07 12:19:44 +02:00
|
|
|
// println('var decl tok=${p.strtok()} name=type=$var_name type=$var_type ismut=$var_is_mut')
|
|
|
|
// var decl, already exists (shadowing is not allowed)
|
2019-09-23 04:45:19 +02:00
|
|
|
// Don't allow declaring a variable with the same name. Even in a child scope
|
2019-10-07 12:19:44 +02:00
|
|
|
// if var_names.len == 1 && !p.builtin_mod && known_var {
|
|
|
|
if is_decl_assign && known_var {
|
|
|
|
p.error_with_token_index('redefinition of `$var_name`', var_token_idx)
|
|
|
|
}
|
|
|
|
// mut specified with assignment
|
2019-12-18 02:34:50 +01:00
|
|
|
// if /*is_assign && implicit*/ known_var && var_is_mut {
|
2019-10-24 13:25:03 +02:00
|
|
|
if known_var && var_is_mut {
|
2019-10-07 12:19:44 +02:00
|
|
|
p.error_with_token_index('cannot specify mutability for existing var `$var_name`, only for new vars', var_token_idx)
|
2019-09-23 04:45:19 +02:00
|
|
|
}
|
2019-10-05 18:17:09 +02:00
|
|
|
// assignment, but var does not exist
|
2019-10-07 12:19:44 +02:00
|
|
|
if is_assign && !known_var {
|
|
|
|
suggested := p.find_misspelled_local_var(var_name, 50)
|
2019-10-05 18:17:09 +02:00
|
|
|
if suggested != '' {
|
2019-10-07 12:19:44 +02:00
|
|
|
p.error_with_token_index('undefined: `$var_name`. did you mean:$suggested', var_token_idx)
|
2019-10-05 18:17:09 +02:00
|
|
|
}
|
2019-10-07 12:19:44 +02:00
|
|
|
p.error_with_token_index('undefined: `$var_name`.', var_token_idx)
|
2019-10-05 18:17:09 +02:00
|
|
|
}
|
2019-10-07 12:19:44 +02:00
|
|
|
if var_name.len > 1 && contains_capital(var_name) {
|
2019-09-29 19:37:39 +02:00
|
|
|
p.error_with_token_index('variable names cannot contain uppercase letters, use snake_case instead', var_token_idx)
|
2019-09-23 04:45:19 +02:00
|
|
|
}
|
2019-10-05 18:17:09 +02:00
|
|
|
// multiple return
|
2019-10-07 12:19:44 +02:00
|
|
|
if var_names.len > 1 {
|
2019-09-25 13:53:44 +02:00
|
|
|
p.gen(';\n')
|
2019-10-05 18:17:09 +02:00
|
|
|
// assignment
|
2019-10-07 12:19:44 +02:00
|
|
|
// if !p.builtin_mod && known_var {
|
|
|
|
if known_var {
|
2019-12-18 05:52:14 +01:00
|
|
|
v := p.find_var(var_name) or {
|
2019-10-07 12:19:44 +02:00
|
|
|
p.error_with_token_index('cannot find `$var_name`', var_token_idx)
|
2019-10-05 18:17:09 +02:00
|
|
|
break
|
|
|
|
}
|
2019-10-07 12:19:44 +02:00
|
|
|
p.check_types_with_token_index(var_type, v.typ, var_token_idx)
|
2019-10-05 18:17:09 +02:00
|
|
|
if !v.is_mut {
|
|
|
|
p.error_with_token_index('`$v.name` is immutable', var_token_idx)
|
|
|
|
}
|
2019-10-07 12:19:44 +02:00
|
|
|
p.mark_var_used(v)
|
2019-10-05 18:17:09 +02:00
|
|
|
p.mark_var_changed(v)
|
2019-10-07 12:19:44 +02:00
|
|
|
p.gen('$var_name = ${p.var_decl_name}.var_$i')
|
2019-10-05 18:17:09 +02:00
|
|
|
continue
|
|
|
|
}
|
2019-12-06 13:24:53 +01:00
|
|
|
// declaration
|
2019-10-07 12:19:44 +02:00
|
|
|
p.gen('$var_type $var_name = ${p.var_decl_name}.var_$i')
|
2019-09-25 13:53:44 +02:00
|
|
|
}
|
2019-12-17 03:04:50 +01:00
|
|
|
// Function bodies are always parsed once in the second pass, but
|
|
|
|
// script programs are parsed in the first pass, need to handle that.
|
|
|
|
if p.pass == .main {
|
2019-12-18 02:34:50 +01:00
|
|
|
p.register_var(Var{
|
2019-12-17 03:04:50 +01:00
|
|
|
name: var_name
|
|
|
|
typ: var_type
|
|
|
|
is_mut: var_is_mut
|
|
|
|
is_alloc: p.is_alloc || var_type.starts_with('array_')
|
2019-12-18 02:34:50 +01:00
|
|
|
line_nr: p.tokens[var_token_idx].line_nr
|
2019-12-17 03:04:50 +01:00
|
|
|
token_idx: var_token_idx
|
|
|
|
})
|
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
// if p.fileis('str.v') {
|
|
|
|
// if p.is_alloc { println('REG VAR IS ALLOC $name') }
|
|
|
|
// }
|
2019-09-23 04:45:19 +02:00
|
|
|
}
|
2019-08-17 21:19:37 +02:00
|
|
|
p.var_decl_name = ''
|
2019-08-20 13:34:29 +02:00
|
|
|
p.is_empty_c_struct_init = false
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
|
2019-12-18 08:17:07 +01:00
|
|
|
fn (p mut Parser) get_struct_type(name_ string, is_c bool, is_ptr bool) string {
|
2019-10-20 09:19:37 +02:00
|
|
|
mut name := name_
|
|
|
|
if is_ptr {
|
2019-12-18 02:34:50 +01:00
|
|
|
name += '*' // `&User{}` => type `User*`
|
2019-10-20 09:19:37 +02:00
|
|
|
}
|
|
|
|
if name in reserved_type_param_names {
|
|
|
|
p.warn('name `$name` is reserved for type parameters')
|
|
|
|
}
|
|
|
|
p.is_c_struct_init = is_c
|
|
|
|
return p.struct_init(name)
|
|
|
|
}
|
|
|
|
|
2019-12-18 08:17:07 +01:00
|
|
|
fn (p mut Parser) get_var_type(name string, is_ptr bool, deref_nr int) string {
|
2019-12-18 05:52:14 +01:00
|
|
|
v := p.find_var_check_new_var(name) or {
|
2019-12-18 02:34:50 +01:00
|
|
|
return ''
|
|
|
|
}
|
2019-10-20 09:19:37 +02:00
|
|
|
if is_ptr {
|
|
|
|
p.gen('&')
|
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
else if deref_nr > 0 {
|
|
|
|
for _ in 0 .. deref_nr {
|
|
|
|
p.gen('*')
|
|
|
|
}
|
|
|
|
}
|
|
|
|
/*
|
2019-10-20 09:19:37 +02:00
|
|
|
if p.pref.autofree && v.typ == 'string' && v.is_arg &&
|
|
|
|
p.assigned_type == 'string' {
|
|
|
|
p.warn('setting moved ' + v.typ)
|
|
|
|
p.mark_arg_moved(v)
|
|
|
|
}
|
2019-11-25 06:38:00 +01:00
|
|
|
*/
|
2019-12-18 02:34:50 +01:00
|
|
|
|
2019-10-20 09:19:37 +02:00
|
|
|
mut typ := p.var_expr(v)
|
|
|
|
// *var
|
2019-11-11 16:43:22 +01:00
|
|
|
if deref_nr > 0 {
|
2019-11-10 18:32:11 +01:00
|
|
|
/*
|
2019-11-16 19:49:55 +01:00
|
|
|
if !p.inside_unsafe && !p.pref.building_v && p.mod != 'os' {
|
2019-11-10 18:14:57 +01:00
|
|
|
p.error('dereferencing can only be done inside an `unsafe` block')
|
2019-11-29 09:11:53 +01:00
|
|
|
}
|
2019-11-10 18:32:11 +01:00
|
|
|
*/
|
2019-10-20 09:19:37 +02:00
|
|
|
if !typ.contains('*') && !typ.ends_with('ptr') {
|
|
|
|
println('name="$name", t=$v.typ')
|
|
|
|
p.error('dereferencing requires a pointer, but got `$typ`')
|
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
for _ in 0 .. deref_nr {
|
|
|
|
typ = typ.replace_once('ptr', '') // TODO
|
|
|
|
typ = typ.replace_once('*', '') // TODO
|
2019-11-11 16:43:22 +01:00
|
|
|
}
|
2019-10-20 09:19:37 +02:00
|
|
|
}
|
|
|
|
// &var
|
|
|
|
else if is_ptr {
|
|
|
|
typ += '*'
|
|
|
|
}
|
|
|
|
if p.inside_return_expr {
|
2019-12-18 02:34:50 +01:00
|
|
|
// println('marking $v.name returned')
|
2019-10-20 09:19:37 +02:00
|
|
|
p.mark_var_returned(v)
|
|
|
|
// v.is_returned = true // TODO modifying a local variable
|
|
|
|
// that's not used afterwards, this should be a compilation
|
|
|
|
// error
|
|
|
|
}
|
|
|
|
return typ
|
|
|
|
}
|
|
|
|
|
2019-12-18 08:17:07 +01:00
|
|
|
fn (p mut Parser) get_const_type(name string, is_ptr bool) string {
|
2019-12-18 05:52:14 +01:00
|
|
|
c := p.table.find_const(name) or {
|
2019-12-18 02:34:50 +01:00
|
|
|
return ''
|
|
|
|
}
|
2019-10-20 09:19:37 +02:00
|
|
|
if is_ptr && !c.is_global {
|
|
|
|
p.error('cannot take the address of constant `$c.name`')
|
2019-12-18 02:34:50 +01:00
|
|
|
}
|
|
|
|
else if is_ptr && c.is_global {
|
2019-10-20 09:19:37 +02:00
|
|
|
// c.ptr = true
|
|
|
|
p.gen('& /*const*/ ')
|
|
|
|
}
|
2019-10-24 11:36:57 +02:00
|
|
|
if !c.is_public && c.mod != p.mod {
|
|
|
|
p.warn('constant `$c.name` is private')
|
2019-11-29 09:11:53 +01:00
|
|
|
}
|
2019-10-20 09:19:37 +02:00
|
|
|
mut typ := p.var_expr(c)
|
|
|
|
if is_ptr {
|
|
|
|
typ += '*'
|
|
|
|
}
|
|
|
|
return typ
|
|
|
|
}
|
|
|
|
|
|
|
|
fn (p mut Parser) get_c_func_type(name string) string {
|
2019-12-18 02:34:50 +01:00
|
|
|
f := Fn{
|
2019-10-20 09:19:37 +02:00
|
|
|
name: name
|
|
|
|
is_c: true
|
|
|
|
}
|
|
|
|
p.is_c_fn_call = true
|
|
|
|
p.fn_call(mut f, 0, '', '')
|
|
|
|
p.is_c_fn_call = false
|
2019-11-24 04:27:02 +01:00
|
|
|
// C functions must be defined with `C.fn_name() fn_type`
|
2019-12-18 05:52:14 +01:00
|
|
|
cfn := p.table.find_fn(name) or {
|
2019-11-24 04:27:02 +01:00
|
|
|
// Is the user trying to do `var := C.foo()` or `bar(C.foo())`
|
|
|
|
// without declaring `foo`?
|
|
|
|
// Do not allow it.
|
|
|
|
if !name.starts_with('gl') && !name.starts_with('glad') {
|
2019-12-18 02:34:50 +01:00
|
|
|
p.error('undefined C function `$f.name`\n' + 'define it with `fn C.${name}([args]) [return_type]`')
|
2019-11-24 04:27:02 +01:00
|
|
|
}
|
2019-10-20 09:19:37 +02:00
|
|
|
return 'void*'
|
|
|
|
}
|
|
|
|
// println("C fn $name has type $cfn.typ")
|
|
|
|
return cfn.typ
|
|
|
|
}
|
|
|
|
|
2019-12-18 08:17:07 +01:00
|
|
|
fn (p mut Parser) undefined_error(name string, orig_name string) {
|
2019-11-04 02:59:28 +01:00
|
|
|
name_dotted := mod_gen_name_rev(name.replace('__', '.'))
|
|
|
|
// check for misspelled function / variable / module / type
|
|
|
|
suggested := p.identify_typo(name)
|
|
|
|
if suggested.len != 0 {
|
|
|
|
p.error('undefined: `$name_dotted`. did you mean:\n$suggested\n')
|
|
|
|
}
|
|
|
|
// If orig_name is a mod, then printing undefined: `mod` tells us nothing
|
|
|
|
if p.table.known_mod(orig_name) || p.import_table.known_alias(orig_name) {
|
|
|
|
p.error('undefined: `$name_dotted` (in module `$orig_name`)')
|
2019-12-18 02:34:50 +01:00
|
|
|
}
|
|
|
|
else if orig_name in reserved_type_param_names {
|
2019-11-04 02:59:28 +01:00
|
|
|
p.error('the letter `$orig_name` is reserved for type parameters')
|
2019-10-20 09:19:37 +02:00
|
|
|
}
|
2019-11-07 15:57:54 +01:00
|
|
|
p.error('undefined: `$orig_name`')
|
2019-10-20 09:19:37 +02:00
|
|
|
}
|
|
|
|
|
2019-06-22 20:20:28 +02:00
|
|
|
fn (p mut Parser) var_expr(v Var) string {
|
2019-12-18 02:34:50 +01:00
|
|
|
// p.log('\nvar_expr() v.name="$v.name" v.typ="$v.typ"')
|
2019-06-22 20:20:28 +02:00
|
|
|
// println('var expr is_tmp=$p.cgen.is_tmp\n')
|
2019-09-23 21:47:09 +02:00
|
|
|
if !v.is_const {
|
|
|
|
p.mark_var_used(v)
|
2019-12-06 13:24:53 +01:00
|
|
|
// `C.foo(&var)` means that `var` is changed. Mark it as changed
|
|
|
|
// to avoid `var was declared as mutable but was never changed` errors.
|
|
|
|
if p.calling_c && !v.is_changed {
|
2019-12-18 02:34:50 +01:00
|
|
|
// println('marking C var changed: $v.name')
|
2019-12-06 13:24:53 +01:00
|
|
|
p.mark_var_changed(v)
|
|
|
|
}
|
2019-09-23 21:47:09 +02:00
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
fn_ph := p.cgen.add_placeholder()
|
|
|
|
p.expr_var = v
|
|
|
|
p.gen(p.table.var_cgen_name(v.name))
|
|
|
|
p.next()
|
|
|
|
mut typ := v.typ
|
2019-08-17 21:19:37 +02:00
|
|
|
// Function pointer?
|
2019-12-18 11:21:49 +01:00
|
|
|
if p.base_type(typ).starts_with('fn ') && p.tok == .lpar {
|
|
|
|
T := p.table.find_type(p.base_type(typ))
|
2019-06-22 20:20:28 +02:00
|
|
|
p.gen('(')
|
2019-12-21 01:53:58 +01:00
|
|
|
p.fn_call_args(mut T.func, [])
|
2019-06-22 20:20:28 +02:00
|
|
|
p.gen(')')
|
|
|
|
typ = T.func.typ
|
|
|
|
}
|
|
|
|
// users[0].name
|
2019-07-07 22:30:15 +02:00
|
|
|
if p.tok == .lsbr {
|
2019-06-22 20:20:28 +02:00
|
|
|
typ = p.index_expr(typ, fn_ph)
|
2019-12-18 11:21:49 +01:00
|
|
|
if p.base_type(typ).starts_with('fn ') && p.tok == .lpar {
|
2019-12-18 19:56:30 +01:00
|
|
|
T := p.table.find_type(p.base_type(typ))
|
|
|
|
p.gen('(')
|
2019-12-21 01:53:58 +01:00
|
|
|
p.fn_call_args(mut T.func, [])
|
2019-12-18 19:56:30 +01:00
|
|
|
p.gen(')')
|
|
|
|
typ = T.func.typ
|
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
// a.b.c().d chain
|
|
|
|
// mut dc := 0
|
2019-12-18 02:34:50 +01:00
|
|
|
for p.tok == .dot {
|
2019-08-09 18:10:59 +02:00
|
|
|
if p.peek() == .key_select {
|
2019-08-17 21:19:37 +02:00
|
|
|
p.next()
|
|
|
|
return p.select_query(fn_ph)
|
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
if typ == 'pg__DB' && !p.fileis('pg.v') && p.peek() == .name {
|
2019-11-25 06:38:00 +01:00
|
|
|
name := p.tokens[p.token_idx].lit
|
|
|
|
if !name.contains('exec') && !name.starts_with('q_') {
|
|
|
|
p.next()
|
2019-12-10 12:32:12 +01:00
|
|
|
if name == 'insert' {
|
|
|
|
p.insert_query(fn_ph)
|
2019-12-18 02:34:50 +01:00
|
|
|
}
|
|
|
|
else if name == 'update' {
|
2019-12-10 12:32:12 +01:00
|
|
|
p.update_query(fn_ph)
|
|
|
|
}
|
2019-11-25 06:38:00 +01:00
|
|
|
return 'void'
|
|
|
|
}
|
2019-08-17 21:19:37 +02:00
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
// println('dot #$dc')
|
|
|
|
typ = p.dot(typ, fn_ph)
|
2019-12-18 02:34:50 +01:00
|
|
|
// p.log('typ after dot=$typ')
|
2019-06-22 20:20:28 +02:00
|
|
|
// print('tok after dot()')
|
|
|
|
// p.print_tok()
|
|
|
|
// dc++
|
2019-07-07 22:30:15 +02:00
|
|
|
if p.tok == .lsbr {
|
2019-12-27 04:09:09 +01:00
|
|
|
typ = p.index_expr(typ, fn_ph)
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
}
|
2019-11-08 15:59:01 +01:00
|
|
|
// `a++` and `a--`
|
2019-07-07 22:30:15 +02:00
|
|
|
if p.tok == .inc || p.tok == .dec {
|
2019-11-08 15:59:01 +01:00
|
|
|
if !v.is_mut && !p.pref.translated {
|
2019-07-10 14:18:21 +02:00
|
|
|
p.error('`$v.name` is immutable')
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-07-25 13:16:17 +02:00
|
|
|
if !v.is_changed {
|
2019-09-09 15:22:39 +02:00
|
|
|
p.mark_var_changed(v)
|
2019-08-17 21:19:37 +02:00
|
|
|
}
|
2019-11-11 21:39:16 +01:00
|
|
|
if typ != 'int' && !typ.contains('*') {
|
2019-06-30 22:03:17 +02:00
|
|
|
if !p.pref.translated && !is_number_type(typ) {
|
2019-06-22 20:20:28 +02:00
|
|
|
p.error('cannot ++/-- value of type `$typ`')
|
|
|
|
}
|
|
|
|
}
|
|
|
|
p.gen(p.tok.str())
|
2019-12-18 02:34:50 +01:00
|
|
|
p.next() // ++/--
|
2019-11-08 15:59:01 +01:00
|
|
|
// allow `a := c++` in translated code TODO remove once c2v handles this
|
2019-06-30 22:03:17 +02:00
|
|
|
if p.pref.translated {
|
2019-12-18 02:34:50 +01:00
|
|
|
// return p.index_expr(typ, fn_ph)
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
else {
|
2019-11-11 21:39:16 +01:00
|
|
|
return typ
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
typ = p.index_expr(typ, fn_ph)
|
2019-08-17 21:19:37 +02:00
|
|
|
// TODO hack to allow `foo.bar[0] = 2`
|
|
|
|
if p.tok == .dot {
|
2019-07-07 22:30:15 +02:00
|
|
|
for p.tok == .dot {
|
2019-07-02 00:00:27 +02:00
|
|
|
typ = p.dot(typ, fn_ph)
|
2019-08-17 21:19:37 +02:00
|
|
|
}
|
2019-07-02 00:00:27 +02:00
|
|
|
typ = p.index_expr(typ, fn_ph)
|
2019-08-17 21:19:37 +02:00
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
return typ
|
|
|
|
}
|
|
|
|
|
|
|
|
// user.name => `str_typ` is `User`
|
|
|
|
// user.company.name => `str_typ` is `Company`
|
2019-12-18 08:17:07 +01:00
|
|
|
fn (p mut Parser) dot(str_typ_ string, method_ph int) string {
|
2019-12-18 02:34:50 +01:00
|
|
|
// if p.fileis('orm_test') {
|
|
|
|
// println('ORM dot $str_typ')
|
|
|
|
// }
|
2019-12-06 13:24:53 +01:00
|
|
|
str_typ := str_typ_
|
2019-07-07 22:30:15 +02:00
|
|
|
p.check(.dot)
|
2019-11-02 11:17:56 +01:00
|
|
|
is_variadic_arg := str_typ.starts_with('varg_')
|
2019-12-06 13:24:53 +01:00
|
|
|
typ := p.find_type(str_typ)
|
2019-07-29 18:21:36 +02:00
|
|
|
if typ.name.len == 0 {
|
|
|
|
p.error('dot(): cannot find type `$str_typ`')
|
|
|
|
}
|
2019-12-11 19:42:22 +01:00
|
|
|
// foo.$action()
|
2019-07-29 18:21:36 +02:00
|
|
|
if p.tok == .dollar {
|
2019-08-17 21:19:37 +02:00
|
|
|
p.comptime_method_call(typ)
|
|
|
|
return 'void'
|
|
|
|
}
|
2019-07-25 13:16:17 +02:00
|
|
|
field_name := p.lit
|
2019-10-17 09:44:20 +02:00
|
|
|
if field_name == 'filter' && str_typ.starts_with('array_') {
|
2019-10-18 06:58:54 +02:00
|
|
|
p.gen_array_filter(str_typ, method_ph)
|
2019-10-17 09:44:20 +02:00
|
|
|
return str_typ
|
2019-11-29 09:11:53 +01:00
|
|
|
}
|
2019-10-26 13:25:03 +02:00
|
|
|
else if field_name == 'map' && str_typ.starts_with('array_') {
|
2019-10-26 13:49:36 +02:00
|
|
|
return p.gen_array_map(str_typ, method_ph)
|
2019-11-29 09:11:53 +01:00
|
|
|
}
|
2019-10-06 02:07:02 +02:00
|
|
|
fname_tidx := p.cur_tok_index()
|
2019-12-18 02:34:50 +01:00
|
|
|
// p.log('dot() field_name=$field_name typ=$str_typ')
|
|
|
|
// if p.fileis('main.v') {
|
|
|
|
// println('dot() field_name=$field_name typ=$str_typ prev_tok=${prev_tok.str()}')
|
|
|
|
// }
|
2019-08-29 21:19:37 +02:00
|
|
|
has_field := p.table.type_has_field(typ, p.table.var_cgen_name(field_name))
|
2019-08-10 23:02:48 +02:00
|
|
|
mut has_method := p.table.type_has_method(typ, field_name)
|
2019-09-30 16:11:12 +02:00
|
|
|
if is_variadic_arg && field_name == 'len' {
|
2019-09-30 12:46:50 +02:00
|
|
|
p.gen('->$field_name')
|
|
|
|
p.next()
|
|
|
|
return 'int'
|
|
|
|
}
|
2019-08-17 21:19:37 +02:00
|
|
|
// generate `.str()`
|
|
|
|
if !has_method && field_name == 'str' && typ.name.starts_with('array_') {
|
2019-08-31 15:38:13 +02:00
|
|
|
p.gen_array_str(typ)
|
2019-08-17 21:19:37 +02:00
|
|
|
has_method = true
|
|
|
|
}
|
2019-08-20 13:34:29 +02:00
|
|
|
if !typ.is_c && !p.is_c_fn_call && !has_field && !has_method && !p.first_pass() {
|
2019-06-22 20:20:28 +02:00
|
|
|
if typ.name.starts_with('Option_') {
|
2019-12-21 23:44:16 +01:00
|
|
|
opt_type := typ.name[7..].replace('ptr_', '&')
|
2019-09-18 14:37:34 +02:00
|
|
|
p.error('unhandled option type: `?$opt_type`')
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
// println('error in dot():')
|
|
|
|
// println('fields:')
|
|
|
|
// for field in typ.fields {
|
|
|
|
// println(field.name)
|
|
|
|
// }
|
|
|
|
// println('methods:')
|
|
|
|
// for field in typ.methods {
|
|
|
|
// println(field.name)
|
|
|
|
// }
|
|
|
|
// println('str_typ=="$str_typ"')
|
2019-10-06 02:07:02 +02:00
|
|
|
p.error_with_token_index('type `$typ.name` has no field or method `$field_name`', fname_tidx)
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
mut dot := '.'
|
2019-12-18 02:34:50 +01:00
|
|
|
if str_typ.ends_with('*') || str_typ == 'FT_Face' {
|
|
|
|
// TODO fix C ptr typedefs
|
2019-09-14 22:48:30 +02:00
|
|
|
dot = dot_ptr
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
// field
|
|
|
|
if has_field {
|
2019-12-19 03:41:12 +01:00
|
|
|
struct_field := if typ.name != 'Option' { p.table.var_cgen_name(field_name) } else { field_name }
|
2019-12-18 05:52:14 +01:00
|
|
|
field := p.table.find_field(typ, struct_field) or {
|
2019-10-06 02:07:02 +02:00
|
|
|
p.error_with_token_index('missing field: $struct_field in type $typ.name', fname_tidx)
|
2019-09-28 19:42:29 +02:00
|
|
|
exit(1)
|
|
|
|
}
|
2019-08-30 19:19:06 +02:00
|
|
|
if !field.is_mut && !p.has_immutable_field {
|
|
|
|
p.has_immutable_field = true
|
|
|
|
p.first_immutable_field = field
|
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
// Is the next token `=`, `+=` etc? (Are we modifying the field?)
|
|
|
|
next := p.peek()
|
2019-12-18 02:34:50 +01:00
|
|
|
modifying := next.is_assign() || next == .inc || next == .dec || (field.typ.starts_with('array_') && next == .left_shift)
|
2020-01-09 01:39:47 +01:00
|
|
|
if modifying {
|
|
|
|
p.expected_type = field.typ
|
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
if !p.builtin_mod && !p.pref.translated && modifying && p.has_immutable_field {
|
2019-08-30 19:19:06 +02:00
|
|
|
f := p.first_immutable_field
|
2019-12-18 02:34:50 +01:00
|
|
|
p.error_with_token_index('cannot modify immutable field `$f.name` (type `$f.parent_fn`)\n' + 'declare the field with `mut:`
|
2019-08-30 19:19:06 +02:00
|
|
|
struct $f.parent_fn {
|
2019-12-18 02:59:42 +01:00
|
|
|
mut:
|
2019-08-30 19:19:06 +02:00
|
|
|
$f.name $f.typ
|
2019-08-17 21:19:37 +02:00
|
|
|
}
|
2019-10-06 02:07:02 +02:00
|
|
|
', fname_tidx)
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
// Don't allow `arr.data`
|
2019-10-25 15:34:12 +02:00
|
|
|
if field.access_mod == .private && !p.builtin_mod && !p.pref.translated && p.mod != typ.mod && !p.is_vgen {
|
2019-06-22 20:20:28 +02:00
|
|
|
// println('$typ.name :: $field.name ')
|
|
|
|
// println(field.access_mod)
|
2019-12-18 02:34:50 +01:00
|
|
|
p.error_with_token_index('cannot refer to unexported field `$struct_field` (type `$typ.name`)\n' + 'declare the field with `pub:`
|
2019-10-14 09:15:21 +02:00
|
|
|
struct $typ.name {
|
2019-12-18 02:59:42 +01:00
|
|
|
pub:
|
2019-10-14 09:15:21 +02:00
|
|
|
$struct_field $field.typ
|
|
|
|
}
|
|
|
|
', fname_tidx)
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-12-18 11:21:49 +01:00
|
|
|
base := p.base_type(field.typ)
|
|
|
|
if base.starts_with('fn ') && p.peek() == .lpar {
|
|
|
|
tmp_typ := p.table.find_type(base)
|
2019-12-17 20:00:08 +01:00
|
|
|
mut f := tmp_typ.func
|
2019-12-19 19:23:29 +01:00
|
|
|
p.gen('$dot$field.name')
|
2019-12-17 20:00:08 +01:00
|
|
|
p.gen('(')
|
|
|
|
p.check(.name)
|
2019-12-21 01:53:58 +01:00
|
|
|
p.fn_call_args(mut f, [])
|
2019-12-17 20:00:08 +01:00
|
|
|
p.gen(')')
|
|
|
|
return f.typ
|
|
|
|
}
|
2019-08-29 21:19:37 +02:00
|
|
|
p.gen(dot + struct_field)
|
2019-06-22 20:20:28 +02:00
|
|
|
p.next()
|
|
|
|
return field.typ
|
|
|
|
}
|
|
|
|
// method
|
2019-12-18 05:52:14 +01:00
|
|
|
mut method := p.table.find_method(typ, field_name) or {
|
2019-10-06 02:07:02 +02:00
|
|
|
p.error_with_token_index('could not find method `$field_name`', fname_tidx) // should never happen
|
2019-09-18 14:37:34 +02:00
|
|
|
exit(1)
|
|
|
|
}
|
2019-10-20 09:19:37 +02:00
|
|
|
p.fn_call(mut method, method_ph, '', str_typ)
|
2019-12-18 02:34:50 +01:00
|
|
|
// optional method call `a.method() or {}`, no return assignment
|
|
|
|
is_or_else := p.tok == .key_orelse
|
|
|
|
if is_or_else {
|
|
|
|
p.fspace()
|
|
|
|
}
|
2019-11-26 07:04:59 +01:00
|
|
|
if p.tok == .question {
|
|
|
|
// `files := os.ls('.')?`
|
|
|
|
return p.gen_handle_question_suffix(method, method_ph)
|
|
|
|
}
|
|
|
|
else if !p.is_var_decl && is_or_else {
|
2019-11-04 00:38:49 +01:00
|
|
|
method.typ = p.gen_handle_option_or_else(method.typ, '', method_ph)
|
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
else if !p.is_var_decl && !is_or_else && !p.inside_return_expr && method.typ.starts_with('Option_') {
|
2019-12-21 23:44:16 +01:00
|
|
|
opt_type := method.typ[7..].replace('ptr_', '&')
|
2019-12-18 02:34:50 +01:00
|
|
|
p.error('unhandled option type: `?$opt_type`')
|
|
|
|
}
|
2019-10-27 01:36:43 +02:00
|
|
|
// Methods returning `array` should return `array_string` etc
|
2019-06-22 20:20:28 +02:00
|
|
|
if method.typ == 'array' && typ.name.starts_with('array_') {
|
|
|
|
return typ.name
|
|
|
|
}
|
2019-07-07 22:30:15 +02:00
|
|
|
// Array methods returning `voidptr` (like `last()`) should return element type
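// e.g. (sketch) `nums.last()` on an `array_int` is reported as `int` here
// rather than the `void*` the builtin method is declared with.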
|
2019-06-22 20:20:28 +02:00
|
|
|
if method.typ == 'void*' && typ.name.starts_with('array_') {
|
2019-12-21 23:44:16 +01:00
|
|
|
return parse_pointer(typ.name[6..])
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
// if false && p.tok == .lsbr {
|
|
|
|
// if is_indexer {
|
|
|
|
// return p.index_expr(method.typ, method_ph)
|
|
|
|
// }
|
2019-09-09 15:22:39 +02:00
|
|
|
if method.typ.ends_with('*') {
|
|
|
|
p.is_alloc = true
|
2019-10-12 21:01:50 +02:00
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
return method.typ
|
|
|
|
}
|
|
|
|
|
2019-09-14 22:48:30 +02:00
|
|
|
enum IndexType {
|
2019-09-17 21:41:58 +02:00
|
|
|
noindex
|
2019-09-14 22:48:30 +02:00
|
|
|
str
|
|
|
|
map
|
|
|
|
array
|
|
|
|
array0
|
|
|
|
fixed_array
|
|
|
|
ptr
|
|
|
|
}
|
|
|
|
|
|
|
|
fn get_index_type(typ string) IndexType {
|
2019-12-18 02:34:50 +01:00
|
|
|
if typ.starts_with('map_') {
|
|
|
|
return .map
|
|
|
|
}
|
|
|
|
if typ == 'string' {
|
|
|
|
return .str
|
|
|
|
}
|
|
|
|
if typ.starts_with('array_') || typ == 'array' {
|
|
|
|
return .array
|
|
|
|
}
|
2019-09-14 22:48:30 +02:00
|
|
|
if typ == 'byte*' || typ == 'byteptr' || typ.contains('*') {
|
2019-10-06 02:56:08 +02:00
|
|
|
return .ptr
|
2019-09-14 22:48:30 +02:00
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
if typ[0] == `[` {
|
|
|
|
return .fixed_array
|
|
|
|
}
|
2019-10-06 02:56:08 +02:00
|
|
|
return .noindex
|
2019-09-14 22:48:30 +02:00
|
|
|
}
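// For example (sketch): get_index_type('map_string') == .map,
// get_index_type('array_int') == .array, get_index_type('[10]int') == .fixed_array,
// get_index_type('byteptr') == .ptr, get_index_type('f64') == .noindex.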
|
|
|
|
|
2019-12-18 08:17:07 +01:00
|
|
|
fn (p mut Parser) index_expr(typ_ string, fn_ph int) string {
|
2019-08-17 21:19:37 +02:00
|
|
|
mut typ := typ_
|
2019-06-22 20:20:28 +02:00
|
|
|
// a[0]
|
|
|
|
v := p.expr_var
|
2019-12-18 02:34:50 +01:00
|
|
|
// if p.fileis('fn_test.v') {
|
|
|
|
// println('index expr typ=$typ')
|
|
|
|
// println(v.name)
|
|
|
|
// }
|
2019-11-02 11:17:56 +01:00
|
|
|
is_variadic_arg := typ.starts_with('varg_')
|
2019-06-22 20:20:28 +02:00
|
|
|
is_map := typ.starts_with('map_')
|
|
|
|
is_str := typ == 'string'
|
|
|
|
is_arr0 := typ.starts_with('array_')
|
|
|
|
is_arr := is_arr0 || typ == 'array'
|
|
|
|
is_ptr := typ == 'byte*' || typ == 'byteptr' || typ.contains('*')
|
2019-10-26 20:58:26 +02:00
|
|
|
mut is_slice := false
|
2019-07-07 22:30:15 +02:00
|
|
|
is_indexer := p.tok == .lsbr
|
2019-06-22 20:20:28 +02:00
|
|
|
mut close_bracket := false
|
2019-10-01 13:48:19 +02:00
|
|
|
index_error_tok_pos := p.token_idx
|
2019-06-22 20:20:28 +02:00
|
|
|
if is_indexer {
|
|
|
|
is_fixed_arr := typ[0] == `[`
|
2019-09-30 12:46:50 +02:00
|
|
|
if !is_str && !is_arr && !is_map && !is_ptr && !is_fixed_arr && !is_variadic_arg {
|
2019-11-12 20:35:53 +01:00
|
|
|
p.error('invalid operation: type `$typ` does not support indexing')
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-07-07 22:30:15 +02:00
|
|
|
p.check(.lsbr)
|
2019-06-22 20:20:28 +02:00
|
|
|
// Get element type (set `typ` to it)
|
|
|
|
if is_str {
|
|
|
|
typ = 'byte'
|
2019-08-05 09:49:52 +02:00
|
|
|
// Direct faster access to .str[i] in builtin modules
|
2019-11-16 21:22:40 +01:00
|
|
|
if p.builtin_mod || p.pref.is_bare {
|
2019-06-22 20:20:28 +02:00
|
|
|
p.gen('.str[')
|
|
|
|
close_bracket = true
|
2019-11-29 09:11:53 +01:00
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
else {
|
|
|
|
// Bounds check everywhere else
|
2019-11-12 20:35:53 +01:00
|
|
|
p.gen(', ')
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
if is_variadic_arg {
|
|
|
|
typ = typ[5..]
|
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
if is_fixed_arr {
|
|
|
|
// `[10]int` => `int`, `[10][3]int` => `[3]int`
|
|
|
|
if typ.contains('][') {
|
|
|
|
pos := typ.index_after('[', 1)
|
2019-10-27 08:03:15 +01:00
|
|
|
typ = typ[pos..]
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
else {
|
|
|
|
typ = typ.all_after(']')
|
|
|
|
}
|
|
|
|
p.gen('[')
|
|
|
|
close_bracket = true
|
|
|
|
}
|
2019-12-18 02:34:50 +01:00
|
|
|
else if is_ptr && !is_variadic_arg {
|
2019-06-22 20:20:28 +02:00
|
|
|
// typ = 'byte'
|
|
|
|
typ = typ.replace('*', '')
|
|
|
|
// modify(mut []string) fix
|
2019-11-12 20:35:53 +01:00
|
|
|
if !is_arr && !is_map {
|
|
|
|
p.gen('[/*ptr!*/')
|
2019-06-22 20:20:28 +02:00
|
|
|
close_bracket = true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if is_arr {
|
|
|
|
if is_arr0 {
|
2019-12-21 23:44:16 +01:00
|
|
|
typ = parse_pointer(typ[6..])
|
2019-12-18 02:34:50 +01:00
|
|
|
}
|
2019-09-14 22:48:30 +02:00
|
|
|
p.gen_array_at(typ, is_arr0, fn_ph)
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
// map is tricky
|
|
|
|
// need to replace "m[key] = val" with "tmp = val; map_set(&m, key, &tmp)"
|
|
|
|
// need to replace "m[key]" with "tmp = val; map_get(&m, key, &tmp)"
|
|
|
|
// can only do that later once we know whether there's an "=" or not
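// Sketch with a hypothetical `m map[string]int`:
//   m['one'] = 1   =>   int tmp = 1; map_set(&m, tos3("one"), &tmp);
//   x := m['one']  =>   map_get(&m, tos3("one"), &x)  (read into a tmp)
// (the exact generated C is decided later in gen_array_set / index_get)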
|
|
|
|
if is_map {
|
|
|
|
typ = typ.replace('map_', '')
|
2019-12-21 23:44:16 +01:00
|
|
|
typ = parse_pointer(typ)
|
2019-06-22 20:20:28 +02:00
|
|
|
if typ == 'map' {
|
|
|
|
typ = 'void*'
|
|
|
|
}
|
|
|
|
p.gen(',')
|
|
|
|
}
|
|
|
|
// expression inside [ ]
|
2019-10-15 10:02:30 +02:00
|
|
|
if is_arr || is_str {
|
2019-10-27 07:36:04 +01:00
|
|
|
// [2..
|
|
|
|
if p.tok != .dotdot {
|
|
|
|
index_pos := p.cgen.cur_line.len
|
|
|
|
T := p.table.find_type(p.expression())
|
|
|
|
// Allow only integer types (i8..i64, byte..u64) to be used when indexing an array
|
|
|
|
if T.parent != 'int' && T.parent != 'u32' {
|
|
|
|
p.check_types(T.name, 'int')
|
|
|
|
}
|
2019-10-27 08:03:15 +01:00
|
|
|
if p.cgen.cur_line[index_pos..].replace(' ', '').int() < 0 {
|
2019-10-27 07:36:04 +01:00
|
|
|
p.error('cannot access negative array index')
|
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-10-27 07:36:04 +01:00
|
|
|
// [..
|
|
|
|
else {
|
|
|
|
p.gen('0')
|
2019-08-18 18:26:02 +02:00
|
|
|
}
|
2019-10-26 20:58:26 +02:00
|
|
|
if p.tok == .dotdot {
|
|
|
|
if is_arr {
|
2019-12-21 23:44:16 +01:00
|
|
|
typ = 'array_' + stringify_pointer(typ)
|
2019-12-18 02:34:50 +01:00
|
|
|
}
|
|
|
|
else if is_str {
|
2019-10-26 20:58:26 +02:00
|
|
|
typ = 'string'
|
2019-12-18 02:34:50 +01:00
|
|
|
}
|
|
|
|
else {
|
2019-10-26 20:58:26 +02:00
|
|
|
p.error('slicing is supported by arrays and strings only')
|
2019-11-29 09:11:53 +01:00
|
|
|
}
|
2019-10-26 20:58:26 +02:00
|
|
|
is_slice = true
|
|
|
|
p.next()
|
|
|
|
p.gen(',')
|
2019-10-27 07:36:04 +01:00
|
|
|
// ..4]
|
|
|
|
if p.tok != .rsbr {
|
|
|
|
p.check_types(p.expression(), 'int')
|
|
|
|
p.gen(', false')
|
|
|
|
}
|
|
|
|
// ..]
|
|
|
|
else {
|
|
|
|
p.gen('-1, true')
|
|
|
|
}
|
2019-11-29 09:11:53 +01:00
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
else {
|
2019-08-13 19:13:39 +02:00
|
|
|
T := p.table.find_type(p.expression())
|
|
|
|
// TODO: Get the key type of the map instead of only string.
|
|
|
|
if is_map && T.parent != 'string' {
|
|
|
|
p.check_types(T.name, 'string')
|
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-07-07 22:30:15 +02:00
|
|
|
p.check(.rsbr)
|
2019-08-05 09:49:52 +02:00
|
|
|
// if (is_str && p.builtin_mod) || is_ptr || is_fixed_arr && ! (is_ptr && is_arr) {
|
2019-06-22 20:20:28 +02:00
|
|
|
if close_bracket {
|
|
|
|
p.gen(']/*r$typ $v.is_mut*/')
|
|
|
|
}
|
2019-08-30 11:16:39 +02:00
|
|
|
p.expr_var = v
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-09-30 12:46:50 +02:00
|
|
|
// accessing variadic args
|
2019-09-30 16:11:12 +02:00
|
|
|
if is_variadic_arg {
|
|
|
|
// TODO: why was this here?
|
|
|
|
// if p.calling_c {
|
2019-12-18 02:34:50 +01:00
|
|
|
// p.error('you cannot currently pass varg to a C function.')
|
2019-09-30 16:11:12 +02:00
|
|
|
// }
|
|
|
|
if is_indexer {
|
|
|
|
l := p.cgen.cur_line.trim_space()
|
2019-12-18 05:52:14 +01:00
|
|
|
idx := l.last_index(' ') or {
|
2019-12-12 19:44:52 +01:00
|
|
|
panic('idx')
|
|
|
|
}
|
|
|
|
index_val := l[idx..].trim_space()
|
2019-10-27 08:03:15 +01:00
|
|
|
p.cgen.resetln(l[..fn_ph])
|
2019-09-30 16:11:12 +02:00
|
|
|
p.table.varg_access << VargAccess{
|
2019-12-18 06:13:31 +01:00
|
|
|
fn_name: p.cur_fn.name
|
|
|
|
tok_idx: index_error_tok_pos
|
2019-09-30 16:11:12 +02:00
|
|
|
index: index_val.int()
|
|
|
|
}
|
|
|
|
p.cgen.set_placeholder(fn_ph, '${v.name}->args[$index_val]')
|
|
|
|
return typ
|
|
|
|
}
|
2019-09-30 12:46:50 +02:00
|
|
|
}
|
2019-10-26 20:58:26 +02:00
|
|
|
// `m[key] = val`
|
2019-08-17 21:19:37 +02:00
|
|
|
// TODO move this from index_expr()
|
2019-09-24 21:30:30 +02:00
|
|
|
if (p.tok == .assign && !p.is_sql) || p.tok.is_assign() {
|
2019-08-05 09:49:52 +02:00
|
|
|
if is_indexer && is_str && !p.builtin_mod {
|
2019-07-10 14:18:21 +02:00
|
|
|
p.error('strings are immutable')
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
p.assigned_type = typ
|
2019-07-07 22:30:15 +02:00
|
|
|
p.expected_type = typ
|
2019-09-14 22:48:30 +02:00
|
|
|
assign_pos := p.cgen.cur_line.len
|
2019-09-27 17:55:30 +02:00
|
|
|
is_cao := p.tok != .assign
|
2019-06-22 20:20:28 +02:00
|
|
|
p.assign_statement(v, fn_ph, is_indexer && (is_map || is_arr))
|
2019-09-14 22:48:30 +02:00
|
|
|
// `m[key] = val`
|
2019-06-22 20:20:28 +02:00
|
|
|
if is_indexer && (is_map || is_arr) {
|
2019-09-14 22:48:30 +02:00
|
|
|
p.gen_array_set(typ, is_ptr, is_map, fn_ph, assign_pos, is_cao)
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
return typ
|
|
|
|
}
|
2019-06-30 22:03:17 +02:00
|
|
|
// else if p.pref.is_verbose && p.assigned_var != '' {
|
2019-06-22 20:20:28 +02:00
|
|
|
// p.error('didnt assign')
|
|
|
|
// }
|
2019-10-26 20:58:26 +02:00
|
|
|
// `m[key]`. no =, just a getter
|
2019-08-05 09:49:52 +02:00
|
|
|
else if (is_map || is_arr || (is_str && !p.builtin_mod)) && is_indexer {
|
2019-12-21 23:44:16 +01:00
|
|
|
typ = parse_pointer(typ)
|
2019-10-26 20:58:26 +02:00
|
|
|
p.index_get(typ, fn_ph, IndexConfig{
|
2019-09-14 22:48:30 +02:00
|
|
|
is_arr: is_arr
|
|
|
|
is_map: is_map
|
|
|
|
is_ptr: is_ptr
|
|
|
|
is_str: is_str
|
2019-10-26 20:58:26 +02:00
|
|
|
is_slice: is_slice
|
2019-09-14 22:48:30 +02:00
|
|
|
})
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
// else if is_arr && is_indexer{}
|
|
|
|
return typ
|
|
|
|
}
|
|
|
|
|
2019-10-26 20:58:26 +02:00
|
|
|
struct IndexConfig {
|
2019-12-18 02:34:50 +01:00
|
|
|
is_map bool
|
|
|
|
is_str bool
|
|
|
|
is_ptr bool
|
|
|
|
is_arr bool
|
|
|
|
is_arr0 bool
|
2019-10-26 20:58:26 +02:00
|
|
|
is_slice bool
|
2019-09-14 22:48:30 +02:00
|
|
|
}
|
|
|
|
|
2019-10-20 09:19:37 +02:00
|
|
|
// for debugging only
|
|
|
|
fn (p &Parser) fileis(s string) bool {
|
2019-12-23 11:09:22 +01:00
|
|
|
return filepath.filename(p.scanner.file_path).contains(s)
|
2019-10-20 09:19:37 +02:00
|
|
|
}
|
|
|
|
|
2019-09-21 17:21:45 +02:00
|
|
|
// in and dot have higher priority than `!`
|
|
|
|
fn (p mut Parser) indot_expr() string {
|
|
|
|
ph := p.cgen.add_placeholder()
|
|
|
|
mut typ := p.term()
|
2019-12-18 02:34:50 +01:00
|
|
|
if p.tok == .dot {
|
2019-09-21 17:21:45 +02:00
|
|
|
for p.tok == .dot {
|
|
|
|
typ = p.dot(typ, ph)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// `a in [1, 2, 3]`
|
|
|
|
// `key in map`
|
|
|
|
if p.tok == .key_in {
|
2019-11-10 01:08:53 +01:00
|
|
|
p.fspace()
|
2019-09-21 17:21:45 +02:00
|
|
|
p.check(.key_in)
|
2019-12-04 20:05:36 +01:00
|
|
|
p.expected_type = typ // this allows short enum syntax `foo in [.val1, .val2, .val3]`
|
2019-10-06 03:27:29 +02:00
|
|
|
if p.tok == .lsbr {
|
|
|
|
// a in [1,2,3] optimization => `a == 1 || a == 2 || a == 3`
|
|
|
|
// avoids an allocation
|
2019-12-18 05:52:14 +01:00
|
|
|
p.fspace()
|
2019-10-06 03:27:29 +02:00
|
|
|
p.in_optimization(typ, ph)
|
|
|
|
return 'bool'
|
2019-10-12 21:01:50 +02:00
|
|
|
}
|
2019-11-10 01:08:53 +01:00
|
|
|
p.fspace()
|
2019-09-21 17:21:45 +02:00
|
|
|
p.gen('), ')
|
|
|
|
arr_typ := p.expression()
|
|
|
|
is_map := arr_typ.starts_with('map_')
|
2019-12-18 02:34:50 +01:00
|
|
|
is_arr := arr_typ.starts_with('array_')
|
2019-12-04 20:05:36 +01:00
|
|
|
if !is_arr && !is_map {
|
2019-09-21 17:21:45 +02:00
|
|
|
p.error('`in` requires an array/map')
|
|
|
|
}
|
2019-12-21 23:44:16 +01:00
|
|
|
if is_arr && parse_pointer(arr_typ[6..]) != typ {
|
2019-12-04 20:05:36 +01:00
|
|
|
p.error('bad element type: `$typ` in `$arr_typ`')
|
|
|
|
}
|
|
|
|
if is_map && typ != 'string' {
|
|
|
|
p.error('bad element type: expecting `string`')
|
|
|
|
}
|
2019-09-21 17:21:45 +02:00
|
|
|
T := p.table.find_type(arr_typ)
|
|
|
|
if !is_map && !T.has_method('contains') {
|
|
|
|
p.error('$arr_typ has no method `contains`')
|
|
|
|
}
|
|
|
|
// `typ` is element's type
|
|
|
|
if is_map {
|
2019-11-06 19:20:37 +01:00
|
|
|
p.cgen.set_placeholder(ph, '(_IN_MAP( (')
|
2019-09-21 17:21:45 +02:00
|
|
|
}
|
|
|
|
else {
|
2019-11-06 19:20:37 +01:00
|
|
|
p.cgen.set_placeholder(ph, '(_IN($typ, (')
|
2019-09-21 17:21:45 +02:00
|
|
|
}
|
2019-11-06 19:20:37 +01:00
|
|
|
p.gen('))')
|
2019-09-21 17:21:45 +02:00
|
|
|
return 'bool'
|
|
|
|
}
|
|
|
|
return typ
|
|
|
|
}
|
|
|
|
|
2019-11-08 04:03:06 +01:00
|
|
|
// { user | name: 'new name' }
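// e.g. (sketch, type and field names are hypothetical) `{ user | name: 'new name' }`
// generates C roughly like `(User){ .name = tos3("new name"), .age = user.age, ... }`,
// i.e. listed fields are overridden and the rest are copied from `user`.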
|
2019-06-22 20:20:28 +02:00
|
|
|
fn (p mut Parser) assoc() string {
|
|
|
|
// println('assoc()')
|
|
|
|
p.next()
|
|
|
|
name := p.check_name()
|
2019-12-18 05:52:14 +01:00
|
|
|
p.fspace()
|
|
|
|
var := p.find_var_or_const(name) or {
|
2019-06-22 20:20:28 +02:00
|
|
|
p.error('unknown variable `$name`')
|
2019-09-17 21:41:58 +02:00
|
|
|
exit(1)
|
2019-10-12 21:01:50 +02:00
|
|
|
}
|
2019-11-26 04:41:56 +01:00
|
|
|
if !var.is_const {
|
|
|
|
p.mark_var_used(var)
|
|
|
|
}
|
2019-07-07 22:30:15 +02:00
|
|
|
p.check(.pipe)
|
2019-12-18 05:52:14 +01:00
|
|
|
p.fgen_nl()
|
2019-06-22 20:20:28 +02:00
|
|
|
p.gen('($var.typ){')
|
2019-12-12 18:32:55 +01:00
|
|
|
typ := p.table.find_type(var.typ)
|
2019-12-18 02:34:50 +01:00
|
|
|
mut fields := []string // track the fields user is setting, the rest will be copied from the old object
|
2019-07-07 22:30:15 +02:00
|
|
|
for p.tok != .rcbr {
|
2019-06-22 20:20:28 +02:00
|
|
|
field := p.check_name()
|
2019-12-18 02:34:50 +01:00
|
|
|
// if !typ.has_field(field) {
|
2019-12-18 05:52:14 +01:00
|
|
|
f := typ.find_field(field) or {
|
2019-12-12 18:32:55 +01:00
|
|
|
p.error('`$typ.name` has no field `$field`')
|
|
|
|
exit(1)
|
|
|
|
}
|
2019-06-22 20:20:28 +02:00
|
|
|
fields << field
|
|
|
|
p.gen('.$field = ')
|
2019-07-07 22:30:15 +02:00
|
|
|
p.check(.colon)
|
2019-12-12 18:32:55 +01:00
|
|
|
p.check_types(p.bool_expression(), f.typ)
|
2019-06-22 20:20:28 +02:00
|
|
|
p.gen(',')
|
2019-07-07 22:30:15 +02:00
|
|
|
if p.tok != .rcbr {
|
|
|
|
p.check(.comma)
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-11-18 11:10:31 +01:00
|
|
|
p.fgen_nl()
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
|
|
|
// Copy the rest of the fields
|
2019-12-12 18:32:55 +01:00
|
|
|
for ffield in typ.fields {
|
2019-06-22 20:20:28 +02:00
|
|
|
f := ffield.name
|
|
|
|
if f in fields {
|
|
|
|
continue
|
|
|
|
}
|
2019-11-26 04:41:56 +01:00
|
|
|
p.gen('.$f = ${var.name}.$f,')
|
2019-06-22 20:20:28 +02:00
|
|
|
}
|
2019-07-07 22:30:15 +02:00
|
|
|
p.check(.rcbr)
|
2019-06-22 20:20:28 +02:00
|
|
|
p.gen('}')
|
|
|
|
return var.typ
|
|
|
|
}
|
|
|
|
|
|
|
|
fn (p mut Parser) char_expr() {
	p.gen("\'$p.lit\'")
	p.next()
}

fn format_str(_str string) string {
	// TODO don't call replace 3 times for every string, do this in scanner.v
	mut str := _str.replace('"', '\\"')
	str = str.replace('\r\n', '\\n')
	str = str.replace('\n', '\\n')
	return str
}

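// NOTE (illustrative, not in the original source): format_str() escapes text so it
// can be embedded in a generated C string literal: every `"` becomes `\"` and every
// real newline (or `\r\n`) becomes the two characters `\n`.
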
// m := map[string]int{}
// m := { 'one': 1 }
fn (p mut Parser) map_init() string {
	// m := { 'one': 1, 'two': 2 }
	mut keys_gen := '' // (string[]){tos2("one"), tos2("two")}
	mut vals_gen := '' // (int[]){1, 2}
	mut val_type := '' // 'int'
	if p.tok == .lcbr {
		p.check(.lcbr)
		mut i := 0
		for {
			key := p.lit
			keys_gen += 'tos3("$key"), '
			p.check(.str)
			p.check(.colon)
			p.fspace()
			t,val_expr := p.tmp_expr()
			if i == 0 {
				val_type = t
			}
			i++
			if val_type != t {
				if !p.check_types_no_throw(val_type, t) {
					p.error('bad map element type `$val_type` instead of `$t`')
				}
			}
			vals_gen += '$val_expr, '
			if p.tok == .rcbr {
				p.fgen_nl()
				p.check(.rcbr)
				break
			}
			if p.tok == .comma {
				p.check(.comma)
			}
			p.fgen_nl()
		}
		p.gen('new_map_init($i, sizeof($val_type), ' + '(string[$i]){ $keys_gen }, ($val_type [$i]){ $vals_gen } )')
		typ := 'map_${stringify_pointer(val_type)}'
		p.register_map(typ)
		return typ
	}
	p.next()
	p.check(.lsbr)
	key_type := p.check_name()
	if key_type != 'string' {
		p.error('only string key maps allowed for now')
	}
	p.check(.rsbr)
	val_type = p.get_type() // / p.check_name()
	// if !p.table.known_type(val_type) {
	// p.error('map init unknown type "$val_type"')
	// }
	typ := 'map_${stringify_pointer(val_type)}'
	p.register_map(typ)
	p.gen('new_map(1, sizeof($val_type))')
	if p.tok == .lcbr {
		p.check(.lcbr)
		p.check(.rcbr)
		println('warning: $p.file_name:$p.scanner.line_nr ' + 'initializing maps no longer requires `{}`')
	}
	return typ
}

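// NOTE (illustrative sketch, assumed example): `m := { 'one': 1, 'two': 2 }` takes the
// first branch above and generates roughly
//   new_map_init(2, sizeof(int), (string[2]){ tos3("one"), tos3("two"),  }, (int [2]){ 1, 2,  } )
// with the V type `map_int`, while `m := map[string]int{}` takes the second branch and
// generates `new_map(1, sizeof(int))`.
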
// `nums := [1, 2, 3]`
fn (p mut Parser) array_init() string {
	expected_array_type := p.expected_type
	// if p.fileis('interface_') {
	// println('a exp='+p.expected_type)
	// }
	p.is_alloc = true
	p.check(.lsbr)
	mut is_integer := p.tok == .number // for `[10]int`
	// fixed length arrays with a const len: `nums := [N]int`, same as `[10]int` basically
	mut is_const_len := false
	if p.tok == .name && !p.inside_const {
		const_name := p.prepend_mod(p.lit)
		if p.table.known_const(const_name) {
			c := p.table.find_const(const_name) or {
				p.error('unknown const `$const_name`')
				exit(1)
			}
			if c.typ == 'int' && p.peek() == .rsbr {
				// && !p.inside_const {
				is_integer = true
				is_const_len = true
			}
			else {
				p.error('bad fixed size array const `$p.lit`')
			}
		}
	}
	lit := p.lit
	mut typ := ''
	new_arr_ph := p.cgen.add_placeholder()
	mut i := 0
	for p.tok != .rsbr {
		if expected_array_type.starts_with('array_') {
			p.expected_type = expected_array_type[6..]
		}
		val_typ := p.bool_expression()
		// Get the type of the first expression
		if i == 0 {
			typ = val_typ
			// fixed width array initialization? (`arr := [20]byte`)
			if is_integer && p.tok == .rsbr && p.peek() == .name && p.cur_tok().line_nr == p.peek_token().line_nr {
				// there is no space between `[10]` and `byte`
				// if p.cur_tok().col + p.peek_token().lit.len == p.peek_token().col {
				if p.cur_tok().pos + p.peek_token().lit.len == p.peek_token().pos {
					p.check(.rsbr)
					// `[10]C.kevent` needs `struct `
					is_c := p.tok == .name && p.lit == 'C'
					if is_c {
						p.cgen.insert_before('struct ')
					}
					array_elem_typ := p.get_type()
					if !p.table.known_type(array_elem_typ) {
						p.error('bad type `$array_elem_typ`')
					}
					p.cgen.resetln('')
					// p.gen('{0}')
					p.is_alloc = false
					if is_const_len {
						return '[${mod_gen_name(p.mod)}__$lit]$array_elem_typ'
					}
					return '[$lit]$array_elem_typ'
				}
				else {
					p.check(.rsbr)
					typ = p.get_type()
					p.error('no space allowed between [$lit] and $typ')
				}
			}
		}
		if val_typ != typ {
			if !p.check_types_no_throw(val_typ, typ) {
				mut ok := false
				// `foo([cat, dog])` where foo is `fn foo([]Animal) {`
				// `expected_type` is `[]Animaler`
				if expected_array_type.ends_with('er') {
					if p.satisfies_interface(expected_array_type, typ, false) {
						ok = true
					}
				}
				if !ok {
					p.error('bad array element type `$val_typ` instead of `$typ`')
				}
			}
		}
		if p.tok != .rsbr && p.tok != .semicolon {
			p.gen(', ')
			line_nr := p.tok
			p.check(.comma)
			p.fspace_or_newline()
		}
		i++
		// Repeat (a = [0;5] )
		if i == 1 && p.tok == .semicolon {
			p.error('`[0 ; len]` syntax was removed. Use `[0].repeat(len)` instead')
		}
	}
	p.check(.rsbr)
	// type after `]`? (e.g. "[]string")
	exp_array := p.expected_type.starts_with('array_')
	if p.tok != .name && p.tok != .mul && p.tok != .lsbr && p.tok != .amp && i == 0 && !exp_array {
		p.error('specify array type: `[]typ` instead of `[]`')
	}
	if i == 0 && (p.tok == .name || p.tok == .mul || p.tok == .amp) && p.tokens[p.token_idx - 2].line_nr == p.tokens[p.token_idx - 1].line_nr {
		// TODO
		// vals.len == 0 {
		if exp_array {
			type_expected := p.expected_type[6..].replace('ptr_', '&')
			p.warn('no need to specify the full array type here, use `[]` instead of `[]$type_expected`')
		}
		typ = p.get_type()
	}
	else if exp_array && i == 0 {
		// allow `known_array = []`
		typ = p.expected_type[6..]
	}
	// ! after array => no malloc and no copy
	no_alloc := p.tok == .not
	if no_alloc {
		p.next()
	}
	// [1,2,3]!! => [3]int{1,2,3}
	is_fixed_size := p.tok == .not
	if is_fixed_size {
		p.next()
		p.gen(' }')
		if !p.first_pass() {
			// If we are defining a const array, we don't need to specify the type:
			// `a = {1,2,3}`, not `a = (int[]) {1,2,3}`
			if p.inside_const {
				p.cgen.set_placeholder(new_arr_ph, '{')
			}
			else {
				p.cgen.set_placeholder(new_arr_ph, '($typ[]) {')
			}
		}
		return '[$i]$typ'
	}
	// if ptr {
	// typ += '_ptr"
	// }
	real := parse_pointer(typ)
	p.gen_array_init(real, no_alloc, new_arr_ph, i)
	typ = 'array_${stringify_pointer(typ)}'
	p.register_array(typ)
	return typ
}

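// NOTE (illustrative summary, not part of the original source) of the literal forms
// handled by array_init():
//   `nums := [1, 2, 3]`    => heap allocated `array_int`
//   `nums := [1, 2, 3]!`   => `!` after the array: no malloc and no copy
//   `nums := [1, 2, 3]!!`  => fixed size array, returned as `[3]int`
//   `buf := [10]byte`      => fixed width array (no space allowed after `[10]`)
//   `buf := [N]byte`       => fixed width array with a const length
//   `known_array = []`     => allowed only when the expected type is already an array
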
// `f32(3)`
// tok is `f32` or `)` if `(*int)(ptr)`
fn (p mut Parser) get_tmp() string {
	p.tmp_cnt++
	return 'tmp$p.tmp_cnt'
}

fn (p mut Parser) get_tmp_counter() int {
	p.tmp_cnt++
	return p.tmp_cnt
}

fn (p mut Parser) assert_statement() {
	if p.first_pass() {
		return
	}
	p.check(.key_assert)
	p.fspace()
	tmp := p.get_tmp()
	p.gen('bool $tmp = ')
	p.check_types(p.bool_expression(), 'bool')
	nline := p.scanner.line_nr
	// TODO print "expected: got" for failed tests
	filename := cescaped_path(p.file_path)
	cfname := p.cur_fn.name.replace('main__', '')
	sourceline := p.scanner.line(nline - 1).replace('"', "\'")
	if !p.pref.is_test {
		// an assert used in a normal v program. no fancy formatting
		p.genln(';\n
/// sline: "$sourceline"
if (!$tmp) {
	g_test_fails++;
	eprintln(tos3("${filename}:${p.scanner.line_nr}: FAILED: ${cfname}()"));
	eprintln(tos3("Source: $sourceline"));
	v_panic(tos3("An assertion failed."));
	exit(1);
} else {
	g_test_oks++;
}
')
		return
	}
	p.genln(';\n
if (!$tmp) {
	g_test_fails++;
	main__cb_assertion_failed(
		tos3("$filename"),
		$p.scanner.line_nr,
		tos3("$sourceline"),
		tos3("${p.cur_fn.name}()")
	);
	exit(1);
	// TODO
	// Maybe print all vars in a test function if it fails?
} else {
	g_test_oks++;
	main__cb_assertion_ok(
		tos3("$filename"),
		$p.scanner.line_nr,
		tos3("$sourceline"),
		tos3("${p.cur_fn.name}()")
	);
}

')
}

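// NOTE (illustrative, not part of the original source): `assert x == 5` in a regular
// program is compiled to `bool tmp1 = x == 5;` followed by the first C template above,
// which prints `file.v:N: FAILED: fn()` plus the source line and panics; in test mode
// (p.pref.is_test) the second template reports through main__cb_assertion_failed()
// and main__cb_assertion_ok() instead.
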
fn (p mut Parser) return_st() {
	p.check(.key_return)
	p.fspace()
	deferred_text := p.get_deferred_text()
	fn_returns := p.cur_fn.typ != 'void'
	if fn_returns {
		if p.tok == .rcbr {
			p.error('`$p.cur_fn.name` needs to return `$p.cur_fn.typ`')
		}
		ph := p.cgen.add_placeholder()
		p.inside_return_expr = true
		is_none := p.tok == .key_none
		p.expected_type = p.cur_fn.typ
		mut expr_type := p.bool_expression()
		// println('$p.cur_fn.name returns type $expr_type, should be $p.cur_fn.typ')
		mut types := []string
		mut mr_values := [p.cgen.cur_line[ph..].trim_space()]
		types << expr_type
		for p.tok == .comma {
			p.check(.comma)
			typ,expr := p.tmp_expr()
			types << typ
			mr_values << expr.trim_space()
		}
		mut cur_fn_typ_chk := p.cur_fn.typ
		// multiple returns
		if types.len > 1 {
			expr_type = types.join(',')
			cur_fn_typ_chk = cur_fn_typ_chk.replace('_V_MulRet_', '').replace('_PTR_', '*').replace('_V_', ',')
			mut ret_fields := ''
			for ret_val_idx, ret_val in mr_values {
				if ret_val_idx > 0 {
					ret_fields += ','
				}
				ret_fields += '.var_$ret_val_idx=${ret_val}'
			}
			p.cgen.resetln('($p.cur_fn.typ){$ret_fields}')
		}
		p.inside_return_expr = false
		// Automatically wrap an object inside an option if the function
		// returns an option:
		// `return val` => `return opt_ok(val)`
		if p.cur_fn.typ.ends_with(stringify_pointer(expr_type)) && !is_none && p.cur_fn.typ.starts_with('Option_') {
			tmp := p.get_tmp()
			ret := p.cgen.cur_line[ph..]
			typ := parse_pointer(expr_type.replace('Option_', ''))
			p.cgen.resetln('$expr_type $tmp = OPTION_CAST($expr_type)($ret);')
			p.genln(deferred_text)
			p.gen('return opt_ok(&$tmp, sizeof($typ))')
		}
		else {
			ret := p.cgen.cur_line[ph..]
			if deferred_text == '' || expr_type == 'void*' {
				// no defer{} necessary?
				if expr_type == '${p.cur_fn.typ}*' {
					p.cgen.resetln('return *$ret')
				}
				else {
					p.cgen.resetln('return $ret')
				}
			}
			else {
				tmp := p.get_tmp()
				p.cgen.resetln('$expr_type $tmp = $ret;\n')
				p.genln(deferred_text)
				p.genln('return $tmp;')
			}
		}
		p.check_types(expr_type, cur_fn_typ_chk)
	}
	else {
		// Don't allow `return val` in functions that don't return anything
		if p.tok == .name || p.tok == .number || p.tok == .str {
			p.error_with_token_index('function `$p.cur_fn.name` should not return a value', p.cur_fn.fn_name_token_idx)
		}
		p.genln(deferred_text)
		if p.cur_fn.name == 'main' {
			p.gen('return 0')
		}
		else {
			p.gen('return')
		}
	}
	// p.fgenln('//ret')
	p.returns = true
}

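// NOTE (illustrative sketch, assumed example): inside `fn find() ?int`, `return 7` is
// rewritten above into `int tmp1 = OPTION_CAST(int)(7);` followed by
// `return opt_ok(&tmp1, sizeof(int))`, and a multiple return `return a, b` becomes a
// struct literal of the generated multi-return type, roughly
// `(<mulret typ>){ .var_0 = a, .var_1 = b }`.
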
fn (p &Parser) get_deferred_text() string {
	// @emily33901: Scoped defer
	// Check all of our defer texts to see if there is one at a higher scope level
	// The one for our current scope would be the last so any before that need to be
	// added.
	mut deferred_text := ''
	for text in p.cur_fn.defer_text {
		if text != '' {
			// In reverse order
			deferred_text = text + deferred_text
		}
	}
	return deferred_text
}

fn prepend_mod(mod, name string) string {
	return '${mod}__${name}'
}

fn (p &Parser) prepend_mod(name string) string {
	return prepend_mod(mod_gen_name(p.mod), name)
}

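// NOTE (illustrative examples, assumed): prepend_mod('os', 'read_file') returns
// 'os__read_file'; the method form first runs the module through mod_gen_name(),
// so submodule names containing dots are mangled before the '__' join.
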
fn (p mut Parser) go_statement() {
	p.check(.key_go)
	p.fspace()
	mut gotoken_idx := p.cur_tok_index()
	// TODO copypasta of name_expr() ?
	if p.peek() == .dot {
		// Method
		var_name := p.lit
		v := p.find_var(var_name) or {
			return
		}
		p.mark_var_used(v)
		gotoken_idx = p.cur_tok_index()
		p.next()
		p.check(.dot)
		typ := p.table.find_type(v.typ)
		method := p.table.find_method(typ, p.lit) or {
			p.error_with_token_index('go method missing $var_name', gotoken_idx)
			return
		}
		p.async_fn_call(method, 0, var_name, v.typ)
	}
	else {
		f_name := p.lit
		// Normal function
		f := p.table.find_fn(p.prepend_mod(f_name)) or {
			println(p.table.debug_fns())
			p.error_with_token_index('cannot find function $f_name', gotoken_idx)
			return
		}
		if f.name == 'println' || f.name == 'print' {
			p.error_with_token_index('`go` cannot be used with `println`', gotoken_idx)
		}
		p.async_fn_call(f, 0, '', '')
	}
}

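// NOTE (illustrative, not part of the original source): `go worker(id)` takes the
// plain-function branch above and wraps the call via async_fn_call(); `go user.greet()`
// takes the method branch, marking `user` as used and resolving `greet` on its type.
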
/*
fn (p mut Parser) register_var(v Var) {
	if v.line_nr == 0 {
		spos := p.scanner.get_scanner_pos()
		p.register_var({ v | scanner_pos: spos, line_nr: spos.line_nr })
	} else {
		p.register_var(v)
	}
}
*/

// user := jsdecode(User, user_json_string)
fn (p mut Parser) js_decode() string {
	p.check(.name) // json
	p.check(.dot)
	op := p.check_name()
	op_token_idx := p.cur_tok_index()
	if op == 'decode' {
		// User tmp2; tmp2.foo = 0; tmp2.bar = 0; // I forgot to zero vals before => huge bug
		// Option_User tmp3 = jsdecode_User(json_parse(s), &tmp2); ;
		// if (!tmp3 .ok) {
		// return
		// }
		// User u = *(User*) tmp3 . data; // TODO remove this (generated in or {} block handler)
		p.check(.lpar)
		typ := p.get_type()
		p.check(.comma)
		styp,expr := p.tmp_expr()
		p.check_types(styp, 'string')
		p.check(.rpar)
		tmp := p.get_tmp()
		cjson_tmp := p.get_tmp()
		mut decl := '$typ $tmp; '
		// Init the struct
		T := p.table.find_type(typ)
		for field in T.fields {
			def_val := type_default(field.typ)
			if def_val != '' {
				decl += '${tmp}.$field.name = OPTION_CAST($field.typ) $def_val;\n'
			}
		}
		p.gen_json_for_type(T)
		decl += 'cJSON* $cjson_tmp = json__json_parse($expr);'
		p.cgen.insert_before(decl)
		// p.gen('jsdecode_$typ(json_parse($expr), &$tmp);')
		p.gen('json__jsdecode_${typ}($cjson_tmp, &$tmp); cJSON_Delete($cjson_tmp);')
		opt_type := 'Option_$typ'
		p.cgen.typedefs << 'typedef Option $opt_type;'
		p.table.register_builtin(opt_type)
		return opt_type
	}
	else if op == 'encode' {
		p.check(.lpar)
		typ,expr := p.tmp_expr()
		T := p.table.find_type(typ)
		p.gen_json_for_type(T)
		p.check(.rpar)
		p.gen('json__json_print(json__jsencode_${typ}($expr))')
		return 'string'
	}
	else {
		p.error_with_token_index('bad json op "$op"', op_token_idx)
	}
	return ''
}

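// NOTE (illustrative sketch, assumed example): `u := json.decode(User, s)` inserts a
// declaration block before the expression (a default-initialised `User tmp1;` plus
// `cJSON* tmp2 = json__json_parse(s);`), then generates
// `json__jsdecode_User(tmp2, &tmp1); cJSON_Delete(tmp2);` and types the whole
// expression as `Option_User`, so the caller unwraps it with `or { }`.
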
fn (p mut Parser) attribute() {
	p.check(.lsbr)
	if p.tok == .key_if {
		// [if vfmt]
		p.next()
		p.fspace()
		p.attr = 'if ' + p.check_name()
	}
	else {
		p.attr = p.check_name()
	}
	attr_token_idx := p.cur_tok_index()
	if p.tok == .colon {
		p.check(.colon)
		p.attr = p.attr + ':' + p.check_name()
	}
	p.check(.rsbr)
	p.fgen_nl()
	if p.tok == .key_fn || (p.tok == .key_pub && p.peek() == .key_fn) {
		p.fn_decl()
		p.attr = ''
		return
	}
	else if p.tok == .key_struct {
		p.struct_decl([])
		p.attr = ''
		return
	}
	else if p.tok == .key_enum {
		p.enum_decl(false)
		p.attr = ''
		return
	}
	p.error_with_token_index('bad attribute usage', attr_token_idx)
}

fn (p mut Parser) defer_st() {
	p.check(.key_defer)
	p.fspace()
	p.check(.lcbr)
	pos := p.cgen.lines.len
	// Save everything inside the defer block to `defer_text`.
	// It will be inserted before every `return`
	// Emily: TODO: all variables that are used in this defer statement need to be evaluated when the block
	// is defined otherwise they could change over the course of the function
	// (make temps out of them)
	p.genln('{')
	p.statements()
	p.cur_fn.defer_text.last() = p.cgen.lines[pos..].join('\n') + p.cur_fn.defer_text.last()
	// Rollback p.cgen.lines
	p.cgen.lines = p.cgen.lines[..pos]
	p.cgen.resetln('')
}

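// NOTE (illustrative, not part of the original source): for `defer { f.close() }` the
// generated `{ ... }` block is cut back out of the C output and appended to the
// current scope's defer_text, so get_deferred_text() / return_st() can paste it in
// front of every `return` of the surrounding function.
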
fn (p mut Parser) check_and_register_used_imported_type(typ_name string) {
	us_idx := typ_name.index('__') or {
		return
	}
	mut arg_mod := typ_name[..us_idx]
	if arg_mod.contains('_dot_') {
		arg_mod = arg_mod.all_after('_dot_')
	}
	if p.import_table.known_alias(arg_mod) {
		p.import_table.register_used_import(arg_mod)
	}
}

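// NOTE (illustrative examples, assumed): a type name like 'os__File' marks the 'os'
// import as used; a submodule type such as 'encoding_dot_csv__Reader' is reduced to
// 'csv' via all_after('_dot_') before the alias lookup.
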
fn (p mut Parser) check_unused_imports() {
	// Don't run in the generated V file with `.str()`
	if p.is_vgen {
		return
	}
	mut output := ''
	for alias, mod in p.import_table.imports {
		if !p.import_table.is_used_import(alias) {
			mod_alias := if alias == mod { alias } else { '$alias ($mod)' }
			output += '\n * $mod_alias'
		}
	}
	if output == '' {
		return
	}
	// the imports are usually at the start of the file
	//p.production_error_with_token_index('the following imports were never used: $output', 0)
	if p.pref.is_verbose {
		eprintln('Used imports table: ${p.import_table.used_imports.str()}')
	}
	p.warn('the following imports were never used: $output')
}

fn (p &Parser) is_expr_fn_call(start_tok_idx int) (bool,string) {
	mut expr := p.tokens[start_tok_idx - 1].str()
	mut is_fn_call := p.tokens[start_tok_idx].tok == .lpar
	if !is_fn_call {
		mut i := start_tok_idx
		for (p.tokens[i].tok == .dot || p.tokens[i].tok == .name) && p.tokens[i].lit != '_' && i < p.tokens.len {
			expr += p.tokens[i].str()
			i++
		}
		is_fn_call = p.tokens[i].tok == .lpar
	}
	return is_fn_call,expr
}

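// NOTE (illustrative, not part of the original source): for an expression such as
// `user.register()` the loop above keeps appending the dot and name tokens to `expr`
// until it reaches `(`, at which point is_fn_call becomes true; a bare variable like
// `user` stops earlier and is reported as not a call.
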
[inline]
// skip any block of code in curly braces `{}`
fn (p mut Parser) skip_block(inside_first_lcbr bool) {
	mut cbr_depth := if inside_first_lcbr { 1 } else { 0 }
	for {
		if p.tok == .lcbr {
			cbr_depth++
		}
		if p.tok == .rcbr {
			cbr_depth--
			if cbr_depth == 0 {
				break
			}
		}
		p.next()
	}
	p.check(.rcbr)
}

fn todo_remove() {
	//x64.new_gen('f')
}