// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module parser

import v.scanner
import v.ast
import v.token
import v.pref
import v.util
import v.vet
import v.errors
import os

const (
	builtin_functions = ['print', 'println', 'eprint', 'eprintln', 'isnil', 'panic', 'exit']
)

pub struct Parser {
	pref &pref.Preferences
mut:
	file_base         string // "hello.v"
	file_name         string // "/home/user/hello.v"
	file_name_dir     string // "/home/user"
	file_backend_mode ast.Language // .c for .c.v|.c.vv|.c.vsh files; .js for .js.v files, .amd64/.rv32/other arches for .amd64.v/.rv32.v/etc. files, .v otherwise.
	scanner           &scanner.Scanner
	comments_mode     scanner.CommentsMode = .skip_comments
	// see comment in parse_file
	tok               token.Token
	prev_tok          token.Token
	peek_tok          token.Token
	table             &ast.Table
	language          ast.Language
	inside_test_file  bool // when inside _test.v or _test.vv file
	inside_if         bool
	inside_if_expr    bool
	inside_ct_if_expr bool
	inside_or_expr    bool
	inside_for        bool
	inside_fn         bool // true even with implicit main
	inside_unsafe_fn  bool
	inside_str_interp bool
	or_is_handled     bool   // ignore `or` in this expression
	builtin_mod       bool   // are we in the `builtin` module?
	mod               string // current module name
	is_manualfree     bool   // true when `[manualfree] module abc`, makes *all* fns in the current .v file opt out of autofree
	attrs             []ast.Attr // attributes before next decl stmt
	expr_mod          string // for constructing full type names in parse_type()
	scope             &ast.Scope
	global_scope      &ast.Scope
	imports           map[string]string // alias => mod_name
	ast_imports       []ast.Import      // mod_names
	used_imports      []string // alias
	auto_imports      []string // imports, the user does not need to specify
	imported_symbols  map[string]string
	is_amp            bool // for generating the right code for `&Foo{}`
	returns           bool
	inside_match      bool // to separate `match A { }` from `Struct{}`
	inside_select     bool // to allow `ch <- Struct{} {` inside `select`
	inside_match_case bool // to separate `match_expr { }` from `Struct{}`
	inside_match_body bool // to fix eval not used TODO
	inside_unsafe     bool
	is_stmt_ident     bool // true while the beginning of a statement is an ident/selector
	expecting_type    bool // `is Type`, expecting type
	errors            []errors.Error
	warnings          []errors.Warning
	notices           []errors.Notice
	vet_errors        []vet.Error
	cur_fn_name       string
	label_names       []string
	in_generic_params bool // indicates if parsing between `<` and `>` of a method/function
	name_error        bool // indicates if the token is not a name or the name is on another line
	n_asm             int  // controls assembly labels
	inside_asm_template bool
	inside_asm        bool
	global_labels     []string
}

// for tests
pub fn parse_stmt(text string, table &ast.Table, scope &ast.Scope) ast.Stmt {
	mut p := Parser{
		scanner: scanner.new_scanner(text, .skip_comments, &pref.Preferences{})
		inside_test_file: true
		table: table
		pref: &pref.Preferences{}
		scope: scope
		global_scope: &ast.Scope{
			start_pos: 0
			parent: 0
		}
	}
	p.init_parse_fns()
	util.timing_start('PARSE stmt')
	defer {
		util.timing_measure_cumulative('PARSE stmt')
	}
	p.read_first_token()
	return p.stmt(false)
}

pub fn parse_comptime(text string, table &ast.Table, pref &pref.Preferences, scope &ast.Scope, global_scope &ast.Scope) ast.File {
	mut p := Parser{
		scanner: scanner.new_scanner(text, .skip_comments, pref)
		table: table
		pref: pref
		scope: scope
		errors: []errors.Error{}
		warnings: []errors.Warning{}
		global_scope: global_scope
	}
	return p.parse()
}

pub fn parse_text(text string, path string, table &ast.Table, comments_mode scanner.CommentsMode, pref &pref.Preferences, global_scope &ast.Scope) ast.File {
	mut p := Parser{
		scanner: scanner.new_scanner(text, comments_mode, pref)
		comments_mode: comments_mode
		table: table
		pref: pref
		scope: &ast.Scope{
			start_pos: 0
			parent: global_scope
		}
		errors: []errors.Error{}
		warnings: []errors.Warning{}
		global_scope: global_scope
	}
	p.set_path(path)
	return p.parse()
}

[unsafe]
pub fn (mut p Parser) free() {
	unsafe {
		p.scanner.free()
	}
}

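// set_path stores the parsed file's path and derives file_base, file_name_dir and the backend mode
// from its suffix: `.c.v` selects the C backend, `.js.v` the JS backend, `.amd64.v`/`.rv32.v`/etc.
// an architecture specific one, and plain `.v` files keep the default V mode.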
pub fn (mut p Parser) set_path(path string) {
	p.file_name = path
	p.file_base = os.base(path)
	p.file_name_dir = os.dir(path)
	if p.file_base.ends_with('_test.v') || p.file_base.ends_with('_test.vv') {
		p.inside_test_file = true
	}
	before_dot_v := path.before('.v') // also works for .vv and .vsh
	language := before_dot_v.all_after_last('.')
	language_with_underscore := before_dot_v.all_after_last('_')
	if language == before_dot_v && language_with_underscore == before_dot_v {
		p.file_backend_mode = .v
		return
	}
	actual_language := if language == before_dot_v { language_with_underscore } else { language }
	match actual_language {
		'c' {
			p.file_backend_mode = .c
		}
		'js' {
			p.file_backend_mode = .js
		}
		else {
			arch := pref.arch_from_string(actual_language) or { pref.Arch._auto }
			p.file_backend_mode = ast.pref_arch_to_table_language(arch)
			if arch == ._auto {
				p.file_backend_mode = .v
			}
		}
	}
}

pub fn parse_file(path string, table &ast.Table, comments_mode scanner.CommentsMode, pref &pref.Preferences, global_scope &ast.Scope) ast.File {
	// NB: when comments_mode == .toplevel_comments,
	// the parser gives feedback to the scanner about toplevel statements, so that the scanner can skip
	// all the tricky inner comments. This is needed because we do not have a good general solution
	// for handling them, and should be removed when we do (the general solution is also needed for vfmt)
	// println('parse_file("$path")')
	// text := os.read_file(path) or {
	// 	panic(err)
	// }
	mut p := Parser{
		scanner: scanner.new_scanner_file(path, comments_mode, pref)
		comments_mode: comments_mode
		table: table
		pref: pref
		scope: &ast.Scope{
			start_pos: 0
			parent: global_scope
		}
		errors: []errors.Error{}
		warnings: []errors.Warning{}
		global_scope: global_scope
	}
	p.set_path(path)
	return p.parse()
}

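// parse_vet_file parses `path` with comments preserved and returns the file together with
// the vet errors collected along the way (space indentation, trailing whitespace, etc.).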
pub fn parse_vet_file(path string, table_ &ast.Table, pref &pref.Preferences) (ast.File, []vet.Error) {
	global_scope := &ast.Scope{
		parent: 0
	}
	mut p := Parser{
		scanner: scanner.new_scanner_file(path, .parse_comments, pref)
		comments_mode: .parse_comments
		table: table_
		pref: pref
		scope: &ast.Scope{
			start_pos: 0
			parent: global_scope
		}
		errors: []errors.Error{}
		warnings: []errors.Warning{}
		global_scope: global_scope
	}
	p.set_path(path)
	if p.scanner.text.contains_any_substr(['\n ', ' \n']) {
		source_lines := os.read_lines(path) or { []string{} }
		for lnumber, line in source_lines {
			if line.starts_with(' ') {
				p.vet_error('Looks like you are using spaces for indentation.', lnumber,
					.vfmt, .space_indent)
			}
			if line.ends_with(' ') {
				p.vet_error('Looks like you have trailing whitespace.', lnumber, .unknown,
					.trailing_space)
			}
		}
	}
	p.vet_errors << p.scanner.vet_errors
	file := p.parse()
	return file, p.vet_errors
}

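// parse runs the main parsing loop: leading comments, the module clause, the imports,
// then top level statements until EOF, and assembles the resulting ast.File.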
pub fn (mut p Parser) parse() ast.File {
	util.timing_start('PARSE')
	defer {
		util.timing_measure_cumulative('PARSE')
	}
	// comments_mode: comments_mode
	p.init_parse_fns()
	p.read_first_token()
	mut stmts := []ast.Stmt{}
	for p.tok.kind == .comment {
		stmts << p.comment_stmt()
	}
	// module
	module_decl := p.module_decl()
	if module_decl.is_skipped {
		stmts.insert(0, ast.Stmt(module_decl))
	} else {
		stmts << module_decl
	}
	// imports
	for {
		if p.tok.kind == .key_import {
			stmts << p.import_stmt()
			continue
		}
		if p.tok.kind == .comment {
			stmts << p.comment_stmt()
			continue
		}
		break
	}
	for {
		if p.tok.kind == .eof {
			p.check_unused_imports()
			break
		}
		// println('stmt at ' + p.tok.str())
		stmt := p.top_stmt()
		// clear the attributes after each statement
		if !(stmt is ast.ExprStmt && (stmt as ast.ExprStmt).expr is ast.Comment) {
			p.attrs = []
		}
		stmts << stmt
	}
	// println('nr stmts = $stmts.len')
	// println(stmts[0])
	p.scope.end_pos = p.tok.pos
	//
	return ast.File{
		path: p.file_name
		path_base: p.file_base
		is_test: p.inside_test_file
		lines: p.scanner.line_nr
		bytes: p.scanner.text.len
		mod: module_decl
		imports: p.ast_imports
		imported_symbols: p.imported_symbols
		auto_imports: p.auto_imports
		stmts: stmts
		scope: p.scope
		global_scope: p.global_scope
		errors: p.errors
		warnings: p.warnings
		global_labels: p.global_labels
	}
}

/*
struct Queue {
mut:
	idx              int
	mu               &sync.Mutex
	mu2              &sync.Mutex
	paths            []string
	table            &ast.Table
	parsed_ast_files []ast.File
	pref             &pref.Preferences
	global_scope     &ast.Scope
}

fn (mut q Queue) run() {
	for {
		q.mu.lock()
		idx := q.idx
		if idx >= q.paths.len {
			q.mu.unlock()
			return
		}
		q.idx++
		q.mu.unlock()
		println('run(idx=$idx)')
		path := q.paths[idx]
		file := parse_file(path, q.table, .skip_comments, q.pref, q.global_scope)
		q.mu2.lock()
		q.parsed_ast_files << file
		q.mu2.unlock()
		println('run done(idx=$idx)')
	}
}
*/

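// parse_files parses each path in `paths` sequentially with parse_file; the commented out
// Queue code above is a leftover experiment with parallel parsing.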
pub fn parse_files(paths []string, table &ast.Table, pref &pref.Preferences, global_scope &ast.Scope) []ast.File {
	mut timers := util.new_timers(false)
	$if time_parsing ? {
		timers.should_print = true
	}
	// println('nr_cpus= $nr_cpus')
	$if macos {
		/*
		if pref.is_parallel && paths[0].contains('/array.v') {
			println('\n\n\nparse_files() nr_files=$paths.len')
			println(paths)
			nr_cpus := runtime.nr_cpus()
			mut q := &Queue{
				paths: paths
				table: table
				pref: pref
				global_scope: global_scope
				mu: sync.new_mutex()
				mu2: sync.new_mutex()
			}
			for _ in 0 .. nr_cpus - 1 {
				go q.run()
			}
			time.sleep(time.second)
			println('all done')
			return q.parsed_ast_files
		}
		*/
	}
	// ///////////////
	mut files := []ast.File{}
	for path in paths {
		// println('parse_files $path')
		timers.start('parse_file $path')
		files << parse_file(path, table, .skip_comments, pref, global_scope)
		timers.show('parse_file $path')
	}
	return files
}

pub fn (mut p Parser) init_parse_fns() {
	// p.prefix_parse_fns = make(100, 100, sizeof(PrefixParseFn))
	// p.prefix_parse_fns[token.Kind.name] = parse_name
}

pub fn (mut p Parser) read_first_token() {
	// need to call next() 2 times to get peek token and current token
	p.next()
	p.next()
}

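// peek_token returns the token `n` positions ahead of the current one without advancing the
// parser; the offset passed to the scanner is reduced by 2, since the parser already holds
// `tok` and `peek_tok`.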
[inline]
pub fn (p &Parser) peek_token(n int) token.Token {
	return p.scanner.peek_token(n - 2)
}

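// open_scope pushes a new child scope starting at the current token position.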
pub fn (mut p Parser) open_scope() {
	p.scope = &ast.Scope{
		parent: p.scope
		start_pos: p.tok.pos
	}
}

pub fn (mut p Parser) close_scope() {
	// p.scope.end_pos = p.tok.pos
	// NOTE: since this is usually called after `p.parse_block()`
	// ie. when `prev_tok` is rcbr `}` we most likely want `prev_tok`
	// we could do the following, but probably not needed in 99% of cases:
	// `end_pos = if p.prev_tok.kind == .rcbr { p.prev_tok.pos } else { p.tok.pos }`
	p.scope.end_pos = p.prev_tok.pos
	p.scope.parent.children << p.scope
	p.scope = p.scope.parent
}

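// parse_block parses a `{ ... }` block inside its own scope.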
pub fn (mut p Parser) parse_block() []ast.Stmt {
	p.open_scope()
	// println('parse block')
	stmts := p.parse_block_no_scope(false)
	p.close_scope()
	// println('nr exprs in block = $exprs.len')
	return stmts
}

pub fn (mut p Parser) parse_block_no_scope(is_top_level bool) []ast.Stmt {
	p.check(.lcbr)
	mut stmts := []ast.Stmt{}
	if p.tok.kind != .rcbr {
		mut count := 0
		for p.tok.kind !in [.eof, .rcbr] {
			stmts << p.stmt(is_top_level)
			count++
			if count % 100000 == 0 {
				eprintln('parsed $count statements so far from fn $p.cur_fn_name ...')
			}
			if count > 1000000 {
				p.error_with_pos('parsed over $count statements from fn $p.cur_fn_name, the parser is probably stuck',
					p.tok.position())
				return []
			}
		}
	}
	if is_top_level {
		p.top_level_statement_end()
	}
	p.check(.rcbr)
	return stmts
}

/*
fn (mut p Parser) next_with_comment() {
	p.tok = p.peek_tok
	p.peek_tok = p.scanner.scan()
}
*/
fn (mut p Parser) next() {
	p.prev_tok = p.tok
	p.tok = p.peek_tok
	p.peek_tok = p.scanner.scan()
	/*
	if p.tok.kind==.comment {
		p.comments << ast.Comment{text:p.tok.lit, line_nr:p.tok.line_nr}
		p.next()
	}
	*/
}

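// check consumes the current token when it matches `expected`, otherwise it reports a parse error.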
fn (mut p Parser) check(expected token.Kind) {
	p.name_error = false
	// for p.tok.kind in [.line_comment, .mline_comment] {
	// 	p.next()
	// }

	if _likely_(p.tok.kind == expected) {
		p.next()
	} else {
		if expected == .name {
			p.name_error = true
		}
		mut s := expected.str()
		// quote keywords, punctuation, operators
		if token.is_key(s) || (s.len > 0 && !s[0].is_letter()) {
			s = '`$s`'
		}
		p.error('unexpected $p.tok, expecting $s')
	}
}

// JS functions can have multiple dots in their name:
// JS.foo.bar.and.a.lot.more.dots()
fn (mut p Parser) check_js_name() string {
	mut name := ''
	for p.peek_tok.kind == .dot {
		name += '${p.tok.lit}.'
		p.next() // .name
		p.next() // .dot
	}
	// last .name
	name += p.tok.lit
	p.next()
	return name
}

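// check_name consumes a `.name` token and returns its literal, marking the corresponding
// import as used when the name is an import alias followed by a dot.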
fn (mut p Parser) check_name() string {
	name := p.tok.lit
	if p.peek_tok.kind == .dot && name in p.imports {
		p.register_used_import(name)
	}
	p.check(.name)
	return name
}

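// top_stmt parses a single top level declaration: `pub` declarations, attributes, consts,
// fns, structs, enums, interfaces, types, globals, hash statements, comptime `$if`,
// comments, misplaced imports, or a script-mode implicit `main`.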
pub fn (mut p Parser) top_stmt() ast.Stmt {
	$if trace_parser ? {
		tok_pos := p.tok.position()
		eprintln('parsing file: ${p.file_name:-30} | tok.kind: ${p.tok.kind:-10} | tok.lit: ${p.tok.lit:-10} | tok_pos: ${tok_pos.str():-45} | top_stmt')
	}
	for {
		match p.tok.kind {
			.key_pub {
				match p.peek_tok.kind {
					.key_const {
						return p.const_decl()
					}
					.key_fn {
						return p.fn_decl()
					}
					.key_struct, .key_union {
						return p.struct_decl()
					}
					.key_interface {
						return p.interface_decl()
					}
					.key_enum {
						return p.enum_decl()
					}
					.key_type {
						return p.type_decl()
					}
					else {
						return p.error('wrong pub keyword usage')
					}
				}
			}
			.lsbr {
				// attrs are stored in `p.attrs`
				p.attributes()
				continue
			}
			.key_asm {
				return p.asm_stmt(true)
			}
			.key_interface {
				return p.interface_decl()
			}
			.key_import {
				p.error_with_pos('`import x` can only be declared at the beginning of the file',
					p.tok.position())
				return p.import_stmt()
			}
			.key_global {
				return p.global_decl()
			}
			.key_const {
				return p.const_decl()
			}
			.key_fn {
				return p.fn_decl()
			}
			.key_struct {
				return p.struct_decl()
			}
			.dollar {
				if_expr := p.if_expr(true)
				return ast.ExprStmt{
					expr: if_expr
					pos: if_expr.pos
				}
			}
			.hash {
				return p.hash()
			}
			.key_type {
				return p.type_decl()
			}
			.key_enum {
				return p.enum_decl()
			}
			.key_union {
				return p.struct_decl()
			}
			.comment {
				return p.comment_stmt()
			}
			else {
				p.inside_fn = true
				if p.pref.is_script && !p.pref.is_test {
					mut stmts := []ast.Stmt{}
					for p.tok.kind != .eof {
						stmts << p.stmt(false)
					}
					return ast.FnDecl{
						name: 'main.main'
						mod: 'main'
						is_main: true
						stmts: stmts
						file: p.file_name
						return_type: ast.void_type
						scope: p.scope
						label_names: p.label_names
					}
				} else if p.pref.is_fmt {
					return p.stmt(false)
				} else {
					return p.error('bad top level statement ' + p.tok.str())
				}
			}
		}
	}
	// TODO remove dummy return statement
	// the compiler complains if it's not there
	return ast.empty_stmt()
}

// TODO [if vfmt]
pub fn (mut p Parser) check_comment() ast.Comment {
	if p.tok.kind == .comment {
		return p.comment()
	}
	return ast.Comment{}
}

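// comment consumes the current comment token and returns it as an ast.Comment, recording
// whether it spans multiple lines.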
pub fn (mut p Parser) comment() ast.Comment {
	mut pos := p.tok.position()
	text := p.tok.lit
	pos.last_line = pos.line_nr + text.count('\n')
	p.next()
	is_multi := text.contains('\n')
	// Filter out false positive space indent vet errors inside comments
	if p.vet_errors.len > 0 && is_multi {
		p.vet_errors = p.vet_errors.filter(it.typ != .space_indent
			|| it.pos.line_nr - 1 > pos.last_line || it.pos.line_nr - 1 <= pos.line_nr)
	}
	return ast.Comment{
		is_multi: is_multi
		text: text
		pos: pos
	}
}

pub fn (mut p Parser) comment_stmt() ast.ExprStmt {
	comment := p.comment()
	return ast.ExprStmt{
		expr: comment
		pos: comment.pos
	}
}

struct EatCommentsConfig {
	same_line bool // Only eat comments on the same line as the previous token
	follow_up bool // Comments directly below the previous token as long as there is no empty line
}

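// eat_comments collects consecutive comments, restricted by `cfg` to the same line as the
// previous token and/or to the lines directly following it.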
pub fn (mut p Parser) eat_comments(cfg EatCommentsConfig) []ast.Comment {
	mut line := p.prev_tok.line_nr
	mut comments := []ast.Comment{}
	for {
		if p.tok.kind != .comment || (cfg.same_line && p.tok.line_nr > line)
			|| (cfg.follow_up && p.tok.line_nr > line + 1) {
			break
		}
		comments << p.comment()
		if cfg.follow_up {
			line = p.prev_tok.line_nr
		}
	}
	return comments
}

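// stmt parses one statement inside a function body: blocks, assert, for, labels, defer,
// go, goto, asm, comptime statements, and plain expressions.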
pub fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
	$if trace_parser ? {
		tok_pos := p.tok.position()
		eprintln('parsing file: ${p.file_name:-30} | tok.kind: ${p.tok.kind:-10} | tok.lit: ${p.tok.lit:-10} | tok_pos: ${tok_pos.str():-45} | stmt($is_top_level)')
	}
	p.is_stmt_ident = p.tok.kind == .name
	match p.tok.kind {
		.lcbr {
			mut pos := p.tok.position()
			stmts := p.parse_block()
			pos.last_line = p.prev_tok.line_nr
			return ast.Block{
				stmts: stmts
				pos: pos
			}
		}
		.key_assert {
			p.next()
			mut pos := p.tok.position()
			expr := p.expr(0)
			pos.update_last_line(p.prev_tok.line_nr)
			return ast.AssertStmt{
				expr: expr
				pos: pos
				is_used: p.inside_test_file || !p.pref.is_prod
			}
		}
		.key_for {
			return p.for_stmt()
		}
		.name {
			if p.tok.lit == 'sql' {
				return p.sql_stmt()
			}
			if p.peek_tok.kind == .colon {
				// `label:`
				spos := p.tok.position()
				name := p.check_name()
				if name in p.label_names {
					p.error_with_pos('duplicate label `$name`', spos)
				}
				p.label_names << name
				p.next()
				if p.tok.kind == .key_for {
					for_pos := p.tok.position()
					mut stmt := p.stmt(is_top_level)
					match mut stmt {
						ast.ForStmt {
							stmt.label = name
							return stmt
						}
						ast.ForInStmt {
							stmt.label = name
							return stmt
						}
						ast.ForCStmt {
							stmt.label = name
							return stmt
						}
						else {
							p.error_with_pos('unknown kind of For statement', for_pos)
						}
					}
				}
				return ast.GotoLabel{
					name: name
					pos: spos.extend(p.tok.position())
				}
			} else if p.peek_tok.kind == .name {
				return p.error_with_pos('unexpected name `$p.peek_tok.lit`', p.peek_tok.position())
			} else if !p.inside_if_expr && !p.inside_match_body && !p.inside_or_expr
				&& p.peek_tok.kind in [.rcbr, .eof] && !p.mark_var_as_used(p.tok.lit) {
				return p.error_with_pos('`$p.tok.lit` evaluated but not used', p.tok.position())
			}
			return p.parse_multi_expr(is_top_level)
		}
		.comment {
			return p.comment_stmt()
		}
		.key_return {
			return p.return_stmt()
		}
		.dollar {
			match p.peek_tok.kind {
				.key_if {
					mut pos := p.tok.position()
					expr := p.if_expr(true)
					pos.update_last_line(p.prev_tok.line_nr)
					return ast.ExprStmt{
						expr: expr
						pos: pos
					}
				}
				.key_for {
					return p.comp_for()
				}
				.name {
					mut pos := p.tok.position()
					expr := p.comp_call()
					pos.update_last_line(p.prev_tok.line_nr)
					return ast.ExprStmt{
						expr: expr
						pos: pos
					}
				}
				else {
					return p.error_with_pos('unexpected \$', p.tok.position())
				}
			}
		}
		.key_continue, .key_break {
			tok := p.tok
			line := p.tok.line_nr
			p.next()
			mut label := ''
			if p.tok.line_nr == line && p.tok.kind == .name {
				label = p.check_name()
			}
			return ast.BranchStmt{
				kind: tok.kind
				label: label
				pos: tok.position()
			}
		}
		.key_unsafe {
			return p.unsafe_stmt()
		}
		.hash {
			return p.hash()
		}
		.key_defer {
			p.next()
			spos := p.tok.position()
			stmts := p.parse_block()
			return ast.DeferStmt{
				stmts: stmts
				pos: spos.extend_with_last_line(p.tok.position(), p.prev_tok.line_nr)
			}
		}
		.key_go {
			go_expr := p.go_expr()
			return ast.ExprStmt{
				expr: go_expr
				pos: go_expr.pos
			}
		}
		.key_goto {
			p.next()
			spos := p.tok.position()
			name := p.check_name()
			return ast.GotoStmt{
				name: name
				pos: spos
			}
		}
		.key_const {
			return p.error_with_pos('const can only be defined at the top level (outside of functions)',
				p.tok.position())
		}
		.key_asm {
			return p.asm_stmt(false)
		}
		// literals, 'if', etc. in here
		else {
			return p.parse_multi_expr(is_top_level)
		}
	}
}

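// asm_stmt parses an `asm arch { ... }` block: a list of instruction or label templates,
// optionally followed by `;` separated output, input, clobber and goto-label sections
// (extended assembly is only allowed inside a function). A rough sketch of the accepted
// shape, for illustration only:
//	asm amd64 {
//		add a, 5
//		; +r (a)
//		; r (b)
//	}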
fn (mut p Parser) asm_stmt(is_top_level bool) ast.AsmStmt {
	p.inside_asm = true
	p.inside_asm_template = true
	defer {
		p.inside_asm = false
		p.inside_asm_template = false
	}
	p.n_asm = 0
	if is_top_level {
		p.top_level_statement_start()
	}
	mut backup_scope := p.scope

	pos := p.tok.position()

	p.check(.key_asm)
	mut arch := pref.arch_from_string(p.tok.lit) or { pref.Arch._auto }
	mut is_volatile := false
	mut is_goto := false
	if p.tok.lit == 'volatile' && p.tok.kind == .name {
		arch = pref.arch_from_string(p.peek_tok.lit) or { pref.Arch._auto }
		is_volatile = true
		p.next()
	} else if p.tok.kind == .key_goto {
		arch = pref.arch_from_string(p.peek_tok.lit) or { pref.Arch._auto }
		is_goto = true
		p.next()
	}
	if arch == ._auto && !p.pref.is_fmt {
		p.error('unknown assembly architecture')
	}
	if p.tok.kind != .name {
		p.error('must specify assembly architecture')
	} else {
		p.next()
	}

	p.check_for_impure_v(ast.pref_arch_to_table_language(arch), p.prev_tok.position())

	p.check(.lcbr)
	p.scope = &ast.Scope{
		parent: 0 // you shouldn't be able to reference other variables in assembly blocks
		detached_from_parent: true
		start_pos: p.tok.pos
		objects: ast.all_registers(mut p.table, arch) //
	}

	mut local_labels := []string{}
	// riscv: https://github.com/jameslzhu/riscv-card/blob/master/riscv-card.pdf
	// x86: https://www.felixcloutier.com/x86/
	// arm: https://developer.arm.com/documentation/dui0068/b/arm-instruction-reference
	mut templates := []ast.AsmTemplate{}
	for p.tok.kind !in [.semicolon, .rcbr] {
		template_pos := p.tok.position()
		mut name := ''
		is_directive := p.tok.kind == .dot
		if is_directive {
			p.next()
		}
		if p.tok.kind in [.key_in, .key_lock, .key_orelse] { // `in`, `lock`, `or` are v keywords that are also x86/arm/riscv instructions.
			name = p.tok.kind.str()
			p.next()
		} else if p.tok.kind == .number {
			name = p.tok.lit
			p.next()
		} else {
			name = p.tok.lit
			p.check(.name)
		}
		// dots are part of instructions for some riscv extensions
		if arch in [.rv32, .rv64] {
			for p.tok.kind == .dot {
				name += '.'
				p.next()
				name += p.tok.lit
				p.check(.name)
			}
		}
		mut is_label := false

		mut args := []ast.AsmArg{}
		if p.tok.line_nr == p.prev_tok.line_nr {
			args_loop: for {
				if p.prev_tok.position().line_nr < p.tok.position().line_nr {
					break
				}
				match p.tok.kind {
					.name {
						if p.tok.kind == .name && p.tok.lit.len >= 2
							&& (p.tok.lit.starts_with('b') || p.tok.lit.starts_with('f')) {
							mut is_digit := true
							for c in p.tok.lit[1..] {
								if !c.is_digit() {
									is_digit = false
									break
								}
							}
							if is_digit {
								args << ast.AsmDisp{
									val: p.tok.lit
									pos: p.tok.position()
								}
								p.check(.name)
							} else {
								args << p.reg_or_alias()
							}
						} else {
							args << p.reg_or_alias()
						}
					}
					.number {
						number_lit := p.parse_number_literal()
						match number_lit {
							ast.FloatLiteral {
								args << ast.FloatLiteral{
									...number_lit
								}
							}
							ast.IntegerLiteral {
								if is_directive || number_lit.val.ends_with('b')
									|| number_lit.val.ends_with('f') {
									args << ast.AsmDisp{
										val: number_lit.val
										pos: number_lit.pos
									}
								} else {
									args << ast.IntegerLiteral{
										...number_lit
									}
								}
							}
							else {
								verror('p.parse_number_literal() invalid output: `$number_lit`')
							}
						}
					}
					.chartoken {
						args << ast.CharLiteral{
							val: p.tok.lit
							pos: p.tok.position()
						}
						p.next()
					}
					.colon {
						is_label = true
						p.next()
						local_labels << name
						break
					}
					.lsbr {
						args << p.asm_addressing()
					}
					.rcbr {
						break
					}
					.semicolon {
						break
					}
					else {
						p.error('invalid token in assembly block')
					}
				}
				if p.tok.kind == .comma {
					p.next()
				} else {
					break
				}
			}
			// if p.prev_tok.position().line_nr < p.tok.position().line_nr {
			// 	break
			// }
		}
		mut comments := []ast.Comment{}
		for p.tok.kind == .comment {
			comments << p.comment()
		}
		if is_directive && name in ['globl', 'global'] {
			for arg in args {
				p.global_labels << (arg as ast.AsmAlias).name
			}
		}
		templates << ast.AsmTemplate{
			name: name
			args: args
			comments: comments
			is_label: is_label
			is_directive: is_directive
			pos: template_pos.extend(p.tok.position())
		}
	}
	mut scope := p.scope
	p.scope = backup_scope
	p.inside_asm_template = false
	mut output, mut input, mut clobbered, mut global_labels := []ast.AsmIO{}, []ast.AsmIO{}, []ast.AsmClobbered{}, []string{}
	if !is_top_level {
		if p.tok.kind == .semicolon {
			output = p.asm_ios(true)
			if p.tok.kind == .semicolon {
				input = p.asm_ios(false)
			}
			if p.tok.kind == .semicolon {
				// because p.reg_or_alias() requires the scope with registers to recognize registers.
				backup_scope = p.scope
				p.scope = scope
				p.next()
				for p.tok.kind == .name {
					reg := ast.AsmRegister{
						name: p.tok.lit
						typ: 0
						size: -1
					}
					p.next()

					mut comments := []ast.Comment{}
					for p.tok.kind == .comment {
						comments << p.comment()
					}
					clobbered << ast.AsmClobbered{
						reg: reg
						comments: comments
					}

					if p.tok.kind in [.rcbr, .semicolon] {
						break
					}
				}

				if is_goto && p.tok.kind == .semicolon {
					p.next()
					for p.tok.kind == .name {
						global_labels << p.tok.lit
						p.next()
					}
				}
			}
		}
	} else if p.tok.kind == .semicolon {
		p.error('extended assembly is not allowed as a top level statement')
	}
	p.scope = backup_scope
	p.check(.rcbr)
	if is_top_level {
		p.top_level_statement_end()
	}
	scope.end_pos = p.prev_tok.pos

	return ast.AsmStmt{
		arch: arch
		is_goto: is_goto
		is_volatile: is_volatile
		templates: templates
		output: output
		input: input
		clobbered: clobbered
		pos: pos.extend(p.tok.position())
		is_top_level: is_top_level
		scope: scope
		global_labels: global_labels
		local_labels: local_labels
	}
}

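// reg_or_alias returns the register named by the current token when it is one of the
// registers in the asm scope, or an ast.AsmAlias referring to a V variable otherwise.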
fn (mut p Parser) reg_or_alias() ast.AsmArg {
	if p.tok.lit in p.scope.objects {
		x := p.scope.objects[p.tok.lit]
		if x is ast.AsmRegister {
			b := x
			p.check(.name)
			return b
		} else {
			verror('parser bug: non-register ast.ScopeObject found in scope')
			return ast.AsmDisp{} // should not be reached
		}
	} else {
		p.check(.name)
		return ast.AsmAlias{
			name: p.prev_tok.lit
			pos: p.prev_tok.position()
		}
	}
}

// fn (mut p Parser) asm_addressing() ast.AsmAddressing {
// 	pos := p.tok.position()
// 	p.check(.lsbr)
// 	unknown_addressing_mode := 'unknown addressing mode. supported ones are [displacement], [base], [base + displacement] [index ∗ scale + displacement], [base + index ∗ scale + displacement], [base + index + displacement] [rip + displacement]'
// 	mut mode := ast.AddressingMode.invalid
// 	if p.peek_tok.kind == .rsbr {
// 		if p.tok.kind == .name {
// 			mode = .base
// 		} else if p.tok.kind == .number {
// 			mode = .displacement
// 		} else {
// 			p.error(unknown_addressing_mode)
// 		}
// 	} else if p.peek_tok.kind == .mul {
// 		mode = .index_times_scale_plus_displacement
// 	} else if p.tok.lit == 'rip' {
// 		mode = .rip_plus_displacement
// 	} else if p.peek_tok3.kind == .mul {
// 		mode = .base_plus_index_times_scale_plus_displacement
// 	} else if p.peek_tok.kind == .plus && p.peek_tok3.kind == .rsbr {
// 		mode = .base_plus_displacement
// 	} else if p.peek_tok.kind == .plus && p.peek_tok3.kind == .plus {
// 		mode = .base_plus_index_plus_displacement
// 	} else {
// 		p.error(unknown_addressing_mode)
// 	}
// 	mut displacement, mut base, mut index, mut scale := u32(0), ast.AsmArg{}, ast.AsmArg{}, -1
//
// 	match mode {
// 		.base {
// 			base = p.reg_or_alias()
// 		}
// 		.displacement {
// 			displacement = p.tok.lit.u32()
// 			p.check(.number)
// 		}
// 		.base_plus_displacement {
// 			base = p.reg_or_alias()
// 			p.check(.plus)
// 			displacement = p.tok.lit.u32()
// 			p.check(.number)
// 		}
// 		.index_times_scale_plus_displacement {
// 			index = p.reg_or_alias()
// 			p.check(.mul)
// 			scale = p.tok.lit.int()
// 			p.check(.number)
// 			p.check(.plus)
// 			displacement = p.tok.lit.u32()
// 			p.check(.number)
// 		}
// 		.base_plus_index_times_scale_plus_displacement {
// 			base = p.reg_or_alias()
// 			p.check(.plus)
// 			index = p.reg_or_alias()
// 			p.check(.mul)
// 			scale = p.tok.lit.int()
// 			p.check(.number)
// 			p.check(.plus)
// 			displacement = p.tok.lit.u32()
// 			p.check(.number)
// 		}
// 		.rip_plus_displacement {
// 			base = p.reg_or_alias()
// 			p.check(.plus)
// 			displacement = p.tok.lit.u32()
// 			p.check(.number)
// 		}
// 		.base_plus_index_plus_displacement {
// 			base = p.reg_or_alias()
// 			p.check(.plus)
// 			index = p.reg_or_alias()
// 			p.check(.plus)
// 			displacement = p.tok.lit.u32()
// 			p.check(.number)
// 		}
// 		.invalid {} // there was already an error above
// 	}
//
// 	p.check(.rsbr)
// 	return ast.AsmAddressing{
// 		base: base
// 		displacement: displacement
// 		index: index
// 		scale: scale
// 		mode: mode
// 		pos: pos.extend(p.prev_tok.position())
// 	}
// }

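// asm_addressing parses a memory operand in square brackets, distinguishing the [base],
// [displacement], [base + displacement], [index ∗ scale + displacement],
// [base + index ∗ scale + displacement], [base + index + displacement] and
// [rip + displacement] forms.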
fn (mut p Parser) asm_addressing() ast.AsmAddressing {
	pos := p.tok.position()
	p.check(.lsbr)
	unknown_addressing_mode := 'unknown addressing mode. supported ones are [displacement], [base], [base + displacement], [index ∗ scale + displacement], [base + index ∗ scale + displacement], [base + index + displacement], [rip + displacement]'
	// this mess used to look much cleaner before the removal of peek_tok3, see above
	if p.peek_tok.kind == .rsbr { // [displacement] or [base]
		if p.tok.kind == .name {
			base := p.reg_or_alias()
			p.check(.rsbr)
			return ast.AsmAddressing{
				mode: .base
				base: base
				pos: pos.extend(p.prev_tok.position())
			}
		} else if p.tok.kind == .number {
			displacement := if p.tok.kind == .name {
				x := ast.AsmArg(p.tok.lit)
				p.next()
				x
			} else {
				x := ast.AsmArg(p.tok.lit)
				p.check(.number)
				x
			}
			p.check(.rsbr)
			return ast.AsmAddressing{
				mode: .displacement
				displacement: displacement
				pos: pos.extend(p.prev_tok.position())
			}
		} else {
			p.error(unknown_addressing_mode)
		}
	}
	if p.peek_tok.kind == .plus && p.tok.kind == .name { // [base + displacement], [base + index ∗ scale + displacement], [base + index + displacement] or [rip + displacement]
		if p.tok.lit == 'rip' {
			rip := p.reg_or_alias()
			p.next()

			displacement := if p.tok.kind == .name {
				x := ast.AsmArg(p.tok.lit)
				p.next()
				x
			} else {
				x := ast.AsmArg(p.tok.lit)
				p.check(.number)
				x
			}
			p.check(.rsbr)
			return ast.AsmAddressing{
				mode: .rip_plus_displacement
				base: rip
				displacement: displacement
				pos: pos.extend(p.prev_tok.position())
			}
		}
		base := p.reg_or_alias()
		p.next()
		if p.peek_tok.kind == .rsbr {
			if p.tok.kind == .number {
				displacement := if p.tok.kind == .name {
					x := ast.AsmArg(p.tok.lit)
					p.next()
					x
				} else {
					x := ast.AsmArg(p.tok.lit)
					p.check(.number)
					x
				}
				p.check(.rsbr)
				return ast.AsmAddressing{
					mode: .base_plus_displacement
					base: base
					displacement: displacement
					pos: pos.extend(p.prev_tok.position())
				}
			} else {
				p.error(unknown_addressing_mode)
			}
		}
		index := p.reg_or_alias()
		if p.tok.kind == .mul {
			p.next()
			scale := p.tok.lit.int()
			p.check(.number)
			p.check(.plus)
			displacement := if p.tok.kind == .name {
				x := ast.AsmArg(p.tok.lit)
				p.next()
				x
			} else {
				x := ast.AsmArg(p.tok.lit)
				p.check(.number)
				x
			}
			p.check(.rsbr)
			return ast.AsmAddressing{
				mode: .base_plus_index_times_scale_plus_displacement
				base: base
				index: index
				scale: scale
				displacement: displacement
				pos: pos.extend(p.prev_tok.position())
			}
		} else if p.tok.kind == .plus {
			p.next()
			displacement := if p.tok.kind == .name {
				x := ast.AsmArg(p.tok.lit)
				p.next()
				x
			} else {
				x := ast.AsmArg(p.tok.lit)
				p.check(.number)
				x
			}
			p.check(.rsbr)
			return ast.AsmAddressing{
				mode: .base_plus_index_plus_displacement
				base: base
				index: index
				displacement: displacement
				pos: pos.extend(p.prev_tok.position())
			}
		}
	}
	if p.peek_tok.kind == .mul { // [index ∗ scale + displacement]
		index := p.reg_or_alias()
		p.next()
		scale := p.tok.lit.int()
		p.check(.number)
		p.check(.plus)
		displacement := if p.tok.kind == .name {
			x := ast.AsmArg(p.tok.lit)
			p.next()
			x
		} else {
			x := ast.AsmArg(p.tok.lit)
			p.check(.number)
			x
		}
		p.check(.rsbr)
		return ast.AsmAddressing{
			mode: .index_times_scale_plus_displacement
			index: index
			scale: scale
			displacement: displacement
			pos: pos.extend(p.prev_tok.position())
		}
	}
	p.error(unknown_addressing_mode)
	return ast.AsmAddressing{}
}

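// asm_ios parses one `;` separated output or input section of an extended asm block:
// a constraint (e.g. `=r`, `+r`, `r`), an expression enclosed in parentheses, an optional
// `as alias`, and any trailing comments.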
fn (mut p Parser) asm_ios(output bool) []ast.AsmIO {
|
|
|
|
|
mut res := []ast.AsmIO{}
|
|
|
|
|
p.check(.semicolon)
|
|
|
|
|
if p.tok.kind in [.rcbr, .semicolon] {
|
|
|
|
|
return []
|
|
|
|
|
}
|
|
|
|
|
for {
|
|
|
|
|
pos := p.tok.position()
|
|
|
|
|
|
|
|
|
|
mut constraint := ''
|
|
|
|
|
if p.tok.kind == .lpar {
|
|
|
|
|
constraint = if output { '+r' } else { 'r' } // default constraint
|
|
|
|
|
} else {
|
|
|
|
|
constraint += match p.tok.kind {
|
|
|
|
|
.assign {
|
|
|
|
|
'='
|
|
|
|
|
}
|
|
|
|
|
.plus {
|
|
|
|
|
'+'
|
|
|
|
|
}
|
|
|
|
|
.mod {
|
|
|
|
|
'%'
|
|
|
|
|
}
|
|
|
|
|
.amp {
|
|
|
|
|
'&'
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
''
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if constraint != '' {
|
|
|
|
|
p.next()
|
|
|
|
|
}
|
|
|
|
|
if p.tok.kind == .assign {
|
|
|
|
|
constraint += '='
|
2021-04-15 01:30:23 +02:00
|
|
|
|
p.next()
|
2021-03-17 01:43:17 +01:00
|
|
|
|
} else if p.tok.kind == .plus {
|
|
|
|
|
constraint += '+'
|
2021-04-15 01:30:23 +02:00
|
|
|
|
p.next()
|
2021-03-17 01:43:17 +01:00
|
|
|
|
}
|
|
|
|
|
constraint += p.tok.lit
|
|
|
|
|
p.check(.name)
|
|
|
|
|
}
|
|
|
|
|
mut expr := p.expr(0)
|
|
|
|
|
if mut expr is ast.ParExpr {
|
|
|
|
|
expr = expr.expr
|
|
|
|
|
} else {
|
|
|
|
|
p.error('asm in/output must be enclosed in brackets $expr.type_name()')
|
|
|
|
|
}
|
|
|
|
|
mut alias := ''
|
|
|
|
|
if p.tok.kind == .key_as {
|
2021-04-15 01:30:23 +02:00
|
|
|
|
p.next()
|
2021-03-17 01:43:17 +01:00
|
|
|
|
alias = p.tok.lit
|
|
|
|
|
p.check(.name)
|
|
|
|
|
} else if mut expr is ast.Ident {
|
|
|
|
|
alias = expr.name
|
|
|
|
|
}
|
|
|
|
|
// for constraints like `a`, no alias is needed, it is referred to as rcx
|
|
|
|
|
mut comments := []ast.Comment{}
|
|
|
|
|
for p.tok.kind == .comment {
|
|
|
|
|
comments << p.comment()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
res << ast.AsmIO{
|
|
|
|
|
alias: alias
|
|
|
|
|
constraint: constraint
|
|
|
|
|
expr: expr
|
|
|
|
|
comments: comments
|
|
|
|
|
pos: pos.extend(p.prev_tok.position())
|
|
|
|
|
}
|
|
|
|
|
p.n_asm++
|
|
|
|
|
if p.tok.kind in [.semicolon, .rcbr] {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return res
|
|
|
|
|
}
|
|
|
|
|
|
2020-07-17 19:13:22 +02:00
|
|
|
|
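// expr_list parses a comma separated list of expressions,
// collecting any interleaved comments separately.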
fn (mut p Parser) expr_list() ([]ast.Expr, []ast.Comment) {
|
2020-06-16 13:20:16 +02:00
|
|
|
|
mut exprs := []ast.Expr{}
|
2020-07-17 19:13:22 +02:00
|
|
|
|
mut comments := []ast.Comment{}
|
2020-06-16 13:20:16 +02:00
|
|
|
|
for {
|
2020-07-17 19:13:22 +02:00
|
|
|
|
expr := p.expr(0)
|
|
|
|
|
if expr is ast.Comment {
|
|
|
|
|
comments << expr
|
|
|
|
|
} else {
|
|
|
|
|
exprs << expr
|
|
|
|
|
if p.tok.kind != .comma {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
p.next()
|
2020-06-16 13:20:16 +02:00
|
|
|
|
}
|
|
|
|
|
}
|
2020-07-17 19:13:22 +02:00
|
|
|
|
return exprs, comments
|
2020-06-16 13:20:16 +02:00
|
|
|
|
}
|
|
|
|
|
|
2020-06-17 12:27:51 +02:00
|
|
|
|
// when is_top_stmt is true attrs are added to p.attrs
|
2020-08-04 20:10:22 +02:00
|
|
|
|
fn (mut p Parser) attributes() {
|
2020-02-03 07:02:54 +01:00
|
|
|
|
p.check(.lsbr)
|
2020-08-04 20:10:22 +02:00
|
|
|
|
mut has_ctdefine := false
|
2020-05-08 15:09:42 +02:00
|
|
|
|
for p.tok.kind != .rsbr {
|
2020-06-17 12:27:51 +02:00
|
|
|
|
start_pos := p.tok.position()
|
2020-05-08 15:09:42 +02:00
|
|
|
|
attr := p.parse_attr()
|
2020-08-04 20:10:22 +02:00
|
|
|
|
if p.attrs.contains(attr.name) {
|
2020-06-17 12:27:51 +02:00
|
|
|
|
p.error_with_pos('duplicate attribute `$attr.name`', start_pos.extend(p.prev_tok.position()))
|
2020-12-04 19:34:05 +01:00
|
|
|
|
return
|
2020-06-17 12:27:51 +02:00
|
|
|
|
}
|
2021-02-05 08:05:13 +01:00
|
|
|
|
if attr.is_comptime_define {
|
2020-08-04 20:10:22 +02:00
|
|
|
|
if has_ctdefine {
|
|
|
|
|
p.error_with_pos('only one `[if flag]` may be applied at a time `$attr.name`',
|
|
|
|
|
start_pos.extend(p.prev_tok.position()))
|
2020-12-04 19:34:05 +01:00
|
|
|
|
return
|
2020-08-04 20:10:22 +02:00
|
|
|
|
} else {
|
|
|
|
|
has_ctdefine = true
|
|
|
|
|
}
|
2020-06-17 12:27:51 +02:00
|
|
|
|
}
|
2020-08-04 20:10:22 +02:00
|
|
|
|
p.attrs << attr
|
2020-05-08 15:09:42 +02:00
|
|
|
|
if p.tok.kind != .semicolon {
|
|
|
|
|
if p.tok.kind == .rsbr {
|
|
|
|
|
p.next()
|
|
|
|
|
break
|
|
|
|
|
}
|
2021-02-09 16:07:30 +01:00
|
|
|
|
p.error('unexpected $p.tok, expecting `;`')
|
2020-12-04 19:34:05 +01:00
|
|
|
|
return
|
2020-05-08 15:09:42 +02:00
|
|
|
|
}
|
|
|
|
|
p.next()
|
|
|
|
|
}
|
2020-08-04 20:10:22 +02:00
|
|
|
|
if p.attrs.len == 0 {
|
2020-06-17 12:27:51 +02:00
|
|
|
|
p.error_with_pos('attributes cannot be empty', p.prev_tok.position().extend(p.tok.position()))
|
2020-12-04 19:34:05 +01:00
|
|
|
|
return
|
2020-06-17 12:27:51 +02:00
|
|
|
|
}
|
2020-05-08 15:09:42 +02:00
|
|
|
|
}
|
|
|
|
|
|
2021-04-02 00:57:09 +02:00
|
|
|
|
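// parse_attr parses a single attribute inside `[...]`:
// `[unsafe]`, `[if flag]`, a string attribute, or the `name`/`name: arg` forms.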
fn (mut p Parser) parse_attr() ast.Attr {
|
2021-01-08 16:24:42 +01:00
|
|
|
|
apos := p.prev_tok.position()
|
2020-08-08 16:24:05 +02:00
|
|
|
|
if p.tok.kind == .key_unsafe {
|
|
|
|
|
p.next()
|
2021-04-02 00:57:09 +02:00
|
|
|
|
return ast.Attr{
|
2020-08-08 16:24:05 +02:00
|
|
|
|
name: 'unsafe'
|
2021-01-08 16:24:42 +01:00
|
|
|
|
pos: apos.extend(p.tok.position())
|
2020-08-08 16:24:05 +02:00
|
|
|
|
}
|
|
|
|
|
}
|
2021-02-05 08:05:13 +01:00
|
|
|
|
is_comptime_define := p.tok.kind == .key_if
|
|
|
|
|
if is_comptime_define {
|
2020-02-18 17:29:47 +01:00
|
|
|
|
p.next()
|
|
|
|
|
}
|
2020-07-03 15:10:39 +02:00
|
|
|
|
mut name := ''
|
2020-08-10 02:00:14 +02:00
|
|
|
|
mut arg := ''
|
2020-07-04 23:38:12 +02:00
|
|
|
|
is_string := p.tok.kind == .string
|
2020-08-10 02:00:14 +02:00
|
|
|
|
mut is_string_arg := false
|
2020-07-04 23:38:12 +02:00
|
|
|
|
if is_string {
|
2020-07-03 15:10:39 +02:00
|
|
|
|
name = p.tok.lit
|
2020-04-12 17:45:04 +02:00
|
|
|
|
p.next()
|
2020-07-03 15:10:39 +02:00
|
|
|
|
} else {
|
2020-08-09 11:22:11 +02:00
|
|
|
|
name = p.check_name()
|
2020-08-08 16:24:05 +02:00
|
|
|
|
if name == 'unsafe_fn' {
|
2021-02-13 15:52:01 +01:00
|
|
|
|
p.error_with_pos('[unsafe_fn] is obsolete, use `[unsafe]` instead', apos.extend(p.tok.position()))
|
2021-04-02 00:57:09 +02:00
|
|
|
|
return ast.Attr{}
|
2020-08-09 11:22:11 +02:00
|
|
|
|
} else if name == 'trusted_fn' {
|
2021-02-13 15:52:01 +01:00
|
|
|
|
p.error_with_pos('[trusted_fn] is obsolete, use `[trusted]` instead', apos.extend(p.tok.position()))
|
2021-04-02 00:57:09 +02:00
|
|
|
|
return ast.Attr{}
|
2021-02-13 15:52:01 +01:00
|
|
|
|
} else if name == 'ref_only' {
|
|
|
|
|
p.warn_with_pos('[ref_only] is deprecated, use [heap] instead', apos.extend(p.tok.position()))
|
|
|
|
|
name = 'heap'
|
2020-08-08 16:24:05 +02:00
|
|
|
|
}
|
2020-07-03 15:10:39 +02:00
|
|
|
|
if p.tok.kind == .colon {
|
2020-04-12 17:45:04 +02:00
|
|
|
|
p.next()
|
2020-08-10 02:00:14 +02:00
|
|
|
|
// `name: arg`
|
2020-07-03 15:10:39 +02:00
|
|
|
|
if p.tok.kind == .name {
|
2020-08-10 02:00:14 +02:00
|
|
|
|
arg = p.check_name()
|
|
|
|
|
} else if p.tok.kind == .string { // `name: 'arg'`
|
|
|
|
|
arg = p.tok.lit
|
|
|
|
|
is_string_arg = true
|
2020-07-03 15:10:39 +02:00
|
|
|
|
p.next()
|
|
|
|
|
}
|
2020-04-12 17:45:04 +02:00
|
|
|
|
}
|
|
|
|
|
}
|
2021-04-02 00:57:09 +02:00
|
|
|
|
return ast.Attr{
|
2020-02-03 07:02:54 +01:00
|
|
|
|
name: name
|
2020-07-04 23:38:12 +02:00
|
|
|
|
is_string: is_string
|
2021-02-05 08:05:13 +01:00
|
|
|
|
is_comptime_define: is_comptime_define
|
2020-08-10 02:00:14 +02:00
|
|
|
|
arg: arg
|
|
|
|
|
is_string_arg: is_string_arg
|
2021-01-08 16:24:42 +01:00
|
|
|
|
pos: apos.extend(p.tok.position())
|
2020-02-03 07:02:54 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-04-02 00:57:09 +02:00
|
|
|
|
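// check_for_impure_v warns about C/JS code placed in a file whose backend
// mode does not match that language (only when p.pref.warn_impure_v is set).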
pub fn (mut p Parser) check_for_impure_v(language ast.Language, pos token.Position) {
|
2020-12-08 17:52:24 +01:00
|
|
|
|
if language == .v {
|
|
|
|
|
// pure V code is always allowed everywhere
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
if !p.pref.warn_impure_v {
|
|
|
|
|
// the stricter mode is not ON yet => allow everything for now
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
if p.file_backend_mode != language {
|
|
|
|
|
upcase_language := language.str().to_upper()
|
|
|
|
|
if p.file_backend_mode == .v {
|
|
|
|
|
p.warn_with_pos('$upcase_language code will not be allowed in pure .v files, please move it to a .${language}.v file instead',
|
|
|
|
|
pos)
|
|
|
|
|
return
|
|
|
|
|
} else {
|
|
|
|
|
p.warn_with_pos('$upcase_language code is not allowed in .${p.file_backend_mode}.v files, please move it to a .${language}.v file',
|
|
|
|
|
pos)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-03-30 09:33:29 +02:00
|
|
|
|
pub fn (mut p Parser) error(s string) ast.NodeError {
|
|
|
|
|
return p.error_with_pos(s, p.tok.position())
|
2020-04-10 21:00:54 +02:00
|
|
|
|
}
|
|
|
|
|
|
2020-05-10 11:26:57 +02:00
|
|
|
|
pub fn (mut p Parser) warn(s string) {
|
2020-04-10 21:00:54 +02:00
|
|
|
|
p.warn_with_pos(s, p.tok.position())
|
|
|
|
|
}
|
|
|
|
|
|
2021-03-22 18:43:06 +01:00
|
|
|
|
pub fn (mut p Parser) note(s string) {
|
|
|
|
|
p.note_with_pos(s, p.tok.position())
|
|
|
|
|
}
|
|
|
|
|
|
2021-03-30 09:33:29 +02:00
|
|
|
|
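// error_with_pos registers a parser error at the given position (or prints it
// and exits in .stdout output mode) and returns an ast.NodeError pointing to it.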
pub fn (mut p Parser) error_with_pos(s string, pos token.Position) ast.NodeError {
|
2020-12-04 19:34:05 +01:00
|
|
|
|
if p.pref.fatal_errors {
|
|
|
|
|
exit(1)
|
|
|
|
|
}
|
2020-04-21 05:11:50 +02:00
|
|
|
|
mut kind := 'error:'
|
2020-05-10 11:26:57 +02:00
|
|
|
|
if p.pref.output_mode == .stdout {
|
|
|
|
|
if p.pref.is_verbose {
|
|
|
|
|
print_backtrace()
|
|
|
|
|
kind = 'parser error:'
|
|
|
|
|
}
|
|
|
|
|
ferror := util.formatted_error(kind, s, p.file_name, pos)
|
|
|
|
|
eprintln(ferror)
|
|
|
|
|
exit(1)
|
|
|
|
|
} else {
|
|
|
|
|
p.errors << errors.Error{
|
2020-05-11 16:05:59 +02:00
|
|
|
|
file_path: p.file_name
|
|
|
|
|
pos: pos
|
|
|
|
|
reporter: .parser
|
2020-05-10 11:26:57 +02:00
|
|
|
|
message: s
|
|
|
|
|
}
|
2020-01-09 14:08:33 +01:00
|
|
|
|
}
|
2020-11-01 12:59:53 +01:00
|
|
|
|
if p.pref.output_mode == .silent {
|
|
|
|
|
// Normally, parser errors mean that the parser exits immediately, so there can be only 1 parser error.
|
|
|
|
|
// In the silent mode however, the parser continues to run, even though it would have stopped. Some
|
|
|
|
|
// of the parser logic does not expect that, and may loop forever.
|
|
|
|
|
// The p.next() here is needed, so the parser is more robust, and *always* advances, even in the -silent mode.
|
|
|
|
|
p.next()
|
|
|
|
|
}
|
2021-03-30 09:33:29 +02:00
|
|
|
|
return ast.NodeError{
|
|
|
|
|
idx: p.errors.len - 1
|
|
|
|
|
pos: pos
|
|
|
|
|
}
|
2019-12-30 09:38:12 +01:00
|
|
|
|
}
|
|
|
|
|
|
2021-03-15 12:35:55 +01:00
|
|
|
|
pub fn (mut p Parser) error_with_error(error errors.Error) {
|
|
|
|
|
if p.pref.fatal_errors {
|
|
|
|
|
exit(1)
|
|
|
|
|
}
|
|
|
|
|
mut kind := 'error:'
|
|
|
|
|
if p.pref.output_mode == .stdout {
|
|
|
|
|
if p.pref.is_verbose {
|
|
|
|
|
print_backtrace()
|
|
|
|
|
kind = 'parser error:'
|
|
|
|
|
}
|
|
|
|
|
ferror := util.formatted_error(kind, error.message, error.file_path, error.pos)
|
|
|
|
|
eprintln(ferror)
|
|
|
|
|
exit(1)
|
|
|
|
|
} else {
|
|
|
|
|
p.errors << error
|
|
|
|
|
}
|
|
|
|
|
if p.pref.output_mode == .silent {
|
|
|
|
|
// Normally, parser errors mean that the parser exits immediately, so there can be only 1 parser error.
|
|
|
|
|
// In the silent mode however, the parser continues to run, even though it would have stopped. Some
|
|
|
|
|
// of the parser logic does not expect that, and may loop forever.
|
|
|
|
|
// The p.next() here is needed, so the parser is more robust, and *always* advances, even in the -silent mode.
|
|
|
|
|
p.next()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2020-05-10 11:26:57 +02:00
|
|
|
|
pub fn (mut p Parser) warn_with_pos(s string, pos token.Position) {
|
2020-10-24 19:29:24 +02:00
|
|
|
|
if p.pref.warns_are_errors {
|
|
|
|
|
p.error_with_pos(s, pos)
|
|
|
|
|
return
|
|
|
|
|
}
|
2020-05-16 22:45:38 +02:00
|
|
|
|
if p.pref.skip_warnings {
|
|
|
|
|
return
|
|
|
|
|
}
|
2020-05-10 11:26:57 +02:00
|
|
|
|
if p.pref.output_mode == .stdout {
|
|
|
|
|
ferror := util.formatted_error('warning:', s, p.file_name, pos)
|
|
|
|
|
eprintln(ferror)
|
|
|
|
|
} else {
|
|
|
|
|
p.warnings << errors.Warning{
|
2020-05-11 16:05:59 +02:00
|
|
|
|
file_path: p.file_name
|
|
|
|
|
pos: pos
|
|
|
|
|
reporter: .parser
|
2020-05-10 11:26:57 +02:00
|
|
|
|
message: s
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-12-29 08:51:55 +01:00
|
|
|
|
}
|
|
|
|
|
|
2021-03-22 18:43:06 +01:00
|
|
|
|
pub fn (mut p Parser) note_with_pos(s string, pos token.Position) {
|
|
|
|
|
if p.pref.skip_warnings {
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
if p.pref.output_mode == .stdout {
|
|
|
|
|
ferror := util.formatted_error('notice:', s, p.file_name, pos)
|
|
|
|
|
eprintln(ferror)
|
|
|
|
|
} else {
|
|
|
|
|
p.notices << errors.Notice{
|
|
|
|
|
file_path: p.file_name
|
|
|
|
|
pos: pos
|
|
|
|
|
reporter: .parser
|
|
|
|
|
message: s
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-04-09 12:22:14 +02:00
|
|
|
|
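// vet_error appends a `v vet` error for the given 0-based line to p.vet_errors.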
pub fn (mut p Parser) vet_error(msg string, line int, fix vet.FixKind, typ vet.ErrorType) {
|
2021-01-09 15:11:49 +01:00
|
|
|
|
pos := token.Position{
|
|
|
|
|
line_nr: line + 1
|
|
|
|
|
}
|
|
|
|
|
p.vet_errors << vet.Error{
|
|
|
|
|
message: msg
|
|
|
|
|
file_path: p.scanner.file_path
|
|
|
|
|
pos: pos
|
|
|
|
|
kind: .error
|
|
|
|
|
fix: fix
|
2021-04-09 12:22:14 +02:00
|
|
|
|
typ: typ
|
2021-01-09 15:11:49 +01:00
|
|
|
|
}
|
2020-07-19 19:58:34 +02:00
|
|
|
|
}
|
|
|
|
|
|
2020-06-16 11:06:53 +02:00
|
|
|
|
fn (mut p Parser) parse_multi_expr(is_top_level bool) ast.Stmt {
|
2020-05-15 23:14:53 +02:00
|
|
|
|
// in here might be 1) multi-expr 2) multi-assign
|
|
|
|
|
// 1, a, c ... } // multi-expression
|
|
|
|
|
// a, mut b ... :=/= // multi-assign
|
|
|
|
|
// collect things up to hard boundaries
|
2020-06-16 13:20:16 +02:00
|
|
|
|
tok := p.tok
|
2021-01-19 14:49:40 +01:00
|
|
|
|
mut pos := tok.position()
|
2020-07-17 19:13:22 +02:00
|
|
|
|
left, left_comments := p.expr_list()
|
2020-06-16 13:20:16 +02:00
|
|
|
|
left0 := left[0]
|
2020-09-09 17:27:12 +02:00
|
|
|
|
if tok.kind == .key_mut && p.tok.kind != .decl_assign {
|
2021-03-30 09:33:29 +02:00
|
|
|
|
return p.error('expecting `:=` (e.g. `mut x :=`)')
|
2020-09-09 17:27:12 +02:00
|
|
|
|
}
|
2021-02-12 14:44:12 +01:00
|
|
|
|
// TODO remove translated
|
2020-06-16 13:20:16 +02:00
|
|
|
|
if p.tok.kind in [.assign, .decl_assign] || p.tok.kind.is_assign() {
|
2020-07-17 19:13:22 +02:00
|
|
|
|
return p.partial_assign_stmt(left, left_comments)
|
2021-02-12 14:44:12 +01:00
|
|
|
|
} else if !p.pref.translated
|
2021-02-15 16:41:04 +01:00
|
|
|
|
&& tok.kind !in [.key_if, .key_match, .key_lock, .key_rlock, .key_select] {
|
|
|
|
|
for node in left {
|
|
|
|
|
if node !is ast.CallExpr && (is_top_level || p.tok.kind != .rcbr)
|
|
|
|
|
&& node !is ast.PostfixExpr && !(node is ast.InfixExpr
|
|
|
|
|
&& (node as ast.InfixExpr).op in [.left_shift, .arrow]) && node !is ast.ComptimeCall
|
2021-03-06 18:09:28 +01:00
|
|
|
|
&& node !is ast.SelectorExpr && node !is ast.DumpExpr {
|
2021-03-30 09:33:29 +02:00
|
|
|
|
return p.error_with_pos('expression evaluated but not used', node.position())
|
2021-02-15 16:41:04 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
2020-06-16 13:20:16 +02:00
|
|
|
|
}
|
2021-01-19 14:49:40 +01:00
|
|
|
|
pos.update_last_line(p.prev_tok.line_nr)
|
2020-06-16 13:20:16 +02:00
|
|
|
|
if left.len == 1 {
|
2020-05-21 22:35:43 +02:00
|
|
|
|
return ast.ExprStmt{
|
2020-06-16 13:20:16 +02:00
|
|
|
|
expr: left0
|
2020-12-22 13:00:23 +01:00
|
|
|
|
pos: left0.position()
|
2020-07-17 19:13:22 +02:00
|
|
|
|
comments: left_comments
|
2020-06-16 13:20:16 +02:00
|
|
|
|
is_expr: p.inside_for
|
2020-05-18 18:33:27 +02:00
|
|
|
|
}
|
2020-06-16 13:20:16 +02:00
|
|
|
|
}
|
|
|
|
|
return ast.ExprStmt{
|
|
|
|
|
expr: ast.ConcatExpr{
|
|
|
|
|
vals: left
|
2020-11-04 12:34:12 +01:00
|
|
|
|
pos: tok.position()
|
2020-05-15 23:14:53 +02:00
|
|
|
|
}
|
2021-01-19 14:49:40 +01:00
|
|
|
|
pos: pos
|
2020-07-17 19:13:22 +02:00
|
|
|
|
comments: left_comments
|
2020-05-15 23:14:53 +02:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-04-02 00:57:09 +02:00
|
|
|
|
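// parse_ident parses a (possibly mut/shared/atomic/static prefixed) identifier,
// including the blank identifier `_`; the returned Ident has kind .unresolved.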
pub fn (mut p Parser) parse_ident(language ast.Language) ast.Ident {
|
2020-02-06 17:38:02 +01:00
|
|
|
|
// p.warn('name ')
|
2020-07-07 01:57:31 +02:00
|
|
|
|
is_shared := p.tok.kind == .key_shared
|
|
|
|
|
is_atomic := p.tok.kind == .key_atomic
|
2021-01-23 16:03:21 +01:00
|
|
|
|
if is_shared {
|
|
|
|
|
p.register_auto_import('sync')
|
|
|
|
|
}
|
2020-11-27 03:08:42 +01:00
|
|
|
|
mut_pos := p.tok.position()
|
2020-07-07 01:57:31 +02:00
|
|
|
|
is_mut := p.tok.kind == .key_mut || is_shared || is_atomic
|
2020-05-15 23:14:53 +02:00
|
|
|
|
if is_mut {
|
|
|
|
|
p.next()
|
2020-02-27 11:12:30 +01:00
|
|
|
|
}
|
2020-05-15 23:14:53 +02:00
|
|
|
|
is_static := p.tok.kind == .key_static
|
|
|
|
|
if is_static {
|
|
|
|
|
p.next()
|
2020-02-28 13:29:04 +01:00
|
|
|
|
}
|
2020-05-15 23:14:53 +02:00
|
|
|
|
if p.tok.kind == .name {
|
|
|
|
|
pos := p.tok.position()
|
|
|
|
|
mut name := p.check_name()
|
|
|
|
|
if name == '_' {
|
|
|
|
|
return ast.Ident{
|
2020-07-21 18:49:53 +02:00
|
|
|
|
tok_kind: p.tok.kind
|
2020-05-15 23:14:53 +02:00
|
|
|
|
name: '_'
|
|
|
|
|
kind: .blank_ident
|
|
|
|
|
pos: pos
|
2020-05-16 16:12:23 +02:00
|
|
|
|
info: ast.IdentVar{
|
2020-05-15 23:14:53 +02:00
|
|
|
|
is_mut: false
|
|
|
|
|
is_static: false
|
|
|
|
|
}
|
2020-12-12 09:01:12 +01:00
|
|
|
|
scope: p.scope
|
2020-05-15 23:14:53 +02:00
|
|
|
|
}
|
|
|
|
|
}
|
2020-06-19 11:46:08 +02:00
|
|
|
|
if p.inside_match_body && name == 'it' {
|
|
|
|
|
// p.warn('it')
|
|
|
|
|
}
|
2020-05-15 23:14:53 +02:00
|
|
|
|
if p.expr_mod.len > 0 {
|
|
|
|
|
name = '${p.expr_mod}.$name'
|
|
|
|
|
}
|
2020-06-08 00:47:04 +02:00
|
|
|
|
return ast.Ident{
|
2020-07-21 18:49:53 +02:00
|
|
|
|
tok_kind: p.tok.kind
|
2020-05-15 23:14:53 +02:00
|
|
|
|
kind: .unresolved
|
|
|
|
|
name: name
|
2020-05-19 17:12:47 +02:00
|
|
|
|
language: language
|
2020-05-15 23:14:53 +02:00
|
|
|
|
mod: p.mod
|
|
|
|
|
pos: pos
|
|
|
|
|
is_mut: is_mut
|
2020-11-27 03:08:42 +01:00
|
|
|
|
mut_pos: mut_pos
|
2020-06-08 00:47:04 +02:00
|
|
|
|
info: ast.IdentVar{
|
|
|
|
|
is_mut: is_mut
|
|
|
|
|
is_static: is_static
|
2021-04-02 00:57:09 +02:00
|
|
|
|
share: ast.sharetype_from_flags(is_shared, is_atomic)
|
2020-06-08 00:47:04 +02:00
|
|
|
|
}
|
2020-12-12 09:01:12 +01:00
|
|
|
|
scope: p.scope
|
2020-05-15 23:14:53 +02:00
|
|
|
|
}
|
2020-12-12 09:01:12 +01:00
|
|
|
|
}
|
2020-12-12 13:52:22 +01:00
|
|
|
|
p.error('unexpected token `$p.tok.lit`')
|
2020-12-12 09:01:12 +01:00
|
|
|
|
return ast.Ident{
|
2020-12-12 13:52:22 +01:00
|
|
|
|
scope: p.scope
|
2020-02-06 17:38:02 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-02-27 09:07:18 +01:00
|
|
|
|
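// is_typename reports whether the token looks like a type name:
// a capitalized name, or a name already known to the type table.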
fn (p &Parser) is_typename(t token.Token) bool {
|
2021-03-26 15:50:35 +01:00
|
|
|
|
return t.kind == .name && (t.lit[0].is_capital() || p.table.known_type(t.lit))
|
2021-02-27 09:07:18 +01:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// heuristics to detect `func<T>()` from `var < expr`
|
|
|
|
|
// 1. `f<[]` is generic (e.g. `f<[]int>`) because `var < []` is invalid
|
|
|
|
|
// 2. `f<map[` is generic (e.g. `f<map[string]string>`)
|
2021-03-21 12:09:43 +01:00
|
|
|
|
// 3. `f<foo>` and `f<foo<` are generic because `v1 < foo > v2` and `v1 < foo < v2` are invalid syntax
|
2021-02-27 09:07:18 +01:00
|
|
|
|
// 4. `f<Foo,` is generic when Foo is typename.
|
|
|
|
|
// otherwise it is not generic because it may be multi-value (e.g. `return f < foo, 0`).
|
|
|
|
|
// 5. `f<mod.Foo>` is same as case 3
|
|
|
|
|
// 6. `f<mod.Foo,` is same as case 4
|
|
|
|
|
// 7. otherwise, it's not generic
|
|
|
|
|
// see also test_generic_detection in vlib/v/tests/generics_test.v
|
2021-01-09 01:32:08 +01:00
|
|
|
|
fn (p &Parser) is_generic_call() bool {
|
2021-01-05 02:59:36 +01:00
|
|
|
|
lit0_is_capital := if p.tok.kind != .eof && p.tok.lit.len > 0 {
|
|
|
|
|
p.tok.lit[0].is_capital()
|
|
|
|
|
} else {
|
|
|
|
|
false
|
|
|
|
|
}
|
2021-02-27 09:07:18 +01:00
|
|
|
|
if lit0_is_capital || p.peek_tok.kind != .lt {
|
|
|
|
|
return false
|
|
|
|
|
}
|
2021-02-26 08:05:00 +01:00
|
|
|
|
tok2 := p.peek_token(2)
|
|
|
|
|
tok3 := p.peek_token(3)
|
|
|
|
|
tok4 := p.peek_token(4)
|
|
|
|
|
tok5 := p.peek_token(5)
|
2021-02-27 09:07:18 +01:00
|
|
|
|
kind2, kind3, kind4, kind5 := tok2.kind, tok3.kind, tok4.kind, tok5.kind
|
2021-02-26 08:05:00 +01:00
|
|
|
|
|
2021-02-27 09:07:18 +01:00
|
|
|
|
if kind2 == .lsbr {
|
|
|
|
|
// case 1
|
|
|
|
|
return tok3.kind == .rsbr
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if kind2 == .name {
|
|
|
|
|
if tok2.lit == 'map' && kind3 == .lsbr {
|
|
|
|
|
// case 2
|
|
|
|
|
return true
|
2021-01-05 02:59:36 +01:00
|
|
|
|
}
|
2021-02-27 09:07:18 +01:00
|
|
|
|
return match kind3 {
|
2021-03-21 12:09:43 +01:00
|
|
|
|
.gt, .lt { true } // case 3
|
2021-02-27 09:07:18 +01:00
|
|
|
|
.comma { p.is_typename(tok2) } // case 4
|
|
|
|
|
// case 5 and 6
|
|
|
|
|
.dot { kind4 == .name && (kind5 == .gt || (kind5 == .comma && p.is_typename(tok4))) }
|
|
|
|
|
else { false }
|
2021-01-05 02:59:36 +01:00
|
|
|
|
}
|
2021-02-27 09:07:18 +01:00
|
|
|
|
}
|
|
|
|
|
return false
|
2021-01-05 02:59:36 +01:00
|
|
|
|
}
|
|
|
|
|
|
2020-04-21 05:11:50 +02:00
|
|
|
|
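// name_expr parses expressions that start with a name: casts, map/chan inits,
// prefixed strings, enum values, struct inits, fn calls, and plain identifiers.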
pub fn (mut p Parser) name_expr() ast.Expr {
|
2020-09-20 03:50:09 +02:00
|
|
|
|
prev_tok_kind := p.prev_tok.kind
|
2021-03-31 10:13:15 +02:00
|
|
|
|
mut node := ast.empty_expr()
|
2020-05-05 14:19:31 +02:00
|
|
|
|
if p.expecting_type {
|
|
|
|
|
p.expecting_type = false
|
2020-04-18 00:19:33 +02:00
|
|
|
|
// get type position before moving to next
|
|
|
|
|
type_pos := p.tok.position()
|
2020-05-04 16:46:36 +02:00
|
|
|
|
typ := p.parse_type()
|
2021-04-02 00:57:09 +02:00
|
|
|
|
return ast.TypeNode{
|
2020-05-04 16:46:36 +02:00
|
|
|
|
typ: typ
|
2020-04-18 00:19:33 +02:00
|
|
|
|
pos: type_pos
|
2020-04-16 15:32:11 +02:00
|
|
|
|
}
|
|
|
|
|
}
|
2021-04-02 00:57:09 +02:00
|
|
|
|
mut language := ast.Language.v
|
2020-12-08 17:52:24 +01:00
|
|
|
|
if p.tok.lit == 'C' {
|
2021-04-02 00:57:09 +02:00
|
|
|
|
language = ast.Language.c
|
2020-12-08 17:52:24 +01:00
|
|
|
|
p.check_for_impure_v(language, p.tok.position())
|
2020-05-19 17:12:47 +02:00
|
|
|
|
} else if p.tok.lit == 'JS' {
|
2021-04-02 00:57:09 +02:00
|
|
|
|
language = ast.Language.js
|
2020-12-08 17:52:24 +01:00
|
|
|
|
p.check_for_impure_v(language, p.tok.position())
|
2020-05-19 17:12:47 +02:00
|
|
|
|
}
|
2020-04-21 05:11:50 +02:00
|
|
|
|
mut mod := ''
|
2020-02-27 00:12:37 +01:00
|
|
|
|
// p.warn('resetting')
|
|
|
|
|
p.expr_mod = ''
|
|
|
|
|
// `map[string]int` initialization
|
|
|
|
|
if p.tok.lit == 'map' && p.peek_tok.kind == .lsbr {
|
|
|
|
|
map_type := p.parse_map_type()
|
2021-01-24 00:06:43 +01:00
|
|
|
|
if p.tok.kind == .lcbr {
|
2020-05-04 17:26:28 +02:00
|
|
|
|
p.next()
|
2021-01-24 00:06:43 +01:00
|
|
|
|
if p.tok.kind == .rcbr {
|
|
|
|
|
p.next()
|
|
|
|
|
} else {
|
|
|
|
|
p.error('`}` expected; explicit `map` initialization does not support parameters')
|
|
|
|
|
}
|
2020-05-04 17:26:28 +02:00
|
|
|
|
}
|
2020-03-07 16:23:10 +01:00
|
|
|
|
return ast.MapInit{
|
2020-03-02 12:34:02 +01:00
|
|
|
|
typ: map_type
|
2021-04-06 15:16:19 +02:00
|
|
|
|
pos: p.prev_tok.position()
|
2020-03-02 12:34:02 +01:00
|
|
|
|
}
|
2020-02-27 00:12:37 +01:00
|
|
|
|
}
|
2020-08-14 21:18:42 +02:00
|
|
|
|
// `chan typ{...}`
|
|
|
|
|
if p.tok.lit == 'chan' {
|
|
|
|
|
first_pos := p.tok.position()
|
2020-08-19 02:37:17 +02:00
|
|
|
|
mut last_pos := first_pos
|
2020-08-14 21:18:42 +02:00
|
|
|
|
chan_type := p.parse_chan_type()
|
|
|
|
|
mut has_cap := false
|
2021-03-31 10:13:15 +02:00
|
|
|
|
mut cap_expr := ast.empty_expr()
|
2020-08-14 21:18:42 +02:00
|
|
|
|
p.check(.lcbr)
|
|
|
|
|
if p.tok.kind == .rcbr {
|
|
|
|
|
last_pos = p.tok.position()
|
|
|
|
|
p.next()
|
|
|
|
|
} else {
|
|
|
|
|
key := p.check_name()
|
|
|
|
|
p.check(.colon)
|
|
|
|
|
match key {
|
|
|
|
|
'cap' {
|
|
|
|
|
has_cap = true
|
|
|
|
|
cap_expr = p.expr(0)
|
|
|
|
|
}
|
|
|
|
|
'len', 'init' {
|
2021-03-30 09:33:29 +02:00
|
|
|
|
return p.error('`$key` cannot be initialized for `chan`. Did you mean `cap`?')
|
2020-08-14 21:18:42 +02:00
|
|
|
|
}
|
|
|
|
|
else {
|
2021-03-30 09:33:29 +02:00
|
|
|
|
return p.error('wrong field `$key`, expecting `cap`')
|
2020-08-14 21:18:42 +02:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
last_pos = p.tok.position()
|
|
|
|
|
p.check(.rcbr)
|
|
|
|
|
}
|
|
|
|
|
return ast.ChanInit{
|
2020-08-19 02:37:17 +02:00
|
|
|
|
pos: first_pos.extend(last_pos)
|
2020-08-14 21:18:42 +02:00
|
|
|
|
has_cap: has_cap
|
|
|
|
|
cap_expr: cap_expr
|
|
|
|
|
typ: chan_type
|
|
|
|
|
}
|
|
|
|
|
}
|
2020-03-24 17:07:27 +01:00
|
|
|
|
// Raw string (e.g. `s := r'hello \n'`)
|
2021-02-24 19:03:53 +01:00
|
|
|
|
if p.peek_tok.kind == .string && !p.inside_str_interp && p.peek_token(2).kind != .colon {
|
2020-11-30 09:58:00 +01:00
|
|
|
|
if p.tok.lit in ['r', 'c', 'js'] && p.tok.kind == .name {
|
|
|
|
|
return p.string_expr()
|
|
|
|
|
} else {
|
|
|
|
|
// don't allow any other string prefix except `r`, `js` and `c`
|
2021-03-30 09:33:29 +02:00
|
|
|
|
return p.error('only `c`, `r`, `js` are recognized string prefixes, but you tried to use `$p.tok.lit`')
|
2020-11-30 09:58:00 +01:00
|
|
|
|
}
|
2020-03-24 17:07:27 +01:00
|
|
|
|
}
|
2020-09-21 16:34:24 +02:00
|
|
|
|
// don't allow r`byte` and c`byte`
|
|
|
|
|
if p.tok.lit in ['r', 'c'] && p.peek_tok.kind == .chartoken {
|
|
|
|
|
opt := if p.tok.lit == 'r' { '`r` (raw string)' } else { '`c` (c string)' }
|
2021-03-30 09:33:29 +02:00
|
|
|
|
return p.error('cannot use $opt with `byte` and `rune`')
|
2020-09-21 16:34:24 +02:00
|
|
|
|
}
|
2020-07-03 15:10:39 +02:00
|
|
|
|
known_var := p.mark_var_as_used(p.tok.lit)
|
2020-06-29 17:15:40 +02:00
|
|
|
|
mut is_mod_cast := false
|
2021-01-23 09:33:22 +01:00
|
|
|
|
if p.peek_tok.kind == .dot && !known_var && (language != .v || p.known_import(p.tok.lit)
|
|
|
|
|
|| p.mod.all_after_last('.') == p.tok.lit) {
|
2020-07-22 19:33:43 +02:00
|
|
|
|
// p.tok.lit has been recognized as a module
|
2020-05-19 17:12:47 +02:00
|
|
|
|
if language == .c {
|
2020-02-29 11:47:47 +01:00
|
|
|
|
mod = 'C'
|
2020-05-19 17:12:47 +02:00
|
|
|
|
} else if language == .js {
|
2020-04-15 23:16:49 +02:00
|
|
|
|
mod = 'JS'
|
2020-04-07 15:48:13 +02:00
|
|
|
|
} else {
|
2020-05-14 17:14:24 +02:00
|
|
|
|
if p.tok.lit in p.imports {
|
2020-07-22 19:33:43 +02:00
|
|
|
|
// mark the imported module as used
|
2020-05-14 17:14:24 +02:00
|
|
|
|
p.register_used_import(p.tok.lit)
|
2021-02-24 19:03:53 +01:00
|
|
|
|
if p.peek_tok.kind == .dot && p.peek_token(2).kind != .eof
|
|
|
|
|
&& p.peek_token(2).lit.len > 0 && p.peek_token(2).lit[0].is_capital() {
|
2020-06-29 17:15:40 +02:00
|
|
|
|
is_mod_cast = true
|
2021-02-24 19:03:53 +01:00
|
|
|
|
} else if p.peek_tok.kind == .dot && p.peek_token(2).kind != .eof
|
|
|
|
|
&& p.peek_token(2).lit.len == 0 {
|
2020-12-26 11:23:51 +01:00
|
|
|
|
// incomplete module selector must be handled by dot_expr instead
|
|
|
|
|
node = p.parse_ident(language)
|
|
|
|
|
return node
|
2020-06-29 17:15:40 +02:00
|
|
|
|
}
|
2020-05-14 17:14:24 +02:00
|
|
|
|
}
|
2020-02-20 11:13:18 +01:00
|
|
|
|
// prepend the full import
|
|
|
|
|
mod = p.imports[p.tok.lit]
|
2020-02-19 07:16:38 +01:00
|
|
|
|
}
|
2020-01-07 16:06:37 +01:00
|
|
|
|
p.next()
|
|
|
|
|
p.check(.dot)
|
2020-02-27 00:12:37 +01:00
|
|
|
|
p.expr_mod = mod
|
2020-02-05 10:00:11 +01:00
|
|
|
|
}
|
2020-12-27 14:20:30 +01:00
|
|
|
|
lit0_is_capital := if p.tok.kind != .eof && p.tok.lit.len > 0 {
|
|
|
|
|
p.tok.lit[0].is_capital()
|
|
|
|
|
} else {
|
|
|
|
|
false
|
|
|
|
|
}
|
2021-04-02 16:34:48 +02:00
|
|
|
|
is_optional := p.tok.kind == .question
|
2020-02-18 17:29:47 +01:00
|
|
|
|
// p.warn('name expr $p.tok.lit $p.peek_tok.str()')
|
2020-08-19 02:37:17 +02:00
|
|
|
|
same_line := p.tok.line_nr == p.peek_tok.line_nr
|
|
|
|
|
// `(` must be on the same line as the name token, otherwise it's a ParExpr
|
|
|
|
|
if !same_line && p.peek_tok.kind == .lpar {
|
|
|
|
|
node = p.parse_ident(language)
|
2021-04-02 16:34:48 +02:00
|
|
|
|
} else if p.peek_tok.kind == .lpar
|
|
|
|
|
|| (is_optional && p.peek_token(2).kind == .lpar) || p.is_generic_call() {
|
2020-08-19 02:37:17 +02:00
|
|
|
|
// foo(), foo<int>() or type() cast
|
2021-04-02 16:34:48 +02:00
|
|
|
|
mut name := if is_optional { p.peek_tok.lit } else { p.tok.lit }
|
2020-02-29 11:47:47 +01:00
|
|
|
|
if mod.len > 0 {
|
|
|
|
|
name = '${mod}.$name'
|
|
|
|
|
}
|
2020-03-01 16:14:52 +01:00
|
|
|
|
name_w_mod := p.prepend_mod(name)
|
2020-02-07 21:29:28 +01:00
|
|
|
|
// type cast. TODO: finish
|
2021-04-02 00:57:09 +02:00
|
|
|
|
// if name in ast.builtin_type_names {
|
2021-01-23 09:33:22 +01:00
|
|
|
|
if (!known_var && (name in p.table.type_idxs || name_w_mod in p.table.type_idxs)
|
|
|
|
|
&& name !in ['C.stat', 'C.sigaction']) || is_mod_cast
|
|
|
|
|
|| (language == .v && name[0].is_capital()) {
|
2020-10-07 14:50:54 +02:00
|
|
|
|
// MainLetter(x) is *always* a cast, as long as it is not `C.`
|
2020-02-11 10:26:46 +01:00
|
|
|
|
// TODO handle C.stat()
|
2020-11-02 01:17:35 +01:00
|
|
|
|
start_pos := p.tok.position()
|
2020-04-21 05:11:50 +02:00
|
|
|
|
mut to_typ := p.parse_type()
|
2020-03-10 23:21:26 +01:00
|
|
|
|
if p.is_amp {
|
|
|
|
|
// Handle `&Foo(0)`
|
2020-04-25 09:08:53 +02:00
|
|
|
|
to_typ = to_typ.to_ptr()
|
2020-03-10 23:21:26 +01:00
|
|
|
|
}
|
2020-09-18 01:04:56 +02:00
|
|
|
|
// this prevents inner casts from also having an `&`
|
|
|
|
|
// example: &Foo(malloc(int(num)))
|
|
|
|
|
// without the next line int would result in int*
|
|
|
|
|
p.is_amp = false
|
2020-02-10 14:43:17 +01:00
|
|
|
|
p.check(.lpar)
|
2021-03-31 10:13:15 +02:00
|
|
|
|
mut expr := ast.empty_expr()
|
|
|
|
|
mut arg := ast.empty_expr()
|
2020-04-21 05:11:50 +02:00
|
|
|
|
mut has_arg := false
|
2020-03-05 12:08:43 +01:00
|
|
|
|
expr = p.expr(0)
|
2020-02-10 20:33:34 +01:00
|
|
|
|
// TODO, string(b, len)
|
2021-04-02 00:57:09 +02:00
|
|
|
|
if p.tok.kind == .comma && to_typ.idx() == ast.string_type_idx {
|
2020-05-07 06:51:36 +02:00
|
|
|
|
p.next()
|
2020-04-20 08:30:42 +02:00
|
|
|
|
arg = p.expr(0) // len
|
2020-03-07 16:23:10 +01:00
|
|
|
|
has_arg = true
|
2020-02-07 21:29:28 +01:00
|
|
|
|
}
|
2020-11-02 01:17:35 +01:00
|
|
|
|
end_pos := p.tok.position()
|
2020-02-10 14:43:17 +01:00
|
|
|
|
p.check(.rpar)
|
|
|
|
|
node = ast.CastExpr{
|
|
|
|
|
typ: to_typ
|
|
|
|
|
expr: expr
|
2020-03-07 16:23:10 +01:00
|
|
|
|
arg: arg
|
|
|
|
|
has_arg: has_arg
|
2020-11-02 01:17:35 +01:00
|
|
|
|
pos: start_pos.extend(end_pos)
|
2020-02-10 14:43:17 +01:00
|
|
|
|
}
|
2020-02-27 00:12:37 +01:00
|
|
|
|
p.expr_mod = ''
|
2020-02-18 08:58:20 +01:00
|
|
|
|
return node
|
2020-04-07 15:48:13 +02:00
|
|
|
|
} else {
|
2020-04-05 04:05:09 +02:00
|
|
|
|
// fn call
|
2020-02-18 08:58:20 +01:00
|
|
|
|
// println('calling $p.tok.lit')
|
2021-04-02 16:34:48 +02:00
|
|
|
|
if is_optional {
|
|
|
|
|
p.error_with_pos('unexpected $p.prev_tok', p.prev_tok.position())
|
|
|
|
|
}
|
2020-05-19 17:12:47 +02:00
|
|
|
|
node = p.call_expr(language, mod)
|
2020-02-07 21:29:28 +01:00
|
|
|
|
}
|
2021-01-23 09:33:22 +01:00
|
|
|
|
} else if (p.peek_tok.kind == .lcbr || (p.peek_tok.kind == .lt && lit0_is_capital))
|
|
|
|
|
&& (!p.inside_match || (p.inside_select && prev_tok_kind == .arrow && lit0_is_capital))
|
|
|
|
|
&& !p.inside_match_case && (!p.inside_if || p.inside_select)
|
|
|
|
|
&& (!p.inside_for || p.inside_select) { // && (p.tok.lit[0].is_capital() || p.builtin_mod) {
|
2021-02-06 22:13:24 +01:00
|
|
|
|
// map.v has struct literal: map{field: expr}
|
|
|
|
|
if p.peek_tok.kind == .lcbr && !(p.builtin_mod && p.file_base == 'map.v')
|
|
|
|
|
&& p.tok.lit == 'map' {
|
|
|
|
|
// map{key_expr: val_expr}
|
|
|
|
|
p.check(.name)
|
|
|
|
|
p.check(.lcbr)
|
|
|
|
|
map_init := p.map_init()
|
|
|
|
|
p.check(.rcbr)
|
|
|
|
|
return map_init
|
|
|
|
|
}
|
2020-04-20 08:30:42 +02:00
|
|
|
|
return p.struct_init(false) // short_syntax: false
|
2020-06-29 20:09:09 +02:00
|
|
|
|
} else if p.peek_tok.kind == .dot && (lit0_is_capital && !known_var && language == .v) {
|
2020-11-09 14:35:26 +01:00
|
|
|
|
// T.name
|
2021-01-22 13:49:56 +01:00
|
|
|
|
if p.is_generic_name() {
|
2020-11-09 14:35:26 +01:00
|
|
|
|
pos := p.tok.position()
|
|
|
|
|
name := p.check_name()
|
|
|
|
|
p.check(.dot)
|
|
|
|
|
field := p.check_name()
|
|
|
|
|
pos.extend(p.tok.position())
|
|
|
|
|
return ast.SelectorExpr{
|
|
|
|
|
expr: ast.Ident{
|
|
|
|
|
name: name
|
2020-12-12 09:01:12 +01:00
|
|
|
|
scope: p.scope
|
2020-11-09 14:35:26 +01:00
|
|
|
|
}
|
|
|
|
|
field_name: field
|
|
|
|
|
pos: pos
|
2020-12-12 09:01:12 +01:00
|
|
|
|
scope: p.scope
|
2020-11-09 14:35:26 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
2020-02-27 00:12:37 +01:00
|
|
|
|
// `Color.green`
|
2020-04-21 05:11:50 +02:00
|
|
|
|
mut enum_name := p.check_name()
|
2021-04-02 16:26:37 +02:00
|
|
|
|
enum_name_pos := p.prev_tok.position()
|
2020-02-27 00:12:37 +01:00
|
|
|
|
if mod != '' {
|
|
|
|
|
enum_name = mod + '.' + enum_name
|
2020-04-07 15:48:13 +02:00
|
|
|
|
} else {
|
2021-03-07 16:44:38 +01:00
|
|
|
|
enum_name = p.imported_symbols[enum_name] or { p.prepend_mod(enum_name) }
|
2020-02-27 00:12:37 +01:00
|
|
|
|
}
|
|
|
|
|
// p.warn('Color.green $enum_name ' + p.prepend_mod(enum_name) + 'mod=$mod')
|
2020-02-25 15:02:34 +01:00
|
|
|
|
p.check(.dot)
|
|
|
|
|
val := p.check_name()
|
|
|
|
|
// println('enum val $enum_name . $val')
|
2020-02-27 00:12:37 +01:00
|
|
|
|
p.expr_mod = ''
|
2020-02-25 15:02:34 +01:00
|
|
|
|
return ast.EnumVal{
|
2020-04-07 15:48:13 +02:00
|
|
|
|
enum_name: enum_name
|
2020-02-25 15:02:34 +01:00
|
|
|
|
val: val
|
2021-04-02 16:26:37 +02:00
|
|
|
|
pos: enum_name_pos.extend(p.prev_tok.position())
|
2020-03-15 00:46:08 +01:00
|
|
|
|
mod: mod
|
2020-02-25 15:02:34 +01:00
|
|
|
|
}
|
2021-02-24 19:03:53 +01:00
|
|
|
|
} else if language == .js && p.peek_tok.kind == .dot && p.peek_token(2).kind == .name {
|
2021-02-06 22:13:24 +01:00
|
|
|
|
// JS. function call with more than 1 dot
|
2020-05-19 17:12:47 +02:00
|
|
|
|
node = p.call_expr(language, mod)
|
2020-04-07 15:48:13 +02:00
|
|
|
|
} else {
|
2020-05-19 17:12:47 +02:00
|
|
|
|
node = p.parse_ident(language)
|
2020-01-06 16:13:12 +01:00
|
|
|
|
}
|
2020-02-27 00:12:37 +01:00
|
|
|
|
p.expr_mod = ''
|
2020-02-18 08:58:20 +01:00
|
|
|
|
return node
|
2020-01-06 16:13:12 +01:00
|
|
|
|
}
|
|
|
|
|
|
2020-04-21 05:11:50 +02:00
|
|
|
|
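// index_expr parses `a[i]` and range indexing like `a[low..high]`,
// including an optional trailing `or { ... }` block or `?` propagation.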
fn (mut p Parser) index_expr(left ast.Expr) ast.IndexExpr {
|
2020-02-10 23:19:50 +01:00
|
|
|
|
// left == `a` in `a[0]`
|
2020-07-20 18:52:03 +02:00
|
|
|
|
start_pos := p.tok.position()
|
2020-04-20 08:30:42 +02:00
|
|
|
|
p.next() // [
|
2020-04-21 05:11:50 +02:00
|
|
|
|
mut has_low := true
|
2020-02-10 23:19:50 +01:00
|
|
|
|
if p.tok.kind == .dotdot {
|
2020-03-06 22:24:39 +01:00
|
|
|
|
has_low = false
|
2020-02-10 23:19:50 +01:00
|
|
|
|
// [..end]
|
|
|
|
|
p.next()
|
2020-03-05 12:08:43 +01:00
|
|
|
|
high := p.expr(0)
|
2020-07-20 18:52:03 +02:00
|
|
|
|
pos := start_pos.extend(p.tok.position())
|
2020-02-10 23:19:50 +01:00
|
|
|
|
p.check(.rsbr)
|
|
|
|
|
return ast.IndexExpr{
|
|
|
|
|
left: left
|
2020-07-20 18:52:03 +02:00
|
|
|
|
pos: pos
|
2020-02-10 23:19:50 +01:00
|
|
|
|
index: ast.RangeExpr{
|
2021-03-31 10:13:15 +02:00
|
|
|
|
low: ast.empty_expr()
|
2020-04-08 00:59:28 +02:00
|
|
|
|
high: high
|
|
|
|
|
has_high: true
|
2020-11-04 12:34:12 +01:00
|
|
|
|
pos: pos
|
2020-04-08 00:59:28 +02:00
|
|
|
|
}
|
2020-02-15 13:37:48 +01:00
|
|
|
|
}
|
2020-02-02 14:31:54 +01:00
|
|
|
|
}
|
2020-07-20 18:52:03 +02:00
|
|
|
|
expr := p.expr(0) // `[expr]` or `[expr..`
|
2020-04-21 05:11:50 +02:00
|
|
|
|
mut has_high := false
|
2020-02-10 23:19:50 +01:00
|
|
|
|
if p.tok.kind == .dotdot {
|
|
|
|
|
// [start..end] or [start..]
|
2020-05-07 06:51:36 +02:00
|
|
|
|
p.next()
|
2021-03-31 10:13:15 +02:00
|
|
|
|
mut high := ast.empty_expr()
|
2020-02-10 23:19:50 +01:00
|
|
|
|
if p.tok.kind != .rsbr {
|
2020-03-06 22:24:39 +01:00
|
|
|
|
has_high = true
|
2020-03-05 12:08:43 +01:00
|
|
|
|
high = p.expr(0)
|
2020-02-10 23:19:50 +01:00
|
|
|
|
}
|
2020-07-20 18:52:03 +02:00
|
|
|
|
pos := start_pos.extend(p.tok.position())
|
2020-02-10 23:19:50 +01:00
|
|
|
|
p.check(.rsbr)
|
|
|
|
|
return ast.IndexExpr{
|
|
|
|
|
left: left
|
2020-07-20 18:52:03 +02:00
|
|
|
|
pos: pos
|
2020-02-10 23:19:50 +01:00
|
|
|
|
index: ast.RangeExpr{
|
2020-04-08 00:59:28 +02:00
|
|
|
|
low: expr
|
|
|
|
|
high: high
|
|
|
|
|
has_high: has_high
|
|
|
|
|
has_low: has_low
|
2020-11-04 12:34:12 +01:00
|
|
|
|
pos: pos
|
2020-04-08 00:59:28 +02:00
|
|
|
|
}
|
2020-02-15 13:37:48 +01:00
|
|
|
|
}
|
2020-02-11 12:59:40 +01:00
|
|
|
|
}
|
2020-02-10 23:19:50 +01:00
|
|
|
|
// [expr]
|
2020-07-20 18:52:03 +02:00
|
|
|
|
pos := start_pos.extend(p.tok.position())
|
2020-01-07 12:14:10 +01:00
|
|
|
|
p.check(.rsbr)
|
2021-01-19 13:46:47 +01:00
|
|
|
|
mut or_kind := ast.OrKind.absent
|
|
|
|
|
mut or_stmts := []ast.Stmt{}
|
|
|
|
|
mut or_pos := token.Position{}
|
|
|
|
|
if !p.or_is_handled {
|
|
|
|
|
// a[i] or { ... }
|
|
|
|
|
if p.tok.kind == .key_orelse {
|
|
|
|
|
was_inside_or_expr := p.inside_or_expr
|
|
|
|
|
or_pos = p.tok.position()
|
|
|
|
|
p.next()
|
|
|
|
|
p.open_scope()
|
|
|
|
|
or_stmts = p.parse_block_no_scope(false)
|
|
|
|
|
or_pos = or_pos.extend(p.prev_tok.position())
|
|
|
|
|
p.close_scope()
|
|
|
|
|
p.inside_or_expr = was_inside_or_expr
|
|
|
|
|
return ast.IndexExpr{
|
|
|
|
|
left: left
|
|
|
|
|
index: expr
|
|
|
|
|
pos: pos
|
|
|
|
|
or_expr: ast.OrExpr{
|
|
|
|
|
kind: .block
|
|
|
|
|
stmts: or_stmts
|
|
|
|
|
pos: or_pos
|
|
|
|
|
}
|
2020-12-19 10:28:17 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
2021-01-19 13:46:47 +01:00
|
|
|
|
// `a[i] ?`
|
|
|
|
|
if p.tok.kind == .question {
|
2021-04-06 15:16:19 +02:00
|
|
|
|
or_pos = p.tok.position()
|
2021-01-19 13:46:47 +01:00
|
|
|
|
or_kind = .propagate
|
2021-04-06 15:16:19 +02:00
|
|
|
|
p.next()
|
2021-01-19 13:46:47 +01:00
|
|
|
|
}
|
2020-12-19 10:28:17 +01:00
|
|
|
|
}
|
2020-02-10 23:19:50 +01:00
|
|
|
|
return ast.IndexExpr{
|
2020-01-07 12:14:10 +01:00
|
|
|
|
left: left
|
2020-02-10 23:19:50 +01:00
|
|
|
|
index: expr
|
2020-07-20 18:52:03 +02:00
|
|
|
|
pos: pos
|
2020-12-19 10:28:17 +01:00
|
|
|
|
or_expr: ast.OrExpr{
|
2021-01-19 13:46:47 +01:00
|
|
|
|
kind: or_kind
|
|
|
|
|
stmts: or_stmts
|
|
|
|
|
pos: or_pos
|
2020-12-19 10:28:17 +01:00
|
|
|
|
}
|
2020-02-15 13:37:48 +01:00
|
|
|
|
}
|
2020-01-07 12:14:10 +01:00
|
|
|
|
}
|
|
|
|
|
|
2020-05-02 15:26:58 +02:00
|
|
|
|
fn (mut p Parser) scope_register_it() {
|
2020-12-02 14:40:25 +01:00
|
|
|
|
p.scope.register(ast.Var{
|
2020-02-15 13:37:48 +01:00
|
|
|
|
name: 'it'
|
2020-04-27 15:16:31 +02:00
|
|
|
|
pos: p.tok.position()
|
|
|
|
|
is_used: true
|
2020-02-15 13:37:48 +01:00
|
|
|
|
})
|
2020-02-10 14:42:57 +01:00
|
|
|
|
}
|
|
|
|
|
|
2020-08-12 05:54:51 +02:00
|
|
|
|
fn (mut p Parser) scope_register_ab() {
|
2020-12-02 14:40:25 +01:00
|
|
|
|
p.scope.register(ast.Var{
|
2020-08-12 05:54:51 +02:00
|
|
|
|
name: 'a'
|
|
|
|
|
pos: p.tok.position()
|
|
|
|
|
is_used: true
|
|
|
|
|
})
|
2020-12-02 14:40:25 +01:00
|
|
|
|
p.scope.register(ast.Var{
|
2020-08-12 05:54:51 +02:00
|
|
|
|
name: 'b'
|
|
|
|
|
pos: p.tok.position()
|
|
|
|
|
is_used: true
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
2020-04-21 05:11:50 +02:00
|
|
|
|
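// dot_expr parses what follows a `.`: a method call (with optional generic
// type list, `or { ... }` block or `?`), or a field selector expression.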
fn (mut p Parser) dot_expr(left ast.Expr) ast.Expr {
|
2020-01-06 16:13:12 +01:00
|
|
|
|
p.next()
|
2020-05-27 03:20:22 +02:00
|
|
|
|
if p.tok.kind == .dollar {
|
2021-01-05 15:11:43 +01:00
|
|
|
|
return p.comptime_selector(left)
|
2020-05-27 03:20:22 +02:00
|
|
|
|
}
|
2021-01-05 02:59:36 +01:00
|
|
|
|
is_generic_call := p.is_generic_call()
|
2020-11-26 11:28:54 +01:00
|
|
|
|
name_pos := p.tok.position()
|
2020-12-31 12:37:11 +01:00
|
|
|
|
mut field_name := ''
|
|
|
|
|
// check if the name is on the same line as the dot
|
|
|
|
|
if (p.prev_tok.position().line_nr == name_pos.line_nr) || p.tok.kind != .name {
|
|
|
|
|
field_name = p.check_name()
|
|
|
|
|
} else {
|
|
|
|
|
p.name_error = true
|
|
|
|
|
}
|
2021-03-18 15:24:16 +01:00
|
|
|
|
is_filter := field_name in ['filter', 'map', 'any', 'all']
|
2021-01-03 16:57:29 +01:00
|
|
|
|
if is_filter || field_name == 'sort' {
|
2020-08-12 05:54:51 +02:00
|
|
|
|
p.open_scope()
|
2020-02-10 14:42:57 +01:00
|
|
|
|
}
|
2020-06-06 12:43:35 +02:00
|
|
|
|
// ! in mutable methods
|
|
|
|
|
if p.tok.kind == .not && p.peek_tok.kind == .lpar {
|
|
|
|
|
p.next()
|
|
|
|
|
}
|
2020-03-18 09:56:19 +01:00
|
|
|
|
// Method call
|
2020-06-06 12:28:03 +02:00
|
|
|
|
// TODO move to fn.v call_expr()
|
2021-04-02 00:57:09 +02:00
|
|
|
|
mut generic_types := []ast.Type{}
|
2020-12-31 18:00:22 +01:00
|
|
|
|
mut generic_list_pos := p.tok.position()
|
2021-01-05 02:59:36 +01:00
|
|
|
|
if is_generic_call {
|
2020-12-31 18:00:22 +01:00
|
|
|
|
// `g.foo<int>(10)`
|
2021-01-22 13:49:56 +01:00
|
|
|
|
generic_types = p.parse_generic_type_list()
|
2020-12-31 18:00:22 +01:00
|
|
|
|
generic_list_pos = generic_list_pos.extend(p.prev_tok.position())
|
|
|
|
|
// In case of `foo<T>()`
|
|
|
|
|
// T is unwrapped and registered in the checker.
|
2021-01-22 13:49:56 +01:00
|
|
|
|
has_generic_generic := generic_types.filter(it.has_flag(.generic)).len > 0
|
|
|
|
|
if !has_generic_generic {
|
|
|
|
|
// will be added in checker
|
2021-04-15 10:00:23 +02:00
|
|
|
|
p.table.register_fn_generic_types(field_name, generic_types)
|
2020-12-31 18:00:22 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
2020-01-07 01:08:24 +01:00
|
|
|
|
if p.tok.kind == .lpar {
|
|
|
|
|
p.next()
|
2020-03-14 11:11:43 +01:00
|
|
|
|
args := p.call_args()
|
2020-04-10 14:53:06 +02:00
|
|
|
|
p.check(.rpar)
|
2020-04-26 09:17:13 +02:00
|
|
|
|
mut or_stmts := []ast.Stmt{}
|
2020-05-23 08:51:15 +02:00
|
|
|
|
mut or_kind := ast.OrKind.absent
|
2020-11-04 15:02:01 +01:00
|
|
|
|
mut or_pos := p.tok.position()
|
2020-02-04 17:44:39 +01:00
|
|
|
|
if p.tok.kind == .key_orelse {
|
|
|
|
|
p.next()
|
2020-03-18 09:56:19 +01:00
|
|
|
|
p.open_scope()
|
2020-12-02 14:40:25 +01:00
|
|
|
|
p.scope.register(ast.Var{
|
2020-03-18 09:56:19 +01:00
|
|
|
|
name: 'err'
|
2021-04-02 00:57:09 +02:00
|
|
|
|
typ: ast.error_type
|
2020-04-27 15:16:31 +02:00
|
|
|
|
pos: p.tok.position()
|
|
|
|
|
is_used: true
|
2020-03-18 09:56:19 +01:00
|
|
|
|
})
|
2020-05-23 08:51:15 +02:00
|
|
|
|
or_kind = .block
|
2020-06-06 17:47:16 +02:00
|
|
|
|
or_stmts = p.parse_block_no_scope(false)
|
2020-11-04 15:02:01 +01:00
|
|
|
|
or_pos = or_pos.extend(p.prev_tok.position())
|
2020-03-18 09:56:19 +01:00
|
|
|
|
p.close_scope()
|
2020-02-04 17:44:39 +01:00
|
|
|
|
}
|
2020-06-06 12:43:35 +02:00
|
|
|
|
// `foo()?`
|
2020-05-23 08:51:15 +02:00
|
|
|
|
if p.tok.kind == .question {
|
|
|
|
|
p.next()
|
|
|
|
|
or_kind = .propagate
|
|
|
|
|
}
|
2020-06-06 12:43:35 +02:00
|
|
|
|
//
|
2020-12-03 10:25:06 +01:00
|
|
|
|
end_pos := p.prev_tok.position()
|
|
|
|
|
pos := name_pos.extend(end_pos)
|
2021-02-08 17:16:02 +01:00
|
|
|
|
comments := p.eat_comments(same_line: true)
|
2020-03-30 12:39:20 +02:00
|
|
|
|
mcall_expr := ast.CallExpr{
|
|
|
|
|
left: left
|
2020-01-07 01:08:24 +01:00
|
|
|
|
name: field_name
|
|
|
|
|
args: args
|
2021-04-02 16:26:37 +02:00
|
|
|
|
name_pos: name_pos
|
2020-03-06 10:52:03 +01:00
|
|
|
|
pos: pos
|
2020-03-30 12:39:20 +02:00
|
|
|
|
is_method: true
|
2021-01-22 13:49:56 +01:00
|
|
|
|
generic_types: generic_types
|
2020-12-31 18:00:22 +01:00
|
|
|
|
generic_list_pos: generic_list_pos
|
2020-02-28 15:36:41 +01:00
|
|
|
|
or_block: ast.OrExpr{
|
2020-04-07 16:36:00 +02:00
|
|
|
|
stmts: or_stmts
|
2020-05-23 08:51:15 +02:00
|
|
|
|
kind: or_kind
|
2020-11-04 15:02:01 +01:00
|
|
|
|
pos: or_pos
|
2020-04-07 16:36:00 +02:00
|
|
|
|
}
|
2020-12-12 09:01:12 +01:00
|
|
|
|
scope: p.scope
|
2021-01-13 19:10:17 +01:00
|
|
|
|
comments: comments
|
2020-01-07 01:08:24 +01:00
|
|
|
|
}
|
2020-08-12 05:54:51 +02:00
|
|
|
|
if is_filter || field_name == 'sort' {
|
2020-03-06 10:52:03 +01:00
|
|
|
|
p.close_scope()
|
|
|
|
|
}
|
2020-05-21 22:35:43 +02:00
|
|
|
|
return mcall_expr
|
2020-01-08 10:19:12 +01:00
|
|
|
|
}
|
2020-11-27 03:08:42 +01:00
|
|
|
|
mut is_mut := false
|
|
|
|
|
mut mut_pos := token.Position{}
|
|
|
|
|
if p.inside_match || p.inside_if_expr {
|
|
|
|
|
match left {
|
|
|
|
|
ast.Ident, ast.SelectorExpr {
|
|
|
|
|
is_mut = left.is_mut
|
|
|
|
|
mut_pos = left.mut_pos
|
|
|
|
|
}
|
|
|
|
|
else {}
|
|
|
|
|
}
|
|
|
|
|
}
|
2020-12-22 13:00:23 +01:00
|
|
|
|
pos := if p.name_error { left.position().extend(name_pos) } else { name_pos }
|
2020-01-18 23:26:14 +01:00
|
|
|
|
sel_expr := ast.SelectorExpr{
|
2020-01-06 16:13:12 +01:00
|
|
|
|
expr: left
|
2020-05-09 15:16:48 +02:00
|
|
|
|
field_name: field_name
|
2020-12-22 13:00:23 +01:00
|
|
|
|
pos: pos
|
2020-11-27 03:08:42 +01:00
|
|
|
|
is_mut: is_mut
|
2020-11-29 20:23:37 +01:00
|
|
|
|
mut_pos: mut_pos
|
2020-12-12 09:01:12 +01:00
|
|
|
|
scope: p.scope
|
2021-01-09 01:32:33 +01:00
|
|
|
|
next_token: p.tok.kind
|
2019-12-22 02:34:37 +01:00
|
|
|
|
}
|
2020-03-25 10:09:50 +01:00
|
|
|
|
if is_filter {
|
2020-03-06 10:52:03 +01:00
|
|
|
|
p.close_scope()
|
|
|
|
|
}
|
2021-01-09 01:32:33 +01:00
|
|
|
|
return sel_expr
|
2019-12-24 18:54:43 +01:00
|
|
|
|
}
|
|
|
|
|
|
2021-04-02 00:57:09 +02:00
|
|
|
|
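// parse_generic_type_list parses an explicit `<T1, T2, ...>` list of concrete
// types for a generic call; it returns an empty list when no `<` follows.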
fn (mut p Parser) parse_generic_type_list() []ast.Type {
|
|
|
|
|
mut types := []ast.Type{}
|
2021-01-22 13:49:56 +01:00
|
|
|
|
if p.tok.kind != .lt {
|
|
|
|
|
return types
|
|
|
|
|
}
|
|
|
|
|
p.next() // `<`
|
|
|
|
|
mut first_done := false
|
|
|
|
|
for p.tok.kind !in [.eof, .gt] {
|
|
|
|
|
if first_done {
|
|
|
|
|
p.check(.comma)
|
|
|
|
|
}
|
|
|
|
|
types << p.parse_type()
|
|
|
|
|
first_done = true
|
|
|
|
|
}
|
|
|
|
|
p.check(.gt) // `>`
|
|
|
|
|
return types
|
|
|
|
|
}
|
|
|
|
|
|
2020-02-10 20:33:34 +01:00
|
|
|
|
// `.green`
|
2020-02-20 15:42:56 +01:00
|
|
|
|
// `pref.BuildMode.default_mode`
|
2020-04-21 05:11:50 +02:00
|
|
|
|
fn (mut p Parser) enum_val() ast.EnumVal {
|
2020-12-11 18:23:29 +01:00
|
|
|
|
start_pos := p.tok.position()
|
2020-02-10 20:33:34 +01:00
|
|
|
|
p.check(.dot)
|
2020-02-25 15:02:34 +01:00
|
|
|
|
val := p.check_name()
|
2020-02-26 15:51:05 +01:00
|
|
|
|
return ast.EnumVal{
|
2020-02-25 15:02:34 +01:00
|
|
|
|
val: val
|
2020-12-11 18:23:29 +01:00
|
|
|
|
pos: start_pos.extend(p.prev_tok.position())
|
2020-02-10 20:33:34 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
2020-01-03 11:36:17 +01:00
|
|
|
|
|
2021-04-12 15:03:22 +02:00
|
|
|
|
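// filter_string_vet_errors removes vet errors that were reported for lines
// inside the span of the just parsed (possibly multi-line) string literal.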
fn (mut p Parser) filter_string_vet_errors(pos token.Position) {
|
|
|
|
|
if p.vet_errors.len == 0 {
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
p.vet_errors = p.vet_errors.filter(
|
|
|
|
|
(it.typ == .trailing_space && it.pos.line_nr - 1 >= pos.last_line)
|
|
|
|
|
|| (it.typ != .trailing_space && it.pos.line_nr - 1 > pos.last_line)
|
|
|
|
|
|| (it.typ == .space_indent && it.pos.line_nr - 1 <= pos.line_nr)
|
|
|
|
|
|| (it.typ != .space_indent && it.pos.line_nr - 1 < pos.line_nr))
|
|
|
|
|
}
|
|
|
|
|
|
2020-04-21 05:11:50 +02:00
|
|
|
|
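// string_expr parses plain, raw (r'...') and C (c'...') string literals,
// as well as `$` interpolated strings with their `:` format specifiers.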
fn (mut p Parser) string_expr() ast.Expr {
|
2020-03-24 17:07:27 +01:00
|
|
|
|
is_raw := p.tok.kind == .name && p.tok.lit == 'r'
|
|
|
|
|
is_cstr := p.tok.kind == .name && p.tok.lit == 'c'
|
|
|
|
|
if is_raw || is_cstr {
|
|
|
|
|
p.next()
|
|
|
|
|
}
|
2021-03-31 10:13:15 +02:00
|
|
|
|
mut node := ast.empty_expr()
|
2020-03-21 07:01:06 +01:00
|
|
|
|
val := p.tok.lit
|
2021-04-03 12:16:49 +02:00
|
|
|
|
mut pos := p.tok.position()
|
|
|
|
|
pos.last_line = pos.line_nr + val.count('\n')
|
2020-01-07 01:08:24 +01:00
|
|
|
|
if p.peek_tok.kind != .str_dollar {
|
|
|
|
|
p.next()
|
2021-04-12 15:03:22 +02:00
|
|
|
|
p.filter_string_vet_errors(pos)
|
2020-04-10 10:59:07 +02:00
|
|
|
|
node = ast.StringLiteral{
|
|
|
|
|
val: val
|
|
|
|
|
is_raw: is_raw
|
2021-04-02 00:57:09 +02:00
|
|
|
|
language: if is_cstr { ast.Language.c } else { ast.Language.v }
|
2020-04-10 10:59:07 +02:00
|
|
|
|
pos: pos
|
|
|
|
|
}
|
2020-03-05 12:08:43 +01:00
|
|
|
|
return node
|
2020-01-07 01:08:24 +01:00
|
|
|
|
}
|
2020-04-26 09:17:13 +02:00
|
|
|
|
mut exprs := []ast.Expr{}
|
|
|
|
|
mut vals := []string{}
|
2020-06-16 10:41:51 +02:00
|
|
|
|
mut has_fmts := []bool{}
|
|
|
|
|
mut fwidths := []int{}
|
|
|
|
|
mut precisions := []int{}
|
|
|
|
|
mut visible_pluss := []bool{}
|
|
|
|
|
mut fills := []bool{}
|
|
|
|
|
mut fmts := []byte{}
|
|
|
|
|
mut fposs := []token.Position{}
|
2020-01-07 01:08:24 +01:00
|
|
|
|
// Handle $ interpolation
|
2020-06-05 22:37:34 +02:00
|
|
|
|
p.inside_str_interp = true
|
2020-03-16 03:19:26 +01:00
|
|
|
|
for p.tok.kind == .string {
|
2020-03-21 07:01:06 +01:00
|
|
|
|
vals << p.tok.lit
|
2020-01-07 01:08:24 +01:00
|
|
|
|
p.next()
|
|
|
|
|
if p.tok.kind != .str_dollar {
|
2020-06-13 22:38:10 +02:00
|
|
|
|
break
|
2020-01-07 01:08:24 +01:00
|
|
|
|
}
|
2020-05-07 06:51:36 +02:00
|
|
|
|
p.next()
|
2020-03-21 07:01:06 +01:00
|
|
|
|
exprs << p.expr(0)
|
2020-06-16 10:41:51 +02:00
|
|
|
|
mut has_fmt := false
|
|
|
|
|
mut fwidth := 0
|
|
|
|
|
mut fwidthneg := false
|
2020-10-16 16:28:11 +02:00
|
|
|
|
// 987698 is a magic default value, unlikely to be present in user input. NB: 0 is valid precision
|
2020-10-16 16:47:23 +02:00
|
|
|
|
mut precision := 987698
|
2020-06-16 10:41:51 +02:00
|
|
|
|
mut visible_plus := false
|
|
|
|
|
mut fill := false
|
|
|
|
|
mut fmt := `_` // placeholder
|
2020-02-10 20:33:34 +01:00
|
|
|
|
if p.tok.kind == .colon {
|
2020-02-04 09:54:15 +01:00
|
|
|
|
p.next()
|
2020-04-11 17:25:39 +02:00
|
|
|
|
// ${num:-2d}
|
|
|
|
|
if p.tok.kind == .minus {
|
2020-06-16 10:41:51 +02:00
|
|
|
|
fwidthneg = true
|
|
|
|
|
p.next()
|
|
|
|
|
} else if p.tok.kind == .plus {
|
|
|
|
|
visible_plus = true
|
2020-04-11 17:25:39 +02:00
|
|
|
|
p.next()
|
|
|
|
|
}
|
|
|
|
|
// ${num:2d}
|
|
|
|
|
if p.tok.kind == .number {
|
2020-06-16 10:41:51 +02:00
|
|
|
|
fields := p.tok.lit.split('.')
|
|
|
|
|
if fields[0].len > 0 && fields[0][0] == `0` {
|
|
|
|
|
fill = true
|
|
|
|
|
}
|
2020-07-11 18:53:50 +02:00
|
|
|
|
fwidth = fields[0].int()
|
2020-06-16 10:41:51 +02:00
|
|
|
|
if fwidthneg {
|
|
|
|
|
fwidth = -fwidth
|
|
|
|
|
}
|
|
|
|
|
if fields.len > 1 {
|
2020-07-11 18:53:50 +02:00
|
|
|
|
precision = fields[1].int()
|
2020-06-16 10:41:51 +02:00
|
|
|
|
}
|
2020-04-11 17:25:39 +02:00
|
|
|
|
p.next()
|
|
|
|
|
}
|
2020-06-16 10:41:51 +02:00
|
|
|
|
if p.tok.kind == .name {
|
|
|
|
|
if p.tok.lit.len == 1 {
|
|
|
|
|
fmt = p.tok.lit[0]
|
2020-06-17 19:49:13 +02:00
|
|
|
|
has_fmt = true
|
2020-06-16 10:41:51 +02:00
|
|
|
|
p.next()
|
|
|
|
|
} else {
|
2021-03-30 09:33:29 +02:00
|
|
|
|
return p.error('format specifier may only be one letter')
|
2020-06-16 10:41:51 +02:00
|
|
|
|
}
|
2020-02-11 10:26:46 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
2020-06-16 10:41:51 +02:00
|
|
|
|
fwidths << fwidth
|
|
|
|
|
has_fmts << has_fmt
|
|
|
|
|
precisions << precision
|
|
|
|
|
visible_pluss << visible_plus
|
|
|
|
|
fmts << fmt
|
|
|
|
|
fills << fill
|
|
|
|
|
fposs << p.prev_tok.position()
|
2020-01-07 01:08:24 +01:00
|
|
|
|
}
|
2021-04-12 15:03:22 +02:00
|
|
|
|
pos = pos.extend(p.prev_tok.position())
|
|
|
|
|
p.filter_string_vet_errors(pos)
|
2020-03-21 07:01:06 +01:00
|
|
|
|
node = ast.StringInterLiteral{
|
|
|
|
|
vals: vals
|
|
|
|
|
exprs: exprs
|
2020-06-18 20:21:08 +02:00
|
|
|
|
need_fmts: has_fmts
|
2020-06-16 10:41:51 +02:00
|
|
|
|
fwidths: fwidths
|
|
|
|
|
precisions: precisions
|
|
|
|
|
pluss: visible_pluss
|
|
|
|
|
fills: fills
|
|
|
|
|
fmts: fmts
|
|
|
|
|
fmt_poss: fposs
|
2021-04-12 15:03:22 +02:00
|
|
|
|
pos: pos
|
2020-03-21 07:01:06 +01:00
|
|
|
|
}
|
2020-06-18 20:21:08 +02:00
|
|
|
|
// need_fmts: preliminary - until the checker finds out if it is really needed
|
2020-06-05 22:37:34 +02:00
|
|
|
|
p.inside_str_interp = false
|
2020-03-05 12:08:43 +01:00
|
|
|
|
return node
|
2019-12-28 09:15:32 +01:00
|
|
|
|
}
|
|
|
|
|
|
2020-04-21 05:11:50 +02:00
|
|
|
|
fn (mut p Parser) parse_number_literal() ast.Expr {
|
2021-01-24 22:09:51 +01:00
|
|
|
|
mut pos := p.tok.position()
|
|
|
|
|
is_neg := p.tok.kind == .minus
|
|
|
|
|
if is_neg {
|
|
|
|
|
p.next()
|
|
|
|
|
pos = pos.extend(p.tok.position())
|
|
|
|
|
}
|
2019-12-28 09:15:32 +01:00
|
|
|
|
lit := p.tok.lit
|
2021-01-24 22:09:51 +01:00
|
|
|
|
full_lit := if is_neg { '-' + lit } else { lit }
|
2021-03-31 10:13:15 +02:00
|
|
|
|
mut node := ast.empty_expr()
|
2020-04-17 20:31:32 +02:00
|
|
|
|
if lit.index_any('.eE') >= 0 && lit[..2] !in ['0x', '0X', '0o', '0O', '0b', '0B'] {
|
2019-12-28 09:15:32 +01:00
|
|
|
|
node = ast.FloatLiteral{
|
2021-01-24 22:09:51 +01:00
|
|
|
|
val: full_lit
|
2020-04-20 14:49:26 +02:00
|
|
|
|
pos: pos
|
2019-12-28 09:15:32 +01:00
|
|
|
|
}
|
2020-04-07 15:48:13 +02:00
|
|
|
|
} else {
|
2019-12-28 09:15:32 +01:00
|
|
|
|
node = ast.IntegerLiteral{
|
2021-01-24 22:09:51 +01:00
|
|
|
|
val: full_lit
|
2020-04-10 00:09:34 +02:00
|
|
|
|
pos: pos
|
2019-12-28 09:15:32 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
p.next()
|
2020-03-05 12:08:43 +01:00
|
|
|
|
return node
|
2019-12-28 09:15:32 +01:00
|
|
|
|
}
|
|
|
|
|
|
2020-04-21 05:11:50 +02:00
|
|
|
|
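// module_decl parses an optional `[attrs] module name` declaration at the top
// of a file; when it is missing, the module name defaults to 'main'.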
fn (mut p Parser) module_decl() ast.Module {
|
2021-04-02 00:57:09 +02:00
|
|
|
|
mut module_attrs := []ast.Attr{}
|
2021-01-29 11:17:59 +01:00
|
|
|
|
mut attrs_pos := p.tok.position()
|
2021-01-08 16:24:42 +01:00
|
|
|
|
if p.tok.kind == .lsbr {
|
|
|
|
|
p.attributes()
|
|
|
|
|
module_attrs = p.attrs
|
|
|
|
|
}
|
2020-04-21 05:11:50 +02:00
|
|
|
|
mut name := 'main'
|
2020-04-10 22:32:52 +02:00
|
|
|
|
is_skipped := p.tok.kind != .key_module
|
2020-05-16 16:12:23 +02:00
|
|
|
|
mut module_pos := token.Position{}
|
2021-01-09 05:36:38 +01:00
|
|
|
|
mut name_pos := token.Position{}
|
|
|
|
|
mut mod_node := ast.Module{}
|
2020-04-10 22:32:52 +02:00
|
|
|
|
if !is_skipped {
|
2021-01-08 16:24:42 +01:00
|
|
|
|
p.attrs = []
|
2020-05-16 16:12:23 +02:00
|
|
|
|
module_pos = p.tok.position()
|
2020-05-07 06:51:36 +02:00
|
|
|
|
p.next()
|
2021-01-09 05:36:38 +01:00
|
|
|
|
name_pos = p.tok.position()
|
2020-04-10 22:27:51 +02:00
|
|
|
|
name = p.check_name()
|
2021-01-09 05:36:38 +01:00
|
|
|
|
mod_node = ast.Module{
|
|
|
|
|
pos: module_pos
|
|
|
|
|
}
|
|
|
|
|
if module_pos.line_nr != name_pos.line_nr {
|
|
|
|
|
p.error_with_pos('`module` and `$name` must be on the same line', name_pos)
|
|
|
|
|
return mod_node
|
|
|
|
|
}
|
|
|
|
|
// NB: this shouldn't be reassigned into name_pos
|
|
|
|
|
// as it creates a wrong position when extended
|
|
|
|
|
// to module_pos
|
|
|
|
|
n_pos := p.tok.position()
|
2021-03-11 09:40:09 +01:00
|
|
|
|
if module_pos.line_nr == n_pos.line_nr && p.tok.kind != .comment && p.tok.kind != .eof {
|
|
|
|
|
if p.tok.kind == .name {
|
|
|
|
|
p.error_with_pos('`module $name`, you can only declare one module, unexpected `$p.tok.lit`',
|
|
|
|
|
n_pos)
|
2021-01-09 05:36:38 +01:00
|
|
|
|
return mod_node
|
2020-05-10 02:28:56 +02:00
|
|
|
|
} else {
|
2021-03-11 09:40:09 +01:00
|
|
|
|
p.error_with_pos('`module $name`, unexpected `$p.tok.kind` after module name',
|
|
|
|
|
n_pos)
|
2021-01-09 05:36:38 +01:00
|
|
|
|
return mod_node
|
2020-05-10 02:28:56 +02:00
|
|
|
|
}
|
|
|
|
|
}
|
2021-01-29 11:17:59 +01:00
|
|
|
|
module_pos = attrs_pos.extend(name_pos)
|
2020-04-10 22:27:51 +02:00
|
|
|
|
}
|
2021-01-20 06:04:59 +01:00
|
|
|
|
full_name := util.qualify_module(name, p.file_name)
|
|
|
|
|
p.mod = full_name
|
2020-04-10 22:27:51 +02:00
|
|
|
|
p.builtin_mod = p.mod == 'builtin'
|
2021-01-09 05:36:38 +01:00
|
|
|
|
mod_node = ast.Module{
|
2021-01-20 06:04:59 +01:00
|
|
|
|
name: full_name
|
|
|
|
|
short_name: name
|
2021-01-09 05:36:38 +01:00
|
|
|
|
attrs: module_attrs
|
|
|
|
|
is_skipped: is_skipped
|
|
|
|
|
pos: module_pos
|
|
|
|
|
name_pos: name_pos
|
|
|
|
|
}
|
2021-01-08 16:24:42 +01:00
|
|
|
|
if !is_skipped {
|
|
|
|
|
for ma in module_attrs {
|
|
|
|
|
match ma.name {
|
|
|
|
|
'manualfree' {
|
|
|
|
|
p.is_manualfree = true
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
p.error_with_pos('unknown module attribute `[$ma.name]`', ma.pos)
|
2021-01-09 05:36:38 +01:00
|
|
|
|
return mod_node
|
2021-01-08 16:24:42 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2021-01-09 05:36:38 +01:00
|
|
|
|
return mod_node
|
2019-12-28 09:43:22 +01:00
|
|
|
|
}
|
2019-12-28 09:15:32 +01:00
|
|
|
|
|
2020-05-07 04:32:29 +02:00
|
|
|
|
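// import_stmt parses `import mod.submod [as alias] [{ sym1, Sym2 }]`
// and returns the corresponding ast.Import node.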
fn (mut p Parser) import_stmt() ast.Import {
	import_pos := p.tok.position()
	p.check(.key_import)
	mut pos := p.tok.position()
	mut import_node := ast.Import{
		pos: import_pos.extend(pos)
	}
	if p.tok.kind == .lpar {
		p.error_with_pos('`import()` has been deprecated, use `import x` instead', pos)
		return import_node
	}
	mut mod_name_arr := []string{}
	mod_name_arr << p.check_name()
	if import_pos.line_nr != pos.line_nr {
		p.error_with_pos('`import` statements must be a single line', pos)
		return import_node
	}
	mut mod_alias := mod_name_arr[0]
	import_node = ast.Import{
		pos: import_pos.extend(pos)
		mod_pos: pos
		alias_pos: pos
	}
	for p.tok.kind == .dot {
		p.next()
		submod_pos := p.tok.position()
		if p.tok.kind != .name {
			p.error_with_pos('module syntax error, please use `x.y.z`', submod_pos)
			return import_node
		}
		if import_pos.line_nr != submod_pos.line_nr {
			p.error_with_pos('`import` and `submodule` must be on the same line', submod_pos)
			return import_node
		}
		submod_name := p.check_name()
		mod_name_arr << submod_name
		mod_alias = submod_name
		pos = pos.extend(submod_pos)
		import_node = ast.Import{
			pos: import_pos.extend(pos)
			mod_pos: pos
			alias_pos: submod_pos
			mod: util.qualify_import(p.pref, mod_name_arr.join('.'), p.file_name)
			alias: mod_alias
		}
	}
	if mod_name_arr.len == 1 {
		import_node = ast.Import{
			pos: import_node.pos
			mod_pos: import_node.mod_pos
			alias_pos: import_node.alias_pos
			mod: util.qualify_import(p.pref, mod_name_arr[0], p.file_name)
			alias: mod_alias
		}
	}
	mod_name := import_node.mod
	if p.tok.kind == .key_as {
		p.next()
		alias_pos := p.tok.position()
		mod_alias = p.check_name()
		if mod_alias == mod_name_arr.last() {
			p.error_with_pos('import alias `$mod_name as $mod_alias` is redundant', p.prev_tok.position())
			return import_node
		}
		import_node = ast.Import{
			pos: import_node.pos.extend(alias_pos)
			mod_pos: import_node.mod_pos
			alias_pos: alias_pos
			mod: import_node.mod
			alias: mod_alias
		}
	}
	if p.tok.kind == .lcbr { // import module { fn1, Type2 } syntax
		mut initial_syms_pos := p.tok.position()
		p.import_syms(mut import_node)
		initial_syms_pos = initial_syms_pos.extend(p.tok.position())
		import_node = ast.Import{
			...import_node
			syms_pos: initial_syms_pos
			pos: import_node.pos.extend(initial_syms_pos)
		}
		p.register_used_import(mod_alias) // no `unused import` msg for parent
	}
	pos_t := p.tok.position()
	if import_pos.line_nr == pos_t.line_nr {
		if p.tok.kind !in [.lcbr, .eof, .comment] {
			p.error_with_pos('cannot import multiple modules at a time', pos_t)
			return import_node
		}
	}
	import_node.comments = p.eat_comments(same_line: true)
	import_node.next_comments = p.eat_comments(follow_up: true)
	p.imports[mod_alias] = mod_name
	// if mod_name !in p.table.imports {
	p.table.imports << mod_name
	p.ast_imports << import_node
	// }
	return import_node
}

// import_syms parses the inner part of `import module { submod1, submod2 }`
fn (mut p Parser) import_syms(mut parent ast.Import) {
	p.next()
	pos_t := p.tok.position()
	if p.tok.kind == .rcbr { // closed too early
		p.error_with_pos('empty `$parent.mod` import set, remove `{}`', pos_t)
		return
	}
	if p.tok.kind != .name { // not a valid inner name
		p.error_with_pos('import syntax error, please specify a valid fn or type name',
			pos_t)
		return
	}
	for p.tok.kind == .name {
		pos := p.tok.position()
		alias := p.check_name()
		p.imported_symbols[alias] = parent.mod + '.' + alias
		// so we can work with this in fmt+checker
		parent.syms << ast.ImportSymbol{
			pos: pos
			name: alias
		}
		if p.tok.kind == .comma { // go again if more than one
			p.next()
			continue
		}
		if p.tok.kind == .rcbr { // finish if closing `}` is seen
			break
		}
	}
	if p.tok.kind != .rcbr {
		p.error_with_pos('import syntax error, no closing `}`', p.tok.position())
		return
	}
	p.next()
}

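// const_decl parses a single or a grouped (block) constant declaration, e.g.:
//
//	const golden_ratio = 1.618
//	const (
//		e  = 2.71828
//		pi = 3.14159
//	)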
fn (mut p Parser) const_decl() ast.ConstDecl {
	p.top_level_statement_start()
	start_pos := p.tok.position()
	is_pub := p.tok.kind == .key_pub
	if is_pub {
		p.next()
	}
	const_pos := p.tok.position()
	p.check(.key_const)
	is_block := p.tok.kind == .lpar
	if is_block {
		p.next() // (
	}
	mut fields := []ast.ConstField{}
	mut comments := []ast.Comment{}
	for {
		comments = p.eat_comments({})
		if is_block && p.tok.kind == .eof {
			p.error('unexpected eof, expecting `)`')
			return ast.ConstDecl{}
		}
		if p.tok.kind == .rpar {
			break
		}
		pos := p.tok.position()
		name := p.check_name()
		if util.contains_capital(name) {
			p.warn_with_pos('const names cannot contain uppercase letters, use snake_case instead',
				pos)
		}
		full_name := p.prepend_mod(name)
		p.check(.assign)
		if p.tok.kind == .key_fn {
			p.error('const initializer fn literal is not a constant')
			return ast.ConstDecl{}
		}
		if p.tok.kind == .eof {
			p.error('unexpected eof, expecting an expression')
			return ast.ConstDecl{}
		}
		expr := p.expr(0)
		field := ast.ConstField{
			name: full_name
			mod: p.mod
			is_pub: is_pub
			expr: expr
			pos: pos.extend(expr.position())
			comments: comments
		}
		fields << field
		p.global_scope.register(field)
		comments = []
		if !is_block {
			break
		}
	}
	p.top_level_statement_end()
	if is_block {
		p.check(.rpar)
	}
	return ast.ConstDecl{
		pos: start_pos.extend_with_last_line(const_pos, p.prev_tok.line_nr)
		fields: fields
		is_pub: is_pub
		end_comments: comments
		is_block: is_block
	}
}

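// return_stmt parses a `return` statement with zero or more return values, e.g.
// `return`, `return x` or `return a, b`.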
fn (mut p Parser) return_stmt() ast.Return {
	first_pos := p.tok.position()
	p.next()
	// no return
	mut comments := p.eat_comments({})
	if p.tok.kind == .rcbr {
		return ast.Return{
			comments: comments
			pos: first_pos
		}
	}
	// return exprs
	exprs, comments2 := p.expr_list()
	comments << comments2
	end_pos := exprs.last().position()
	return ast.Return{
		exprs: exprs
		comments: comments
		pos: first_pos.extend(end_pos)
	}
}

const (
	// modules which allow globals by default
	global_enabled_mods = ['rand', 'sokol.sapp']
)

// global_decl parses a `__global ( ... )` declaration block.
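// Only the grouped form is accepted; each entry is either `name = type(value)` or `name type`, e.g.:
//
//	__global (
//		a = int(1)
//		b f64
//	)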
fn (mut p Parser) global_decl() ast.GlobalDecl {
	if !p.pref.translated && !p.pref.is_livemain && !p.builtin_mod && !p.pref.building_v
		&& p.mod != 'ui' && p.mod != 'gg2' && p.mod != 'uiold' && !p.pref.enable_globals
		&& !p.pref.is_fmt && p.mod !in parser.global_enabled_mods {
		p.error('use `v --enable-globals ...` to enable globals')
		return ast.GlobalDecl{}
	}
	start_pos := p.tok.position()
	end_pos := p.tok.position()
	p.check(.key_global)
	if p.tok.kind != .lpar {
		p.error('globals must be grouped, e.g. `__global ( a = int(1) )`')
		return ast.GlobalDecl{}
	}
	p.next() // (
	mut fields := []ast.GlobalField{}
	mut comments := []ast.Comment{}
	for {
		comments = p.eat_comments({})
		if p.tok.kind == .rpar {
			break
		}
		pos := p.tok.position()
		name := p.check_name()
		has_expr := p.tok.kind == .assign
		if has_expr {
			p.next() // =
		}
		typ := p.parse_type()
		if p.tok.kind == .assign {
			p.error('global assign must have the type around the value, use `__global ( name = type(value) )`')
			return ast.GlobalDecl{}
		}
		mut expr := ast.empty_expr()
		if has_expr {
			if p.tok.kind != .lpar {
				p.error('global assign must have a type and value, use `__global ( name = type(value) )` or `__global ( name type )`')
				return ast.GlobalDecl{}
			}
			p.next() // (
			expr = p.expr(0)
			p.check(.rpar)
		}
		field := ast.GlobalField{
			name: name
			has_expr: has_expr
			expr: expr
			pos: pos
			typ: typ
			comments: comments
		}
		fields << field
		p.global_scope.register(field)
		comments = []
	}
	p.check(.rpar)
	return ast.GlobalDecl{
		pos: start_pos.extend(end_pos)
		fields: fields
		end_comments: comments
	}
}

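// enum_decl parses an `enum` declaration, e.g.:
//
//	enum Color {
//		red
//		green = 10
//		blue
//	}
//
// An enum tagged with the `[flag]` attribute is treated as a bit field (max 32 fields,
// no custom values), and gets generated is_empty/has/set/clear/toggle helper methods below.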
fn (mut p Parser) enum_decl() ast.EnumDecl {
	p.top_level_statement_start()
	is_pub := p.tok.kind == .key_pub
	start_pos := p.tok.position()
	if is_pub {
		p.next()
	}
	p.check(.key_enum)
	end_pos := p.tok.position()
	enum_name := p.check_name()
	if enum_name.len == 1 {
		p.error_with_pos('single letter capital names are reserved for generic template types.',
			end_pos)
		return ast.EnumDecl{}
	}
	name := p.prepend_mod(enum_name)
	p.check(.lcbr)
	enum_decl_comments := p.eat_comments({})
	mut vals := []string{}
	// mut default_exprs := []ast.Expr{}
	mut fields := []ast.EnumField{}
	for p.tok.kind != .eof && p.tok.kind != .rcbr {
		pos := p.tok.position()
		val := p.check_name()
		vals << val
		mut expr := ast.empty_expr()
		mut has_expr := false
		// p.warn('enum val $val')
		if p.tok.kind == .assign {
			p.next()
			expr = p.expr(0)
			has_expr = true
		}
		fields << ast.EnumField{
			name: val
			pos: pos
			expr: expr
			has_expr: has_expr
			comments: p.eat_comments(same_line: true)
			next_comments: p.eat_comments({})
		}
	}
	p.top_level_statement_end()
	p.check(.rcbr)
	is_flag := p.attrs.contains('flag')
	is_multi_allowed := p.attrs.contains('_allow_multiple_values')
	if is_flag {
		if fields.len > 32 {
			p.error('when an enum is used as a bit field, it must have a max of 32 fields')
			return ast.EnumDecl{}
		}
		for f in fields {
			if f.has_expr {
				p.error_with_pos('when an enum is used as a bit field, you cannot assign custom values',
					f.pos)
				return ast.EnumDecl{}
			}
		}
		pubfn := if p.mod == 'main' { 'fn' } else { 'pub fn' }
		p.scanner.codegen('
//
[inline] $pubfn ( e &$enum_name) is_empty() bool { return int(*e) == 0 }
[inline] $pubfn ( e &$enum_name) has(flag $enum_name) bool { return (int(*e) & (int(flag))) != 0 }
[inline] $pubfn (mut e $enum_name) set(flag $enum_name) { unsafe{ *e = ${enum_name}(int(*e) | (int(flag))) } }
[inline] $pubfn (mut e $enum_name) clear(flag $enum_name) { unsafe{ *e = ${enum_name}(int(*e) & ~(int(flag))) } }
[inline] $pubfn (mut e $enum_name) toggle(flag $enum_name) { unsafe{ *e = ${enum_name}(int(*e) ^ (int(flag))) } }
//
')
	}
	idx := p.table.register_type_symbol(ast.TypeSymbol{
		kind: .enum_
		name: name
		cname: util.no_dots(name)
		mod: p.mod
		info: ast.Enum{
			vals: vals
			is_flag: is_flag
			is_multi_allowed: is_multi_allowed
		}
		is_public: is_pub
	})
	if idx == -1 {
		p.error_with_pos('cannot register enum `$name`, another type with this name exists',
			end_pos)
	}
	return ast.EnumDecl{
		name: name
		is_pub: is_pub
		is_flag: is_flag
		is_multi_allowed: is_multi_allowed
		fields: fields
		pos: start_pos.extend_with_last_line(end_pos, p.prev_tok.line_nr)
		attrs: p.attrs
		comments: enum_decl_comments
	}
}

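// type_decl parses the three kinds of `type` declarations, e.g.:
//
//	type MyCallback = fn (string, int) // function type
//	type Number = int | f64 // sum type
//	type MyInt = int // alias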
fn (mut p Parser) type_decl() ast.TypeDecl {
	start_pos := p.tok.position()
	is_pub := p.tok.kind == .key_pub
	if is_pub {
		p.next()
	}
	p.check(.key_type)
	end_pos := p.tok.position()
	decl_pos := start_pos.extend(end_pos)
	name := p.check_name()
	if name.len == 1 && name[0].is_capital() {
		p.error_with_pos('single letter capital names are reserved for generic template types.',
			decl_pos)
		return ast.FnTypeDecl{}
	}
	mut sum_variants := []ast.SumTypeVariant{}
	p.check(.assign)
	mut type_pos := p.tok.position()
	mut comments := []ast.Comment{}
	if p.tok.kind == .key_fn {
		// function type: `type mycallback = fn(string, int)`
		fn_name := p.prepend_mod(name)
		fn_type := p.parse_fn_type(fn_name)
		type_pos = type_pos.extend(p.tok.position())
		comments = p.eat_comments(same_line: true)
		return ast.FnTypeDecl{
			name: fn_name
			is_pub: is_pub
			typ: fn_type
			pos: decl_pos
			type_pos: type_pos
			comments: comments
		}
	}
	first_type := p.parse_type() // need to parse the first type before we can check if it's `type A = X | Y`
	type_alias_pos := p.tok.position()
	if p.tok.kind == .pipe {
		mut type_end_pos := p.prev_tok.position()
		type_pos = type_pos.extend(type_end_pos)
		p.next()
		sum_variants << ast.SumTypeVariant{
			typ: first_type
			pos: type_pos
		}
		// type SumType = A | B | c
		for {
			type_pos = p.tok.position()
			variant_type := p.parse_type()
			// TODO: needs to be its own var, otherwise TCC fails because of a known stack error
			prev_tok := p.prev_tok
			type_end_pos = prev_tok.position()
			type_pos = type_pos.extend(type_end_pos)
			sum_variants << ast.SumTypeVariant{
				typ: variant_type
				pos: type_pos
			}
			if p.tok.kind != .pipe {
				break
			}
			p.check(.pipe)
		}
		variant_types := sum_variants.map(it.typ)
		prepend_mod_name := p.prepend_mod(name)
		typ := p.table.register_type_symbol(ast.TypeSymbol{
			kind: .sum_type
			name: prepend_mod_name
			cname: util.no_dots(prepend_mod_name)
			mod: p.mod
			info: ast.SumType{
				variants: variant_types
			}
			is_public: is_pub
		})
		comments = p.eat_comments(same_line: true)
		return ast.SumTypeDecl{
			name: name
			typ: typ
			is_pub: is_pub
			variants: sum_variants
			pos: decl_pos
			comments: comments
		}
	}
	// type MyType = int
	parent_type := first_type
	parent_sym := p.table.get_type_symbol(parent_type)
	pidx := parent_type.idx()
	p.check_for_impure_v(parent_sym.language, decl_pos)
	prepend_mod_name := p.prepend_mod(name)
	idx := p.table.register_type_symbol(ast.TypeSymbol{
		kind: .alias
		name: prepend_mod_name
		cname: util.no_dots(prepend_mod_name)
		mod: p.mod
		parent_idx: pidx
		info: ast.Alias{
			parent_type: parent_type
			language: parent_sym.language
		}
		is_public: is_pub
	})
	if idx == -1 {
		p.error_with_pos('cannot register alias `$name`, another type with this name exists',
			decl_pos.extend(type_alias_pos))
		return ast.AliasTypeDecl{}
	}
	if idx == pidx {
		p.error_with_pos('a type alias cannot refer to itself: $name', decl_pos.extend(type_alias_pos))
		return ast.AliasTypeDecl{}
	}
	comments = p.eat_comments(same_line: true)
	return ast.AliasTypeDecl{
		name: name
		is_pub: is_pub
		parent_type: parent_type
		type_pos: type_pos
		pos: decl_pos
		comments: comments
	}
}

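// assoc parses an association expression, which copies an existing struct value while
// replacing some of its fields, roughly `new_foo := { old_foo | x: 1, y: 2 }`.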
fn (mut p Parser) assoc() ast.Assoc {
	var_name := p.check_name()
	pos := p.tok.position()
	mut v := p.scope.find_var(var_name) or {
		p.error('unknown variable `$var_name`')
		return ast.Assoc{
			scope: 0
		}
	}
	v.is_used = true
	// println('assoc var $name typ=$var.typ')
	mut fields := []string{}
	mut vals := []ast.Expr{}
	p.check(.pipe)
	for p.tok.kind != .eof {
		fields << p.check_name()
		p.check(.colon)
		expr := p.expr(0)
		vals << expr
		if p.tok.kind == .comma {
			p.next()
		}
		if p.tok.kind == .rcbr {
			break
		}
	}
	return ast.Assoc{
		var_name: var_name
		fields: fields
		exprs: vals
		pos: pos
		scope: p.scope
	}
}

fn (p &Parser) new_true_expr() ast.Expr {
	return ast.BoolLiteral{
		val: true
		pos: p.tok.position()
	}
}

fn verror(s string) {
	util.verror('parser error', s)
}

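// top_level_statement_start tells the scanner to start keeping top level comments;
// it only has an effect when the parser runs in .toplevel_comments mode.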
fn (mut p Parser) top_level_statement_start() {
	if p.comments_mode == .toplevel_comments {
		p.scanner.set_is_inside_toplevel_statement(true)
		p.rewind_scanner_to_current_token_in_new_mode()
		$if debugscanner ? {
			eprintln('>> p.top_level_statement_start | tidx:${p.tok.tidx:-5} | p.tok.kind: ${p.tok.kind:-10} | p.tok.lit: $p.tok.lit $p.peek_tok.lit ${p.peek_token(2).lit} ${p.peek_token(3).lit} ...')
		}
	}
}

fn (mut p Parser) top_level_statement_end() {
	if p.comments_mode == .toplevel_comments {
		p.scanner.set_is_inside_toplevel_statement(false)
		p.rewind_scanner_to_current_token_in_new_mode()
		$if debugscanner ? {
			eprintln('>> p.top_level_statement_end | tidx:${p.tok.tidx:-5} | p.tok.kind: ${p.tok.kind:-10} | p.tok.lit: $p.tok.lit $p.peek_tok.lit ${p.peek_token(2).lit} ${p.peek_token(3).lit} ...')
		}
	}
}

fn (mut p Parser) rewind_scanner_to_current_token_in_new_mode() {
	// Go back and rescan some tokens, ensuring that the parser's
	// lookahead buffer p.peek_tok .. p.peek_token(3) will now contain
	// the correct tokens (possibly comments) for the new mode.
	// This refilling of the lookahead buffer is needed for the
	// .toplevel_comments parsing mode.
	tidx := p.tok.tidx
	p.scanner.set_current_tidx(tidx - 5)
	no_token := token.Token{}
	p.prev_tok = no_token
	p.tok = no_token
	p.peek_tok = no_token
	for {
		p.next()
		// eprintln('rewinding to ${p.tok.tidx:5} | goal: ${tidx:5}')
		if tidx == p.tok.tidx {
			break
		}
	}
}

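// mark_var_as_used marks the variable `varname` in the current scope as used,
// and reports whether it was found there.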
pub fn (mut p Parser) mark_var_as_used(varname string) bool {
	if obj := p.scope.find(varname) {
		match mut obj {
			ast.Var {
				obj.is_used = true
				return true
			}
			else {}
		}
	}
	return false
}

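// unsafe_stmt parses an `unsafe { ... }` block. A block wrapping a single expression,
// e.g. `x := unsafe { &arr[1] }`, becomes an ast.UnsafeExpr, so it can also be used as
// part of a larger expression; everything else becomes an ast.Block with is_unsafe set.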
fn (mut p Parser) unsafe_stmt() ast.Stmt {
	mut pos := p.tok.position()
	p.next()
	if p.tok.kind != .lcbr {
		return p.error_with_pos('please use `unsafe {`', p.tok.position())
	}
	p.next()
	if p.inside_unsafe {
		return p.error_with_pos('already inside `unsafe` block', pos)
	}
	if p.tok.kind == .rcbr {
		// `unsafe {}`
		pos.update_last_line(p.tok.line_nr)
		p.next()
		return ast.Block{
			is_unsafe: true
			pos: pos
		}
	}
	p.inside_unsafe = true
	p.open_scope() // needed in case of `unsafe {stmt}`
	defer {
		p.inside_unsafe = false
		p.close_scope()
	}
	stmt := p.stmt(false)
	if p.tok.kind == .rcbr {
		if stmt is ast.ExprStmt {
			// `unsafe {expr}`
			if stmt.expr.is_expr() {
				p.next()
				pos.update_last_line(p.prev_tok.line_nr)
				ue := ast.UnsafeExpr{
					expr: stmt.expr
					pos: pos
				}
				// parse e.g. `unsafe {expr}.foo()`
				expr := p.expr_with_left(ue, 0, p.is_stmt_ident)
				return ast.ExprStmt{
					expr: expr
					pos: pos
				}
			}
		}
	}
	// unsafe {stmts}
	mut stmts := [stmt]
	for p.tok.kind != .rcbr {
		stmts << p.stmt(false)
	}
	p.next()
	pos.update_last_line(p.tok.line_nr)
	return ast.Block{
		stmts: stmts
		is_unsafe: true
		pos: pos
	}
}

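// trace prints a parser debug message, but only while the file currently being parsed
// has the base name `fbase`, e.g. p.trace('foo.v', 'reached struct_init') (illustrative call).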
fn (mut p Parser) trace(fbase string, message string) {
	if p.file_base == fbase {
		println('> p.trace | ${fbase:-10s} | $message')
	}
}