all: make comment expressions + fix vfmt array init (#5851)

pull/5868/head
Enzo 2020-07-17 19:13:22 +02:00 committed by GitHub
parent 39f90e25f3
commit 90d9040e6e
19 changed files with 397 additions and 291 deletions
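
For context, a minimal sketch (not taken from the diff itself; the element values and comment texts are made up) of the kind of source this change lets the parser and vfmt handle: comments are now parsed as expressions, so they survive inside array initializers and expression lists, and vfmt emits the trailing comma after the last real element.

fn main() {
	arr := [
		// first element
		1,
		// second element
		2,
		// a trailing comment no longer breaks the initializer
	]
	sum := arr[0] + arr[1] // inline comments are kept as well
	println(sum)
}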

View File

@@ -9,11 +9,10 @@ const (
 		'vlib/net/http/http_httpbin_test.v',
 	]
 	skip_on_musl = []string{}
-	skip_on_ubuntu_musl =
-	[
+	skip_on_ubuntu_musl = [
 		'vlib/net/http/cookie_test.v',
 		'vlib/net/http/http_test.v',
-		'vlib/net/websocket/ws_test.v'
+		'vlib/net/websocket/ws_test.v',
 		'vlib/sqlite/sqlite_test.v',
 		'vlib/orm/orm_test.v',
 		'vlib/clipboard/clipboard_test.v',
@@ -21,7 +20,7 @@ const (
 	]
 	skip_on_linux = []string{}
 	skip_on_non_linux = [
-		'vlib/net/websocket/ws_test.v'
+		'vlib/net/websocket/ws_test.v',
 	]
 	skip_on_windows = [
 		'vlib/orm/orm_test.v',
@@ -75,6 +74,6 @@ fn main() {
 	tsession.test()
 	eprintln(tsession.benchmark.total_message(title))
 	if tsession.benchmark.nfail > 0 {
-		panic('\nWARNING: failed ${tsession.benchmark.nfail} times.\n')
+		panic('\nWARNING: failed $tsession.benchmark.nfail times.\n')
 	}
 }

View File

@@ -10,15 +10,15 @@ import v.errors

 pub type TypeDecl = AliasTypeDecl | FnTypeDecl | SumTypeDecl

-pub type Expr = AnonFn | ArrayInit | AsCast | Assoc | BoolLiteral | CallExpr | CastExpr |
-	CharLiteral | ComptimeCall | ConcatExpr | EnumVal | FloatLiteral | Ident | IfExpr | IfGuardExpr |
-	IndexExpr | InfixExpr | IntegerLiteral | Likely | LockExpr | MapInit | MatchExpr | None |
-	OrExpr | ParExpr | PostfixExpr | PrefixExpr | RangeExpr | SelectorExpr | SizeOf | SqlExpr |
-	StringInterLiteral | StringLiteral | StructInit | Type | TypeOf | UnsafeExpr
+pub type Expr = AnonFn | ArrayInit | AsCast | Assoc | BoolLiteral | CallExpr | CastExpr |
+	CharLiteral | Comment | ComptimeCall | ConcatExpr | EnumVal | FloatLiteral | Ident | IfExpr |
+	IfGuardExpr | IndexExpr | InfixExpr | IntegerLiteral | Likely | LockExpr | MapInit | MatchExpr |
+	None | OrExpr | ParExpr | PostfixExpr | PrefixExpr | RangeExpr | SelectorExpr | SizeOf |
+	SqlExpr | StringInterLiteral | StringLiteral | StructInit | Type | TypeOf | UnsafeExpr

-pub type Stmt = AssertStmt | AssignStmt | Attr | Block | BranchStmt | Comment | CompFor |
-	CompIf | ConstDecl | DeferStmt | EnumDecl | ExprStmt | FnDecl | ForCStmt | ForInStmt |
-	ForStmt | GlobalDecl | GoStmt | GotoLabel | GotoStmt | HashStmt | Import | InterfaceDecl |
-	Module | Return | SqlStmt | StructDecl | TypeDecl | UnsafeStmt
+pub type Stmt = AssertStmt | AssignStmt | Attr | Block | BranchStmt | CompFor | CompIf |
+	ConstDecl | DeferStmt | EnumDecl | ExprStmt | FnDecl | ForCStmt | ForInStmt | ForStmt |
+	GlobalDecl | GoStmt | GotoLabel | GotoStmt | HashStmt | Import | InterfaceDecl | Module |
+	Return | SqlStmt | StructDecl | TypeDecl | UnsafeStmt

 pub type ScopeObject = ConstField | GlobalDecl | Var
@ -42,11 +42,12 @@ pub:
// Stand-alone expression in a statement list. // Stand-alone expression in a statement list.
pub struct ExprStmt { pub struct ExprStmt {
pub: pub:
expr Expr expr Expr
pos token.Position pos token.Position
is_expr bool comments []Comment
is_expr bool
pub mut: pub mut:
typ table.Type typ table.Type
} }
pub struct IntegerLiteral { pub struct IntegerLiteral {
@ -282,10 +283,11 @@ pub mut:
pub struct Return { pub struct Return {
pub: pub:
pos token.Position pos token.Position
exprs []Expr exprs []Expr
comments []Comment
pub mut: pub mut:
types []table.Type types []table.Type
} }
/* /*
@ -579,6 +581,7 @@ pub:
right []Expr right []Expr
op token.Kind op token.Kind
pos token.Position pos token.Position
comments []Comment
pub mut: pub mut:
left []Expr left []Expr
left_types []table.Type left_types []table.Type
@ -921,22 +924,25 @@ pub fn (expr Expr) position() token.Position {
AsCast { AsCast {
return expr.pos return expr.pos
} }
// ast.Ident { }
CastExpr {
return expr.pos
}
Assoc { Assoc {
return expr.pos return expr.pos
} }
BoolLiteral { BoolLiteral {
return expr.pos return expr.pos
} }
// ast.Ident { }
CallExpr { CallExpr {
return expr.pos return expr.pos
} }
CastExpr {
return expr.pos
}
CharLiteral { CharLiteral {
return expr.pos return expr.pos
} }
Comment {
return expr.pos
}
EnumVal { EnumVal {
return expr.pos return expr.pos
} }
@ -1023,7 +1029,6 @@ pub fn (stmt Stmt) position() token.Position {
// BranchStmt { // BranchStmt {
// } // }
*/ */
Comment { return stmt.pos }
CompIf { return stmt.pos } CompIf { return stmt.pos }
ConstDecl { return stmt.pos } ConstDecl { return stmt.pos }
/* /*

View File

@ -19,7 +19,7 @@ const (
pub struct Checker { pub struct Checker {
pub mut: pub mut:
table &table.Table table &table.Table
file ast.File file ast.File
nr_errors int nr_errors int
nr_warnings int nr_warnings int
@ -1865,10 +1865,10 @@ fn (mut c Checker) stmt(node ast.Stmt) {
node.pos) node.pos)
} }
} }
// ast.Attr {}
ast.AssignStmt { ast.AssignStmt {
c.assign_stmt(mut node) c.assign_stmt(mut node)
} }
ast.Attr {}
ast.Block { ast.Block {
c.stmts(node.stmts) c.stmts(node.stmts)
} }
@ -2044,7 +2044,9 @@ fn (mut c Checker) stmt(node ast.Stmt) {
} }
} }
} }
// ast.HashStmt {} ast.GotoLabel {}
ast.GotoStmt {}
ast.HashStmt {}
ast.Import {} ast.Import {}
ast.InterfaceDecl { ast.InterfaceDecl {
c.interface_decl(node) c.interface_decl(node)
@ -2074,10 +2076,6 @@ fn (mut c Checker) stmt(node ast.Stmt) {
c.stmts(node.stmts) c.stmts(node.stmts)
c.inside_unsafe = false c.inside_unsafe = false
} }
else {
// println('checker.stmt(): unhandled node')
// println('checker.stmt(): unhandled node (${typeof(node)})')
}
} }
} }
@ -2210,6 +2208,9 @@ pub fn (mut c Checker) expr(node ast.Expr) table.Type {
ast.CharLiteral { ast.CharLiteral {
return table.byte_type return table.byte_type
} }
ast.Comment {
return table.void_type
}
ast.ComptimeCall { ast.ComptimeCall {
node.sym = c.table.get_type_symbol(c.unwrap_generic(c.expr(node.left))) node.sym = c.table.get_type_symbol(c.unwrap_generic(c.expr(node.left)))
if node.is_vweb { if node.is_vweb {
@ -2616,7 +2617,7 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, type_sym table.TypeSymbol
typ := c.table.type_to_str(c.expr(expr.low)) typ := c.table.type_to_str(c.expr(expr.low))
c.error('cannot use type `$typ` in match range', branch.pos) c.error('cannot use type `$typ` in match range', branch.pos)
} }
for i in low..high + 1 { for i in low .. high + 1 {
key = i.str() key = i.str()
val := if key in branch_exprs { branch_exprs[key] } else { 0 } val := if key in branch_exprs { branch_exprs[key] } else { 0 }
if val == 1 { if val == 1 {

View File

@ -33,36 +33,30 @@ pub:
pub struct DocNode { pub struct DocNode {
pub mut: pub mut:
name string name string
content string = '' content string = ''
comment string comment string
pos DocPos = DocPos{-1, -1} pos DocPos = DocPos{-1, -1}
file_path string = '' file_path string = ''
attrs map[string]string attrs map[string]string
} }
pub fn merge_comments(stmts []ast.Stmt) string { pub fn merge_comments(comments []ast.Comment) string {
mut res := []string{} mut res := []string{}
for s in stmts { for comment in comments {
if s is ast.Comment { res << comment.text.trim_left('|')
res << s.text.trim_left('|')
}
} }
return res.join('\n') return res.join('\n')
} }
pub fn get_comment_block_right_before(stmts []ast.Stmt) string { pub fn get_comment_block_right_before(comments []ast.Comment) string {
if stmts.len == 0 { if comments.len == 0 {
return '' return ''
} }
mut comment := '' mut comment := ''
mut last_comment_line_nr := 0 mut last_comment_line_nr := 0
for i := stmts.len-1; i >= 0; i-- { for i := comments.len - 1; i >= 0; i-- {
stmt := stmts[i] cmt := comments[i]
if stmt !is ast.Comment {
panic('Not a comment')
}
cmt := stmt as ast.Comment
if last_comment_line_nr != 0 && cmt.pos.line_nr < last_comment_line_nr - 1 { if last_comment_line_nr != 0 && cmt.pos.line_nr < last_comment_line_nr - 1 {
// skip comments that are not part of a continuous block, // skip comments that are not part of a continuous block,
// located right above the top level statement. // located right above the top level statement.
@ -92,8 +86,8 @@ pub fn get_comment_block_right_before(stmts []ast.Stmt) string {
// } // }
// return new_cmt_content // return new_cmt_content
} }
//eprintln('cmt: $cmt') // eprintln('cmt: $cmt')
cseparator := if cmt_content.starts_with('```') {'\n'} else {' '} cseparator := if cmt_content.starts_with('```') { '\n' } else { ' ' }
comment = cmt_content + cseparator + comment comment = cmt_content + cseparator + comment
last_comment_line_nr = cmt.pos.line_nr last_comment_line_nr = cmt.pos.line_nr
} }
@ -171,7 +165,7 @@ pub fn new(input_path string) Doc {
d.fmt = fmt.Fmt{ d.fmt = fmt.Fmt{
indent: 0 indent: 0
is_debug: false is_debug: false
table: d.table table: d.table
} }
return d return d
} }
@ -198,7 +192,9 @@ fn compare_nodes_by_category(a, b &DocNode) int {
pub fn (nodes []DocNode) index_by_name(node_name string) int { pub fn (nodes []DocNode) index_by_name(node_name string) int {
for i, node in nodes { for i, node in nodes {
if node.name != node_name { continue } if node.name != node_name {
continue
}
return i return i
} }
return -1 return -1
@ -208,7 +204,7 @@ pub fn (nodes []DocNode) find_children_of(parent string) []DocNode {
return nodes.find_nodes_with_attr('parent', parent) return nodes.find_nodes_with_attr('parent', parent)
} }
pub fn (nodes []DocNode) find_nodes_with_attr(attr_name string, value string) []DocNode { pub fn (nodes []DocNode) find_nodes_with_attr(attr_name, value string) []DocNode {
mut subgroup := []DocNode{} mut subgroup := []DocNode{}
if attr_name.len == 0 { if attr_name.len == 0 {
return subgroup return subgroup
@ -226,9 +222,13 @@ pub fn (nodes []DocNode) find_nodes_with_attr(attr_name string, value string) []
fn get_parent_mod(dir string) ?string { fn get_parent_mod(dir string) ?string {
$if windows { $if windows {
// windows root path is C: or D: // windows root path is C: or D:
if dir.len <= 2 { return error('root folder reached') } if dir.len <= 2 {
return error('root folder reached')
}
} $else { } $else {
if dir.len == 0 { return error('root folder reached') } if dir.len == 0 {
return error('root folder reached')
}
} }
base_dir := os.base_dir(dir) base_dir := os.base_dir(dir)
if os.file_name(base_dir) in ['encoding', 'v'] && 'vlib' in base_dir { if os.file_name(base_dir) in ['encoding', 'v'] && 'vlib' in base_dir {
@ -249,7 +249,9 @@ fn get_parent_mod(dir string) ?string {
return error('No V files found.') return error('No V files found.')
} }
tbl := table.new_table() tbl := table.new_table()
scope := &ast.Scope{ parent: 0 } scope := &ast.Scope{
parent: 0
}
file_ast := parser.parse_file(v_files[0], tbl, .skip_comments, prefs, scope) file_ast := parser.parse_file(v_files[0], tbl, .skip_comments, prefs, scope)
if file_ast.mod.name == 'main' { if file_ast.mod.name == 'main' {
return '' return ''
@ -315,13 +317,15 @@ fn (mut d Doc) generate() ?Doc {
last_import_stmt_idx = sidx last_import_stmt_idx = sidx
} }
} }
mut prev_comments := []ast.Stmt{} mut prev_comments := []ast.Comment{}
mut imports_section := true mut imports_section := true
for sidx, stmt in stmts { for sidx, stmt in stmts {
//eprintln('stmt typeof: ' + typeof(stmt)) // eprintln('stmt typeof: ' + typeof(stmt))
if stmt is ast.Comment { if stmt is ast.ExprStmt {
prev_comments << stmt if stmt.expr is ast.Comment as cmt {
continue prev_comments << cmt
continue
}
} }
// TODO: Fetch head comment once // TODO: Fetch head comment once
if stmt is ast.Module { if stmt is ast.Module {
@ -385,7 +389,9 @@ fn (mut d Doc) generate() ?Doc {
name: node.attrs['parent'] name: node.attrs['parent']
content: '' content: ''
comment: '' comment: ''
attrs: {'category': 'Structs'} attrs: {
'category': 'Structs'
}
} }
} }
} }
@ -420,7 +426,6 @@ fn (mut d Doc) generate() ?Doc {
} }
prev_comments = [] prev_comments = []
} }
d.fmt.mod2alias = map[string]string{} d.fmt.mod2alias = map[string]string{}
} }
d.time_generated = time.now() d.time_generated = time.now()

View File

@ -17,11 +17,6 @@ const (
max_len = [0, 35, 85, 93, 100] max_len = [0, 35, 85, 93, 100]
) )
enum CommentsLevel {
keep
indent
}
pub struct Fmt { pub struct Fmt {
pub mut: pub mut:
table &table.Table table &table.Table
@ -251,6 +246,9 @@ pub fn (mut f Fmt) stmt(node ast.Stmt) {
} }
match node { match node {
ast.AssignStmt { ast.AssignStmt {
f.comments(node.comments, {
inline: false
})
for i, left in node.left { for i, left in node.left {
if left is ast.Ident { if left is ast.Ident {
var_info := left.var_info() var_info := left.var_info()
@ -302,9 +300,6 @@ pub fn (mut f Fmt) stmt(node ast.Stmt) {
else {} else {}
} }
} }
ast.Comment {
f.comment(it)
}
ast.CompFor {} ast.CompFor {}
ast.CompIf { ast.CompIf {
inversion := if it.is_not { '!' } else { '' } inversion := if it.is_not { '!' } else { '' }
@ -331,19 +326,27 @@ pub fn (mut f Fmt) stmt(node ast.Stmt) {
} }
name := it.name.after('.') name := it.name.after('.')
f.writeln('enum $name {') f.writeln('enum $name {')
f.comments(it.comments, false, .indent) f.comments(it.comments, {
level: .indent
})
for field in it.fields { for field in it.fields {
f.write('\t$field.name') f.write('\t$field.name')
if field.has_expr { if field.has_expr {
f.write(' = ') f.write(' = ')
f.expr(field.expr) f.expr(field.expr)
} }
f.comments(field.comments, true, .indent) f.comments(field.comments, {
has_nl: false
level: .indent
})
f.writeln('') f.writeln('')
} }
f.writeln('}\n') f.writeln('}\n')
} }
ast.ExprStmt { ast.ExprStmt {
f.comments(it.comments, {
inline: false
})
f.expr(it.expr) f.expr(it.expr)
if !f.single_line_if { if !f.single_line_if {
f.writeln('') f.writeln('')
@ -434,6 +437,9 @@ pub fn (mut f Fmt) stmt(node ast.Stmt) {
f.mod(it) f.mod(it)
} }
ast.Return { ast.Return {
f.comments(it.comments, {
inline: false
})
f.write('return') f.write('return')
if it.exprs.len > 1 { if it.exprs.len > 1 {
// multiple returns // multiple returns
@ -618,7 +624,8 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl) {
for j < comments.len && comments[j].pos.pos < field.pos.pos { for j < comments.len && comments[j].pos.pos < field.pos.pos {
f.indent++ f.indent++
f.empty_line = true f.empty_line = true
f.comment(comments[j]) f.comment(comments[j], {})
f.writeln('')
f.indent-- f.indent--
j++ j++
} }
@ -651,7 +658,8 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl) {
for comment in node.end_comments { for comment in node.end_comments {
f.indent++ f.indent++
f.empty_line = true f.empty_line = true
f.comment(comment) f.comment(comment, {})
f.writeln('')
f.indent-- f.indent--
} }
f.writeln('}\n') f.writeln('}\n')
@ -765,6 +773,9 @@ pub fn (mut f Fmt) expr(node ast.Expr) {
ast.CharLiteral { ast.CharLiteral {
f.write('`$node.val`') f.write('`$node.val`')
} }
ast.Comment {
f.comment(node, {})
}
ast.ComptimeCall { ast.ComptimeCall {
if node.is_vweb { if node.is_vweb {
f.write('$' + 'vweb.html()') f.write('$' + 'vweb.html()')
@ -1120,10 +1131,21 @@ pub fn (mut f Fmt) or_expr(or_block ast.OrExpr) {
} }
} }
pub fn (mut f Fmt) comment(node ast.Comment) { enum CommentsLevel {
keep
indent
}
struct CommentsOptions {
has_nl bool = true
inline bool = true
level CommentsLevel = .keep
}
pub fn (mut f Fmt) comment(node ast.Comment, options CommentsOptions) {
if !node.text.contains('\n') { if !node.text.contains('\n') {
is_separate_line := node.text.starts_with('|') is_separate_line := !options.inline || node.text.starts_with('|')
mut s := if is_separate_line { node.text[1..] } else { node.text } mut s := if node.text.starts_with('|') { node.text[1..] } else { node.text }
if s == '' { if s == '' {
s = '//' s = '//'
} else { } else {
@ -1133,7 +1155,7 @@ pub fn (mut f Fmt) comment(node ast.Comment) {
f.remove_new_line() // delete the generated \n f.remove_new_line() // delete the generated \n
f.write(' ') f.write(' ')
} }
f.writeln(s) f.write(s)
return return
} }
lines := node.text.split_into_lines() lines := node.text.split_into_lines()
@ -1143,25 +1165,25 @@ pub fn (mut f Fmt) comment(node ast.Comment) {
f.empty_line = false f.empty_line = false
} }
f.empty_line = true f.empty_line = true
f.writeln('*/') f.write('*/')
} }
pub fn (mut f Fmt) comments(some_comments []ast.Comment, remove_last_new_line bool, level CommentsLevel) { pub fn (mut f Fmt) comments(comments []ast.Comment, options CommentsOptions) {
for c in some_comments { for i, c in comments {
if !f.out.last_n(1)[0].is_space() { if !f.out.last_n(1)[0].is_space() {
f.write('\t') f.write('\t')
} }
if level == .indent { if options.level == .indent {
f.indent++ f.indent++
} }
f.comment(c) f.comment(c, options)
if level == .indent { if i < comments.len - 1 || options.has_nl {
f.writeln('')
}
if options.level == .indent {
f.indent-- f.indent--
} }
} }
if remove_last_new_line {
f.remove_new_line()
}
} }
pub fn (mut f Fmt) fn_decl(node ast.FnDecl) { pub fn (mut f Fmt) fn_decl(node ast.FnDecl) {
@ -1232,7 +1254,7 @@ pub fn (mut f Fmt) if_expr(it ast.IfExpr) {
f.single_line_if = single_line f.single_line_if = single_line
for i, branch in it.branches { for i, branch in it.branches {
if branch.comments.len > 0 { if branch.comments.len > 0 {
f.comments(branch.comments, true, .keep) f.comments(branch.comments, {})
} }
if i == 0 { if i == 0 {
f.write('if ') f.write('if ')
@ -1373,7 +1395,8 @@ pub fn (mut f Fmt) match_expr(it ast.MatchExpr) {
} }
for branch in it.branches { for branch in it.branches {
if branch.comment.text != '' { if branch.comment.text != '' {
f.comment(branch.comment) f.comment(branch.comment, {})
f.writeln('')
} }
if !branch.is_else { if !branch.is_else {
// normal branch // normal branch
@ -1404,7 +1427,7 @@ pub fn (mut f Fmt) match_expr(it ast.MatchExpr) {
} }
} }
if branch.post_comments.len > 0 { if branch.post_comments.len > 0 {
f.comments(branch.post_comments, false, .keep) f.comments(branch.post_comments, {})
} }
} }
f.indent-- f.indent--
@ -1456,6 +1479,7 @@ fn (mut f Fmt) write_language_prefix(lang table.Language) {
fn expr_is_single_line(expr ast.Expr) bool { fn expr_is_single_line(expr ast.Expr) bool {
match expr { match expr {
ast.IfExpr { return false } ast.IfExpr { return false }
ast.Comment { return false }
else {} else {}
} }
return true return true
@ -1553,9 +1577,12 @@ pub fn (mut f Fmt) array_init(it ast.ArrayInit) {
f.expr(expr) f.expr(expr)
if i == it.exprs.len - 1 { if i == it.exprs.len - 1 {
if is_new_line { if is_new_line {
if expr !is ast.Comment {
f.write(',')
}
f.writeln('') f.writeln('')
} }
} else { } else if expr !is ast.Comment {
f.write(',') f.write(',')
} }
last_line_nr = line_nr last_line_nr = line_nr
@ -1634,7 +1661,8 @@ pub fn (mut f Fmt) const_decl(it ast.ConstDecl) {
comments := field.comments comments := field.comments
mut j := 0 mut j := 0
for j < comments.len && comments[j].pos.pos < field.pos.pos { for j < comments.len && comments[j].pos.pos < field.pos.pos {
f.comment(comments[j]) f.comment(comments[j], {})
f.writeln('')
j++ j++
} }
name := field.name.after('.') name := field.name.after('.')
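
The fmt refactor above replaces the positional arguments of comments() (remove_last_new_line, level) with a CommentsOptions struct whose fields have defaults, so call sites only name what they override. A self-contained sketch of that V pattern, using hypothetical names rather than the compiler's own types:

struct GreetOptions {
	repeat int = 1
	shout  bool
}

fn greet(name string, options GreetOptions) {
	mut msg := 'hello, $name'
	if options.shout {
		msg = msg.to_upper()
	}
	for _ in 0 .. options.repeat {
		println(msg)
	}
}

fn main() {
	greet('v', {}) // all defaults, like f.comment(node, {})
	greet('v', { repeat: 2 }) // override one field, like f.comments(c, { inline: false })
}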

View File

@ -0,0 +1,9 @@
fn main() {
arr := [
// test 0
1,
// test 1
2,
// test 2
]
}

View File

@ -7,7 +7,7 @@ fn main() {
expected_flags := [ expected_flags := [
make_flag('solaris', '-L', '/opt/local/lib'), make_flag('solaris', '-L', '/opt/local/lib'),
make_flag('darwin', '-framework', 'Cocoa'), make_flag('darwin', '-framework', 'Cocoa'),
make_flag('windows', '-l', 'gdi32') make_flag('windows', '-l', 'gdi32'),
] ]
x := []int{len: 10, cap: 100, init: 1} x := []int{len: 10, cap: 100, init: 1}
_ := expected_flags _ := expected_flags

View File

@ -0,0 +1,19 @@
fn fun() int {
// comment zero
return 0
}
fn mr_fun() (int, int) {
// one comment
// another comment
return 1, 2
}
fn main() {
// this is a comment
a := 1
// and another comment
// just to make it worse
b, c := a, 2
d := c // and an extra one
}

View File

@ -0,0 +1,13 @@
fn fun() int {
return /* comment zero */ 0
}
fn mr_fun() (int, int) {
return /* one comment */ 1, /* another comment */ 2
}
fn main() {
a := /* this is a comment */ 1
b, c := /* and another comment */ a, /* just to make it worse */ 2
d := c // and an extra one
}

View File

@ -17,13 +17,13 @@ const (
'first line', 'first line',
'second line', 'second line',
'third line', 'third line',
'fourth line' 'fourth line',
] ]
) )
const ( const (
i_am_a_very_long_constant_name_so_i_stand_alone_and_my_length_is_over_90_characters = [ i_am_a_very_long_constant_name_so_i_stand_alone_and_my_length_is_over_90_characters = [
'testforit' 'testforit',
] ]
) )
@ -34,27 +34,27 @@ pub const (
fn main() { fn main() {
a := [ a := [
[3, 5, 6], [3, 5, 6],
[7, 9, 2] [7, 9, 2],
] ]
b := [[ b := [[
[2, 5, 8], [2, 5, 8],
[5, 1, 3], [5, 1, 3],
[2, 6, 0] [2, 6, 0],
], [ ], [
[9, 4, 5], [9, 4, 5],
[7, 2, 3], [7, 2, 3],
[1, 2, 3] [1, 2, 3],
]] ]]
c := [ c := [
[ [
[2, 5, 8], [2, 5, 8],
[5, 1, 3], [5, 1, 3],
[2, 6, 0] [2, 6, 0],
], ],
[ [
[9, 4, 5], [9, 4, 5],
[7, 2, 3], [7, 2, 3],
[1, 2, 3] [1, 2, 3],
] ],
] ]
} }

View File

@ -676,7 +676,6 @@ fn (mut g Gen) stmt(node ast.Stmt) {
g.const_decl(node) g.const_decl(node)
// } // }
} }
ast.Comment {}
ast.CompFor { ast.CompFor {
g.comp_for(node) g.comp_for(node)
} }
@ -1679,6 +1678,7 @@ fn (mut g Gen) expr(node ast.Expr) {
ast.ComptimeCall { ast.ComptimeCall {
g.comptime_call(node) g.comptime_call(node)
} }
ast.Comment {}
ast.ConcatExpr { ast.ConcatExpr {
g.concat_expr(node) g.concat_expr(node)
} }

View File

@ -62,9 +62,7 @@ pub fn gen(files []ast.File, table &table.Table, pref &pref.Preferences) string
g.enable_doc = false g.enable_doc = false
} }
g.init() g.init()
mut graph := depgraph.new_dep_graph() mut graph := depgraph.new_dep_graph()
// Get class methods // Get class methods
for file in files { for file in files {
g.file = file g.file = file
@ -73,27 +71,22 @@ pub fn gen(files []ast.File, table &table.Table, pref &pref.Preferences) string
g.find_class_methods(file.stmts) g.find_class_methods(file.stmts)
g.escape_namespace() g.escape_namespace()
} }
for file in files { for file in files {
g.file = file g.file = file
g.enter_namespace(g.file.mod.name) g.enter_namespace(g.file.mod.name)
g.is_test = g.file.path.ends_with('_test.v') g.is_test = g.file.path.ends_with('_test.v')
// store imports // store imports
mut imports := []string{} mut imports := []string{}
for imp in g.file.imports { for imp in g.file.imports {
imports << imp.mod imports << imp.mod
} }
graph.add(g.file.mod.name, imports) graph.add(g.file.mod.name, imports)
g.stmts(file.stmts) g.stmts(file.stmts)
// store the current namespace // store the current namespace
g.escape_namespace() g.escape_namespace()
} }
// resolve imports // resolve imports
deps_resolved := graph.resolve() deps_resolved := graph.resolve()
mut out := g.hashes() + g.definitions.str() mut out := g.hashes() + g.definitions.str()
for node in deps_resolved.nodes { for node in deps_resolved.nodes {
name := g.js_name(node.name).replace('.', '_') name := g.js_name(node.name).replace('.', '_')
@ -103,7 +96,9 @@ pub fn gen(files []ast.File, table &table.Table, pref &pref.Preferences) string
out += 'const $name = (function (' out += 'const $name = (function ('
imports := g.namespace_imports[node.name] imports := g.namespace_imports[node.name]
for i, key in imports.keys() { for i, key in imports.keys() {
if i > 0 { out += ', ' } if i > 0 {
out += ', '
}
out += imports[key] out += imports[key]
} }
out += ') {\n\t' out += ') {\n\t'
@ -117,13 +112,19 @@ pub fn gen(files []ast.File, table &table.Table, pref &pref.Preferences) string
out += '\n\treturn {' out += '\n\treturn {'
for i, pub_var in g.namespaces_pub[node.name] { for i, pub_var in g.namespaces_pub[node.name] {
out += '\n\t\t$pub_var' out += '\n\t\t$pub_var'
if i < g.namespaces_pub[node.name].len - 1 { out += ',' } if i < g.namespaces_pub[node.name].len - 1 {
out += ','
}
}
if g.namespaces_pub[node.name].len > 0 {
out += '\n\t'
} }
if g.namespaces_pub[node.name].len > 0 { out += '\n\t' }
out += '};' out += '};'
out += '\n})(' out += '\n})('
for i, key in imports.keys() { for i, key in imports.keys() {
if i > 0 { out += ', ' } if i > 0 {
out += ', '
}
out += key.replace('.', '_') out += key.replace('.', '_')
} }
out += ');\n\n' out += ');\n\n'
@ -157,9 +158,9 @@ pub fn (mut g JsGen) find_class_methods(stmts []ast.Stmt) {
for stmt in stmts { for stmt in stmts {
match stmt { match stmt {
ast.FnDecl { ast.FnDecl {
if it.is_method { if stmt.is_method {
// Found struct method, store it to be generated along with the class. // Found struct method, store it to be generated along with the class.
class_name := g.table.get_type_name(it.receiver.typ) class_name := g.table.get_type_name(stmt.receiver.typ)
// Workaround until `map[key] << val` works. // Workaround until `map[key] << val` works.
mut arr := g.method_fn_decls[class_name] mut arr := g.method_fn_decls[class_name]
arr << stmt arr << stmt
@ -178,7 +179,7 @@ pub fn (mut g JsGen) init() {
} }
pub fn (g JsGen) hashes() string { pub fn (g JsGen) hashes() string {
mut res := '// V_COMMIT_HASH ${util.vhash()}\n' mut res := '// V_COMMIT_HASH $util.vhash()\n'
res += '// V_CURRENT_COMMIT_HASH ${util.githash(g.pref.building_v)}\n' res += '// V_CURRENT_COMMIT_HASH ${util.githash(g.pref.building_v)}\n'
return res return res
} }
@ -187,7 +188,6 @@ pub fn (g JsGen) hashes() string {
pub fn (mut g JsGen) typ(t table.Type) string { pub fn (mut g JsGen) typ(t table.Type) string {
sym := g.table.get_type_symbol(t) sym := g.table.get_type_symbol(t)
mut styp := '' mut styp := ''
match sym.kind { match sym.kind {
.placeholder { .placeholder {
// This should never happen: means checker bug // This should never happen: means checker bug
@ -282,10 +282,11 @@ fn (mut g JsGen) fn_typ(args []table.Arg, return_type table.Type) string {
mut res := '(' mut res := '('
for i, arg in args { for i, arg in args {
res += '$arg.name: ${g.typ(arg.typ)}' res += '$arg.name: ${g.typ(arg.typ)}'
if i < args.len - 1 { res += ', ' } if i < args.len - 1 {
res += ', '
}
} }
return res + ') => ' + g.typ(return_type) return res + ') => ' + g.typ(return_type)
} }
fn (mut g JsGen) struct_typ(s string) string { fn (mut g JsGen) struct_typ(s string) string {
@ -293,17 +294,21 @@ fn (mut g JsGen) struct_typ(s string) string {
mut name := if ns == g.namespace { s.split('.').last() } else { g.get_alias(s) } mut name := if ns == g.namespace { s.split('.').last() } else { g.get_alias(s) }
mut styp := '' mut styp := ''
for i, v in name.split('.') { for i, v in name.split('.') {
if i == 0 { styp = v } if i == 0 {
else { styp += '["$v"]' } styp = v
} else {
styp += '["$v"]'
}
}
if ns in ['', g.namespace] {
return styp
} }
if ns in ['', g.namespace] { return styp }
return styp + '["prototype"]' return styp + '["prototype"]'
} }
fn (mut g JsGen) to_js_typ_val(t table.Type) string { fn (mut g JsGen) to_js_typ_val(t table.Type) string {
sym := g.table.get_type_symbol(t) sym := g.table.get_type_symbol(t)
mut styp := '' mut styp := ''
match sym.kind { match sym.kind {
.i8, .i16, .int, .i64, .byte, .u16, .u32, .u64, .f32, .f64, .any_int, .any_float, .size_t { .i8, .i16, .int, .i64, .byte, .u16, .u32, .u64, .f32, .f64, .any_int, .any_float, .size_t {
styp = '0' styp = '0'
@ -354,7 +359,7 @@ pub fn (mut g JsGen) write(s string) {
pub fn (mut g JsGen) writeln(s string) { pub fn (mut g JsGen) writeln(s string) {
g.gen_indent() g.gen_indent()
g.out.writeln(s) g.out.writeln(s)
g.empty_line = true g.empty_line = true
} }
pub fn (mut g JsGen) new_tmp_var() string { pub fn (mut g JsGen) new_tmp_var() string {
@ -366,18 +371,22 @@ pub fn (mut g JsGen) new_tmp_var() string {
// 'fn' => '' // 'fn' => ''
[inline] [inline]
fn get_ns(s string) string { fn get_ns(s string) string {
idx := s.last_index('.') or { return '' } idx := s.last_index('.') or {
return ''
}
return s.substr(0, idx) return s.substr(0, idx)
} }
fn (mut g JsGen) get_alias(name string) string { fn (mut g JsGen) get_alias(name string) string {
ns := get_ns(name) ns := get_ns(name)
if ns == '' { return name } if ns == '' {
return name
}
imports := g.namespace_imports[g.namespace] imports := g.namespace_imports[g.namespace]
alias := imports[ns] alias := imports[ns]
if alias == '' { return name } if alias == '' {
return name
}
return alias + '.' + name.split('.').last() return alias + '.' + name.split('.').last()
} }
@ -386,7 +395,9 @@ fn (mut g JsGen) js_name(name_ string) string {
mut name := if ns == g.namespace { name_.split('.').last() } else { g.get_alias(name_) } mut name := if ns == g.namespace { name_.split('.').last() } else { g.get_alias(name_) }
mut parts := name.split('.') mut parts := name.split('.')
for i, p in parts { for i, p in parts {
if p in js_reserved { parts[i] = 'v_$p' } if p in js_reserved {
parts[i] = 'v_$p'
}
} }
return parts.join('.') return parts.join('.')
} }
@ -401,82 +412,77 @@ fn (mut g JsGen) stmts(stmts []ast.Stmt) {
fn (mut g JsGen) stmt(node ast.Stmt) { fn (mut g JsGen) stmt(node ast.Stmt) {
g.stmt_start_pos = g.out.len g.stmt_start_pos = g.out.len
match node { match node {
ast.AssertStmt { ast.AssertStmt {
g.gen_assert_stmt(it) g.gen_assert_stmt(node)
} }
ast.AssignStmt { ast.AssignStmt {
g.gen_assign_stmt(it) g.gen_assign_stmt(node)
} }
ast.Attr { ast.Attr {
g.gen_attr(it) g.gen_attr(node)
} }
ast.Block { ast.Block {
g.gen_block(it) g.gen_block(node)
g.writeln('') g.writeln('')
} }
ast.BranchStmt { ast.BranchStmt {
g.gen_branch_stmt(it) g.gen_branch_stmt(node)
} }
ast.Comment { ast.CompFor {}
// Skip: don't generate comments
}
ast.CompFor {
}
ast.CompIf { ast.CompIf {
// skip: JS has no compile time if // skip: JS has no compile time if
} }
ast.ConstDecl { ast.ConstDecl {
g.gen_const_decl(it) g.gen_const_decl(node)
} }
ast.DeferStmt { ast.DeferStmt {
g.defer_stmts << *it g.defer_stmts << *node
} }
ast.EnumDecl { ast.EnumDecl {
g.gen_enum_decl(it) g.gen_enum_decl(node)
g.writeln('') g.writeln('')
} }
ast.ExprStmt { ast.ExprStmt {
g.gen_expr_stmt(it) g.gen_expr_stmt(node)
} }
ast.FnDecl { ast.FnDecl {
g.fn_decl = it g.fn_decl = node
g.gen_fn_decl(it) g.gen_fn_decl(node)
} }
ast.ForCStmt { ast.ForCStmt {
g.gen_for_c_stmt(it) g.gen_for_c_stmt(node)
g.writeln('') g.writeln('')
} }
ast.ForInStmt { ast.ForInStmt {
g.gen_for_in_stmt(it) g.gen_for_in_stmt(node)
g.writeln('') g.writeln('')
} }
ast.ForStmt { ast.ForStmt {
g.gen_for_stmt(it) g.gen_for_stmt(node)
g.writeln('') g.writeln('')
} }
ast.GlobalDecl { ast.GlobalDecl {
// TODO // TODO
} }
ast.GoStmt { ast.GoStmt {
g.gen_go_stmt(it) g.gen_go_stmt(node)
g.writeln('') g.writeln('')
} }
ast.GotoLabel { ast.GotoLabel {
g.writeln('${g.js_name(it.name)}:') g.writeln('${g.js_name(node.name)}:')
} }
ast.GotoStmt { ast.GotoStmt {
// skip: JS has no goto // skip: JS has no goto
} }
ast.HashStmt { ast.HashStmt {
g.gen_hash_stmt(it) g.gen_hash_stmt(node)
} }
ast.Import { ast.Import {
g.gen_import_stmt(it) g.gen_import_stmt(node)
} }
ast.InterfaceDecl { ast.InterfaceDecl {
g.gen_interface_decl(it) g.gen_interface_decl(node)
} }
ast.Module { ast.Module {
// skip: namespacing implemented externally // skip: namespacing implemented externally
@ -485,18 +491,17 @@ fn (mut g JsGen) stmt(node ast.Stmt) {
if g.defer_stmts.len > 0 { if g.defer_stmts.len > 0 {
g.gen_defer_stmts() g.gen_defer_stmts()
} }
g.gen_return_stmt(it) g.gen_return_stmt(node)
} }
ast.SqlStmt{ ast.SqlStmt {}
}
ast.StructDecl { ast.StructDecl {
g.gen_struct_decl(it) g.gen_struct_decl(node)
} }
ast.TypeDecl { ast.TypeDecl {
// skip JS has no typedecl // skip JS has no typedecl
} }
ast.UnsafeStmt { ast.UnsafeStmt {
g.stmts(it.stmts) g.stmts(node.stmts)
} }
} }
} }
@ -504,10 +509,10 @@ fn (mut g JsGen) stmt(node ast.Stmt) {
fn (mut g JsGen) expr(node ast.Expr) { fn (mut g JsGen) expr(node ast.Expr) {
match node { match node {
ast.AnonFn { ast.AnonFn {
g.gen_fn_decl(it.decl) g.gen_fn_decl(node.decl)
} }
ast.ArrayInit { ast.ArrayInit {
g.gen_array_init_expr(it) g.gen_array_init_expr(node)
} }
ast.AsCast { ast.AsCast {
// skip: JS has no types, so no need to cast // skip: JS has no types, so no need to cast
@ -517,57 +522,58 @@ fn (mut g JsGen) expr(node ast.Expr) {
// TODO // TODO
} }
ast.BoolLiteral { ast.BoolLiteral {
if it.val == true { if node.val == true {
g.write('true') g.write('true')
} else { } else {
g.write('false') g.write('false')
} }
} }
ast.CallExpr { ast.CallExpr {
g.gen_call_expr(it) g.gen_call_expr(node)
} }
ast.CastExpr { ast.CastExpr {
// JS has no types, so no need to cast // JS has no types, so no need to cast
// Just write the expression inside // Just write the expression inside
g.expr(it.expr) g.expr(node.expr)
} }
ast.CharLiteral { ast.CharLiteral {
g.write("'$it.val'") g.write("'$node.val'")
} }
ast.Comment {}
ast.ConcatExpr { ast.ConcatExpr {
// TODO // TODO
} }
ast.EnumVal { ast.EnumVal {
sym := g.table.get_type_symbol(it.typ) sym := g.table.get_type_symbol(node.typ)
styp := g.js_name(sym.name) styp := g.js_name(sym.name)
g.write('${styp}.${it.val}') g.write('${styp}.$node.val')
} }
ast.FloatLiteral { ast.FloatLiteral {
g.write(it.val) g.write(node.val)
} }
ast.Ident { ast.Ident {
g.gen_ident(it) g.gen_ident(node)
} }
ast.IfExpr { ast.IfExpr {
g.gen_if_expr(it) g.gen_if_expr(node)
} }
ast.IfGuardExpr { ast.IfGuardExpr {
// TODO no optionals yet // TODO no optionals yet
} }
ast.IndexExpr { ast.IndexExpr {
g.gen_index_expr(it) g.gen_index_expr(node)
} }
ast.InfixExpr { ast.InfixExpr {
g.gen_infix_expr(it) g.gen_infix_expr(node)
} }
ast.IntegerLiteral { ast.IntegerLiteral {
g.write(it.val) g.write(node.val)
} }
ast.LockExpr { ast.LockExpr {
g.gen_lock_expr(it) g.gen_lock_expr(node)
} }
ast.MapInit { ast.MapInit {
g.gen_map_init_expr(it) g.gen_map_init_expr(node)
} }
ast.MatchExpr { ast.MatchExpr {
// TODO // TODO
@ -580,42 +586,42 @@ fn (mut g JsGen) expr(node ast.Expr) {
} }
ast.ParExpr { ast.ParExpr {
g.write('(') g.write('(')
g.expr(it.expr) g.expr(node.expr)
g.write(')') g.write(')')
} }
ast.PostfixExpr { ast.PostfixExpr {
g.expr(it.expr) g.expr(node.expr)
g.write(it.op.str()) g.write(node.op.str())
} }
ast.PrefixExpr { ast.PrefixExpr {
if it.op in [.amp, .mul] { if node.op in [.amp, .mul] {
// C pointers/references: ignore them // C pointers/references: ignore them
} else { } else {
g.write(it.op.str()) g.write(node.op.str())
} }
g.expr(it.right) g.expr(node.right)
} }
ast.RangeExpr { ast.RangeExpr {
// Only used in IndexExpr, requires index type info // Only used in IndexExpr, requires index type info
} }
ast.SelectorExpr { ast.SelectorExpr {
g.gen_selector_expr(it) g.gen_selector_expr(node)
} }
ast.SizeOf { ast.SizeOf {
// TODO // TODO
} }
ast.SqlExpr{ ast.SqlExpr {
// TODO // TODO
} }
ast.StringInterLiteral { ast.StringInterLiteral {
g.gen_string_inter_literal(it) g.gen_string_inter_literal(node)
} }
ast.StringLiteral { ast.StringLiteral {
g.write('"$it.val"') g.write('"$node.val"')
} }
ast.StructInit { ast.StructInit {
// `user := User{name: 'Bob'}` // `user := User{name: 'Bob'}`
g.gen_struct_init(it) g.gen_struct_init(node)
} }
ast.Type { ast.Type {
// skip: JS has no types // skip: JS has no types
@ -623,24 +629,23 @@ fn (mut g JsGen) expr(node ast.Expr) {
} }
ast.Likely { ast.Likely {
g.write('(') g.write('(')
g.expr(it.expr) g.expr(node.expr)
g.write(')') g.write(')')
} }
ast.TypeOf { ast.TypeOf {
g.gen_typeof_expr(it) g.gen_typeof_expr(node)
// TODO: Should this print the V type or the JS type? // TODO: Should this print the V type or the JS type?
} }
ast.ComptimeCall { ast.ComptimeCall {
// TODO // TODO
} }
ast.UnsafeExpr { ast.UnsafeExpr {
es := it.stmts[0] as ast.ExprStmt es := node.stmts[0] as ast.ExprStmt
g.expr(es.expr) g.expr(es.expr)
} }
} }
} }
// TODO // TODO
fn (mut g JsGen) gen_assert_stmt(a ast.AssertStmt) { fn (mut g JsGen) gen_assert_stmt(a ast.AssertStmt) {
g.writeln('// assert') g.writeln('// assert')
@ -651,17 +656,17 @@ fn (mut g JsGen) gen_assert_stmt(a ast.AssertStmt) {
mut mod_path := g.file.path.replace('\\', '\\\\') mut mod_path := g.file.path.replace('\\', '\\\\')
if g.is_test { if g.is_test {
g.writeln(' g_test_oks++;') g.writeln(' g_test_oks++;')
g.writeln(' cb_assertion_ok("${mod_path}", ${a.pos.line_nr+1}, "assert ${s_assertion}", "${g.fn_decl.name}()" );') g.writeln(' cb_assertion_ok("$mod_path", ${a.pos.line_nr+1}, "assert $s_assertion", "${g.fn_decl.name}()" );')
g.writeln('} else {') g.writeln('} else {')
g.writeln(' g_test_fails++;') g.writeln(' g_test_fails++;')
g.writeln(' cb_assertion_failed("${mod_path}", ${a.pos.line_nr+1}, "assert ${s_assertion}", "${g.fn_decl.name}()" );') g.writeln(' cb_assertion_failed("$mod_path", ${a.pos.line_nr+1}, "assert $s_assertion", "${g.fn_decl.name}()" );')
g.writeln(' exit(1);') g.writeln(' exit(1);')
g.writeln('}') g.writeln('}')
return return
} }
g.writeln('} else {') g.writeln('} else {')
g.inc_indent() g.inc_indent()
g.writeln('builtin.eprintln("${mod_path}:${a.pos.line_nr+1}: FAIL: fn ${g.fn_decl.name}(): assert $s_assertion");') g.writeln('builtin.eprintln("$mod_path:${a.pos.line_nr+1}: FAIL: fn ${g.fn_decl.name}(): assert $s_assertion");')
g.writeln('builtin.exit(1);') g.writeln('builtin.exit(1);')
g.dec_indent() g.dec_indent()
g.writeln('}') g.writeln('}')
@ -686,7 +691,9 @@ fn (mut g JsGen) gen_assign_stmt(stmt ast.AssignStmt) {
// `a := 1` | `a,b := 1,2` // `a := 1` | `a,b := 1,2`
for i, left in stmt.left { for i, left in stmt.left {
mut op := stmt.op mut op := stmt.op
if stmt.op == .decl_assign { op = .assign } if stmt.op == .decl_assign {
op = .assign
}
val := stmt.right[i] val := stmt.right[i]
mut is_mut := false mut is_mut := false
if left is ast.Ident { if left is ast.Ident {
@ -700,13 +707,10 @@ fn (mut g JsGen) gen_assign_stmt(stmt ast.AssignStmt) {
continue continue
} }
} }
mut styp := g.typ(stmt.left_types[i]) mut styp := g.typ(stmt.left_types[i])
if !g.inside_loop && styp.len > 0 { if !g.inside_loop && styp.len > 0 {
g.doc.gen_typ(styp) g.doc.gen_typ(styp)
} }
if stmt.op == .decl_assign { if stmt.op == .decl_assign {
if g.inside_loop || is_mut { if g.inside_loop || is_mut {
g.write('let ') g.write('let ')
@ -724,7 +728,6 @@ fn (mut g JsGen) gen_assign_stmt(stmt ast.AssignStmt) {
g.write(' $op ') g.write(' $op ')
g.expr(val) g.expr(val)
} }
if g.inside_loop { if g.inside_loop {
g.write('; ') g.write('; ')
} else { } else {
@ -753,9 +756,9 @@ fn (mut g JsGen) gen_branch_stmt(it ast.BranchStmt) {
fn (mut g JsGen) gen_const_decl(it ast.ConstDecl) { fn (mut g JsGen) gen_const_decl(it ast.ConstDecl) {
for field in it.fields { for field in it.fields {
g.doc.gen_const(g.typ(field.typ)) g.doc.gen_const(g.typ(field.typ))
if field.is_pub {
if field.is_pub { g.push_pub_var(field.name) } g.push_pub_var(field.name)
}
g.write('const ${g.js_name(field.name)} = ') g.write('const ${g.js_name(field.name)} = ')
g.expr(field.expr) g.expr(field.expr)
g.writeln(';') g.writeln(';')
@ -794,7 +797,9 @@ fn (mut g JsGen) gen_enum_decl(it ast.EnumDecl) {
fn (mut g JsGen) gen_expr_stmt(it ast.ExprStmt) { fn (mut g JsGen) gen_expr_stmt(it ast.ExprStmt) {
g.expr(it.expr) g.expr(it.expr)
if !it.is_expr && it.expr !is ast.IfExpr && !g.inside_ternary { g.writeln(';') } if !it.is_expr && it.expr !is ast.IfExpr && !g.inside_ternary {
g.writeln(';')
}
} }
fn (mut g JsGen) gen_fn_decl(it ast.FnDecl) { fn (mut g JsGen) gen_fn_decl(it ast.FnDecl) {
@ -811,7 +816,9 @@ fn (mut g JsGen) gen_fn_decl(it ast.FnDecl) {
fn fn_has_go(it ast.FnDecl) bool { fn fn_has_go(it ast.FnDecl) bool {
mut has_go := false mut has_go := false
for stmt in it.stmts { for stmt in it.stmts {
if stmt is ast.GoStmt { has_go = true } if stmt is ast.GoStmt {
has_go = true
}
} }
return has_go return has_go
} }
@ -836,12 +843,9 @@ fn (mut g JsGen) gen_method_decl(it ast.FnDecl) {
if c in [`+`, `-`, `*`, `/`] { if c in [`+`, `-`, `*`, `/`] {
name = util.replace_op(name) name = util.replace_op(name)
} }
// type_name := g.typ(it.return_type) // type_name := g.typ(it.return_type)
// generate jsdoc for the function // generate jsdoc for the function
g.doc.gen_fn(it) g.doc.gen_fn(it)
if has_go { if has_go {
g.write('async ') g.write('async ')
} }
@ -849,25 +853,21 @@ fn (mut g JsGen) gen_method_decl(it ast.FnDecl) {
g.write('function ') g.write('function ')
} }
g.write('${name}(') g.write('${name}(')
if it.is_pub && !it.is_method { if it.is_pub && !it.is_method {
g.push_pub_var(name) g.push_pub_var(name)
} }
} }
mut args := it.args mut args := it.args
if it.is_method { if it.is_method {
args = args[1..] args = args[1..]
} }
g.fn_args(args, it.is_variadic) g.fn_args(args, it.is_variadic)
g.writeln(') {') g.writeln(') {')
if it.is_method { if it.is_method {
g.inc_indent() g.inc_indent()
g.writeln('const ${it.args[0].name} = this;') g.writeln('const ${it.args[0].name} = this;')
g.dec_indent() g.dec_indent()
} }
g.stmts(it.stmts) g.stmts(it.stmts)
g.write('}') g.write('}')
if is_main { if is_main {
@ -876,7 +876,6 @@ fn (mut g JsGen) gen_method_decl(it ast.FnDecl) {
if !it.is_anon && !it.is_method { if !it.is_anon && !it.is_method {
g.writeln('\n') g.writeln('\n')
} }
g.fn_decl = voidptr(0) g.fn_decl = voidptr(0)
} }
@ -922,8 +921,9 @@ fn (mut g JsGen) gen_for_in_stmt(it ast.ForInStmt) {
if it.is_range { if it.is_range {
// `for x in 1..10 {` // `for x in 1..10 {`
mut i := it.val_var mut i := it.val_var
if i in ['', '_'] { i = g.new_tmp_var() } if i in ['', '_'] {
i = g.new_tmp_var()
}
g.inside_loop = true g.inside_loop = true
g.write('for (let $i = ') g.write('for (let $i = ')
g.expr(it.cond) g.expr(it.cond)
@ -1013,7 +1013,6 @@ fn (mut g JsGen) gen_interface_decl(it ast.InterfaceDecl) {
// JS is dynamically typed, so we don't need any codegen at all // JS is dynamically typed, so we don't need any codegen at all
// We just need the JSDoc so TypeScript type checking works // We just need the JSDoc so TypeScript type checking works
g.doc.gen_interface(it) g.doc.gen_interface(it)
// This is a hack to make the interface's type accessible outside its namespace // This is a hack to make the interface's type accessible outside its namespace
// TODO: interfaces are always `pub`? // TODO: interfaces are always `pub`?
name := g.js_name(it.name) name := g.js_name(it.name)
@ -1026,7 +1025,6 @@ fn (mut g JsGen) gen_return_stmt(it ast.Return) {
g.write('return;') g.write('return;')
return return
} }
g.write('return ') g.write('return ')
if it.exprs.len == 1 { if it.exprs.len == 1 {
g.expr(it.exprs[0]) g.expr(it.exprs[0])
@ -1050,7 +1048,9 @@ fn (mut g JsGen) gen_struct_decl(node ast.StructDecl) {
} else { } else {
g.write('${g.to_js_typ_val(field.typ)}') g.write('${g.to_js_typ_val(field.typ)}')
} }
if i < node.fields.len - 1 { g.write(', ') } if i < node.fields.len - 1 {
g.write(', ')
}
} }
g.writeln(' }) {') g.writeln(' }) {')
g.inc_indent() g.inc_indent()
@ -1059,22 +1059,26 @@ fn (mut g JsGen) gen_struct_decl(node ast.StructDecl) {
} }
g.dec_indent() g.dec_indent()
g.writeln('};') g.writeln('};')
g.writeln('${g.js_name(node.name)}.prototype = {') g.writeln('${g.js_name(node.name)}.prototype = {')
g.inc_indent() g.inc_indent()
fns := g.method_fn_decls[node.name] fns := g.method_fn_decls[node.name]
for i, field in node.fields { for i, field in node.fields {
typ := g.typ(field.typ) typ := g.typ(field.typ)
g.doc.gen_typ(typ) g.doc.gen_typ(typ)
g.write('$field.name: ${g.to_js_typ_val(field.typ)}') g.write('$field.name: ${g.to_js_typ_val(field.typ)}')
if i < node.fields.len - 1 || fns.len > 0 { g.writeln(',') } else { g.writeln('') } if i < node.fields.len - 1 || fns.len > 0 {
g.writeln(',')
} else {
g.writeln('')
}
} }
for i, cfn in fns { for i, cfn in fns {
g.gen_method_decl(cfn) g.gen_method_decl(cfn)
if i < fns.len - 1 { g.writeln(',') } else { g.writeln('') } if i < fns.len - 1 {
g.writeln(',')
} else {
g.writeln('')
}
} }
g.dec_indent() g.dec_indent()
g.writeln('};\n') g.writeln('};\n')
@ -1141,7 +1145,6 @@ fn (mut g JsGen) gen_call_expr(it ast.CallExpr) {
if it.is_method { // foo.bar.baz() if it.is_method { // foo.bar.baz()
sym := g.table.get_type_symbol(it.receiver_type) sym := g.table.get_type_symbol(it.receiver_type)
g.write('.') g.write('.')
if sym.kind == .array && it.name in ['map', 'filter'] { if sym.kind == .array && it.name in ['map', 'filter'] {
// Prevent 'it' from getting shadowed inside the match // Prevent 'it' from getting shadowed inside the match
node := it node := it
@ -1150,9 +1153,9 @@ fn (mut g JsGen) gen_call_expr(it ast.CallExpr) {
match node.args[0].expr { match node.args[0].expr {
ast.AnonFn { ast.AnonFn {
g.gen_fn_decl(it.decl) g.gen_fn_decl(it.decl)
g.write(')') g.write(')')
return return
} }
ast.Ident { ast.Ident {
if it.kind == .function { if it.kind == .function {
g.write(g.js_name(it.name)) g.write(g.js_name(it.name))
@ -1166,7 +1169,8 @@ fn (mut g JsGen) gen_call_expr(it ast.CallExpr) {
return return
} }
} }
} else {} }
else {}
} }
g.write('it => ') g.write('it => ')
g.expr(node.args[0].expr) g.expr(node.args[0].expr)
@ -1190,7 +1194,7 @@ fn (mut g JsGen) gen_call_expr(it ast.CallExpr) {
fn (mut g JsGen) gen_ident(node ast.Ident) { fn (mut g JsGen) gen_ident(node ast.Ident) {
mut name := g.js_name(node.name) mut name := g.js_name(node.name)
if node.kind == .blank_ident || name in ['', '_']{ if node.kind == .blank_ident || name in ['', '_'] {
name = g.new_tmp_var() name = g.new_tmp_var()
} }
// TODO `is` // TODO `is`
@ -1204,7 +1208,6 @@ fn (mut g JsGen) gen_lock_expr(node ast.LockExpr) {
fn (mut g JsGen) gen_if_expr(node ast.IfExpr) { fn (mut g JsGen) gen_if_expr(node ast.IfExpr) {
type_sym := g.table.get_type_symbol(node.typ) type_sym := g.table.get_type_symbol(node.typ)
// one line ?: // one line ?:
if node.is_expr && node.branches.len >= 2 && node.has_else && type_sym.kind != .void { if node.is_expr && node.branches.len >= 2 && node.has_else && type_sym.kind != .void {
// `x := if a > b { } else if { } else { }` // `x := if a > b { } else if { } else { }`
@ -1286,7 +1289,9 @@ fn (mut g JsGen) gen_index_expr(expr ast.IndexExpr) {
g.write('.get(') g.write('.get(')
} }
g.expr(expr.index) g.expr(expr.index)
if !expr.is_setter { g.write(')') } if !expr.is_setter {
g.write(')')
}
} else if left_typ.kind == .string { } else if left_typ.kind == .string {
if expr.is_setter { if expr.is_setter {
// TODO: What's the best way to do this? // TODO: What's the best way to do this?
@ -1309,35 +1314,46 @@ fn (mut g JsGen) gen_index_expr(expr ast.IndexExpr) {
fn (mut g JsGen) gen_infix_expr(it ast.InfixExpr) { fn (mut g JsGen) gen_infix_expr(it ast.InfixExpr) {
l_sym := g.table.get_type_symbol(it.left_type) l_sym := g.table.get_type_symbol(it.left_type)
r_sym := g.table.get_type_symbol(it.right_type) r_sym := g.table.get_type_symbol(it.right_type)
if l_sym.kind == .array && it.op == .left_shift { // arr << 1 if l_sym.kind == .array && it.op == .left_shift { // arr << 1
g.expr(it.left) g.expr(it.left)
g.write('.push(') g.write('.push(')
if r_sym.kind == .array { g.write('...') } // arr << [1, 2] if r_sym.kind == .array {
g.write('...')
} // arr << [1, 2]
g.expr(it.right) g.expr(it.right)
g.write(')') g.write(')')
} else if r_sym.kind in [.array, .map, .string] && it.op in [.key_in, .not_in] { } else if r_sym.kind in [.array, .map, .string] && it.op in [.key_in, .not_in] {
if it.op == .not_in { g.write('!(') } if it.op == .not_in {
g.write('!(')
}
g.expr(it.right) g.expr(it.right)
g.write(if r_sym.kind == .map { '.has(' } else { '.includes(' }) g.write(if r_sym.kind == .map {
'.has('
} else {
'.includes('
})
g.expr(it.left) g.expr(it.left)
g.write(')') g.write(')')
if it.op == .not_in { g.write(')') } if it.op == .not_in {
g.write(')')
}
} else if it.op in [.key_is, .not_is] { // foo is Foo } else if it.op in [.key_is, .not_is] { // foo is Foo
if it.op == .not_is { g.write('!(') } if it.op == .not_is {
g.write('!(')
}
g.expr(it.left) g.expr(it.left)
g.write(' instanceof ') g.write(' instanceof ')
g.write(g.typ(it.right_type)) g.write(g.typ(it.right_type))
if it.op == .not_is { g.write(')') } if it.op == .not_is {
g.write(')')
}
} else { } else {
both_are_int := int(it.left_type) in table.integer_type_idxs && int(it.right_type) in table.integer_type_idxs both_are_int := int(it.left_type) in table.integer_type_idxs &&
int(it.right_type) in table.integer_type_idxs
if it.op == .div && both_are_int { if it.op == .div && both_are_int {
g.write('parseInt(') g.write('parseInt(')
} }
g.expr(it.left) g.expr(it.left)
// in js == is non-strict & === is strict, always do strict // in js == is non-strict & === is strict, always do strict
if it.op == .eq { if it.op == .eq {
g.write(' === ') g.write(' === ')
@ -1346,9 +1362,7 @@ fn (mut g JsGen) gen_infix_expr(it ast.InfixExpr) {
} else { } else {
g.write(' $it.op ') g.write(' $it.op ')
} }
g.expr(it.right) g.expr(it.right)
// Int division: 2.5 -> 2 by prepending |0 // Int division: 2.5 -> 2 by prepending |0
if it.op == .div && both_are_int { if it.op == .div && both_are_int {
g.write(',10)') g.write(',10)')
@ -1356,7 +1370,6 @@ fn (mut g JsGen) gen_infix_expr(it ast.InfixExpr) {
} }
} }
fn (mut g JsGen) gen_map_init_expr(it ast.MapInit) { fn (mut g JsGen) gen_map_init_expr(it ast.MapInit) {
// key_typ_sym := g.table.get_type_symbol(it.key_type) // key_typ_sym := g.table.get_type_symbol(it.key_type)
// value_typ_sym := g.table.get_type_symbol(it.value_type) // value_typ_sym := g.table.get_type_symbol(it.value_type)
@ -1402,12 +1415,11 @@ fn (mut g JsGen) gen_string_inter_literal(it ast.StringInterLiteral) {
fwidth := it.fwidths[i] fwidth := it.fwidths[i]
precision := it.precisions[i] precision := it.precisions[i]
g.write('\${') g.write('\${')
if fmt != `_` || fwidth !=0 || precision != 0 { if fmt != `_` || fwidth != 0 || precision != 0 {
// TODO: Handle formatting // TODO: Handle formatting
g.expr(expr) g.expr(expr)
} else { } else {
sym := g.table.get_type_symbol(it.expr_types[i]) sym := g.table.get_type_symbol(it.expr_types[i])
g.expr(expr) g.expr(expr)
if sym.kind == .struct_ && sym.has_method('str') { if sym.kind == .struct_ && sym.has_method('str') {
g.write('.str()') g.write('.str()')
@ -1446,7 +1458,7 @@ fn (mut g JsGen) gen_typeof_expr(it ast.TypeOf) {
} else if sym.kind == .array_fixed { } else if sym.kind == .array_fixed {
fixed_info := sym.info as table.ArrayFixed fixed_info := sym.info as table.ArrayFixed
typ_name := g.table.get_type_name(fixed_info.elem_type) typ_name := g.table.get_type_name(fixed_info.elem_type)
g.write('"[$fixed_info.size]${typ_name}"') g.write('"[$fixed_info.size]$typ_name"')
} else if sym.kind == .function { } else if sym.kind == .function {
info := sym.info as table.FnType info := sym.info as table.FnType
fn_info := info.func fn_info := info.func
@ -1463,6 +1475,6 @@ fn (mut g JsGen) gen_typeof_expr(it ast.TypeOf) {
} }
g.write('"$repr"') g.write('"$repr"')
} else { } else {
g.write('"${sym.name}"') g.write('"$sym.name"')
} }
} }

View File

@ -7,7 +7,8 @@ import v.ast
import v.table import v.table
fn (mut p Parser) assign_stmt() ast.Stmt { fn (mut p Parser) assign_stmt() ast.Stmt {
return p.partial_assign_stmt(p.expr_list()) exprs, comments := p.expr_list()
return p.partial_assign_stmt(exprs, comments)
} }
fn (mut p Parser) check_undefined_variables(exprs []ast.Expr, val ast.Expr) { fn (mut p Parser) check_undefined_variables(exprs []ast.Expr, val ast.Expr) {
@ -77,12 +78,15 @@ fn (mut p Parser) check_cross_variables(exprs []ast.Expr, val ast.Expr) bool {
return false return false
} }
fn (mut p Parser) partial_assign_stmt(left []ast.Expr) ast.Stmt { fn (mut p Parser) partial_assign_stmt(left []ast.Expr, left_comments []ast.Comment) ast.Stmt {
p.is_stmt_ident = false p.is_stmt_ident = false
op := p.tok.kind op := p.tok.kind
pos := p.tok.position() pos := p.tok.position()
p.next() p.next()
right := p.expr_list() right, right_comments := p.expr_list()
mut comments := []ast.Comment{cap: left_comments.len + right_comments.len}
comments << left_comments
comments << right_comments
mut has_cross_var := false mut has_cross_var := false
if op == .decl_assign { if op == .decl_assign {
// a, b := a + 1, b // a, b := a + 1, b
@ -155,6 +159,7 @@ fn (mut p Parser) partial_assign_stmt(left []ast.Expr) ast.Stmt {
op: op op: op
left: left left: left
right: right right: right
comments: comments
pos: pos pos: pos
has_cross_var: has_cross_var has_cross_var: has_cross_var
is_simple: p.inside_for && p.tok.kind == .lcbr is_simple: p.inside_for && p.tok.kind == .lcbr

View File

@ -37,12 +37,10 @@ fn (mut p Parser) array_init() ast.ArrayInit {
} else { } else {
// [1,2,3] or [const]byte // [1,2,3] or [const]byte
for i := 0; p.tok.kind != .rsbr; i++ { for i := 0; p.tok.kind != .rsbr; i++ {
expr := p.expr(0) exprs << p.expr(0)
exprs << expr
if p.tok.kind == .comma { if p.tok.kind == .comma {
p.next() p.next()
} }
// p.check_comment()
} }
line_nr := p.tok.line_nr line_nr := p.tok.line_nr
$if tinyc { $if tinyc {

View File

@ -24,12 +24,10 @@ fn (mut p Parser) lock_expr() ast.LockExpr {
p.check(.comma) p.check(.comma)
} }
stmts := p.parse_block() stmts := p.parse_block()
return ast.LockExpr { return ast.LockExpr{
lockeds: lockeds lockeds: lockeds
stmts: stmts stmts: stmts
is_rlock: is_rlock is_rlock: is_rlock
pos: pos pos: pos
} }
} }

View File

@ -13,7 +13,7 @@ pub fn (mut p Parser) parse_array_type() table.Type {
p.next() p.next()
p.check(.rsbr) p.check(.rsbr)
elem_type := p.parse_type() elem_type := p.parse_type()
//sym := p.table.get_type_symbol(elem_type) // sym := p.table.get_type_symbol(elem_type)
idx := p.table.find_or_register_array_fixed(elem_type, size, 1) idx := p.table.find_or_register_array_fixed(elem_type, size, 1)
return table.new_type(idx) return table.new_type(idx)
} }
@ -21,10 +21,8 @@ pub fn (mut p Parser) parse_array_type() table.Type {
p.check(.rsbr) p.check(.rsbr)
elem_type := p.parse_type() elem_type := p.parse_type()
mut nr_dims := 1 mut nr_dims := 1
// detect attr // detect attr
not_attr := p.peek_tok.kind != .name && p.peek_tok2.kind !in [.semicolon, .rsbr] not_attr := p.peek_tok.kind != .name && p.peek_tok2.kind !in [.semicolon, .rsbr]
for p.tok.kind == .lsbr && not_attr { for p.tok.kind == .lsbr && not_attr {
p.next() p.next()
p.check(.rsbr) p.check(.rsbr)
@ -100,7 +98,6 @@ pub fn (mut p Parser) parse_type_with_mut(is_mut bool) table.Type {
// Parses any language indicators on a type. // Parses any language indicators on a type.
pub fn (mut p Parser) parse_language() table.Language { pub fn (mut p Parser) parse_language() table.Language {
language := if p.tok.lit == 'C' { language := if p.tok.lit == 'C' {
table.Language.c table.Language.c
} else if p.tok.lit == 'JS' { } else if p.tok.lit == 'JS' {
@ -108,12 +105,10 @@ pub fn (mut p Parser) parse_language() table.Language {
} else { } else {
table.Language.v table.Language.v
} }
if language != .v { if language != .v {
p.next() p.next()
p.check(.dot) p.check(.dot)
} }
return language return language
} }
@ -124,7 +119,6 @@ pub fn (mut p Parser) parse_type() table.Type {
line_nr := p.tok.line_nr line_nr := p.tok.line_nr
p.next() p.next()
is_optional = true is_optional = true
if p.tok.line_nr > line_nr { if p.tok.line_nr > line_nr {
mut typ := table.void_type mut typ := table.void_type
if is_optional { if is_optional {
@ -135,7 +129,6 @@ pub fn (mut p Parser) parse_type() table.Type {
} }
is_shared := p.tok.kind == .key_shared is_shared := p.tok.kind == .key_shared
is_atomic := p.tok.kind == .key_atomic is_atomic := p.tok.kind == .key_atomic
mut nr_muls := 0 mut nr_muls := 0
if p.tok.kind == .key_mut || is_shared || is_atomic { if p.tok.kind == .key_mut || is_shared || is_atomic {
nr_muls++ nr_muls++
@ -192,7 +185,7 @@ pub fn (mut p Parser) parse_any_type(language table.Language, is_ptr bool) table
name = '${p.imports[name]}.$p.tok.lit' name = '${p.imports[name]}.$p.tok.lit'
} else if p.expr_mod != '' { } else if p.expr_mod != '' {
name = p.expr_mod + '.' + name name = p.expr_mod + '.' + name
} else if p.mod !in ['builtin'] && name !in p.table.type_idxs && name.len > 1 { } else if p.mod !in ['builtin'] && name !in p.table.type_idxs && name.len > 1 {
// `Foo` in module `mod` means `mod.Foo` // `Foo` in module `mod` means `mod.Foo`
name = p.mod + '.' + name name = p.mod + '.' + name
} }

View File

@ -137,7 +137,7 @@ fn (mut p Parser) parse() ast.File {
p.read_first_token() p.read_first_token()
mut stmts := []ast.Stmt{} mut stmts := []ast.Stmt{}
for p.tok.kind == .comment { for p.tok.kind == .comment {
stmts << p.comment() stmts << p.comment_stmt()
} }
// module // module
module_decl := p.module_decl() module_decl := p.module_decl()
@ -149,7 +149,7 @@ fn (mut p Parser) parse() ast.File {
continue continue
} }
if p.tok.kind == .comment { if p.tok.kind == .comment {
stmts << p.comment() stmts << p.comment_stmt()
continue continue
} }
break break
@ -372,7 +372,7 @@ fn (mut p Parser) check_name() string {
} }
pub fn (mut p Parser) top_stmt() ast.Stmt { pub fn (mut p Parser) top_stmt() ast.Stmt {
$if trace_parser? { $if trace_parser ? {
tok_pos := p.tok.position() tok_pos := p.tok.position()
eprintln('parsing file: ${p.file_name:-30} | tok.kind: ${p.tok.kind:-10} | tok.lit: ${p.tok.lit:-10} | tok_pos: ${tok_pos.str():-45} | top_stmt') eprintln('parsing file: ${p.file_name:-30} | tok.kind: ${p.tok.kind:-10} | tok.lit: ${p.tok.lit:-10} | tok_pos: ${tok_pos.str():-45} | top_stmt')
} }
@ -443,7 +443,7 @@ pub fn (mut p Parser) top_stmt() ast.Stmt {
return p.struct_decl() return p.struct_decl()
} }
.comment { .comment {
return p.comment() return p.comment_stmt()
} }
else { else {
if p.pref.is_script && !p.pref.is_test { if p.pref.is_script && !p.pref.is_test {
@ -488,19 +488,27 @@ pub fn (mut p Parser) comment() ast.Comment {
} }
} }
pub fn (mut p Parser) comment_stmt() ast.ExprStmt {
comment := p.comment()
return ast.ExprStmt{
expr: comment
pos: comment.pos
}
}
pub fn (mut p Parser) eat_comments() []ast.Comment { pub fn (mut p Parser) eat_comments() []ast.Comment {
mut comments := []ast.Comment{} mut comments := []ast.Comment{}
for { for {
if p.tok.kind != .comment { if p.tok.kind != .comment {
break break
} }
comments << p.check_comment() comments << p.comment()
} }
return comments return comments
} }
pub fn (mut p Parser) stmt(is_top_level bool) ast.Stmt { pub fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
$if trace_parser? { $if trace_parser ? {
tok_pos := p.tok.position() tok_pos := p.tok.position()
eprintln('parsing file: ${p.file_name:-30} | tok.kind: ${p.tok.kind:-10} | tok.lit: ${p.tok.lit:-10} | tok_pos: ${tok_pos.str():-45} | stmt($is_top_level)') eprintln('parsing file: ${p.file_name:-30} | tok.kind: ${p.tok.kind:-10} | tok.lit: ${p.tok.lit:-10} | tok_pos: ${tok_pos.str():-45} | stmt($is_top_level)')
} }
@ -546,7 +554,7 @@ pub fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
return p.parse_multi_expr(is_top_level) return p.parse_multi_expr(is_top_level)
} }
.comment { .comment {
return p.comment() return p.comment_stmt()
} }
.key_return { .key_return {
return p.return_stmt() return p.return_stmt()
@ -622,16 +630,22 @@ pub fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
} }
} }
fn (mut p Parser) expr_list() []ast.Expr { fn (mut p Parser) expr_list() ([]ast.Expr, []ast.Comment) {
mut exprs := []ast.Expr{} mut exprs := []ast.Expr{}
mut comments := []ast.Comment{}
for { for {
exprs << p.expr(0) expr := p.expr(0)
if p.tok.kind != .comma { if expr is ast.Comment {
break comments << expr
} else {
exprs << expr
if p.tok.kind != .comma {
break
}
p.next()
} }
p.next()
} }
return exprs return exprs, comments
} }
// when is_top_stmt is true attrs are added to p.attrs // when is_top_stmt is true attrs are added to p.attrs
@ -769,10 +783,10 @@ fn (mut p Parser) parse_multi_expr(is_top_level bool) ast.Stmt {
// a, mut b ... :=/= // multi-assign // a, mut b ... :=/= // multi-assign
// collect things upto hard boundaries // collect things upto hard boundaries
tok := p.tok tok := p.tok
left := p.expr_list() left, left_comments := p.expr_list()
left0 := left[0] left0 := left[0]
if p.tok.kind in [.assign, .decl_assign] || p.tok.kind.is_assign() { if p.tok.kind in [.assign, .decl_assign] || p.tok.kind.is_assign() {
return p.partial_assign_stmt(left) return p.partial_assign_stmt(left, left_comments)
} else if is_top_level && tok.kind !in [.key_if, .key_match, .key_lock, .key_rlock] && } else if is_top_level && tok.kind !in [.key_if, .key_match, .key_lock, .key_rlock] &&
left0 !is ast.CallExpr && left0 !is ast.PostfixExpr && !(left0 is ast.InfixExpr && left0 !is ast.CallExpr && left0 !is ast.PostfixExpr && !(left0 is ast.InfixExpr &&
(left0 as ast.InfixExpr).op == .left_shift) && (left0 as ast.InfixExpr).op == .left_shift) &&
@ -783,6 +797,7 @@ fn (mut p Parser) parse_multi_expr(is_top_level bool) ast.Stmt {
return ast.ExprStmt{ return ast.ExprStmt{
expr: left0 expr: left0
pos: tok.position() pos: tok.position()
comments: left_comments
is_expr: p.inside_for is_expr: p.inside_for
} }
} }
@ -791,6 +806,7 @@ fn (mut p Parser) parse_multi_expr(is_top_level bool) ast.Stmt {
vals: left vals: left
} }
pos: tok.position() pos: tok.position()
comments: left_comments
} }
} }
@ -1416,10 +1432,11 @@ fn (mut p Parser) return_stmt() ast.Return {
} }
} }
// return exprs // return exprs
exprs := p.expr_list() exprs, comments := p.expr_list()
end_pos := exprs.last().position() end_pos := exprs.last().position()
return ast.Return{ return ast.Return{
exprs: exprs exprs: exprs
comments: comments
pos: first_pos.extend(end_pos) pos: first_pos.extend(end_pos)
} }
} }

View File

@ -13,7 +13,9 @@ pub fn (mut p Parser) expr(precedence int) ast.Expr {
mut node := ast.Expr{} mut node := ast.Expr{}
is_stmt_ident := p.is_stmt_ident is_stmt_ident := p.is_stmt_ident
p.is_stmt_ident = false p.is_stmt_ident = false
p.eat_comments() if !p.pref.is_fmt {
p.eat_comments()
}
// Prefix // Prefix
match p.tok.kind { match p.tok.kind {
.key_mut, .key_shared, .key_atomic, .key_static { .key_mut, .key_shared, .key_atomic, .key_static {
@ -33,6 +35,9 @@ pub fn (mut p Parser) expr(precedence int) ast.Expr {
.string { .string {
node = p.string_expr() node = p.string_expr()
} }
.comment {
node = p.comment()
}
.dot { .dot {
// .enum_val // .enum_val
node = p.enum_val() node = p.enum_val()
@ -86,7 +91,7 @@ pub fn (mut p Parser) expr(precedence int) ast.Expr {
p.inside_unsafe = true p.inside_unsafe = true
stmts := p.parse_block() stmts := p.parse_block()
p.inside_unsafe = false p.inside_unsafe = false
node = ast.UnsafeExpr { node = ast.UnsafeExpr{
stmts: stmts stmts: stmts
pos: pos pos: pos
} }

View File

@ -165,7 +165,6 @@ fn (mut p Parser) sql_stmt() ast.SqlStmt {
} else if kind == .delete && n != 'from' { } else if kind == .delete && n != 'from' {
p.error('expecting `from`') p.error('expecting `from`')
} }
mut table_type := table.Type(0) mut table_type := table.Type(0)
mut where_expr := ast.Expr{} mut where_expr := ast.Expr{}
if kind == .insert { if kind == .insert {