cgen: lots of fixes

pull/4027/head
Alexander Medvednikov 2020-03-15 00:46:08 +01:00
parent 50ed4004f4
commit 843bb6dac1
15 changed files with 107 additions and 79 deletions

View File

@@ -982,7 +982,7 @@ fn (p mut Parser) check_name() string {
fn (p mut Parser) check_string() string {
s := p.lit
p.check(.str)
p.check(.string)
return s
}
@@ -1005,7 +1005,7 @@ fn (p &Parser) strtok() string {
}
return '`$p.lit`'
}
if p.tok == .str {
if p.tok == .string{
if p.lit.contains("'") && !p.lit.contains('"') {
return '"$p.lit"'
}
@@ -1296,7 +1296,7 @@ fn (p &Parser) print_tok() {
println(p.lit)
return
}
if p.tok == .str {
if p.tok == .string{
println('"$p.lit"')
return
}
@@ -2613,7 +2613,7 @@ fn (p mut Parser) map_init() string {
for {
key := p.lit
keys_gen += 'tos3("$key"), '
p.check(.str)
p.check(.string)
p.check(.colon)
p.fspace()
t,val_expr := p.tmp_expr()
@@ -2954,7 +2954,7 @@ fn (p mut Parser) return_st() {
}
else {
// Don't allow `return val` in functions that don't return anything
if p.tok == .name || p.tok == .number || p.tok == .str {
if p.tok == .name || p.tok == .number || p.tok == .string{
p.error_with_token_index('function `$p.cur_fn.name` should not return a value', p.cur_fn.fn_name_token_idx)
}
p.genln(deferred_text)

View File

@@ -11,7 +11,7 @@ fn (p mut Parser) inline_asm() {
p.check(.lcbr)
s := p.check_string()
p.genln('asm("$s"')
for p.tok == .str {
for p.tok == .string{
p.genln('"$p.lit"')
p.next()
}

View File

@@ -286,12 +286,12 @@ fn (p mut Parser) name_expr() string {
name = p.generic_dispatch.inst[name]
}
// Raw string (`s := r'hello \n ')
if name == 'r' && p.peek() == .str && p.prev_tok != .str_dollar {
if name == 'r' && p.peek() == .string&& p.prev_tok != .str_dollar {
p.string_expr()
return 'string'
}
// C string (a zero terminated one) C.func( c'hello' )
if name == 'c' && p.peek() == .str && p.prev_tok != .str_dollar {
if name == 'c' && p.peek() == .string&& p.prev_tok != .str_dollar {
p.string_expr()
return 'charptr'
}
@@ -913,7 +913,7 @@ fn (p mut Parser) factor() string {
typ = 'byte'
return typ
}
.str {
.string{
p.string_expr()
typ = 'string'
return typ
@@ -934,7 +934,7 @@ fn (p mut Parser) factor() string {
}
.lcbr {
// `m := { 'one': 1 }`
if p.peek() == .str {
if p.peek() == .string{
return p.map_init()
}
peek2 := p.tokens[p.token_idx + 1]

View File

@@ -1115,7 +1115,7 @@ fn (p mut Parser) fn_call_args(f mut Fn, generic_param_types []string) {
p.gen('/*YY f=$f.name arg=$arg.name is_moved=$arg.is_moved*/string_clone(')
}
// x64 println gen
if p.pref.backend == .x64 && i == 0 && f.name == 'println' && p.tok == .str && p.peek() == .rpar {
if p.pref.backend == .x64 && i == 0 && f.name == 'println' && p.tok == .string&& p.peek() == .rpar {
//p.x64.gen_print(p.lit)
}
mut typ := p.bool_expression()

View File

@@ -349,10 +349,10 @@ fn (s mut Scanner) scan() ScanRes {
if s.inter_end {
if s.text[s.pos] == s.quote {
s.inter_end = false
return scan_res(.str, '')
return scan_res(.string, '')
}
s.inter_end = false
return scan_res(.str, s.ident_string())
return scan_res(.string, s.ident_string())
}
s.skip_whitespace()
// end of file
@@ -473,7 +473,7 @@ fn (s mut Scanner) scan() ScanRes {
return scan_res(.question, '')
}
single_quote, double_quote {
return scan_res(.str, s.ident_string())
return scan_res(.string, s.ident_string())
}
`\`` {
// ` // apostrophe balance comment. do not remove
@@ -513,9 +513,9 @@ fn (s mut Scanner) scan() ScanRes {
s.pos++
if s.text[s.pos] == s.quote {
s.inside_string = false
return scan_res(.str, '')
return scan_res(.string, '')
}
return scan_res(.str, s.ident_string())
return scan_res(.string, s.ident_string())
}
else {
return scan_res(.rcbr, '')
@@ -558,19 +558,19 @@ fn (s mut Scanner) scan() ScanRes {
// println( 'file: ' + @FILE + ' | line: ' + @LINE + ' | fn: ' + @FN)
// ... which is useful while debugging/tracing
if name == 'FN' {
return scan_res(.str, s.fn_name)
return scan_res(.string, s.fn_name)
}
if name == 'FILE' {
return scan_res(.str, cescaped_path(os.realpath(s.file_path)))
return scan_res(.string, cescaped_path(os.realpath(s.file_path)))
}
if name == 'LINE' {
return scan_res(.str, (s.line_nr + 1).str())
return scan_res(.string, (s.line_nr + 1).str())
}
if name == 'COLUMN' {
return scan_res(.str, (s.current_column()).str())
return scan_res(.string, (s.current_column()).str())
}
if name == 'VHASH' {
return scan_res(.str, vhash())
return scan_res(.string, vhash())
}
if !is_key(name) {
s.error('@ must be used before keywords (e.g. `@type string`)')

View File

@@ -52,7 +52,7 @@ fn (p mut Parser) string_expr() {
mut args := '"'
mut format := '"'
mut complex_inter := false // for vfmt
for p.tok == .str {
for p.tok == .string{
// Add the string between %d's
p.lit = p.lit.replace('%', '%%')
format += format_str(p.lit)
@@ -64,7 +64,7 @@ fn (p mut Parser) string_expr() {
p.check(.str_dollar)
// If there's no string after current token, it means we are in
// a complex expression (`${...}`)
if p.peek() != .str {
if p.peek() != .string{
p.fgen('{')
complex_inter = true
}

View File

@@ -304,7 +304,7 @@ fn (p mut Parser) struct_decl(generic_param_types []string) {
.name {
val = p.check_name()
}
.str {
.string{
val = p.check_string()
}
else {

View File

@@ -15,7 +15,7 @@ enum TokenKind {
eof
name // user
number // 123
str // 'foo'
string // 'foo'
str_inter // 'name=$user.name'
chartoken // `A`
plus
@@ -143,7 +143,7 @@ fn build_token_str() []string {
s[TokenKind.eof] = 'eof'
s[TokenKind.name] = 'name'
s[TokenKind.number] = 'number'
s[TokenKind.str] = 'STR'
s[TokenKind.string] = 'STR'
s[TokenKind.chartoken] = 'char'
s[TokenKind.plus] = '+'
s[TokenKind.minus] = '-'
@@ -292,7 +292,7 @@ pub fn (t Token) str() string {
if t.tok == .chartoken {
return '`$t.lit`'
}
if t.tok == .str {
if t.tok == .string {
return "'$t.lit'"
}
if t.tok == .eof {

View File

@@ -437,8 +437,9 @@ pub struct EnumVal {
pub:
enum_name string
val string
mod string // for full path `mod_Enum_val`
// typ table.Type
pos token.Position
// name string
}
pub struct EnumDecl {

View File

@@ -132,6 +132,7 @@ pub fn (c mut Checker) infix_expr(infix_expr mut ast.InfixExpr) table.Type {
infix_expr.left_type = left_type
c.expected_type = left_type
right_type := c.expr(infix_expr.right)
infix_expr.right_type = right_type
if !c.table.check(right_type, left_type) {
left := c.table.get_type_symbol(left_type)
right := c.table.get_type_symbol(right_type)

View File

@@ -63,7 +63,7 @@ pub fn (g mut Gen) typ(t table.Type) string {
if styp.starts_with('C__') {
styp = styp[3..]
}
if styp in ['stat', 'dirent*'] {
if styp in ['stat', 'dirent*', 'tm'] {
// TODO perf and other C structs
styp = 'struct $styp'
}
@@ -106,9 +106,8 @@ pub fn (g mut Gen) write_typedef_types() {
styp := typ.name.replace('.', '__')
g.definitions.writeln('typedef map $styp;')
}
.function {
// TODO:
}
.function {}
else {
continue
}
@@ -290,8 +289,11 @@ fn (g mut Gen) stmt(node ast.Stmt) {
}
ast.HashStmt {
// #include etc
typ := it.val.all_before(' ')
if typ in ['#include', '#define'] {
g.definitions.writeln('#$it.val')
}
}
ast.Import {}
ast.Return {
g.return_statement(it)
@@ -466,7 +468,7 @@ fn (g mut Gen) fn_args(args []table.Arg, is_variadic bool) {
no_names := args.len > 0 && args[0].name == 'arg_1'
for i, arg in args {
arg_type_sym := g.table.get_type_symbol(arg.typ)
mut arg_type_name := arg_type_sym.name.replace('.', '__')
mut arg_type_name := g.typ(arg.typ) // arg_type_sym.name.replace('.', '__')
is_varg := i == args.len - 1 && is_variadic
if is_varg {
g.varaidic_args[int(arg.typ).str()] = 0
@@ -486,14 +488,14 @@ fn (g mut Gen) fn_args(args []table.Arg, is_variadic bool) {
}
else {
mut nr_muls := table.type_nr_muls(arg.typ)
mut s := arg_type_name + ' ' + arg.name
s := arg_type_name + ' ' + arg.name
if arg.is_mut {
// mut arg needs one *
nr_muls = 1
}
if nr_muls > 0 && !is_varg {
s = arg_type_name + strings.repeat(`*`, nr_muls) + ' ' + arg.name
}
// if nr_muls > 0 && !is_varg {
// s = arg_type_name + strings.repeat(`*`, nr_muls) + ' ' + arg.name
// }
g.write(s)
g.definitions.write(s)
}
@@ -625,6 +627,7 @@ fn (g mut Gen) expr(node ast.Expr) {
g.write("'$it.val'")
}
ast.EnumVal {
// g.write('/*EnumVal*/${it.mod}${it.enum_name}_$it.val')
g.write('${it.enum_name}_$it.val')
}
ast.FloatLiteral {
@@ -718,8 +721,16 @@ fn (g mut Gen) expr(node ast.Expr) {
}
g.write('if (')
for i, expr in branch.exprs {
if type_sym.kind == .string {
g.write('string_eq($tmp, ')
}
else {
g.write('$tmp == ')
}
g.expr(expr)
if type_sym.kind == .string {
g.write(')')
}
if i < branch.exprs.len - 1 {
g.write(' || ')
}
@@ -948,17 +959,30 @@ fn (g mut Gen) infix_expr(node ast.InfixExpr) {
}
// arr << val
else if node.op == .left_shift && g.table.get_type_symbol(node.left_type).kind == .array {
tmp := g.new_tmp_var()
sym := g.table.get_type_symbol(node.left_type)
right_sym := g.table.get_type_symbol(node.right_type)
if right_sym.kind == .array {
// push an array => PUSH_MANY
g.write('_PUSH_MANY(&')
g.expr(node.left)
g.write(', (')
g.expr(node.right)
styp := g.typ(node.left_type)
g.write('), $tmp, $styp)')
}
else {
// push a single element
info := sym.info as table.Array
elem_type_str := g.typ(info.elem_type)
// g.write('array_push(&')
tmp := g.new_tmp_var()
g.write('_PUSH(&')
g.expr(node.left)
g.write(', (')
g.expr(node.right)
g.write('), $tmp, $elem_type_str)')
}
}
else {
// if node.op == .dot {
// println('!! dot')
@@ -1156,16 +1180,16 @@ fn (g mut Gen) call_args(args []ast.CallArg) {
for i, arg in args {
if table.type_is_variadic(arg.expected_type) {
struct_name := 'varg_' + g.typ(arg.expected_type).replace('*', '_ptr')
len := args.len-i
len := args.len - i
type_str := int(arg.expected_type).str()
if len > g.varaidic_args[type_str] {
g.varaidic_args[type_str] = len
}
g.write('($struct_name){.len=$len,.args={')
for j in i..args.len {
for j in i .. args.len {
g.ref_or_deref_arg(args[j])
g.expr(args[j].expr)
if j < args.len-1 {
if j < args.len - 1 {
g.write(', ')
}
}

View File

@@ -46,6 +46,7 @@ const (
// c_headers
#include <stdio.h> // TODO remove all these includes, define all function signatures and types manually
#include <stdlib.h>
#include <float.h>
//#include "fns.h"
#include <signal.h>

View File

@@ -613,6 +613,7 @@ pub fn (p mut Parser) name_expr() ast.Expr {
val: val
pos: p.tok.position()
mod: mod
}
}
else {
@@ -633,7 +634,7 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
.name {
node = p.name_expr()
}
.str {
.string {
node = p.string_expr()
}
.dot {
@@ -702,7 +703,7 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
// Map `{"age": 20}` or `{ x | foo:bar, a:10 }`
.lcbr {
p.next()
if p.tok.kind == .str {
if p.tok.kind == .string{
mut keys := []ast.Expr
mut vals := []ast.Expr
for p.tok.kind != .rcbr && p.tok.kind != .eof {
@@ -1168,7 +1169,7 @@ fn (p mut Parser) string_expr() ast.Expr {
return node
}
// Handle $ interpolation
for p.tok.kind == .str {
for p.tok.kind == .string{
p.next()
if p.tok.kind != .str_dollar {
continue

View File

@@ -359,10 +359,10 @@ pub fn (s mut Scanner) scan() token.Token {
if s.inter_end {
if s.text[s.pos] == s.quote {
s.inter_end = false
return s.scan_res(.str, '')
return s.scan_res(.string, '')
}
s.inter_end = false
return s.scan_res(.str, s.ident_string())
return s.scan_res(.string, s.ident_string())
}
s.skip_whitespace()
// end of file
@@ -483,7 +483,7 @@ pub fn (s mut Scanner) scan() token.Token {
return s.scan_res(.question, '')
}
single_quote, double_quote {
return s.scan_res(.str, s.ident_string())
return s.scan_res(.string, s.ident_string())
}
`\`` {
// ` // apostrophe balance comment. do not remove
@@ -523,9 +523,9 @@ pub fn (s mut Scanner) scan() token.Token {
s.pos++
if s.text[s.pos] == s.quote {
s.inside_string = false
return s.scan_res(.str, '')
return s.scan_res(.string, '')
}
return s.scan_res(.str, s.ident_string())
return s.scan_res(.string, s.ident_string())
}
else {
return s.scan_res(.rcbr, '')
@@ -568,19 +568,19 @@ pub fn (s mut Scanner) scan() token.Token {
// println( 'file: ' + @FILE + ' | line: ' + @LINE + ' | fn: ' + @FN)
// ... which is useful while debugging/tracing
if name == 'FN' {
return s.scan_res(.str, s.fn_name)
return s.scan_res(.string, s.fn_name)
}
if name == 'FILE' {
return s.scan_res(.str, cescaped_path(os.realpath(s.file_path)))
return s.scan_res(.string, cescaped_path(os.realpath(s.file_path)))
}
if name == 'LINE' {
return s.scan_res(.str, (s.line_nr + 1).str())
return s.scan_res(.string, (s.line_nr + 1).str())
}
if name == 'COLUMN' {
return s.scan_res(.str, (s.current_column()).str())
return s.scan_res(.string, (s.current_column()).str())
}
if name == 'VHASH' {
return s.scan_res(.str, vhash())
return s.scan_res(.string, vhash())
}
if !token.is_key(name) {
s.error('@ must be used before keywords (e.g. `@type string`)')

View File

@@ -16,7 +16,7 @@ pub enum Kind {
eof
name // user
number // 123
str // 'foo'
string // 'foo'
str_inter // 'name=$user.name'
chartoken // `A`
plus
@@ -148,7 +148,7 @@ fn build_token_str() []string {
s[Kind.eof] = 'eof'
s[Kind.name] = 'name'
s[Kind.number] = 'number'
s[Kind.str] = 'STR'
s[Kind.string] = 'STR'
s[Kind.chartoken] = 'char'
s[Kind.plus] = '+'
s[Kind.minus] = '-'
@@ -287,7 +287,7 @@ pub fn (t Kind) str() string {
if t == .chartoken {
return 'char' // '`lit`'
}
if t == .str {
if t == .string {
return 'str' // "'lit'"
}
/*
@@ -417,7 +417,7 @@ pub fn (tok Token) precedence() int {
// is_scalar returns true if the token is a scalar
pub fn (tok Token) is_scalar() bool {
return tok.kind in [.number, .str]
return tok.kind in [.number, .string]
}
// is_unary returns true if the token can be in a unary expression