all: `!is` operator
parent 4e51867612
commit fb3e52ca63
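For context: this commit introduces `!is`, the negated form of the `is` type check, mirroring the existing `in`/`!in` pair. `expr !is Type` means the same as `!(expr is Type)` for sum types and interfaces. A minimal usage sketch (the `Node`/`Leaf`/`Branch` names are hypothetical, not taken from this diff):

```v
type Node = Branch | Leaf

struct Leaf {
	val int
}

struct Branch {
	left  int
	right int
}

fn describe(n Node) string {
	// `n !is Leaf` is the new spelling of `!(n is Leaf)`
	if n !is Leaf {
		return 'branch'
	}
	return 'leaf'
}

fn main() {
	println(describe(Leaf{ val: 1 })) // leaf
	println(describe(Branch{ left: 1, right: 2 })) // branch
}
```
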
@@ -546,7 +546,7 @@ pub fn (mut c Checker) infix_expr(mut infix_expr ast.InfixExpr) table.Type {
 		.right_shift {
 			return c.check_shift(left_type, right_type, left_pos, right_pos)
 		}
-		.key_is {
+		.key_is, .not_is {
 			type_expr := infix_expr.right as ast.Type
 			typ_sym := c.table.get_type_symbol(type_expr.typ)
 			if typ_sym.kind == .placeholder {

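A note on the checker change above: `.not_is` is simply added to the existing `.key_is` arm, so a `!is` expression gets the same validation as `is` — the right-hand side must be a type the table can resolve (a `.placeholder` symbol still produces an error). A hedged sketch of what this accepts and rejects (type names hypothetical):

```v
type Shape = Circle | Square

struct Circle {}

struct Square {}

fn main() {
	s := Shape(Circle{})
	println(s !is Square) // accepted: `Square` is a known variant -> true
	// println(s !is Triangle) // rejected by the checker: unknown type on the right-hand side
}
```
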
@@ -180,10 +180,10 @@ pub fn (mut d Doc) generate() ?bool {
 	// parse files
 	mut file_asts := []ast.File{}
 	// TODO: remove later for vlib
 	comments_mode := if d.with_comments {
 		scanner.CommentsMode.parse_comments
 	} else {
 		scanner.CommentsMode.skip_comments
 	}
 	for file in v_files {
 		file_ast := parser.parse_file(

@@ -222,13 +222,14 @@ pub fn (mut d Doc) generate() ?bool {
 		for si, stmt in stmts {
 			if stmt is ast.Comment { continue }

-			if !(stmt is ast.Module) {
+			//if !(stmt is ast.Module) {
+			if stmt !is ast.Module {
 				// todo: accumulate consts
 				mut name := d.get_name(stmt)
 				signature := d.get_signature(stmt)
 				pos := d.get_pos(stmt)

 				if !signature.starts_with('pub') && d.pub_only {
 					continue
 				}

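The vdoc hunk above is the first in-tree use of the new operator: the old parenthesized negation is kept as a comment and replaced by `stmt !is ast.Module`, which means the same thing. A small hedged equivalence check (generic names, not vdoc's):

```v
type Stmt = int | string

fn main() {
	stmt := Stmt('module main')
	old_style := !(stmt is int)
	new_style := stmt !is int
	assert old_style == new_style
	println('equivalent')
}
```
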
@@ -268,7 +269,7 @@ pub fn (mut d Doc) generate() ?bool {
 					last_comment := d.contents[d.contents.len-1].comment
 					d.contents[d.contents.len-1].comment = last_comment + '\n' + write_comment_bw(stmts, si-1)
 				}
 			}
 		}
 	}

@@ -286,4 +287,4 @@ pub fn generate(input_path string, pub_only bool, with_comments bool) ?Doc {
 	}

 	return doc
 }

@@ -1744,7 +1744,7 @@ fn (mut g Gen) infix_expr(node ast.InfixExpr) {
 	// g.infix_op = node.op
 	left_type := if node.left_type == table.t_type { g.cur_generic_type } else { node.left_type }
 	left_sym := g.table.get_type_symbol(left_type)
-	if node.op == .key_is {
+	if node.op in [.key_is, .not_is] {
 		g.is_expr(node)
 		return
 	}

@@ -3767,6 +3767,7 @@ fn (mut g Gen) as_cast(node ast.AsCast) {
 }

 fn (mut g Gen) is_expr(node ast.InfixExpr) {
+	eq := if node.op == .key_is { '==' } else { '!=' }
 	g.expr(node.left)
 	if node.left_type.is_ptr() {
 		g.write('->')

@@ -3775,14 +3776,14 @@ fn (mut g Gen) is_expr(node ast.InfixExpr) {
 	}
 	sym := g.table.get_type_symbol(node.left_type)
 	if sym.kind == .interface_ {
-		g.write('_interface_idx == ')
+		g.write('_interface_idx $eq ')
 		// `_Animal_Dog_index`
 		sub_type := node.right as ast.Type
 		sub_sym := g.table.get_type_symbol(sub_type.typ)
 		g.write('_${sym.name}_${sub_sym.name}_index')
 		return
 	} else if sym.kind == .sum_type {
-		g.write('typ == ')
+		g.write('typ $eq ')
 	}
 	g.expr(node.right)
 }

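In the generator, `is_expr` now serves both operators and only the comparison changes: `eq` is `==` for `.key_is` and `!=` for `.not_is`, applied to a sum type's `typ` field or an interface's `_interface_idx`. A rough, hedged sketch of the effect (the exact field names and index constants in the emitted C come from the type table; they are illustrative here):

```v
type Node = Branch | Leaf

struct Leaf {}

struct Branch {}

fn main() {
	n := Node(Leaf{})
	// Sum type case: cgen emits roughly `n.typ != <type id of Branch>`
	// (with `==` instead of `!=` if the operator had been `is`).
	println(n !is Branch) // true
	// Interface case (not shown here): the comparison targets `_interface_idx`,
	// e.g. roughly `x._interface_idx != _Animal_Dog_index` as in the hunk's comment.
}
```
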
@@ -198,7 +198,7 @@ fn (mut p Parser) infix_expr(left ast.Expr) ast.Expr {
 	pos := p.tok.position()
 	p.next()
 	mut right := ast.Expr{}
-	if op == .key_is {
+	if op in [.key_is, .not_is] {
 		p.expecting_type = true
 	}
 	right = p.expr(precedence)

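As with `is`, the parser flips `expecting_type` before reading the right-hand operand of `!is`, so what follows is parsed as a type rather than as an ordinary expression. A minimal hedged sketch:

```v
struct Point {
	x int
}

type Object = Point | string

fn main() {
	o := Object(Point{ x: 3 })
	// `string` on the right is read as a type, not a value, because the
	// parser saw `!is` and set `expecting_type` for the next expression.
	if o !is string {
		println('o holds a Point')
	}
}
```
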
@@ -946,6 +946,11 @@ pub fn (mut s Scanner) scan() token.Token {
 				s.pos += 2
 				return s.new_token(.not_in, '', 3)
 			}
+			else if nextc == `i` && s.text[s.pos+2] == `s` && s.text[s.pos+3].is_space() {
+				s.pos += 2
+				return s.new_token(.not_is, '', 3)
+			}
+			//
 			else {
 				return s.new_token(.not, '', 1)
 			}

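The scanner change mirrors the `!in` branch just above it: after `!`, the characters `is` only collapse into a single `.not_is` token (length 3) when the following character is whitespace, so identifiers that merely start with `is` are unaffected. A hedged sketch:

```v
type Value = int | string

fn main() {
	v := Value(42)
	if v !is string { // `!is ` followed by a space -> one .not_is token
		println('not a string')
	}
	is_done := false
	// `!is_done`: `s.text[s.pos+3]` is `_`, not a space, so this stays a plain
	// `.not` token followed by the identifier `is_done`.
	println(!is_done)
}
```
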
@@ -11,6 +11,7 @@ struct IntegerLiteral {
 fn handle(e Expr) string {
 	is_literal := e is IntegerLiteral
 	assert is_literal
+	assert !(e !is IntegerLiteral)
 	if e is IntegerLiteral {
 		println('int')
 	}

@@ -39,8 +40,8 @@ fn test_assignment_and_push() {
 	mut expr1 := Expr{}
 	mut arr1 := []Expr{}
 	expr := IntegerLiteral{
 		val: '111'
 	}
 	arr1 << expr
 	match arr1[0] {
 		IntegerLiteral {

@@ -54,24 +55,33 @@ fn test_assignment_and_push() {
 }

 // Test moving structs between master/sub arrays

 type Master = Sub1 | Sub2

 struct Sub1 {
 mut:
 	val int
 	name string
 }

 struct Sub2 {
 	name string
 	val int
 }

 fn test_converting_down() {
 	mut out := []Master{}
-	out << Sub1 { val: 1, name: 'one' }
-	out << Sub2 { val: 2, name: 'two'}
-	out << Sub2 { val: 3, name: 'three'}
+	out << Sub1{
+		val: 1
+		name: 'one'
+	}
+	out << Sub2{
+		val: 2
+		name: 'two'
+	}
+	out << Sub2{
+		val: 3
+		name: 'three'
+	}
 	mut res := []Sub2{cap: out.len}
 	for d in out {
 		match d {

@@ -79,10 +89,8 @@ fn test_converting_down() {
 			else {}
 		}
 	}

 	assert res[0].val == 2
 	assert res[0].name == 'two'
 	assert res[1].val == 3
 	assert res[1].name == 'three'
 }

@@ -45,6 +45,7 @@ pub enum Kind {
 	left_shift
 	right_shift
 	not_in // !in
+	not_is // !is
 	// at // @
 	assign // =
 	decl_assign // :=

@@ -169,6 +170,7 @@ fn build_token_str() []string {
 	s[Kind.ellipsis] = '...'
 	s[Kind.comma] = ','
 	s[Kind.not_in] = '!in'
+	s[Kind.not_is] = '!is'
 	// s[Kind.at] = '@'
 	s[Kind.semicolon] = ';'
 	s[Kind.colon] = ':'

|
@ -382,7 +384,7 @@ pub fn (tok Token) precedence() int {
|
||||||
.left_shift_assign, .right_shift_assign, .mult_assign, .xor_assign {
|
.left_shift_assign, .right_shift_assign, .mult_assign, .xor_assign {
|
||||||
return int(Precedence.assign)
|
return int(Precedence.assign)
|
||||||
}
|
}
|
||||||
.key_in, .not_in, .key_as, .key_is {
|
.key_in, .not_in, .key_as, .key_is, .not_is {
|
||||||
return int(Precedence.in_as)
|
return int(Precedence.in_as)
|
||||||
}
|
}
|
||||||
.logical_or, .and {
|
.logical_or, .and {
|
||||||
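`.not_is` gets the same `in_as` precedence as `in`, `!in`, `as`, and `is`, which is higher than `&&`/`||`, so a negated type check can sit directly inside a boolean condition without extra parentheses. A small hedged example:

```v
type Token = int | string

fn main() {
	t := Token('fn')
	keep := true
	// Parsed as `(t !is int) && keep` because `.not_is` binds tighter than `&&`.
	if t !is int && keep {
		println('string token, kept')
	}
}
```
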
|
@ -422,7 +424,7 @@ pub fn (k Kind) is_start_of_type() bool {
|
||||||
pub fn (kind Kind) is_infix() bool {
|
pub fn (kind Kind) is_infix() bool {
|
||||||
return kind in [.plus, .minus, .mod, .mul, .div, .eq, .ne, .gt, .lt, .key_in,
|
return kind in [.plus, .minus, .mod, .mul, .div, .eq, .ne, .gt, .lt, .key_in,
|
||||||
//
|
//
|
||||||
.key_as, .ge, .le, .logical_or, .xor, .not_in, .key_is,
|
.key_as, .ge, .le, .logical_or, .xor, .not_in, .key_is, .not_is,
|
||||||
//
|
//
|
||||||
.and, .dot, .pipe, .amp, .left_shift, .right_shift]
|
.and, .dot, .pipe, .amp, .left_shift, .right_shift]
|
||||||
}
|
}
|
||||||