json: maps, memory leak, and error forwarding (#6116)

pull/6144/head
Vinicius Rangel 2020-08-15 21:30:37 -03:00 committed by GitHub
parent e7b2aefd36
commit bf065674cc
3 changed files with 296 additions and 47 deletions
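The change adds map support to the json module's code generation, deletes the intermediate cJSON object after encoding/decoding (fixing the memory leak), and forwards decode errors through the returned Option instead of swallowing them. A minimal usage sketch in V, based on the tests added in this commit (the struct name `Sizes` and the input strings are illustrative, not part of the diff):

import json

struct Sizes {
	nums []int
}

fn main() {
	// Maps with string keys can now be encoded and decoded directly.
	numbers := {
		'one': 1
		'two': 2
	}
	println(json.encode(numbers)) // {"one":1,"two":2}
	m := json.decode(map[string]int, '{"one":1,"two":2}') or {
		println(err)
		return
	}
	println(m)
	// Type mismatches are now forwarded to the caller's `or` block.
	sizes := json.decode(Sizes, '{"nums":{"bad":1}}') or {
		println(err) // e.g. Json element is not an array: {"bad":1}
		return
	}
	println(sizes)
}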

View File

@@ -7,10 +7,10 @@ enum JobTitle {
}

struct Employee {
	name string
	age int
	salary f32
	title JobTitle
}

fn test_simple() {
@@ -37,10 +37,10 @@ struct User2 {
struct User {
	age int
	nums []int
-	last_name string [json:lastName]
-	is_registered bool [json:IsRegistered]
-	typ int [json:'type']
-	pets string [raw; json:'pet_animals']
+	last_name string [json: lastName]
+	is_registered bool [json: IsRegistered]
+	typ int [json: 'type']
+	pets string [raw; json: 'pet_animals']
}

fn test_parse_user() {
@@ -113,3 +113,149 @@ fn test_struct_in_struct() {
	assert country.cities[1].name == 'Manchester'
	println(country.cities)
}

fn test_encode_map() {
	expected := '{"one":1,"two":2,"three":3,"four":4}'
	numbers := {
		'one': 1
		'two': 2
		'three': 3
		'four': 4
	}
	out := json.encode(numbers)
	println(out)
	assert out == expected
}

fn test_parse_map() {
	expected := {
		'one': 1
		'two': 2
		'three': 3
		'four': 4
	}
	out := json.decode(map[string]int, '{"one":1,"two":2,"three":3,"four":4}') or {
		assert false
		r := {
			'': 0
		}
		r
	}
	println(out)
	assert out == expected
}
struct Data {
	countries []Country
	users map[string]User
	extra map[string]map[string]int
}

fn test_nested_type() {
	data_expected := '{"countries":[{"cities":[{"name":"London"},{"name":"Manchester"}],"name":"UK"},{"cities":[{"name":"Donlon"},{"name":"Termanches"}],"name":"KU"}],"users":{"Foo":{"age":10,"nums":[1,2,3],"lastName":"Johnson","IsRegistered":true,"type":0,"pet_animals":"little foo"},"Boo":{"age":20,"nums":[5,3,1],"lastName":"Smith","IsRegistered":false,"type":4,"pet_animals":"little boo"}},"extra":{"2":{"n1":2,"n2":4,"n3":8,"n4":16},"3":{"n1":3,"n2":9,"n3":27,"n4":81}}}'
	data := Data{
		countries: [
			Country{
				name: 'UK'
				cities: [City{'London'},
					City{'Manchester'},
				]
			},
			Country{
				name: 'KU'
				cities: [City{'Donlon'},
					City{'Termanches'},
				]
			},
		]
		users: {
			'Foo': User{
				age: 10
				nums: [1, 2, 3]
				last_name: 'Johnson'
				is_registered: true
				typ: 0
				pets: 'little foo'
			},
			'Boo': User{
				age: 20
				nums: [5, 3, 1]
				last_name: 'Smith'
				is_registered: false
				typ: 4
				pets: 'little boo'
			}
		},
		extra: {
			'2': {
				'n1': 2
				'n2': 4
				'n3': 8
				'n4': 16
			},
			'3': {
				'n1': 3
				'n2': 9
				'n3': 27
				'n4': 81
			},
		}
	}
	out := json.encode(data)
	println(out)
	assert out == data_expected
	data2 := json.decode(Data, data_expected) or {
		assert false
		Data{}
	}
	assert data2.countries.len == data.countries.len
	for i in 0..1 {
		assert data2.countries[i].name == data.countries[i].name
		assert data2.countries[i].cities.len == data.countries[i].cities.len
		for j in 0..1 {
			assert data2.countries[i].cities[j].name == data.countries[i].cities[j].name
		}
	}
	for key, user in data.users {
		assert data2.users[key].age == user.age
		assert data2.users[key].nums == user.nums
		assert data2.users[key].last_name == user.last_name
		assert data2.users[key].is_registered == user.is_registered
		assert data2.users[key].typ == user.typ
		// assert data2.users[key].pets == user.pets // TODO FIX
	}
	for k, v in data.extra {
		for k2, v2 in v {
			assert data2.extra[k][k2] == v2
		}
	}
}

fn test_errors() {
	invalid_array := fn () {
		data := '{"countries":[{"cities":[{"name":"London"},{"name":"Manchester"}],"name":"UK"},{"cities":{"name":"Donlon"},"name":"KU"}],"users":{"Foo":{"age":10,"nums":[1,2,3],"lastName":"Johnson","IsRegistered":true,"type":0,"pet_animals":"little foo"},"Boo":{"age":20,"nums":[5,3,1],"lastName":"Smith","IsRegistered":false,"type":4,"pet_animals":"little boo"}},"extra":{"2":{"n1":2,"n2":4,"n3":8,"n4":16},"3":{"n1":3,"n2":9,"n3":27,"n4":81}}}'
		json.decode(Data, data) or {
			println(err)
			assert err.starts_with('Json element is not an array:')
			return
		}
		assert false
	}
	invalid_object := fn() {
		data := '{"countries":[{"cities":[{"name":"London"},{"name":"Manchester"}],"name":"UK"},{"cities":[{"name":"Donlon"},{"name":"Termanches"}],"name":"KU"}],"users":[{"age":10,"nums":[1,2,3],"lastName":"Johnson","IsRegistered":true,"type":0,"pet_animals":"little foo"},{"age":20,"nums":[5,3,1],"lastName":"Smith","IsRegistered":false,"type":4,"pet_animals":"little boo"}],"extra":{"2":{"n1":2,"n2":4,"n3":8,"n4":16},"3":{"n1":3,"n2":9,"n3":27,"n4":81}}}'
		json.decode(Data, data) or {
			println(err)
			assert err.starts_with('Json element is not an object:')
			return
		}
		assert false
	}
	invalid_array()
	invalid_object()
}

View File

@@ -454,19 +454,42 @@ fn (mut g Gen) fn_call(node ast.CallExpr) {
	is_json_decode := name == 'json.decode'
	g.is_json_fn = is_json_encode || is_json_decode
	mut json_type_str := ''
+	mut json_obj := ''
	if g.is_json_fn {
-		if name == 'json.encode' {
-			g.write('json__json_print(')
+		json_obj = g.new_tmp_var()
+		mut tmp2 := ''
+		cur_line := g.go_before_stmt(0)
+		if is_json_encode {
			g.gen_json_for_type(node.args[0].typ)
			json_type_str = g.table.get_type_symbol(node.args[0].typ).name
+			// `json__encode` => `json__encode_User`
+			encode_name := c_name(name) + '_' + util.no_dots(json_type_str)
+			g.writeln('// json.encode')
+			g.write('cJSON* $json_obj = ${encode_name}(')
+			g.call_args(node.args, node.expected_arg_types)
+			g.writeln(');')
+			tmp2 = g.new_tmp_var()
+			g.writeln('string $tmp2 = json__json_print($json_obj);')
		} else {
-			g.insert_before_stmt('// json.decode')
			ast_type := node.args[0].expr as ast.Type
			// `json.decode(User, s)` => json.decode_User(s)
-			sym := g.table.get_type_symbol(ast_type.typ)
-			name += '_' + sym.name
+			typ := c_name(g.table.get_type_symbol(ast_type.typ).name)
+			fn_name := c_name(name) + '_' + typ
			g.gen_json_for_type(ast_type.typ)
+			g.writeln('// json.decode')
+			g.write('cJSON* $json_obj = json__json_parse(')
+			// Skip the first argument in json.decode which is a type
+			// its name was already used to generate the function call
+			g.call_args(node.args[1..], node.expected_arg_types)
+			g.writeln(');')
+			tmp2 = g.new_tmp_var()
+			g.writeln('Option_$typ $tmp2 = $fn_name ($json_obj);')
		}
+		g.write('cJSON_Delete($json_obj);')
+		g.write('\n$cur_line')
+		name = ''
+		json_obj = tmp2
	}
	if node.language == .c {
		// Skip "C."
@@ -475,10 +498,6 @@ fn (mut g Gen) fn_call(node ast.CallExpr) {
	} else {
		name = c_name(name)
	}
-	if is_json_encode {
-		// `json__encode` => `json__encode_User`
-		name += '_' + util.no_dots(json_type_str)
-	}
	if node.generic_type != table.void_type && node.generic_type != 0 {
		// `foo<int>()` => `foo_int()`
		name += '_' + g.typ(node.generic_type)
@@ -581,21 +600,15 @@ fn (mut g Gen) fn_call(node ast.CallExpr) {
		g.write(')')
	} else {
		g.write('${g.get_ternary_name(name)}(')
-		if is_json_decode {
-			g.write('json__json_parse(')
-			// Skip the first argument in json.decode which is a type
-			// its name was already used to generate the function call
-			g.call_args(node.args[1..], node.expected_arg_types)
+		if g.is_json_fn {
+			g.write(json_obj)
		} else {
			g.call_args(node.args, node.expected_arg_types)
		}
		g.write(')')
	}
	g.is_c_call = false
-	if g.is_json_fn {
-		g.write(')')
-		g.is_json_fn = false
-	}
+	g.is_json_fn = false
	if free_tmp_arg_vars {
		g.tmp_idxs.clear()
	}

View File

@@ -63,15 +63,24 @@ $dec_fn_dec {
	g.json_forward_decls.writeln('$enc_fn_dec;\n')
	enc.writeln('
$enc_fn_dec {
-\tcJSON *o = cJSON_CreateObject();')
+\tcJSON *o;')
	if sym.kind == .array {
		// Handle arrays
		value_type := g.table.value_type(typ)
+		// If we have `[]Profile`, have to register a Profile en(de)coder first
		g.gen_json_for_type(value_type)
		dec.writeln(g.decode_array(value_type))
		enc.writeln(g.encode_array(value_type))
		// enc += g.encode_array(t)
+	} else if sym.kind == .map {
+		// Handle maps
+		m := sym.info as table.Map
+		g.gen_json_for_type(m.key_type)
+		g.gen_json_for_type(m.value_type)
+		dec.writeln(g.decode_map(m.key_type, m.value_type))
+		enc.writeln(g.encode_map(m.key_type, m.value_type))
	} else {
+		enc.writeln('\to = cJSON_CreateObject();')
		// Structs. Range through fields
		if sym.info !is table.Struct {
			verror('json: $sym.name is not struct')
@@ -90,19 +99,24 @@ $enc_fn_dec {
			}
			field_type := g.typ(field.typ)
			if field.attrs.contains('raw') {
-				dec.writeln(' res . ${c_name(field.name)} = tos2(cJSON_PrintUnformatted(' + 'js_get(root, "$name")));')
+				dec.writeln('\tres.${c_name(field.name)} = tos2(cJSON_PrintUnformatted(' + 'js_get(root, "$name")));')
			} else {
				// Now generate decoders for all field types in this struct
				// need to do it here so that these functions are generated first
				g.gen_json_for_type(field.typ)
				dec_name := js_dec_name(field_type)
				if is_js_prim(field_type) {
-					dec.writeln(' res . ${c_name(field.name)} = $dec_name (js_get(root, "$name"));')
+					dec.writeln('\tres.${c_name(field.name)} = $dec_name (js_get(root, "$name"));')
				} else if g.table.get_type_symbol(field.typ).kind == .enum_ {
-					dec.writeln(' res . ${c_name(field.name)} = json__decode_u64(js_get(root, "$name"));')
+					dec.writeln('\tres.${c_name(field.name)} = json__decode_u64(js_get(root, "$name"));')
				} else {
					// dec.writeln(' $dec_name (js_get(root, "$name"), & (res . $field.name));')
-					dec.writeln(' res . ${c_name(field.name)} = *($field_type*) $dec_name (js_get(root,"$name")).data;')
+					tmp := g.new_tmp_var()
+					dec.writeln('\tOption_$field_type $tmp = $dec_name (js_get(root,"$name"));')
+					dec.writeln('\tif(!${tmp}.ok) {')
+					dec.writeln('\t\treturn *(Option_$styp*) &$tmp;')
+					dec.writeln('\t}')
+					dec.writeln('\tres.${c_name(field.name)} = *($field_type*) ${tmp}.data;')
				}
			}
			mut enc_name := js_enc_name(field_type)
@@ -115,9 +129,9 @@ $enc_fn_dec {
	}
	// cJSON_delete
	// p.cgen.fns << '$dec return opt_ok(res); \n}'
-	dec.writeln('Option_$styp ret;')
-	dec.writeln('opt_ok2(&res, (OptionBase*)&ret, sizeof(res));')
-	dec.writeln('return ret;\n}')
+	dec.writeln('\tOption_$styp ret;')
+	dec.writeln('\topt_ok2(&res, (OptionBase*)&ret, sizeof(res));')
+	dec.writeln('\treturn ret;\n}')
	enc.writeln('\treturn o;\n}')
	g.definitions.writeln(dec.str())
	g.gowrappers.writeln(enc.str())
@@ -142,22 +156,32 @@ fn is_js_prim(typ string) bool {
fn (mut g Gen) decode_array(value_type table.Type) string {
	styp := g.typ(value_type)
	fn_name := js_dec_name(styp)
-	// If we have `[]Profile`, have to register a Profile en(de)coder first
-	g.gen_json_for_type(value_type)
	mut s := ''
	if is_js_prim(styp) {
		s = '$styp val = ${fn_name}(jsval); '
	} else {
-		s = '\t$styp val = *($styp*) ${fn_name}(jsval).data; '
+		s = '
+		Option_$styp val2 = $fn_name (jsval);
+		if(!val2.ok) {
+			array_free(&res);
+			return *(Option_array_${styp}*)&val2;
+		}
+		$styp val = *($styp*)val2.data;
+		'
	}
	return '
-	res = __new_array(0, 0, sizeof($styp));
-	const cJSON *jsval = NULL;
-	cJSON_ArrayForEach(jsval, root)
-	{
-	$s
-		array_push(&res, &val);
-	}
+	if(!cJSON_IsArray(root)) {
+		Option err = v_error( string_add(tos_lit("Json element is not an array: "), tos2(cJSON_PrintUnformatted(root))) );
+		return *(Option_array_${styp} *)&err;
+	}
+	res = __new_array(0, 0, sizeof($styp));
+	const cJSON *jsval = NULL;
+	cJSON_ArrayForEach(jsval, root)
+	{
+		$s
+		array_push(&res, &val);
+	}
'
}
@@ -165,9 +189,75 @@ fn (mut g Gen) encode_array(value_type table.Type) string {
	styp := g.typ(value_type)
	fn_name := js_enc_name(styp)
	return '
	o = cJSON_CreateArray();
	for (int i = 0; i < val.len; i++){
		cJSON_AddItemToArray(o, $fn_name ( (($styp*)val.data)[i] ));
	}
'
}

fn (mut g Gen) decode_map(key_type, value_type table.Type) string {
	styp := g.typ(key_type)
	styp_v := g.typ(value_type)
	fn_name_v := js_dec_name(styp_v)
	mut s := ''
	if is_js_prim(styp_v) {
		s = '$styp_v val = $fn_name_v (js_get(root, jsval->string));'
	} else {
		s = '
		Option_$styp_v val2 = $fn_name_v (js_get(root, jsval->string));
		if(!val2.ok) {
			map_free(&res);
			return *(Option_map_${styp}_${styp_v}*)&val2;
		}
		$styp_v val = *($styp_v*)val2.data;
		'
	}
	return '
	if(!cJSON_IsObject(root)) {
		Option err = v_error( string_add(tos_lit("Json element is not an object: "), tos2(cJSON_PrintUnformatted(root))) );
		return *(Option_map_${styp}_${styp_v} *)&err;
	}
	res = new_map_1(sizeof($styp_v));
	cJSON *jsval = NULL;
	cJSON_ArrayForEach(jsval, root)
	{
		$s
		map_set(&res, tos2( (byteptr) jsval->string ) , &val );
	}
'
}

fn (mut g Gen) encode_map(key_type, value_type table.Type) string {
	styp := g.typ(key_type)
	styp_v := g.typ(value_type)
	fn_name_v := js_enc_name(styp_v)
	zero := g.type_default(value_type)
	keys_tmp := g.new_tmp_var()
	mut key := 'string key = '
	if key_type.is_string() {
		key += '(($styp*)${keys_tmp}.data)[i];'
	} else {
		// g.gen_str_for_type(key_type)
		// key += '${styp}_str((($styp*)${keys_tmp}.data)[i]);'
		verror('json: encode only maps with string keys')
	}
	return '
	o = cJSON_CreateObject();
	array_$styp $keys_tmp = map_keys(&val);
	for (int i = 0; i < ${keys_tmp}.len; ++i) {
		$key
		cJSON_AddItemToObject(o, (char*) key.str, $fn_name_v ( *($styp_v*) map_get(val, key, &($styp_v[]) { $zero } ) ) );
	}
	array_free(&$keys_tmp);
'
}