toml: support for `[a."b.c"]` quoted keys (#12444)

parent 6c32c544e1
commit 9c508237bd
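In short, the change lets quoted key segments be used both in TOML input and when querying a parsed document. A minimal usage sketch, mirroring the new test file added further down in this diff (API calls are the ones used in that test):

    import toml

    fn main() {
        // `"b.c"` is one quoted key segment, not two dotted segments
        doc := toml.parse('a."b.c" = "V rocks!"') or { panic(err) }
        println(doc.value('a."b.c"').string()) // V rocks!
    }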
@@ -4,6 +4,7 @@
 module toml
 
 import time
+import toml.util
 
 // Pretty much all the same builtin types as the `json2.Any` type plus `time.Time`
 pub type Any = Null
@@ -150,26 +151,27 @@ pub fn (a Any) datetime() time.Time {
     }
 }
 
+// value queries a value from the map.
+// `key` should be in "dotted" form (`a.b.c`).
+// `key` supports quoted keys like `a."b.c"`.
 pub fn (m map[string]Any) value(key string) ?Any {
-    // return m[key] ?
-    key_split := key.split('.')
-    // util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, ' getting "${key_split[0]}"')
-    if key_split[0] in m.keys() {
-        value := m[key_split[0]] or {
-            return error(@MOD + '.' + @STRUCT + '.' + @FN + ' key "$key" does not exist')
-        }
-        // `match` isn't currently very suitable for these types of sum type constructs...
-        if value is map[string]Any {
-            nm := (value as map[string]Any)
-            next_key := key_split[1..].join('.')
-            if next_key == '' {
-                return value
-            }
-            return nm.value(next_key)
-        }
-        return value
+    key_split := util.parse_dotted_key(key) ?
+    return m.value_(key_split)
+}
+
+fn (m map[string]Any) value_(key []string) ?Any {
+    value := m[key[0]] or {
+        return error(@MOD + '.' + @STRUCT + '.' + @FN + ' key "${key[0]}" does not exist')
     }
-    return error(@MOD + '.' + @STRUCT + '.' + @FN + ' key "$key" does not exist')
+    // `match` isn't currently very suitable for these types of sum type constructs...
+    if value is map[string]Any {
+        if key.len <= 1 {
+            return value
+        }
+        nm := (value as map[string]Any)
+        return nm.value_(key[1..])
+    }
+    return value
 }
 
 pub fn (a []Any) as_strings() []string {
@@ -63,51 +63,6 @@ pub fn (dtt DateTimeType) str() string {
     return dtt.text
 }
 
-// value queries a value from the map.
-// `key` should be in "dotted" form (`a.b.c`).
-pub fn (v map[string]Value) value(key string) &Value {
-    null := &Value(Null{})
-    key_split := key.split('.')
-    // util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, ' retreiving value at "$key"')
-    if key_split[0] in v.keys() {
-        value := v[key_split[0]] or {
-            return null
-            // TODO return error(@MOD + '.' + @STRUCT + '.' + @FN + ' key "$key" does not exist')
-        }
-        // `match` isn't currently very suitable for these types of sum type constructs...
-        if value is map[string]Value {
-            m := (value as map[string]Value)
-            next_key := key_split[1..].join('.')
-            if next_key == '' {
-                return &value
-            }
-            return m.value(next_key)
-        }
-        return &value
-    }
-    return null
-    // TODO return error(@MOD + '.' + @STRUCT + '.' + @FN + ' key "$key" does not exist')
-}
-
-// exists returns true if the "dotted" `key` path exists in the map.
-pub fn (v map[string]Value) exists(key string) bool {
-    key_split := key.split('.')
-    if key_split[0] in v.keys() {
-        value := v[key_split[0]] or { return false }
-        // `match` isn't currently very suitable for these types of sum type constructs...
-        if value is map[string]Value {
-            m := (value as map[string]Value)
-            next_key := key_split[1..].join('.')
-            if next_key == '' {
-                return true
-            }
-            return m.exists(next_key)
-        }
-        return true
-    }
-    return false
-}
-
 // Comment is the data representation of a TOML comment (`# This is a comment`).
 pub struct Comment {
 pub:
@@ -14,6 +14,21 @@ pub const (
     space_formatting = [token.Kind.whitespace, .tab]
 )
 
+type DottedKey = []string
+
+pub fn (dk DottedKey) str() string {
+    return dk.join('.')
+}
+
+pub fn (a []DottedKey) has(target DottedKey) bool {
+    for dk in a {
+        if dk == target {
+            return true
+        }
+    }
+    return false
+}
+
 // Parser contains the necessary fields for keeping the state of the parsing process.
 pub struct Parser {
 pub:
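For orientation, a small sketch of how the new DottedKey alias behaves. It is internal to the parser module, so this is illustrative only:

    dk := DottedKey(['a', 'b.c'])
    assert dk.str() == 'a.b.c'
    assert [dk].has(DottedKey(['a', 'b.c']))
    assert [dk].has(DottedKey(['a', 'b', 'c'])) == false

Keeping the key as a list of segments (instead of one joined string) is what lets a quoted segment like `b.c` survive without being re-split on its embedded dot.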
@@ -26,10 +41,11 @@ mut:
     tokens []token.Token // To be able to peek more than one token ahead.
     skip_next bool
     // The root map (map is called table in TOML world)
     root_map map[string]ast.Value
-    root_map_key string
+    root_map_key DottedKey
+    explicit_declared []DottedKey
     // Array of Tables state
-    last_aot string
+    last_aot DottedKey
     last_aot_index int
     // Root of the tree
     ast_root &ast.Root = &ast.Root{}
@@ -220,7 +236,7 @@ pub fn (mut p Parser) find_table() ?&map[string]ast.Value {
     unsafe {
         t = &p.root_map
     }
-    if p.root_map_key == '' {
+    if p.root_map_key.len == 0 {
         return t
     }
 
@@ -229,11 +245,10 @@ pub fn (mut p Parser) find_table() ?&map[string]ast.Value {
 
 // sub_table_key returns the logic parts of a dotted key (`a.b.c`) for
 // use with the `find_sub_table` method.
-pub fn (mut p Parser) sub_table_key(key string) (string, string) {
-    mut ks := key.split('.')
-    last := ks.last()
-    ks.delete_last()
-    return ks.join('.'), last
+pub fn (mut p Parser) sub_table_key(key DottedKey) (DottedKey, DottedKey) {
+    last := [key.last()]
+    first := key[..key.len - 1]
+    return first, last
 }
 
 // find_sub_table returns a reference to a map if found in the *root* table given a "dotted" key (`a.b.c`).
@@ -241,9 +256,11 @@ pub fn (mut p Parser) sub_table_key(key string) (string, string) {
 // allocate a new map for the segment. This behavior is needed because you can
 // reference maps by multiple keys "dotted" (separated by "." periods) in TOML documents.
 // See also `find_in_table`.
-pub fn (mut p Parser) find_sub_table(key string) ?&map[string]ast.Value {
-    mut ky := p.root_map_key + '.' + key
-    if p.root_map_key == '' {
+pub fn (mut p Parser) find_sub_table(key DottedKey) ?&map[string]ast.Value {
+    mut ky := DottedKey([]string{})
+    ky << p.root_map_key
+    ky << key
+    if p.root_map_key.len == 0 {
         ky = key
     }
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "$ky" in map ${ptr_str(p.root_map)}')
@@ -251,7 +268,7 @@ pub fn (mut p Parser) find_sub_table(key string) ?&map[string]ast.Value {
     unsafe {
         t = &p.root_map
     }
-    if ky == '' {
+    if ky.len == 0 {
         return t
     }
 
@@ -262,7 +279,7 @@ pub fn (mut p Parser) find_sub_table(key string) ?&map[string]ast.Value {
 // If some segments of the key does not exist in the input map find_in_table will
 // allocate a new map for the segment. This behavior is needed because you can
 // reference maps by multiple keys "dotted" (separated by "." periods) in TOML documents.
-pub fn (mut p Parser) find_in_table(mut table map[string]ast.Value, key string) ?&map[string]ast.Value {
+pub fn (mut p Parser) find_in_table(mut table map[string]ast.Value, key DottedKey) ?&map[string]ast.Value {
     // NOTE This code is the result of much trial and error.
     // I'm still not quite sure *exactly* why it works. All I can leave here is a hope
     // that this kind of minefield someday will be easier in V :)
@@ -271,9 +288,8 @@ pub fn (mut p Parser) find_in_table(mut table map[string]ast.Value, key string)
     unsafe {
         t = &table
     }
-    ks := key.split('.')
     unsafe {
-        for k in ks {
+        for k in key {
             if val := t[k] {
                 util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'found key "$k" in $t.keys()')
                 if val is map[string]ast.Value {
@@ -296,22 +312,23 @@ pub fn (mut p Parser) find_in_table(mut table map[string]ast.Value, key string)
 
 // dotted_key returns a string of the next tokens parsed as
 // sub/nested/path keys (e.g. `a.b.c`). In TOML, this form of key is referred to as a "dotted" key.
-pub fn (mut p Parser) dotted_key() ?string {
-    util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing nested key...')
+pub fn (mut p Parser) dotted_key() ?DottedKey {
+    util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing dotted key...')
+    mut dotted_key := DottedKey([]string{})
     key := p.key() ?
     p.ignore_while_peek(parser.space_formatting)
-    mut text := key.str()
+    dotted_key << key.str()
     for p.peek_tok.kind == .period {
         p.next() ? // .
         p.check(.period) ?
         p.ignore_while(parser.space_formatting)
         next_key := p.key() ?
-        text += '.' + next_key.text
+        dotted_key << next_key.text
         p.ignore_while_peek(parser.space_formatting)
     }
     p.next() ?
-    util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed nested key `$text` now at "$p.tok.kind" "$p.tok.lit"')
-    return text
+    util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed dotted key `$dotted_key` now at "$p.tok.kind" "$p.tok.lit"')
+    return dotted_key
 }
 
 // root_table parses next tokens into the root map of `ast.Value`s.
@@ -356,7 +373,7 @@ pub fn (mut p Parser) root_table() ? {
                 t := p.find_sub_table(sub_table) ?
                 unsafe {
                     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting "$key" = $val.to_json() in table ${ptr_str(t)}')
-                    t[key] = val
+                    t[key.str()] = val
                 }
             } else {
                 p.ignore_while(parser.space_formatting)
@@ -403,6 +420,14 @@ pub fn (mut p Parser) root_table() ? {
                 // Parse `[d.e.f]`
                 p.ignore_while(parser.space_formatting)
                 p.root_map_key = p.dotted_key() ?
+
+                // Disallow redeclaring the key
+                if p.explicit_declared.has(p.root_map_key) {
+                    return error(@MOD + '.' + @STRUCT + '.' + @FN +
+                        ' key `$p.root_map_key` is already explicitly declared. Unexpected redeclaration at "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+                }
+                p.explicit_declared << p.root_map_key
+
                 p.ignore_while(parser.space_formatting)
                 util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting root map key to `$p.root_map_key` at "$p.tok.kind" "$p.tok.lit"')
                 p.expect(.rsbr) ?
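This `explicit_declared` check is what turns an explicit table redeclaration into an error. A hypothetical test sketch of the new behaviour (names are invented for illustration; the parse call mirrors the tests in this change):

    import toml

    fn test_redeclared_table_is_rejected() {
        // `[a]` is declared twice, so parsing should now fail
        if toml_doc := toml.parse('[a]\nb = 1\n[a]\nc = 2') {
            println(toml_doc.to_json())
            assert false
        } else {
            assert true
        }
    }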
@@ -410,7 +435,15 @@ pub fn (mut p Parser) root_table() ? {
             } else {
                 // Parse `[key]`
                 key := p.key() ?
-                p.root_map_key = key.str()
+                p.root_map_key = DottedKey([key.str()])
+
+                // Disallow redeclaring the key
+                if p.explicit_declared.has(p.root_map_key) {
+                    return error(@MOD + '.' + @STRUCT + '.' + @FN +
+                        ' key `$p.root_map_key` is already explicitly declared. Unexpected redeclaration at "$p.tok.kind" "$p.tok.lit" in this (excerpt): "...${p.excerpt()}..."')
+                }
+                p.explicit_declared << p.root_map_key
+
                 util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'setting root map key to `$p.root_map_key` at "$p.tok.kind" "$p.tok.lit"')
                 p.next() ?
                 p.expect(.rsbr) ?
@@ -493,7 +526,7 @@ pub fn (mut p Parser) inline_table(mut tbl map[string]ast.Value) ? {
                 mut t := p.find_in_table(mut tbl, sub_table) ?
                 unsafe {
                     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'inserting @6 "$key" = $val.to_json() into ${ptr_str(t)}')
-                    t[key] = val
+                    t[key.str()] = val
                 }
             } else {
                 p.ignore_while(parser.space_formatting)
@@ -539,25 +572,26 @@ pub fn (mut p Parser) array_of_tables(mut table map[string]ast.Value) ? {
 
     p.ignore_while(parser.all_formatting)
 
-    key_str := key.str()
+    dotted_key := DottedKey([key.str()])
+    dotted_key_str := dotted_key.str()
     unsafe {
-        if val := table[key_str] {
+        if val := table[dotted_key_str] {
             if val is []ast.Value {
-                arr := &(table[key_str] as []ast.Value)
+                arr := &(table[dotted_key_str] as []ast.Value)
                 arr << p.array_of_tables_contents() ?
-                table[key_str] = arr
+                table[dotted_key_str] = arr
             } else {
                 return error(@MOD + '.' + @STRUCT + '.' + @FN +
-                    ' table[$key_str] is not an array. (excerpt): "...${p.excerpt()}..."')
+                    ' table[$dotted_key_str] is not an array. (excerpt): "...${p.excerpt()}..."')
             }
         } else {
-            table[key_str] = p.array_of_tables_contents() ?
+            table[dotted_key_str] = p.array_of_tables_contents() ?
         }
     }
-    p.last_aot = key_str
+    p.last_aot = dotted_key
 
     unsafe {
-        arr := &(table[p.last_aot] as []ast.Value)
+        arr := &(table[p.last_aot.str()] as []ast.Value)
         p.last_aot_index = arr.len - 1
     }
 }
@@ -585,7 +619,7 @@ pub fn (mut p Parser) array_of_tables_contents() ?[]ast.Value {
                 mut t := p.find_in_table(mut tbl, sub_table) ?
                 unsafe {
                     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'inserting @6 "$key" = $val.to_json() into ${ptr_str(t)}')
-                    t[key] = val
+                    t[key.str()] = val
                 }
             } else {
                 key, val := p.key_value() ?
@@ -607,15 +641,17 @@ pub fn (mut p Parser) array_of_tables_contents() ?[]ast.Value {
 pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ? {
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing array of tables of arrays "$p.tok.kind" "$p.tok.lit"')
 
+    mut dotted_key := DottedKey([]string{})
+
     key := p.key() ?
-    mut key_str := key.str()
+    dotted_key << key.str()
     for p.peek_tok.kind == .period {
         p.next() ? // .
         p.check(.period) ?
         next_key := p.key() ?
-        key_str += '.' + next_key.text
+        dotted_key << next_key.text
     }
-    util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed nested key `$key_str` now at "$p.tok.kind" "$p.tok.lit"')
+    util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsed dotted key `$dotted_key` now at "$p.tok.kind" "$p.tok.lit"')
 
     p.next() ?
     p.check(.rsbr) ?
@@ -623,15 +659,13 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ? {
 
     p.ignore_while(parser.all_formatting)
 
-    ks := key_str.split('.')
-
-    if ks.len != 2 {
+    if dotted_key.len != 2 {
         return error(@MOD + '.' + @STRUCT + '.' + @FN +
             ' nested array of tables does not support more than 2 levels. (excerpt): "...${p.excerpt()}..."')
     }
 
-    first := ks[0] // The array that holds the entries
-    last := ks[1] // The key the parsed array data should be added to
+    first := DottedKey([dotted_key[0]]) // The array that holds the entries
+    last := DottedKey([dotted_key[1]]) // The key the parsed array data should be added to
 
     mut t_arr := &[]ast.Value(0)
     mut t_map := ast.Value(ast.Null{})
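For reference, a hypothetical sketch of what the 2-level limit kept above means in practice (test shape only; the rejection comes from the error returned just above):

    import toml

    fn test_three_level_array_of_tables_is_rejected() {
        // `[[a.b.c]]` has three key segments, which the parser above refuses
        if doc := toml.parse('[[a.b.c]]\nd = 1') {
            println(doc.to_json())
            assert false
        } else {
            assert true
        }
    }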
@@ -640,11 +674,11 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ? {
     // NOTE this is starting to get EVEN uglier. TOML is not at all simple at this point...
     if first != p.last_aot {
         // Implicit allocation
-        if p.last_aot == '' {
-            util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'implicit allocation of array for nested key `$key_str`.')
-            table[first] = []ast.Value{}
+        if p.last_aot.len == 0 {
+            util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'implicit allocation of array for dotted key `$dotted_key`.')
+            table[first.str()] = []ast.Value{}
             p.last_aot = first
-            t_arr = &(table[p.last_aot] as []ast.Value)
+            t_arr = &(table[p.last_aot.str()] as []ast.Value)
             t_arr << ast.Value(map[string]ast.Value{})
             p.last_aot_index = t_arr.len - 1
         } else {
@@ -653,7 +687,7 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ? {
         }
     }
 
-    t_arr = &(table[p.last_aot] as []ast.Value)
+    t_arr = &(table[p.last_aot.str()] as []ast.Value)
     t_map = ast.Value(map[string]ast.Value{})
     if p.last_aot_index < t_arr.len {
         t_map = t_arr[p.last_aot_index]
@@ -661,17 +695,17 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ? {
 
     mut t := &(t_map as map[string]ast.Value)
 
-    if val := t[last] {
+    if val := t[last.str()] {
         if val is []ast.Value {
             arr := &(val as []ast.Value)
-            arr << p.double_array_of_tables_contents(key_str) ?
-            t[last] = arr
+            arr << p.double_array_of_tables_contents(dotted_key) ?
+            t[last.str()] = arr
         } else {
             return error(@MOD + '.' + @STRUCT + '.' + @FN +
-                ' t[$last] is not an array. (excerpt): "...${p.excerpt()}..."')
+                ' t[$last.str()] is not an array. (excerpt): "...${p.excerpt()}..."')
         }
     } else {
-        t[last] = p.double_array_of_tables_contents(key_str) ?
+        t[last.str()] = p.double_array_of_tables_contents(dotted_key) ?
     }
     if t_arr.len == 0 {
         t_arr << t
@@ -681,11 +715,11 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ? {
 }
 
 // double_array_of_tables_contents parses next tokens into an array of `ast.Value`s.
-pub fn (mut p Parser) double_array_of_tables_contents(target_key string) ?[]ast.Value {
+pub fn (mut p Parser) double_array_of_tables_contents(target_key DottedKey) ?[]ast.Value {
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing contents from "$p.tok.kind" "$p.tok.lit"')
     mut tbl := map[string]ast.Value{}
 
-    mut implicit_allocation_key := ''
+    mut implicit_allocation_key := DottedKey([]string{})
     mut peeked_over := 0
     mut peek_tok := p.peek_tok
 
@@ -709,21 +743,20 @@ pub fn (mut p Parser) double_array_of_tables_contents(target_key string) ?[]ast.
         match p.tok.kind {
             .bare, .quoted, .boolean, .number {
                 if p.peek_tok.kind == .period {
-                    dotkey := p.dotted_key() ?
+                    mut dotkey := p.dotted_key() ?
                     p.ignore_while(parser.space_formatting)
                     p.check(.assign) ?
                     val := p.value() ?
 
-                    mut implicit := ''
-                    if implicit_allocation_key != '' {
-                        implicit = implicit_allocation_key + '.'
+                    if implicit_allocation_key.len > 0 {
+                        dotkey.insert(0, implicit_allocation_key)
                     }
-                    sub_table, key := p.sub_table_key(implicit + dotkey)
+                    sub_table, key := p.sub_table_key(dotkey)
 
                     mut t := p.find_in_table(mut tbl, sub_table) ?
                     unsafe {
                         util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'inserting @6 "$key" = $val.to_json() into ${ptr_str(t)}')
-                        t[key] = val
+                        t[key.str()] = val
                     }
                 } else {
                     key, val := p.key_value() ?
@@ -732,7 +765,7 @@ pub fn (mut p Parser) double_array_of_tables_contents(target_key string) ?[]ast.
                     unsafe {
                         t = &tbl
                     }
-                    if implicit_allocation_key != '' {
+                    if implicit_allocation_key.len > 0 {
                         t = p.find_in_table(mut tbl, implicit_allocation_key) ?
                     }
                     unsafe {
@@ -755,7 +788,10 @@ pub fn (mut p Parser) double_array_of_tables_contents(target_key string) ?[]ast.
                 // Parse `[d.e.f]`
                 p.ignore_while(parser.space_formatting)
                 dotkey := p.dotted_key() ?
-                implicit_allocation_key = dotkey.all_after(target_key + '.')
+                implicit_allocation_key = dotkey
+                if dotkey.len > 2 {
+                    implicit_allocation_key = dotkey[2..]
+                }
                 p.ignore_while(parser.space_formatting)
                 util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'keys are: dotted `$dotkey`, target `$target_key`, implicit `$implicit_allocation_key` at "$p.tok.kind" "$p.tok.lit"')
                 p.expect(.rsbr) ?
@@ -13,7 +13,6 @@ const (
     invalid_exceptions = [
         // Table
         'table/duplicate-table-array2.toml',
-        'table/duplicate.toml',
         'table/array-implicit.toml',
         'table/injection-2.toml',
         'table/injection-1.toml',
@@ -40,7 +39,7 @@ fn test_burnt_sushi_tomltest() {
            relative = relative.replace('/', '\\')
        }
        if relative !in valid_exceptions {
-           println('OK [$i/$valid_test_files.len] "$valid_test_file"...')
+           println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
            toml_doc := toml.parse_file(valid_test_file) or { panic(err) }
 
            // parsed_json := toml_doc.to_json().replace(' ','')
@@ -51,7 +50,7 @@ fn test_burnt_sushi_tomltest() {
            valid++
        } else {
            e++
-           println('SKIP [$i/$valid_test_files.len] "$valid_test_file" EXCEPTION [$e/$valid_exceptions.len]...')
+           println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" EXCEPTION [$e/$valid_exceptions.len]...')
        }
    }
    println('$valid/$valid_test_files.len TOML files was parsed correctly')
@@ -74,7 +73,7 @@ fn test_burnt_sushi_tomltest() {
            relative = relative.replace('/', '\\')
        }
        if relative !in invalid_exceptions {
-           println('OK [$i/$invalid_test_files.len] "$invalid_test_file"...')
+           println('OK [${i + 1}/$invalid_test_files.len] "$invalid_test_file"...')
            if toml_doc := toml.parse_file(invalid_test_file) {
                content_that_should_have_failed := os.read_file(invalid_test_file) or {
                    panic(err)
@@ -88,7 +87,7 @@ fn test_burnt_sushi_tomltest() {
            invalid++
        } else {
            e++
-           println('SKIP [$i/$invalid_test_files.len] "$invalid_test_file" EXCEPTION [$e/$invalid_exceptions.len]...')
+           println('SKIP [${i + 1}/$invalid_test_files.len] "$invalid_test_file" EXCEPTION [$e/$invalid_exceptions.len]...')
        }
    }
    println('$invalid/$invalid_test_files.len TOML files was parsed correctly')
@@ -0,0 +1,12 @@
+import toml
+
+fn test_quoted_keys() {
+    str_value := 'V rocks!'
+    toml_txt := 'a."b.c" = "V rocks!"'
+    toml_doc := toml.parse(toml_txt) or { panic(err) }
+
+    value := toml_doc.value('a."b.c"')
+    assert value == toml.Any(str_value)
+    assert value as string == str_value
+    assert value.string() == str_value
+}
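The new test exercises a single quoted segment after one bare key. A hypothetical extra case along the same lines, with a longer dotted prefix, could look like this (not part of the change):

    import toml

    fn test_quoted_key_deeper() {
        // the quoted segment keeps its embedded dot under `a.b`
        toml_doc := toml.parse('a.b."c.d" = "V rocks!"') or { panic(err) }
        assert toml_doc.value('a.b."c.d"').string() == 'V rocks!'
    }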
@@ -90,9 +90,31 @@ pub fn (d Doc) to_json() string {
 }
 
 // value queries a value from the TOML document.
+// `key` should be in "dotted" form (`a.b.c`).
+// `key` supports quoted keys like `a."b.c"`.
 pub fn (d Doc) value(key string) Any {
     values := d.ast.table as map[string]ast.Value
-    return d.get_map_value_as_any(values, key)
+    key_split := util.parse_dotted_key(key) or { return Any(Null{}) }
+    return d.value_(values, key_split)
+}
+
+// value_ returns the value found at `key` in the map `values` as `Any` type.
+fn (d Doc) value_(values map[string]ast.Value, key []string) Any {
+    util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, ' getting "${key[0]}"')
+    value := values[key[0]] or {
+        return Any(Null{})
+        // TODO decide this
+        // panic(@MOD + '.' + @STRUCT + '.' + @FN + ' key "$key[0]" does not exist')
+    }
+    // `match` isn't currently very suitable for these types of sum type constructs...
+    if value is map[string]ast.Value {
+        if key.len <= 1 {
+            return d.ast_to_any(value)
+        }
+        m := (value as map[string]ast.Value)
+        return d.value_(m, key[1..])
+    }
+    return d.ast_to_any(value)
 }
 
 // ast_to_any_value converts `from` ast.Value to toml.Any value.
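A short usage sketch of `Doc.value` after this change; the TOML input is illustrative, and a missing key falls back to the `Any(Null{})` path shown above:

    import toml

    fn main() {
        doc := toml.parse('[site]\nname = "V"') or { panic(err) }
        // plain dotted lookups behave as before, now routed through util.parse_dotted_key
        assert doc.value('site.name').string() == 'V'
        // a quoted final segment resolves to the same entry
        assert doc.value('site."name"').string() == 'V'
    }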
@@ -177,29 +199,3 @@ fn (d Doc) ast_to_any(value ast.Value) Any {
     // panic(@MOD + '.' + @STRUCT + '.' + @FN + ' can\'t convert "$value"')
     // return Any('')
 }
-
-// get_map_value_as_any returns the value found at `key` in the map `values` as `Any` type.
-fn (d Doc) get_map_value_as_any(values map[string]ast.Value, key string) Any {
-    key_split := key.split('.')
-    util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, ' getting "${key_split[0]}"')
-    if key_split[0] in values.keys() {
-        value := values[key_split[0]] or {
-            return Any(Null{})
-            // TODO decide this
-            // panic(@MOD + '.' + @STRUCT + '.' + @FN + ' key "$key" does not exist')
-        }
-        // `match` isn't currently very suitable for these types of sum type constructs...
-        if value is map[string]ast.Value {
-            m := (value as map[string]ast.Value)
-            next_key := key_split[1..].join('.')
-            if next_key == '' {
-                return d.ast_to_any(value)
-            }
-            return d.get_map_value_as_any(m, next_key)
-        }
-        return d.ast_to_any(value)
-    }
-    return Any(Null{})
-    // TODO decide this
-    // panic(@MOD + '.' + @STRUCT + '.' + @FN + ' key "$key" does not exist')
-}
@@ -25,3 +25,44 @@ pub fn is_illegal_ascii_control_character(byte_char byte) bool {
 pub fn printdbg(id string, message string) {
     eprintln(id + ' ' + message)
 }
+
+// parse_dotted_key converts `key` string to an array of strings.
+// parse_dotted_key preserves strings delimited by both `"` and `'`.
+pub fn parse_dotted_key(key string) ?[]string {
+    mut out := []string{}
+    mut buf := ''
+    mut in_string := false
+    mut delim := byte(` `)
+    for ch in key {
+        if ch in [`"`, `'`] {
+            if !in_string {
+                delim = ch
+            }
+            in_string = !in_string && ch == delim
+            if !in_string {
+                if buf != '' && buf != ' ' {
+                    out << buf
+                }
+                buf = ''
+                delim = ` `
+            }
+            continue
+        }
+        buf += ch.ascii_str()
+        if !in_string && ch == `.` {
+            if buf != '' && buf != ' ' {
+                out << buf[..buf.len - 1]
+            }
+            buf = ''
+            continue
+        }
+    }
+    if buf != '' && buf != ' ' {
+        out << buf
+    }
+    if in_string {
+        return error(@FN +
+            ': could not parse key, missing closing string delimiter `$delim.ascii_str()`')
+    }
+    return out
+}
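A sketch of what the new helper produces for a few inputs, assuming it is imported as `toml.util` the same way the other files in this change do:

    import toml.util

    fn main() {
        // quoted segments keep their dots; the surrounding quotes are stripped
        k1 := util.parse_dotted_key('a."b.c"') or { panic(err) }
        assert k1 == ['a', 'b.c']
        k2 := util.parse_dotted_key('tree.trunk.branch') or { panic(err) }
        assert k2 == ['tree', 'trunk', 'branch']
        // an unterminated quote is reported as an error
        if bad := util.parse_dotted_key('a."b.c') {
            println(bad)
            assert false
        }
    }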