toml: rename ast.Node -> ast.Value (#11974)
parent 80c15607da
commit 13b2aa701c
@@ -11,7 +11,7 @@ pub struct Root {
 pub:
     input input.Config // User input configuration
 pub mut:
-    table Node
+    table Value
     // errors []errors.Error // all the checker errors in the file
 }

@@ -13,11 +13,19 @@ pub fn (k Key) str() string {
     return k.text
 }
 
-// Node is a sumtype representing all possible value types
+// Value is a sumtype representing all possible value types
 // found in a TOML document.
-pub type Node = Bool | Date | DateTime | Null | Number | Quoted | Time | []Node | map[string]Node
+pub type Value = Bool
+    | Date
+    | DateTime
+    | Null
+    | Number
+    | Quoted
+    | Time
+    | []Value
+    | map[string]Value
 
-pub fn (v Node) to_json() string {
+pub fn (v Value) to_json() string {
     match v {
         Quoted, Date, DateTime, Time {
             return '"$v.text"'
@@ -25,7 +33,7 @@ pub fn (v Node) to_json() string {
         Bool, Null, Number {
             return v.text
         }
-        map[string]Node {
+        map[string]Value {
             mut str := '{'
             for key, val in v {
                 str += ' "$key": $val.to_json(),'
@@ -34,7 +42,7 @@ pub fn (v Node) to_json() string {
             str += ' }'
             return str
         }
-        []Node {
+        []Value {
             mut str := '['
             for val in v {
                 str += ' $val.to_json(),'
@@ -56,8 +64,8 @@ pub fn (dtt DateTimeType) str() string {
 
 // value queries a value from the map.
 // `key` should be in "dotted" form e.g.: `"a.b.c.d"`
-pub fn (v map[string]Node) value(key string) &Node {
-    null := &Node(Null{})
+pub fn (v map[string]Value) value(key string) &Value {
+    null := &Value(Null{})
     key_split := key.split('.')
     // util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, ' retreiving value at "$key"')
     if key_split[0] in v.keys() {
@@ -66,8 +74,8 @@ pub fn (v map[string]Node) value(key string) &Node {
         // TODO return error(@MOD + '.' + @STRUCT + '.' + @FN + ' key "$key" does not exist')
     }
     // `match` isn't currently very suitable for these types of sum type constructs...
-    if value is map[string]Node {
-        m := (value as map[string]Node)
+    if value is map[string]Value {
+        m := (value as map[string]Value)
         next_key := key_split[1..].join('.')
         if next_key == '' {
             return &value
@@ -81,13 +89,13 @@ pub fn (v map[string]Node) value(key string) &Node {
 }
 
 // value queries a value from the map.
-pub fn (v map[string]Node) exists(key string) bool {
+pub fn (v map[string]Value) exists(key string) bool {
     key_split := key.split('.')
     if key_split[0] in v.keys() {
         value := v[key_split[0]] or { return false }
         // `match` isn't currently very suitable for these types of sum type constructs...
-        if value is map[string]Node {
-            m := (value as map[string]Node)
+        if value is map[string]Value {
+            m := (value as map[string]Value)
             next_key := key_split[1..].join('.')
             if next_key == '' {
                 return true
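
The rename is mechanical, but the sumtype is what consuming code matches on. A minimal sketch (not part of this commit; the `describe` helper is invented for illustration) of matching on the renamed `ast.Value`:

import toml.ast

// Sketch only: summarize any ast.Value by matching on the renamed sumtype.
fn describe(v ast.Value) string {
    return match v {
        ast.Quoted { 'string "$v.text"' }
        ast.Number { 'number $v.text' }
        ast.Bool { 'bool $v.text' }
        ast.Null { 'null' }
        ast.Date, ast.Time, ast.DateTime { 'date/time $v.text' }
        []ast.Value { 'array of $v.len values' }
        map[string]ast.Value { 'table with $v.len keys' }
    }
}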

@@ -2,12 +2,12 @@ module walker
 
 import toml.ast
 
-// Visitor defines a visit method which is invoked by the walker in each node it encounters.
+// Visitor defines a visit method which is invoked by the walker in each Value node it encounters.
 pub interface Visitor {
-    visit(node &ast.Node) ?
+    visit(value &ast.Value) ?
 }
 
-pub type InspectorFn = fn (node &ast.Node, data voidptr) ?
+pub type InspectorFn = fn (value &ast.Value, data voidptr) ?
 
 struct Inspector {
     inspector_callback InspectorFn
@@ -15,23 +15,23 @@ mut:
     data voidptr
 }
 
-pub fn (i &Inspector) visit(node &ast.Node) ? {
-    i.inspector_callback(node, i.data) or { return err }
+pub fn (i &Inspector) visit(value &ast.Value) ? {
+    i.inspector_callback(value, i.data) or { return err }
 }
 
-// inspect traverses and checks the AST node on a depth-first order and based on the data given
-pub fn inspect(node &ast.Node, data voidptr, inspector_callback InspectorFn) ? {
-    walk(Inspector{inspector_callback, data}, node) ?
+// inspect traverses and checks the AST Value node on a depth-first order and based on the data given
+pub fn inspect(value &ast.Value, data voidptr, inspector_callback InspectorFn) ? {
+    walk(Inspector{inspector_callback, data}, value) ?
 }
 
 // walk traverses the AST using the given visitor
-pub fn walk(visitor Visitor, node &ast.Node) ? {
-    if node is map[string]ast.Node {
-        n := node as map[string]ast.Node
-        for _, nn in n {
-            walk(visitor, &nn) ?
+pub fn walk(visitor Visitor, value &ast.Value) ? {
+    if value is map[string]ast.Value {
+        value_map := value as map[string]ast.Value
+        for _, val in value_map {
+            walk(visitor, &val) ?
         }
     } else {
-        visitor.visit(node) ?
+        visitor.visit(value) ?
     }
 }
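
For context, any type with a matching `visit` method satisfies the renamed interface. A hedged sketch of a custom visitor (the `QuotedPrinter` struct below is invented, not part of the module); it would be passed to `walker.walk` together with a parsed value:

import toml.ast

// Hypothetical visitor that prints every quoted string the walker reaches.
struct QuotedPrinter {}

fn (q QuotedPrinter) visit(value &ast.Value) ? {
    if value is ast.Quoted {
        println('quoted: "$value.text"')
    }
}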

@@ -9,22 +9,22 @@ import toml.ast.walker
 import toml.token
 import toml.scanner
 
-// Checker checks a tree of TOML `ast.Node`'s for common errors.
+// Checker checks a tree of TOML `ast.Value`'s for common errors.
 pub struct Checker {
     scanner &scanner.Scanner
 }
 
-pub fn (c Checker) check(n &ast.Node) ? {
+pub fn (c Checker) check(n &ast.Value) ? {
     walker.walk(c, n) ?
 }
 
-fn (c Checker) visit(node &ast.Node) ? {
-    match node {
+fn (c Checker) visit(value &ast.Value) ? {
+    match value {
         ast.Number {
-            c.check_number(node) ?
+            c.check_number(value) ?
         }
         ast.Bool {
-            c.check_boolean(node) ?
+            c.check_boolean(value) ?
         }
         else {
             // TODO add more checks to make BurntSushi/toml-test invalid TOML pass
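
As the TODO above suggests, additional checks would plug into the same match over the renamed sumtype. A rough sketch of how that could look (the `check_quoted` helper is hypothetical, not part of this commit):

fn (c Checker) visit(value &ast.Value) ? {
    match value {
        ast.Number {
            c.check_number(value) ?
        }
        ast.Bool {
            c.check_boolean(value) ?
        }
        ast.Quoted {
            // Hypothetical helper that would validate escapes in the quoted text.
            c.check_quoted(value) ?
        }
        else {}
    }
}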

@@ -22,7 +22,7 @@ mut:
     peek_tok token.Token
     skip_next bool
     // The root map (map is called table in TOML world)
-    root_map map[string]ast.Node
+    root_map map[string]ast.Value
     root_map_key string
     // Array of Tables state
    last_aot string
@@ -49,11 +49,11 @@ pub fn new_parser(config Config) Parser {
 
 // init initializes the parser.
 pub fn (mut p Parser) init() ? {
-    p.root_map = map[string]ast.Node{}
+    p.root_map = map[string]ast.Value{}
     p.next() ?
 }
 
-// run_checker validates the parsed `ast.Node` nodes in the
+// run_checker validates the parsed `ast.Value` nodes in the
 // the generated AST.
 fn (mut p Parser) run_checker() ? {
     if p.config.run_checks {
@@ -120,9 +120,9 @@ fn (mut p Parser) expect(expected_token token.Kind) ? {
 // If some segments of the key does not exist in the root table find_table will
 // allocate a new map for each segment. This behavior is needed because you can
 // reference maps by multiple keys "dotted" (separated by "." periods) in TOML documents.
-pub fn (mut p Parser) find_table() ?&map[string]ast.Node {
+pub fn (mut p Parser) find_table() ?&map[string]ast.Value {
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "$p.root_map_key" in map ${ptr_str(p.root_map)}')
-    mut t := &map[string]ast.Node{}
+    mut t := &map[string]ast.Value{}
     unsafe {
         t = &p.root_map
     }
@@ -144,13 +144,13 @@ pub fn (mut p Parser) sub_table_key(key string) (string, string) {
 // If some segments of the key does not exist in the input map find_in_table will
 // allocate a new map for the segment. This behavior is needed because you can
 // reference maps by multiple keys "dotted" (separated by "." periods) in TOML documents.
-pub fn (mut p Parser) find_sub_table(key string) ?&map[string]ast.Node {
+pub fn (mut p Parser) find_sub_table(key string) ?&map[string]ast.Value {
     mut ky := p.root_map_key + '.' + key
     if p.root_map_key == '' {
         ky = key
     }
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "$ky" in map ${ptr_str(p.root_map)}')
-    mut t := &map[string]ast.Node{}
+    mut t := &map[string]ast.Value{}
     unsafe {
         t = &p.root_map
     }
@@ -165,12 +165,12 @@ pub fn (mut p Parser) find_sub_table(key string) ?&map[string]ast.Node {
 // If some segments of the key does not exist in the input map find_in_table will
 // allocate a new map for the segment. This behavior is needed because you can
 // reference maps by multiple keys "dotted" (separated by "." periods) in TOML documents.
-pub fn (mut p Parser) find_in_table(mut table map[string]ast.Node, key string) ?&map[string]ast.Node {
+pub fn (mut p Parser) find_in_table(mut table map[string]ast.Value, key string) ?&map[string]ast.Value {
     // NOTE This code is the result of much trial and error.
     // I'm still not quite sure *exactly* why it works. All I can leave here is a hope
     // that this kind of minefield someday will be easier in V :)
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'locating "$key" in map ${ptr_str(table)}')
-    mut t := &map[string]ast.Node{}
+    mut t := &map[string]ast.Value{}
     unsafe {
         t = &table
     }
@@ -184,9 +184,9 @@ pub fn (mut p Parser) find_in_table(mut table map[string]ast.Node, key string) ?
                 ' this should never happen. Key "$k" was checked before access')
         }
         {
-            if val is map[string]ast.Node {
+            if val is map[string]ast.Value {
                 // unsafe {
-                t = &(t[k] as map[string]ast.Node)
+                t = &(t[k] as map[string]ast.Value)
                 //}
             } else {
                 return error(@MOD + '.' + @STRUCT + '.' + @FN + ' "$k" is not a map')
@@ -195,8 +195,8 @@ pub fn (mut p Parser) find_in_table(mut table map[string]ast.Node, key string) ?
        } else {
            util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'no key "$k" found, allocating new map "$k" in map ${ptr_str(t)}"')
            // unsafe {
-           t[k] = map[string]ast.Node{}
-           t = &(t[k] as map[string]ast.Node)
+           t[k] = map[string]ast.Value{}
+           t = &(t[k] as map[string]ast.Value)
            util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'allocated new map ${ptr_str(t)}"')
            //}
        }
@@ -221,7 +221,7 @@ pub fn (mut p Parser) sub_key() ?string {
     return text
 }
 
-// root_table parses next tokens into the root map of `ast.Node`s.
+// root_table parses next tokens into the root map of `ast.Value`s.
 // The V `map` type is corresponding to a "table" in TOML.
 pub fn (mut p Parser) root_table() ? {
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing root table...')
@@ -306,9 +306,9 @@ fn (p Parser) excerpt() string {
     return p.scanner.excerpt(p.tok.pos, 10)
 }
 
-// inline_table parses next tokens into a map of `ast.Node`s.
+// inline_table parses next tokens into a map of `ast.Value`s.
 // The V map type is corresponding to a "table" in TOML.
-pub fn (mut p Parser) inline_table(mut tbl map[string]ast.Node) ? {
+pub fn (mut p Parser) inline_table(mut tbl map[string]ast.Value) ? {
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing inline table into ${ptr_str(tbl)}...')
 
     for p.tok.kind != .eof {
@@ -371,8 +371,8 @@ pub fn (mut p Parser) inline_table(mut tbl map[string]ast.Node) ? {
     }
 }
 
-// array_of_tables parses next tokens into an array of `ast.Node`s.
-pub fn (mut p Parser) array_of_tables(mut table map[string]ast.Node) ? {
+// array_of_tables parses next tokens into an array of `ast.Value`s.
+pub fn (mut p Parser) array_of_tables(mut table map[string]ast.Value) ? {
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing array of tables "$p.tok.kind" "$p.tok.lit"')
     // NOTE this is starting to get ugly. TOML isn't simple at this point
     p.check(.lsbr) ? // '[' bracket
@@ -396,8 +396,8 @@ pub fn (mut p Parser) array_of_tables(mut table map[string]ast.Node) ? {
                 ' this should never happen. Key "$key_str" was checked before access')
         }
         {
-            if val is []ast.Node {
-                arr := &(table[key_str] as []ast.Node)
+            if val is []ast.Value {
+                arr := &(table[key_str] as []ast.Value)
                 arr << p.double_bracket_array() ?
                 table[key_str] = arr
             } else {
@@ -413,8 +413,8 @@ pub fn (mut p Parser) array_of_tables(mut table map[string]ast.Node) ? {
     p.last_aot_index = 0
 }
 
-// double_array_of_tables parses next tokens into an array of tables of arrays of `ast.Node`s...
-pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Node) ? {
+// double_array_of_tables parses next tokens into an array of tables of arrays of `ast.Value`s...
+pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Value) ? {
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing array of tables of arrays "$p.tok.kind" "$p.tok.lit"')
 
     key := p.key() ?
@@ -443,16 +443,16 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Node) ? {
     unsafe {
         // NOTE this is starting to get EVEN uglier. TOML is not at all simple at this point...
         if p.last_aot != first {
-            table[first] = []ast.Node{}
+            table[first] = []ast.Value{}
             p.last_aot = first
-            mut t_arr := &(table[p.last_aot] as []ast.Node)
-            t_arr << map[string]ast.Node{}
+            mut t_arr := &(table[p.last_aot] as []ast.Value)
+            t_arr << map[string]ast.Value{}
             p.last_aot_index = 0
         }
 
-        mut t_arr := &(table[p.last_aot] as []ast.Node)
+        mut t_arr := &(table[p.last_aot] as []ast.Value)
         mut t_map := t_arr[p.last_aot_index]
-        mut t := &(t_map as map[string]ast.Node)
+        mut t := &(t_map as map[string]ast.Value)
 
         if last in t.keys() {
             if val := t[last] or {
@@ -460,8 +460,8 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Node) ? {
                     ' this should never happen. Key "$last" was checked before access')
             }
             {
-                if val is []ast.Node {
-                    arr := &(val as []ast.Node)
+                if val is []ast.Value {
+                    arr := &(val as []ast.Value)
                     arr << p.double_bracket_array() ?
                     t[last] = arr
                 } else {
@@ -475,11 +475,11 @@ pub fn (mut p Parser) double_array_of_tables(mut table map[string]ast.Node) ? {
     }
 }
 
-// array parses next tokens into an array of `ast.Node`s.
-pub fn (mut p Parser) double_bracket_array() ?[]ast.Node {
-    mut arr := []ast.Node{}
+// array parses next tokens into an array of `ast.Value`s.
+pub fn (mut p Parser) double_bracket_array() ?[]ast.Value {
+    mut arr := []ast.Value{}
     for p.tok.kind in [.bare, .quoted, .boolean, .number] && p.peek_tok.kind == .assign {
-        mut tbl := map[string]ast.Node{}
+        mut tbl := map[string]ast.Value{}
         key, val := p.key_value() ?
         tbl[key.str()] = val
         arr << tbl
@@ -488,17 +488,17 @@ pub fn (mut p Parser) double_bracket_array() ?[]ast.Node {
     return arr
 }
 
-// array parses next tokens into an array of `ast.Node`s.
-pub fn (mut p Parser) array() ?[]ast.Node {
+// array parses next tokens into an array of `ast.Value`s.
+pub fn (mut p Parser) array() ?[]ast.Value {
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing array...')
-    mut arr := []ast.Node{}
+    mut arr := []ast.Value{}
     p.expect(.lsbr) ? // '[' bracket
     for p.tok.kind != .eof {
         p.next() ?
         util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "$p.tok.kind" "$p.tok.lit"')
         match p.tok.kind {
             .boolean {
-                arr << ast.Node(p.boolean() ?)
+                arr << ast.Value(p.boolean() ?)
             }
             .comma {
                 util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comma array value seperator "$p.tok.lit"')
@@ -514,20 +514,20 @@ pub fn (mut p Parser) array() ?[]ast.Node {
                 util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'skipping comment "$c.text"')
             }
             .lcbr {
-                mut t := map[string]ast.Node{}
+                mut t := map[string]ast.Value{}
                 p.inline_table(mut t) ?
-                ast.Node(t)
+                ast.Value(t)
             }
             .number {
                 val := p.number_or_date() ?
                 arr << val
             }
             .quoted {
-                arr << ast.Node(p.quoted())
+                arr << ast.Value(p.quoted())
             }
             .lsbr {
                 util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing array in array "$p.tok.kind" "$p.tok.lit"')
-                arr << ast.Node(p.array() ?)
+                arr << ast.Value(p.array() ?)
             }
             .rsbr {
                 break
@@ -603,9 +603,9 @@ pub fn (mut p Parser) key() ?ast.Key {
     return key
 }
 
-// key_value parse and returns a pair `ast.Key` and `ast.Node` type.
+// key_value parse and returns a pair `ast.Key` and `ast.Value` type.
 // see also `key()` and `value()`
-pub fn (mut p Parser) key_value() ?(ast.Key, ast.Node) {
+pub fn (mut p Parser) key_value() ?(ast.Key, ast.Value) {
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing key value pair...')
     key := p.key() ?
     p.next() ?
@@ -615,40 +615,40 @@ pub fn (mut p Parser) key_value() ?(ast.Key, ast.Node) {
     return key, value
 }
 
-// value parse and returns an `ast.Node` type.
+// value parse and returns an `ast.Value` type.
 // values are the token(s) appearing after an assignment operator (=).
-pub fn (mut p Parser) value() ?ast.Node {
+pub fn (mut p Parser) value() ?ast.Value {
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing value...')
     // println('parsed comment "${p.tok.lit}"')
 
-    mut value := ast.Node(ast.Null{})
+    mut value := ast.Value(ast.Null{})
 
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'parsing token "$p.tok.kind" "$p.tok.lit"')
-    // mut value := ast.Node{}
+    // mut value := ast.Value{}
     if p.tok.kind == .number {
         number_or_date := p.number_or_date() ?
         value = number_or_date
     } else {
         value = match p.tok.kind {
             .quoted {
-                ast.Node(p.quoted())
+                ast.Value(p.quoted())
             }
             .boolean {
-                ast.Node(p.boolean() ?)
+                ast.Value(p.boolean() ?)
             }
             .lsbr {
-                ast.Node(p.array() ?)
+                ast.Value(p.array() ?)
             }
             .lcbr {
-                mut t := map[string]ast.Node{}
+                mut t := map[string]ast.Value{}
                 p.inline_table(mut t) ?
-                // table[key_str] = ast.Node(t)
-                ast.Node(t)
+                // table[key_str] = ast.Value(t)
+                ast.Value(t)
             }
             else {
                 error(@MOD + '.' + @STRUCT + '.' + @FN +
                     ' value expected .boolean, .quoted, .lsbr, .lcbr or .number got "$p.tok.kind" "$p.tok.lit"')
-                ast.Node(ast.Null{}) // TODO workaround bug
+                ast.Value(ast.Null{}) // TODO workaround bug
             }
         }
     }
@@ -656,25 +656,25 @@ pub fn (mut p Parser) value() ?ast.Node {
     return value
 }
 
-// number_or_date parse and returns an `ast.Node` type as
+// number_or_date parse and returns an `ast.Value` type as
 // one of [`ast.Date`, `ast.Time`, `ast.DateTime`, `ast.Number`]
-pub fn (mut p Parser) number_or_date() ?ast.Node {
+pub fn (mut p Parser) number_or_date() ?ast.Value {
     // Handle Date/Time
     if p.peek_tok.kind == .minus || p.peek_tok.kind == .colon {
         date_time_type := p.date_time() ?
         match date_time_type {
             ast.Date {
-                return ast.Node(date_time_type as ast.Date)
+                return ast.Value(date_time_type as ast.Date)
             }
             ast.Time {
-                return ast.Node(date_time_type as ast.Time)
+                return ast.Value(date_time_type as ast.Time)
             }
             ast.DateTime {
-                return ast.Node(date_time_type as ast.DateTime)
+                return ast.Value(date_time_type as ast.DateTime)
             }
         }
     }
-    return ast.Node(p.number())
+    return ast.Value(p.number())
 }
 
 // bare parse and returns an `ast.Bare` type.
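
Most of the parser changes are the explicit sum type casts, `ast.Node(...)` becoming `ast.Value(...)`. A tiny standalone sketch of the same V idiom, with invented `Str`/`Num`/`Val` types that are not part of the module:

// Illustrative only: casting concrete structs into a sum type, as the parser
// does with ast.Value(p.quoted()), ast.Value(t) and so on.
struct Str {
    text string
}

struct Num {
    text string
}

type Val = Num | Str

fn main() {
    mut vals := []Val{}
    vals << Val(Str{'hello'}) // the explicit cast selects the sum type variant
    vals << Val(Num{'42'})
    for v in vals {
        match v {
            Str { println('string: $v.text') }
            Num { println('number: $v.text') }
        }
    }
}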

@@ -102,13 +102,13 @@ pub fn (d Doc) to_json() string {
 
 // value queries a value from the TOML document.
 pub fn (d Doc) value(key string) Any {
-    values := d.ast.table as map[string]ast.Node
+    values := d.ast.table as map[string]ast.Value
     // any_values := d.ast_to_any(values) as map[string]Any
     return d.get_map_value_as_any(values, key)
 }
 
-// ast_to_any_value converts `from` ast.Node to toml.Any value.
-fn (d Doc) ast_to_any(value ast.Node) Any {
+// ast_to_any_value converts `from` ast.Value to toml.Any value.
+fn (d Doc) ast_to_any(value ast.Value) Any {
     // `match` isn't currently very suitable for further unwrapping sumtypes in the if's...
     if value is ast.Date || value is ast.Time || value is ast.DateTime {
         mut tim := time.Time{}
@@ -162,8 +162,8 @@ fn (d Doc) ast_to_any(value ast.Node) Any {
             }
             return Any(false)
         }
-        map[string]ast.Node {
-            m := (value as map[string]ast.Node)
+        map[string]ast.Value {
+            m := (value as map[string]ast.Value)
             mut am := map[string]Any{}
             for k, v in m {
                 am[k] = d.ast_to_any(v)
@@ -171,8 +171,8 @@ fn (d Doc) ast_to_any(value ast.Node) Any {
             return am
             // return d.get_map_value(m, key_split[1..].join('.'))
         }
-        []ast.Node {
-            a := (value as []ast.Node)
+        []ast.Value {
+            a := (value as []ast.Value)
             mut aa := []Any{}
             for val in a {
                 aa << d.ast_to_any(val)
@@ -191,7 +191,7 @@ fn (d Doc) ast_to_any(value ast.Node) Any {
 }
 
 // get_map_value_as_any returns the value found at `key` in the map `values` as `Any` type.
-fn (d Doc) get_map_value_as_any(values map[string]ast.Node, key string) Any {
+fn (d Doc) get_map_value_as_any(values map[string]ast.Value, key string) Any {
     key_split := key.split('.')
     util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, ' getting "${key_split[0]}"')
     if key_split[0] in values.keys() {
@@ -201,8 +201,8 @@ fn (d Doc) get_map_value_as_any(values map[string]ast.Node, key string) Any {
         // panic(@MOD + '.' + @STRUCT + '.' + @FN + ' key "$key" does not exist')
     }
     // `match` isn't currently very suitable for these types of sum type constructs...
-    if value is map[string]ast.Node {
-        m := (value as map[string]ast.Node)
+    if value is map[string]ast.Value {
+        m := (value as map[string]ast.Value)
         next_key := key_split[1..].join('.')
         if next_key == '' {
             return d.ast_to_any(value)
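
None of this changes the public `toml.Doc` surface: `value()` still takes a dotted key and returns `Any`. A hedged usage sketch; the exact name of the parse entry point may differ between versions of the module, so treat `toml.parse` as an assumption:

import toml

fn main() {
    toml_text := '
title = "TOML example"

[server]
host = "127.0.0.1"
port = 8080
'
    // Assumption: a parse function that takes TOML source text and returns a Doc.
    doc := toml.parse(toml_text) or { panic(err) }
    // Dotted-key query backed by the renamed map[string]ast.Value table.
    println(doc.value('server.port'))
}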