toml: fix a few string value tests (#12493)

pull/12496/head^2
Larpon 2021-11-17 19:54:48 +01:00 committed by GitHub
parent 2733319879
commit 2f5fae06ee
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 59 additions and 8 deletions

@@ -1,6 +1,7 @@
 import os
 import toml
 import toml.ast
+import toml.scanner
 import x.json2
 import strconv
@@ -17,12 +18,8 @@ const (
 	valid_value_exceptions = [
 		// String
-		'string/double-quote-escape.toml',
-		'string/unicode-escape.toml',
-		'string/raw-multiline.toml',
+		'string/raw-multiline.toml', // This test is not correct. Our parser *correctly* includes the newline at the start of the raw multiline.
 		'string/escapes.toml',
-		'string/escaped-escape.toml',
-		'string/nl.toml',
 		'string/escape-tricky.toml',
 		'string/multiline.toml',
 		// Integer
@@ -153,10 +150,12 @@ fn test_burnt_sushi_tomltest() {
 			os.write_file(bs_toml_json_path, bs_json) or { panic(err) }
 			v_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', v_toml_json_path]) or {
-				panic(err)
+				contents := os.read_file(v_toml_json_path) or { panic(err) }
+				panic(err.msg + '\n$contents')
 			}
 			bs_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', bs_toml_json_path]) or {
-				panic(err)
+				contents := os.read_file(v_toml_json_path) or { panic(err) }
+				panic(err.msg + '\n$contents')
 			}
 			assert bs_normalized_json == v_normalized_json
@ -216,7 +215,13 @@ fn test_burnt_sushi_tomltest() {
fn to_burntsushi(value ast.Value) string { fn to_burntsushi(value ast.Value) string {
match value { match value {
ast.Quoted { ast.Quoted {
json_text := json2.Any(value.text).json_str() mut json_text := ''
if value.quote == `"` {
json_text = toml_to_json_escapes(value) or { '<error>' }
} else {
json_text = json2.Any(value.text).json_str()
}
return '{ "type": "string", "value": "$json_text" }' return '{ "type": "string", "value": "$json_text" }'
} }
ast.DateTime { ast.DateTime {
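
(Illustrative example, not part of the diff: for a basic string such as s = "say \"hi\"" the BurntSushi toml-test harness expects the typed JSON { "type": "string", "value": "say \"hi\"" }, so double-quoted basic strings now get their escaped value text from the toml_to_json_escapes helper added below, while single-quoted literal strings keep the json2.Any(value.text).json_str() encoding.)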
@@ -276,3 +281,49 @@ fn to_burntsushi(value ast.Value) string {
 	}
 	return '<error>'
 }
+
+// toml_to_json_escapes is a utility function for normalizing
+// a TOML basic string to a JSON string.
+fn toml_to_json_escapes(q ast.Quoted) ?string {
+	mut s := scanner.new_simple(q.text) ?
+	mut r := ''
+	for {
+		ch := s.next()
+		if ch == scanner.end_of_text {
+			break
+		}
+		ch_byte := byte(ch)
+		if ch == `"` {
+			// escape a bare `"` for the JSON output, unless it is already preceded by a backslash
+			if byte(s.peek(-1)) != `\\` {
+				r += '\\'
+			}
+		}
+		if ch == `\\` {
+			next_ch := byte(s.at())
+			escape := ch_byte.ascii_str() + next_ch.ascii_str()
+			if escape.to_lower() == '\\u' {
+				// decode a \u escape: collect the hex digits, zero-pad them,
+				// and convert the resulting code point to a rune
+				mut b := s.next()
+				mut unicode_point := ''
+				for {
+					b = s.next()
+					if b != ` ` && b != scanner.end_of_text {
+						unicode_point += byte(b).ascii_str()
+					} else {
+						break
+					}
+				}
+				if unicode_point.len < 8 {
+					unicode_point = '0'.repeat(8 - unicode_point.len) + unicode_point
+				}
+				rn := rune(strconv.parse_int(unicode_point, 16, 0) ?)
+				r += '$rn'
+				continue
+			}
+		}
+		r += ch_byte.ascii_str()
+	}
+	return r
+}
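
A rough, self-contained sketch (not part of the commit; the helper name and inputs are hypothetical) of the two escape rules the new function applies, using only the standard library:

import strconv

// escape_quotes_sketch mirrors the first rule above: a bare '"' in the
// string value has to appear as '\"' in the generated JSON text.
fn escape_quotes_sketch(s string) string {
	return s.replace('"', '\\"')
}

fn main() {
	// second rule: a \uXXXX escape is decoded by parsing its hex digits
	// and casting the resulting code point to a rune
	code := strconv.parse_int('0041', 16, 0) or { panic(err) }
	decoded := rune(code)
	println(escape_quotes_sketch('say "hi"') + ' $decoded') // prints: say \"hi\" A
}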