toml: add `alexcrichton/toml-rs` test suite to CI (#12624)
parent d1b0ce9e0c
commit 5c8e626912
@@ -17,6 +17,8 @@ jobs:
      TOML_BS_TESTS_PINNED_COMMIT: eb989e5
      TOML_IARNA_TESTS_PATH: vlib/toml/tests/testdata/iarna/toml-test
      TOML_IARNA_TESTS_PINNED_COMMIT: 1880b1a
      TOML_ALEXCRICHTON_TESTS_PATH: vlib/toml/tests/testdata/alexcrichton/toml-test
      TOML_ALEXCRICHTON_TESTS_PINNED_COMMIT: 499e8c4
    steps:

      - uses: actions/checkout@v2
@@ -30,6 +32,7 @@ jobs:
      - name: Run local TOML tests
        run: ./v test vlib/toml

      # Tests found at https://github.com/BurntSushi/toml-test
      - name: Clone BurntSushi/toml-test
        run: |
          git clone https://github.com/BurntSushi/toml-test.git $TOML_BS_TESTS_PATH
@@ -39,12 +42,14 @@ jobs:
      - name: Run BurntSushi TOML tests
        run: ./v vlib/toml/tests/burntsushi.toml-test_test.v

      # Tests found at gist
      - name: Get large_toml_file_test.toml
        run: wget https://gist.githubusercontent.com/Larpon/89b0e3d94c6903851ff15559e5df7a05/raw/62a1f87a4e37bf157f2e0bfb32d85d840c98e422/large_toml_file_test.toml -O vlib/toml/tests/testdata/large_toml_file_test.toml

      - name: Run large TOML file tests
        run: ./v vlib/toml/tests/large_toml_file_test.v

      # Tests found at https://github.com/iarna/toml-spec-tests
      - name: Clone iarna/toml-spec-tests
        run: |
          git clone https://github.com/iarna/toml-spec-tests.git $TOML_IARNA_TESTS_PATH
@@ -53,3 +58,13 @@ jobs:

      - name: Run iarna TOML tests
        run: ./v vlib/toml/tests/iarna.toml-spec-tests_test.v

      # Tests found at https://github.com/alexcrichton/toml-rs
      - name: Clone alexcrichton/toml-rs
        run: |
          git clone https://github.com/alexcrichton/toml-rs.git $TOML_ALEXCRICHTON_TESTS_PATH
          ## TODO: update/remove this pinning once all our skip lists are empty:
          git -C $TOML_ALEXCRICHTON_TESTS_PATH checkout $TOML_ALEXCRICHTON_TESTS_PINNED_COMMIT

      - name: Run alexcrichton TOML tests
        run: ./v vlib/toml/tests/alexcrichton.toml-rs-tests_test.v
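The new test can also be run locally; a sketch of the equivalent steps (the pinned checkout above is only needed to match CI exactly):

    git clone --depth 1 https://github.com/alexcrichton/toml-rs.git vlib/toml/tests/testdata/alexcrichton/toml-test
    ./v vlib/toml/tests/alexcrichton.toml-rs-tests_test.v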
@@ -0,0 +1,310 @@
import os
import toml
import toml.ast
import x.json2

// Instructions for developers:
// The actual tests and data can be obtained by doing:
// `git clone --depth 1 https://github.com/alexcrichton/toml-rs.git vlib/toml/tests/testdata/alexcrichton/toml-test`
// See also the CI toml tests
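// (The CI workflow above pins the clone to commit 499e8c4 via TOML_ALEXCRICHTON_TESTS_PINNED_COMMIT,
// so a fresh clone of the default branch may contain newer tests not yet covered by the skip lists below.)
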
const (
	// Kept for easier handling of future updates to the tests
	valid_exceptions = [
		'valid/example-v0.3.0.toml',
		'valid/example-v0.4.0.toml',
		'valid/datetime-truncate.toml', // Not considered valid since RFC 3339 doesn't permit > 6 ms digits ??
		'valid/table-array-nest-no-keys.toml',
	]
	invalid_exceptions = [
		'invalid/string-bad-line-ending-escape.toml',
		'invalid/float-no-suffix.toml',
	]

	valid_value_exceptions = [
		'valid/unicode-escape.toml',
		// These have correct values, and should've passed, but the format of arrays is *mixed* in the JSON ??
		'valid/example2.toml',
		// Float
		'valid/float-exponent.toml',
	]

	// These have correct values, and should've passed as-is, but the format of arrays changes in the JSON ??
	// We account for that here (see the note below this list)
	use_type_2_arrays = [
		'valid/table-array-implicit.toml',
		'valid/table-array-many.toml',
		'valid/table-array-one.toml',
		'valid/table-array-nest.toml',
	]
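	// Note: for the files above, to_alexcrichton is called with array_type == 2, which emits arrays
	// as plain JSON arrays `[ ... ]` instead of the wrapped `{ "type": "array", "value": [ ... ] }`
	// form used otherwise; see the []ast.Value branch of to_alexcrichton below.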
	jq = os.find_abs_path_of_executable('jq') or { '' }
	compare_work_dir_root = os.join_path(os.temp_dir(), 'v', 'toml', 'alexcrichton')
	// From: https://stackoverflow.com/a/38266731/1904615
	jq_normalize = r'# Apply f to composite entities recursively using keys[], and to atoms
def sorted_walk(f):
  . as $in
  | if type == "object" then
      reduce keys[] as $key
        ( {}; . + { ($key): ($in[$key] | sorted_walk(f)) } ) | f
  elif type == "array" then map( sorted_walk(f) ) | f
  else f
  end;

def normalize: sorted_walk(if type == "array" then sort else . end);

normalize'
)

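// Note: together with `jq -S` (which sorts object keys), the normalize filter above sorts array
// elements recursively, so the JSON written by this test and the reference JSON shipped with
// toml-rs can be compared without being sensitive to key or array order.
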
// run executes the given command line (joined by spaces) and returns its output,
// or an error containing the exit code and output if the command fails.
fn run(args []string) ?string {
	res := os.execute(args.join(' '))
	if res.exit_code != 0 {
		return error('${args[0]} failed with return code ${res.exit_code}.\n$res.output')
	}
	return res.output
}

// test_alexcrichton_toml_rs runs through 'testdata/alexcrichton/toml-test/test-suite/tests/*' if found.
fn test_alexcrichton_toml_rs() {
	this_file := @FILE
	test_root := os.join_path(os.dir(this_file), 'testdata', 'alexcrichton', 'toml-test')
	if os.is_dir(test_root) {
		valid_test_files := os.walk_ext(os.join_path(test_root, 'test-suite', 'tests',
			'valid'), '.toml')
		println('Testing $valid_test_files.len valid TOML files...')
		mut valid := 0
		mut e := 0
		for i, valid_test_file in valid_test_files {
			mut relative := valid_test_file.all_after(os.join_path('test-suite', 'tests')).trim_left(os.path_separator)
			$if windows {
				relative = relative.replace('/', '\\')
			}

			if relative !in valid_exceptions {
				println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
				toml_doc := toml.parse_file(valid_test_file) or { panic(err) }
				valid++
			} else {
				e++
				println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" EXCEPTION [$e/$valid_exceptions.len]...')
			}
		}
		println('$valid/$valid_test_files.len TOML files were parsed correctly')
		if valid_exceptions.len > 0 {
			println('TODO Skipped parsing of $e valid TOML files...')
		}

		// If the command-line tool `jq` is installed, value tests can be run as well.
		if jq != '' {
			println('Testing value output of $valid_test_files.len valid TOML files using "$jq"...')

			if os.exists(compare_work_dir_root) {
				os.rmdir_all(compare_work_dir_root) or { panic(err) }
			}
			os.mkdir_all(compare_work_dir_root) or { panic(err) }

			jq_normalize_path := os.join_path(compare_work_dir_root, 'normalize.jq')
			os.write_file(jq_normalize_path, jq_normalize) or { panic(err) }

			valid = 0
			e = 0
			for i, valid_test_file in valid_test_files {
				mut relative := valid_test_file.all_after(os.join_path('test-suite', 'tests')).trim_left(os.path_separator)
				$if windows {
					relative = relative.replace('/', '\\')
				}
				if !os.exists(valid_test_file.all_before_last('.') + '.json') {
					println('N/A [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
					continue
				}
				// Skip the file if we know it can't be parsed or we know that the value retrieval needs work.
				if relative !in valid_exceptions && relative !in valid_value_exceptions {
					println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
					toml_doc := toml.parse_file(valid_test_file) or { panic(err) }

					v_toml_json_path := os.join_path(compare_work_dir_root,
						os.file_name(valid_test_file).all_before_last('.') + '.v.json')
					alexcrichton_toml_json_path := os.join_path(compare_work_dir_root,
						os.file_name(valid_test_file).all_before_last('.') + '.json')

					mut array_type := 1
					if relative in use_type_2_arrays {
						array_type = 2
					}

					os.write_file(v_toml_json_path, to_alexcrichton(toml_doc.ast.table,
						array_type)) or { panic(err) }

					alexcrichton_json := os.read_file(valid_test_file.all_before_last('.') + '.json') or {
						panic(err)
					}
					os.write_file(alexcrichton_toml_json_path, alexcrichton_json) or { panic(err) }

					v_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', v_toml_json_path]) or {
						contents := os.read_file(v_toml_json_path) or { panic(err) }
						panic(err.msg + '\n$contents')
					}
					alexcrichton_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"',
						alexcrichton_toml_json_path]) or {
						contents := os.read_file(v_toml_json_path) or { panic(err) }
						panic(err.msg + '\n$contents')
					}

					assert alexcrichton_normalized_json == v_normalized_json

					valid++
				} else {
					e++
					println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" EXCEPTION [$e/$valid_value_exceptions.len]...')
				}
			}
			println('$valid/$valid_test_files.len TOML files were parsed correctly and value checked')
			if valid_value_exceptions.len > 0 {
				println('TODO Skipped value checks of $e valid TOML files...')
			}
		}

		invalid_test_files := os.walk_ext(os.join_path(test_root, 'test-suite', 'tests',
			'invalid'), '.toml')
		println('Testing $invalid_test_files.len invalid TOML files...')
		mut invalid := 0
		e = 0
		for i, invalid_test_file in invalid_test_files {
			mut relative := invalid_test_file.all_after(os.join_path('test-suite', 'tests')).trim_left(os.path_separator)
			$if windows {
				relative = relative.replace('/', '\\')
			}
			if relative !in invalid_exceptions {
				println('OK [${i + 1}/$invalid_test_files.len] "$invalid_test_file"...')
				if toml_doc := toml.parse_file(invalid_test_file) {
					content_that_should_have_failed := os.read_file(invalid_test_file) or {
						panic(err)
					}
					println(' This TOML should have failed:\n${'-'.repeat(40)}\n$content_that_should_have_failed\n${'-'.repeat(40)}')
					assert false
				} else {
					println(' $err.msg')
					assert true
				}
				invalid++
			} else {
				e++
				println('SKIP [${i + 1}/$invalid_test_files.len] "$invalid_test_file" EXCEPTION [$e/$invalid_exceptions.len]...')
			}
		}
		println('$invalid/$invalid_test_files.len invalid TOML files were correctly rejected')
		if invalid_exceptions.len > 0 {
			println('TODO Skipped parsing of $invalid_exceptions.len invalid TOML files...')
		}
	} else {
		println('No test data directory found in "$test_root"')
		assert true
	}
}

// to_alexcrichton_time truncates the fractional seconds of `time_str` to at most 3 digits
// (milliseconds), keeping any trailing 'Z', to match the time format used in the test suite's JSON.
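// For example (illustrative): '00:32:00.999999' becomes '00:32:00.999',
// and '1987-07-05T17:45:56.123456Z' becomes '1987-07-05T17:45:56.123Z'.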
fn to_alexcrichton_time(time_str string) string {
	if time_str.contains('.') {
		date_and_time := time_str.all_before('.')
		mut ms := time_str.all_after('.')
		z := if ms.contains('Z') { 'Z' } else { '' }
		ms = ms.replace('Z', '')
		if ms.len > 3 {
			ms = ms[..3]
		}
		return date_and_time + '.' + ms + z
	} else {
		return time_str
	}
}

// to_alexcrichton returns an alexcrichton/toml-rs test-suite compatible JSON string converted from the `value` ast.Value.
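// For example (illustrative), a document containing only `answer = 42` converts to
// `{ "answer": { "type": "integer", "value": "42" } }` (modulo whitespace). Arrays are wrapped the
// same way when array_type == 1, and emitted as plain JSON arrays when array_type == 2.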
fn to_alexcrichton(value ast.Value, array_type int) string {
	match value {
		ast.Quoted {
			json_text := json2.Any(value.text).json_str()
			return '{ "type": "string", "value": "$json_text" }'
		}
		ast.DateTime {
			// Normalization for json
			mut json_text := json2.Any(value.text).json_str().to_upper().replace(' ',
				'T')
			typ := if json_text.ends_with('Z') || json_text.all_after('T').contains('-')
				|| json_text.all_after('T').contains('+') {
				'datetime'
			} else {
				'datetime-local'
			}
			// NOTE test suite inconsistency.
			// It seems it's implementation specific how time and
			// date-time values are represented in detail. For now we follow the BurntSushi format
			// that expands to 6 digits which is also a valid RFC 3339 representation.
			json_text = to_alexcrichton_time(json_text)
			return '{ "type": "$typ", "value": "$json_text" }'
		}
		ast.Date {
			json_text := json2.Any(value.text).json_str()
			return '{ "type": "date", "value": "$json_text" }'
		}
		ast.Time {
			mut json_text := json2.Any(value.text).json_str()
			json_text = to_alexcrichton_time(json_text)
			return '{ "type": "time", "value": "$json_text" }'
		}
		ast.Bool {
			json_text := json2.Any(value.text.bool()).json_str()
			return '{ "type": "bool", "value": "$json_text" }'
		}
		ast.Null {
			json_text := json2.Any(value.text).json_str()
			return '{ "type": "null", "value": "$json_text" }'
		}
		ast.Number {
			if value.text.contains('inf') || value.text.contains('nan') {
				return '{ "type": "float", "value": "$value.text" }'
			}
			if !value.text.starts_with('0x')
				&& (value.text.contains('.') || value.text.to_lower().contains('e')) {
				mut val := '$value.f64()'.replace('.e+', '.0e') // json notation
				if !val.contains('.') && val != '0' { // json notation
					val += '.0'
				}
				return '{ "type": "float", "value": "$val" }'
			}
			v := value.i64()
			// TODO workaround https://github.com/vlang/v/issues/9507
			if v == i64(-9223372036854775807 - 1) {
				return '{ "type": "integer", "value": "-9223372036854775808" }'
			}
			return '{ "type": "integer", "value": "$v" }'
		}
		map[string]ast.Value {
			mut str := '{ '
			for key, val in value {
				json_key := json2.Any(key).json_str()
				str += ' "$json_key": ${to_alexcrichton(val, array_type)},'
			}
			str = str.trim_right(',')
			str += ' }'
			return str
		}
		[]ast.Value {
			mut str := ''
			if array_type == 1 {
				str = '{ "type": "array", "value": [ '
			} else {
				str = '[ '
			}
			for val in value {
				str += ' ${to_alexcrichton(val, array_type)},'
			}
			str = str.trim_right(',')
			if array_type == 1 {
				str += ' ] }\n'
			} else {
				str += ' ]\n'
			}
			return str
		}
	}
	return '<error>'
}