commit 59082fee32
Merge branch 'master' into string-index-funcs-return-opt-int
@@ -40,6 +40,7 @@ jobs:
 
       - name: Build sdl examples
         run: |
+          v shader sdl/examples/sdl_opengl_and_sokol
           for example in sdl/examples/*; do
             echo "v $example"
             v "$example";

README.md (25 changed lines)
@@ -60,23 +60,40 @@ Unlike many other languages, V is not going to be always changing, with new feat
 being introduced and old features modified. It is always going to be a small and simple
 language, very similar to the way it is right now.
 
-## Installing V from source
+## Installing V - from source *(preferred method)*
 
-### Linux, macOS, Windows, *BSD, Solaris, WSL, Android, Raspbian
+### Linux, macOS, Windows, *BSD, Solaris, WSL, Android, etc.
 
+Usually installing V is quite simple if you have an environment that already has a
+functional `git` installation.
+
+* *(* ***PLEASE NOTE:*** *If you run into any trouble or you have a different operating
+system or Linux distribution that doesn't install or work immediately, please see
+[Installation Issues](https://github.com/vlang/v/discussions/categories/installation-issues)
+and search for your OS and problem. If you can't find your problem, please add it to an
+existing discussion if one exists for your OS, or create a new one if a main discussion
+doesn't yet exist for your OS.)*
+
+
+To get started, simply try to execute the following in your terminal/shell:
 ```bash
 git clone https://github.com/vlang/v
 cd v
 make
+# HINT: Using Windows?: run make.bat in the cmd.exe shell
 ```
 
-That's it! Now you have a V executable at `[path to V repo]/v`.
+That should be it and you should find your V executable at `[path to V repo]/v`.
 `[path to V repo]` can be anywhere.
 
-(On Windows `make` means running `make.bat`, so make sure you use `cmd.exe`)
+(As in the hint above, on Windows `make` means running `make.bat`, so make sure you use
+the `cmd.exe` terminal.)
 
 Now you can try `./v run examples/hello_world.v` (`v.exe` on Windows).
 
+* *Trouble? Please see the note above and link to
+[Installation Issues](https://github.com/vlang/v/discussions/categories/installation-issues) for help.*
+
 V is constantly being updated. To update V, simply run:
 
 ```bash
@@ -118,7 +118,7 @@ fn main() {
 	build_output := get_v_build_output(is_verbose, is_yes, file_path)
 	// ask the user if he wants to submit even after an error
 	if !is_yes && (vdoctor_output == '' || file_content == '' || build_output == '') {
-		confirm_or_exit('An error occured retrieving the information, do you want to continue?')
+		confirm_or_exit('An error occurred retrieving the information, do you want to continue?')
 	}
 
 	expected_result := readline.read_line('What did you expect to see? ') or {
@@ -27,6 +27,8 @@ struct FormatOptions {
 	is_verify bool // exit(1) if the file is not vfmt'ed
 	is_worker bool // true *only* in the worker processes. Note: workers can crash.
 	is_backup bool // make a `file.v.bak` copy *before* overwriting a `file.v` in place with `-w`
+mut:
+	diff_cmd string // filled in when -diff or -verify is passed
 }
 
 const (
@@ -201,36 +203,23 @@ fn print_compiler_options(compiler_params &pref.Preferences) {
 	eprintln(' is_script: $compiler_params.is_script ')
 }
 
-fn (foptions &FormatOptions) post_process_file(file string, formatted_file_path string) ? {
+fn (mut foptions FormatOptions) find_diff_cmd() string {
+	if foptions.diff_cmd != '' {
+		return foptions.diff_cmd
+	}
+	if foptions.is_verify || foptions.is_diff {
+		foptions.diff_cmd = diff.find_working_diff_command() or {
+			eprintln(err)
+			exit(1)
+		}
+	}
+	return foptions.diff_cmd
+}
+
+fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_path string) ? {
 	if formatted_file_path.len == 0 {
 		return
 	}
-	if foptions.is_diff {
-		diff_cmd := diff.find_working_diff_command() or {
-			eprintln(err)
-			return
-		}
-		if foptions.is_verbose {
-			eprintln('Using diff command: $diff_cmd')
-		}
-		diff := diff.color_compare_files(diff_cmd, file, formatted_file_path)
-		if diff.len > 0 {
-			println(diff)
-		}
-		return
-	}
-	if foptions.is_verify {
-		diff_cmd := diff.find_working_diff_command() or {
-			eprintln(err)
-			return
-		}
-		x := diff.color_compare_files(diff_cmd, file, formatted_file_path)
-		if x.len != 0 {
-			println("$file is not vfmt'ed")
-			return error('')
-		}
-		return
-	}
 	fc := os.read_file(file) or {
 		eprintln('File $file could not be read')
 		return
@@ -240,6 +229,31 @@ fn (foptions &FormatOptions) post_process_file(file string, formatted_file_path
 		return
 	}
 	is_formatted_different := fc != formatted_fc
+	if foptions.is_diff {
+		if !is_formatted_different {
+			return
+		}
+		diff_cmd := foptions.find_diff_cmd()
+		if foptions.is_verbose {
+			eprintln('Using diff command: $diff_cmd')
+		}
+		diff := diff.color_compare_files(diff_cmd, file, formatted_file_path)
+		if diff.len > 0 {
+			println(diff)
+		}
+		return
+	}
+	if foptions.is_verify {
+		if !is_formatted_different {
+			return
+		}
+		x := diff.color_compare_files(foptions.find_diff_cmd(), file, formatted_file_path)
+		if x.len != 0 {
+			println("$file is not vfmt'ed")
+			return error('')
+		}
+		return
+	}
 	if foptions.is_c {
 		if is_formatted_different {
 			eprintln('File is not formatted: $file')
@@ -354,7 +354,11 @@ fn vexe() string {
 }
 
 fn new_config(root_path string, toml_config string) ?Config {
-	doc := toml.parse(toml_config) ?
+	doc := if os.is_file(toml_config) {
+		toml.parse_file(toml_config) ?
+	} else {
+		toml.parse_text(toml_config) ?
+	}
 
 	path := os.real_path(root_path).trim_right('/')
 
@@ -208,11 +208,21 @@ fn (mut r Repl) parse_import(line string) {
 	}
 }
 
+fn highlight_console_command(command string) string {
+	return term.bright_white(term.bright_bg_black(' $command '))
+}
+
+fn highlight_repl_command(command string) string {
+	return term.bright_white(term.bg_blue(' $command '))
+}
+
 fn print_welcome_screen() {
-	cmd_exit := term.highlight_command('exit')
-	cmd_help := term.highlight_command('v help')
-	file_main := term.highlight_command('main.v')
-	cmd_run := term.highlight_command('v run main.v')
+	cmd_exit := highlight_repl_command('exit')
+	cmd_list := highlight_repl_command('list')
+	cmd_help := highlight_repl_command('help')
+	cmd_v_help := highlight_console_command('v help')
+	cmd_v_run := highlight_console_command('v run main.v')
+	file_main := highlight_console_command('main.v')
 	vbar := term.bright_green('|')
 	width, _ := term.get_terminal_size() // get the size of the terminal
 	vlogo := [
@@ -224,11 +234,11 @@ fn print_welcome_screen() {
 		term.bright_blue(r' \__/ '),
 	]
 	help_text := [
-		'Welcome to the V REPL (for help with V itself, type $cmd_exit, then run $cmd_help).',
+		'Welcome to the V REPL (for help with V itself, type $cmd_exit, then run $cmd_v_help).',
 		'Note: the REPL is highly experimental. For best V experience, use a text editor, ',
-		'save your code in a $file_main file and execute: $cmd_run',
-		version.full_v_version(false),
-		'Use Ctrl-C or ${term.highlight_command('exit')} to exit, or ${term.highlight_command('help')} to see other available commands',
+		'save your code in a $file_main file and execute: $cmd_v_run',
+		'${version.full_v_version(false)} . Use $cmd_list to see the accumulated program so far.',
+		'Use Ctrl-C or $cmd_exit to exit, or $cmd_help to see other available commands.',
 	]
 	if width >= 97 {
 		eprintln('${vlogo[0]}')
@@ -480,6 +490,9 @@ fn main() {
 		println(' ... where vexepath is the full path to the v executable file')
 		return
 	}
+	if !is_stdin_a_pipe {
+		os.setenv('VCOLORS', 'always', true)
+	}
 	run_repl(replfolder, replprefix)
 }
 
@@ -25,7 +25,7 @@ see also `v help build`.
 
    -cstrict
       Turn on additional C warnings. This slows down compilation
-      slightly (~10-10% for gcc), but sometimes provides better diagnosis.
+      slightly (~10% for gcc), but sometimes provides better diagnosis.
 
    -showcc
       Prints the C command that is used to build the program.
@@ -97,7 +97,8 @@ see also `v help build`.
    systems also (although we do not test it as regularly as for the above):
       `android`, `ios`,
       `freebsd`, `openbsd`, `netbsd`, `dragonfly`,
-      `solaris`, `serenity`, `haiku`, `vinix`
+      `solaris`, `serenity`, `haiku`, `vinix`,
+      `wasm32`, `wasm32-wasi`, `wasm32-emscripten`
 
    Note that V has the concept of platform files, i.e. files ending
    with `_platform.c.v`, and usually only the matching files are used in
@@ -624,6 +624,9 @@ println('[${int(x):010}]') // pad with zeros on the left => [0000000123]
 println('[${int(x):b}]') // output as binary => [1111011]
 println('[${int(x):o}]') // output as octal => [173]
 println('[${int(x):X}]') // output as uppercase hex => [7B]
+
+println('[${10.0000:.2}]') // remove insignificant 0s at the end => [10]
+println('[${10.0000:.2f}]') // do show the 0s at the end, even though they do not change the number => [10.00]
 ```
 
 ### String operators
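As a quick, runnable companion to the two documentation lines added above, here is a minimal V sketch (not part of the commit); the expected outputs in the comments are taken directly from the doc examples:

```v
fn main() {
	// The `:.2` form trims insignificant trailing zeros,
	// while `:.2f` keeps the full fixed-point representation.
	println('[${10.0000:.2}]')  // => [10]
	println('[${10.0000:.2f}]') // => [10.00]
	x := 123
	println('[${x:X}]') // uppercase hex => [7B]
	println('[${x:b}]') // binary => [1111011]
}
```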
@@ -38,7 +38,7 @@ hosts = [
 ]'
 
 fn main() {
-	doc := toml.parse(toml_text) or { panic(err) }
+	doc := toml.parse_text(toml_text) or { panic(err) }
 	title := doc.value('title').string()
 	println('title: "$title"')
 	ip := doc.value('servers.alpha.ip').string()
@@ -144,7 +144,7 @@ fn main() {
 	// TTF render 0 Frame counter
 	app.ttf_render << &ttf.TTF_render_Sokol{
 		bmp: &ttf.BitMap{
-			tf: &(app.tf[0])
+			tf: &app.tf[0]
 			buf: unsafe { malloc_noscan(32000000) }
 			buf_size: (32000000)
 			color: 0xFF0000FF

@@ -155,7 +155,7 @@ fn main() {
 	// TTF render 1 Text Block
 	app.ttf_render << &ttf.TTF_render_Sokol{
 		bmp: &ttf.BitMap{
-			tf: &(app.tf[1])
+			tf: &app.tf[1]
 			// color : 0xFF0000_10
 			// style: .raw
 			// use_font_metrics: true

@@ -164,7 +164,7 @@ fn main() {
 	// TTF mouse position render
 	app.ttf_render << &ttf.TTF_render_Sokol{
 		bmp: &ttf.BitMap{
-			tf: &(app.tf[0])
+			tf: &app.tf[0]
 		}
 	}
 	// setup sokol_gfx
@@ -17,3 +17,9 @@ img.logo {
     float: left;
     width: 10em;
 }
+
+html {
+    display:block;
+}
+
+

@@ -1,6 +1,7 @@
 <html>
 <header>
 	<title>@title</title>
+	<style>html{display:none}</style>
 	@css 'index.css'
 </header>
 <body>
@@ -1,5 +1,6 @@
 module main
 
+import os
 import vweb
 // import vweb.assets
 import time

@@ -16,6 +17,7 @@ fn main() {
 	mut app := &App{}
 	app.serve_static('/favicon.ico', 'favicon.ico')
 	// Automatically make available known static mime types found in given directory.
+	os.chdir(os.dir(os.executable())) ?
 	app.handle_static('assets', true)
 	vweb.run(app, port)
 }
@@ -124,7 +124,7 @@ fn (mut a array) ensure_cap(required int) {
 		cap *= 2
 	}
 	new_size := cap * a.element_size
-	new_data := vcalloc(new_size)
+	new_data := unsafe { malloc(new_size) }
 	if a.data != voidptr(0) {
 		unsafe { vmemcpy(new_data, a.data, a.len * a.element_size) }
 		// TODO: the old data may be leaked when no GC is used (ref-counting?)

@@ -402,12 +402,13 @@ pub fn (mut a array) pop() voidptr {
 	a.len = new_len
 	// Note: a.cap is not changed here *on purpose*, so that
 	// further << ops on that array will be more efficient.
-	return unsafe { memdup(last_elem, a.element_size) }
+	return last_elem
 }
 
 // delete_last efficiently deletes the last element of the array.
 // It does it simply by reducing the length of the array by 1.
 // If the array is empty, this will panic.
+// See also: [trim](#array.trim)
 pub fn (mut a array) delete_last() {
 	// copy pasting code for performance
 	$if !no_bounds_checking ? {
@@ -660,7 +660,7 @@ pub fn str_intp(data_len int, in_data voidptr) string {
 	mut res := strings.new_builder(256)
 	input_base := &StrIntpData(in_data)
 	for i := 0; i < data_len; i++ {
-		data := unsafe { &(input_base[i]) }
+		data := unsafe { &input_base[i] }
 		// avoid empty strings
 		if data.str.len != 0 {
 			res.write_string(data.str)
@@ -5,7 +5,7 @@ module builtin
 [unsafe]
 pub fn __malloc(size usize) voidptr {
 	unsafe {
-		return malloc(int(size))
+		return C.malloc(int(size))
 	}
 }
 
@@ -467,3 +467,20 @@ pub fn hex(len int) string {
 pub fn ascii(len int) string {
 	return string_from_set(rand.ascii_chars, len)
 }
+
+// shuffle randomly permutates the elements in `a`.
+pub fn shuffle<T>(mut a []T) {
+	len := a.len
+	for i in 0 .. len {
+		si := i + intn(len - i) or { len }
+		a[si], a[i] = a[i], a[si]
+	}
+}
+
+// shuffle_clone returns a random permutation of the elements in `a`.
+// The permutation is done on a fresh clone of `a`, so `a` remains unchanged.
+pub fn shuffle_clone<T>(a []T) []T {
+	mut res := a.clone()
+	shuffle(mut res)
+	return res
+}
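A minimal usage sketch for the two functions added above (not part of the commit); it assumes the default global RNG and relies only on the signatures shown in this hunk:

```v
import rand

fn main() {
	mut a := [1, 2, 3, 4, 5]
	rand.shuffle(mut a) // permutes `a` in place
	println(a)

	original := ['x', 'y', 'z']
	shuffled := rand.shuffle_clone(original) // `original` is left untouched
	println(original)
	println(shuffled)
}
```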
@@ -317,3 +317,55 @@ fn test_new_global_rng() {
 
 	rand.set_rng(old)
 }
+
+fn test_shuffle() {
+	mut arrays := [][]int{}
+	arrays << [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
+	arrays << [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
+	for seed in seeds {
+		a := get_n_random_ints(seed, 10)
+		arrays << a
+	}
+	//
+	mut digits := []map[int]int{len: 10}
+	for digit in 0 .. 10 {
+		digits[digit] = {}
+		for idx in 0 .. 10 {
+			digits[digit][idx] = 0
+		}
+	}
+	for mut a in arrays {
+		o := a.clone()
+		for _ in 0 .. 100 {
+			rand.shuffle(mut a)
+			assert *a != o
+			for idx in 0 .. 10 {
+				digits[idx][a[idx]]++
+			}
+		}
+	}
+	for digit in 1 .. 10 {
+		assert digits[0] != digits[digit]
+	}
+	for digit in 0 .. 10 {
+		for idx in 0 .. 10 {
+			assert digits[digit][idx] > 10
+		}
+		// eprintln('digits[$digit]: ${digits[digit]}')
+	}
+}
+
+fn test_shuffle_clone() {
+	original := [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
+	mut a := original.clone()
+	mut results := [][]int{}
+	for _ in 0 .. 10 {
+		results << rand.shuffle_clone(a)
+	}
+	assert original == a
+	for idx in 1 .. 10 {
+		assert results[idx].len == 10
+		assert results[idx] != results[0]
+		assert results[idx] != original
+	}
+}
@@ -158,29 +158,36 @@ pub fn f64_to_str_lnd1(f f64, dec_digit int) string {
 
 	// get sign and decimal parts
 	for c in s {
-		if c == `-` {
-			sgn = -1
-			i++
-		} else if c == `+` {
-			sgn = 1
-			i++
-		} else if c >= `0` && c <= `9` {
-			b[i1] = c
-			i1++
-			i++
-		} else if c == `.` {
-			if sgn > 0 {
-				d_pos = i
-			} else {
-				d_pos = i - 1
+		match c {
+			`-` {
+				sgn = -1
+				i++
+			}
+			`+` {
+				sgn = 1
+				i++
+			}
+			`0`...`9` {
+				b[i1] = c
+				i1++
+				i++
+			}
+			`.` {
+				if sgn > 0 {
+					d_pos = i
+				} else {
+					d_pos = i - 1
+				}
+				i++
+			}
+			`e` {
+				i++
+				break
+			}
+			else {
+				s.free()
+				return '[Float conversion error!!]'
 			}
-			i++
-		} else if c == `e` {
-			i++
-			break
-		} else {
-			s.free()
-			return '[Float conversion error!!]'
 		}
 	}
 	b[i1] = 0
@@ -274,10 +281,13 @@ pub fn f64_to_str_lnd1(f f64, dec_digit int) string {
 
 	// println("r_i-d_pos: ${r_i - d_pos}")
 	if dot_res_sp >= 0 {
-		if (r_i - dot_res_sp) > dec_digit {
-			r_i = dot_res_sp + dec_digit + 1
-		}
+		r_i = dot_res_sp + dec_digit + 1
 		res[r_i] = 0
+		for c1 in 1 .. dec_digit + 1 {
+			if res[r_i - c1] == 0 {
+				res[r_i - c1] = `0`
+			}
+		}
 		// println("result: [${tos(&res[0],r_i)}]")
 		tmp_res := tos(res.data, r_i).clone()
 		res.free()
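Both hunks above rework the internals of `strconv.f64_to_str_lnd1` without changing its public contract (format `f` with `dec_digit` digits after the decimal point). A tiny illustrative sketch; the expected values in the comments are assumptions, not verified output:

```v
import strconv

fn main() {
	// dec_digit controls how many digits are kept after the decimal point.
	println(strconv.f64_to_str_lnd1(123.456789, 2)) // expected: 123.46
	println(strconv.f64_to_str_lnd1(-0.5, 3)) // expected: -0.500 (zero-padded by the new loop)
}
```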
@@ -176,6 +176,28 @@ pub fn (mut b Builder) str() string {
 	return s
 }
 
+// ensure_cap ensures that the buffer has enough space for at least `n` bytes by growing the buffer if necessary
+pub fn (mut b Builder) ensure_cap(n int) {
+	// code adapted from vlib/builtin/array.v
+	if n <= b.cap {
+		return
+	}
+
+	new_data := vcalloc(n * b.element_size)
+	if b.data != voidptr(0) {
+		unsafe { vmemcpy(new_data, b.data, b.len * b.element_size) }
+		// TODO: the old data may be leaked when no GC is used (ref-counting?)
+		if b.flags.has(.noslices) {
+			unsafe { free(b.data) }
+		}
+	}
+	unsafe {
+		b.data = new_data
+		b.offset = 0
+		b.cap = n
+	}
+}
+
 // free is for manually freeing the contents of the buffer
 [unsafe]
 pub fn (mut b Builder) free() {
@@ -112,3 +112,18 @@ fn test_write_runes() {
 	x := sb.str()
 	assert x == 'hello world'
 }
+
+fn test_ensure_cap() {
+	mut sb := strings.new_builder(0)
+	assert sb.cap == 0
+	sb.ensure_cap(10)
+	assert sb.cap == 10
+	sb.ensure_cap(10)
+	assert sb.cap == 10
+	sb.ensure_cap(15)
+	assert sb.cap == 15
+	sb.ensure_cap(10)
+	assert sb.cap == 15
+	sb.ensure_cap(-1)
+	assert sb.cap == 15
+}
@@ -0,0 +1,15 @@
+module sync
+
+fn C.pthread_self() usize
+
+// thread_id returns a unique identifier for the caller thread.
+// All *currently* running threads in the same process, will have *different* thread identifiers.
+// Note: if a thread finishes, and another starts, the identifier of the old thread may be
+// reused for the newly started thread.
+// In other words, thread IDs are guaranteed to be unique only within a process.
+// A thread ID may be reused after a terminated thread has been joined (with `t.wait()`),
+// or when the thread has terminated.
+
+pub fn thread_id() u64 {
+	return u64(C.pthread_self())
+}
@@ -0,0 +1,22 @@
+import sync
+
+fn simple_thread() u64 {
+	tid := sync.thread_id()
+	eprintln('simple_thread thread_id: $tid.hex()')
+	return tid
+}
+
+fn test_sync_thread_id() {
+	mtid := sync.thread_id()
+	eprintln('main thread_id: $sync.thread_id().hex()')
+	x := go simple_thread()
+	y := go simple_thread()
+	xtid := x.wait()
+	ytid := y.wait()
+	eprintln('main thread_id: $sync.thread_id().hex()')
+	dump(xtid.hex())
+	dump(ytid.hex())
+	assert mtid != xtid
+	assert mtid != ytid
+	assert xtid != ytid
+}
@@ -0,0 +1,7 @@
+module sync
+
+fn C.GetCurrentThreadId() u32
+
+pub fn thread_id() u64 {
+	return u64(C.GetCurrentThreadId())
+}
@@ -8,7 +8,8 @@ Parsing files or `string`s containing TOML is easy.
 
 Simply import the `toml` module and do:
 ```v ignore
-doc := toml.parse(<file path or string>) or { panic(err) }
+doc1 := toml.parse_text(<string content>) or { panic(err) }
+doc2 := toml.parse_file(<file path>) or { panic(err) }
 ```
 
 ## Example

@@ -54,7 +55,7 @@ hosts = [
 ]'
 
 fn main() {
-	doc := toml.parse(toml_text) or { panic(err) }
+	doc := toml.parse_text(toml_text) or { panic(err) }
 	title := doc.value('title').string()
 	println('title: "$title"')
 	ip := doc.value('servers.alpha.ip').string()

@@ -91,7 +92,7 @@ array = [
 ]
 '
 
-doc := toml.parse(toml_text) or { panic(err) }
+doc := toml.parse_text(toml_text) or { panic(err) }
 
 assert doc.value('val').bool() == true
 assert doc.value('table.array[0].a').string() == 'A'

@@ -142,6 +143,6 @@ array = [
 ]
 '
 
-doc := toml.parse(toml_text) or { panic(err) }
+doc := toml.parse_text(toml_text) or { panic(err) }
 assert to.json(doc) == '{ "val": true, "table": { "array": [ { "a": "A" }, { "b": "B" } ] } }'
 ```
@@ -415,7 +415,7 @@ pub fn (c Checker) check_quoted(q ast.Quoted) ? {
 // \UXXXXXXXX - Unicode (U+XXXXXXXX)
 fn (c Checker) check_quoted_escapes(q ast.Quoted) ? {
 	// Setup a scanner in stack memory for easier navigation.
-	mut s := scanner.new_simple(q.text) ?
+	mut s := scanner.new_simple_text(q.text) ?
 
 	// See https://toml.io/en/v1.0.0#string for more info on string types.
 	is_basic := q.quote == `\"`

@@ -552,7 +552,7 @@ fn (c Checker) check_unicode_escape(esc_unicode string) ? {
 pub fn (c Checker) check_comment(comment ast.Comment) ? {
 	lit := comment.text
 	// Setup a scanner in stack memory for easier navigation.
-	mut s := scanner.new_simple(lit) ?
+	mut s := scanner.new_simple_text(lit) ?
 	for {
 		ch := s.next()
 		if ch == scanner.end_of_text {

@@ -84,7 +84,7 @@ pub fn decode_quoted_escapes(mut q ast.Quoted) ? {
 		return
 	}
 
-	mut s := scanner.new_simple(q.text) ?
+	mut s := scanner.new_simple_text(q.text) ?
 	q.text = q.text.replace('\\"', '"')
 
 	for {
@@ -15,6 +15,10 @@ pub:
 
 // auto_config returns an, automatic determined, input Config based on heuristics
 // found in `toml`
+// One example of several of why it's deprecated:
+// https://discord.com/channels/592103645835821068/592114487759470596/954101934988615721
+[deprecated: 'will be removed and not replaced due to flaky heuristics that leads to hard to find bugs']
+[deprecated_after: '2022-06-18']
 pub fn auto_config(toml string) ?Config {
 	mut config := Config{}
 	if !toml.contains('\n') && os.is_file(toml) {

@@ -32,7 +36,7 @@ pub fn auto_config(toml string) ?Config {
 
 // validate returns an optional error if more than one of the fields
 // in `Config` has a non-default value (empty string).
-pub fn (c Config) validate() ? {
+fn (c Config) validate() ? {
 	if c.file_path != '' && c.text != '' {
 		error(@MOD + '.' + @FN +
 			' ${typeof(c).name} should contain only one of the fields `file_path` OR `text` filled out')

@@ -42,9 +46,12 @@ pub fn (c Config) validate() ? {
 	}
 }
 
+// read_input returns either Config.text or the read file contents of Config.file_path
+// depending on which one is not empty.
 pub fn (c Config) read_input() ?string {
+	c.validate() ?
 	mut text := c.text
-	if os.is_file(c.file_path) {
+	if text == '' && os.is_file(c.file_path) {
 		text = os.read_file(c.file_path) or {
 			return error(@MOD + '.' + @STRUCT + '.' + @FN +
 				' Could not read "$c.file_path": "$err.msg()"')
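A hypothetical sketch of how the reworked `input.Config` is consumed; inside the module the scanner calls `read_input()` itself and `validate()` is now internal, so direct use like this is illustration only:

```v
import toml.input

fn main() {
	// Exactly one of `text` or `file_path` should be set; read_input()
	// validates this and returns whichever source is non-empty.
	cfg := input.Config{
		text: 'title = "TOML Example"'
	}
	content := cfg.read_input() or { panic(err) }
	println(content)
}
```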
@@ -3,7 +3,6 @@
 // that can be found in the LICENSE file.
 module scanner
 
-import os
 import math
 import toml.input
 import toml.token

@@ -47,28 +46,47 @@ pub:
 	tokenize_formatting bool = true // if true, generate tokens for `\n`, ` `, `\t`, `\r` etc.
 }
 
-// new_scanner returns a new *heap* allocated `Scanner` instance.
+// new_scanner returns a new *heap* allocated `Scanner` instance, based on the file in config.input.file_path,
+// or based on the text in config.input.text .
 pub fn new_scanner(config Config) ?&Scanner {
-	config.input.validate() ?
-	mut text := config.input.text
-	file_path := config.input.file_path
-	if os.is_file(file_path) {
-		text = os.read_file(file_path) or {
-			return error(@MOD + '.' + @STRUCT + '.' + @FN +
-				' Could not read "$file_path": "$err.msg()"')
-		}
-	}
 	mut s := &Scanner{
 		config: config
-		text: text
+		text: config.input.read_input() ?
 	}
 	return s
 }
 
-// returns a new *stack* allocated `Scanner` instance.
-pub fn new_simple(toml_input string) ?Scanner {
+// new_simple returns a new *stack* allocated `Scanner` instance.
+pub fn new_simple(config Config) ?Scanner {
+	return Scanner{
+		config: config
+		text: config.input.read_input() ?
+	}
+}
+
+// new_simple_text returns a new *stack* allocated `Scanner` instance
+// ready for parsing TOML in `text`.
+pub fn new_simple_text(text string) ?Scanner {
+	in_config := input.Config{
+		text: text
+	}
 	config := Config{
-		input: input.auto_config(toml_input) ?
+		input: in_config
+	}
+	return Scanner{
+		config: config
+		text: config.input.read_input() ?
+	}
+}
+
+// new_simple_file returns a new *stack* allocated `Scanner` instance
+// ready for parsing TOML in file read from `path`.
+pub fn new_simple_file(path string) ?Scanner {
+	in_config := input.Config{
+		file_path: path
+	}
+	config := Config{
+		input: in_config
 	}
 	return Scanner{
 		config: config
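A usage sketch for the new stack-allocated constructors (module-internal API; the file path is hypothetical, and only `next()`/`end_of_text` from the surrounding code are relied on):

```v
import toml.scanner

fn main() {
	// Scanner over an in-memory TOML string.
	mut s := scanner.new_simple_text('title = "TOML Example"') or { panic(err) }
	mut n := 0
	for {
		if s.next() == scanner.end_of_text {
			break
		}
		n++
	}
	println('scanned $n characters')
	// A scanner over a file on disk would use:
	// mut fs := scanner.new_simple_file('config.toml') or { panic(err) }
}
```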
@@ -18,7 +18,7 @@ color = "gray"'
 )
 
 fn test_tables() {
-	mut toml_doc := toml.parse(toml_table_text) or { panic(err) }
+	mut toml_doc := toml.parse_text(toml_table_text) or { panic(err) }
 
 	toml_json := to.json(toml_doc)
 

@@ -22,13 +22,13 @@ name = "Born in the USA"
 name = "Dancing in the Dark"'
 )
 
-fn test_nested_array_of_tables() {
-	mut toml_doc := toml.parse(toml_text) or { panic(err) }
+const fprefix = os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))
+
+fn test_nested_array_of_tables() ? {
+	mut toml_doc := toml.parse_text(toml_text) ?
 
 	toml_json := to.json(toml_doc)
 
 	eprintln(toml_json)
-	assert toml_json == os.read_file(
-		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
-		'.out') or { panic(err) }
+	assert toml_json == os.read_file(fprefix + '.out') ?
 }

@@ -14,7 +14,7 @@ const (
 )
 
 fn test_nested_array_of_tables() {
-	mut toml_doc := toml.parse(toml_text) or { panic(err) }
+	mut toml_doc := toml.parse_text(toml_text) or { panic(err) }
 
 	toml_json := to.json(toml_doc)
 

@@ -6,7 +6,7 @@ fn test_array_of_tables_edge_case_file() {
 	toml_file :=
 		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
 		'.toml'
-	toml_doc := toml.parse(toml_file) or { panic(err) }
+	toml_doc := toml.parse_file(toml_file) or { panic(err) }
 
 	toml_json := to.json(toml_doc)
 	out_file :=
@@ -2,17 +2,14 @@ import os
 import toml
 import toml.to
 
-fn test_array_of_tables_edge_case_file() {
-	toml_file :=
-		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
-		'.toml'
-	toml_doc := toml.parse(toml_file) or { panic(err) }
+const fprefix = os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))
+
+fn test_array_of_tables_edge_case_file() ? {
+	toml_doc := toml.parse_file(os.real_path(fprefix + '.toml')) ?
 
 	toml_json := to.json(toml_doc)
-	out_file :=
-		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
-		'.out'
-	out_file_json := os.read_file(out_file) or { panic(err) }
+	out_file_json := os.read_file(os.real_path(fprefix + '.out')) ?
 	println(toml_json)
 	assert toml_json == out_file_json
 }
@@ -29,7 +29,7 @@ hosts = [
 ]'
 
 fn test_parse_compact_text() {
-	toml_doc := toml.parse(toml_text) or { panic(err) }
+	toml_doc := toml.parse_text(toml_text) or { panic(err) }
 
 	title := toml_doc.value('title')
 	assert title == toml.Any('TOML Example')

@@ -4,7 +4,7 @@ fn test_crlf() {
 	str_value := 'test string'
 	mut toml_txt := 'crlf_string = "test string"\r\n
 # Comment with CRLF is not allowed'
-	toml_doc := toml.parse(toml_txt) or { panic(err) }
+	toml_doc := toml.parse_text(toml_txt) or { panic(err) }
 
 	value := toml_doc.value('crlf_string')
 	assert value == toml.Any(str_value)

@@ -16,7 +16,7 @@ fn test_crlf_is_parsable_just_like_lf() ? {
 	crlf_content := '# a comment\r\ntitle = "TOML Example"\r\n[database]\r\nserver = "192.168.1.1"\r\nports = [ 8000, 8001, 8002 ]\r\n'
 	all := [crlf_content, crlf_content.replace('\r\n', '\n')]
 	for content in all {
-		res := toml.parse(content) ?
+		res := toml.parse_text(content) ?
 		assert res.value('title') == toml.Any('TOML Example')
 		assert (res.value('database') as map[string]toml.Any)['server'] ? == toml.Any('192.168.1.1')
 	}

@@ -16,7 +16,7 @@ fn test_dates() {
 lt1 = 07:32:00
 lt2 = 00:32:00.999999
 '
-	toml_doc := toml.parse(toml_txt) or { panic(err) }
+	toml_doc := toml.parse_text(toml_txt) or { panic(err) }
 
 	// Re-use vars
 	mut odt_time := toml.DateTime{'1979-05-27T07:32:00Z'}

@@ -3,7 +3,7 @@ import toml
 fn test_default_to() {
 	default_value := 4321
 	mut toml_txt := 'var = 1234'
-	toml_doc := toml.parse(toml_txt) or { panic(err) }
+	toml_doc := toml.parse_text(toml_txt) or { panic(err) }
 	value := toml_doc.value('tar').default_to(default_value).int()
 	assert value == default_value
 }
@@ -2,17 +2,14 @@ import os
 import toml
 import toml.to
 
-fn test_parse() {
-	toml_file :=
-		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
-		'.toml'
-	toml_doc := toml.parse(toml_file) or { panic(err) }
+const fprefix = os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))
+
+fn test_parse() ? {
+	toml_doc := toml.parse_file(os.real_path(fprefix + '.toml')) ?
 
 	toml_json := to.json(toml_doc)
-	out_file :=
-		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
-		'.out'
-	out_file_json := os.read_file(out_file) or { panic(err) }
 	println(toml_json)
+
+	out_file_json := os.read_file(os.real_path(fprefix + '.out')) ?
 	assert toml_json == out_file_json
 }
@@ -6,7 +6,7 @@ fn test_parse() {
 	toml_file :=
 		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
 		'.toml'
-	toml_doc := toml.parse(toml_file) or { panic(err) }
+	toml_doc := toml.parse_file(toml_file) or { panic(err) }
 
 	toml_json := to.json(toml_doc)
 	out_file :=

@@ -6,7 +6,7 @@ fn test_keys() {
 	toml_file :=
 		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
 		'.toml'
-	toml_doc := toml.parse(toml_file) or { panic(err) }
+	toml_doc := toml.parse_file(toml_file) or { panic(err) }
 
 	mut value := toml_doc.value('34-11')
 	assert value.int() == 23

@@ -15,7 +15,7 @@ fn test_large_file() {
 		'.toml'
 	if os.exists(toml_file) {
 		println('Testing parsing of large (${os.file_size(toml_file)} bytes) "$toml_file"...')
-		toml_doc := toml.parse(toml_file) or { panic(err) }
+		toml_doc := toml.parse_file(toml_file) or { panic(err) }
 		println('OK [1/1] "$toml_file"...') // So it can be checked with `v -stats test ...`
 	}
 }

@@ -25,7 +25,7 @@ enabled = true
 '
 
 fn test_parse() {
-	toml_doc := toml.parse(toml_text) or { panic(err) }
+	toml_doc := toml.parse_text(toml_text) or { panic(err) }
 	// dump(toml_doc.ast)
 	// assert false
 
@@ -3,7 +3,7 @@ import toml
 fn test_quoted_keys() {
 	str_value := 'V rocks!'
 	toml_txt := 'a."b.c" = "V rocks!"'
-	toml_doc := toml.parse(toml_txt) or { panic(err) }
+	toml_doc := toml.parse_text(toml_txt) or { panic(err) }
 
 	value := toml_doc.value('a."b.c"')
 	assert value == toml.Any(str_value)

@@ -50,7 +50,7 @@ mut:
 }
 
 fn test_reflect() {
-	toml_doc := toml.parse(toml_text) or { panic(err) }
+	toml_doc := toml.parse_text(toml_text) or { panic(err) }
 
 	mut user := toml_doc.reflect<User>()
 	user.bio = toml_doc.value('bio').reflect<Bio>()

@@ -12,7 +12,7 @@ fn test_spaced_keys() {
 [ tube . test . "test.test" ]
 h . "i.j." . "k" = "Cryptic"
 '
-	toml_doc := toml.parse(toml_txt) or { panic(err) }
+	toml_doc := toml.parse_text(toml_txt) or { panic(err) }
 	mut value := toml_doc.value('a."b.c"[0].d.e')
 	assert value == toml.Any(str_value)
 	assert value as string == str_value

@@ -33,7 +33,7 @@ long = "\U000003B4"'
 )
 
 fn test_multiline_strings() {
-	mut toml_doc := toml.parse(toml_multiline_text_1) or { panic(err) }
+	mut toml_doc := toml.parse_text(toml_multiline_text_1) or { panic(err) }
 
 	mut value := toml_doc.value('multi1')
 	assert value.string() == 'one'
@@ -44,7 +44,7 @@ fn test_multiline_strings() {
 	value = toml_doc.value('multi4')
 	assert value.string() == 'one\ntwo\nthree\nfour\n'
 
-	toml_doc = toml.parse(toml_multiline_text_2) or { panic(err) }
+	toml_doc = toml.parse_text(toml_multiline_text_2) or { panic(err) }
 	value = toml_doc.value('multi1')
 	assert value.string() == 'one'
 	value = toml_doc.value('multi2')

@@ -57,7 +57,7 @@ fn test_multiline_strings() {
 	toml_file :=
 		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
 		'.toml'
-	toml_doc = toml.parse(toml_file) or { panic(err) }
+	toml_doc = toml.parse_file(toml_file) or { panic(err) }
 	value = toml_doc.value('lit_one')
 	assert value.string() == "'one quote'"
 	value = toml_doc.value('lit_two')

@@ -69,7 +69,7 @@ fn test_multiline_strings() {
 }
 
 fn test_unicode_escapes() {
-	mut toml_doc := toml.parse(toml_unicode_escapes) or { panic(err) }
+	mut toml_doc := toml.parse_text(toml_unicode_escapes) or { panic(err) }
 
 	mut value := toml_doc.value('short')
 	assert value.string() == '\u03B4' // <- This escape is handled by V

@@ -81,7 +81,7 @@ fn test_literal_strings() {
 	toml_file :=
 		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
 		'.toml'
-	toml_doc := toml.parse(toml_file) or { panic(err) }
+	toml_doc := toml.parse_file(toml_file) or { panic(err) }
 
 	assert toml_doc.value('lit1').string() == r'\' // '\'
 	assert toml_doc.value('lit2').string() == r'\\' // '\\'
|
||||||
)
|
)
|
||||||
|
|
||||||
fn test_tables() {
|
fn test_tables() {
|
||||||
mut toml_doc := toml.parse(toml_table_text) or { panic(err) }
|
mut toml_doc := toml.parse_text(toml_table_text) or { panic(err) }
|
||||||
|
|
||||||
mut value := toml_doc.value('inline.a.b')
|
mut value := toml_doc.value('inline.a.b')
|
||||||
assert value.int() == 42
|
assert value.int() == 42
|
||||||
|
|
|
@ -17,7 +17,7 @@ const (
|
||||||
)
|
)
|
||||||
|
|
||||||
fn test_toml_with_bom() {
|
fn test_toml_with_bom() {
|
||||||
toml_doc := toml.parse(toml_text_with_utf8_bom) or { panic(err) }
|
toml_doc := toml.parse_text(toml_text_with_utf8_bom) or { panic(err) }
|
||||||
toml_json := to.json(toml_doc)
|
toml_json := to.json(toml_doc)
|
||||||
|
|
||||||
title := toml_doc.value('title')
|
title := toml_doc.value('title')
|
||||||
|
@ -36,13 +36,13 @@ fn test_toml_with_bom() {
|
||||||
|
|
||||||
// Re-cycle bad_toml_doc
|
// Re-cycle bad_toml_doc
|
||||||
mut bad_toml_doc := empty_toml_document
|
mut bad_toml_doc := empty_toml_document
|
||||||
bad_toml_doc = toml.parse(toml_text_with_utf16_bom) or {
|
bad_toml_doc = toml.parse_text(toml_text_with_utf16_bom) or {
|
||||||
println(' $err.msg()')
|
println(' $err.msg()')
|
||||||
assert true
|
assert true
|
||||||
empty_toml_document
|
empty_toml_document
|
||||||
}
|
}
|
||||||
|
|
||||||
bad_toml_doc = toml.parse(toml_text_with_utf32_bom) or {
|
bad_toml_doc = toml.parse_text(toml_text_with_utf32_bom) or {
|
||||||
println(' $err.msg()')
|
println(' $err.msg()')
|
||||||
assert true
|
assert true
|
||||||
empty_toml_document
|
empty_toml_document
|
||||||
|
|
|
@@ -7,7 +7,7 @@ const toml_text = os.read_file(os.real_path(os.join_path(os.dir(@FILE), 'testdat
 	'.toml') or { panic(err) }
 
 fn test_toml_known_memory_corruption() {
-	toml_doc := toml.parse(toml_text) or { panic(err) }
+	toml_doc := toml.parse_text(toml_text) or { panic(err) }
 
 	owner := toml_doc.value('owner') as map[string]toml.Any
 	any_name := owner.value('name')

@@ -34,7 +34,7 @@ fn test_toml_known_memory_corruption_2() {
 lt1 = 07:32:00
 lt2 = 00:32:00.999999
 '
-	toml_doc := toml.parse(toml_txt) or { panic(err) }
+	toml_doc := toml.parse_text(toml_txt) or { panic(err) }
 
 	// ldt1 test section
 	odt_time := toml.DateTime{'1979-05-27T07:32:00'}

@@ -9,7 +9,7 @@ const toml_text = os.read_file(
 fn test_toml() {
 	// File containing the complete text from the example in the official TOML project README.md:
 	// https://github.com/toml-lang/toml/blob/3b11f6921da7b6f5db37af039aa021fee450c091/README.md#Example
-	toml_doc := toml.parse(toml_text) or { panic(err) }
+	toml_doc := toml.parse_text(toml_text) or { panic(err) }
 	toml_json := to.json(toml_doc)
 
 	// NOTE Kept for easier debugging:

@@ -98,7 +98,7 @@ fn test_toml_parse_text() {
 }
 
 fn test_toml_parse() {
-	toml_doc := toml.parse(toml_text) or { panic(err) }
+	toml_doc := toml.parse_text(toml_text) or { panic(err) }
 	toml_json := to.json(toml_doc)
 	assert toml_json == os.read_file(
 		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
@@ -4,7 +4,7 @@ import strconv
 fn test_string() {
 	str_value := 'test string'
 	toml_txt := 'string = "test string"'
-	toml_doc := toml.parse(toml_txt) or { panic(err) }
+	toml_doc := toml.parse_text(toml_txt) or { panic(err) }
 
 	value := toml_doc.value('string')
 	assert value == toml.Any(str_value)

@@ -14,7 +14,7 @@ fn test_string() {
 
 fn test_i64() {
 	toml_txt := 'i64 = 120'
-	toml_doc := toml.parse(toml_txt) or { panic(err) }
+	toml_doc := toml.parse_text(toml_txt) or { panic(err) }
 
 	value := toml_doc.value('i64')
 	assert value == toml.Any(i64(120))

@@ -26,7 +26,7 @@ fn test_bool() {
 	toml_txt := '
 bool_true = true
 bool_false = false'
-	toml_doc := toml.parse(toml_txt) or { panic(err) }
+	toml_doc := toml.parse_text(toml_txt) or { panic(err) }
 
 	value_true := toml_doc.value('bool_true')
 	assert value_true == toml.Any(true)

@@ -46,7 +46,7 @@ bool_false = false'
 fn test_bool_key_is_not_value() {
 	toml_txt := 'true = true
 false = false'
-	toml_doc := toml.parse(toml_txt) or { panic(err) }
+	toml_doc := toml.parse_text(toml_txt) or { panic(err) }
 
 	value_true := toml_doc.value('true')
 	assert value_true == toml.Any(true)

@@ -64,7 +64,7 @@ false = false'
 fn test_single_letter_key() {
 	toml_txt := '[v]
 open_sourced = "Jun 22 2019 20:20:28"'
-	toml_doc := toml.parse(toml_txt) or { panic(err) }
+	toml_doc := toml.parse_text(toml_txt) or { panic(err) }
 
 	value := toml_doc.value('v.open_sourced').string()
 	assert value == 'Jun 22 2019 20:20:28'
|
||||||
// Regression test
|
// Regression test
|
||||||
// '0xb' is carefully chosen to include the 'b' character that also denotes binary via 0b prefix.
|
// '0xb' is carefully chosen to include the 'b' character that also denotes binary via 0b prefix.
|
||||||
toml_txt := 'hex = 0xb'
|
toml_txt := 'hex = 0xb'
|
||||||
toml_doc := toml.parse(toml_txt) or { panic(err) }
|
toml_doc := toml.parse_text(toml_txt) or { panic(err) }
|
||||||
|
|
||||||
value := toml_doc.value('hex')
|
value := toml_doc.value('hex')
|
||||||
assert value as i64 == 11
|
assert value as i64 == 11
|
||||||
|
@ -85,7 +85,7 @@ fn test_comment_as_last_value() {
|
||||||
toml_txt := '
|
toml_txt := '
|
||||||
test = 42
|
test = 42
|
||||||
# this line has comment as last thing'
|
# this line has comment as last thing'
|
||||||
toml_doc := toml.parse(toml_txt) or { panic(err) }
|
toml_doc := toml.parse_text(toml_txt) or { panic(err) }
|
||||||
|
|
||||||
value := toml_doc.value('test')
|
value := toml_doc.value('test')
|
||||||
assert value as i64 == 42
|
assert value as i64 == 42
|
||||||
|
@ -93,31 +93,31 @@ test = 42
|
||||||
}
|
}
|
||||||
|
|
||||||
fn test_nan_and_inf_values() {
|
fn test_nan_and_inf_values() {
|
||||||
mut toml_doc := toml.parse('nan = nan') or { panic(err) }
|
mut toml_doc := toml.parse_text('nan = nan') or { panic(err) }
|
||||||
mut value := toml_doc.value('nan')
|
mut value := toml_doc.value('nan')
|
||||||
assert value.string() == 'nan'
|
assert value.string() == 'nan'
|
||||||
|
|
||||||
toml_doc = toml.parse('nan = nan#comment') or { panic(err) }
|
toml_doc = toml.parse_text('nan = nan#comment') or { panic(err) }
|
||||||
value = toml_doc.value('nan')
|
value = toml_doc.value('nan')
|
||||||
assert value.string() == 'nan'
|
assert value.string() == 'nan'
|
||||||
|
|
||||||
toml_doc = toml.parse('nan = -nan') or { panic(err) }
|
toml_doc = toml.parse_text('nan = -nan') or { panic(err) }
|
||||||
value = toml_doc.value('nan')
|
value = toml_doc.value('nan')
|
||||||
assert value.string() == 'nan'
|
assert value.string() == 'nan'
|
||||||
|
|
||||||
toml_doc = toml.parse('nan = +nan') or { panic(err) }
|
toml_doc = toml.parse_text('nan = +nan') or { panic(err) }
|
||||||
value = toml_doc.value('nan')
|
value = toml_doc.value('nan')
|
||||||
assert value.string() == 'nan'
|
assert value.string() == 'nan'
|
||||||
|
|
||||||
toml_doc = toml.parse('inf = inf') or { panic(err) }
|
toml_doc = toml.parse_text('inf = inf') or { panic(err) }
|
||||||
value = toml_doc.value('inf')
|
value = toml_doc.value('inf')
|
||||||
assert value.u64() == strconv.double_plus_infinity
|
assert value.u64() == strconv.double_plus_infinity
|
||||||
|
|
||||||
toml_doc = toml.parse('inf = +inf') or { panic(err) }
|
toml_doc = toml.parse_text('inf = +inf') or { panic(err) }
|
||||||
value = toml_doc.value('inf')
|
value = toml_doc.value('inf')
|
||||||
assert value.u64() == strconv.double_plus_infinity
|
assert value.u64() == strconv.double_plus_infinity
|
||||||
|
|
||||||
toml_doc = toml.parse('inf = -inf') or { panic(err) }
|
toml_doc = toml.parse_text('inf = -inf') or { panic(err) }
|
||||||
value = toml_doc.value('inf')
|
value = toml_doc.value('inf')
|
||||||
assert value.u64() == strconv.double_minus_infinity
|
assert value.u64() == strconv.double_minus_infinity
|
||||||
}
|
}
|
||||||
|
|
|
@ -50,7 +50,7 @@ colors = [
|
||||||
)
|
)
|
||||||
|
|
||||||
fn test_value_query_in_array() {
|
fn test_value_query_in_array() {
|
||||||
toml_doc := toml.parse(toml_text) or { panic(err) }
|
toml_doc := toml.parse_text(toml_text) or { panic(err) }
|
||||||
mut value := toml_doc.value('themes[0].colors[1]').string()
|
mut value := toml_doc.value('themes[0].colors[1]').string()
|
||||||
assert value == 'black'
|
assert value == 'black'
|
||||||
value = toml_doc.value('themes[1].colors[0]').string()
|
value = toml_doc.value('themes[1].colors[0]').string()
|
||||||
|
@ -67,7 +67,7 @@ fn test_value_query_in_array() {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn test_any_value_query() {
|
fn test_any_value_query() {
|
||||||
toml_doc := toml.parse(toml_text) or { panic(err) }
|
toml_doc := toml.parse_text(toml_text) or { panic(err) }
|
||||||
themes := toml_doc.value('themes')
|
themes := toml_doc.value('themes')
|
||||||
assert themes.value('[0].colors[0]').string() == 'red'
|
assert themes.value('[0].colors[0]').string() == 'red'
|
||||||
|
|
||||||
|
@ -94,7 +94,7 @@ fn test_any_value_query() {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn test_inf_and_nan_query() {
|
fn test_inf_and_nan_query() {
|
||||||
toml_doc := toml.parse(toml_text) or { panic(err) }
|
toml_doc := toml.parse_text(toml_text) or { panic(err) }
|
||||||
|
|
||||||
value := toml_doc.value('values.nan').string()
|
value := toml_doc.value('values.nan').string()
|
||||||
assert value == 'nan'
|
assert value == 'nan'
|
||||||
|
@ -106,7 +106,7 @@ fn test_inf_and_nan_query() {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn test_any_value_query_2() {
|
fn test_any_value_query_2() {
|
||||||
toml_doc := toml.parse(toml_text_2) or { panic(err) }
|
toml_doc := toml.parse_text(toml_text_2) or { panic(err) }
|
||||||
defaults := toml_doc.value('defaults')
|
defaults := toml_doc.value('defaults')
|
||||||
assert defaults.value('run.flags[0]').string() == '-f 1'
|
assert defaults.value('run.flags[0]').string() == '-f 1'
|
||||||
assert defaults.value('env[0].RUN_TIME').int() == 5
|
assert defaults.value('env[0].RUN_TIME').int() == 5
|
||||||
|
|
|
@ -108,6 +108,8 @@ pub fn parse_text(text string) ?Doc {
|
||||||
// parse parses the TOML document provided in `toml`.
|
// parse parses the TOML document provided in `toml`.
|
||||||
// parse automatically tries to determine whether the type of `toml` is a file or text.
|
// parse automatically tries to determine whether the type of `toml` is a file or text.
|
||||||
// For explicit parsing of input types see `parse_file` or `parse_text`.
|
// For explicit parsing of input types see `parse_file` or `parse_text`.
|
||||||
|
[deprecated: 'use parse_file or parse_text instead']
|
||||||
|
[deprecated_after: '2022-06-18']
|
||||||
pub fn parse(toml string) ?Doc {
|
pub fn parse(toml string) ?Doc {
|
||||||
mut input_config := input.auto_config(toml) ?
|
mut input_config := input.auto_config(toml) ?
|
||||||
scanner_config := scanner.Config{
|
scanner_config := scanner.Config{
|
||||||
|
|
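The hunk above deprecates `toml.parse` in favour of the explicit `parse_text`/`parse_file` entry points. A minimal migration sketch, assuming only the public API visible in this diff (the TOML snippet and the file path are placeholders):

```v
import toml

fn main() {
	// explicit text parsing, replacing the deprecated toml.parse
	doc := toml.parse_text('title = "TOML Example"') or { panic(err) }
	println(doc.value('title').string())
	// for on-disk documents, toml.parse_file('path/to/config.toml') is the
	// explicit counterpart (the path here is purely illustrative)
}
```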
|
@ -557,6 +557,7 @@ pub mut:
|
||||||
name string // left.name()
|
name string // left.name()
|
||||||
is_method bool
|
is_method bool
|
||||||
is_field bool // temp hack, remove ASAP when re-impl CallExpr / Selector (joe)
|
is_field bool // temp hack, remove ASAP when re-impl CallExpr / Selector (joe)
|
||||||
|
is_fn_var bool // fn variable
|
||||||
is_keep_alive bool // GC must not free arguments before fn returns
|
is_keep_alive bool // GC must not free arguments before fn returns
|
||||||
is_noreturn bool // whether the function/method is marked as [noreturn]
|
is_noreturn bool // whether the function/method is marked as [noreturn]
|
||||||
is_ctor_new bool // if JS ctor calls require `new` before the call, marked as `[use_new]` in V
|
is_ctor_new bool // if JS ctor calls require `new` before the call, marked as `[use_new]` in V
|
||||||
|
@ -568,6 +569,7 @@ pub mut:
|
||||||
left_type Type // type of `user`
|
left_type Type // type of `user`
|
||||||
receiver_type Type // User
|
receiver_type Type // User
|
||||||
return_type Type
|
return_type Type
|
||||||
|
fn_var_type Type // fn variable type
|
||||||
should_be_skipped bool // true for calls to `[if someflag?]` functions, when there is no `-d someflag`
|
should_be_skipped bool // true for calls to `[if someflag?]` functions, when there is no `-d someflag`
|
||||||
concrete_types []Type // concrete types, e.g. <int, string>
|
concrete_types []Type // concrete types, e.g. <int, string>
|
||||||
concrete_list_pos token.Pos
|
concrete_list_pos token.Pos
|
||||||
|
@ -1692,10 +1694,10 @@ pub:
|
||||||
|
|
||||||
[inline]
|
[inline]
|
||||||
pub fn (expr Expr) is_blank_ident() bool {
|
pub fn (expr Expr) is_blank_ident() bool {
|
||||||
match expr {
|
if expr is Ident {
|
||||||
Ident { return expr.kind == .blank_ident }
|
return expr.kind == .blank_ident
|
||||||
else { return false }
|
|
||||||
}
|
}
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn (expr Expr) pos() token.Pos {
|
pub fn (expr Expr) pos() token.Pos {
|
||||||
|
|
|
@ -257,10 +257,6 @@ fn (ts TypeSymbol) dbg_common(mut res []string) {
|
||||||
res << 'language: $ts.language'
|
res << 'language: $ts.language'
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn (t Type) str() string {
|
|
||||||
return 'ast.Type(0x$t.hex() = ${u32(t)})'
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn (t &Table) type_str(typ Type) string {
|
pub fn (t &Table) type_str(typ Type) string {
|
||||||
sym := t.sym(typ)
|
sym := t.sym(typ)
|
||||||
return sym.name
|
return sym.name
|
||||||
|
@ -980,6 +976,12 @@ pub fn (mytable &Table) type_to_code(t Type) string {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// clean type name from generics form. From Type<int> -> Type
|
||||||
|
pub fn (t &Table) clean_generics_type_str(typ Type) string {
|
||||||
|
result := t.type_to_str(typ)
|
||||||
|
return result.all_before('<')
|
||||||
|
}
|
||||||
|
|
||||||
// import_aliases is a map of imported symbol aliases 'module.Type' => 'Type'
|
// import_aliases is a map of imported symbol aliases 'module.Type' => 'Type'
|
||||||
pub fn (t &Table) type_to_str_using_aliases(typ Type, import_aliases map[string]string) string {
|
pub fn (t &Table) type_to_str_using_aliases(typ Type, import_aliases map[string]string) string {
|
||||||
sym := t.sym(typ)
|
sym := t.sym(typ)
|
||||||
|
@ -1186,7 +1188,7 @@ pub fn (t &Table) fn_signature_using_aliases(func &Fn, import_aliases map[string
|
||||||
// TODO write receiver
|
// TODO write receiver
|
||||||
}
|
}
|
||||||
if !opts.type_only {
|
if !opts.type_only {
|
||||||
sb.write_string('$func.name')
|
sb.write_string(func.name)
|
||||||
}
|
}
|
||||||
sb.write_string('(')
|
sb.write_string('(')
|
||||||
start := int(opts.skip_receiver)
|
start := int(opts.skip_receiver)
|
||||||
|
@ -1203,22 +1205,38 @@ pub fn (t &Table) fn_signature_using_aliases(func &Fn, import_aliases map[string
|
||||||
sb.write_string('mut ')
|
sb.write_string('mut ')
|
||||||
}
|
}
|
||||||
if !opts.type_only {
|
if !opts.type_only {
|
||||||
sb.write_string('$param.name ')
|
sb.write_string(param.name)
|
||||||
|
sb.write_string(' ')
|
||||||
}
|
}
|
||||||
styp := t.type_to_str_using_aliases(typ, import_aliases)
|
styp := t.type_to_str_using_aliases(typ, import_aliases)
|
||||||
if i == func.params.len - 1 && func.is_variadic {
|
if i == func.params.len - 1 && func.is_variadic {
|
||||||
sb.write_string('...$styp')
|
sb.write_string('...')
|
||||||
|
sb.write_string(styp)
|
||||||
} else {
|
} else {
|
||||||
sb.write_string('$styp')
|
sb.write_string(styp)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
sb.write_string(')')
|
sb.write_string(')')
|
||||||
if func.return_type != ast.void_type {
|
if func.return_type != ast.void_type {
|
||||||
sb.write_string(' ${t.type_to_str_using_aliases(func.return_type, import_aliases)}')
|
sb.write_string(' ')
|
||||||
|
sb.write_string(t.type_to_str_using_aliases(func.return_type, import_aliases))
|
||||||
}
|
}
|
||||||
return sb.str()
|
return sb.str()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Get the complete qualified name of the type
|
||||||
|
// without the generic parts.
|
||||||
|
pub fn (t &TypeSymbol) symbol_name_except_generic() string {
|
||||||
|
// main.Abc<int>
|
||||||
|
mut embed_name := t.name
|
||||||
|
// remove generic part from name
|
||||||
|
// main.Abc<int> => main.Abc
|
||||||
|
if embed_name.contains('<') {
|
||||||
|
embed_name = embed_name.all_before('<')
|
||||||
|
}
|
||||||
|
return embed_name
|
||||||
|
}
|
||||||
|
|
||||||
pub fn (t &TypeSymbol) embed_name() string {
|
pub fn (t &TypeSymbol) embed_name() string {
|
||||||
// main.Abc<int> => Abc<int>
|
// main.Abc<int> => Abc<int>
|
||||||
mut embed_name := t.name.split('.').last()
|
mut embed_name := t.name.split('.').last()
|
||||||
|
|
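For reference, a small standalone sketch of the string handling that `clean_generics_type_str`, `symbol_name_except_generic` and `embed_name` rely on; the literal `main.Abc<int>` is only an illustrative value:

```v
fn main() {
	type_name := 'main.Abc<int>'
	// drop the generic part, as symbol_name_except_generic() does
	println(type_name.all_before('<')) // main.Abc
	// keep only the last path segment, as embed_name() does
	println(type_name.split('.').last()) // Abc<int>
}
```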
|
@ -188,7 +188,39 @@ pub fn (mut c Checker) check_expected_call_arg(got ast.Type, expected_ ast.Type,
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return error('cannot use `${c.table.type_to_str(got.clear_flag(.variadic))}` as `${c.table.type_to_str(expected.clear_flag(.variadic))}`')
|
|
||||||
|
// Check generic types; there are cases where we end up comparing
|
||||||
|
// `&Type<int> == &Type<>`. This commonly happens when implementing a function
|
||||||
|
// with generic parameters like `compare(bst Bst<T> node) {}`
|
||||||
|
got_typ_sym := c.table.sym(got)
|
||||||
|
got_typ_str := c.table.type_to_str(got.clear_flag(.variadic))
|
||||||
|
expected_typ_sym := c.table.sym(expected_)
|
||||||
|
expected_typ_str := c.table.type_to_str(expected.clear_flag(.variadic))
|
||||||
|
|
||||||
|
if got_typ_sym.symbol_name_except_generic() == expected_typ_sym.symbol_name_except_generic() {
|
||||||
|
// Check if we are making a comparison between two different types of
|
||||||
|
// the same type like `Type<int> and &Type<>`
|
||||||
|
if (got.is_ptr() != expected.is_ptr()) || !c.check_same_module(got, expected) {
|
||||||
|
return error('cannot use `$got_typ_str` as `$expected_typ_str`')
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
return error('cannot use `$got_typ_str` as `$expected_typ_str`')
|
||||||
|
}
|
||||||
|
|
||||||
|
// helper method to check if the type is of the same module.
|
||||||
|
// FIXME(vincenzopalazzo) This is a workaround for the issue
|
||||||
|
// explained in https://github.com/vlang/v/pull/13718#issuecomment-1074517800
|
||||||
|
fn (c Checker) check_same_module(got ast.Type, expected ast.Type) bool {
|
||||||
|
clean_got_typ := c.table.clean_generics_type_str(got.clear_flag(.variadic)).all_before('<')
|
||||||
|
clean_expected_typ := c.table.clean_generics_type_str(expected.clear_flag(.variadic)).all_before('<')
|
||||||
|
if clean_got_typ == clean_expected_typ {
|
||||||
|
return true
|
||||||
|
// The following if condition should catch the bugs described in the issue
|
||||||
|
} else if clean_expected_typ.all_after('.') == clean_got_typ.all_after('.') {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn (mut c Checker) check_basic(got ast.Type, expected ast.Type) bool {
|
pub fn (mut c Checker) check_basic(got ast.Type, expected ast.Type) bool {
|
||||||
|
@ -580,7 +612,12 @@ pub fn (mut c Checker) string_inter_lit(mut node ast.StringInterLiteral) ast.Typ
|
||||||
}
|
}
|
||||||
c.fail_if_unreadable(expr, ftyp, 'interpolation object')
|
c.fail_if_unreadable(expr, ftyp, 'interpolation object')
|
||||||
node.expr_types << ftyp
|
node.expr_types << ftyp
|
||||||
typ := c.table.unalias_num_type(ftyp)
|
ftyp_sym := c.table.sym(ftyp)
|
||||||
|
typ := if ftyp_sym.kind == .alias && !ftyp_sym.has_method('str') {
|
||||||
|
c.table.unalias_num_type(ftyp)
|
||||||
|
} else {
|
||||||
|
ftyp
|
||||||
|
}
|
||||||
mut fmt := node.fmts[i]
|
mut fmt := node.fmts[i]
|
||||||
// analyze and validate format specifier
|
// analyze and validate format specifier
|
||||||
if fmt !in [`E`, `F`, `G`, `e`, `f`, `g`, `d`, `u`, `x`, `X`, `o`, `c`, `s`, `S`, `p`,
|
if fmt !in [`E`, `F`, `G`, `e`, `f`, `g`, `d`, `u`, `x`, `X`, `o`, `c`, `s`, `S`, `p`,
|
||||||
|
|
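The interpolation change above keeps an alias type as-is when it defines its own `str` method, instead of unaliasing it to the underlying numeric type. A minimal sketch of the situation this targets; the `Duration` alias and its `str` method are made up for illustration:

```v
type Duration = i64

fn (d Duration) str() string {
	return '${i64(d)}ms'
}

fn main() {
	d := Duration(1500)
	// with the change, interpolation calls Duration.str() instead of
	// formatting the value as a plain i64
	println('took $d') // took 1500ms
}
```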
|
@ -91,9 +91,10 @@ pub mut:
|
||||||
inside_fn_arg bool // `a`, `b` in `a.f(b)`
|
inside_fn_arg bool // `a`, `b` in `a.f(b)`
|
||||||
inside_ct_attr bool // true inside `[if expr]`
|
inside_ct_attr bool // true inside `[if expr]`
|
||||||
inside_comptime_for_field bool
|
inside_comptime_for_field bool
|
||||||
skip_flags bool // should `#flag` and `#include` be skipped
|
skip_flags bool // should `#flag` and `#include` be skipped
|
||||||
fn_level int // 0 for the top level, 1 for `fn abc() {}`, 2 for a nested fn, etc
|
fn_level int // 0 for the top level, 1 for `fn abc() {}`, 2 for a nested fn, etc
|
||||||
smartcast_mut_pos token.Pos
|
smartcast_mut_pos token.Pos // match mut foo, if mut foo is Foo
|
||||||
|
smartcast_cond_pos token.Pos // match cond
|
||||||
ct_cond_stack []ast.Expr
|
ct_cond_stack []ast.Expr
|
||||||
mut:
|
mut:
|
||||||
stmt_level int // the nesting level inside each stmts list;
|
stmt_level int // the nesting level inside each stmts list;
|
||||||
|
@ -594,12 +595,20 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
|
||||||
match mut node.left {
|
match mut node.left {
|
||||||
ast.Ident, ast.SelectorExpr {
|
ast.Ident, ast.SelectorExpr {
|
||||||
if node.left.is_mut {
|
if node.left.is_mut {
|
||||||
c.error('remove unnecessary `mut`', node.left.mut_pos)
|
c.error('the `mut` keyword is invalid here', node.left.mut_pos)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {}
|
else {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
match mut node.right {
|
||||||
|
ast.Ident, ast.SelectorExpr {
|
||||||
|
if node.right.is_mut {
|
||||||
|
c.error('the `mut` keyword is invalid here', node.right.mut_pos)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {}
|
||||||
|
}
|
||||||
eq_ne := node.op in [.eq, .ne]
|
eq_ne := node.op in [.eq, .ne]
|
||||||
// Single side check
|
// Single side check
|
||||||
// Place these branches according to ops' usage frequency to accelerate.
|
// Place these branches according to ops' usage frequency to accelerate.
|
||||||
|
@ -1522,41 +1531,36 @@ fn (mut c Checker) check_or_last_stmt(stmt ast.Stmt, ret_type ast.Type, expr_ret
|
||||||
stmt.pos)
|
stmt.pos)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else if stmt is ast.ExprStmt {
|
||||||
match stmt {
|
match stmt.expr {
|
||||||
ast.ExprStmt {
|
ast.IfExpr {
|
||||||
match stmt.expr {
|
for branch in stmt.expr.branches {
|
||||||
ast.IfExpr {
|
last_stmt := branch.stmts[branch.stmts.len - 1]
|
||||||
for branch in stmt.expr.branches {
|
c.check_or_last_stmt(last_stmt, ret_type, expr_return_type)
|
||||||
last_stmt := branch.stmts[branch.stmts.len - 1]
|
|
||||||
c.check_or_last_stmt(last_stmt, ret_type, expr_return_type)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ast.MatchExpr {
|
|
||||||
for branch in stmt.expr.branches {
|
|
||||||
last_stmt := branch.stmts[branch.stmts.len - 1]
|
|
||||||
c.check_or_last_stmt(last_stmt, ret_type, expr_return_type)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
if stmt.typ == ast.void_type {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if is_noreturn_callexpr(stmt.expr) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if c.check_types(stmt.typ, expr_return_type) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
// opt_returning_string() or { ... 123 }
|
|
||||||
type_name := c.table.type_to_str(stmt.typ)
|
|
||||||
expr_return_type_name := c.table.type_to_str(expr_return_type)
|
|
||||||
c.error('the default expression type in the `or` block should be `$expr_return_type_name`, instead you gave a value of type `$type_name`',
|
|
||||||
stmt.expr.pos())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {}
|
ast.MatchExpr {
|
||||||
|
for branch in stmt.expr.branches {
|
||||||
|
last_stmt := branch.stmts[branch.stmts.len - 1]
|
||||||
|
c.check_or_last_stmt(last_stmt, ret_type, expr_return_type)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
if stmt.typ == ast.void_type {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if is_noreturn_callexpr(stmt.expr) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if c.check_types(stmt.typ, expr_return_type) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// opt_returning_string() or { ... 123 }
|
||||||
|
type_name := c.table.type_to_str(stmt.typ)
|
||||||
|
expr_return_type_name := c.table.type_to_str(expr_return_type)
|
||||||
|
c.error('the default expression type in the `or` block should be `$expr_return_type_name`, instead you gave a value of type `$type_name`',
|
||||||
|
stmt.expr.pos())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
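The restructured `check_or_last_stmt` still enforces that the last expression in an `or` block matches the unwrapped return type of the optional. A minimal sketch of the rule it checks (the names are illustrative):

```v
fn find_name() ?string {
	return error('not found')
}

fn main() {
	// the final expression of the or block must be a string here,
	// since find_name returns ?string
	name := find_name() or { 'default' }
	println(name)
}
```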
@ -1761,6 +1765,10 @@ pub fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
|
||||||
c.note('smartcasting requires either an immutable value, or an explicit mut keyword before the value',
|
c.note('smartcasting requires either an immutable value, or an explicit mut keyword before the value',
|
||||||
c.smartcast_mut_pos)
|
c.smartcast_mut_pos)
|
||||||
}
|
}
|
||||||
|
if c.smartcast_cond_pos != token.Pos{} {
|
||||||
|
c.note('smartcast can only be used on the ident or selector, e.g. match foo, match foo.bar',
|
||||||
|
c.smartcast_cond_pos)
|
||||||
|
}
|
||||||
c.error(unknown_field_msg, node.pos)
|
c.error(unknown_field_msg, node.pos)
|
||||||
}
|
}
|
||||||
return ast.void_type
|
return ast.void_type
|
||||||
|
@ -2089,6 +2097,10 @@ fn (mut c Checker) global_decl(mut node ast.GlobalDecl) {
|
||||||
c.error('unknown type `$sym.name`', field.typ_pos)
|
c.error('unknown type `$sym.name`', field.typ_pos)
|
||||||
}
|
}
|
||||||
if field.has_expr {
|
if field.has_expr {
|
||||||
|
if field.expr is ast.AnonFn && field.name == 'main' {
|
||||||
|
c.error('the `main` function is the program entry point, cannot redefine it',
|
||||||
|
field.pos)
|
||||||
|
}
|
||||||
field.typ = c.expr(field.expr)
|
field.typ = c.expr(field.expr)
|
||||||
mut v := c.file.global_scope.find_global(field.name) or {
|
mut v := c.file.global_scope.find_global(field.name) or {
|
||||||
panic('internal compiler error - could not find global in scope')
|
panic('internal compiler error - could not find global in scope')
|
||||||
|
@ -3349,7 +3361,9 @@ fn (mut c Checker) smartcast(expr_ ast.Expr, cur_type ast.Type, to_type_ ast.Typ
|
||||||
c.smartcast_mut_pos = expr.pos
|
c.smartcast_mut_pos = expr.pos
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {}
|
else {
|
||||||
|
c.smartcast_cond_pos = expr.pos()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3765,6 +3779,9 @@ pub fn (mut c Checker) index_expr(mut node ast.IndexExpr) ast.Type {
|
||||||
&& typ !in [ast.byteptr_type, ast.charptr_type] && !typ.has_flag(.variadic) {
|
&& typ !in [ast.byteptr_type, ast.charptr_type] && !typ.has_flag(.variadic) {
|
||||||
c.error('type `$typ_sym.name` does not support indexing', node.pos)
|
c.error('type `$typ_sym.name` does not support indexing', node.pos)
|
||||||
}
|
}
|
||||||
|
if typ.has_flag(.optional) {
|
||||||
|
c.error('type `?$typ_sym.name` is optional, it does not support indexing', node.left.pos())
|
||||||
|
}
|
||||||
if typ_sym.kind == .string && !typ.is_ptr() && node.is_setter {
|
if typ_sym.kind == .string && !typ.is_ptr() && node.is_setter {
|
||||||
c.error('cannot assign to s[i] since V strings are immutable\n' +
|
c.error('cannot assign to s[i] since V strings are immutable\n' +
|
||||||
'(note, that variables may be mutable but string values are always immutable, like in Go and Java)',
|
'(note, that variables may be mutable but string values are always immutable, like in Go and Java)',
|
||||||
|
|
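The new optional check above rejects indexing an optional value directly. A minimal sketch of the accepted pattern, unwrap first and then index (names are illustrative):

```v
fn abc() ?[]int {
	return [1, 2, 3]
}

fn main() {
	// abc()[0] would now be an error; unwrap the optional first
	items := abc() or { [0] }
	println(items[0]) // 1
}
```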
|
@ -352,32 +352,12 @@ fn (mut c Checker) anon_fn(mut node ast.AnonFn) ast.Type {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn (mut c Checker) call_expr(mut node ast.CallExpr) ast.Type {
|
pub fn (mut c Checker) call_expr(mut node ast.CallExpr) ast.Type {
|
||||||
// First check everything that applies to both fns and methods
|
|
||||||
// TODO merge logic from method_call and fn_call
|
// TODO merge logic from method_call and fn_call
|
||||||
/*
|
// First check everything that applies to both fns and methods
|
||||||
for i, call_arg in node.args {
|
|
||||||
if call_arg.is_mut {
|
|
||||||
c.fail_if_immutable(call_arg.expr)
|
|
||||||
if !arg.is_mut {
|
|
||||||
tok := call_arg.share.str()
|
|
||||||
c.error('`$node.name` parameter `$arg.name` is not `$tok`, `$tok` is not needed`',
|
|
||||||
call_arg.expr.pos())
|
|
||||||
} else if arg.typ.share() != call_arg.share {
|
|
||||||
c.error('wrong shared type', call_arg.expr.pos())
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if arg.is_mut && (!call_arg.is_mut || arg.typ.share() != call_arg.share) {
|
|
||||||
tok := call_arg.share.str()
|
|
||||||
c.error('`$node.name` parameter `$arg.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${i+1}`',
|
|
||||||
call_arg.expr.pos())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
// Now call `method_call` or `fn_call` for specific checks.
|
|
||||||
old_inside_fn_arg := c.inside_fn_arg
|
old_inside_fn_arg := c.inside_fn_arg
|
||||||
c.inside_fn_arg = true
|
c.inside_fn_arg = true
|
||||||
mut continue_check := true
|
mut continue_check := true
|
||||||
|
// Now call `method_call` or `fn_call` for specific checks.
|
||||||
typ := if node.is_method {
|
typ := if node.is_method {
|
||||||
c.method_call(mut node)
|
c.method_call(mut node)
|
||||||
} else {
|
} else {
|
||||||
|
@ -620,9 +600,13 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
|
||||||
match obj {
|
match obj {
|
||||||
ast.GlobalField {
|
ast.GlobalField {
|
||||||
typ = obj.typ
|
typ = obj.typ
|
||||||
|
node.is_fn_var = true
|
||||||
|
node.fn_var_type = typ
|
||||||
}
|
}
|
||||||
ast.Var {
|
ast.Var {
|
||||||
typ = if obj.smartcasts.len != 0 { obj.smartcasts.last() } else { obj.typ }
|
typ = if obj.smartcasts.len != 0 { obj.smartcasts.last() } else { obj.typ }
|
||||||
|
node.is_fn_var = true
|
||||||
|
node.fn_var_type = typ
|
||||||
}
|
}
|
||||||
else {}
|
else {}
|
||||||
}
|
}
|
||||||
|
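The `is_fn_var`/`fn_var_type` bookkeeping above applies to calls that go through a function value rather than a named function. A minimal sketch of such a call (purely illustrative):

```v
fn main() {
	add_one := fn (x int) int {
		return x + 1
	}
	// a call through a fn variable; the checker now records the
	// variable's fn type for this call
	println(add_one(41)) // 42
}
```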
@ -810,7 +794,8 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
|
||||||
call_arg.expr.pos())
|
call_arg.expr.pos())
|
||||||
} else {
|
} else {
|
||||||
if param.typ.share() != call_arg.share {
|
if param.typ.share() != call_arg.share {
|
||||||
c.error('wrong shared type', call_arg.expr.pos())
|
c.error('wrong shared type `$call_arg.share.str()`, expected: `$param.typ.share().str()`',
|
||||||
|
call_arg.expr.pos())
|
||||||
}
|
}
|
||||||
if to_lock != '' && !param.typ.has_flag(.shared_f) {
|
if to_lock != '' && !param.typ.has_flag(.shared_f) {
|
||||||
c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`',
|
c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`',
|
||||||
|
@ -1258,10 +1243,6 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
|
||||||
final_arg_sym = c.table.sym(final_arg_typ)
|
final_arg_sym = c.table.sym(final_arg_typ)
|
||||||
}
|
}
|
||||||
if exp_arg_typ.has_flag(.generic) {
|
if exp_arg_typ.has_flag(.generic) {
|
||||||
if concrete_types.len == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if exp_utyp := c.table.resolve_generic_to_concrete(exp_arg_typ, method.generic_names,
|
if exp_utyp := c.table.resolve_generic_to_concrete(exp_arg_typ, method.generic_names,
|
||||||
concrete_types)
|
concrete_types)
|
||||||
{
|
{
|
||||||
|
@ -1299,7 +1280,8 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
|
||||||
arg.expr.pos())
|
arg.expr.pos())
|
||||||
} else {
|
} else {
|
||||||
if param_share != arg.share {
|
if param_share != arg.share {
|
||||||
c.error('wrong shared type', arg.expr.pos())
|
c.error('wrong shared type `$arg.share.str()`, expected: `$param_share.str()`',
|
||||||
|
arg.expr.pos())
|
||||||
}
|
}
|
||||||
if to_lock != '' && param_share != .shared_t {
|
if to_lock != '' && param_share != .shared_t {
|
||||||
c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`',
|
c.error('$to_lock is `shared` and must be `lock`ed to be passed as `mut`',
|
||||||
|
|
|
@ -152,9 +152,8 @@ pub fn (mut c Checker) if_expr(mut node ast.IfExpr) ast.Type {
|
||||||
} else {
|
} else {
|
||||||
c.stmts(branch.stmts)
|
c.stmts(branch.stmts)
|
||||||
}
|
}
|
||||||
if c.smartcast_mut_pos != token.Pos{} {
|
c.smartcast_mut_pos = token.Pos{}
|
||||||
c.smartcast_mut_pos = token.Pos{}
|
c.smartcast_cond_pos = token.Pos{}
|
||||||
}
|
|
||||||
}
|
}
|
||||||
if expr_required {
|
if expr_required {
|
||||||
if branch.stmts.len > 0 && branch.stmts[branch.stmts.len - 1] is ast.ExprStmt {
|
if branch.stmts.len > 0 && branch.stmts[branch.stmts.len - 1] is ast.ExprStmt {
|
||||||
|
|
|
@ -43,9 +43,8 @@ pub fn (mut c Checker) match_expr(mut node ast.MatchExpr) ast.Type {
|
||||||
} else {
|
} else {
|
||||||
c.stmts(branch.stmts)
|
c.stmts(branch.stmts)
|
||||||
}
|
}
|
||||||
if c.smartcast_mut_pos != token.Pos{} {
|
c.smartcast_mut_pos = token.Pos{}
|
||||||
c.smartcast_mut_pos = token.Pos{}
|
c.smartcast_cond_pos = token.Pos{}
|
||||||
}
|
|
||||||
if node.is_expr {
|
if node.is_expr {
|
||||||
if branch.stmts.len > 0 {
|
if branch.stmts.len > 0 {
|
||||||
// ignore last statement - workaround
|
// ignore last statement - workaround
|
||||||
|
@ -66,37 +65,36 @@ pub fn (mut c Checker) match_expr(mut node ast.MatchExpr) ast.Type {
|
||||||
// If the last statement is an expression, return its type
|
// If the last statement is an expression, return its type
|
||||||
if branch.stmts.len > 0 {
|
if branch.stmts.len > 0 {
|
||||||
mut stmt := branch.stmts[branch.stmts.len - 1]
|
mut stmt := branch.stmts[branch.stmts.len - 1]
|
||||||
match mut stmt {
|
if mut stmt is ast.ExprStmt {
|
||||||
ast.ExprStmt {
|
if node.is_expr {
|
||||||
if node.is_expr {
|
c.expected_type = node.expected_type
|
||||||
c.expected_type = node.expected_type
|
}
|
||||||
|
expr_type := c.expr(stmt.expr)
|
||||||
|
if first_iteration {
|
||||||
|
if node.is_expr && (node.expected_type.has_flag(.optional)
|
||||||
|
|| c.table.type_kind(node.expected_type) == .sum_type) {
|
||||||
|
ret_type = node.expected_type
|
||||||
|
} else {
|
||||||
|
ret_type = expr_type
|
||||||
}
|
}
|
||||||
expr_type := c.expr(stmt.expr)
|
stmt.typ = expr_type
|
||||||
if first_iteration {
|
} else if node.is_expr && ret_type.idx() != expr_type.idx() {
|
||||||
if node.is_expr && (node.expected_type.has_flag(.optional)
|
if !c.check_types(ret_type, expr_type) && !c.check_types(expr_type, ret_type) {
|
||||||
|| c.table.type_kind(node.expected_type) == .sum_type) {
|
ret_sym := c.table.sym(ret_type)
|
||||||
ret_type = node.expected_type
|
is_noreturn := is_noreturn_callexpr(stmt.expr)
|
||||||
} else {
|
if !(node.is_expr && ret_sym.kind == .sum_type
|
||||||
ret_type = expr_type
|
&& (ret_type.has_flag(.generic)
|
||||||
}
|
|| c.table.is_sumtype_or_in_variant(ret_type, expr_type)))
|
||||||
stmt.typ = expr_type
|
&& !is_noreturn {
|
||||||
} else if node.is_expr && ret_type.idx() != expr_type.idx() {
|
c.error('return type mismatch, it should be `$ret_sym.name`',
|
||||||
if !c.check_types(ret_type, expr_type)
|
stmt.expr.pos())
|
||||||
&& !c.check_types(expr_type, ret_type) {
|
|
||||||
ret_sym := c.table.sym(ret_type)
|
|
||||||
is_noreturn := is_noreturn_callexpr(stmt.expr)
|
|
||||||
if !(node.is_expr && ret_sym.kind == .sum_type) && !is_noreturn {
|
|
||||||
c.error('return type mismatch, it should be `$ret_sym.name`',
|
|
||||||
stmt.expr.pos())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
} else {
|
||||||
if node.is_expr && ret_type != ast.void_type {
|
if node.is_expr && ret_type != ast.void_type {
|
||||||
c.error('`match` expression requires an expression as the last statement of every branch',
|
c.error('`match` expression requires an expression as the last statement of every branch',
|
||||||
stmt.pos)
|
stmt.pos)
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -119,10 +117,6 @@ pub fn (mut c Checker) match_expr(mut node ast.MatchExpr) ast.Type {
|
||||||
c.returns = false
|
c.returns = false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// if ret_type != ast.void_type {
|
|
||||||
// node.is_expr = c.expected_type != ast.void_type
|
|
||||||
// node.expected_type = c.expected_type
|
|
||||||
// }
|
|
||||||
node.return_type = ret_type
|
node.return_type = ret_type
|
||||||
cond_var := c.get_base_name(&node.cond)
|
cond_var := c.get_base_name(&node.cond)
|
||||||
if cond_var != '' {
|
if cond_var != '' {
|
||||||
|
@ -241,7 +235,11 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, cond_type_sym ast.TypeSym
|
||||||
if expr_type !in cond_type_sym.info.variants {
|
if expr_type !in cond_type_sym.info.variants {
|
||||||
expr_str := c.table.type_to_str(expr_type)
|
expr_str := c.table.type_to_str(expr_type)
|
||||||
expect_str := c.table.type_to_str(node.cond_type)
|
expect_str := c.table.type_to_str(node.cond_type)
|
||||||
c.error('`$expect_str` has no variant `$expr_str`', expr.pos())
|
sumtype_variant_names := cond_type_sym.info.variants.map(c.table.type_to_str_using_aliases(it,
|
||||||
|
{}))
|
||||||
|
suggestion := util.new_suggestion(expr_str, sumtype_variant_names)
|
||||||
|
c.error(suggestion.say('`$expect_str` has no variant `$expr_str`'),
|
||||||
|
expr.pos())
|
||||||
}
|
}
|
||||||
} else if cond_type_sym.info is ast.Alias && expr_type_sym.info is ast.Struct {
|
} else if cond_type_sym.info is ast.Alias && expr_type_sym.info is ast.Struct {
|
||||||
expr_str := c.table.type_to_str(expr_type)
|
expr_str := c.table.type_to_str(expr_type)
|
||||||
|
|
|
@ -55,6 +55,16 @@ pub fn (mut c Checker) struct_decl(mut node ast.StructDecl) {
|
||||||
field.type_pos)
|
field.type_pos)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
field_sym := c.table.sym(field.typ)
|
||||||
|
if field_sym.kind == .function {
|
||||||
|
fn_info := field_sym.info as ast.FnType
|
||||||
|
c.ensure_type_exists(fn_info.func.return_type, fn_info.func.return_type_pos) or {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for param in fn_info.func.params {
|
||||||
|
c.ensure_type_exists(param.typ, param.type_pos) or { return }
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
if sym.kind == .struct_ {
|
if sym.kind == .struct_ {
|
||||||
info := sym.info as ast.Struct
|
info := sym.info as ast.Struct
|
||||||
|
@ -324,6 +334,15 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
|
||||||
field.pos)
|
field.pos)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if field_type_sym.kind == .function && field_type_sym.language == .v {
|
||||||
|
pos := field.expr.pos()
|
||||||
|
if mut field.expr is ast.AnonFn {
|
||||||
|
if field.expr.decl.no_body {
|
||||||
|
c.error('cannot initialize the fn field with anonymous fn that does not have a body',
|
||||||
|
pos)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
node.fields[i].typ = expr_type
|
node.fields[i].typ = expr_type
|
||||||
node.fields[i].expected_type = field_info.typ
|
node.fields[i].expected_type = field_info.typ
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,5 @@
|
||||||
|
module amod
|
||||||
|
|
||||||
|
pub struct Xyz {}
|
||||||
|
|
||||||
|
pub struct Bcg {}
|
|
@ -0,0 +1,7 @@
|
||||||
|
vlib/v/checker/tests/generic_parameter_on_method.vv:15:15: error: cannot use `&Type<int>` as `Type<>` in argument 1 to `ContainerType<int>.contains`
|
||||||
|
13 | fn main() {
|
||||||
|
14 | con := ContainerType<int>{typ: &Type<int>{0}}
|
||||||
|
15 | con.contains(con.typ)
|
||||||
|
| ~~~~~~~
|
||||||
|
16 | println(con)
|
||||||
|
17 | }
|
|
@ -0,0 +1,17 @@
|
||||||
|
struct Type<T> {
|
||||||
|
value T
|
||||||
|
}
|
||||||
|
|
||||||
|
struct ContainerType<T> {
|
||||||
|
typ &Type<T>
|
||||||
|
}
|
||||||
|
|
||||||
|
fn (instance &ContainerType<T>) contains(typ Type<T>) {
|
||||||
|
println(typ)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
con := ContainerType<int>{typ: &Type<int>{0}}
|
||||||
|
con.contains(con.typ)
|
||||||
|
println(con)
|
||||||
|
}
|
|
@ -0,0 +1,3 @@
|
||||||
|
vlib/v/checker/tests/globals/redefine_main.vv:1:10: error: the `main` function is the program entry point, cannot redefine it
|
||||||
|
1 | __global main = fn () int { return 22 }
|
||||||
|
| ~~~~
|
|
@ -0,0 +1 @@
|
||||||
|
__global main = fn () int { return 22 }
|
|
@ -0,0 +1,14 @@
|
||||||
|
vlib/v/checker/tests/incorrect_smartcast2_err.vv:24:9: notice: smartcast can only be used on the ident or selector, e.g. match foo, match foo.bar
|
||||||
|
22 |
|
||||||
|
23 | fn doesntwork(v []Either<int, int>) {
|
||||||
|
24 | match v[0] {
|
||||||
|
| ~~~
|
||||||
|
25 | Left<int> {
|
||||||
|
26 | println(v[0].error)
|
||||||
|
vlib/v/checker/tests/incorrect_smartcast2_err.vv:26:17: error: field `error` does not exist or have the same type in all sumtype variants
|
||||||
|
24 | match v[0] {
|
||||||
|
25 | Left<int> {
|
||||||
|
26 | println(v[0].error)
|
||||||
|
| ~~~~~
|
||||||
|
27 | }
|
||||||
|
28 | else {}
|
|
@ -0,0 +1,32 @@
|
||||||
|
struct Left<E> {
|
||||||
|
error E
|
||||||
|
}
|
||||||
|
|
||||||
|
struct Right<T> {
|
||||||
|
inner T
|
||||||
|
}
|
||||||
|
|
||||||
|
type Either<T, E> =
|
||||||
|
Left<E> |
|
||||||
|
Right<T>
|
||||||
|
|
||||||
|
fn works(v []Either<int, int>) {
|
||||||
|
first := v[0]
|
||||||
|
match first {
|
||||||
|
Left<int> {
|
||||||
|
println(first.error)
|
||||||
|
}
|
||||||
|
else {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn doesntwork(v []Either<int, int>) {
|
||||||
|
match v[0] {
|
||||||
|
Left<int> {
|
||||||
|
println(v[0].error)
|
||||||
|
}
|
||||||
|
else {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {}
|
|
@ -0,0 +1,7 @@
|
||||||
|
vlib/v/checker/tests/index_of_optional_err.vv:6:7: error: type `?[]int` is optional, it does not support indexing
|
||||||
|
4 |
|
||||||
|
5 | fn main() {
|
||||||
|
6 | a := abc()[0] or { 5 }
|
||||||
|
| ~~~~~
|
||||||
|
7 | dump(a)
|
||||||
|
8 | }
|
|
@ -0,0 +1,8 @@
|
||||||
|
fn abc() ?[]int {
|
||||||
|
return [1, 2, 3]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
a := abc()[0] or { 5 }
|
||||||
|
dump(a)
|
||||||
|
}
|
|
@ -0,0 +1,14 @@
|
||||||
|
vlib/v/checker/tests/invalid_mut.vv:3:5: error: the `mut` keyword is invalid here
|
||||||
|
1 | fn main() {
|
||||||
|
2 | mut x := 0
|
||||||
|
3 | if mut x == 0 {
|
||||||
|
| ~~~
|
||||||
|
4 | println(true)
|
||||||
|
5 | }
|
||||||
|
vlib/v/checker/tests/invalid_mut.vv:6:10: error: the `mut` keyword is invalid here
|
||||||
|
4 | println(true)
|
||||||
|
5 | }
|
||||||
|
6 | if 0 == mut x {
|
||||||
|
| ~~~
|
||||||
|
7 | println(true)
|
||||||
|
8 | }
|
|
@ -3,5 +3,8 @@ fn main() {
|
||||||
if mut x == 0 {
|
if mut x == 0 {
|
||||||
println(true)
|
println(true)
|
||||||
}
|
}
|
||||||
|
if 0 == mut x {
|
||||||
|
println(true)
|
||||||
|
}
|
||||||
_ = x
|
_ = x
|
||||||
}
|
}
|
|
@ -1,4 +1,5 @@
|
||||||
vlib/v/checker/tests/match_invalid_type.vv:5:3: error: `IoS` has no variant `byte`
|
vlib/v/checker/tests/match_invalid_type.vv:5:3: error: `IoS` has no variant `byte`.
|
||||||
|
2 possibilities: `int`, `string`.
|
||||||
3 | fn sum() {
|
3 | fn sum() {
|
||||||
4 | match IoS(1) {
|
4 | match IoS(1) {
|
||||||
5 | byte {
|
5 | byte {
|
||||||
|
|
|
@ -0,0 +1,7 @@
|
||||||
|
vlib/v/checker/tests/match_return_sumtype_mismatch_err.vv:15:11: error: return type mismatch, it should be `Myt`
|
||||||
|
13 | return match b {
|
||||||
|
14 | true { St('TRUE') }
|
||||||
|
15 | false { `F` }
|
||||||
|
| ~~~
|
||||||
|
16 | }
|
||||||
|
17 | }
|
|
@ -0,0 +1,19 @@
|
||||||
|
type St = string
|
||||||
|
type Ru = rune
|
||||||
|
type Myt = Ru | St
|
||||||
|
|
||||||
|
fn myt_t1(b bool) Myt {
|
||||||
|
match b {
|
||||||
|
true { return St('TRUE') }
|
||||||
|
false { return Ru(`F`) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn myt_t2(b bool) Myt {
|
||||||
|
return match b {
|
||||||
|
true { St('TRUE') }
|
||||||
|
false { `F` }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {}
|
|
@ -0,0 +1,7 @@
|
||||||
|
vlib/v/checker/tests/struct_field_init_with_nobody_anon_fn_err.vv:7:7: error: cannot initialize the fn field with anonymous fn that does not have a body
|
||||||
|
5 | fn main() {
|
||||||
|
6 | _ = App{
|
||||||
|
7 | cb: fn(x int) // Note the missing `{}` (the function body) here
|
||||||
|
| ~~~~~~~~~
|
||||||
|
8 | }
|
||||||
|
9 | }
|
|
@ -0,0 +1,9 @@
|
||||||
|
struct App {
|
||||||
|
cb fn(x int) // the function signature doesn't make a difference
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
_ = App{
|
||||||
|
cb: fn(x int) // Note the missing `{}` (the function body) here
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,8 @@
|
||||||
|
vlib/v/checker/tests/sumtype_has_no_variant_suggestion.vv:14:5: error: `Abc` has no variant `amod.NonExisting`.
|
||||||
|
5 possibilities: `amod.Bcg`, `amod.Xyz`, `AnotherStruct`, `Struct1`, `ThirdStruct`.
|
||||||
|
12 | a := Abc(Struct1{})
|
||||||
|
13 | match a {
|
||||||
|
14 | x.NonExisting { println('----') }
|
||||||
|
| ~~~~~~~~~~~
|
||||||
|
15 | else {}
|
||||||
|
16 | }
|
|
@ -0,0 +1,17 @@
|
||||||
|
import v.checker.tests.amod as x
|
||||||
|
|
||||||
|
struct Struct1 {}
|
||||||
|
|
||||||
|
struct AnotherStruct {}
|
||||||
|
|
||||||
|
struct ThirdStruct {}
|
||||||
|
|
||||||
|
type Abc = AnotherStruct | Struct1 | ThirdStruct | x.Bcg | x.Xyz
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
a := Abc(Struct1{})
|
||||||
|
match a {
|
||||||
|
x.NonExisting { println('----') }
|
||||||
|
else {}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,7 @@
|
||||||
|
vlib/v/checker/tests/unknown_type_in_anon_fn.vv:5:10: error: unknown type `Another`
|
||||||
|
3 | struct Struc{
|
||||||
|
4 | mut:
|
||||||
|
5 | f fn (s Another, i int) ?
|
||||||
|
| ~~~~~~~
|
||||||
|
6 | }
|
||||||
|
7 |
|
|
@ -0,0 +1,8 @@
|
||||||
|
module main
|
||||||
|
|
||||||
|
struct Struc{
|
||||||
|
mut:
|
||||||
|
f fn (s Another, i int) ?
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {}
|
|
@ -0,0 +1,35 @@
|
||||||
|
vlib/v/checker/tests/unnecessary_parenthesis_of_reference.vv:29:10: notice: unnecessary `()`, use `&Quad{....}` instead of `&(Quad{....})`
|
||||||
|
27 | // returns a new Quad that is the sum of the receiver and another one
|
||||||
|
28 | fn (q &Quad) add(other &Quad) &Quad {
|
||||||
|
29 | return &(Quad{q.x + other.x, q.y + other.y, q.z + other.z, q.w + other.w})
|
||||||
|
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
30 | }
|
||||||
|
31 |
|
||||||
|
vlib/v/checker/tests/unnecessary_parenthesis_of_reference.vv:34:10: notice: unnecessary `()`, use `&Quad{....}` instead of `&(Quad{....})`
|
||||||
|
32 | // returns a new Quad that is the difference between the receiver and another one
|
||||||
|
33 | fn (q &Quad) sub(other &Quad) &Quad {
|
||||||
|
34 | return &(Quad{q.x - other.x, q.y - other.y, q.z - other.z, q.w - other.w})
|
||||||
|
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
35 | }
|
||||||
|
36 |
|
||||||
|
vlib/v/checker/tests/unnecessary_parenthesis_of_reference.vv:39:10: notice: unnecessary `()`, use `&Quad{....}` instead of `&(Quad{....})`
|
||||||
|
37 | // returns a new Quad obtained by negating the receiver
|
||||||
|
38 | fn (q &Quad) neg() &Quad {
|
||||||
|
39 | return &(Quad{-q.x, -q.y, -q.z, -q.w})
|
||||||
|
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
40 | }
|
||||||
|
41 |
|
||||||
|
vlib/v/checker/tests/unnecessary_parenthesis_of_reference.vv:44:10: notice: unnecessary `()`, use `&Quad{....}` instead of `&(Quad{....})`
|
||||||
|
42 | // returns a new Quad obtained by multiplying the receiver by a constant
|
||||||
|
43 | fn (q &Quad) mult(factor f64) &Quad {
|
||||||
|
44 | return &(Quad{q.x * factor, q.y * factor, q.z * factor, q.w * factor})
|
||||||
|
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
45 | }
|
||||||
|
46 |
|
||||||
|
vlib/v/checker/tests/unnecessary_parenthesis_of_reference.vv:49:10: notice: unnecessary `()`, use `&Quad{....}` instead of `&(Quad{....})`
|
||||||
|
47 | // returns a new Quad obtained by dividing the receiver by a constant
|
||||||
|
48 | fn (q &Quad) div(factor f64) &Quad {
|
||||||
|
49 | return &(Quad{q.x / factor, q.y / factor, q.z / factor, q.w / factor})
|
||||||
|
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
50 | }
|
||||||
|
51 |
|
|
@ -0,0 +1,69 @@
|
||||||
|
struct Quad {
|
||||||
|
mut:
|
||||||
|
x f64
|
||||||
|
y f64
|
||||||
|
z f64
|
||||||
|
w f64
|
||||||
|
}
|
||||||
|
|
||||||
|
fn (q Quad) get(i int) f64 {
|
||||||
|
return match i {
|
||||||
|
0 { q.x }
|
||||||
|
1 { q.y }
|
||||||
|
2 { q.z }
|
||||||
|
else { q.w }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn (mut q Quad) set(i int, v f64) {
|
||||||
|
match i {
|
||||||
|
0 { q.x = v }
|
||||||
|
1 { q.y = v }
|
||||||
|
2 { q.z = v }
|
||||||
|
else { q.w = v }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// returns a new Quad that is the sum of the receiver and another one
|
||||||
|
fn (q &Quad) add(other &Quad) &Quad {
|
||||||
|
return &(Quad{q.x + other.x, q.y + other.y, q.z + other.z, q.w + other.w})
|
||||||
|
}
|
||||||
|
|
||||||
|
// returns a new Quad that is the difference between the receiver and another one
|
||||||
|
fn (q &Quad) sub(other &Quad) &Quad {
|
||||||
|
return &(Quad{q.x - other.x, q.y - other.y, q.z - other.z, q.w - other.w})
|
||||||
|
}
|
||||||
|
|
||||||
|
// returns a new Quad obtained by negating the receiver
|
||||||
|
fn (q &Quad) neg() &Quad {
|
||||||
|
return &(Quad{-q.x, -q.y, -q.z, -q.w})
|
||||||
|
}
|
||||||
|
|
||||||
|
// returns a new Quad obtained by multiplying the receiver by a constant
|
||||||
|
fn (q &Quad) mult(factor f64) &Quad {
|
||||||
|
return &(Quad{q.x * factor, q.y * factor, q.z * factor, q.w * factor})
|
||||||
|
}
|
||||||
|
|
||||||
|
// returns a new Quad obtained by dividing the receiver by a constant
|
||||||
|
fn (q &Quad) div(factor f64) &Quad {
|
||||||
|
return &(Quad{q.x / factor, q.y / factor, q.z / factor, q.w / factor})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
mut n := Quad{1, 2, 3, 4}
|
||||||
|
|
||||||
|
println(n)
|
||||||
|
println(n.get(0))
|
||||||
|
println(n.get(1))
|
||||||
|
println(n.get(2))
|
||||||
|
println(n.get(3))
|
||||||
|
n.set(0, 5)
|
||||||
|
n.set(1, 6)
|
||||||
|
n.set(2, 7)
|
||||||
|
n.set(3, 8)
|
||||||
|
println(n)
|
||||||
|
println(n.get(0))
|
||||||
|
println(n.get(1))
|
||||||
|
println(n.get(2))
|
||||||
|
println(n.get(3))
|
||||||
|
}
|
|
@ -7,15 +7,14 @@ import strings
|
||||||
|
|
||||||
fn (mut g Gen) get_free_method(typ ast.Type) string {
|
fn (mut g Gen) get_free_method(typ ast.Type) string {
|
||||||
g.autofree_methods[typ] = true
|
g.autofree_methods[typ] = true
|
||||||
styp := g.typ(typ).replace('*', '')
|
|
||||||
mut sym := g.table.sym(g.unwrap_generic(typ))
|
mut sym := g.table.sym(g.unwrap_generic(typ))
|
||||||
mut fn_name := styp_to_free_fn_name(styp)
|
|
||||||
if mut sym.info is ast.Alias {
|
if mut sym.info is ast.Alias {
|
||||||
if sym.info.is_import {
|
if sym.info.is_import {
|
||||||
sym = g.table.sym(sym.info.parent_type)
|
sym = g.table.sym(sym.info.parent_type)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
styp := g.typ(typ).replace('*', '')
|
||||||
|
fn_name := styp_to_free_fn_name(styp)
|
||||||
if sym.has_method_with_generic_parent('free') {
|
if sym.has_method_with_generic_parent('free') {
|
||||||
return fn_name
|
return fn_name
|
||||||
}
|
}
|
||||||
|
@ -30,21 +29,23 @@ fn (mut g Gen) gen_free_methods() {
|
||||||
|
|
||||||
fn (mut g Gen) gen_free_method(typ ast.Type) string {
|
fn (mut g Gen) gen_free_method(typ ast.Type) string {
|
||||||
styp := g.typ(typ).replace('*', '')
|
styp := g.typ(typ).replace('*', '')
|
||||||
mut sym := g.table.sym(g.unwrap_generic(typ))
|
|
||||||
mut fn_name := styp_to_free_fn_name(styp)
|
mut fn_name := styp_to_free_fn_name(styp)
|
||||||
if typ in g.generated_free_methods {
|
deref_typ := typ.set_nr_muls(0)
|
||||||
|
if deref_typ in g.generated_free_methods {
|
||||||
return fn_name
|
return fn_name
|
||||||
}
|
}
|
||||||
g.generated_free_methods[typ] = true
|
g.generated_free_methods[deref_typ] = true
|
||||||
|
|
||||||
|
mut sym := g.table.sym(g.unwrap_generic(typ))
|
||||||
if mut sym.info is ast.Alias {
|
if mut sym.info is ast.Alias {
|
||||||
if sym.info.is_import {
|
if sym.info.is_import {
|
||||||
sym = g.table.sym(sym.info.parent_type)
|
sym = g.table.sym(sym.info.parent_type)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if sym.has_method_with_generic_parent('free') {
|
if sym.has_method_with_generic_parent('free') {
|
||||||
return fn_name
|
return fn_name
|
||||||
}
|
}
|
||||||
|
|
||||||
match mut sym.info {
|
match mut sym.info {
|
||||||
ast.Struct {
|
ast.Struct {
|
||||||
g.gen_free_for_struct(sym.info, styp, fn_name)
|
g.gen_free_for_struct(sym.info, styp, fn_name)
|
||||||
|
|
|
@ -144,7 +144,7 @@ fn (mut g Gen) get_str_fn(typ ast.Type) string {
|
||||||
styp := g.typ(unwrapped)
|
styp := g.typ(unwrapped)
|
||||||
mut sym := g.table.sym(unwrapped)
|
mut sym := g.table.sym(unwrapped)
|
||||||
mut str_fn_name := styp_to_str_fn_name(styp)
|
mut str_fn_name := styp_to_str_fn_name(styp)
|
||||||
if mut sym.info is ast.Alias {
|
if mut sym.info is ast.Alias && !sym.has_method('str') {
|
||||||
if sym.info.is_import {
|
if sym.info.is_import {
|
||||||
sym = g.table.sym(sym.info.parent_type)
|
sym = g.table.sym(sym.info.parent_type)
|
||||||
str_fn_name = styp_to_str_fn_name(sym.name)
|
str_fn_name = styp_to_str_fn_name(sym.name)
|
||||||
|
|
|
@ -20,10 +20,11 @@ const (
|
||||||
// `small` should not be needed, but see: https://stackoverflow.com/questions/5874215/what-is-rpcndr-h
|
// `small` should not be needed, but see: https://stackoverflow.com/questions/5874215/what-is-rpcndr-h
|
||||||
c_reserved = ['array', 'auto', 'bool', 'break', 'calloc', 'case', 'char', 'class', 'complex',
|
c_reserved = ['array', 'auto', 'bool', 'break', 'calloc', 'case', 'char', 'class', 'complex',
|
||||||
'const', 'continue', 'default', 'delete', 'do', 'double', 'else', 'enum', 'error', 'exit',
|
'const', 'continue', 'default', 'delete', 'do', 'double', 'else', 'enum', 'error', 'exit',
|
||||||
'export', 'extern', 'float', 'for', 'free', 'goto', 'if', 'inline', 'int', 'link', 'long',
|
'export', 'extern', 'false', 'float', 'for', 'free', 'goto', 'if', 'inline', 'int', 'link',
|
||||||
'malloc', 'namespace', 'new', 'panic', 'register', 'restrict', 'return', 'short', 'signed',
|
'long', 'malloc', 'namespace', 'new', 'panic', 'register', 'restrict', 'return', 'short',
|
||||||
'sizeof', 'static', 'string', 'struct', 'switch', 'typedef', 'typename', 'union', 'unix',
|
'signed', 'sizeof', 'static', 'string', 'struct', 'switch', 'typedef', 'typename', 'union',
|
||||||
'unsigned', 'void', 'volatile', 'while', 'template', 'small', 'stdout', 'stdin', 'stderr']
|
'unix', 'unsigned', 'void', 'volatile', 'while', 'template', 'true', 'small', 'stdout',
|
||||||
|
'stdin', 'stderr']
|
||||||
c_reserved_map = string_array_to_map(c_reserved)
|
c_reserved_map = string_array_to_map(c_reserved)
|
||||||
// same order as in token.Kind
|
// same order as in token.Kind
|
||||||
cmp_str = ['eq', 'ne', 'gt', 'lt', 'ge', 'le']
|
cmp_str = ['eq', 'ne', 'gt', 'lt', 'ge', 'le']
|
||||||
|
@ -955,7 +956,13 @@ fn (mut g Gen) optional_type_name(t ast.Type) (string, string) {
|
||||||
|
|
||||||
fn (g Gen) optional_type_text(styp string, base string) string {
|
fn (g Gen) optional_type_text(styp string, base string) string {
|
||||||
// replace void with something else
|
// replace void with something else
|
||||||
size := if base == 'void' { 'byte' } else { base }
|
size := if base == 'void' {
|
||||||
|
'byte'
|
||||||
|
} else if base.starts_with('anon_fn') {
|
||||||
|
'void*'
|
||||||
|
} else {
|
||||||
|
base
|
||||||
|
}
|
||||||
ret := 'struct $styp {
|
ret := 'struct $styp {
|
||||||
byte state;
|
byte state;
|
||||||
IError err;
|
IError err;
|
||||||
|
@ -1834,7 +1841,8 @@ fn (mut g Gen) stmt(node ast.Stmt) {
|
||||||
}
|
}
|
||||||
ast.Module {
|
ast.Module {
|
||||||
// g.is_builtin_mod = node.name == 'builtin'
|
// g.is_builtin_mod = node.name == 'builtin'
|
||||||
g.is_builtin_mod = node.name in ['builtin', 'strconv', 'strings', 'dlmalloc']
|
// g.is_builtin_mod = node.name in ['builtin', 'strconv', 'strings', 'dlmalloc']
|
||||||
|
g.is_builtin_mod = util.module_is_builtin(node.name)
|
||||||
// g.cur_mod = node.name
|
// g.cur_mod = node.name
|
||||||
g.cur_mod = node
|
g.cur_mod = node
|
||||||
}
|
}
|
||||||
|
@ -2037,7 +2045,7 @@ fn (mut g Gen) expr_with_cast(expr ast.Expr, got_type_raw ast.Type, expected_typ
|
||||||
&& !expected_type.has_flag(.optional) {
|
&& !expected_type.has_flag(.optional) {
|
||||||
if expr is ast.StructInit && !got_type.is_ptr() {
|
if expr is ast.StructInit && !got_type.is_ptr() {
|
||||||
g.inside_cast_in_heap++
|
g.inside_cast_in_heap++
|
||||||
got_styp := g.cc_type(got_type.ref(), true)
|
got_styp := g.cc_type(got_type_raw.ref(), true)
|
||||||
// TODO: why does cc_type even add this in the first place?
|
// TODO: why does cc_type even add this in the first place?
|
||||||
exp_styp := exp_sym.cname
|
exp_styp := exp_sym.cname
|
||||||
mut fname := 'I_${got_styp}_to_Interface_$exp_styp'
|
mut fname := 'I_${got_styp}_to_Interface_$exp_styp'
|
||||||
|
@ -2048,12 +2056,7 @@ fn (mut g Gen) expr_with_cast(expr ast.Expr, got_type_raw ast.Type, expected_typ
|
||||||
got_styp)
|
got_styp)
|
||||||
g.inside_cast_in_heap--
|
g.inside_cast_in_heap--
|
||||||
} else {
|
} else {
|
||||||
mut got_styp := g.cc_type(got_type, true)
|
got_styp := g.cc_type(got_type_raw, true)
|
||||||
got_styp = match got_styp {
|
|
||||||
'int' { 'int_literal' }
|
|
||||||
'f64' { 'float_literal' }
|
|
||||||
else { got_styp }
|
|
||||||
}
|
|
||||||
got_is_shared := got_type.has_flag(.shared_f)
|
got_is_shared := got_type.has_flag(.shared_f)
|
||||||
exp_styp := if got_is_shared { '__shared__$exp_sym.cname' } else { exp_sym.cname }
|
exp_styp := if got_is_shared { '__shared__$exp_sym.cname' } else { exp_sym.cname }
|
||||||
// If it's shared, we need to use the other caster:
|
// If it's shared, we need to use the other caster:
|
||||||
|
@ -3645,11 +3648,23 @@ fn (mut g Gen) ident(node ast.Ident) {
|
||||||
g.write(util.no_dots(node.name[2..]))
|
g.write(util.no_dots(node.name[2..]))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if node.kind == .constant { // && !node.name.starts_with('g_') {
|
|
||||||
// TODO globals hack
|
|
||||||
g.write('_const_')
|
|
||||||
}
|
|
||||||
mut name := c_name(node.name)
|
mut name := c_name(node.name)
|
||||||
|
if node.kind == .constant { // && !node.name.starts_with('g_') {
|
||||||
|
if g.pref.translated && !g.is_builtin_mod
|
||||||
|
&& !util.module_is_builtin(node.name.all_before_last('.')) {
|
||||||
|
// Don't prepend "_const" to translated C consts,
|
||||||
|
// but only in user code; continue prepending "_const" to builtin consts.
|
||||||
|
mut x := util.no_dots(node.name)
|
||||||
|
if x.starts_with('main__') {
|
||||||
|
x = x['main__'.len..]
|
||||||
|
}
|
||||||
|
g.write(x)
|
||||||
|
return
|
||||||
|
} else {
|
||||||
|
// TODO globals hack
|
||||||
|
g.write('_const_')
|
||||||
|
}
|
||||||
|
}
|
||||||
// TODO: temporary, remove this
|
// TODO: temporary, remove this
|
||||||
node_info := node.info
|
node_info := node.info
|
||||||
mut is_auto_heap := false
|
mut is_auto_heap := false
|
||||||
|
@ -4114,6 +4129,12 @@ fn (mut g Gen) const_decl(node ast.ConstDecl) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
name := c_name(field.name)
|
name := c_name(field.name)
|
||||||
|
const_name := if node.attrs.contains('export') && !g.is_builtin_mod {
|
||||||
|
// TODO this only works for the first const in the group for now
|
||||||
|
node.attrs[0].arg
|
||||||
|
} else {
|
||||||
|
'_const_' + name
|
||||||
|
}
|
||||||
field_expr := field.expr
|
field_expr := field.expr
|
||||||
match field.expr {
|
match field.expr {
|
||||||
ast.ArrayInit {
|
ast.ArrayInit {
|
||||||
|
@ -4121,19 +4142,19 @@ fn (mut g Gen) const_decl(node ast.ConstDecl) {
|
||||||
styp := g.typ(field.expr.typ)
|
styp := g.typ(field.expr.typ)
|
||||||
if g.pref.build_mode != .build_module {
|
if g.pref.build_mode != .build_module {
|
||||||
val := g.expr_string(field.expr)
|
val := g.expr_string(field.expr)
|
||||||
g.definitions.writeln('$styp _const_$name = $val; // fixed array const')
|
g.definitions.writeln('$styp $const_name = $val; // fixed array const')
|
||||||
} else {
|
} else {
|
||||||
g.definitions.writeln('$styp _const_$name; // fixed array const')
|
g.definitions.writeln('$styp $const_name; // fixed array const')
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
g.const_decl_init_later(field.mod, name, field.expr, field.typ, false)
|
g.const_decl_init_later(field.mod, name, field.expr, field.typ, false)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ast.StringLiteral {
|
ast.StringLiteral {
|
||||||
g.definitions.writeln('string _const_$name; // a string literal, inited later')
|
g.definitions.writeln('string $const_name; // a string literal, inited later')
|
||||||
if g.pref.build_mode != .build_module {
|
if g.pref.build_mode != .build_module {
|
||||||
val := g.expr_string(field.expr)
|
val := g.expr_string(field.expr)
|
||||||
g.stringliterals.writeln('\t_const_$name = $val;')
|
g.stringliterals.writeln('\t$const_name = $val;')
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ast.CallExpr {
|
ast.CallExpr {
|
||||||
|
@@ -4177,7 +4198,7 @@ fn (mut g Gen) const_decl(node ast.ConstDecl) {
 
 fn (mut g Gen) const_decl_precomputed(mod string, name string, ct_value ast.ComptTimeConstValue, typ ast.Type) bool {
 	mut styp := g.typ(typ)
-	cname := '_const_$name'
+	cname := if g.pref.translated && !g.is_builtin_mod { name } else { '_const_$name' }
 	$if trace_const_precomputed ? {
 		eprintln('> styp: $styp | cname: $cname | ct_value: $ct_value | $ct_value.type_name()')
 	}
@@ -4246,7 +4267,7 @@ fn (mut g Gen) const_decl_precomputed(mod string, name string, ct_value ast.Comp
 	// TODO: ^ the above for strings, cause:
 	// `error C2099: initializer is not a constant` errors in MSVC,
 	// so fall back to the delayed initialisation scheme:
-	g.definitions.writeln('$styp $cname; // inited later')
+	g.definitions.writeln('$styp $cname; // str inited later')
 	g.init.writeln('\t$cname = _SLIT("$escaped_val");')
 	if g.is_autofree {
 		g.cleanups[mod].writeln('\tstring_free(&$cname);')
@@ -4276,7 +4297,7 @@ fn (mut g Gen) const_decl_init_later(mod string, name string, expr ast.Expr, typ
 	// Initialize more complex consts in `void _vinit/2{}`
 	// (C doesn't allow init expressions that can't be resolved at compile time).
 	mut styp := g.typ(typ)
-	cname := '_const_$name'
+	cname := if g.pref.translated && !g.is_builtin_mod { name } else { '_const_$name' }
 	g.definitions.writeln('$styp $cname; // inited later')
 	if cname == '_const_os__args' {
 		if g.pref.os == .windows {
@@ -4339,6 +4360,13 @@ fn (mut g Gen) global_decl(node ast.GlobalDecl) {
 			}
 		}
 		styp := g.typ(field.typ)
+		mut anon_fn_expr := unsafe { field.expr }
+		if field.has_expr && mut anon_fn_expr is ast.AnonFn {
+			g.gen_anon_fn_decl(mut anon_fn_expr)
+			fn_type_name := g.get_anon_fn_type_name(mut anon_fn_expr, field.name)
+			g.definitions.writeln('$fn_type_name = ${g.table.sym(field.typ).name}; // global')
+			continue
+		}
 		g.definitions.write_string('$visibility_kw$styp $attributes $field.name')
 		if field.has_expr {
 			if field.expr.is_literal() && should_init {
@@ -4524,17 +4552,17 @@ fn (mut g Gen) write_builtin_types() {
 // Sort the types, make sure types that are referenced by other types
 // are added before them.
 fn (mut g Gen) write_sorted_types() {
+	g.type_definitions.writeln('// #start sorted_symbols')
+	defer {
+		g.type_definitions.writeln('// #end sorted_symbols')
+	}
 	mut symbols := []&ast.TypeSymbol{cap: g.table.type_symbols.len} // structs that need to be sorted
 	for sym in g.table.type_symbols {
 		if sym.name !in c.builtins {
 			symbols << sym
 		}
 	}
-	// sort structs
 	sorted_symbols := g.sort_structs(symbols)
-	// Generate C code
-	g.type_definitions.writeln('// builtin types:')
-	g.type_definitions.writeln('//------------------ #endbuiltin')
 	g.write_types(sorted_symbols)
 }
 
@@ -4706,7 +4734,7 @@ fn (mut g Gen) write_types(symbols []&ast.TypeSymbol) {
 }
 
 // sort structs by dependant fields
-fn (g &Gen) sort_structs(typesa []&ast.TypeSymbol) []&ast.TypeSymbol {
+fn (mut g Gen) sort_structs(typesa []&ast.TypeSymbol) []&ast.TypeSymbol {
 	util.timing_start(@METHOD)
 	defer {
 		util.timing_measure(@METHOD)
@@ -4742,12 +4770,23 @@ fn (g &Gen) sort_structs(typesa []&ast.TypeSymbol) []&ast.TypeSymbol {
 				field_deps << dep
 			}
 			for field in sym.info.fields {
-				dep := g.table.sym(field.typ).name
+				if field.typ.is_ptr() {
+					continue
+				}
+				fsym := g.table.sym(field.typ)
+				dep := fsym.name
 				// skip if not in types list or already in deps
-				if dep !in type_names || dep in field_deps || field.typ.is_ptr() {
+				if dep !in type_names || dep in field_deps {
 					continue
 				}
 				field_deps << dep
+				if fsym.info is ast.Alias {
+					xdep := g.table.sym(fsym.info.parent_type).name
+					if xdep !in type_names || xdep in field_deps {
+						continue
+					}
+					field_deps << xdep
+				}
 			}
 		}
 		// ast.Interface {}
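The `sort_structs` hunk above also records an alias's parent type as a dependency, so the parent struct is emitted before any struct that embeds the alias. A small sketch of such a chain (type names are made up):

```v
struct Inner {
	x int
}

// alias whose parent `Inner` must be generated before `Wrapper`
type Inner2 = Inner

struct Wrapper {
	i Inner2
}
```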
@@ -4795,7 +4834,7 @@ fn (mut g Gen) go_before_ternary() string {
 }
 
 fn (mut g Gen) insert_before_stmt(s string) {
-	cur_line := g.go_before_stmt(0)
+	cur_line := g.go_before_stmt(g.inside_ternary)
 	g.writeln(s)
 	g.write(cur_line)
 }
@@ -4815,12 +4854,15 @@ fn (mut g Gen) insert_at(pos int, s string) {
 // Returns the type of the last stmt
 fn (mut g Gen) or_block(var_name string, or_block ast.OrExpr, return_type ast.Type) {
 	cvar_name := c_name(var_name)
-	mr_styp := g.base_type(return_type)
+	mut mr_styp := g.base_type(return_type)
 	is_none_ok := return_type == ast.ovoid_type
 	g.writeln(';')
 	if is_none_ok {
 		g.writeln('if (${cvar_name}.state != 0 && ${cvar_name}.err._typ != _IError_None___index) {')
 	} else {
+		if return_type != 0 && g.table.sym(return_type).kind == .function {
+			mr_styp = 'voidptr'
+		}
 		g.writeln('if (${cvar_name}.state != 0) { /*or block*/ ')
 	}
 	if or_block.kind == .block {
|
||||||
} else if mut expr.left is ast.AnonFn {
|
} else if mut expr.left is ast.AnonFn {
|
||||||
g.gen_anon_fn_decl(mut expr.left)
|
g.gen_anon_fn_decl(mut expr.left)
|
||||||
name = expr.left.decl.name
|
name = expr.left.decl.name
|
||||||
|
} else if expr.is_fn_var {
|
||||||
|
name = g.table.sym(expr.fn_var_type).name
|
||||||
}
|
}
|
||||||
name = util.no_dots(name)
|
name = util.no_dots(name)
|
||||||
if g.pref.obfuscate && g.cur_mod.name == 'main' && name.starts_with('main__') {
|
if g.pref.obfuscate && g.cur_mod.name == 'main' && name.starts_with('main__') {
|
||||||
|
@ -5119,7 +5163,8 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
|
||||||
panic('cgen: obf name "$key" not found, this should never happen')
|
panic('cgen: obf name "$key" not found, this should never happen')
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
g.writeln('// go')
|
g.empty_line = true
|
||||||
|
g.writeln('// start go')
|
||||||
wrapper_struct_name := 'thread_arg_' + name
|
wrapper_struct_name := 'thread_arg_' + name
|
||||||
wrapper_fn_name := name + '_thread_wrapper'
|
wrapper_fn_name := name + '_thread_wrapper'
|
||||||
arg_tmp_var := 'arg_' + tmp
|
arg_tmp_var := 'arg_' + tmp
|
||||||
|
@ -5158,7 +5203,7 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
|
||||||
} else {
|
} else {
|
||||||
'thread_$tmp'
|
'thread_$tmp'
|
||||||
}
|
}
|
||||||
g.writeln('HANDLE $simple_handle = CreateThread(0,0, (LPTHREAD_START_ROUTINE)$wrapper_fn_name, $arg_tmp_var, 0,0);')
|
g.writeln('HANDLE $simple_handle = CreateThread(0, 0, (LPTHREAD_START_ROUTINE)$wrapper_fn_name, $arg_tmp_var, 0, 0);')
|
||||||
g.writeln('if (!$simple_handle) panic_lasterr(tos3("`go ${name}()`: "));')
|
g.writeln('if (!$simple_handle) panic_lasterr(tos3("`go ${name}()`: "));')
|
||||||
if node.is_expr && node.call_expr.return_type != ast.void_type {
|
if node.is_expr && node.call_expr.return_type != ast.void_type {
|
||||||
g.writeln('$gohandle_name thread_$tmp = {')
|
g.writeln('$gohandle_name thread_$tmp = {')
|
||||||
|
@ -5177,7 +5222,7 @@ fn (mut g Gen) go_expr(node ast.GoExpr) {
|
||||||
g.writeln('pthread_detach(thread_$tmp);')
|
g.writeln('pthread_detach(thread_$tmp);')
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
g.writeln('// endgo\n')
|
g.writeln('// end go')
|
||||||
if node.is_expr {
|
if node.is_expr {
|
||||||
handle = 'thread_$tmp'
|
handle = 'thread_$tmp'
|
||||||
// create wait handler for this return type if none exists
|
// create wait handler for this return type if none exists
|
||||||
|
@ -5643,7 +5688,8 @@ static inline __shared__$interface_name ${shared_fn_name}(__shared__$cctype* x)
|
||||||
if fargs.len > 1 {
|
if fargs.len > 1 {
|
||||||
methods_wrapper.write_string(', ')
|
methods_wrapper.write_string(', ')
|
||||||
}
|
}
|
||||||
methods_wrapper.writeln('${fargs[1..].join(', ')});')
|
args := fargs[1..].join(', ')
|
||||||
|
methods_wrapper.writeln('$args);')
|
||||||
} else {
|
} else {
|
||||||
if parameter_name.starts_with('__shared__') {
|
if parameter_name.starts_with('__shared__') {
|
||||||
methods_wrapper.writeln('${method_call}(${fargs.join(', ')}->val);')
|
methods_wrapper.writeln('${method_call}(${fargs.join(', ')}->val);')
|
||||||
|
|
|
@@ -203,7 +203,7 @@ fn cgen_attrs(attrs []ast.Attr) []string {
 
 fn (mut g Gen) comptime_at(node ast.AtExpr) {
 	if node.kind == .vmod_file {
-		val := cnewlines(node.val.replace('\r', ''))
+		val := cescape_nonascii(util.smart_quote(node.val, false))
 		g.write('_SLIT("$val")')
 	} else {
 		val := node.val.replace('\\', '\\\\')
|
||||||
return fargs, fargtypes, heap_promoted
|
return fargs, fargtypes, heap_promoted
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn (mut g Gen) get_anon_fn_type_name(mut node ast.AnonFn, var_name string) string {
|
||||||
|
mut builder := strings.new_builder(64)
|
||||||
|
return_styp := g.typ(node.decl.return_type)
|
||||||
|
builder.write_string('$return_styp (*$var_name) (')
|
||||||
|
if node.decl.params.len == 0 {
|
||||||
|
builder.write_string('void)')
|
||||||
|
} else {
|
||||||
|
for i, param in node.decl.params {
|
||||||
|
param_styp := g.typ(param.typ)
|
||||||
|
builder.write_string('$param_styp $param.name')
|
||||||
|
if i != node.decl.params.len - 1 {
|
||||||
|
builder.write_string(', ')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
builder.write_string(')')
|
||||||
|
}
|
||||||
|
return builder.str()
|
||||||
|
}
|
||||||
|
|
||||||
fn (mut g Gen) call_expr(node ast.CallExpr) {
|
fn (mut g Gen) call_expr(node ast.CallExpr) {
|
||||||
// g.write('/*call expr*/')
|
// g.write('/*call expr*/')
|
||||||
// NOTE: everything could be done this way
|
// NOTE: everything could be done this way
|
||||||
|
|
|
@ -115,92 +115,91 @@ fn (mut g Gen) if_expr(node ast.IfExpr) {
|
||||||
cvar_name := guard_vars[guard_idx]
|
cvar_name := guard_vars[guard_idx]
|
||||||
g.writeln('\tIError err = ${cvar_name}.err;')
|
g.writeln('\tIError err = ${cvar_name}.err;')
|
||||||
}
|
}
|
||||||
} else {
|
} else if branch.cond is ast.IfGuardExpr {
|
||||||
match branch.cond {
|
mut var_name := guard_vars[i]
|
||||||
ast.IfGuardExpr {
|
mut short_opt := false
|
||||||
mut var_name := guard_vars[i]
|
if var_name == '' {
|
||||||
mut short_opt := false
|
short_opt = true // we don't need a further tmp, so use the one we'll get later
|
||||||
if var_name == '' {
|
var_name = g.new_tmp_var()
|
||||||
short_opt = true // we don't need a further tmp, so use the one we'll get later
|
guard_vars[i] = var_name // for `else`
|
||||||
var_name = g.new_tmp_var()
|
g.tmp_count--
|
||||||
guard_vars[i] = var_name // for `else`
|
g.writeln('if (${var_name}.state == 0) {')
|
||||||
g.tmp_count--
|
} else {
|
||||||
g.writeln('if (${var_name}.state == 0) {')
|
g.write('if ($var_name = ')
|
||||||
|
g.expr(branch.cond.expr)
|
||||||
|
g.writeln(', ${var_name}.state == 0) {')
|
||||||
|
}
|
||||||
|
if short_opt || branch.cond.vars[0].name != '_' {
|
||||||
|
base_type := g.base_type(branch.cond.expr_type)
|
||||||
|
if short_opt {
|
||||||
|
cond_var_name := if branch.cond.vars[0].name == '_' {
|
||||||
|
'_dummy_${g.tmp_count + 1}'
|
||||||
} else {
|
} else {
|
||||||
g.write('if ($var_name = ')
|
branch.cond.vars[0].name
|
||||||
g.expr(branch.cond.expr)
|
|
||||||
g.writeln(', ${var_name}.state == 0) {')
|
|
||||||
}
|
}
|
||||||
if short_opt || branch.cond.vars[0].name != '_' {
|
g.write('\t$base_type $cond_var_name = ')
|
||||||
base_type := g.base_type(branch.cond.expr_type)
|
g.expr(branch.cond.expr)
|
||||||
if short_opt {
|
g.writeln(';')
|
||||||
cond_var_name := if branch.cond.vars[0].name == '_' {
|
} else {
|
||||||
'_dummy_${g.tmp_count + 1}'
|
mut is_auto_heap := false
|
||||||
} else {
|
if branch.stmts.len > 0 {
|
||||||
branch.cond.vars[0].name
|
scope := g.file.scope.innermost(ast.Node(branch.stmts[branch.stmts.len - 1]).pos().pos)
|
||||||
}
|
if v := scope.find_var(branch.cond.vars[0].name) {
|
||||||
g.write('\t$base_type $cond_var_name = ')
|
is_auto_heap = v.is_auto_heap
|
||||||
g.expr(branch.cond.expr)
|
}
|
||||||
g.writeln(';')
|
}
|
||||||
|
if branch.cond.vars.len == 1 {
|
||||||
|
left_var_name := c_name(branch.cond.vars[0].name)
|
||||||
|
if is_auto_heap {
|
||||||
|
g.writeln('\t$base_type* $left_var_name = HEAP($base_type, *($base_type*)${var_name}.data);')
|
||||||
} else {
|
} else {
|
||||||
mut is_auto_heap := false
|
g.writeln('\t$base_type $left_var_name = *($base_type*)${var_name}.data;')
|
||||||
if branch.stmts.len > 0 {
|
}
|
||||||
scope := g.file.scope.innermost(ast.Node(branch.stmts[branch.stmts.len - 1]).pos().pos)
|
} else if branch.cond.vars.len > 1 {
|
||||||
if v := scope.find_var(branch.cond.vars[0].name) {
|
for vi, var in branch.cond.vars {
|
||||||
is_auto_heap = v.is_auto_heap
|
left_var_name := c_name(var.name)
|
||||||
}
|
sym := g.table.sym(branch.cond.expr_type)
|
||||||
}
|
if sym.kind == .multi_return {
|
||||||
if branch.cond.vars.len == 1 {
|
mr_info := sym.info as ast.MultiReturn
|
||||||
left_var_name := c_name(branch.cond.vars[0].name)
|
if mr_info.types.len == branch.cond.vars.len {
|
||||||
if is_auto_heap {
|
var_typ := g.typ(mr_info.types[vi])
|
||||||
g.writeln('\t$base_type* $left_var_name = HEAP($base_type, *($base_type*)${var_name}.data);')
|
if is_auto_heap {
|
||||||
} else {
|
g.writeln('\t$var_typ* $left_var_name = (HEAP($base_type, *($base_type*)${var_name}.data).arg$vi);')
|
||||||
g.writeln('\t$base_type $left_var_name = *($base_type*)${var_name}.data;')
|
} else {
|
||||||
}
|
g.writeln('\t$var_typ $left_var_name = (*($base_type*)${var_name}.data).arg$vi;')
|
||||||
} else if branch.cond.vars.len > 1 {
|
|
||||||
for vi, var in branch.cond.vars {
|
|
||||||
left_var_name := c_name(var.name)
|
|
||||||
sym := g.table.sym(branch.cond.expr_type)
|
|
||||||
if sym.kind == .multi_return {
|
|
||||||
mr_info := sym.info as ast.MultiReturn
|
|
||||||
if mr_info.types.len == branch.cond.vars.len {
|
|
||||||
var_typ := g.typ(mr_info.types[vi])
|
|
||||||
if is_auto_heap {
|
|
||||||
g.writeln('\t$var_typ* $left_var_name = (HEAP($base_type, *($base_type*)${var_name}.data).arg$vi);')
|
|
||||||
} else {
|
|
||||||
g.writeln('\t$var_typ $left_var_name = (*($base_type*)${var_name}.data).arg$vi;')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
}
|
||||||
mut no_needs_par := false
|
} else {
|
||||||
if branch.cond is ast.InfixExpr {
|
mut no_needs_par := false
|
||||||
if branch.cond.op == .key_in && branch.cond.left !is ast.InfixExpr
|
if branch.cond is ast.InfixExpr {
|
||||||
&& branch.cond.right is ast.ArrayInit {
|
if branch.cond.op == .key_in && branch.cond.left !is ast.InfixExpr
|
||||||
no_needs_par = true
|
&& branch.cond.right is ast.ArrayInit {
|
||||||
}
|
no_needs_par = true
|
||||||
}
|
|
||||||
if no_needs_par {
|
|
||||||
g.write('if ')
|
|
||||||
} else {
|
|
||||||
g.write('if (')
|
|
||||||
}
|
|
||||||
g.expr(branch.cond)
|
|
||||||
if no_needs_par {
|
|
||||||
g.writeln(' {')
|
|
||||||
} else {
|
|
||||||
g.writeln(') {')
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if no_needs_par {
|
||||||
|
g.write('if ')
|
||||||
|
} else {
|
||||||
|
g.write('if (')
|
||||||
|
}
|
||||||
|
g.expr(branch.cond)
|
||||||
|
if no_needs_par {
|
||||||
|
g.writeln(' {')
|
||||||
|
} else {
|
||||||
|
g.writeln(') {')
|
||||||
|
}
|
||||||
}
|
}
|
||||||
if needs_tmp_var {
|
if needs_tmp_var {
|
||||||
|
if node.is_expr && g.table.sym(node.typ).kind == .sum_type {
|
||||||
|
g.expected_cast_type = node.typ
|
||||||
|
}
|
||||||
g.stmts_with_tmp_var(branch.stmts, tmp)
|
g.stmts_with_tmp_var(branch.stmts, tmp)
|
||||||
|
g.expected_cast_type = 0
|
||||||
} else {
|
} else {
|
||||||
// restore if_expr stmt header pos
|
// restore if_expr stmt header pos
|
||||||
stmt_pos := g.nth_stmt_pos(0)
|
stmt_pos := g.nth_stmt_pos(0)
|
||||||
|
|
|
@ -62,6 +62,7 @@ fn (mut g Gen) range_expr(node ast.IndexExpr, range ast.RangeExpr) {
|
||||||
mut tmp_opt := ''
|
mut tmp_opt := ''
|
||||||
mut cur_line := ''
|
mut cur_line := ''
|
||||||
mut gen_or := node.or_expr.kind != .absent || node.is_option
|
mut gen_or := node.or_expr.kind != .absent || node.is_option
|
||||||
|
mut tmp_left := ''
|
||||||
|
|
||||||
if sym.kind == .string {
|
if sym.kind == .string {
|
||||||
if node.is_gated {
|
if node.is_gated {
|
||||||
|
@ -82,6 +83,15 @@ fn (mut g Gen) range_expr(node ast.IndexExpr, range ast.RangeExpr) {
|
||||||
}
|
}
|
||||||
g.expr(node.left)
|
g.expr(node.left)
|
||||||
} else if sym.kind == .array {
|
} else if sym.kind == .array {
|
||||||
|
if !range.has_high {
|
||||||
|
tmp_left = g.new_tmp_var()
|
||||||
|
tmp_type := g.typ(node.left_type)
|
||||||
|
g.insert_before_stmt('${util.tabs(g.indent)}$tmp_type $tmp_left;')
|
||||||
|
// (tmp = expr, array_slice(...))
|
||||||
|
g.write('($tmp_left = ')
|
||||||
|
g.expr(node.left)
|
||||||
|
g.write(', ')
|
||||||
|
}
|
||||||
if node.is_gated {
|
if node.is_gated {
|
||||||
g.write('array_slice_ni(')
|
g.write('array_slice_ni(')
|
||||||
} else {
|
} else {
|
||||||
|
@ -90,7 +100,11 @@ fn (mut g Gen) range_expr(node ast.IndexExpr, range ast.RangeExpr) {
|
||||||
if node.left_type.is_ptr() {
|
if node.left_type.is_ptr() {
|
||||||
g.write('*')
|
g.write('*')
|
||||||
}
|
}
|
||||||
g.expr(node.left)
|
if range.has_high {
|
||||||
|
g.expr(node.left)
|
||||||
|
} else {
|
||||||
|
g.write(tmp_left)
|
||||||
|
}
|
||||||
} else if sym.kind == .array_fixed {
|
} else if sym.kind == .array_fixed {
|
||||||
// Convert a fixed array to V array when doing `fixed_arr[start..end]`
|
// Convert a fixed array to V array when doing `fixed_arr[start..end]`
|
||||||
info := sym.info as ast.ArrayFixed
|
info := sym.info as ast.ArrayFixed
|
||||||
|
@ -101,17 +115,8 @@ fn (mut g Gen) range_expr(node ast.IndexExpr, range ast.RangeExpr) {
|
||||||
g.write('array_slice(')
|
g.write('array_slice(')
|
||||||
}
|
}
|
||||||
g.write('new_array_from_c_array${noscan}(')
|
g.write('new_array_from_c_array${noscan}(')
|
||||||
g.write('$info.size')
|
ctype := g.typ(info.elem_type)
|
||||||
g.write(', $info.size')
|
g.write('$info.size, $info.size, sizeof($ctype), ')
|
||||||
g.write(', sizeof(')
|
|
||||||
if node.left_type.is_ptr() {
|
|
||||||
g.write('(*')
|
|
||||||
}
|
|
||||||
g.expr(node.left)
|
|
||||||
if node.left_type.is_ptr() {
|
|
||||||
g.write(')')
|
|
||||||
}
|
|
||||||
g.write('[0]), ')
|
|
||||||
if node.left_type.is_ptr() {
|
if node.left_type.is_ptr() {
|
||||||
g.write('*')
|
g.write('*')
|
||||||
}
|
}
|
||||||
|
@ -132,15 +137,17 @@ fn (mut g Gen) range_expr(node ast.IndexExpr, range ast.RangeExpr) {
|
||||||
} else if sym.kind == .array_fixed {
|
} else if sym.kind == .array_fixed {
|
||||||
info := sym.info as ast.ArrayFixed
|
info := sym.info as ast.ArrayFixed
|
||||||
g.write('$info.size')
|
g.write('$info.size')
|
||||||
} else if node.left_type.is_ptr() {
|
} else if sym.kind == .array {
|
||||||
g.write('(')
|
if node.left_type.is_ptr() {
|
||||||
g.write('*')
|
g.write('$tmp_left->')
|
||||||
g.expr(node.left)
|
} else {
|
||||||
g.write(')')
|
g.write('${tmp_left}.')
|
||||||
g.write('.len')
|
}
|
||||||
|
g.write('len)')
|
||||||
} else {
|
} else {
|
||||||
|
g.write('(')
|
||||||
g.expr(node.left)
|
g.expr(node.left)
|
||||||
g.write('.len')
|
g.write(').len')
|
||||||
}
|
}
|
||||||
g.write(')')
|
g.write(')')
|
||||||
|
|
||||||
|
@ -157,23 +164,22 @@ fn (mut g Gen) index_of_array(node ast.IndexExpr, sym ast.TypeSymbol) {
|
||||||
gen_or := node.or_expr.kind != .absent || node.is_option
|
gen_or := node.or_expr.kind != .absent || node.is_option
|
||||||
left_is_ptr := node.left_type.is_ptr()
|
left_is_ptr := node.left_type.is_ptr()
|
||||||
info := sym.info as ast.Array
|
info := sym.info as ast.Array
|
||||||
elem_type_str := g.typ(info.elem_type)
|
mut elem_type_str := g.typ(info.elem_type)
|
||||||
elem_type := info.elem_type
|
elem_type := info.elem_type
|
||||||
elem_typ := g.table.sym(elem_type)
|
elem_sym := g.table.sym(elem_type)
|
||||||
|
if elem_sym.kind == .function {
|
||||||
|
elem_type_str = 'voidptr'
|
||||||
|
}
|
||||||
// `vals[i].field = x` is an exception and requires `array_get`:
|
// `vals[i].field = x` is an exception and requires `array_get`:
|
||||||
// `(*(Val*)array_get(vals, i)).field = x;`
|
// `(*(Val*)array_get(vals, i)).field = x;`
|
||||||
is_selector := node.left is ast.SelectorExpr
|
is_selector := node.left is ast.SelectorExpr
|
||||||
if g.is_assign_lhs && !is_selector && node.is_setter {
|
if g.is_assign_lhs && !is_selector && node.is_setter {
|
||||||
is_direct_array_access := (g.fn_decl != 0 && g.fn_decl.is_direct_arr) || node.is_direct
|
is_direct_array_access := (g.fn_decl != 0 && g.fn_decl.is_direct_arr) || node.is_direct
|
||||||
is_op_assign := g.assign_op != .assign && info.elem_type != ast.string_type
|
is_op_assign := g.assign_op != .assign && info.elem_type != ast.string_type
|
||||||
array_ptr_type_str := match elem_typ.kind {
|
|
||||||
.function { 'voidptr*' }
|
|
||||||
else { '$elem_type_str*' }
|
|
||||||
}
|
|
||||||
if is_direct_array_access {
|
if is_direct_array_access {
|
||||||
g.write('(($array_ptr_type_str)')
|
g.write('(($elem_type_str*)')
|
||||||
} else if is_op_assign {
|
} else if is_op_assign {
|
||||||
g.write('(*($array_ptr_type_str)array_get(')
|
g.write('(*($elem_type_str*)array_get(')
|
||||||
if left_is_ptr && !node.left_type.has_flag(.shared_f) {
|
if left_is_ptr && !node.left_type.has_flag(.shared_f) {
|
||||||
g.write('*')
|
g.write('*')
|
||||||
}
|
}
|
||||||
|
@ -217,11 +223,7 @@ fn (mut g Gen) index_of_array(node ast.IndexExpr, sym ast.TypeSymbol) {
|
||||||
}
|
}
|
||||||
*/
|
*/
|
||||||
if need_wrapper {
|
if need_wrapper {
|
||||||
if elem_typ.kind == .function {
|
g.write(', &($elem_type_str[]) { ')
|
||||||
g.write(', &(voidptr[]) { ')
|
|
||||||
} else {
|
|
||||||
g.write(', &($elem_type_str[]) { ')
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
g.write(', &')
|
g.write(', &')
|
||||||
}
|
}
|
||||||
|
@ -232,10 +234,6 @@ fn (mut g Gen) index_of_array(node ast.IndexExpr, sym ast.TypeSymbol) {
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
is_direct_array_access := (g.fn_decl != 0 && g.fn_decl.is_direct_arr) || node.is_direct
|
is_direct_array_access := (g.fn_decl != 0 && g.fn_decl.is_direct_arr) || node.is_direct
|
||||||
array_ptr_type_str := match elem_typ.kind {
|
|
||||||
.function { 'voidptr*' }
|
|
||||||
else { '$elem_type_str*' }
|
|
||||||
}
|
|
||||||
// do not clone inside `opt_ok(opt_ok(&(string[]) {..})` before returns
|
// do not clone inside `opt_ok(opt_ok(&(string[]) {..})` before returns
|
||||||
needs_clone := info.elem_type == ast.string_type_idx && g.is_autofree && !(g.inside_return
|
needs_clone := info.elem_type == ast.string_type_idx && g.is_autofree && !(g.inside_return
|
||||||
&& g.fn_decl.return_type.has_flag(.optional)) && !g.is_assign_lhs
|
&& g.fn_decl.return_type.has_flag(.optional)) && !g.is_assign_lhs
|
||||||
|
@ -250,24 +248,24 @@ fn (mut g Gen) index_of_array(node ast.IndexExpr, sym ast.TypeSymbol) {
|
||||||
tmp_opt := if gen_or { g.new_tmp_var() } else { '' }
|
tmp_opt := if gen_or { g.new_tmp_var() } else { '' }
|
||||||
tmp_opt_ptr := if gen_or { g.new_tmp_var() } else { '' }
|
tmp_opt_ptr := if gen_or { g.new_tmp_var() } else { '' }
|
||||||
if gen_or {
|
if gen_or {
|
||||||
g.write('$array_ptr_type_str $tmp_opt_ptr = ($array_ptr_type_str)/*ee elem_ptr_typ */(array_get_with_check(')
|
g.write('$elem_type_str* $tmp_opt_ptr = ($elem_type_str*)/*ee elem_ptr_typ */(array_get_with_check(')
|
||||||
} else {
|
} else {
|
||||||
if needs_clone {
|
if needs_clone {
|
||||||
g.write('/*2*/string_clone(')
|
g.write('/*2*/string_clone(')
|
||||||
}
|
}
|
||||||
if g.is_fn_index_call {
|
if g.is_fn_index_call {
|
||||||
if elem_typ.info is ast.FnType {
|
if elem_sym.info is ast.FnType {
|
||||||
g.write('((')
|
g.write('((')
|
||||||
g.write_fn_ptr_decl(&elem_typ.info, '')
|
g.write_fn_ptr_decl(&elem_sym.info, '')
|
||||||
g.write(')(*($array_ptr_type_str)/*ee elem_typ */array_get(')
|
g.write(')(*($elem_type_str*)/*ee elem_sym */array_get(')
|
||||||
}
|
}
|
||||||
if left_is_ptr && !node.left_type.has_flag(.shared_f) {
|
if left_is_ptr && !node.left_type.has_flag(.shared_f) {
|
||||||
g.write('*')
|
g.write('*')
|
||||||
}
|
}
|
||||||
} else if is_direct_array_access {
|
} else if is_direct_array_access {
|
||||||
g.write('(($array_ptr_type_str)')
|
g.write('(($elem_type_str*)')
|
||||||
} else {
|
} else {
|
||||||
g.write('(*($array_ptr_type_str)/*ee elem_typ */array_get(')
|
g.write('(*($elem_type_str*)/*ee elem_sym */array_get(')
|
||||||
if left_is_ptr && !node.left_type.has_flag(.shared_f) {
|
if left_is_ptr && !node.left_type.has_flag(.shared_f) {
|
||||||
g.write('*')
|
g.write('*')
|
||||||
}
|
}
|
||||||
|
@ -358,9 +356,12 @@ fn (mut g Gen) index_of_map(node ast.IndexExpr, sym ast.TypeSymbol) {
|
||||||
info := sym.info as ast.Map
|
info := sym.info as ast.Map
|
||||||
key_type_str := g.typ(info.key_type)
|
key_type_str := g.typ(info.key_type)
|
||||||
elem_type := info.value_type
|
elem_type := info.value_type
|
||||||
elem_type_str := g.typ(elem_type)
|
mut elem_type_str := g.typ(elem_type)
|
||||||
elem_typ := g.table.sym(elem_type)
|
elem_sym := g.table.sym(elem_type)
|
||||||
get_and_set_types := elem_typ.kind in [.struct_, .map]
|
if elem_sym.kind == .function {
|
||||||
|
elem_type_str = 'voidptr'
|
||||||
|
}
|
||||||
|
get_and_set_types := elem_sym.kind in [.struct_, .map]
|
||||||
if g.is_assign_lhs && !g.is_arraymap_set && !get_and_set_types {
|
if g.is_assign_lhs && !g.is_arraymap_set && !get_and_set_types {
|
||||||
if g.assign_op == .assign || info.value_type == ast.string_type {
|
if g.assign_op == .assign || info.value_type == ast.string_type {
|
||||||
g.is_arraymap_set = true
|
g.is_arraymap_set = true
|
||||||
|
@ -394,12 +395,8 @@ fn (mut g Gen) index_of_map(node ast.IndexExpr, sym ast.TypeSymbol) {
|
||||||
g.is_arraymap_set = old_is_arraymap_set
|
g.is_arraymap_set = old_is_arraymap_set
|
||||||
g.is_assign_lhs = old_is_assign_lhs
|
g.is_assign_lhs = old_is_assign_lhs
|
||||||
g.write('}')
|
g.write('}')
|
||||||
if elem_typ.kind == .function {
|
g.arraymap_set_pos = g.out.len
|
||||||
g.write(', &(voidptr[]) { ')
|
g.write(', &($elem_type_str[]) { ')
|
||||||
} else {
|
|
||||||
g.arraymap_set_pos = g.out.len
|
|
||||||
g.write(', &($elem_type_str[]) { ')
|
|
||||||
}
|
|
||||||
if g.assign_op != .assign && info.value_type != ast.string_type {
|
if g.assign_op != .assign && info.value_type != ast.string_type {
|
||||||
zero := g.type_default(info.value_type)
|
zero := g.type_default(info.value_type)
|
||||||
g.write('$zero })))')
|
g.write('$zero })))')
|
||||||
|
@ -438,13 +435,11 @@ fn (mut g Gen) index_of_map(node ast.IndexExpr, sym ast.TypeSymbol) {
|
||||||
g.write('$elem_type_str* $tmp_opt_ptr = ($elem_type_str*)/*ee elem_ptr_typ */(map_get_check(')
|
g.write('$elem_type_str* $tmp_opt_ptr = ($elem_type_str*)/*ee elem_ptr_typ */(map_get_check(')
|
||||||
} else {
|
} else {
|
||||||
if g.is_fn_index_call {
|
if g.is_fn_index_call {
|
||||||
if elem_typ.info is ast.FnType {
|
if elem_sym.info is ast.FnType {
|
||||||
g.write('((')
|
g.write('((')
|
||||||
g.write_fn_ptr_decl(&elem_typ.info, '')
|
g.write_fn_ptr_decl(&elem_sym.info, '')
|
||||||
g.write(')(*(voidptr*)map_get(')
|
g.write(')(*(voidptr*)map_get(')
|
||||||
}
|
}
|
||||||
} else if elem_typ.kind == .function {
|
|
||||||
g.write('(*(voidptr*)map_get(')
|
|
||||||
} else {
|
} else {
|
||||||
g.write('(*($elem_type_str*)map_get(')
|
g.write('(*($elem_type_str*)map_get(')
|
||||||
}
|
}
|
||||||
|
@ -470,8 +465,6 @@ fn (mut g Gen) index_of_map(node ast.IndexExpr, sym ast.TypeSymbol) {
|
||||||
g.write('))')
|
g.write('))')
|
||||||
} else if g.is_fn_index_call {
|
} else if g.is_fn_index_call {
|
||||||
g.write(', &(voidptr[]){ $zero })))')
|
g.write(', &(voidptr[]){ $zero })))')
|
||||||
} else if elem_typ.kind == .function {
|
|
||||||
g.write(', &(voidptr[]){ $zero }))')
|
|
||||||
} else {
|
} else {
|
||||||
g.write(', &($elem_type_str[]){ $zero }))')
|
g.write(', &($elem_type_str[]){ $zero }))')
|
||||||
}
|
}
|
||||||
|
|
|
@ -507,7 +507,7 @@ fn (mut g Gen) infix_expr_is_op(node ast.InfixExpr) {
|
||||||
else { ast.Type(0) }
|
else { ast.Type(0) }
|
||||||
}
|
}
|
||||||
sub_sym := g.table.sym(sub_type)
|
sub_sym := g.table.sym(sub_type)
|
||||||
g.write('_${c_name(sym.name)}_${c_name(sub_sym.name)}_index')
|
g.write('_${sym.cname}_${sub_sym.cname}_index')
|
||||||
return
|
return
|
||||||
} else if sym.kind == .sum_type {
|
} else if sym.kind == .sum_type {
|
||||||
g.write('_typ $cmp_op ')
|
g.write('_typ $cmp_op ')
|
||||||
|
|
|
@ -233,7 +233,7 @@ fn (mut g Gen) match_expr_switch(node ast.MatchExpr, is_expr bool, cond_var stri
|
||||||
}
|
}
|
||||||
g.writeln(') {')
|
g.writeln(') {')
|
||||||
g.stmts_with_tmp_var(range_branch.stmts, tmp_var)
|
g.stmts_with_tmp_var(range_branch.stmts, tmp_var)
|
||||||
g.writeln('break;')
|
g.writeln('\tbreak;')
|
||||||
g.writeln('}')
|
g.writeln('}')
|
||||||
}
|
}
|
||||||
g.indent--
|
g.indent--
|
||||||
|
@ -259,7 +259,8 @@ fn (mut g Gen) match_expr_switch(node ast.MatchExpr, is_expr bool, cond_var stri
|
||||||
}
|
}
|
||||||
g.stmts_with_tmp_var(branch.stmts, tmp_var)
|
g.stmts_with_tmp_var(branch.stmts, tmp_var)
|
||||||
g.expected_cast_type = 0
|
g.expected_cast_type = 0
|
||||||
g.writeln('} break;')
|
g.writeln('\tbreak;')
|
||||||
|
g.writeln('}')
|
||||||
g.indent--
|
g.indent--
|
||||||
}
|
}
|
||||||
if range_branches.len > 0 && !default_generated {
|
if range_branches.len > 0 && !default_generated {
|
||||||
|
@ -297,7 +298,7 @@ fn (mut g Gen) match_expr_switch(node ast.MatchExpr, is_expr bool, cond_var stri
|
||||||
}
|
}
|
||||||
g.writeln(') {')
|
g.writeln(') {')
|
||||||
g.stmts_with_tmp_var(range_branch.stmts, tmp_var)
|
g.stmts_with_tmp_var(range_branch.stmts, tmp_var)
|
||||||
g.writeln('break;')
|
g.writeln('\tbreak;')
|
||||||
g.writeln('}')
|
g.writeln('}')
|
||||||
}
|
}
|
||||||
g.indent--
|
g.indent--
|
||||||
|
|
|
@ -70,8 +70,8 @@ fn (mut g Gen) gen_expr_to_string(expr ast.Expr, etype ast.Type) {
|
||||||
typ = typ.clear_flag(.shared_f).set_nr_muls(0)
|
typ = typ.clear_flag(.shared_f).set_nr_muls(0)
|
||||||
}
|
}
|
||||||
mut sym := g.table.sym(typ)
|
mut sym := g.table.sym(typ)
|
||||||
// when type is alias, print the aliased value
|
// when type is alias and doesn't has `str()`, print the aliased value
|
||||||
if mut sym.info is ast.Alias {
|
if mut sym.info is ast.Alias && !sym.has_method('str') {
|
||||||
parent_sym := g.table.sym(sym.info.parent_type)
|
parent_sym := g.table.sym(sym.info.parent_type)
|
||||||
if parent_sym.has_method('str') {
|
if parent_sym.has_method('str') {
|
||||||
typ = sym.info.parent_type
|
typ = sym.info.parent_type
|
||||||
|
|
|
@ -604,6 +604,13 @@ fn (mut p Parser) prefix_expr() ast.Expr {
|
||||||
return right
|
return right
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if mut right is ast.ParExpr {
|
||||||
|
if right.expr is ast.StructInit {
|
||||||
|
p.note_with_pos('unnecessary `()`, use `&$right.expr` instead of `&($right.expr)`',
|
||||||
|
right.pos)
|
||||||
|
right = right.expr
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
mut or_stmts := []ast.Stmt{}
|
mut or_stmts := []ast.Stmt{}
|
||||||
mut or_kind := ast.OrKind.absent
|
mut or_kind := ast.OrKind.absent
|
||||||
|
|
|
@ -107,6 +107,10 @@ fn (mut p Parser) if_expr(is_comptime bool) ast.IfExpr {
|
||||||
p.check(.decl_assign)
|
p.check(.decl_assign)
|
||||||
comments << p.eat_comments()
|
comments << p.eat_comments()
|
||||||
expr := p.expr(0)
|
expr := p.expr(0)
|
||||||
|
if expr !in [ast.CallExpr, ast.IndexExpr, ast.PrefixExpr] {
|
||||||
|
p.error_with_pos('if guard condition expression is illegal, it should return optional',
|
||||||
|
expr.pos())
|
||||||
|
}
|
||||||
|
|
||||||
cond = ast.IfGuardExpr{
|
cond = ast.IfGuardExpr{
|
||||||
vars: vars
|
vars: vars
|
||||||
|
|
|
@ -1943,6 +1943,7 @@ pub fn (mut p Parser) parse_ident(language ast.Language) ast.Ident {
|
||||||
p.register_auto_import('sync')
|
p.register_auto_import('sync')
|
||||||
}
|
}
|
||||||
mut_pos := p.tok.pos()
|
mut_pos := p.tok.pos()
|
||||||
|
modifier_kind := p.tok.kind
|
||||||
is_mut := p.tok.kind == .key_mut || is_shared || is_atomic
|
is_mut := p.tok.kind == .key_mut || is_shared || is_atomic
|
||||||
if is_mut {
|
if is_mut {
|
||||||
p.next()
|
p.next()
|
||||||
|
@ -1956,7 +1957,11 @@ pub fn (mut p Parser) parse_ident(language ast.Language) ast.Ident {
|
||||||
p.next()
|
p.next()
|
||||||
}
|
}
|
||||||
if p.tok.kind != .name {
|
if p.tok.kind != .name {
|
||||||
p.error('unexpected token `$p.tok.lit`')
|
if is_mut || is_static || is_volatile {
|
||||||
|
p.error_with_pos('the `$modifier_kind` keyword is invalid here', mut_pos)
|
||||||
|
} else {
|
||||||
|
p.error('unexpected token `$p.tok.lit`')
|
||||||
|
}
|
||||||
return ast.Ident{
|
return ast.Ident{
|
||||||
scope: p.scope
|
scope: p.scope
|
||||||
}
|
}
|
||||||
|
@ -2190,7 +2195,7 @@ pub fn (mut p Parser) name_expr() ast.Expr {
|
||||||
}
|
}
|
||||||
// Raw string (`s := r'hello \n ')
|
// Raw string (`s := r'hello \n ')
|
||||||
if p.peek_tok.kind == .string && !p.inside_str_interp && p.peek_token(2).kind != .colon {
|
if p.peek_tok.kind == .string && !p.inside_str_interp && p.peek_token(2).kind != .colon {
|
||||||
if p.tok.lit in ['r', 'c', 'js'] && p.tok.kind == .name {
|
if p.tok.kind == .name && p.tok.lit in ['r', 'c', 'js'] {
|
||||||
return p.string_expr()
|
return p.string_expr()
|
||||||
} else {
|
} else {
|
||||||
// don't allow any other string prefix except `r`, `js` and `c`
|
// don't allow any other string prefix except `r`, `js` and `c`
|
||||||
|
@ -2198,7 +2203,7 @@ pub fn (mut p Parser) name_expr() ast.Expr {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// don't allow r`byte` and c`byte`
|
// don't allow r`byte` and c`byte`
|
||||||
if p.tok.lit in ['r', 'c'] && p.peek_tok.kind == .chartoken {
|
if p.peek_tok.kind == .chartoken && p.tok.lit.len == 1 && p.tok.lit[0] in [`r`, `c`] {
|
||||||
opt := if p.tok.lit == 'r' { '`r` (raw string)' } else { '`c` (c string)' }
|
opt := if p.tok.lit == 'r' { '`r` (raw string)' } else { '`c` (c string)' }
|
||||||
return p.error('cannot use $opt with `byte` and `rune`')
|
return p.error('cannot use $opt with `byte` and `rune`')
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,7 @@
|
||||||
|
vlib/v/parser/tests/if_guard_cond_err.vv:16:16: error: if guard condition expression is illegal, it should return optional
|
||||||
|
14 | fp.usage_example('GOOG AAPL')
|
||||||
|
15 | _ := fp.bool('version', `v`, false, 'version information.')
|
||||||
|
16 | if args := fp.finalize() && args.len > 0 {
|
||||||
|
| ~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
17 | return args
|
||||||
|
18 | } else {
|
|
@ -0,0 +1,28 @@
|
||||||
|
import os
|
||||||
|
import flag
|
||||||
|
|
||||||
|
const version = "v0.1.0"
|
||||||
|
|
||||||
|
// getting command line options and arguments
|
||||||
|
// returns the arguments
|
||||||
|
fn get_args() ?[]string {
|
||||||
|
mut fp := flag.new_flag_parser(os.args)
|
||||||
|
fp.application('ticker')
|
||||||
|
fp.version(version)
|
||||||
|
fp.description('A CLI yahoo ticker app')
|
||||||
|
fp.skip_executable()
|
||||||
|
fp.usage_example('GOOG AAPL')
|
||||||
|
_ := fp.bool('version', `v`, false, 'version information.')
|
||||||
|
if args := fp.finalize() && args.len > 0 {
|
||||||
|
return args
|
||||||
|
} else {
|
||||||
|
eprintln(err.msg())
|
||||||
|
println(fp.usage())
|
||||||
|
return none
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
tickers := get_args() or { return }
|
||||||
|
println(tickers)
|
||||||
|
}
|
|
@ -0,0 +1,6 @@
|
||||||
|
vlib/v/parser/tests/invalid_using_atomic.vv:2:5: error: the `atomic` keyword is invalid here
|
||||||
|
1 | fn main() {
|
||||||
|
2 | if atomic true {
|
||||||
|
| ~~~~~~
|
||||||
|
3 | println(true)
|
||||||
|
4 | }
|
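The parser change and the two new test fixtures above reject `&&`-chained if-guard conditions, since the guard must be a call, index, or prefix expression that returns an optional. One way the rejected pattern can be restructured (a sketch with made-up names, not part of the diff):

```v
fn fetch() ?[]string {
	return ['GOOG', 'AAPL']
}

fn main() {
	if args := fetch() {
		// move the extra boolean test inside the branch instead of `&&`-ing it
		if args.len > 0 {
			println(args)
		}
	} else {
		println(err)
	}
}
```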