Compare commits

...

83 Commits

Author SHA1 Message Date
ChAoS_UnItY 5b16d5fdf4
compress.gzip / deflate: rename gzip module into deflate module (#14682)
ci/woodpecker/push/vc Pipeline was successful Details
ci/woodpecker/push/docker Pipeline was successful Details
ci/woodpecker/push/arch Pipeline was successful Details
2022-06-05 11:12:14 +02:00
yuyi 799d2dce4d
checker: check using literal argument as reference parameter (#14674) 2022-06-05 11:12:14 +02:00
ChAoS_UnItY a0f1c1ffef
cgen: fix array type as interface (fix #14677) (#14681) 2022-06-05 11:12:14 +02:00
yuyi a4829f64e8
json: fix json decode with missing map type field (#14678) 2022-06-05 11:12:14 +02:00
Wertzui123 ad923f9a20
thirdparty/sokol: apply speaker/headset bug fix from latest upstream sokol_audio.h (#14676) 2022-06-05 11:12:13 +02:00
Delyan Angelov bed93ad891
examples: simplify the shebang in the v_script.vsh example 2022-06-05 11:12:13 +02:00
Delyan Angelov cf7ae39e62
cgen: fix `for (int i = 0; i < 10; i++, a++) {` (multiple expressions in the inc part) 2022-06-05 11:12:13 +02:00
Delyan Angelov 726d0acfb6
ci: on windows-msvc, skip const_fixed_array_containing_references_to_itself_test.v 2022-06-05 11:12:13 +02:00
Delyan Angelov 5e5b931765
ci: fix macos clang failures with const_fixed_array_containing_references_to_itself_test.v 2022-06-05 11:12:13 +02:00
Delyan Angelov 55386627b2
checker: allow for references to fixed array consts inside their initialisation `const a = [ ... &a[0] ...]!` 2022-06-05 11:12:13 +02:00
Alexander Medvednikov 8bb2ccfdbc
vweb: use http.Cookie 2022-06-05 11:12:13 +02:00
yuyi bff84aab59
ast: cleanup is_lit() (#14672) 2022-06-05 11:12:13 +02:00
Delyan Angelov 54e851f8de
cgen,ci: fix cast_bool_to_int_test.v on windows-tcc 2022-06-05 11:12:13 +02:00
Delyan Angelov 3e1ff72247
cgen: do not initialise externally declared globals (with -cstrict with [c_extern]). 2022-06-05 11:12:13 +02:00
Delyan Angelov b387554840
cgen: simplify int(bool_var) casts; support [c_extern] tag for global declarations 2022-06-05 11:12:13 +02:00
yuyi 3d6d120d4c
cgen: fix autofree_variable() (fix #14576) (#14602) 2022-06-05 11:12:13 +02:00
yuyi 9699afc1fd
cgen: fix if expr with optional method call (#14600) 2022-06-05 11:12:12 +02:00
ChAoS_UnItY 4288c40bee
compress: Add gzip module & refactor compress & compress.zlib module (#14599) 2022-06-05 11:12:12 +02:00
yuyi e755008c29
vrepl: fix error for exitasdfasdf in repl (fix #14593) (#14598) 2022-06-05 11:12:12 +02:00
Delyan Angelov 28af327062
ci: vfmt vlib/v/checker/check_types.v 2022-06-05 11:12:12 +02:00
Delyan Angelov f8ad43be34
checker: speed up check_expected_call_arg, by only calling Table.type_to_str on errors 2022-06-05 11:12:12 +02:00
Hunam 28fb66118b
vlib: add `net.http.mime` (#14516) 2022-06-05 11:12:12 +02:00
Delyan Angelov f674787fac
ast.table: cache the returned values of Table.type_to_str_using_aliases/2
This results in 9% speedup of the checker stage for V itself.
2022-06-05 11:12:12 +02:00
Delyan Angelov ddb8c0ffaa
ci: vfmt vlib/v/parser/parser.v 2022-06-05 11:12:12 +02:00
Delyan Angelov 7ca1d2a93c
tools: add cmd/tools/measure/scanner_speed.v and cmd/tools/measure/parser_speed.v 2022-06-05 11:12:12 +02:00
Delyan Angelov d20eae2d34
strings: update docs for .str() and for .free() 2022-06-05 11:12:12 +02:00
yuyi 47300ae03f
ast: fix IfExpr.str() (#14595) 2022-06-05 11:12:12 +02:00
Alexander Medvednikov 34d30b0ee5
checker, cgen: c2v variadic fixes 2022-06-05 11:12:12 +02:00
yuyi ada04cfb6a
parser: fix optional with multiple statements (#14592) 2022-06-05 11:12:11 +02:00
Claudio Cesar de Sá 9dedc4b664
examples: some new graphs algorithms and improving 2 others (#14556) 2022-06-05 11:12:11 +02:00
Ben 5b97307c5a
os: fix file_ext function (#14566) 2022-06-05 11:12:11 +02:00
Wertzui123 df4dae6d40
help: add Windows to the list of supported native backends in `v help build-native` (#14589) 2022-06-05 11:12:11 +02:00
ChAoS_UnItY f174bb6d78
cgen: fix type not being unaliased (fix #14568) (#14591) 2022-06-05 11:12:11 +02:00
yuyi 48d6e702f2
checker: minor cleanup in if_expr() (#14584) 2022-06-05 11:12:11 +02:00
Hunam b70c60dcb8
ci: re-enable Go2V test suite (#14588) 2022-06-05 11:12:11 +02:00
yuyi 9459fb549f
cgen: fix fixed array of aliases struct (#14583) 2022-06-05 11:12:11 +02:00
Delyan Angelov 8cd891c9b6
v: add support for `v crun examples/hello_world.v`, use crun mode for .vsh files by default. (#14554) 2022-06-05 11:12:11 +02:00
Delyan Angelov 80879586df
examples: document how to produce the shared library, needed for a standalone run of examples/dynamic_library_loading/use.v 2022-06-05 11:12:11 +02:00
yuyi ee547994bb
parser: fix comptime if script mode (fix #6419) (#14578) 2022-06-05 11:12:11 +02:00
Delyan Angelov f268cf7858
cgen: do not #include signal.h, on -os wasm32 and -d no_segfault_handler 2022-06-05 11:12:10 +02:00
Delyan Angelov 9871434daa
ci: skip embed_file_test.v for now 2022-06-05 11:12:10 +02:00
yuyi d4b90827d0
checker, cgen: fix array index optional with if expr (#14575) 2022-06-05 11:12:10 +02:00
Delyan Angelov 215d7875e6
v: always embed file data of \$embed_file(file) constructs, even without -prod, unless `-d embed_only_metadata` is given. 2022-06-05 11:12:10 +02:00
Alexander Medvednikov 0e384bb60d
cgen: fix goto label 2022-06-05 11:12:10 +02:00
Alexander Medvednikov 0bdc213dc6
cgen: c2v infix fix 2022-06-05 11:12:10 +02:00
Larpon 2ab6ef7f0a
toml: update readme with value_opt() usage (#14569) 2022-06-05 11:12:10 +02:00
yuyi d2b097fbf8
scanner: minor cleanup in scanner.v (#14565) 2022-06-05 11:12:10 +02:00
playX 9e038d1a64
builtin.js: fix string.int method (#14564) 2022-06-05 11:12:10 +02:00
Delyan Angelov eb688d7fa1
v.util, v.builder: fix util.module_is_builtin on macos with -usecache 2022-06-05 11:12:10 +02:00
Ben 2308eec024
os: add existing_path function (#14536) 2022-06-05 11:12:10 +02:00
Delyan Angelov 0338ae98c2
strconv: make f64_to_str_lnd1 public (fix building vlang/coreutils printf) 2022-06-05 11:12:10 +02:00
Delyan Angelov 936622039f
crypto.md5: improve performance of md5.blockblock_generic 2022-06-05 11:12:10 +02:00
Delyan Angelov 77ce385a1a
builtin: use C.fwrite (buffered) for _write_buf_to_fd (#14558) 2022-06-05 11:12:09 +02:00
Delyan Angelov b7232df73c
builtin: fix sporadic linking failures on `v -cc gcc -gc boehm examples/hello_world.v` 2022-06-05 11:12:09 +02:00
yuyi 76c92715e6
checker: vfmt overload_return_type.vv (#14557) 2022-06-05 11:12:09 +02:00
Hunam 6beac6f4b7
net.http: `Response.text` -> `Response.body` (#14478) 2022-06-05 11:12:09 +02:00
yuyi 8698bb375f
scanner: fix string interpolation with inner curly braces (fix #12242) (#14553) 2022-06-05 11:12:09 +02:00
yuyi a396496b93
parser: fix cast or dump arguments ending with comma (#14552) 2022-06-05 11:12:09 +02:00
Delyan Angelov 0a3d41c5d7
docs: document explicitly, that maps support `if v := m[k] {` too 2022-06-05 11:12:09 +02:00
Delyan Angelov 8cecea9965
cgen: add support for `v -cmain SDL_main sdl_example.v` 2022-06-05 11:12:09 +02:00
yuyi e50d73983f
cgen: fix cross assign in closure (#14549) 2022-06-05 11:12:09 +02:00
yuyi 0a81074b1e
ast: fix call_expr.str() with propagate_option or propagate_result (#14550) 2022-06-05 11:12:09 +02:00
Delyan Angelov f02f2e4708
ci: temporary workaround for cross assignment in a closure leading to cgen error 2022-06-05 11:12:09 +02:00
yuyi e9da92c61d
parser, cgen: fix cross assign with parentheses (#14545) 2022-06-05 11:12:08 +02:00
yuyi 458f6f09e1
checker: fix declare assign literal with closure (#14544) 2022-06-05 11:12:08 +02:00
Delyan Angelov f4ccbcd2cf
toml: add `pub fn (d Doc) value_opt(key string) ?Any {` and some tests for toml.parse_dotted_key/1 2022-06-05 11:12:08 +02:00
yuyi 740a862dcd
parser: fix match expr case with struct init (#14538) 2022-06-05 11:12:08 +02:00
Larpon 31efb48fc5
tools: implement `v missdoc --diff oldv newv` (#14537) 2022-06-05 11:12:08 +02:00
spaceface 3d18c884d4
cgen: fix a race condition in the closure implementation (#14532) 2022-06-05 11:12:08 +02:00
Delyan Angelov 565561e0bd
checker: fix error position in `fn f() int { return 1,2 }` 2022-06-05 11:12:08 +02:00
Delyan Angelov 1fcc248d2e
ci: vfmt cmd/tools/vpm.v 2022-06-05 11:12:08 +02:00
Delyan Angelov 16bcfa7da3
tools: fix `v install https://github.com/nedpals/vex.git` (fix #14483) 2022-06-05 11:12:08 +02:00
Louis Schmieder 29fc96c040
orm: document & fix pg (#14533) 2022-06-05 11:12:08 +02:00
yuyi dca8739eeb
checker: cleanup checker.v (#14530) 2022-06-05 11:12:08 +02:00
Delyan Angelov c4783628e6
cgen: fix parallel cgen for json encoding of struct fields that have default values 2022-06-05 11:12:08 +02:00
Delyan Angelov 7541d84038
tests: make json_test.v less noisy, to see errors easier 2022-06-05 11:12:07 +02:00
Wertzui123 a7d7e34125
x.ttf: fix typo in README (#14528) 2022-06-05 11:12:07 +02:00
yuyi 82332344de
fmt: fix fn return types list ending with comma (#14529) 2022-06-05 11:12:07 +02:00
Delyan Angelov 140d494d4c
all: add support for struct field deprecation (#14527) 2022-06-05 11:12:07 +02:00
Larpon a61316ceea
ci: use missdoc as subcmd (#14524) 2022-06-05 11:12:07 +02:00
Larpon f13369dad3
tools: add v missdoc --verify mode (#14525) 2022-06-05 11:12:07 +02:00
Larpon b9c283b2b8
embed_file: rename debug_embed_file_in_prod -> force_embed_file (#14523) 2022-06-05 11:12:07 +02:00
yuyi b97a04abd9
fmt: fix fmt of Ok<[]Token>{[]} (#14522) 2022-06-05 11:12:07 +02:00
160 changed files with 18378 additions and 1038 deletions

View File

@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-20.04
timeout-minutes: 5
env:
MOPTIONS: --no-line-numbers --relative-paths --exclude /vlib/v/ --exclude /builtin/linux_bare/ --exclude /testdata/ --exclude /tests/ vlib/
MOPTIONS: --relative-paths --exclude /vlib/v/ --exclude /builtin/linux_bare/ --exclude /testdata/ --exclude /tests/
steps:
- uses: actions/checkout@v2
- name: Build V
@ -35,14 +35,4 @@ jobs:
- name: Check against parent commit
run: |
./v run cmd/tools/missdoc.v $MOPTIONS | sort > /tmp/n_v.txt
cd pv/ && ../v run ../cmd/tools/missdoc.v $MOPTIONS | sort > /tmp/o_v.txt
count_new=$(cat /tmp/n_v.txt | wc -l)
count_old=$(cat /tmp/o_v.txt | wc -l)
echo "new pubs: $count_new | old pubs: $count_old"
echo "new head: $(head -n1 /tmp/n_v.txt)"
echo "old head: $(head -n1 /tmp/o_v.txt)"
if [[ ${count_new} -gt ${count_old} ]]; then
echo "The following $((count_new-count_old)) function(s) are introduced with no documentation:"
diff /tmp/n_v.txt /tmp/o_v.txt ## diff does exit(1) when files are different
fi
./v missdoc --diff $MOPTIONS pv/vlib vlib

View File

@ -131,12 +131,12 @@ jobs:
- name: Build go2v
continue-on-error: true
run: |
echo "Clone go2v"
echo "Clone Go2V"
git clone --depth=1 https://github.com/vlang/go2v go2v/
echo "Build go2v"
echo "Build Go2V"
./v go2v/
## echo "Run tests for go2v"
## VJOBS=1 ./v -stats test go2v/
echo "Run Go2V tests"
VJOBS=1 ./v -stats test go2v/
- name: Build vlang/pdf
continue-on-error: true

View File

@ -191,7 +191,6 @@ to create a copy of the compiler rather than replacing it with `v self`.
| `debug_codegen` | Prints automatically generated V code during the scanning phase |
| `debug_interface_table` | Prints generated interfaces during C generation |
| `debug_interface_type_implements` | Prints debug information when checking that a type implements an interface |
| `debug_embed_file_in_prod` | Prints debug information about the embedded files with `$embed_file('somefile')` |
| `print_vweb_template_expansions` | Prints vweb compiled HTML files |
| `time_checking` | Prints the time spent checking files and other related information |
| `time_parsing` | Prints the time spent parsing files and other related information |
@ -204,3 +203,4 @@ to create a copy of the compiler rather than replacing it with `v self`.
| `trace_thirdparty_obj_files` | Prints details about built thirdparty obj files |
| `trace_usecache` | Prints details when -usecache is used |
| `trace_embed_file` | Prints details when $embed_file is used |
| `embed_only_metadata` | Embed only the metadata for the file(s) with `$embed_file('somefile')`; faster; for development, *not* distribution |

View File

@ -0,0 +1,67 @@
import os
import time
import v.ast
import v.pref
import v.parser
import v.errors
import v.scanner
// Entry point: each command line argument is a path to measure.
// A single argument starting with `@` is an indirection: it names a file,
// containing a newline separated list of paths to process instead.
fn main() {
	args := os.args#[1..]
	if args.len > 0 && args[0].starts_with('@') {
		list_path := args[0].all_after('@')
		paths := os.read_file(list_path)?.split('\n')
		process_files(paths)?
	} else {
		process_files(args)?
	}
}
// process_files parses every given .v file (empty entries and *_test.v files
// are skipped), and prints per-file and total statistics: elapsed microseconds,
// token count, byte count, bytes/token ratio, error count and the file path.
// Only the parsing itself is timed; scanning happens inside new_parser/4,
// before the stopwatch is restarted.
fn process_files(files []string) ? {
	mut table := ast.new_table()
	mut pref := pref.new_preferences()
	pref.is_fmt = true
	pref.skip_warnings = true
	pref.output_mode = .silent
	mut sw := time.new_stopwatch()
	mut total_us := i64(0)
	mut total_bytes := i64(0)
	mut total_tokens := i64(0)
	for f in files {
		if f == '' {
			continue
		}
		if f.ends_with('_test.v') {
			continue
		}
		// do not measure the scanning, but only the parsing:
		mut p := new_parser(f, .skip_comments, table, pref)
		///
		sw.restart()
		_ := p.parse()
		f_us := sw.elapsed().microseconds()
		///
		total_us += f_us
		total_bytes += p.scanner.text.len
		total_tokens += p.scanner.all_tokens.len
		// NOTE: `:7.3f` (was `:7.3`, missing the float type char) — now consistent
		// with the formatting used by the companion tool scanner_speed.v .
		println('${f_us:10}us ${p.scanner.all_tokens.len:10} ${p.scanner.text.len:10} ${(f64(p.scanner.text.len) / p.scanner.all_tokens.len):7.3f} ${p.errors.len:4} $f')
	}
	println('${total_us:10}us ${total_tokens:10} ${total_bytes:10} ${(f64(total_tokens) / total_bytes):7.3f} | speed: ${(f64(total_bytes) / total_us):2.5f} MB/s')
}
// new_parser builds a v.parser.Parser instance directly (bypassing the usual
// builder machinery), so that the single file at `path` can be parsed in
// isolation, with the given comments mode, shared symbol table and preferences.
// NOTE(review): the scanner is constructed eagerly here, so the caller can time
// just the .parse() call separately; its .text/.all_tokens are read afterwards
// for the statistics.
fn new_parser(path string, comments_mode scanner.CommentsMode, table &ast.Table, pref &pref.Preferences) &parser.Parser {
	mut p := &parser.Parser{
		// a scanner failure (e.g. unreadable file) is fatal for this measuring tool:
		scanner: scanner.new_scanner_file(path, comments_mode, pref) or { panic(err) }
		comments_mode: comments_mode
		table: table
		pref: pref
		scope: &ast.Scope{
			start_pos: 0
			parent: table.global_scope
		}
		errors: []errors.Error{}
		warnings: []errors.Warning{}
	}
	p.set_path(path)
	return p
}

View File

@ -0,0 +1,42 @@
import os
import time
import v.scanner
import v.pref
// Entry point: each command line argument is a path to measure.
// A single argument starting with `@` is an indirection: it names a file,
// containing a newline separated list of paths to process instead.
fn main() {
	args := os.args#[1..]
	if args.len > 0 && args[0].starts_with('@') {
		list_path := args[0].all_after('@')
		paths := os.read_file(list_path)?.split('\n')
		process_files(paths)?
	} else {
		process_files(args)?
	}
}
// process_files scans every given .v file (empty entries and *_test.v files
// are skipped), and prints per-file and total statistics: elapsed microseconds,
// token count, byte count, bytes/token ratio and the file path.
// Only scanner construction is timed, measuring the raw scanning speed.
fn process_files(files []string) ? {
	mut pref := pref.new_preferences()
	pref.is_fmt = true
	pref.skip_warnings = true
	pref.output_mode = .silent
	mut timer := time.new_stopwatch()
	mut total_us := i64(0)
	mut total_bytes := i64(0)
	mut total_tokens := i64(0)
	for f in files {
		// skip blank list entries and test files alike:
		if f == '' || f.ends_with('_test.v') {
			continue
		}
		timer.restart()
		s := scanner.new_scanner_file(f, .skip_comments, pref)?
		f_us := timer.elapsed().microseconds()
		total_us += f_us
		total_bytes += s.text.len
		total_tokens += s.all_tokens.len
		println('${f_us:10}us ${s.all_tokens.len:10} ${s.text.len:10} ${(f64(s.text.len) / s.all_tokens.len):7.3f} $f')
	}
	println('${total_us:10}us ${total_tokens:10} ${total_bytes:10} ${(f64(total_tokens) / total_bytes):7.3f} | speed: ${(f64(total_bytes) / total_us):2.5f} MB/s')
}

View File

@ -24,6 +24,7 @@ pub fn cprint(omessage string) {
message = term.cyan(message)
}
print(message)
flush_stdout()
}
pub fn cprint_strong(omessage string) {
@ -32,16 +33,19 @@ pub fn cprint_strong(omessage string) {
message = term.bright_green(message)
}
print(message)
flush_stdout()
}
pub fn cprintln(omessage string) {
cprint(omessage)
println('')
flush_stdout()
}
pub fn cprintln_strong(omessage string) {
cprint_strong(omessage)
println('')
flush_stdout()
}
pub fn verbose_trace(label string, message string) {

View File

@ -123,6 +123,7 @@ pub fn (mut ts TestSession) print_messages() {
// progress mode, the last line is rewritten many times:
if is_ok && !ts.silent_mode {
print('\r$empty\r$msg')
flush_stdout()
} else {
// the last \n is needed, so SKIP/FAIL messages
// will not get overwritten by the OK ones

View File

@ -200,8 +200,13 @@ fn (mut context Context) parse_options() ? {
}
}
fn flushed_print(s string) {
print(s)
flush_stdout()
}
fn (mut context Context) clear_line() {
print(context.cline)
flushed_print(context.cline)
}
fn (mut context Context) expand_all_commands(commands []string) []string {
@ -247,7 +252,7 @@ fn (mut context Context) run() {
println('Series: ${si:4}/${context.series:-4}, command: $cmd')
if context.warmup > 0 && run_warmups < context.commands.len {
for i in 1 .. context.warmup + 1 {
print('${context.cgoback}warming up run: ${i:4}/${context.warmup:-4} for ${cmd:-50s} took ${duration:6} ms ...')
flushed_print('${context.cgoback}warming up run: ${i:4}/${context.warmup:-4} for ${cmd:-50s} took ${duration:6} ms ...')
mut sw := time.new_stopwatch()
res := os.execute(cmd)
if res.exit_code != 0 {
@ -260,9 +265,9 @@ fn (mut context Context) run() {
context.clear_line()
for i in 1 .. (context.count + 1) {
avg := f64(sum) / f64(i)
print('${context.cgoback}Average: ${avg:9.3f}ms | run: ${i:4}/${context.count:-4} | took ${duration:6} ms')
flushed_print('${context.cgoback}Average: ${avg:9.3f}ms | run: ${i:4}/${context.count:-4} | took ${duration:6} ms')
if context.show_output {
print(' | result: ${oldres:s}')
flushed_print(' | result: ${oldres:s}')
}
mut sw := time.new_stopwatch()
res := scripting.exec(cmd) or { continue }
@ -288,7 +293,7 @@ fn (mut context Context) run() {
context.results[icmd].atiming = new_aints(context.results[icmd].timings, context.nmins,
context.nmaxs)
context.clear_line()
print(context.cgoback)
flushed_print(context.cgoback)
mut m := map[string][]int{}
ioutputs := context.results[icmd].outputs
for o in ioutputs {
@ -358,7 +363,7 @@ fn (mut context Context) show_diff_summary() {
println('context: $context')
}
if int(base) > context.fail_on_maxtime {
print(performance_regression_label)
flushed_print(performance_regression_label)
println('average time: ${base:6.1f} ms > $context.fail_on_maxtime ms threshold.')
exit(2)
}
@ -367,7 +372,7 @@ fn (mut context Context) show_diff_summary() {
}
fail_threshold_max := f64(context.fail_on_regress_percent)
if first_cmd_percentage > fail_threshold_max {
print(performance_regression_label)
flushed_print(performance_regression_label)
println('${first_cmd_percentage:5.1f}% > ${fail_threshold_max:5.1f}% threshold.')
exit(3)
}

View File

@ -259,6 +259,8 @@ const (
'--relative-paths',
'-r',
'--js',
'--verify',
'--diff',
]
auto_complete_flags_self = [
'-prod',

View File

@ -183,6 +183,7 @@ fn (foptions &FormatOptions) format_pipe() {
// checker.new_checker(table, prefs).check(file_ast)
formatted_content := fmt.fmt(file_ast, table, prefs, foptions.is_debug)
print(formatted_content)
flush_stdout()
foptions.vlog('fmt.fmt worked and $formatted_content.len bytes were written to stdout.')
}
@ -279,6 +280,7 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa
return
}
print(formatted_fc)
flush_stdout()
}
fn (f FormatOptions) str() string {

View File

@ -6,12 +6,13 @@ import flag
const (
tool_name = 'v missdoc'
tool_version = '0.0.4'
tool_version = '0.1.0'
tool_description = 'Prints all V functions in .v files under PATH/, that do not yet have documentation comments.'
work_dir_prefix = normalise_path(os.real_path(os.wd_at_startup) + '/')
work_dir_prefix = normalise_path(os.real_path(os.wd_at_startup) + os.path_separator)
)
struct UndocumentedFN {
file string
line int
signature string
tags []string
@ -26,11 +27,15 @@ struct Options {
no_line_numbers bool
exclude []string
relative_paths bool
mut:
verify bool
diff bool
additional_args []string
}
fn (opt Options) report_undocumented_functions_in_path(path string) {
fn (opt Options) collect_undocumented_functions_in_dir(directory string) []UndocumentedFN {
mut files := []string{}
collect(path, mut files, fn (npath string, mut accumulated_paths []string) {
collect(directory, mut files, fn (npath string, mut accumulated_paths []string) {
if !npath.ends_with('.v') {
return
}
@ -39,6 +44,7 @@ fn (opt Options) report_undocumented_functions_in_path(path string) {
}
accumulated_paths << npath
})
mut undocumented_fns := []UndocumentedFN{}
for file in files {
if !opt.js && file.ends_with('.js.v') {
continue
@ -46,15 +52,16 @@ fn (opt Options) report_undocumented_functions_in_path(path string) {
if opt.exclude.len > 0 && opt.exclude.any(file.contains(it)) {
continue
}
opt.report_undocumented_functions_in_file(file)
undocumented_fns << opt.collect_undocumented_functions_in_file(file)
}
return undocumented_fns
}
fn (opt &Options) report_undocumented_functions_in_file(nfile string) {
fn (opt &Options) collect_undocumented_functions_in_file(nfile string) []UndocumentedFN {
file := os.real_path(nfile)
contents := os.read_file(file) or { panic(err) }
lines := contents.split('\n')
mut info := []UndocumentedFN{}
mut list := []UndocumentedFN{}
for i, line in lines {
if line.starts_with('pub fn') || (opt.private && (line.starts_with('fn ')
&& !(line.starts_with('fn C.') || line.starts_with('fn main')))) {
@ -78,14 +85,39 @@ fn (opt &Options) report_undocumented_functions_in_file(nfile string) {
}
if grab {
clean_line := line.all_before_last(' {')
info << UndocumentedFN{i + 1, clean_line, tags}
list << UndocumentedFN{
line: i + 1
signature: clean_line
tags: tags
file: file
}
}
}
}
}
}
if info.len > 0 {
for undocumented_fn in info {
return list
}
fn (opt &Options) collect_undocumented_functions_in_path(path string) []UndocumentedFN {
mut undocumented_functions := []UndocumentedFN{}
if os.is_file(path) {
undocumented_functions << opt.collect_undocumented_functions_in_file(path)
} else {
undocumented_functions << opt.collect_undocumented_functions_in_dir(path)
}
return undocumented_functions
}
fn (opt &Options) report_undocumented_functions_in_path(path string) int {
mut list := opt.collect_undocumented_functions_in_path(path)
opt.report_undocumented_functions(list)
return list.len
}
fn (opt &Options) report_undocumented_functions(list []UndocumentedFN) {
if list.len > 0 {
for undocumented_fn in list {
mut line_numbers := '$undocumented_fn.line:0:'
if opt.no_line_numbers {
line_numbers = ''
@ -95,10 +127,11 @@ fn (opt &Options) report_undocumented_functions_in_file(nfile string) {
} else {
''
}
file := undocumented_fn.file
ofile := if opt.relative_paths {
nfile.replace(work_dir_prefix, '')
file.replace(work_dir_prefix, '')
} else {
os.real_path(nfile)
os.real_path(file)
}
if opt.deprecated {
println('$ofile:$line_numbers$undocumented_fn.signature $tags_str')
@ -118,6 +151,54 @@ fn (opt &Options) report_undocumented_functions_in_file(nfile string) {
}
}
fn (opt &Options) diff_undocumented_functions_in_paths(path_old string, path_new string) []UndocumentedFN {
old := os.real_path(path_old)
new := os.real_path(path_new)
mut old_undocumented_functions := opt.collect_undocumented_functions_in_path(old)
mut new_undocumented_functions := opt.collect_undocumented_functions_in_path(new)
mut differs := []UndocumentedFN{}
if new_undocumented_functions.len > old_undocumented_functions.len {
for new_undoc_fn in new_undocumented_functions {
new_relative_file := new_undoc_fn.file.replace(new, '').trim_string_left(os.path_separator)
mut found := false
for old_undoc_fn in old_undocumented_functions {
old_relative_file := old_undoc_fn.file.replace(old, '').trim_string_left(os.path_separator)
if new_relative_file == old_relative_file
&& new_undoc_fn.signature == old_undoc_fn.signature {
found = true
break
}
}
if !found {
differs << new_undoc_fn
}
}
}
differs.sort_with_compare(sort_undoc_fns)
return differs
}
fn sort_undoc_fns(a &UndocumentedFN, b &UndocumentedFN) int {
if a.file < b.file {
return -1
}
if a.file > b.file {
return 1
}
// same file sort by signature
else {
if a.signature < b.signature {
return -1
}
if a.signature > b.signature {
return 1
}
return 0
}
}
fn normalise_path(path string) string {
return path.replace('\\', '/')
}
@ -145,17 +226,15 @@ fn collect_tags(line string) []string {
}
fn main() {
if os.args.len == 1 {
println('Usage: $tool_name PATH \n$tool_description\n$tool_name -h for more help...')
exit(1)
}
mut fp := flag.new_flag_parser(os.args[1..])
mut fp := flag.new_flag_parser(os.args[1..]) // skip the "v" command.
fp.application(tool_name)
fp.version(tool_version)
fp.description(tool_description)
fp.arguments_description('PATH [PATH]...')
fp.skip_executable() // skip the "missdoc" command.
// Collect tool options
opt := Options{
mut opt := Options{
show_help: fp.bool('help', `h`, false, 'Show this help text.')
deprecated: fp.bool('deprecated', `d`, false, 'Include deprecated functions in output.')
private: fp.bool('private', `p`, false, 'Include private functions in output.')
@ -164,16 +243,58 @@ fn main() {
collect_tags: fp.bool('tags', `t`, false, 'Also print function tags if any is found.')
exclude: fp.string_multi('exclude', `e`, '')
relative_paths: fp.bool('relative-paths', `r`, false, 'Use relative paths in output.')
diff: fp.bool('diff', 0, false, 'exit(1) and show difference between two PATH inputs, return 0 otherwise.')
verify: fp.bool('verify', 0, false, 'exit(1) if documentation is missing, 0 otherwise.')
}
opt.additional_args = fp.finalize() or { panic(err) }
if opt.show_help {
println(fp.usage())
exit(0)
}
for path in os.args[1..] {
if os.is_file(path) {
opt.report_undocumented_functions_in_file(path)
} else {
opt.report_undocumented_functions_in_path(path)
if opt.additional_args.len == 0 {
println(fp.usage())
eprintln('Error: $tool_name is missing PATH input')
exit(1)
}
// Allow short-long versions to prevent false positive situations, should
// the user miss a `-`. E.g.: the `-verify` flag would be ignored and missdoc
// will return 0 for success plus a list of any undocumented functions.
if '-verify' in opt.additional_args {
opt.verify = true
}
if '-diff' in opt.additional_args {
opt.diff = true
}
if opt.diff {
if opt.additional_args.len < 2 {
println(fp.usage())
eprintln('Error: $tool_name --diff needs two valid PATH inputs')
exit(1)
}
path_old := opt.additional_args[0]
path_new := opt.additional_args[1]
if !(os.is_file(path_old) || os.is_dir(path_old)) || !(os.is_file(path_new)
|| os.is_dir(path_new)) {
println(fp.usage())
eprintln('Error: $tool_name --diff needs two valid PATH inputs')
exit(1)
}
list := opt.diff_undocumented_functions_in_paths(path_old, path_new)
if list.len > 0 {
opt.report_undocumented_functions(list)
exit(1)
}
exit(0)
}
mut total := 0
for path in opt.additional_args {
if os.is_file(path) || os.is_dir(path) {
total += opt.report_undocumented_functions_in_path(path)
}
}
if opt.verify && total > 0 {
exit(1)
}
}

View File

@ -208,24 +208,24 @@ fn vpm_install_from_vpm(module_names []string) {
println('VPM needs `$vcs` to be installed.')
continue
}
mod_name_as_path := mod.name.replace('.', os.path_separator).replace('-', '_').to_lower()
final_module_path := os.real_path(os.join_path(settings.vmodules_path, mod_name_as_path))
if os.exists(final_module_path) {
//
minfo := mod_name_info(mod.name)
if os.exists(minfo.final_module_path) {
vpm_update([name])
continue
}
println('Installing module "$name" from "$mod.url" to "$final_module_path" ...')
println('Installing module "$name" from "$mod.url" to "$minfo.final_module_path" ...')
vcs_install_cmd := supported_vcs_install_cmds[vcs]
cmd := '$vcs_install_cmd "$mod.url" "$final_module_path"'
cmd := '$vcs_install_cmd "$mod.url" "$minfo.final_module_path"'
verbose_println(' command: $cmd')
cmdres := os.execute(cmd)
if cmdres.exit_code != 0 {
errors++
println('Failed installing module "$name" to "$final_module_path" .')
println('Failed installing module "$name" to "$minfo.final_module_path" .')
print_failed_cmd(cmd, cmdres)
continue
}
resolve_dependencies(name, final_module_path, module_names)
resolve_dependencies(name, minfo.final_module_path, module_names)
}
if errors > 0 {
exit(1)
@ -270,7 +270,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
}
repo_name := url.substr(second_cut_pos + 1, first_cut_pos)
mut name := repo_name + os.path_separator + mod_name
mut name := os.join_path(repo_name, mod_name)
mod_name_as_path := name.replace('-', '_').to_lower()
mut final_module_path := os.real_path(os.join_path(settings.vmodules_path, mod_name_as_path))
if os.exists(final_module_path) {
@ -297,20 +297,19 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
if os.exists(vmod_path) {
data := os.read_file(vmod_path) or { return }
vmod := parse_vmod(data)
mod_path := os.real_path(os.join_path(settings.vmodules_path, vmod.name.replace('.',
os.path_separator)))
println('Relocating module from "$name" to "$vmod.name" ( "$mod_path" ) ...')
if os.exists(mod_path) {
println('Warning module "$mod_path" already exsits!')
println('Removing module "$mod_path" ...')
os.rmdir_all(mod_path) or {
minfo := mod_name_info(vmod.name)
println('Relocating module from "$name" to "$vmod.name" ( "$minfo.final_module_path" ) ...')
if os.exists(minfo.final_module_path) {
println('Warning module "$minfo.final_module_path" already exsits!')
println('Removing module "$minfo.final_module_path" ...')
os.rmdir_all(minfo.final_module_path) or {
errors++
println('Errors while removing "$mod_path" :')
println('Errors while removing "$minfo.final_module_path" :')
println(err)
continue
}
}
os.mv(final_module_path, mod_path) or {
os.mv(final_module_path, minfo.final_module_path) or {
errors++
println('Errors while relocating module "$name" :')
println(err)
@ -323,7 +322,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
continue
}
println('Module "$name" relocated to "$vmod.name" successfully.')
final_module_path = mod_path
final_module_path = minfo.final_module_path
name = vmod.name
}
resolve_dependencies(name, final_module_path, module_names)
@ -377,10 +376,7 @@ fn vpm_update(m []string) {
}
mut errors := 0
for modulename in module_names {
mut zname := modulename
if mod := get_mod_by_url(modulename) {
zname = mod.name
}
zname := url_to_module_name(modulename)
final_module_path := valid_final_path_of_existing_module(modulename) or { continue }
os.chdir(final_module_path) or {}
println('Updating module "$zname" in "$final_module_path" ...')
@ -503,26 +499,21 @@ fn vpm_remove(module_names []string) {
}
fn valid_final_path_of_existing_module(modulename string) ?string {
mut name := modulename
if mod := get_mod_by_url(name) {
name = mod.name
}
mod_name_as_path := name.replace('.', os.path_separator).replace('-', '_').to_lower()
name_of_vmodules_folder := os.join_path(settings.vmodules_path, mod_name_as_path)
final_module_path := os.real_path(name_of_vmodules_folder)
if !os.exists(final_module_path) {
println('No module with name "$name" exists at $name_of_vmodules_folder')
name := if mod := get_mod_by_url(modulename) { mod.name } else { modulename }
minfo := mod_name_info(name)
if !os.exists(minfo.final_module_path) {
println('No module with name "$minfo.mname_normalised" exists at $minfo.final_module_path')
return none
}
if !os.is_dir(final_module_path) {
println('Skipping "$name_of_vmodules_folder", since it is not a folder.')
if !os.is_dir(minfo.final_module_path) {
println('Skipping "$minfo.final_module_path", since it is not a folder.')
return none
}
vcs_used_in_dir(final_module_path) or {
println('Skipping "$name_of_vmodules_folder", since it does not use a supported vcs.')
vcs_used_in_dir(minfo.final_module_path) or {
println('Skipping "$minfo.final_module_path", since it does not use a supported vcs.')
return none
}
return final_module_path
return minfo.final_module_path
}
fn ensure_vmodules_dir_exist() {
@ -573,6 +564,31 @@ fn get_installed_modules() []string {
return modules
}
struct ModNameInfo {
mut:
mname string // The-user.The-mod , *never* The-user.The-mod.git
mname_normalised string // the_user.the_mod
mname_as_path string // the_user/the_mod
final_module_path string // ~/.vmodules/the_user/the_mod
}
fn mod_name_info(mod_name string) ModNameInfo {
mut info := ModNameInfo{}
info.mname = if mod_name.ends_with('.git') { mod_name.replace('.git', '') } else { mod_name }
info.mname_normalised = info.mname.replace('-', '_').to_lower()
info.mname_as_path = info.mname_normalised.replace('.', os.path_separator)
info.final_module_path = os.real_path(os.join_path(settings.vmodules_path, info.mname_as_path))
return info
}
// url_to_module_name maps a module URL (or an already plain module name)
// to its canonical module name, stripping a trailing `.git` suffix.
fn url_to_module_name(modulename string) string {
	mut res := if mod := get_mod_by_url(modulename) { mod.name } else { modulename }
	// Strip only the `.git` *suffix*. `.replace('.git', '')` would also
	// corrupt names that contain `.git` somewhere in the middle.
	res = res.trim_string_right('.git')
	return res
}
fn get_all_modules() []string {
url := get_working_server_url()
r := http.get(url) or { panic(err) }
@ -580,7 +596,7 @@ fn get_all_modules() []string {
println('Failed to search vpm.vlang.io. Status code: $r.status_code')
exit(1)
}
s := r.text
s := r.body
mut read_len := 0
mut modules := []string{}
for read_len < s.len {
@ -717,7 +733,7 @@ fn get_module_meta_info(name string) ?Mod {
errors << 'Error details: $err'
continue
}
if r.status_code == 404 || r.text.trim_space() == '404' {
if r.status_code == 404 || r.body.trim_space() == '404' {
errors << 'Skipping module "$name", since "$server_url" reported that "$name" does not exist.'
continue
}
@ -725,7 +741,7 @@ fn get_module_meta_info(name string) ?Mod {
errors << 'Skipping module "$name", since "$server_url" responded with $r.status_code http status code. Please try again later.'
continue
}
s := r.text
s := r.body
if s.len > 0 && s[0] != `{` {
errors << 'Invalid json data'
errors << s.trim_space().limit(100) + ' ...'

View File

@ -305,7 +305,6 @@ fn run_repl(workdir string, vrepl_prefix string) int {
return int(rc)
}
}
break
}
r.line = line
if r.line == '\n' {
@ -388,13 +387,13 @@ fn run_repl(workdir string, vrepl_prefix string) int {
'#include ',
'for ',
'or ',
'insert',
'delete',
'prepend',
'sort',
'clear',
'trim',
'as',
'insert(',
'delete(',
'prepend(',
'sort(',
'clear(',
'trim(',
' as ',
]
mut is_statement := false
if filter_line.count('=') % 2 == 1 {

View File

@ -66,6 +66,7 @@ const (
]
skip_with_werror = [
'do_not_remove',
'vlib/v/embed_file/tests/embed_file_test.v',
]
skip_with_asan_compiler = [
'do_not_remove',
@ -109,6 +110,10 @@ const (
skip_on_non_linux = [
'do_not_remove',
]
skip_on_windows_msvc = [
'do_not_remove',
'vlib/v/tests/const_fixed_array_containing_references_to_itself_test.v', // error C2099: initializer is not a constant
]
skip_on_windows = [
'vlib/context/cancel_test.v',
'vlib/context/deadline_test.v',
@ -264,6 +269,9 @@ fn main() {
}
$if windows {
tsession.skip_files << skip_on_windows
$if msvc {
tsession.skip_files << skip_on_windows_msvc
}
}
$if !windows {
tsession.skip_files << skip_on_non_windows

View File

@ -25,7 +25,20 @@ see also `v help build`.
-cstrict
Turn on additional C warnings. This slows down compilation
slightly (~10% for gcc), but sometimes provides better diagnosis.
slightly (~10% for gcc), but sometimes provides better error diagnosis.
-cmain <MainFunctionName>
Useful with framework like code, that uses macros to re-define `main`, like SDL2 does for example.
With that option, V will always generate:
`int MainFunctionName(int ___argc, char** ___argv) {` , for the program entry point function, *no matter* the OS.
Without it, on non Windows systems, it will generate:
`int main(int ___argc, char** ___argv) {`
... and on Windows, it will generate:
a) `int WINAPI wWinMain(HINSTANCE instance, HINSTANCE prev_instance, LPWSTR cmd_line, int show_cmd){`
when you are compiling applications that `import gg`.
... or it will generate:
b) `int wmain(int ___argc, wchar_t* ___argv[], wchar_t* ___envp[]){`
when you are compiling console apps.
-showcc
Prints the C command that is used to build the program.

View File

@ -14,4 +14,4 @@ For more general build help, see also `v help build`.
-os <os>, -target-os <os>
Change the target OS that V compiles for.
The supported targets for the native backend are: `macos`, `linux`
The supported targets for the native backend are: `macos`, `linux` and `windows`

View File

@ -7,6 +7,7 @@ Examples:
v hello.v Compile the file `hello.v` and output it as `hello` or `hello.exe`.
v run hello.v Same as above but also run the produced executable immediately after compilation.
v -cg run hello.v Same as above, but make debugging easier (in case your program crashes).
v crun hello.v Same as above, but do not recompile, if the executable already exists, and is newer than the sources.
v -o h.c hello.v Translate `hello.v` to `h.c`. Do not compile further.
v -o - hello.v Translate `hello.v` and output the C source code to stdout. Do not compile further.
@ -20,7 +21,10 @@ V supports the following commands:
init Setup the file structure for an already existing V project.
* Ordinary development:
run Compile and run a V program.
run Compile and run a V program. Delete the executable after the run.
crun Compile and run a V program without deleting the executable.
If you run the same program a second time, without changing the source files,
V will just run the executable, without recompilation. Suitable for scripting.
test Run all test files in the provided directory.
fmt Format the V code provided.
vet Report suspicious code constructs.

View File

@ -1,4 +1,4 @@
v missdoc 0.0.4
v missdoc 0.1.0
-----------------------------------------------
Usage: v missdoc [options] PATH [PATH]...
@ -12,5 +12,25 @@ Options:
--js Include JavaScript functions in output.
-n, --no-line-numbers Exclude line numbers in output.
-e, --exclude <multiple strings>
-r, --relative-paths Use relative paths in output.
--verify exit(1) if documentation is missing, 0 otherwise.
--diff exit(1) and show difference between two PATH inputs, return 0 otherwise.
--version output version information and exit
-----------------------------------------------
PATH can be both files and directories.
The `--verify` flag is useful for use in CI setups for checking if a V project
has all its functions and methods documented:
```
v missdoc --verify path/to/code
```
The `--diff` flag is useful if your project is not yet fully documented
but you want to ensure that no new functions or methods are introduced
between commits or branches:
```
v missdoc --diff current/code new/code
```

View File

@ -93,17 +93,21 @@ fn main() {
return
}
match command {
'run', 'crun', 'build', 'build-module' {
rebuild(prefs)
return
}
'help' {
invoke_help_and_exit(args)
}
'version' {
println(version.full_v_version(prefs.is_verbose))
return
}
'new', 'init' {
util.launch_tool(prefs.is_verbose, 'vcreate', os.args[1..])
return
}
'translate' {
eprintln('Translating C to V will be available in V 0.3')
exit(1)
}
'install', 'list', 'outdated', 'remove', 'search', 'show', 'update', 'upgrade' {
util.launch_tool(prefs.is_verbose, 'vpm', os.args[1..])
return
@ -118,42 +122,24 @@ fn main() {
eprintln('V Error: Use `v install` to install modules from vpm.vlang.io')
exit(1)
}
'version' {
println(version.full_v_version(prefs.is_verbose))
return
'translate' {
eprintln('Translating C to V will be available in V 0.3')
exit(1)
}
else {}
}
if command in ['run', 'build', 'build-module'] || command.ends_with('.v') || os.exists(command) {
// println('command')
// println(prefs.path)
match prefs.backend {
.c {
$if no_bootstrapv ? {
// TODO: improve the bootstrapping with a split C backend here.
// C code generated by `VEXE=v cmd/tools/builders/c_builder -os cross -o c.c cmd/tools/builders/c_builder.v`
// is enough to bootstrap the C backend, and thus the rest, but currently bootstrapping relies on
// `v -os cross -o v.c cmd/v` having a functional C codegen inside instead.
util.launch_tool(prefs.is_verbose, 'builders/c_builder', os.args[1..])
}
builder.compile('build', prefs, cbuilder.compile_c)
}
.js_node, .js_freestanding, .js_browser {
util.launch_tool(prefs.is_verbose, 'builders/js_builder', os.args[1..])
}
.native {
util.launch_tool(prefs.is_verbose, 'builders/native_builder', os.args[1..])
}
.interpret {
util.launch_tool(prefs.is_verbose, 'builders/interpret_builder', os.args[1..])
else {
if command.ends_with('.v') || os.exists(command) {
// println('command')
// println(prefs.path)
rebuild(prefs)
return
}
}
return
}
if prefs.is_help {
invoke_help_and_exit(args)
}
eprintln('v $command: unknown command\nRun ${term.highlight_command('v help')} for usage.')
eprintln('v $command: unknown command')
eprintln('Run ${term.highlight_command('v help')} for usage.')
exit(1)
}
@ -163,7 +149,31 @@ fn invoke_help_and_exit(remaining []string) {
2 { help.print_and_exit(remaining[1]) }
else {}
}
println('${term.highlight_command('v help')}: provide only one help topic.')
println('For usage information, use ${term.highlight_command('v help')}.')
eprintln('${term.highlight_command('v help')}: provide only one help topic.')
eprintln('For usage information, use ${term.highlight_command('v help')}.')
exit(1)
}
fn rebuild(prefs &pref.Preferences) {
match prefs.backend {
.c {
$if no_bootstrapv ? {
// TODO: improve the bootstrapping with a split C backend here.
// C code generated by `VEXE=v cmd/tools/builders/c_builder -os cross -o c.c cmd/tools/builders/c_builder.v`
// is enough to bootstrap the C backend, and thus the rest, but currently bootstrapping relies on
// `v -os cross -o v.c cmd/v` having a functional C codegen inside instead.
util.launch_tool(prefs.is_verbose, 'builders/c_builder', os.args[1..])
}
builder.compile('build', prefs, cbuilder.compile_c)
}
.js_node, .js_freestanding, .js_browser {
util.launch_tool(prefs.is_verbose, 'builders/js_builder', os.args[1..])
}
.native {
util.launch_tool(prefs.is_verbose, 'builders/native_builder', os.args[1..])
}
.interpret {
util.launch_tool(prefs.is_verbose, 'builders/interpret_builder', os.args[1..])
}
}
}

View File

@ -1295,6 +1295,16 @@ mm := map[string]int{}
val := mm['bad_key'] or { panic('key not found') }
```
You can also check, if a key is present, and get its value, if it was present, in one go:
```v
m := {
'abc': 'def'
}
if v := m['abc'] {
println('the map value for that key is: $v')
}
```
The same optional check applies to arrays:
```v
@ -5905,6 +5915,19 @@ fn main() {
}
```
Struct field deprecations:
```v oksyntax
module abc
// Note that only *direct* accesses to Xyz.d in *other modules*, will produce deprecation notices/warnings:
pub struct Xyz {
pub mut:
a int
d int [deprecated: 'use Xyz.a instead'; deprecated_after: '2999-03-01'] // produce a notice, the deprecation date is in the far future
}
```
Function/method deprecations:
```v
// Calling this function will result in a deprecation warning
[deprecated]

View File

@ -7,9 +7,9 @@ fn vlang_time(mut wg sync.WaitGroup) ?string {
data := http.get('https://vlang.io/utc_now')?
finish := time.ticks()
println('Finish getting time ${finish - start} ms')
println(data.text)
println(data.body)
wg.done()
return data.text
return data.body
}
fn remote_ip(mut wg sync.WaitGroup) ?string {
@ -17,9 +17,9 @@ fn remote_ip(mut wg sync.WaitGroup) ?string {
data := http.get('https://api.ipify.org')?
finish := time.ticks()
println('Finish getting ip ${finish - start} ms')
println(data.text)
println(data.body)
wg.done()
return data.text
return data.body
}
fn main() {

View File

@ -1,12 +1,15 @@
module main
// Note: This program, requires that the shared library was already compiled.
// To do so, run `v -d no_backtrace -o library -shared modules/library/library.v`
// before running this program.
import os
import dl
type FNAdder = fn (int, int) int
fn main() {
library_file_path := os.join_path(os.getwd(), dl.get_libname('library'))
library_file_path := os.join_path(os.dir(@FILE), dl.get_libname('library'))
handle := dl.open_opt(library_file_path, dl.rtld_lazy)?
eprintln('handle: ${ptr_str(handle)}')
f := FNAdder(dl.sym_opt(handle, 'add_1')?)

View File

@ -7,6 +7,6 @@ fn main() {
return
}
t := time.unix(resp.text.int())
t := time.unix(resp.body.int())
println(t.format())
}

View File

@ -46,7 +46,7 @@ fn main() {
return
}
weather := json.decode(Weather, resp.text) or {
weather := json.decode(Weather, resp.body) or {
println('failed to decode weather json')
return
}

View File

@ -0,0 +1,163 @@
/*
A V program for Bellman-Ford's single source
shortest path algorithm.
literally adapted from:
https://www.geeksforgeeks.org/bellman-ford-algorithm-dp-23/
// Adapted from this site... from C++ and Python codes
For Portugese reference
http://rascunhointeligente.blogspot.com/2010/10/o-algoritmo-de-bellman-ford-um.html
By CCS
*/
const large = 999999 // almost inifinity
// a structure to represent a weighted edge in graph
// EDGE represents a single weighted, directed edge of the graph:
// an arc from vertex `src` to vertex `dest`, costing `weight`.
struct EDGE {
mut:
	src    int
	dest   int
	weight int
}
// build_map_edges_from_graph converts a graph given as an adjacency
// matrix `g` into a map of consecutively numbered EDGE records.
// Every non zero cell g[i][j] becomes one directed edge i -> j.
// Input: matrix adjacency --> Output: edges list of src, dest and weight
// NOTE(review): the signature promises `map[T]EDGE`, but the body builds
// a `map[int]EDGE`, so in practice this only works for T == int — confirm.
fn build_map_edges_from_graph<T>(g [][]T) map[T]EDGE {
	n := g.len // TOTAL OF NODES for this graph -- its dimmension
	mut edges_map := map[int]EDGE{} // a graph represented by map of edges
	mut edge := 0 // a counter of edges
	for i in 0 .. n {
		for j in 0 .. n {
			// if exist an arc ... include as new edge
			if g[i][j] != 0 {
				edges_map[edge] = EDGE{i, j, g[i][j]}
				edge++
			}
		}
	}
	return edges_map
}
// print_sol prints every vertex, together with its computed
// shortest distance from the source vertex.
fn print_sol(dist []int) {
	print('\n Vertex Distance from Source')
	for vertex, distance in dist {
		print('\n $vertex --> $distance')
	}
}
// bellman_ford finds the shortest distances from the source vertex `src`
// to all other vertices, using the Bellman-Ford algorithm, and prints the
// result (via print_sol). Unlike Dijkstra, it supports negative edge
// weights, and it detects and reports negative weight cycles.
fn bellman_ford<T>(graph [][]T, src int) {
	// adapt the adjacency matrix representation into an edge list (a map)
	mut edges := build_map_edges_from_graph(graph)
	n_edges := edges.len // number of EDGES
	// Step 1: initialize distances from src to all other
	// vertices as INFINITE (the `large` sentinel)
	n_vertex := graph.len // adjacency matrix ... n nodes or vertices
	mut dist := []int{len: n_vertex, init: large}
	dist[src] = 0
	// Step 2: relax all edges |V| - 1 times. A simple shortest path
	// from src to any other vertex can have at most |V| - 1 edges.
	for _ in 0 .. n_vertex {
		for j in 0 .. n_edges {
			u := edges[j].src
			v := edges[j].dest
			weight := edges[j].weight
			if (dist[u] != large) && (dist[u] + weight < dist[v]) {
				dist[v] = dist[u] + weight
			}
		}
	}
	// Step 3: check for negative-weight cycles. Step 2 guarantees the
	// shortest distances, if the graph does not contain a negative weight
	// cycle; if any edge can still be relaxed, there is such a cycle.
	// FIX: this loop must scan *all* n_edges edges; the previous code
	// iterated only `0 .. n_vertex`, so cycles formed exclusively by
	// later edges in the map were never detected.
	for j in 0 .. n_edges {
		u := edges[j].src
		v := edges[j].dest
		weight := edges[j].weight
		if (dist[u] != large) && (dist[u] + weight < dist[v]) {
			print('\n Graph contains negative weight cycle')
			// If a negative cycle is detected, the distances are
			// meaningless, so simply return without printing them.
			return
		}
	}
	print_sol(dist)
}
// Entry point: run Bellman-Ford from vertex 0 on three sample graphs,
// each given as an adjacency matrix of edge weights (0 means no edge).
fn main() {
	// sample from https://www.geeksforgeeks.org/bellman-ford-algorithm-dp-23/
	graph_01 := [
		[0, -1, 4, 0, 0],
		[0, 0, 3, 2, 2],
		[0, 0, 0, 0, 0],
		[0, 1, 5, 0, 0],
		[0, 0, 0, -3, 0],
	]
	// sample from https://www.geeksforgeeks.org/prims-minimum-spanning-tree-mst-greedy-algo-5/
	graph_02 := [
		[0, 2, 0, 6, 0],
		[2, 0, 3, 8, 5],
		[0, 3, 0, 0, 7],
		[6, 8, 0, 0, 9],
		[0, 5, 7, 9, 0],
	]
	// sample from https://www.geeksforgeeks.org/kruskals-minimum-spanning-tree-algorithm-greedy-algo-2/?ref=lbp
	graph_03 := [
		[0, 10, 6, 5],
		[10, 0, 0, 15],
		[6, 0, 0, 4],
		[5, 15, 4, 0],
	]
	for index, sample in [graph_01, graph_02, graph_03] {
		// always start from vertex 0
		start_node := 0
		println('\n\n Graph ${index + 1} using Bellman-Ford algorithm (source node: $start_node)')
		bellman_ford(sample, start_node)
	}
	println('\n BYE -- OK')
}
//=================================================

View File

@ -1,4 +1,4 @@
// Author: ccs
// Author: CCS
// I follow literally code in C, done many years ago
fn main() {
// Adjacency matrix as a map
@ -20,10 +20,9 @@ fn breadth_first_search_path(graph map[string][]string, start string, target str
mut path := []string{} // ONE PATH with SUCCESS = array
mut queue := []string{} // a queue ... many paths
// all_nodes := graph.keys() // get a key of this map
n_nodes := graph.len // numbers of nodes of this graph
// a map to store all the nodes visited to avoid cycles
// start all them with False, not visited yet
mut visited := a_map_nodes_bool(n_nodes) // a map fully
mut visited := visited_init(graph) // a map fully
// false ==> not visited yet: {'A': false, 'B': false, 'C': false, 'D': false, 'E': false}
queue << start // first arrival
for queue.len != 0 {
@ -51,19 +50,6 @@ fn breadth_first_search_path(graph map[string][]string, start string, target str
return path
}
// Creating a map for VISITED nodes ...
// starting by false ===> means this node was not visited yet
fn a_map_nodes_bool(size int) map[string]bool {
mut my_map := map[string]bool{} // look this map ...
base := u8(65)
mut key := base.ascii_str()
for i in 0 .. size {
key = u8(base + i).ascii_str()
my_map[key] = false
}
return my_map
}
// classical removing of a node from the start of a queue
fn departure(mut queue []string) string {
mut x := queue[0]
@ -71,6 +57,17 @@ fn departure(mut queue []string) string {
return x
}
// visited_init builds the initial `visited` bookkeeping map for a graph:
// every vertex (key) of `a_graph` starts out marked as not yet visited
// (false), so that the traversal can avoid cycles.
fn visited_init(a_graph map[string][]string) map[string]bool {
	mut not_yet_seen := map[string]bool{}
	for key, _ in a_graph {
		not_yet_seen[key] = false
	}
	return not_yet_seen
}
// Based in the current node that is final, search for its parent, already visited, up to the root or start node
fn build_path_reverse(graph map[string][]string, start string, final string, visited map[string]bool) []string {
print('\n\n Nodes visited (true) or no (false): $visited')
@ -90,3 +87,5 @@ fn build_path_reverse(graph map[string][]string, start string, final string, vis
}
return path
}
//======================================================

View File

@ -1,4 +1,4 @@
// Author: ccs
// Author: CCS
// I follow literally code in C, done many years ago
fn main() {
@ -35,8 +35,7 @@ fn depth_first_search_path(graph map[string][]string, start string, target strin
mut path := []string{} // ONE PATH with SUCCESS = array
mut stack := []string{} // a stack ... many nodes
// all_nodes := graph.keys() // get a key of this map
n_nodes := graph.len // numbers of nodes of this graph
mut visited := a_map_nodes_bool(n_nodes) // a map fully
mut visited := visited_init(graph) // a map fully with false in all vertex
// false ... not visited yet: {'A': false, 'B': false, 'C': false, 'D': false, 'E': false}
stack << start // first push on the stack
@ -72,14 +71,15 @@ fn depth_first_search_path(graph map[string][]string, start string, target strin
return path
}
// Creating a map for nodes not VISITED visited ...
// starting by false ===> means this node was not visited yet
fn a_map_nodes_bool(size int) map[string]bool {
mut my_map := map[string]bool{} // look this map ...
for i in 0 .. size {
my_map[u8(65 + i).ascii_str()] = false
// Creating aa map to initialize with of visited nodes .... all with false in the init
// so these nodes are NOT VISITED YET
fn visited_init(a_graph map[string][]string) map[string]bool {
mut array_of_keys := a_graph.keys() // get all keys of this map
mut temp := map[string]bool{} // attention in these initializations with maps
for i in array_of_keys {
temp[i] = false
}
return my_map
return temp
}
// Based in the current node that is final, search for his parent, that is already visited, up to the root or start node
@ -101,3 +101,5 @@ fn build_path_reverse(graph map[string][]string, start string, final string, vis
}
return path
}
//*****************************************************

View File

@ -0,0 +1,241 @@
/*
Exploring Dijkstra,
The data example is from
https://www.geeksforgeeks.org/dijkstras-shortest-path-algorithm-greedy-algo-7/
by CCS
Dijkstra's single source shortest path algorithm.
The program uses an adjacency matrix representation of a graph
This Dijkstra algorithm uses a priority queue to save
the shortest paths. The queue structure has a data
which is the number of the node,
and the priority field which is the shortest distance.
PS: all the pre-requisites of Dijkstra are considered
$ v run file_name.v
// Creating a executable
$ v run file_name.v -o an_executable.EXE
$ ./an_executable.EXE
Code based from : Data Structures and Algorithms Made Easy: Data Structures and Algorithmic Puzzles, Fifth Edition (English Edition)
pseudo code written in C
This idea is quite different: it uses a priority queue to store the current
shortest path evaluted
The priority queue structure built using a list to simulate
the queue. A heap is not used in this case.
*/
// NODE is one entry of the priority queue used by dijkstra():
// the vertex number, plus its currently best known distance.
struct NODE {
mut:
	data     int // NUMBER OF NODE
	priority int // Lower values priority indicate ==> higher priority
}
// push_pq inserts the pair (data, priority) into the priority queue,
// keeping the queue sorted by ascending priority (lower value goes first).
// The queue is rebuilt into a temporary array and then copied back,
// so every push is O(n).
fn push_pq<T>(mut prior_queue []T, data int, priority int) {
	mut temp := []T{}
	lenght_pq := prior_queue.len
	mut i := 0
	// copy every existing entry with a strictly better (lower) priority
	for (i < lenght_pq) && (priority > prior_queue[i].priority) {
		temp << prior_queue[i]
		i++
	}
	// INSERTING SORTED in the queue
	temp << NODE{data, priority} // the new entry goes in at its sorted slot
	// copy the remaining tail of the original prior_queue
	for i < lenght_pq {
		temp << prior_queue[i]
		i++
	}
	// replace the caller's array contents with the rebuilt, sorted queue
	prior_queue = temp.clone()
}
// updating_priority changes the stored priority of the queue entry whose
// `data` field equals `search_data`, to `new_priority`.
// If the whole (non empty) queue is scanned without finding the entry,
// that is an internal inconsistency, so an error is printed and the
// program exits.
fn updating_priority<T>(mut prior_queue []T, search_data int, new_priority int) {
	mut i := 0
	pq_len := prior_queue.len
	for i < pq_len {
		if search_data == prior_queue[i].data {
			prior_queue[i] = NODE{search_data, new_priority} // overwrite in place
			break
		}
		i++
		// the whole list was examined, without finding search_data
		if i >= pq_len {
			// FIX: the message previously said 'does exist', which is the
			// opposite of the condition that was actually detected.
			print('\n This data $search_data does not exist ... PRIORITY QUEUE problem\n')
			exit(1)
		}
	}
}
// departure_priority removes the head (highest priority, i.e. lowest
// priority value) entry of the queue and returns its vertex number.
fn departure_priority<T>(mut prior_queue []T) int {
	head := prior_queue[0].data
	prior_queue.delete(0)
	return head
}
// all_adjacents returns the list of all vertices adjacent to vertex `v`
// in the adjacency matrix `g`. Only *positive* edge weights count as arcs.
fn all_adjacents<T>(g [][]T, v int) []int {
	mut neighbours := []int{}
	for i in 0 .. g.len {
		if g[v][i] > 0 {
			neighbours << i
		}
	}
	return neighbours
}
// print_solution prints the cost (distance) from the source
// vertex up to every vertex of the graph.
fn print_solution<T>(dist []T) {
	print('Vertex \tDistance from Source')
	for vertex, distance in dist {
		print('\n $vertex ==> \t $distance')
	}
}
// print_paths_dist prints, for every vertex except the source itself,
// the chain of predecessor vertices back towards the source (read right
// to left), together with the total cost of that path.
// NOTE(review): assumes path[] uses -1 as the 'no predecessor' marker,
// which is how dijkstra() initialises it — confirm for other callers.
fn print_paths_dist<T>(path []T, dist []T) {
	print('\n Read the nodes from right to left (a path): \n')
	for node in 1 .. (path.len) {
		print('\n $node ')
		mut i := node
		// walk the predecessor chain until the -1 marker (the source)
		for path[i] != -1 {
			print(' <= ${path[i]} ')
			i = path[i]
		}
		print('\t PATH COST: ${dist[node]}')
	}
}
// dijkstra computes single source shortest paths from vertex `s` over the
// adjacency matrix `g`, using the list-based priority queue above.
// Two results are printed: the distance of each vertex from `s`
// (print_solution) and the predecessor chains (print_paths_dist).
// Pre-requisite: all edge weights must be non negative.
fn dijkstra(g [][]int, s int) {
	mut pq_queue := []NODE{} // creating a priority queue
	push_pq(mut pq_queue, s, 0) // the source enters with priority 0
	mut n := g.len
	// -1 is used here as the INFINITY / 'not reached yet' sentinel
	mut dist := []int{len: n, init: -1}
	mut path := []int{len: n, init: -1} // previous node of each shortest path
	// Distance of source vertex from itself is always 0
	dist[s] = 0
	for pq_queue.len != 0 {
		// take the currently closest unfinished vertex
		mut v := departure_priority(mut pq_queue)
		// for all vertices w adjacent to v
		mut adjs_of_v := all_adjacents(g, v)
		mut new_dist := 0
		for w in adjs_of_v {
			new_dist = dist[v] + g[v][w]
			if dist[w] == -1 {
				// first time w is reached: record and enqueue it
				dist[w] = new_dist
				push_pq(mut pq_queue, w, dist[w])
				path[w] = v // collecting the previous node -- lowest weight
			}
			if dist[w] > new_dist {
				// found a cheaper path to w: relax it and re-prioritise
				dist[w] = new_dist
				updating_priority(mut pq_queue, w, dist[w])
				path[w] = v
			}
		}
	}
	// print the constructed distance array, then the predecessor paths
	print_solution(dist)
	print_paths_dist(path, dist)
}
/*
Solution Expected
Vertex Distance from Source
0 0
1 4
2 12
3 19
4 21
5 11
6 9
7 8
8 14
*/
// Entry point: run Dijkstra from vertex 0 on three sample graphs,
// each given as an adjacency matrix of edge weights (0 means no edge).
fn main() {
	// sample from https://www.geeksforgeeks.org/dijkstras-shortest-path-algorithm-greedy-algo-7/
	graph_01 := [
		[0, 4, 0, 0, 0, 0, 0, 8, 0],
		[4, 0, 8, 0, 0, 0, 0, 11, 0],
		[0, 8, 0, 7, 0, 4, 0, 0, 2],
		[0, 0, 7, 0, 9, 14, 0, 0, 0],
		[0, 0, 0, 9, 0, 10, 0, 0, 0],
		[0, 0, 4, 14, 10, 0, 2, 0, 0],
		[0, 0, 0, 0, 0, 2, 0, 1, 6],
		[8, 11, 0, 0, 0, 0, 1, 0, 7],
		[0, 0, 2, 0, 0, 0, 6, 7, 0],
	]
	// sample from https://www.geeksforgeeks.org/prims-minimum-spanning-tree-mst-greedy-algo-5/
	graph_02 := [
		[0, 2, 0, 6, 0],
		[2, 0, 3, 8, 5],
		[0, 3, 0, 0, 7],
		[6, 8, 0, 0, 9],
		[0, 5, 7, 9, 0],
	]
	// sample from https://www.geeksforgeeks.org/kruskals-minimum-spanning-tree-algorithm-greedy-algo-2/?ref=lbp
	graph_03 := [
		[0, 10, 6, 5],
		[10, 0, 0, 15],
		[6, 0, 0, 4],
		[5, 15, 4, 0],
	]
	for index, sample in [graph_01, graph_02, graph_03] {
		// always start from vertex 0
		start_node := 0
		println('\n\n Graph ${index + 1} using Dijkstra algorithm (source node: $start_node)')
		dijkstra(sample, start_node)
	}
	println('\n BYE -- OK')
}
//********************************************************************

View File

@ -0,0 +1,230 @@
/*
Exploring PRIMS,
The data example is from
https://www.geeksforgeeks.org/prims-minimum-spanning-tree-mst-greedy-algo-5/
by CCS
PS: all the pre-requisites of Dijkstra are considered
$ v run file_name.v
Creating a executable
$ v run file_name.v -o an_executable.EXE
$ ./an_executable.EXE
Code based from : Data Structures and Algorithms Made Easy: Data Structures and Algorithmic Puzzles, Fifth Edition (English Edition)
pseudo code written in C
This idea is quite different: it uses a priority queue to store the current
shortest path evaluted
The priority queue structure built using a list to simulate
the queue. A heap is not used in this case.
*/
// NODE is one entry of the priority queue used by prim_mst():
// the vertex number, plus its current priority (connection cost).
struct NODE {
mut:
	data     int // the vertex number
	priority int // Lower values priority indicate ==> higher priority
}
// push_pq inserts the pair (data, priority) into the priority queue,
// keeping the queue sorted by ascending priority (lower value goes first).
// The queue is rebuilt into a temporary array and then copied back,
// so every push is O(n).
fn push_pq<T>(mut prior_queue []T, data int, priority int) {
	mut temp := []T{}
	lenght_pq := prior_queue.len
	mut i := 0
	// copy every existing entry with a strictly better (lower) priority
	for (i < lenght_pq) && (priority > prior_queue[i].priority) {
		temp << prior_queue[i]
		i++
	}
	// INSERTING SORTED in the queue
	temp << NODE{data, priority} // the new entry goes in at its sorted slot
	// copy the remaining tail of the original prior_queue
	for i < lenght_pq {
		temp << prior_queue[i]
		i++
	}
	// replace the caller's array contents with the rebuilt, sorted queue
	prior_queue = temp.clone()
}
// updating_priority changes the stored priority of the queue entry whose
// `data` field equals `search_data`, to `new_priority`.
// Unlike the Dijkstra variant, a missing entry is *not* fatal here:
// if the whole queue is scanned without finding it, the pair is simply
// pushed as a new entry instead.
fn updating_priority<T>(mut prior_queue []T, search_data int, new_priority int) {
	mut i := 0
	mut lenght_pq := prior_queue.len
	for i < lenght_pq {
		if search_data == prior_queue[i].data {
			prior_queue[i] = NODE{search_data, new_priority} // overwrite in place
			break
		}
		i++
		// the whole list was examined, without finding search_data
		if i >= lenght_pq {
			// not found ... then push it as a fresh entry; note that
			// lenght_pq is not refreshed, so the loop still terminates
			push_pq(mut prior_queue, search_data, new_priority)
		}
	} // end for
}
// departure_priority removes the head (highest priority, i.e. lowest
// priority value) entry of the queue and returns its vertex number.
fn departure_priority<T>(mut prior_queue []T) int {
	head := prior_queue[0].data
	prior_queue.delete(0)
	return head
}
// all_adjacents returns the list of all vertices adjacent to vertex `v`
// in the adjacency matrix `g`. Only *positive* edge weights count as arcs.
fn all_adjacents<T>(g [][]T, v int) []int {
	mut neighbours := []int{}
	for i in 0 .. g.len {
		if g[v][i] > 0 {
			neighbours << i
		}
	}
	return neighbours
}
// print_solution prints the constructed MST stored in `path`:
// for each vertex, the edge connecting it to its parent and that
// edge's weight (looked up in `g`), followed by the total MST cost.
// A parent of -1 marks the start/reference vertex, which has no edge.
fn print_solution(path []int, g [][]int) {
	print(' Edge \tWeight\n')
	mut total := 0
	for node, parent in path {
		if parent == -1 {
			print('\n $node <== reference or start node')
		} else {
			edge_weight := g[node][parent]
			print('\n $node <--> $parent \t$edge_weight')
			total += edge_weight
		}
	}
	print('\n Minimum Cost Spanning Tree: $total\n\n')
}
// prim_mst builds a minimum spanning tree of the graph `g` (adjacency
// matrix), starting from vertex `s`, using the list-based priority queue
// above, and prints the resulting tree edges and total cost.
// NOTE(review): the relaxation mixes two conventions — `new_dist` is a
// Dijkstra-style path sum (dist[v] + g[v][w]), while dist[w] is assigned
// the Prim-style single edge weight g[v][w]. The comparison therefore is
// not the textbook Prim key comparison; confirm against the expected
// outputs before relying on it for other graphs.
fn prim_mst(g [][]int, s int) {
	mut pq_queue := []NODE{} // creating a priority queue
	push_pq(mut pq_queue, s, 0) // the start vertex enters with priority 0
	mut n := g.len
	// -1 is used here as the INFINITY / 'not reached yet' sentinel
	mut dist := []int{len: n, init: -1}
	mut path := []int{len: n, init: -1} // parent of each vertex in the MST
	// Distance of source vertex from itself is always 0
	dist[s] = 0
	for pq_queue.len != 0 {
		// take the currently cheapest queued vertex
		mut v := departure_priority(mut pq_queue)
		// for all vertices w adjacent to v
		mut adjs_of_v := all_adjacents(g, v)
		mut new_dist := 0
		for w in adjs_of_v {
			new_dist = dist[v] + g[v][w]
			if dist[w] == -1 {
				// first time w is reached: key it with the edge weight
				dist[w] = g[v][w]
				push_pq(mut pq_queue, w, dist[w])
				path[w] = v // collecting the previous node -- lowest weight
			}
			if dist[w] > new_dist {
				// found a cheaper connection: re-key and re-parent w
				dist[w] = g[v][w]
				updating_priority(mut pq_queue, w, dist[w])
				path[w] = v // father / previous node
			}
		}
	}
	// print the tree edges and the total spanning tree cost
	print_solution(path, g)
}
/*
Solution Expected graph_02
Edge Weight
0 - 1 2
1 - 2 3
0 - 3 6
1 - 4 5
*/
// Entry point: build and print the MST of three sample graphs,
// each given as an adjacency matrix of edge weights (0 means no edge).
fn main() {
	// sample from https://www.geeksforgeeks.org/dijkstras-shortest-path-algorithm-greedy-algo-7/
	graph_01 := [
		[0, 4, 0, 0, 0, 0, 0, 8, 0],
		[4, 0, 8, 0, 0, 0, 0, 11, 0],
		[0, 8, 0, 7, 0, 4, 0, 0, 2],
		[0, 0, 7, 0, 9, 14, 0, 0, 0],
		[0, 0, 0, 9, 0, 10, 0, 0, 0],
		[0, 0, 4, 14, 10, 0, 2, 0, 0],
		[0, 0, 0, 0, 0, 2, 0, 1, 6],
		[8, 11, 0, 0, 0, 0, 1, 0, 7],
		[0, 0, 2, 0, 0, 0, 6, 7, 0],
	]
	// sample from https://www.geeksforgeeks.org/prims-minimum-spanning-tree-mst-greedy-algo-5/
	graph_02 := [
		[0, 2, 0, 6, 0],
		[2, 0, 3, 8, 5],
		[0, 3, 0, 0, 7],
		[6, 8, 0, 0, 9],
		[0, 5, 7, 9, 0],
	]
	// sample from https://www.geeksforgeeks.org/kruskals-minimum-spanning-tree-algorithm-greedy-algo-2/?ref=lbp
	graph_03 := [
		[0, 10, 6, 5],
		[10, 0, 0, 15],
		[6, 0, 0, 4],
		[5, 15, 4, 0],
	]
	for index, sample in [graph_01, graph_02, graph_03] {
		println('\n Minimal Spanning Tree of graph ${index + 1} using PRIM algorithm')
		// always start from vertex 0
		start_node := 0
		prim_mst(sample, start_node)
	}
	println('\n BYE -- OK')
}
//********************************************************************

View File

@ -11,7 +11,7 @@ fn (h ExampleHandler) handle(req Request) Response {
})
}
mut status_code := 200
res.text = match req.url {
res.body = match req.url {
'/foo' {
'bar\n'
}

View File

@ -8,7 +8,7 @@ fn send_request(mut wg sync.WaitGroup) ?string {
finish := time.ticks()
println('Finish getting time ${finish - start} ms')
wg.done()
return data.text
return data.body
}
fn main() {

View File

@ -16,7 +16,7 @@ fn worker_fetch(p &pool.PoolProcessor, cursor int, worker_id int) voidptr {
println('failed to fetch data from /v0/item/${id}.json')
return pool.no_result
}
story := json.decode(Story, resp.text) or {
story := json.decode(Story, resp.body) or {
println('failed to decode a story')
return pool.no_result
}
@ -30,7 +30,7 @@ fn main() {
println('failed to fetch data from /v0/topstories.json')
return
}
ids := json.decode([]int, resp.text) or {
ids := json.decode([]int, resp.body) or {
println('failed to decode topstories.json')
return
}#[0..10]

View File

@ -1,9 +1,13 @@
#!/usr/local/bin/v run
#!/usr/local/bin/v
// The shebang above associates the file to V on Unix-like systems,
// so it can be run just by specifying the path to the file
// once it's made executable using `chmod +x`.
// Note that you can also use: `#!/usr/bin/env -S v crun`, if your system supports the -S flag to env
// The benefit is that in this case, v could be anywhere in your path, while /usr/bin/env is guaranteed
// to be present on most Unix systems in that exact place.
for _ in 0 .. 3 {
println('V script')
}

View File

@ -1611,7 +1611,12 @@ _SOKOL_PRIVATE bool _saudio_backend_init(void) {
fmtex.Format.nAvgBytesPerSec = fmtex.Format.nSamplesPerSec * fmtex.Format.nBlockAlign;
fmtex.Format.cbSize = 22; /* WORD + DWORD + GUID */
fmtex.Samples.wValidBitsPerSample = 32;
fmtex.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT;
if (_saudio.num_channels == 1) {
fmtex.dwChannelMask = SPEAKER_FRONT_CENTER;
}
else {
fmtex.dwChannelMask = SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT;
}
fmtex.SubFormat = _saudio_KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
dur = (REFERENCE_TIME)
(((double)_saudio.buffer_frames) / (((double)_saudio.sample_rate) * (1.0/10000000.0)));

View File

@ -268,13 +268,28 @@ fn _write_buf_to_fd(fd int, buf &u8, buf_len int) {
if buf_len <= 0 {
return
}
unsafe {
mut ptr := buf
mut remaining_bytes := buf_len
for remaining_bytes > 0 {
x := C.write(fd, ptr, remaining_bytes)
ptr += x
remaining_bytes -= x
mut ptr := unsafe { buf }
mut remaining_bytes := isize(buf_len)
mut x := isize(0)
$if freestanding || vinix {
unsafe {
for remaining_bytes > 0 {
x = C.write(fd, ptr, remaining_bytes)
ptr += x
remaining_bytes -= x
}
}
} $else {
mut stream := voidptr(C.stdout)
if fd == 2 {
stream = voidptr(C.stderr)
}
unsafe {
for remaining_bytes > 0 {
x = isize(C.fwrite(ptr, 1, remaining_bytes, stream))
ptr += x
remaining_bytes -= x
}
}
}
}

View File

@ -32,7 +32,6 @@ $if dynamic_boehm ? {
$if macos || linux {
#flag -DGC_PTHREADS=1
#flag -I@VEXEROOT/thirdparty/libgc/include
#flag -lpthread
$if (prod && !tinyc && !debug) || !(amd64 || arm64 || i386 || arm32) {
// TODO: replace the architecture check with a `!$exists("@VEXEROOT/thirdparty/tcc/lib/libgc.a")` comptime call
#flag @VEXEROOT/thirdparty/libgc/gc.o
@ -40,6 +39,7 @@ $if dynamic_boehm ? {
#flag @VEXEROOT/thirdparty/tcc/lib/libgc.a
}
#flag -ldl
#flag -lpthread
} $else $if freebsd {
// Tested on FreeBSD 13.0-RELEASE-p3, with clang, gcc and tcc:
#flag -DBUS_PAGE_FAULT=T_PAGEFLT

View File

@ -205,7 +205,11 @@ pub fn (s string) hash() int {
// int returns the value of the string as an integer `'1'.int() == 1`.
pub fn (s string) int() int {
return int(JS.parseInt(s.str))
res := int(0)
#if (typeof(s) == "string") { res.val = parseInt(s) }
#else { res.val = parseInt(s.str) }
return res
}
// i64 returns the value of the string as i64 `'1'.i64() == i64(1)`.

View File

@ -1,4 +1,4 @@
## Description:
`compress` is a namespace for (multiple) compression algorithms supported by V.
At the moment, only `compress.zlib` is implemented.
At the moment, only `compress.zlib` and `compress.deflate` are implemented.

View File

@ -0,0 +1,44 @@
module compress

#flag -I @VEXEROOT/thirdparty/zip
#include "miniz.h"

// max_size limits both input and output buffers to 2 GiB, since miniz
// returns the length as a usize that is converted to int below.
pub const max_size = u64(1 << 31)

fn C.tdefl_compress_mem_to_heap(source_buf voidptr, source_buf_len usize, out_len &usize, flags int) voidptr

fn C.tinfl_decompress_mem_to_heap(source_buf voidptr, source_buf_len usize, out_len &usize, flags int) voidptr

// compresses an array of bytes based on the provided miniz flags and returns the compressed bytes in a new array.
// see `deflate.compress([]u8)` and `zlib.compress([]u8)` for default implementations.
// Note: the returned slice wraps memory allocated by miniz; `[manualfree]` means the caller owns it.
[manualfree]
pub fn compress(data []u8, flags int) ?[]u8 {
if u64(data.len) > compress.max_size {
return error('data too large ($data.len > $compress.max_size)')
}
mut out_len := usize(0)
address := C.tdefl_compress_mem_to_heap(data.data, data.len, &out_len, flags)
// miniz returns NULL (0) on failure
if address == 0 {
return error('compression failed')
}
if u64(out_len) > compress.max_size {
return error('compressed data is too large ($out_len > $compress.max_size)')
}
return unsafe { address.vbytes(int(out_len)) }
}

// decompresses an array of bytes based on the provided miniz flags and returns the decompressed bytes in a new array.
// see `deflate.decompress([]u8)` and `zlib.decompress([]u8)` for default implementations.
// Note: the returned slice wraps memory allocated by miniz; `[manualfree]` means the caller owns it.
[manualfree]
pub fn decompress(data []u8, flags int) ?[]u8 {
mut out_len := usize(0)
address := C.tinfl_decompress_mem_to_heap(data.data, data.len, &out_len, flags)
// miniz returns NULL (0) on failure
if address == 0 {
return error('decompression failed')
}
if u64(out_len) > compress.max_size {
return error('decompressed data is too large ($out_len > $compress.max_size)')
}
return unsafe { address.vbytes(int(out_len)) }
}

View File

@ -0,0 +1,21 @@
## Description:
`compress.deflate` is a module that assists in the compression and
decompression of binary data using `deflate` compression
NOTE: To decompress gzip, discard first 10 bytes of compressed bytes
then use `compress.deflate.decompress`. (Header validation won't be
performed in this case)
## Examples:
```v
import compress.deflate
fn main() {
uncompressed := 'Hello world!'
compressed := deflate.compress(uncompressed.bytes())?
decompressed := deflate.decompress(compressed)?
assert decompressed == uncompressed.bytes()
}
```

View File

@ -0,0 +1,16 @@
module deflate

import compress

// compress compresses an array of bytes using raw deflate (no zlib/gzip header)
// and returns the compressed bytes in a new array.
// Example: compressed := deflate.compress(b)?
pub fn compress(data []u8) ?[]u8 {
// flags = 0: no TDEFL_WRITE_ZLIB_HEADER, i.e. a raw deflate stream
return compress.compress(data, 0)
}

// decompress decompresses an array of bytes using raw deflate (no zlib/gzip header)
// and returns the decompressed bytes in a new array.
// Example: decompressed := deflate.decompress(b)?
[manualfree]
pub fn decompress(data []u8) ?[]u8 {
// flags = 0: no TINFL_FLAG_PARSE_ZLIB_HEADER, i.e. a raw deflate stream
return compress.decompress(data, 0)
}

View File

@ -0,0 +1,12 @@
module deflate

// gzip streams always begin with the magic bytes 0x1f 0x8b.
const gzip_magic_numbers = [u8(0x1f), 0x8b]

// test_gzip verifies that deflate compression round-trips, and that the
// output is a raw deflate stream (it must NOT start with a gzip header).
fn test_gzip() ? {
uncompressed := 'Hello world!'
compressed := compress(uncompressed.bytes())?
first2 := compressed[0..2]
// raw deflate output must not look like a gzip stream
assert first2 != deflate.gzip_magic_numbers
decompressed := decompress(compressed)?
assert decompressed == uncompressed.bytes()
}

View File

@ -1,60 +1,19 @@
module zlib
#flag -I @VEXEROOT/thirdparty/zip
#include "miniz.h"
pub const max_size = u64(1 << 31)
fn C.tdefl_compress_mem_to_heap(source_buf voidptr, source_buf_len usize, out_len &usize, flags int) voidptr
fn C.tinfl_decompress_mem_to_heap(source_buf voidptr, source_buf_len usize, out_len &usize, flags int) voidptr
import compress
// compresses an array of bytes using zlib and returns the compressed bytes in a new array
// Example: compressed := zlib.compress(b)?
[manualfree]
pub fn compress(data []u8) ?[]u8 {
if u64(data.len) > zlib.max_size {
return error('data too large ($data.len > $zlib.max_size)')
}
mut out_len := usize(0)
// flags = TDEFL_WRITE_ZLIB_HEADER (0x01000)
address := C.tdefl_compress_mem_to_heap(data.data, data.len, &out_len, 0x01000)
if address == 0 {
return error('compression failed')
}
if u64(out_len) > zlib.max_size {
return error('compressed data is too large ($out_len > $zlib.max_size)')
}
compressed := unsafe {
address.vbytes(int(out_len))
}
copy := compressed.clone()
unsafe {
free(address)
}
return copy
return compress.compress(data, 0x01000)
}
// decompresses an array of bytes using zlib and returns the decompressed bytes in a new array
// Example: decompressed := zlib.decompress(b)?
[manualfree]
pub fn decompress(data []u8) ?[]u8 {
mut out_len := usize(0)
// flags = TINFL_FLAG_PARSE_ZLIB_HEADER (0x1)
address := C.tinfl_decompress_mem_to_heap(data.data, data.len, &out_len, 0x1)
if address == 0 {
return error('decompression failed')
}
if u64(out_len) > zlib.max_size {
return error('decompressed data is too large ($out_len > $zlib.max_size)')
}
decompressed := unsafe {
address.vbytes(int(out_len))
}
copy := decompressed.clone()
unsafe {
free(address)
}
return copy
return compress.decompress(data, 0x1)
}

View File

@ -9,8 +9,14 @@
module md5
import math.bits
import encoding.binary
[direct_array_access; inline]
fn get_le_u32(b []u8, start int) u32 {
return u32(b[start]) | (u32(b[1 + start]) << u32(8)) | (u32(b[2 + start]) << u32(16)) | (u32(b[
3 + start]) << u32(24))
}
[direct_array_access]
fn block_generic(mut dig Digest, p []u8) {
// load state
mut a := dig.s[0]
@ -19,8 +25,6 @@ fn block_generic(mut dig Digest, p []u8) {
mut d := dig.s[3]
for i := 0; i <= p.len - block_size; i += block_size {
mut q := p[i..]
q = q[..block_size]
// save current state
aa := a
bb := b
@ -28,22 +32,22 @@ fn block_generic(mut dig Digest, p []u8) {
dd := d
// load input block
x0 := binary.little_endian_u32(q[4 * 0x0..])
x1 := binary.little_endian_u32(q[4 * 0x1..])
x2 := binary.little_endian_u32(q[4 * 0x2..])
x3 := binary.little_endian_u32(q[4 * 0x3..])
x4 := binary.little_endian_u32(q[4 * 0x4..])
x5 := binary.little_endian_u32(q[4 * 0x5..])
x6 := binary.little_endian_u32(q[4 * 0x6..])
x7 := binary.little_endian_u32(q[4 * 0x7..])
x8 := binary.little_endian_u32(q[4 * 0x8..])
x9 := binary.little_endian_u32(q[4 * 0x9..])
xa := binary.little_endian_u32(q[4 * 0xa..])
xb := binary.little_endian_u32(q[4 * 0xb..])
xc := binary.little_endian_u32(q[4 * 0xc..])
xd := binary.little_endian_u32(q[4 * 0xd..])
xe := binary.little_endian_u32(q[4 * 0xe..])
xf := binary.little_endian_u32(q[4 * 0xf..])
x0 := get_le_u32(p, 4 * 0x0 + i)
x1 := get_le_u32(p, 4 * 0x1 + i)
x2 := get_le_u32(p, 4 * 0x2 + i)
x3 := get_le_u32(p, 4 * 0x3 + i)
x4 := get_le_u32(p, 4 * 0x4 + i)
x5 := get_le_u32(p, 4 * 0x5 + i)
x6 := get_le_u32(p, 4 * 0x6 + i)
x7 := get_le_u32(p, 4 * 0x7 + i)
x8 := get_le_u32(p, 4 * 0x8 + i)
x9 := get_le_u32(p, 4 * 0x9 + i)
xa := get_le_u32(p, 4 * 0xa + i)
xb := get_le_u32(p, 4 * 0xb + i)
xc := get_le_u32(p, 4 * 0xc + i)
xd := get_le_u32(p, 4 * 0xd + i)
xe := get_le_u32(p, 4 * 0xe + i)
xf := get_le_u32(p, 4 * 0xf + i)
// round 1
a = b + bits.rotate_left_32((((c ^ d) & b) ^ d) + a + x0 + u32(0xd76aa478), 7)

View File

@ -4,26 +4,26 @@
module binary
// Little Endian
[inline]
[direct_array_access; inline]
pub fn little_endian_u16(b []u8) u16 {
_ = b[1] // bounds check
return u16(b[0]) | (u16(b[1]) << u16(8))
}
[inline]
[direct_array_access; inline]
pub fn little_endian_put_u16(mut b []u8, v u16) {
_ = b[1] // bounds check
b[0] = u8(v)
b[1] = u8(v >> u16(8))
}
[inline]
[direct_array_access; inline]
pub fn little_endian_u32(b []u8) u32 {
_ = b[3] // bounds check
return u32(b[0]) | (u32(b[1]) << u32(8)) | (u32(b[2]) << u32(16)) | (u32(b[3]) << u32(24))
}
[inline]
[direct_array_access; inline]
pub fn little_endian_put_u32(mut b []u8, v u32) {
_ = b[3] // bounds check
b[0] = u8(v)
@ -32,13 +32,13 @@ pub fn little_endian_put_u32(mut b []u8, v u32) {
b[3] = u8(v >> u32(24))
}
[inline]
[direct_array_access; inline]
pub fn little_endian_u64(b []u8) u64 {
_ = b[7] // bounds check
return u64(b[0]) | (u64(b[1]) << u64(8)) | (u64(b[2]) << u64(16)) | (u64(b[3]) << u64(24)) | (u64(b[4]) << u64(32)) | (u64(b[5]) << u64(40)) | (u64(b[6]) << u64(48)) | (u64(b[7]) << u64(56))
}
[inline]
[direct_array_access; inline]
pub fn little_endian_put_u64(mut b []u8, v u64) {
_ = b[7] // bounds check
b[0] = u8(v)
@ -52,26 +52,26 @@ pub fn little_endian_put_u64(mut b []u8, v u64) {
}
// Big Endian
[inline]
[direct_array_access; inline]
pub fn big_endian_u16(b []u8) u16 {
_ = b[1] // bounds check
return u16(b[1]) | (u16(b[0]) << u16(8))
}
[inline]
[direct_array_access; inline]
pub fn big_endian_put_u16(mut b []u8, v u16) {
_ = b[1] // bounds check
b[0] = u8(v >> u16(8))
b[1] = u8(v)
}
[inline]
[direct_array_access; inline]
pub fn big_endian_u32(b []u8) u32 {
_ = b[3] // bounds check
return u32(b[3]) | (u32(b[2]) << u32(8)) | (u32(b[1]) << u32(16)) | (u32(b[0]) << u32(24))
}
[inline]
[direct_array_access; inline]
pub fn big_endian_put_u32(mut b []u8, v u32) {
_ = b[3] // bounds check
b[0] = u8(v >> u32(24))
@ -80,13 +80,13 @@ pub fn big_endian_put_u32(mut b []u8, v u32) {
b[3] = u8(v)
}
[inline]
[direct_array_access; inline]
pub fn big_endian_u64(b []u8) u64 {
_ = b[7] // bounds check
return u64(b[7]) | (u64(b[6]) << u64(8)) | (u64(b[5]) << u64(16)) | (u64(b[4]) << u64(24)) | (u64(b[3]) << u64(32)) | (u64(b[2]) << u64(40)) | (u64(b[1]) << u64(48)) | (u64(b[0]) << u64(56))
}
[inline]
[direct_array_access; inline]
pub fn big_endian_put_u64(mut b []u8, v u64) {
_ = b[7] // bounds check
b[0] = u8(v >> u64(56))

View File

@ -17,10 +17,10 @@ struct Employee {
fn test_simple() ? {
x := Employee{'Peter', 28, 95000.5, .worker}
s := json.encode(x)
eprintln('Employee x: $s')
// eprintln('Employee x: $s')
assert s == '{"name":"Peter","age":28,"salary":95000.5,"title":2}'
y := json.decode(Employee, s)?
eprintln('Employee y: $y')
// eprintln('Employee y: $y')
assert y.name == 'Peter'
assert y.age == 28
assert y.salary == 95000.5
@ -90,15 +90,15 @@ fn test_encode_decode_sumtype() ? {
t,
]
}
eprintln('Game: $game')
// eprintln('Game: $game')
enc := json.encode(game)
eprintln('Encoded Game: $enc')
// eprintln('Encoded Game: $enc')
assert enc == '{"title":"Super Mega Game","player":{"name":"Monke","_type":"Human"},"other":[{"tag":"Pen","_type":"Item"},{"tag":"Cookie","_type":"Item"},1,"Stool",{"_type":"Time","value":$t.unix_time()}]}'
dec := json.decode(SomeGame, enc)?
eprintln('Decoded Game: $dec')
// eprintln('Decoded Game: $dec')
assert game.title == dec.title
assert game.player == dec.player
@ -138,9 +138,9 @@ struct User {
fn test_parse_user() ? {
s := '{"age": 10, "nums": [1,2,3], "type": 1, "lastName": "Johnson", "IsRegistered": true, "pet_animals": {"name": "Bob", "animal": "Dog"}}'
u2 := json.decode(User2, s)?
println(u2)
// println(u2)
u := json.decode(User, s)?
println(u)
// println(u)
assert u.age == 10
assert u.last_name == 'Johnson'
assert u.is_registered == true
@ -158,12 +158,12 @@ fn test_encode_decode_time() ? {
reg_date: time.new_time(year: 2020, month: 12, day: 22, hour: 7, minute: 23)
}
s := json.encode(user)
println(s)
// println(s)
assert s.contains('"reg_date":1608621780')
user2 := json.decode(User2, s)?
assert user2.reg_date.str() == '2020-12-22 07:23:00'
println(user2)
println(user2.reg_date)
// println(user2)
// println(user2.reg_date)
}
fn (mut u User) foo() string {
@ -181,7 +181,7 @@ fn test_encode_user() {
}
expected := '{"age":10,"nums":[1,2,3],"lastName":"Johnson","IsRegistered":true,"type":0,"pet_animals":"foo"}'
out := json.encode(usr)
println(out)
// println(out)
assert out == expected
// Test json.encode on mutable pointers
assert usr.foo() == expected
@ -194,7 +194,7 @@ struct Color {
fn test_raw_json_field() {
color := json.decode(Color, '{"space": "YCbCr", "point": {"Y": 123}}') or {
println('text')
// println('text')
return
}
assert color.point == '{"Y":123}'
@ -203,7 +203,7 @@ fn test_raw_json_field() {
fn test_bad_raw_json_field() {
color := json.decode(Color, '{"space": "YCbCr"}') or {
println('text')
// println('text')
return
}
assert color.point == ''
@ -225,7 +225,7 @@ fn test_struct_in_struct() ? {
assert country.cities.len == 2
assert country.cities[0].name == 'London'
assert country.cities[1].name == 'Manchester'
println(country.cities)
// println(country.cities)
}
fn test_encode_map() {
@ -237,7 +237,7 @@ fn test_encode_map() {
'four': 4
}
out := json.encode(numbers)
println(out)
// println(out)
assert out == expected
}
@ -249,7 +249,7 @@ fn test_parse_map() ? {
'four': 4
}
out := json.decode(map[string]int, '{"one":1,"two":2,"three":3,"four":4}')?
println(out)
// println(out)
assert out == expected
}
@ -306,7 +306,7 @@ fn test_nested_type() ? {
}
}
out := json.encode(data)
println(out)
// println(out)
assert out == data_expected
data2 := json.decode(Data, data_expected)?
assert data2.countries.len == data.countries.len
@ -351,7 +351,7 @@ fn test_errors() {
invalid_array := fn () {
data := '{"countries":[{"cities":[{"name":"London"},{"name":"Manchester"}],"name":"UK"},{"cities":{"name":"Donlon"},"name":"KU"}],"users":{"Foo":{"age":10,"nums":[1,2,3],"lastName":"Johnson","IsRegistered":true,"type":0,"pet_animals":"little foo"},"Boo":{"age":20,"nums":[5,3,1],"lastName":"Smith","IsRegistered":false,"type":4,"pet_animals":"little boo"}},"extra":{"2":{"n1":2,"n2":4,"n3":8,"n4":16},"3":{"n1":3,"n2":9,"n3":27,"n4":81}}}'
json.decode(Data, data) or {
println(err)
// println(err)
assert err.msg().starts_with('Json element is not an array:')
return
}
@ -360,7 +360,7 @@ fn test_errors() {
invalid_object := fn () {
data := '{"countries":[{"cities":[{"name":"London"},{"name":"Manchester"}],"name":"UK"},{"cities":[{"name":"Donlon"},{"name":"Termanches"}],"name":"KU"}],"users":[{"age":10,"nums":[1,2,3],"lastName":"Johnson","IsRegistered":true,"type":0,"pet_animals":"little foo"},{"age":20,"nums":[5,3,1],"lastName":"Smith","IsRegistered":false,"type":4,"pet_animals":"little boo"}],"extra":{"2":{"n1":2,"n2":4,"n3":8,"n4":16},"3":{"n1":3,"n2":9,"n3":27,"n4":81}}}'
json.decode(Data, data) or {
println(err)
// println(err)
assert err.msg().starts_with('Json element is not an object:')
return
}
@ -425,6 +425,13 @@ fn test_decode_null_object() ? {
assert '$info.maps' == '{}'
}
fn test_decode_missing_maps_field() ? {
info := json.decode(Info, '{"id": 22, "items": null}')?
assert info.id == 22
assert '$info.items' == '[]'
assert '$info.maps' == '{}'
}
struct Foo2 {
name string
}
@ -470,7 +477,7 @@ fn create_game_packet(data &GamePacketData) string {
fn test_encode_sumtype_defined_ahead() {
ret := create_game_packet(&GamePacketData(GPScale{}))
println(ret)
// println(ret)
assert ret == '{"value":0,"_type":"GPScale"}'
}

View File

@ -139,6 +139,13 @@ pub fn ones_count_64(x u64) int {
return int(y) & ((1 << 7) - 1)
}
const (
n8 = u8(8)
n16 = u16(16)
n32 = u32(32)
n64 = u64(64)
)
// --- RotateLeft ---
// rotate_left_8 returns the value of x rotated left by (k mod 8) bits.
// To rotate x right by k bits, call rotate_left_8(x, -k).
@ -146,9 +153,8 @@ pub fn ones_count_64(x u64) int {
// This function's execution time does not depend on the inputs.
[inline]
pub fn rotate_left_8(x u8, k int) u8 {
n := u8(8)
s := u8(k) & (n - u8(1))
return (x << s) | (x >> (n - s))
s := u8(k) & (bits.n8 - u8(1))
return (x << s) | (x >> (bits.n8 - s))
}
// rotate_left_16 returns the value of x rotated left by (k mod 16) bits.
@ -157,9 +163,8 @@ pub fn rotate_left_8(x u8, k int) u8 {
// This function's execution time does not depend on the inputs.
[inline]
pub fn rotate_left_16(x u16, k int) u16 {
n := u16(16)
s := u16(k) & (n - u16(1))
return (x << s) | (x >> (n - s))
s := u16(k) & (bits.n16 - u16(1))
return (x << s) | (x >> (bits.n16 - s))
}
// rotate_left_32 returns the value of x rotated left by (k mod 32) bits.
@ -168,9 +173,8 @@ pub fn rotate_left_16(x u16, k int) u16 {
// This function's execution time does not depend on the inputs.
[inline]
pub fn rotate_left_32(x u32, k int) u32 {
n := u32(32)
s := u32(k) & (n - u32(1))
return (x << s) | (x >> (n - s))
s := u32(k) & (bits.n32 - u32(1))
return (x << s) | (x >> (bits.n32 - s))
}
// rotate_left_64 returns the value of x rotated left by (k mod 64) bits.
@ -179,9 +183,8 @@ pub fn rotate_left_32(x u32, k int) u32 {
// This function's execution time does not depend on the inputs.
[inline]
pub fn rotate_left_64(x u64, k int) u64 {
n := u64(64)
s := u64(k) & (n - u64(1))
return (x << s) | (x >> (n - s))
s := u64(k) & (bits.n64 - u64(1))
return (x << s) | (x >> (bits.n64 - s))
}
// --- Reverse ---

View File

@ -16,9 +16,9 @@ pub fn download_file(url string, out_file_path string) ? {
return error('received http code $s.status_code')
}
$if debug_http ? {
println('http.download_file saving $s.text.len bytes')
println('http.download_file saving $s.body.len bytes')
}
os.write_file(out_file_path, s.text)?
os.write_file(out_file_path, s.body)?
}
// TODO: implement download_file_with_progress

View File

@ -161,7 +161,7 @@ pub fn fetch(config FetchConfig) ?Response {
// get_text sends a GET HTTP request to the URL and returns the text content of the response
pub fn get_text(url string) string {
resp := fetch(url: url, method: .get) or { return '' }
return resp.text
return resp.body
}
// url_encode_form_data converts mapped data to an URL encoded string

View File

@ -25,7 +25,7 @@ fn http_fetch_mock(_methods []string, _config FetchConfig) ?[]Response {
config.method = method_from_str(method)
res := fetch(FetchConfig{ ...config, url: url + lmethod })?
// TODO
// body := json.decode(HttpbinResponseBody,res.text)?
// body := json.decode(HttpbinResponseBody,res.body)?
result << res
}
return result
@ -49,7 +49,7 @@ fn test_http_fetch_with_data() {
data: 'hello world'
) or { panic(err) }
for response in responses {
payload := json.decode(HttpbinResponseBody, response.text) or { panic(err) }
payload := json.decode(HttpbinResponseBody, response.body) or { panic(err) }
assert payload.data == 'hello world'
}
}
@ -65,7 +65,7 @@ fn test_http_fetch_with_params() {
}
) or { panic(err) }
for response in responses {
// payload := json.decode(HttpbinResponseBody,response.text) or {
// payload := json.decode(HttpbinResponseBody,response.body) or {
// panic(err)
// }
assert response.status() == .ok
@ -85,7 +85,7 @@ fn test_http_fetch_with_headers() ? {
header: header
) or { panic(err) }
for response in responses {
// payload := json.decode(HttpbinResponseBody,response.text) or {
// payload := json.decode(HttpbinResponseBody,response.body) or {
// panic(err)
// }
assert response.status() == .ok

View File

@ -17,9 +17,9 @@ fn test_http_get_from_vlang_utc_now() {
println('Test getting current time from $url by http.get')
res := http.get(url) or { panic(err) }
assert res.status() == .ok
assert res.text.len > 0
assert res.text.int() > 1566403696
println('Current time is: $res.text.int()')
assert res.body.len > 0
assert res.body.int() > 1566403696
println('Current time is: $res.body.int()')
}
}
@ -39,7 +39,7 @@ fn test_public_servers() {
println('Testing http.get on public url: $url ')
res := http.get(url) or { panic(err) }
assert res.status() == .ok
assert res.text.len > 0
assert res.body.len > 0
}
}
@ -51,6 +51,6 @@ fn test_relative_redirects() {
} // tempfix periodic: httpbin relative redirects are broken
res := http.get('https://httpbin.org/relative-redirect/3?abc=xyz') or { panic(err) }
assert res.status() == .ok
assert res.text.len > 0
assert res.text.contains('"abc": "xyz"')
assert res.body.len > 0
assert res.body.contains('"abc": "xyz"')
}

View File

@ -0,0 +1,33 @@
import net.http
import json
// MimeType mirrors one entry of the jshttp/mime-db db.json database.
struct MimeType {
source string // origin of the definition, e.g. 'iana' -- TODO confirm against db.json schema
extensions []string // file extensions associated with this MIME type
compressible bool // whether content of this type is worth compressing
charset string // default charset, may be empty
}
// main downloads the jshttp/mime-db database and regenerates db.v with two
// constants: `db` (MIME type -> MimeType) and `ext_to_mt_str` (extension -> MIME type).
fn main() {
mt_json := http.get('https://raw.githubusercontent.com/jshttp/mime-db/master/db.json')?
// use Response.body: Response.text is deprecated in favour of .body
mt_map := json.decode(map[string]MimeType, mt_json.body)?
// build the reverse mapping: file extension -> MIME type string
mut ext_to_mt_str := map[string]string{}
for mt_str, mt in mt_map {
for ext in mt.extensions {
ext_to_mt_str[ext] = mt_str
}
}
write_file('db.v', '
module mime
// FILE AUTOGENERATED BY `build.vsh` - DO NOT MANUALLY EDIT
const (
db = $mt_map
ext_to_mt_str = $ext_to_mt_str
)
')?
// normalise the generated file with the V formatter
execute('${@VEXE} fmt -w db.v')
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,39 @@
module mime

// MimeType mirrors one entry of the generated MIME database (see db.v).
pub struct MimeType {
source string // origin of the definition, e.g. 'iana' -- TODO confirm against db.json schema
extensions []string // file extensions associated with this MIME type
compressible bool // whether content of this type is worth compressing
charset string // default charset, may be empty
}

// get_complete_mime_type returns the full `MimeType` record for the given MIME type.
// An unknown MIME type yields a zero-value MimeType (map default).
pub fn get_complete_mime_type(mt string) MimeType {
return db[mt]
}

// get_mime_type returns the MIME type for the given file extension,
// or an empty string for an unknown extension (map default).
pub fn get_mime_type(ext string) string {
return ext_to_mt_str[ext]
}

// get_content_type returns a `content-type` header value ready to use for the given MIME type.
// The charset falls back to 'utf-8' when the database entry has none.
pub fn get_content_type(mt string) string {
mt_struct := db[mt]
charset := if mt_struct.charset.len > 0 { mt_struct.charset.to_lower() } else { 'utf-8' }
return '$mt; charset=$charset'
}

// get_default_ext returns the first registered extension for the given MIME type,
// or an empty string when the type has no extensions or is unknown.
pub fn get_default_ext(mt string) string {
return if db[mt].extensions.len > 0 {
db[mt].extensions[0]
} else {
''
}
}

// exists reports whether the given MIME type is present in the database.
pub fn exists(mt string) bool {
return mt in db
}

View File

@ -0,0 +1,29 @@
module mime

// test_mime exercises the public mime API with two well-known MIME types
// (application/json, text/markdown) plus edge cases: a valid type with no
// extensions and an unknown/invalid type.
fn test_mime() {
assert get_complete_mime_type('application/json') == MimeType{
source: 'iana'
extensions: ['json', 'map']
compressible: true
charset: 'UTF-8'
}
assert get_mime_type('json') == 'application/json'
// charset from the db ('UTF-8') is lower-cased by get_content_type
assert get_content_type('application/json') == 'application/json; charset=utf-8'
assert get_default_ext('application/json') == 'json'
assert get_complete_mime_type('text/markdown') == MimeType{
source: 'iana'
extensions: ['md', 'markdown']
compressible: true
charset: ''
}
assert get_mime_type('md') == 'text/markdown'
// empty charset in the db falls back to utf-8
assert get_content_type('text/markdown') == 'text/markdown; charset=utf-8'
assert get_default_ext('text/markdown') == 'md'
assert exists('application/json') == true
assert exists('udfsbsfib') == false
assert get_default_ext('application/1d-interleaved-parityfec') == '' // valid mime type without associated extension
assert get_default_ext('invalid mime type') == '' // invalid mime type
}

View File

@ -9,7 +9,8 @@ import strconv
// Response represents the result of the request
pub struct Response {
pub mut:
text string
body string
text string [deprecated: 'use Response.body instead'; deprecated_after: '2022-10-03']
header Header
status_code int
status_msg string
@ -30,7 +31,7 @@ pub fn (resp Response) bytes() []u8 {
pub fn (resp Response) bytestr() string {
return 'HTTP/$resp.http_version $resp.status_code $resp.status_msg\r\n' + '${resp.header.render(
version: resp.version()
)}\r\n' + '$resp.text'
)}\r\n' + '$resp.body'
}
// Parse a raw HTTP response into a Response object
@ -39,16 +40,17 @@ pub fn parse_response(resp string) ?Response {
// Build resp header map and separate the body
start_idx, end_idx := find_headers_range(resp)?
header := parse_headers(resp.substr(start_idx, end_idx))?
mut text := resp.substr(end_idx, resp.len)
mut body := resp.substr(end_idx, resp.len)
if header.get(.transfer_encoding) or { '' } == 'chunked' {
text = chunked.decode(text)
body = chunked.decode(body)
}
return Response{
http_version: version
status_code: status_code
status_msg: status_msg
header: header
text: text
body: body
text: body // TODO: remove as deprecated
}
}
@ -113,18 +115,19 @@ pub struct ResponseConfig {
version Version = .v1_1
status Status = .ok
header Header
text string
body string
text string [deprecated: 'use ResponseConfig.body instead'; deprecated_after: '2022-10-03']
}
// new_response creates a Response object from the configuration. This
// function will add a Content-Length header if text is not empty.
// function will add a Content-Length header if body is not empty.
pub fn new_response(conf ResponseConfig) Response {
mut resp := Response{
text: conf.text
body: conf.body + conf.text
header: conf.header
}
if conf.text.len > 0 && !resp.header.contains(.content_length) {
resp.header.add(.content_length, conf.text.len.str())
if resp.body.len > 0 && !resp.header.contains(.content_length) {
resp.header.add(.content_length, resp.body.len.str())
}
resp.set_status(conf.status)
resp.set_version(conf.version)

View File

@ -4,14 +4,14 @@ fn test_response_bytestr() ? {
{
resp := new_response(
status: .ok
text: 'Foo'
text: 'Foo' // TODO: replace with `body` once deprecated
)
assert resp.bytestr() == 'HTTP/1.1 200 OK\r\n' + 'Content-Length: 3\r\n' + '\r\n' + 'Foo'
}
{
resp := new_response(
status: .found
text: 'Foo'
body: 'Foo'
header: new_header(key: .location, value: '/')
)
lines := resp.bytestr().split_into_lines()

View File

@ -115,7 +115,7 @@ fn (d DebugHandler) handle(req Request) Response {
eprintln('[$time.now()] $req.method $req.url - 200')
}
mut r := Response{
text: req.data
body: req.data
header: req.header
}
r.set_status(.ok)

View File

@ -41,7 +41,7 @@ fn (mut handler MyHttpHandler) handle(req http.Request) http.Response {
handler.counter++
// eprintln('$time.now() | counter: $handler.counter | $req.method $req.url\n$req.header\n$req.data - 200 OK\n')
mut r := http.Response{
text: req.data + ', $req.url'
body: req.data + ', $req.url'
header: req.header
}
match req.url.all_before('?') {
@ -72,11 +72,11 @@ fn test_server_custom_handler() ? {
time.sleep(10 * time.millisecond)
}
x := http.fetch(url: 'http://localhost:$cport/endpoint?abc=xyz', data: 'my data')?
assert x.text == 'my data, /endpoint?abc=xyz'
assert x.body == 'my data, /endpoint?abc=xyz'
assert x.status_code == 200
assert x.http_version == '1.1'
y := http.fetch(url: 'http://localhost:$cport/another/endpoint', data: 'abcde')?
assert y.text == 'abcde, /another/endpoint'
assert y.body == 'abcde, /another/endpoint'
assert y.status_code == 200
assert y.status() == .ok
assert y.http_version == '1.1'

View File

@ -102,6 +102,10 @@ fn (kind OrderType) to_str() string {
}
}
// Examples for QueryData in SQL: abc == 3 && b == 'test'
// => fields[abc, b]; data[3, 'test']; types[index of int, index of string]; kinds[.eq, .eq]; is_and[true];
// Every field, data, type & kind of operation in the expr share the same index in the arrays
// is_and defines how the expressions are joined to each other: either `and` or `or`
pub struct QueryData {
pub:
fields []string
@ -128,6 +132,17 @@ pub:
attrs []StructAttribute
}
// table - Table name
// is_count - Either the data will be returned or an integer with the count
// has_where - Select all or use a where expr
// has_order - Order the results
// order - Name of the column which will be ordered
// order_type - Type of order (asc, desc)
// has_limit - Limits the output data
// primary - Name of the primary field
// has_offset - Add an offset to the result
// fields - Fields to select
// types - Types to select
pub struct SelectConfig {
pub:
table string
@ -143,6 +158,14 @@ pub:
types []int
}
// Interfaces gets called from the backend and can be implemented
// Since the orm supports arrays as well, they have to be returned too.
// A row is represented as []Primitive, where the data is connected to the fields of the struct by their
// index. The indices are mapped with the SelectConfig.field array. This is the mapping for a struct.
// To have an array, there has to be an array of structs, basically [][]Primitive
//
// Every function without last_id() returns an optional, which returns an error if present
// last_id returns the last inserted id of the db
pub interface Connection {
@select(config SelectConfig, data QueryData, where QueryData) ?[][]Primitive
insert(table string, data QueryData) ?
@ -153,7 +176,12 @@ pub interface Connection {
last_id() Primitive
}
pub fn orm_stmt_gen(table string, para string, kind StmtKind, num bool, qm string, start_pos int, data QueryData, where QueryData) string {
// Generates an sql stmt, from universal parameter
// q - The quotes character, which can be different in every type, so it's variable
// num - Stmt uses nums at prepared statements (? or ?1)
// qm - Character for prepared statement, qm because of quotation mark like in sqlite
// start_pos - When num is true, it's the start position of the counter
pub fn orm_stmt_gen(table string, q string, kind StmtKind, num bool, qm string, start_pos int, data QueryData, where QueryData) string {
mut str := ''
mut c := start_pos
@ -163,7 +191,7 @@ pub fn orm_stmt_gen(table string, para string, kind StmtKind, num bool, qm strin
mut values := []string{}
for _ in 0 .. data.fields.len {
// loop over the length of data.field and generate ?0, ?1 or just ? based on the $num parameter for value placeholders
// loop over the length of data.field and generate ?0, ?1 or just ? based on the $num parameter for value placeholders
if num {
values << '$qm$c'
c++
@ -172,16 +200,16 @@ pub fn orm_stmt_gen(table string, para string, kind StmtKind, num bool, qm strin
}
}
str += 'INSERT INTO $para$table$para ('
str += data.fields.map('$para$it$para').join(', ')
str += 'INSERT INTO $q$table$q ('
str += data.fields.map('$q$it$q').join(', ')
str += ') VALUES ('
str += values.join(', ')
str += ')'
}
.update {
str += 'UPDATE $para$table$para SET '
str += 'UPDATE $q$table$q SET '
for i, field in data.fields {
str += '$para$field$para = '
str += '$q$field$q = '
if data.data.len > i {
d := data.data[i]
if d is InfixType {
@ -217,12 +245,12 @@ pub fn orm_stmt_gen(table string, para string, kind StmtKind, num bool, qm strin
str += ' WHERE '
}
.delete {
str += 'DELETE FROM $para$table$para WHERE '
str += 'DELETE FROM $q$table$q WHERE '
}
}
if kind == .update || kind == .delete {
for i, field in where.fields {
str += '$para$field$para ${where.kinds[i].to_str()} $qm'
str += '$q$field$q ${where.kinds[i].to_str()} $qm'
if num {
str += '$c'
c++
@ -236,28 +264,32 @@ pub fn orm_stmt_gen(table string, para string, kind StmtKind, num bool, qm strin
return str
}
pub fn orm_select_gen(orm SelectConfig, para string, num bool, qm string, start_pos int, where QueryData) string {
// Generates an sql select stmt, from universal parameter
// orm - See SelectConfig
// q, num, qm, start_pos - see orm_stmt_gen
// where - See QueryData
pub fn orm_select_gen(orm SelectConfig, q string, num bool, qm string, start_pos int, where QueryData) string {
mut str := 'SELECT '
if orm.is_count {
str += 'COUNT(*)'
} else {
for i, field in orm.fields {
str += '$para$field$para'
str += '$q$field$q'
if i < orm.fields.len - 1 {
str += ', '
}
}
}
str += ' FROM $para$orm.table$para'
str += ' FROM $q$orm.table$q'
mut c := start_pos
if orm.has_where {
str += ' WHERE '
for i, field in where.fields {
str += '$para$field$para ${where.kinds[i].to_str()} $qm'
str += '$q$field$q ${where.kinds[i].to_str()} $qm'
if num {
str += '$c'
c++
@ -276,7 +308,7 @@ pub fn orm_select_gen(orm SelectConfig, para string, num bool, qm string, start_
// ordering is *slow*, especially if there are no indexes!
if orm.has_order {
str += ' ORDER BY '
str += '$para$orm.order$para '
str += '$q$orm.order$q '
str += orm.order_type.to_str()
}
@ -300,11 +332,19 @@ pub fn orm_select_gen(orm SelectConfig, para string, num bool, qm string, start_
return str
}
pub fn orm_table_gen(table string, para string, defaults bool, def_unique_len int, fields []TableField, sql_from_v fn (int) ?string, alternative bool) ?string {
mut str := 'CREATE TABLE IF NOT EXISTS $para$table$para ('
// Generates an sql table stmt, from universal parameter
// table - Table name
// q - see orm_stmt_gen
// defaults - enables default values in stmt
// def_unique_len - sets default unique length for texts
// fields - See TableField
// sql_from_v - Function which maps type indices to sql type names
// alternative - Needed for msdb
pub fn orm_table_gen(table string, q string, defaults bool, def_unique_len int, fields []TableField, sql_from_v fn (int) ?string, alternative bool) ?string {
mut str := 'CREATE TABLE IF NOT EXISTS $q$table$q ('
if alternative {
str = 'IF NOT EXISTS (SELECT * FROM sysobjects WHERE name=$para$table$para and xtype=${para}U$para) CREATE TABLE $para$table$para ('
str = 'IF NOT EXISTS (SELECT * FROM sysobjects WHERE name=$q$table$q and xtype=${q}U$q) CREATE TABLE $q$table$q ('
}
mut fs := []string{}
@ -368,7 +408,7 @@ pub fn orm_table_gen(table string, para string, defaults bool, def_unique_len in
if ctyp == '' {
return error('Unknown type ($field.typ) for field $field.name in struct $table')
}
stmt = '$para$field_name$para $ctyp'
stmt = '$q$field_name$q $ctyp'
if defaults && field.default_val != '' {
stmt += ' DEFAULT $field.default_val'
}
@ -376,7 +416,7 @@ pub fn orm_table_gen(table string, para string, defaults bool, def_unique_len in
stmt += ' NOT NULL'
}
if is_unique {
mut f := 'UNIQUE($para$field_name$para'
mut f := 'UNIQUE($q$field_name$q'
if ctyp == 'TEXT' && def_unique_len > 0 {
if unique_len > 0 {
f += '($unique_len)'
@ -396,18 +436,19 @@ pub fn orm_table_gen(table string, para string, defaults bool, def_unique_len in
for k, v in unique {
mut tmp := []string{}
for f in v {
tmp << '$para$f$para'
tmp << '$q$f$q'
}
fs << '/* $k */UNIQUE(${tmp.join(', ')})'
}
}
fs << 'PRIMARY KEY($para$primary$para)'
fs << 'PRIMARY KEY($q$primary$q)'
fs << unique_fields
str += fs.join(', ')
str += ');'
return str
}
// Get's the sql field type
fn sql_field_type(field TableField) int {
mut typ := field.typ
if field.is_time {
@ -426,6 +467,7 @@ fn sql_field_type(field TableField) int {
return typ
}
// Get's the sql field name
fn sql_field_name(field TableField) string {
mut name := field.name
for attr in field.attrs {

View File

@ -1,7 +1,7 @@
// import os
// import pg
// import term
import time
// import pg
import sqlite
struct Module {
@ -31,9 +31,12 @@ struct TestTime {
create time.Time
}
fn test_orm_sqlite() {
fn test_orm() {
db := sqlite.connect(':memory:') or { panic(err) }
db.exec('drop table if exists User')
// db.exec('drop table if exists User')
// db := pg.connect(host: 'localhost', port: 5432, user: 'louis', password: 'abc', dbname: 'orm') or { panic(err) }
sql db {
create table Module
}
@ -242,7 +245,7 @@ fn test_orm_sqlite() {
//
offset_const := 2
z := sql db {
select from User limit 2 offset offset_const
select from User order by id limit 2 offset offset_const
}
assert z.len == 2
assert z[0].id == 3
@ -264,6 +267,7 @@ fn test_orm_sqlite() {
}
assert updated_oldest.age == 31
// Remove this when pg is used
db.exec('insert into User (name, age) values (NULL, 31)')
null_user := sql db {
select from User where id == 5
@ -336,11 +340,18 @@ fn test_orm_sqlite() {
sql db {
update Module set created = t where id == 1
}
updated_time_mod := sql db {
select from Module where id == 1
}
// Note: usually updated_time_mod.created != t, because t has
// its microseconds set, while the value retrieved from the DB
// has them zeroed, because the db field resolution is seconds.
assert updated_time_mod.created.format_ss() == t.format_ss()
sql db {
drop table Module
drop table TestTime
}
}

View File

@ -123,6 +123,52 @@ pub fn norm_path(path string) string {
return res
}
// existing_path returns the existing part of the given `path`.
// An error is returned if there is no existing part of the given `path`.
pub fn existing_path(path string) ?string {
err := error('path does not exist')
if path.len == 0 {
return err
}
if exists(path) {
return path
}
mut volume_len := 0
$if windows {
volume_len = win_volume_len(path)
}
if volume_len > 0 && is_slash(path[volume_len - 1]) {
volume_len++
}
mut sc := textscanner.new(path[volume_len..])
mut recent_path := path[..volume_len]
for sc.next() != -1 {
curr := u8(sc.current())
peek := sc.peek()
back := sc.peek_back()
if is_curr_dir_ref(back, curr, peek) {
continue
}
range := sc.ilen - sc.remaining() + volume_len
if is_slash(curr) && !is_slash(u8(peek)) {
recent_path = path[..range]
continue
}
if !is_slash(curr) && (peek == -1 || is_slash(u8(peek))) {
curr_path := path[..range]
if exists(curr_path) {
recent_path = curr_path
continue
}
if recent_path.len == 0 {
break
}
return recent_path
}
}
return err
}
// clean_path returns the "cleaned" version of the given `path`
// by turning forward slashes into back slashes
// on a Windows system and eliminating:
@ -144,8 +190,7 @@ fn clean_path(path string) string {
continue
}
// skip reference to current dir (.)
if (back == -1 || is_slash(u8(back))) && curr == os.dot
&& (peek == -1 || is_slash(u8(peek))) {
if is_curr_dir_ref(back, curr, peek) {
// skip if the next byte is a path separator
if peek != -1 && is_slash(u8(peek)) {
sc.skip_n(1)
@ -246,3 +291,13 @@ fn is_normal_path(path string) bool {
return (plen == 1 && is_slash(path[0])) || (plen >= 2 && is_slash(path[0])
&& !is_slash(path[1]))
}
// is_curr_dir_ref returns `true` if the 3 given integer construct
// a reference to a current directory (.).
// NOTE: a negative integer means that no byte is present
fn is_curr_dir_ref(byte_one int, byte_two int, byte_three int) bool {
if u8(byte_two) != os.dot {
return false
}
return (byte_one < 0 || is_slash(u8(byte_one))) && (byte_three < 0 || is_slash(u8(byte_three)))
}

View File

@ -36,11 +36,13 @@ fn test_clean_path() {
assert clean_path(r'\./path/dir\\file.exe') == r'\path\dir\file.exe'
assert clean_path(r'.') == ''
assert clean_path(r'./') == ''
assert clean_path('') == ''
assert clean_path(r'\./') == '\\'
assert clean_path(r'//\/\/////') == '\\'
return
}
assert clean_path('./../.././././//') == '../..'
assert clean_path('') == ''
assert clean_path('.') == ''
assert clean_path('./path/to/file.v//./') == 'path/to/file.v'
assert clean_path('./') == ''
@ -127,3 +129,26 @@ fn test_abs_path() {
assert abs_path('path/../file.v/..') == wd
assert abs_path('///') == '/'
}
fn test_existing_path() {
wd := getwd()
$if windows {
assert existing_path('') or { '' } == ''
assert existing_path('..') or { '' } == '..'
assert existing_path('.') or { '' } == '.'
assert existing_path(wd) or { '' } == wd
assert existing_path('\\') or { '' } == '\\'
assert existing_path('$wd\\.\\\\does/not/exist\\.\\') or { '' } == '$wd\\.\\\\'
assert existing_path('$wd\\\\/\\.\\.\\/.') or { '' } == '$wd\\\\/\\.\\.\\/.'
assert existing_path('$wd\\././/\\/oh') or { '' } == '$wd\\././/\\/'
return
}
assert existing_path('') or { '' } == ''
assert existing_path('..') or { '' } == '..'
assert existing_path('.') or { '' } == '.'
assert existing_path(wd) or { '' } == wd
assert existing_path('/') or { '' } == '/'
assert existing_path('$wd/does/.///not/exist///.//') or { '' } == '$wd/'
assert existing_path('$wd//././/.//') or { '' } == '$wd//././/.//'
assert existing_path('$wd//././/.//oh') or { '' } == '$wd//././/.//'
}

View File

@ -173,8 +173,20 @@ pub fn is_dir_empty(path string) bool {
// file_ext will return the part after the last occurence of `.` in `path`.
// The `.` is included.
// Examples:
// ```v
// assert os.file_ext('file.v') == '.v'
// assert os.file_ext('.ignore_me') == ''
// assert os.file_ext('.') == ''
// ```
pub fn file_ext(path string) string {
pos := path.last_index('.') or { return '' }
if path.len < 3 {
return empty_str
}
pos := path.last_index(dot_str) or { return empty_str }
if pos + 1 >= path.len || pos == 0 {
return empty_str
}
return path[pos..]
}

View File

@ -585,9 +585,19 @@ fn test_is_executable_writable_readable() ? {
os.rm(file_name) or { panic(err) }
}
fn test_ext() {
fn test_file_ext() {
assert os.file_ext('file.v') == '.v'
assert os.file_ext('file.js.v') == '.v'
assert os.file_ext('file.ext1.ext2.ext3') == '.ext3'
assert os.file_ext('.ignore_me.v') == '.v'
assert os.file_ext('file') == ''
assert os.file_ext('.git') == ''
assert os.file_ext('file.') == ''
assert os.file_ext('.') == ''
assert os.file_ext('..') == ''
assert os.file_ext('file...') == ''
assert os.file_ext('.file.') == ''
assert os.file_ext('..file..') == ''
}
fn test_join() {

View File

@ -8,9 +8,13 @@ import net.conv
pub fn (db DB) @select(config orm.SelectConfig, data orm.QueryData, where orm.QueryData) ?[][]orm.Primitive {
query := orm.orm_select_gen(config, '"', true, '$', 1, where)
res := pg_stmt_worker(db, query, where, data)?
mut ret := [][]orm.Primitive{}
res := pg_stmt_worker(db, query, orm.QueryData{}, where)?
if config.is_count {
}
for row in res {
mut row_data := []orm.Primitive{}
@ -166,7 +170,9 @@ fn pg_stmt_match(mut types []u32, mut vals []&char, mut lens []int, mut formats
}
time.Time {
types << u32(Oid.t_int4)
vals << &char(&int(data.unix))
unix := int(data.unix)
num := conv.htn32(unsafe { &u32(&unix) })
vals << &char(&num)
lens << int(sizeof(u32))
formats << 1
}
@ -178,19 +184,22 @@ fn pg_stmt_match(mut types []u32, mut vals []&char, mut lens []int, mut formats
fn pg_type_from_v(typ int) ?string {
str := match typ {
6, 10 {
orm.type_idx['i8'], orm.type_idx['i16'], orm.type_idx['byte'], orm.type_idx['u16'] {
'SMALLINT'
}
7, 11, orm.time {
orm.type_idx['bool'] {
'BOOLEAN'
}
orm.type_idx['int'], orm.type_idx['u32'], orm.time {
'INT'
}
8, 12 {
orm.type_idx['i64'], orm.type_idx['u64'] {
'BIGINT'
}
13 {
orm.float[0] {
'REAL'
}
14 {
orm.float[1] {
'DOUBLE PRECISION'
}
orm.string {
@ -212,54 +221,51 @@ fn pg_type_from_v(typ int) ?string {
fn str_to_primitive(str string, typ int) ?orm.Primitive {
match typ {
// bool
16 {
return orm.Primitive(str.i8() == 1)
}
18 {
orm.type_idx['bool'] {
return orm.Primitive(str == 't')
}
// i8
5 {
orm.type_idx['i8'] {
return orm.Primitive(str.i8())
}
// i16
6 {
orm.type_idx['i16'] {
return orm.Primitive(str.i16())
}
// int
7 {
orm.type_idx['int'] {
return orm.Primitive(str.int())
}
// i64
8 {
orm.type_idx['i64'] {
return orm.Primitive(str.i64())
}
// byte
9 {
orm.type_idx['byte'] {
data := str.i8()
return orm.Primitive(*unsafe { &u8(&data) })
}
// u16
10 {
orm.type_idx['u16'] {
data := str.i16()
return orm.Primitive(*unsafe { &u16(&data) })
}
// u32
11 {
orm.type_idx['u32'] {
data := str.int()
return orm.Primitive(*unsafe { &u32(&data) })
}
// u64
12 {
orm.type_idx['u64'] {
data := str.i64()
return orm.Primitive(*unsafe { &u64(&data) })
}
// f32
13 {
orm.type_idx['f32'] {
return orm.Primitive(str.f32())
}
// f64
14 {
orm.type_idx['f64'] {
return orm.Primitive(str.f64())
}
orm.string {

View File

@ -6,6 +6,7 @@ import time
// sql expr
pub fn (db DB) @select(config orm.SelectConfig, data orm.QueryData, where orm.QueryData) ?[][]orm.Primitive {
// 1. Create query and bind necessary data
query := orm.orm_select_gen(config, '`', true, '?', 1, where)
stmt := db.new_init_stmt(query)?
mut c := 1
@ -19,6 +20,7 @@ pub fn (db DB) @select(config orm.SelectConfig, data orm.QueryData, where orm.Qu
mut ret := [][]orm.Primitive{}
if config.is_count {
// 2. Get count of returned values & add it to ret array
step := stmt.step()
if step !in [sqlite_row, sqlite_ok, sqlite_done] {
return db.error_message(step, query)
@ -28,6 +30,7 @@ pub fn (db DB) @select(config orm.SelectConfig, data orm.QueryData, where orm.Qu
return ret
}
for {
// 2. Parse returned values
step := stmt.step()
if step == sqlite_done {
break
@ -83,6 +86,7 @@ pub fn (db DB) drop(table string) ? {
// helper
// Executes query and bind prepared statement data directly
fn sqlite_stmt_worker(db DB, query string, data orm.QueryData, where orm.QueryData) ? {
stmt := db.new_init_stmt(query)?
mut c := 1
@ -92,6 +96,7 @@ fn sqlite_stmt_worker(db DB, query string, data orm.QueryData, where orm.QueryDa
stmt.finalize()
}
// Binds all values of d in the prepared statement
fn sqlite_stmt_binder(stmt Stmt, d orm.QueryData, query string, mut c &int) ? {
for data in d.data {
err := bind(stmt, c, data)
@ -103,6 +108,7 @@ fn sqlite_stmt_binder(stmt Stmt, d orm.QueryData, query string, mut c &int) ? {
}
}
// Universal bind function
fn bind(stmt Stmt, c &int, data orm.Primitive) int {
mut err := 0
match data {
@ -128,6 +134,7 @@ fn bind(stmt Stmt, c &int, data orm.Primitive) int {
return err
}
// Selects column in result and converts it to an orm.Primitive
fn (stmt Stmt) sqlite_select_column(idx int, typ int) ?orm.Primitive {
mut primitive := orm.Primitive(0)
@ -149,6 +156,7 @@ fn (stmt Stmt) sqlite_select_column(idx int, typ int) ?orm.Primitive {
return primitive
}
// Convert type int to sql type string
fn sqlite_type_from_v(typ int) ?string {
return if typ in orm.nums || typ < 0 || typ in orm.num64 || typ == orm.time {
'INTEGER'

View File

@ -137,7 +137,7 @@ pub fn format_dec_sb(d u64, p BF_param, mut res strings.Builder) {
// f64_to_str_lnd1 formats a f64 to a `string` with `dec_digit` digits after the dot.
[direct_array_access; manualfree]
fn f64_to_str_lnd1(f f64, dec_digit int) string {
pub fn f64_to_str_lnd1(f f64, dec_digit int) string {
unsafe {
// we add the rounding value
s := f64_to_str(f + dec_round[dec_digit], 18)

View File

@ -172,12 +172,9 @@ pub fn (b &Builder) after(n int) string {
}
// str returns a copy of all of the accumulated buffer content.
// Note: after a call to b.str(), the builder b should not be
// used again, you need to call b.free() first, or just leave
// it to be freed by -autofree when it goes out of scope.
// The returned string *owns* its own separate copy of the
// accumulated data that was in the string builder, before the
// .str() call.
// Note: after a call to b.str(), the builder b will be empty, and could be used again.
// The returned string *owns* its own separate copy of the accumulated data that was in
// the string builder, before the .str() call.
pub fn (mut b Builder) str() string {
b << u8(0)
bcopy := unsafe { &u8(memdup_noscan(b.data, b.len)) }
@ -208,7 +205,8 @@ pub fn (mut b Builder) ensure_cap(n int) {
}
}
// free is for manually freeing the contents of the buffer
// free frees the memory block, used for the buffer.
// Note: do not use the builder, after a call to free().
[unsafe]
pub fn (mut b Builder) free() {
if b.data != 0 {

View File

@ -16,6 +16,7 @@ module term
// y is the y coordinate
pub fn set_cursor_position(c Coord) {
print('\x1b[$c.y;$c.x' + 'H')
flush_stdout()
}
// n is number of cells
@ -25,6 +26,7 @@ pub fn set_cursor_position(c Coord) {
// direction: D is backward / West
pub fn move(n int, direction string) {
print('\x1b[$n$direction')
flush_stdout()
}
pub fn cursor_up(n int) {
@ -50,6 +52,7 @@ pub fn cursor_back(n int) {
pub fn erase_display(t string) {
print('\x1b[' + t + 'J')
flush_stdout()
}
pub fn erase_toend() {
@ -63,6 +66,7 @@ pub fn erase_tobeg() {
// clears entire screen and returns cursor to top left-corner
pub fn erase_clear() {
print('\033[H\033[J')
flush_stdout()
}
pub fn erase_del_clear() {
@ -75,6 +79,7 @@ pub fn erase_del_clear() {
// Note: Cursor position does not change
pub fn erase_line(t string) {
print('\x1b[' + t + 'K')
flush_stdout()
}
pub fn erase_line_toend() {
@ -92,11 +97,13 @@ pub fn erase_line_clear() {
// Will make cursor appear if not visible
pub fn show_cursor() {
print('\x1b[?25h')
flush_stdout()
}
// Will make cursor invisible
pub fn hide_cursor() {
print('\x1b[?25l')
flush_stdout()
}
// clear_previous_line - useful for progressbars.
@ -105,4 +112,5 @@ pub fn hide_cursor() {
// the previous content.
pub fn clear_previous_line() {
print('\r\x1b[1A\x1b[2K')
flush_stdout()
}

View File

@ -52,6 +52,7 @@ pub fn get_cursor_position() ?Coord {
unsafe { C.tcsetattr(0, C.TCSANOW, &state) }
print('\e[6n')
flush_stdout()
mut x := 0
mut y := 0
@ -87,6 +88,7 @@ pub fn set_terminal_title(title string) bool {
print('\033]0')
print(title)
print('\007')
flush_stdout()
return true
}
@ -94,4 +96,5 @@ pub fn set_terminal_title(title string) bool {
pub fn clear() {
print('\x1b[2J')
print('\x1b[H')
flush_stdout()
}

View File

@ -32,12 +32,14 @@ pub fn init(cfg Config) &Context {
fn save_title() {
// restore the previously saved terminal title
print('\x1b[22;0t')
flush_stdout()
}
[inline]
fn load_title() {
// restore the previously saved terminal title
print('\x1b[23;0t')
flush_stdout()
}
pub fn (mut ctx Context) run() ? {

View File

@ -26,6 +26,7 @@ fn restore_terminal_state() {
// clear the terminal and set the cursor to the origin
print('\x1b[2J\x1b[3J')
print('\x1b[?1049l')
flush_stdout()
}
C.SetConsoleMode(ui.ctx_ptr.stdin_handle, ui.stdin_at_startup)
}
@ -65,6 +66,7 @@ pub fn init(cfg Config) &Context {
print('\x1b[?1049h')
// clear the terminal and set the cursor to the origin
print('\x1b[2J\x1b[3J\x1b[1;1H')
flush_stdout()
}
if ctx.cfg.hide_cursor {
@ -74,6 +76,7 @@ pub fn init(cfg Config) &Context {
if ctx.cfg.window_title != '' {
print('\x1b]0;$ctx.cfg.window_title\x07')
flush_stdout()
}
unsafe {
@ -317,10 +320,12 @@ fn (mut ctx Context) parse_events() {
fn save_title() {
// restore the previously saved terminal title
print('\x1b[22;0t')
flush_stdout()
}
[inline]
fn load_title() {
// restore the previously saved terminal title
print('\x1b[23;0t')
flush_stdout()
}

View File

@ -79,6 +79,7 @@ fn (mut ctx Context) termios_setup() ? {
if ctx.cfg.window_title != '' {
print('\x1b]0;$ctx.cfg.window_title\x07')
flush_stdout()
}
if !ctx.cfg.skip_init_checks {
@ -90,6 +91,7 @@ fn (mut ctx Context) termios_setup() ? {
// feature-test the SU spec
sx, sy := get_cursor_position()
print('$bsu$esu')
flush_stdout()
ex, ey := get_cursor_position()
if sx == ex && sy == ey {
// the terminal either ignored or handled the sequence properly, enable SU
@ -108,11 +110,14 @@ fn (mut ctx Context) termios_setup() ? {
C.tcsetattr(C.STDIN_FILENO, C.TCSAFLUSH, &termios)
// enable mouse input
print('\x1b[?1003h\x1b[?1006h')
flush_stdout()
if ctx.cfg.use_alternate_buffer {
// switch to the alternate buffer
print('\x1b[?1049h')
flush_stdout()
// clear the terminal and set the cursor to the origin
print('\x1b[2J\x1b[3J\x1b[1;1H')
flush_stdout()
}
ctx.window_height, ctx.window_width = get_terminal_size()
@ -162,6 +167,7 @@ fn (mut ctx Context) termios_setup() ? {
fn get_cursor_position() (int, int) {
print('\033[6n')
flush_stdout()
mut s := ''
unsafe {
buf := malloc_noscan(25)
@ -183,8 +189,10 @@ fn supports_truecolor() bool {
}
// set the bg color to some arbirtrary value (#010203), assumed not to be the default
print('\x1b[48:2:1:2:3m')
flush_stdout()
// andquery the current color
print('\x1bP\$qm\x1b\\')
flush_stdout()
mut s := ''
unsafe {
buf := malloc_noscan(25)
@ -199,6 +207,7 @@ fn termios_reset() {
// C.TCSANOW ??
C.tcsetattr(C.STDIN_FILENO, C.TCSAFLUSH, &ui.termios_at_startup)
print('\x1b[?1003l\x1b[?1006l\x1b[?25h')
flush_stdout()
c := ctx_ptr
if unsafe { c != 0 } && c.cfg.use_alternate_buffer {
print('\x1b[?1049l')

View File

@ -116,6 +116,7 @@ pub fn (mut ctx Context) clear() {
[inline]
pub fn (mut ctx Context) set_window_title(s string) {
print('\x1b]0;$s\x07')
flush_stdout()
}
// draw_point draws a point at position `x`,`y`.

View File

@ -79,6 +79,10 @@ To query for a value that might not be in the document you
can use the `.default_to(...)` function to provide a
default value.
For cases where a default value might not be appropiate or
to check if a value exists you can use `doc.value_opt('query')?`
instead.
```v
import toml
@ -100,8 +104,15 @@ assert doc.value('table.array[0].a').string() == 'A'
// Provides a default value
assert doc.value('non.existing').default_to(false).bool() == false
// Check if value exist
// doc.value_opt('should.exist') or { ... }
// or
if value := doc.value_opt('table.array[1].b') {
assert value.string() == 'B'
}
// You can pass parts of the TOML document around
// and still use .value() to get the values
// and still use .value()/.value_opt() to get the values
arr := doc.value('table.array')
assert arr.value('[1].b').string() == 'B'
```

View File

@ -281,24 +281,35 @@ pub fn (a []Any) to_toml() string {
// quoted keys are supported as `a."b.c"` or `a.'b.c'`.
// Arrays can be queried with `a[0].b[1].[2]`.
pub fn (a Any) value(key string) Any {
key_split := parse_dotted_key(key) or { return Any(Null{}) }
key_split := parse_dotted_key(key) or { return null }
return a.value_(a, key_split)
}
pub fn (a Any) value_opt(key string) ?Any {
key_split := parse_dotted_key(key) or { return error('invalid dotted key') }
x := a.value_(a, key_split)
if x is Null {
return error('no value for key')
}
return x
}
// value_ returns the `Any` value found at `key`.
fn (a Any) value_(value Any, key []string) Any {
assert key.len > 0
mut any_value := Any(Null{})
if key.len == 0 {
return null
}
mut any_value := null
k, index := parse_array_key(key[0])
if k == '' {
arr := value as []Any
any_value = arr[index] or { return Any(Null{}) }
any_value = arr[index] or { return null }
}
if value is map[string]Any {
any_value = value[k] or { return Any(Null{}) }
any_value = value[k] or { return null }
if index > -1 {
arr := any_value as []Any
any_value = arr[index] or { return Any(Null{}) }
any_value = arr[index] or { return null }
}
}
if key.len <= 1 {

View File

@ -2,11 +2,12 @@ import os
import toml
import toml.to
fn test_keys() {
toml_file :=
os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
'.toml'
toml_doc := toml.parse_file(toml_file) or { panic(err) }
fn path_by_extension(ext string) string {
return os.join_path(os.dir(@VEXE), 'vlib/toml/tests/testdata/key_test.$ext')
}
fn test_keys() ? {
toml_doc := toml.parse_file(path_by_extension('toml'))?
mut value := toml_doc.value('34-11')
assert value.int() == 23
@ -18,10 +19,30 @@ fn test_keys() {
assert value.int() == 42
toml_json := to.json(toml_doc)
out_file :=
os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
'.out'
out_file_json := os.read_file(out_file) or { panic(err) }
out_file_json := os.read_file(path_by_extension('out'))?
println(toml_json)
assert toml_json == out_file_json
//
if x := toml_doc.value_opt('unknown key') {
assert false
} else {
assert err.msg() == 'no value for key'
}
if x := toml_doc.value_opt("'a") {
assert false
} else {
assert err.msg() == 'invalid dotted key'
}
}
fn test_parse_dotted_key() ? {
assert toml.parse_dotted_key('')? == []
assert toml.parse_dotted_key('abc')? == ['abc']
assert toml.parse_dotted_key('tube.test."test.test".h."i.j."."k"')? == ['tube', 'test',
'test.test', 'h', 'i.j.', 'k']
if x := toml.parse_dotted_key("'some unclosed string") {
assert false
} else {
assert err.msg().starts_with('parse_dotted_key: could not parse key, missing closing string delimiter')
}
}

View File

@ -201,26 +201,39 @@ pub fn (d Doc) reflect<T>() T {
// quoted keys are supported as `a."b.c"` or `a.'b.c'`.
// Arrays can be queried with `a[0].b[1].[2]`.
pub fn (d Doc) value(key string) Any {
key_split := parse_dotted_key(key) or { return Any(Null{}) }
key_split := parse_dotted_key(key) or { return toml.null }
return d.value_(d.ast.table, key_split)
}
pub const null = Any(Null{})
pub fn (d Doc) value_opt(key string) ?Any {
key_split := parse_dotted_key(key) or { return error('invalid dotted key') }
x := d.value_(d.ast.table, key_split)
if x is Null {
return error('no value for key')
}
return x
}
// value_ returns the value found at `key` in the map `values` as `Any` type.
fn (d Doc) value_(value ast.Value, key []string) Any {
assert key.len > 0
if key.len == 0 {
return toml.null
}
mut ast_value := ast.Value(ast.Null{})
k, index := parse_array_key(key[0])
if k == '' {
a := value as []ast.Value
ast_value = a[index] or { return Any(Null{}) }
ast_value = a[index] or { return toml.null }
}
if value is map[string]ast.Value {
ast_value = value[k] or { return Any(Null{}) }
ast_value = value[k] or { return toml.null }
if index > -1 {
a := ast_value as []ast.Value
ast_value = a[index] or { return Any(Null{}) }
ast_value = a[index] or { return toml.null }
}
}
@ -298,11 +311,11 @@ pub fn ast_to_any(value ast.Value) Any {
return aa
}
else {
return Any(Null{})
return toml.null
}
}
return Any(Null{})
return toml.null
// TODO decide this
// panic(@MOD + '.' + @STRUCT + '.' + @FN + ' can\'t convert "$value"')
// return Any('')

View File

@ -304,6 +304,10 @@ pub:
is_mut bool
is_global bool
is_volatile bool
//
is_deprecated bool
deprecation_msg string
deprecated_after string
pub mut:
default_expr Expr
default_expr_typ Type
@ -1821,9 +1825,9 @@ pub fn (expr Expr) is_expr() bool {
return true
}
pub fn (expr Expr) is_lit() bool {
pub fn (expr Expr) is_pure_literal() bool {
return match expr {
BoolLiteral, CharLiteral, StringLiteral, IntegerLiteral { true }
BoolLiteral, CharLiteral, FloatLiteral, StringLiteral, IntegerLiteral { true }
else { false }
}
}

View File

@ -303,7 +303,13 @@ pub fn (x Expr) str() string {
}
CallExpr {
sargs := args2str(x.args)
propagate_suffix := if x.or_block.kind == .propagate_option { ' ?' } else { '' }
propagate_suffix := if x.or_block.kind == .propagate_option {
'?'
} else if x.or_block.kind == .propagate_result {
'!'
} else {
''
}
if x.is_method {
return '${x.left.str()}.${x.name}($sargs)$propagate_suffix'
}
@ -357,6 +363,8 @@ pub fn (x Expr) str() string {
}
if i < x.branches.len - 1 || !x.has_else {
parts << ' ${dollar}if ' + branch.cond.str() + ' { '
} else if x.has_else && i == x.branches.len - 1 {
parts << '{ '
}
for stmt in branch.stmts {
parts << stmt.str()

View File

@ -45,6 +45,8 @@ pub mut:
mdeprecated_after map[string]time.Time // module deprecation date
builtin_pub_fns map[string]bool
pointer_size int
// cache for type_to_str_using_aliases
cached_type_to_str map[u64]string
}
// used by vls to avoid leaks

View File

@ -1127,8 +1127,17 @@ pub fn (t &Table) clean_generics_type_str(typ Type) string {
// import_aliases is a map of imported symbol aliases 'module.Type' => 'Type'
pub fn (t &Table) type_to_str_using_aliases(typ Type, import_aliases map[string]string) string {
cache_key := (u64(import_aliases.len) << 32) | u64(typ)
if cached_res := t.cached_type_to_str[cache_key] {
return cached_res
}
sym := t.sym(typ)
mut res := sym.name
mut mt := unsafe { &Table(t) }
defer {
// Note, that this relies on `res = value return res` if you want to return early!
mt.cached_type_to_str[cache_key] = res
}
// Note, that the duplication of code in some of the match branches here
// is VERY deliberate. DO NOT be tempted to use `else {}` instead, because
// that strongly reduces the usefullness of the exhaustive checking that
@ -1147,7 +1156,8 @@ pub fn (t &Table) type_to_str_using_aliases(typ Type, import_aliases map[string]
}
.array {
if typ == ast.array_type {
return 'array'
res = 'array'
return res
}
if typ.has_flag(.variadic) {
res = t.type_to_str_using_aliases(t.value_type(typ), import_aliases)
@ -1202,7 +1212,8 @@ pub fn (t &Table) type_to_str_using_aliases(typ Type, import_aliases map[string]
}
.map {
if int(typ) == ast.map_type_idx {
return 'map'
res = 'map'
return res
}
info := sym.info as Map
key_str := t.type_to_str_using_aliases(info.key_type, import_aliases)
@ -1257,12 +1268,15 @@ pub fn (t &Table) type_to_str_using_aliases(typ Type, import_aliases map[string]
}
.void {
if typ.has_flag(.optional) {
return '?'
res = '?'
return res
}
if typ.has_flag(.result) {
return '!'
res = '!'
return res
}
return 'void'
res = 'void'
return res
}
.thread {
rtype := sym.thread_info().return_type

View File

@ -17,7 +17,7 @@ import v.dotgraph
pub struct Builder {
pub:
compiled_dir string // contains os.real_path() of the dir of the final file beeing compiled, or the dir itself when doing `v .`
compiled_dir string // contains os.real_path() of the dir of the final file being compiled, or the dir itself when doing `v .`
module_path string
pub mut:
checker &checker.Checker
@ -40,6 +40,7 @@ pub mut:
mod_invalidates_paths map[string][]string // changes in mod `os`, invalidate only .v files, that do `import os`
mod_invalidates_mods map[string][]string // changes in mod `os`, force invalidation of mods, that do `import os`
path_invalidates_mods map[string][]string // changes in a .v file from `os`, invalidates `os`
crun_cache_keys []string // target executable + top level source files; filled in by Builder.should_rebuild
}
pub fn new_builder(pref &pref.Preferences) Builder {

View File

@ -3,9 +3,7 @@
// that can be found in the LICENSE file.
module builder
import time
import os
import rand
import v.pref
import v.util
import v.checker
@ -13,6 +11,19 @@ import v.checker
pub type FnBackend = fn (mut b Builder)
pub fn compile(command string, pref &pref.Preferences, backend_cb FnBackend) {
check_if_output_folder_is_writable(pref)
// Construct the V object from command line arguments
mut b := new_builder(pref)
if b.should_rebuild() {
b.rebuild(backend_cb)
}
b.exit_on_invalid_syntax()
// running does not require the parsers anymore
unsafe { b.myfree() }
b.run_compiled_executable_and_exit()
}
fn check_if_output_folder_is_writable(pref &pref.Preferences) {
odir := os.dir(pref.out_name)
// When pref.out_name is just the name of an executable, i.e. `./v -o executable main.v`
// without a folder component, just use the current folder instead:
@ -24,56 +35,6 @@ pub fn compile(command string, pref &pref.Preferences, backend_cb FnBackend) {
// An early error here, is better than an unclear C error later:
verror(err.msg())
}
// Construct the V object from command line arguments
mut b := new_builder(pref)
if pref.is_verbose {
println('builder.compile() pref:')
// println(pref)
}
mut sw := time.new_stopwatch()
backend_cb(mut b)
mut timers := util.get_timers()
timers.show_remaining()
if pref.is_stats {
compilation_time_micros := 1 + sw.elapsed().microseconds()
scompilation_time_ms := util.bold('${f64(compilation_time_micros) / 1000.0:6.3f}')
mut all_v_source_lines, mut all_v_source_bytes := 0, 0
for pf in b.parsed_files {
all_v_source_lines += pf.nr_lines
all_v_source_bytes += pf.nr_bytes
}
mut sall_v_source_lines := all_v_source_lines.str()
mut sall_v_source_bytes := all_v_source_bytes.str()
sall_v_source_lines = util.bold('${sall_v_source_lines:10s}')
sall_v_source_bytes = util.bold('${sall_v_source_bytes:10s}')
println(' V source code size: $sall_v_source_lines lines, $sall_v_source_bytes bytes')
//
mut slines := b.stats_lines.str()
mut sbytes := b.stats_bytes.str()
slines = util.bold('${slines:10s}')
sbytes = util.bold('${sbytes:10s}')
println('generated target code size: $slines lines, $sbytes bytes')
//
vlines_per_second := int(1_000_000.0 * f64(all_v_source_lines) / f64(compilation_time_micros))
svlines_per_second := util.bold(vlines_per_second.str())
println('compilation took: $scompilation_time_ms ms, compilation speed: $svlines_per_second vlines/s')
}
b.exit_on_invalid_syntax()
// running does not require the parsers anymore
unsafe { b.myfree() }
if pref.is_test || pref.is_run {
b.run_compiled_executable_and_exit()
}
}
pub fn (mut b Builder) get_vtmp_filename(base_file_name string, postfix string) string {
vtmp := util.get_vtmp_folder()
mut uniq := ''
if !b.pref.reuse_tmpc {
uniq = '.$rand.u64()'
}
fname := os.file_name(os.real_path(base_file_name)) + '$uniq$postfix'
return os.real_path(os.join_path(vtmp, fname))
}
// Temporary, will be done by -autofree
@ -118,47 +79,45 @@ fn (mut b Builder) run_compiled_executable_and_exit() {
if b.pref.os == .ios {
panic('Running iOS apps is not supported yet.')
}
if !(b.pref.is_test || b.pref.is_run || b.pref.is_crun) {
exit(0)
}
compiled_file := os.real_path(b.pref.out_name)
run_file := if b.pref.backend.is_js() {
node_basename := $if windows { 'node.exe' } $else { 'node' }
os.find_abs_path_of_executable(node_basename) or {
panic('Could not find `node` in system path. Do you have Node.js installed?')
}
} else {
compiled_file
}
mut run_args := []string{cap: b.pref.run_args.len + 1}
if b.pref.backend.is_js() {
run_args << compiled_file
}
run_args << b.pref.run_args
mut run_process := os.new_process(run_file)
run_process.set_args(run_args)
if b.pref.is_verbose {
println('running $run_process.filename with arguments $run_process.args')
}
if b.pref.is_test || b.pref.is_run {
compiled_file := os.real_path(b.pref.out_name)
run_file := if b.pref.backend.is_js() {
node_basename := $if windows { 'node.exe' } $else { 'node' }
os.find_abs_path_of_executable(node_basename) or {
panic('Could not find `node` in system path. Do you have Node.js installed?')
}
} else {
compiled_file
}
mut run_args := []string{cap: b.pref.run_args.len + 1}
if b.pref.backend.is_js() {
run_args << compiled_file
}
run_args << b.pref.run_args
mut run_process := os.new_process(run_file)
run_process.set_args(run_args)
if b.pref.is_verbose {
println('running $run_process.filename with arguments $run_process.args')
}
// Ignore sigint and sigquit while running the compiled file,
// so ^C doesn't prevent v from deleting the compiled file.
// See also https://git.musl-libc.org/cgit/musl/tree/src/process/system.c
prev_int_handler := os.signal_opt(.int, eshcb) or { serror('set .int', err) }
mut prev_quit_handler := os.SignalHandler(eshcb)
$if !windows { // There's no sigquit on windows
prev_quit_handler = os.signal_opt(.quit, eshcb) or { serror('set .quit', err) }
}
run_process.wait()
os.signal_opt(.int, prev_int_handler) or { serror('restore .int', err) }
$if !windows {
os.signal_opt(.quit, prev_quit_handler) or { serror('restore .quit', err) }
}
ret := run_process.code
run_process.close()
b.cleanup_run_executable_after_exit(compiled_file)
exit(ret)
// Ignore sigint and sigquit while running the compiled file,
// so ^C doesn't prevent v from deleting the compiled file.
// See also https://git.musl-libc.org/cgit/musl/tree/src/process/system.c
prev_int_handler := os.signal_opt(.int, eshcb) or { serror('set .int', err) }
mut prev_quit_handler := os.SignalHandler(eshcb)
$if !windows { // There's no sigquit on windows
prev_quit_handler = os.signal_opt(.quit, eshcb) or { serror('set .quit', err) }
}
exit(0)
run_process.wait()
os.signal_opt(.int, prev_int_handler) or { serror('restore .int', err) }
$if !windows {
os.signal_opt(.quit, prev_quit_handler) or { serror('restore .quit', err) }
}
ret := run_process.code
run_process.close()
b.cleanup_run_executable_after_exit(compiled_file)
exit(ret)
}
fn eshcb(_ os.Signal) {
@ -171,6 +130,9 @@ fn serror(reason string, e IError) {
}
fn (mut v Builder) cleanup_run_executable_after_exit(exefile string) {
if v.pref.is_crun {
return
}
if v.pref.reuse_tmpc {
v.pref.vrun_elog('keeping executable: $exefile , because -keepc was passed')
return

View File

@ -2,6 +2,8 @@ module builder
import os
import hash
import time
import rand
import strings
import v.util
import v.pref
@ -11,11 +13,27 @@ pub fn (mut b Builder) rebuild_modules() {
if !b.pref.use_cache || b.pref.build_mode == .build_module {
return
}
all_files := b.parsed_files.map(it.path)
$if trace_invalidations ? {
eprintln('> rebuild_modules all_files: $all_files')
}
invalidations := b.find_invalidated_modules_by_files(all_files)
$if trace_invalidations ? {
eprintln('> rebuild_modules invalidations: $invalidations')
}
if invalidations.len > 0 {
vexe := pref.vexe_path()
for imp in invalidations {
b.v_build_module(vexe, imp)
}
}
}
pub fn (mut b Builder) find_invalidated_modules_by_files(all_files []string) []string {
util.timing_start('${@METHOD} source_hashing')
mut new_hashes := map[string]string{}
mut old_hashes := map[string]string{}
mut sb_new_hashes := strings.new_builder(1024)
all_files := b.parsed_files.map(it.path)
//
mut cm := vcache.new_cache_manager(all_files)
sold_hashes := cm.load('.hashes', 'all_files') or { ' ' }
@ -31,8 +49,7 @@ pub fn (mut b Builder) rebuild_modules() {
old_hashes[cpath] = chash
}
// eprintln('old_hashes: $old_hashes')
for p in b.parsed_files {
cpath := p.path
for cpath in all_files {
ccontent := util.read_file(cpath) or { '' }
chash := hash.sum64_string(ccontent, 7).hex_full()
new_hashes[cpath] = chash
@ -48,6 +65,7 @@ pub fn (mut b Builder) rebuild_modules() {
cm.save('.hashes', 'all_files', snew_hashes) or {}
util.timing_measure('${@METHOD} source_hashing')
mut invalidations := []string{}
if new_hashes != old_hashes {
util.timing_start('${@METHOD} rebuilding')
// eprintln('> b.mod_invalidates_paths: $b.mod_invalidates_paths')
@ -148,13 +166,13 @@ pub fn (mut b Builder) rebuild_modules() {
}
if invalidated_mod_paths.len > 0 {
impaths := invalidated_mod_paths.keys()
vexe := pref.vexe_path()
for imp in impaths {
b.v_build_module(vexe, imp)
invalidations << imp
}
}
util.timing_measure('${@METHOD} rebuilding')
}
return invalidations
}
fn (mut b Builder) v_build_module(vexe string, imp_path string) {
@ -211,7 +229,7 @@ fn (mut b Builder) handle_usecache(vexe string) {
// strconv is already imported inside builtin, so skip generating its object file
// TODO: in case we have other modules with the same name, make sure they are vlib
// is this even doing anything?
if imp in ['strconv', 'strings', 'dlmalloc'] {
if util.module_is_builtin(imp) {
continue
}
if imp in built_modules {
@ -237,3 +255,114 @@ fn (mut b Builder) handle_usecache(vexe string) {
}
b.ccoptions.post_args << libs
}
// should_rebuild reports whether the target executable has to be
// (re)compiled. Normal builds always rebuild; only `v crun` tries to skip
// compilation, by comparing the executable's mtime against the top level
// source files, and against the dependency list cached by a previous crun.
// Side effect: fills b.crun_cache_keys (target executable + sorted top
// level source files), which Builder.rebuild later uses as the vcache key.
pub fn (mut b Builder) should_rebuild() bool {
	mut exe_name := b.pref.out_name
	$if windows {
		exe_name = exe_name + '.exe'
	}
	// no executable produced yet -> a build is always needed
	if !os.is_file(exe_name) {
		return true
	}
	// only `v crun` attempts to avoid recompilation
	if !b.pref.is_crun {
		return true
	}
	// collect the top level .v files (a single file, or all files in a folder)
	mut v_program_files := []string{}
	is_file := os.is_file(b.pref.path)
	is_dir := os.is_dir(b.pref.path)
	if is_file {
		v_program_files << b.pref.path
	} else if is_dir {
		v_program_files << b.v_files_from_dir(b.pref.path)
	}
	v_program_files.sort() // ensure stable keys for the dependencies cache
	b.crun_cache_keys = v_program_files
	b.crun_cache_keys << exe_name
	// just check the timestamps for now:
	exe_stamp := os.file_last_mod_unix(exe_name)
	source_stamp := most_recent_timestamp(v_program_files)
	if exe_stamp <= source_stamp {
		// a top level source file is at least as new as the executable -> rebuild
		return true
	}
	////////////////////////////////////////////////////////////////////////////
	// The timestamps for the top level files were found ok,
	// however we want to *also* make sure that a full rebuild will be done
	// if any of the dependencies (if we know them) are changed.
	mut cm := vcache.new_cache_manager(b.crun_cache_keys)
	// always rebuild, when the compilation options changed between 2 sequential cruns:
	sbuild_options := cm.load('.build_options', '.crun') or { return true }
	if sbuild_options != b.pref.build_options.join('\n') {
		return true
	}
	sdependencies := cm.load('.dependencies', '.crun') or {
		// empty/wiped out cache, we do not know what the dependencies are, so just
		// rebuild, which will fill in the dependencies cache for the next crun
		return true
	}
	dependencies := sdependencies.split('\n')
	// we have already compiled these source files, and have their dependencies
	dependencies_stamp := most_recent_timestamp(dependencies)
	if dependencies_stamp < exe_stamp {
		// every known dependency is older than the executable -> no rebuild needed
		return false
	}
	return true
}
// most_recent_timestamp returns the largest modification time
// (unix seconds, as reported by os.file_last_mod_unix) among the
// given files. It returns 0 for an empty file list.
fn most_recent_timestamp(files []string) i64 {
	mut newest := i64(0)
	for file in files {
		mtime := os.file_last_mod_unix(file)
		if mtime >= newest {
			newest = mtime
		}
	}
	return newest
}
// rebuild runs the chosen backend callback to (re)compile the program.
// For `v crun`, it then saves the discovered source dependencies and the
// build options into the vcache (keyed on b.crun_cache_keys, filled in
// earlier by Builder.should_rebuild), so subsequent cruns can skip
// recompilation. Finally it shows the remaining timers, and with -stats,
// a summary of the source/target sizes and the compilation speed.
pub fn (mut b Builder) rebuild(backend_cb FnBackend) {
	mut sw := time.new_stopwatch()
	backend_cb(mut b)
	if b.pref.is_crun {
		// save the dependencies after the first compilation, they will be used for subsequent ones:
		mut cm := vcache.new_cache_manager(b.crun_cache_keys)
		dependency_files := b.parsed_files.map(it.path)
		cm.save('.dependencies', '.crun', dependency_files.join('\n')) or {}
		cm.save('.build_options', '.crun', b.pref.build_options.join('\n')) or {}
	}
	mut timers := util.get_timers()
	timers.show_remaining()
	if b.pref.is_stats {
		// +1 to avoid a division by zero for extremely fast compilations
		b.print_compilation_stats(1 + sw.elapsed().microseconds())
	}
}

// print_compilation_stats shows the V source code size, the generated
// target code size, and the compilation speed in V lines per second.
// Extracted from rebuild, so the hot path stays short and readable.
fn (mut b Builder) print_compilation_stats(compilation_time_micros i64) {
	scompilation_time_ms := util.bold('${f64(compilation_time_micros) / 1000.0:6.3f}')
	// total size of all parsed V source files:
	mut all_v_source_lines, mut all_v_source_bytes := 0, 0
	for pf in b.parsed_files {
		all_v_source_lines += pf.nr_lines
		all_v_source_bytes += pf.nr_bytes
	}
	mut sall_v_source_lines := all_v_source_lines.str()
	mut sall_v_source_bytes := all_v_source_bytes.str()
	sall_v_source_lines = util.bold('${sall_v_source_lines:10s}')
	sall_v_source_bytes = util.bold('${sall_v_source_bytes:10s}')
	println(' V source code size: $sall_v_source_lines lines, $sall_v_source_bytes bytes')
	//
	mut slines := b.stats_lines.str()
	mut sbytes := b.stats_bytes.str()
	slines = util.bold('${slines:10s}')
	sbytes = util.bold('${sbytes:10s}')
	println('generated target code size: $slines lines, $sbytes bytes')
	//
	vlines_per_second := int(1_000_000.0 * f64(all_v_source_lines) / f64(compilation_time_micros))
	svlines_per_second := util.bold(vlines_per_second.str())
	println('compilation took: $scompilation_time_ms ms, compilation speed: $svlines_per_second vlines/s')
}
// get_vtmp_filename returns an absolute path inside the V tmp folder,
// derived from `base_file_name` with `postfix` appended. Unless
// -keepc (pref.reuse_tmpc) was given, a random `.u64` component is
// inserted, so that parallel compilations do not clash.
pub fn (mut b Builder) get_vtmp_filename(base_file_name string, postfix string) string {
	tmp_folder := util.get_vtmp_folder()
	base := os.file_name(os.real_path(base_file_name))
	uniq := if b.pref.reuse_tmpc { '' } else { '.$rand.u64()' }
	return os.real_path(os.join_path(tmp_folder, base + uniq + postfix))
}

View File

@ -305,7 +305,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
}
}
}
left_type_unwrapped := c.unwrap_generic(left_type)
left_type_unwrapped := c.unwrap_generic(ast.mktyp(left_type))
right_type_unwrapped := c.unwrap_generic(right_type)
if right_type_unwrapped == 0 {
// right type was a generic `T`

View File

@ -232,10 +232,6 @@ pub fn (mut c Checker) check_expected_call_arg(got ast.Type, expected_ ast.Type,
return
}
}
got_typ_sym := c.table.sym(got)
got_typ_str := c.table.type_to_str(got.clear_flag(.variadic))
expected_typ_sym := c.table.sym(expected_)
expected_typ_str := c.table.type_to_str(expected.clear_flag(.variadic))
if c.check_types(got, expected) {
if language != .v || expected.is_ptr() == got.is_ptr() || arg.is_mut
@ -244,6 +240,9 @@ pub fn (mut c Checker) check_expected_call_arg(got ast.Type, expected_ ast.Type,
return
}
} else {
got_typ_sym := c.table.sym(got)
expected_typ_sym := c.table.sym(expected_)
// Check on Generics types, there are some case where we have the following case
// `&Type<int> == &Type<>`. This is a common case we are implementing a function
// with generic parameters like `compare(bst Bst<T> node) {}`
@ -251,6 +250,7 @@ pub fn (mut c Checker) check_expected_call_arg(got ast.Type, expected_ ast.Type,
// Check if we are making a comparison between two different types of
// the same type like `Type<int> and &Type<>`
if (got.is_ptr() != expected.is_ptr()) || !c.check_same_module(got, expected) {
got_typ_str, expected_typ_str := c.get_string_names_of(got, expected)
return error('cannot use `$got_typ_str` as `$expected_typ_str`')
}
return
@ -258,14 +258,22 @@ pub fn (mut c Checker) check_expected_call_arg(got ast.Type, expected_ ast.Type,
if got == ast.void_type {
return error('`$arg.expr` (no value) used as value')
}
got_typ_str, expected_typ_str := c.get_string_names_of(got, expected)
return error('cannot use `$got_typ_str` as `$expected_typ_str`')
}
if got != ast.void_type {
got_typ_str, expected_typ_str := c.get_string_names_of(got, expected)
return error('cannot use `$got_typ_str` as `$expected_typ_str`')
}
}
// get_string_names_of returns the printable names of the `got` and
// `expected` types, with their .variadic flags stripped, for use in
// `cannot use X as Y` style error messages.
fn (c Checker) get_string_names_of(got ast.Type, expected ast.Type) (string, string) {
	sgot := c.table.type_to_str(got.clear_flag(.variadic))
	sexpected := c.table.type_to_str(expected.clear_flag(.variadic))
	return sgot, sexpected
}
// helper method to check if the type is of the same module.
// FIXME(vincenzopalazzo) This is a work around to the issue
// explained in the https://github.com/vlang/v/pull/13718#issuecomment-1074517800
@ -614,7 +622,7 @@ pub fn (mut c Checker) infer_fn_generic_types(func ast.Fn, mut node ast.CallExpr
sym := c.table.sym(node.receiver_type)
match sym.info {
ast.Struct, ast.Interface, ast.SumType {
if c.table.cur_fn.generic_names.len > 0 { // in generic fn
if !isnil(c.table.cur_fn) && c.table.cur_fn.generic_names.len > 0 { // in generic fn
if gt_name in c.table.cur_fn.generic_names
&& c.table.cur_fn.generic_names.len == c.table.cur_concrete_types.len {
idx := c.table.cur_fn.generic_names.index(gt_name)
@ -671,6 +679,7 @@ pub fn (mut c Checker) infer_fn_generic_types(func ast.Fn, mut node ast.CallExpr
mut param_elem_sym := c.table.sym(param_elem_info.elem_type)
for {
if arg_elem_sym.kind == .array && param_elem_sym.kind == .array
&& !isnil(c.table.cur_fn)
&& param_elem_sym.name !in c.table.cur_fn.generic_names {
arg_elem_info = arg_elem_sym.info as ast.Array
arg_elem_sym = c.table.sym(arg_elem_info.elem_type)
@ -690,6 +699,7 @@ pub fn (mut c Checker) infer_fn_generic_types(func ast.Fn, mut node ast.CallExpr
mut param_elem_sym := c.table.sym(param_elem_info.elem_type)
for {
if arg_elem_sym.kind == .array_fixed && param_elem_sym.kind == .array_fixed
&& !isnil(c.table.cur_fn)
&& param_elem_sym.name !in c.table.cur_fn.generic_names {
arg_elem_info = arg_elem_sym.info as ast.ArrayFixed
arg_elem_sym = c.table.sym(arg_elem_info.elem_type)

View File

@ -13,16 +13,14 @@ import v.util.version
import v.errors
import v.pkgconfig
const int_min = int(0x80000000)
const int_max = int(0x7FFFFFFF)
// prevent stack overflows by restricting too deep recursion:
const expr_level_cutoff_limit = 40
const stmt_level_cutoff_limit = 40
const iface_level_cutoff_limit = 100
const (
int_min = int(0x80000000)
int_max = int(0x7FFFFFFF)
// prevent stack overflows by restricting too deep recursion:
expr_level_cutoff_limit = 40
stmt_level_cutoff_limit = 40
iface_level_cutoff_limit = 100
)
pub const (
valid_comptime_if_os = ['windows', 'ios', 'macos', 'mach', 'darwin', 'hpux', 'gnu',
@ -58,28 +56,27 @@ fn all_valid_comptime_idents() []string {
pub struct Checker {
pref &pref.Preferences // Preferences shared from V struct
pub mut:
table &ast.Table
file &ast.File = 0
nr_errors int
nr_warnings int
nr_notices int
errors []errors.Error
warnings []errors.Warning
notices []errors.Notice
error_lines []int // to avoid printing multiple errors for the same line
expected_type ast.Type
expected_or_type ast.Type // fn() or { 'this type' } eg. string. expected or block type
expected_expr_type ast.Type // if/match is_expr: expected_type
mod string // current module name
const_decl string
const_deps []string
const_names []string
global_names []string
locked_names []string // vars that are currently locked
rlocked_names []string // vars that are currently read-locked
in_for_count int // if checker is currently in a for loop
// checked_ident string // to avoid infinite checker loops
should_abort bool // when too many errors/warnings/notices are accumulated, .should_abort becomes true. It is checked in statement/expression loops, so the checker can return early, instead of wasting time.
table &ast.Table
file &ast.File = 0
nr_errors int
nr_warnings int
nr_notices int
errors []errors.Error
warnings []errors.Warning
notices []errors.Notice
error_lines []int // to avoid printing multiple errors for the same line
expected_type ast.Type
expected_or_type ast.Type // fn() or { 'this type' } eg. string. expected or block type
expected_expr_type ast.Type // if/match is_expr: expected_type
mod string // current module name
const_var &ast.ConstField = voidptr(0) // the current constant, when checking const declarations
const_deps []string
const_names []string
global_names []string
locked_names []string // vars that are currently locked
rlocked_names []string // vars that are currently read-locked
in_for_count int // if checker is currently in a for loop
should_abort bool // when too many errors/warnings/notices are accumulated, .should_abort becomes true. It is checked in statement/expression loops, so the checker can return early, instead of wasting time.
returns bool
scope_returns bool
is_builtin_mod bool // true inside the 'builtin', 'os' or 'strconv' modules; TODO: remove the need for special casing this
@ -145,7 +142,7 @@ pub fn new_checker(table &ast.Table, pref &pref.Preferences) &Checker {
fn (mut c Checker) reset_checker_state_at_start_of_new_file() {
c.expected_type = ast.void_type
c.expected_or_type = ast.void_type
c.const_decl = ''
c.const_var = voidptr(0)
c.in_for_count = 0
c.returns = false
c.scope_returns = false
@ -203,7 +200,7 @@ pub fn (mut c Checker) check(ast_file_ &ast.File) {
return
}
}
//
c.stmt_level = 0
for mut stmt in ast_file.stmts {
if stmt is ast.GlobalDecl {
@ -214,7 +211,7 @@ pub fn (mut c Checker) check(ast_file_ &ast.File) {
return
}
}
//
c.stmt_level = 0
for mut stmt in ast_file.stmts {
if stmt !is ast.ConstDecl && stmt !is ast.GlobalDecl && stmt !is ast.ExprStmt {
@ -225,7 +222,7 @@ pub fn (mut c Checker) check(ast_file_ &ast.File) {
return
}
}
//
c.check_scope_vars(c.file.scope)
}
@ -745,7 +742,7 @@ fn (mut c Checker) fail_if_immutable(expr_ ast.Expr) (string, token.Pos) {
return '', pos
}
else {
if !expr.is_lit() {
if !expr.is_pure_literal() {
c.error('unexpected expression `$expr.type_name()`', expr.pos())
return '', pos
}
@ -807,7 +804,6 @@ fn (mut c Checker) type_implements(typ ast.Type, interface_type ast.Type, pos to
}
}
}
styp := c.table.type_to_str(utyp)
if utyp.idx() == interface_type.idx() {
// same type -> already casted to the interface
return true
@ -816,6 +812,7 @@ fn (mut c Checker) type_implements(typ ast.Type, interface_type ast.Type, pos to
// `none` "implements" the Error interface
return true
}
styp := c.table.type_to_str(utyp)
if typ_sym.kind == .interface_ && inter_sym.kind == .interface_ && !styp.starts_with('JS.')
&& !inter_sym.name.starts_with('JS.') {
c.error('cannot implement interface `$inter_sym.name` with a different interface `$styp`',
@ -934,8 +931,8 @@ pub fn (mut c Checker) check_expr_opt_call(expr ast.Expr, ret_type ast.Type) ast
pub fn (mut c Checker) check_or_expr(node ast.OrExpr, ret_type ast.Type, expr_return_type ast.Type) {
if node.kind == .propagate_option {
if !c.table.cur_fn.return_type.has_flag(.optional) && c.table.cur_fn.name != 'main.main'
&& !c.inside_const {
if !isnil(c.table.cur_fn) && !c.table.cur_fn.return_type.has_flag(.optional)
&& c.table.cur_fn.name != 'main.main' && !c.inside_const {
c.error('to propagate the call, `$c.table.cur_fn.name` must return an optional type',
node.pos)
}
@ -951,8 +948,8 @@ pub fn (mut c Checker) check_or_expr(node ast.OrExpr, ret_type ast.Type, expr_re
return
}
if node.kind == .propagate_result {
if !c.table.cur_fn.return_type.has_flag(.result) && c.table.cur_fn.name != 'main.main'
&& !c.inside_const {
if !isnil(c.table.cur_fn) && !c.table.cur_fn.return_type.has_flag(.result)
&& c.table.cur_fn.name != 'main.main' && !c.inside_const {
c.error('to propagate the call, `$c.table.cur_fn.name` must return an result type',
node.pos)
}
@ -989,7 +986,6 @@ fn (mut c Checker) check_or_last_stmt(stmt ast.Stmt, ret_type ast.Type, expr_ret
if type_fits || is_noreturn {
return
}
expected_type_name := c.table.type_to_str(ret_type.clear_flag(.optional))
if stmt.typ == ast.void_type {
if stmt.expr is ast.IfExpr {
for branch in stmt.expr.branches {
@ -1002,10 +998,12 @@ fn (mut c Checker) check_or_last_stmt(stmt ast.Stmt, ret_type ast.Type, expr_ret
}
return
}
expected_type_name := c.table.type_to_str(ret_type.clear_flag(.optional))
c.error('`or` block must provide a default value of type `$expected_type_name`, or return/continue/break or call a [noreturn] function like panic(err) or exit(1)',
stmt.expr.pos())
} else {
type_name := c.table.type_to_str(last_stmt_typ)
expected_type_name := c.table.type_to_str(ret_type.clear_flag(.optional))
c.error('wrong return type `$type_name` in the `or {}` block, expected `$expected_type_name`',
stmt.expr.pos())
}
@ -1071,7 +1069,8 @@ pub fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
match mut node.expr {
ast.Ident {
name := node.expr.name
valid_generic := util.is_generic_type_name(name) && name in c.table.cur_fn.generic_names
valid_generic := util.is_generic_type_name(name) && !isnil(c.table.cur_fn)
&& name in c.table.cur_fn.generic_names
if valid_generic {
name_type = ast.Type(c.table.find_type_idx(name)).set_flag(.generic)
}
@ -1220,11 +1219,23 @@ pub fn (mut c Checker) selector_expr(mut node ast.SelectorExpr) ast.Type {
// <<<
if has_field {
if sym.mod != c.mod && !field.is_pub && sym.language != .c {
is_used_outside := sym.mod != c.mod
if is_used_outside && !field.is_pub && sym.language != .c {
unwrapped_sym := c.table.sym(c.unwrap_generic(typ))
c.error('field `${unwrapped_sym.name}.$field_name` is not public', node.pos)
}
field_sym := c.table.sym(field.typ)
if field.is_deprecated && is_used_outside {
now := time.now()
mut after_time := now
if field.deprecated_after != '' {
after_time = time.parse_iso8601(field.deprecated_after) or {
c.error('invalid time format', field.pos)
now
}
}
c.deprecate('field', field_name, field.deprecation_msg, now, after_time, node.pos)
}
if field_sym.kind in [.sum_type, .interface_] {
if !prevent_sum_type_unwrapping_once {
if scope_field := node.scope.find_struct_field(node.expr.str(), typ, field_name) {
@ -1316,8 +1327,9 @@ pub fn (mut c Checker) const_decl(mut node ast.ConstDecl) {
c.const_names << field.name
}
for i, mut field in node.fields {
c.const_decl = field.name
c.const_deps << field.name
prev_const_var := c.const_var
c.const_var = unsafe { field }
mut typ := c.check_expr_opt_call(field.expr, c.expr(field.expr))
if ct_value := c.eval_comptime_const_expr(field.expr, 0) {
field.comptime_expr_value = ct_value
@ -1327,6 +1339,7 @@ pub fn (mut c Checker) const_decl(mut node ast.ConstDecl) {
}
node.fields[i].typ = ast.mktyp(typ)
c.const_deps = []
c.const_var = prev_const_var
}
}
@ -1452,7 +1465,7 @@ fn (mut c Checker) stmt(node_ ast.Stmt) {
c.inside_const = false
}
ast.DeferStmt {
if node.idx_in_fn < 0 {
if node.idx_in_fn < 0 && !isnil(c.table.cur_fn) {
node.idx_in_fn = c.table.cur_fn.defer_stmts.len
c.table.cur_fn.defer_stmts << unsafe { &node }
}
@ -1541,7 +1554,7 @@ fn (mut c Checker) stmt(node_ ast.Stmt) {
c.warn('`goto` requires `unsafe` (consider using labelled break/continue)',
node.pos)
}
if node.name !in c.table.cur_fn.label_names {
if !isnil(c.table.cur_fn) && node.name !in c.table.cur_fn.label_names {
c.error('unknown label `$node.name`', node.pos)
}
// TODO: check label doesn't bypass variable declarations
@ -1879,7 +1892,6 @@ fn (mut c Checker) hash_stmt(mut node ast.HashStmt) {
}
}
}
// println('adding flag "$flag"')
c.table.parse_cflag(flag, c.mod, c.pref.compile_defines_all) or {
c.error(err.msg(), node.pos)
}
@ -1981,7 +1993,7 @@ fn (mut c Checker) stmts_ending_with_expression(stmts []ast.Stmt) {
}
pub fn (mut c Checker) unwrap_generic(typ ast.Type) ast.Type {
if typ.has_flag(.generic) {
if typ.has_flag(.generic) && !isnil(c.table.cur_fn) {
if t_typ := c.table.resolve_generic_to_concrete(typ, c.table.cur_fn.generic_names,
c.table.cur_concrete_types)
{
@ -2094,10 +2106,7 @@ pub fn (mut c Checker) expr(node_ ast.Expr) ast.Type {
return c.chan_init(mut node)
}
ast.CharLiteral {
// return int_literal, not rune, so that we can do "bytes << `A`" without a cast etc
// return ast.int_literal_type
return ast.rune_type
// return ast.byte_type
}
ast.Comment {
return ast.void_type
@ -2142,10 +2151,7 @@ pub fn (mut c Checker) expr(node_ ast.Expr) ast.Type {
return c.go_expr(mut node)
}
ast.Ident {
// c.checked_ident = node.name
res := c.ident(mut node)
// c.checked_ident = ''
return res
return c.ident(mut node)
}
ast.IfExpr {
return c.if_expr(mut node)
@ -2531,9 +2537,15 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
fn (mut c Checker) at_expr(mut node ast.AtExpr) ast.Type {
match node.kind {
.fn_name {
if isnil(c.table.cur_fn) {
return ast.void_type
}
node.val = c.table.cur_fn.name.all_after_last('.')
}
.method_name {
if isnil(c.table.cur_fn) {
return ast.void_type
}
fname := c.table.cur_fn.name.all_after_last('.')
if c.table.cur_fn.is_method {
node.val = c.table.type_to_str(c.table.cur_fn.receiver.typ).all_after_last('.') +
@ -2543,6 +2555,9 @@ fn (mut c Checker) at_expr(mut node ast.AtExpr) ast.Type {
}
}
.mod_name {
if isnil(c.table.cur_fn) {
return ast.void_type
}
node.val = c.table.cur_fn.mod
}
.struct_name {
@ -2607,7 +2622,23 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
if !name.contains('.') && node.mod != 'builtin' {
name = '${node.mod}.$node.name'
}
if name == c.const_decl {
// detect cycles, while allowing for references to the same constant,
// used inside its initialisation like: `struct Abc { x &Abc } ... const a = [ Abc{0}, Abc{unsafe{&a[0]}} ]!`
// see vlib/v/tests/const_fixed_array_containing_references_to_itself_test.v
if unsafe { c.const_var != 0 } && name == c.const_var.name {
if mut c.const_var.expr is ast.ArrayInit {
if c.const_var.expr.is_fixed && c.expected_type.nr_muls() > 0 {
elem_typ := c.expected_type.deref()
node.kind = .constant
node.name = c.const_var.name
node.info = ast.IdentVar{
typ: elem_typ
}
// c.const_var.typ = elem_typ
node.obj = c.const_var
return c.expected_type
}
}
c.error('cycle in constant `$c.const_decl`', node.pos)
return ast.void_type
}
@ -2693,13 +2724,6 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
typ: typ
is_optional: is_optional
}
// if typ == ast.t_type {
// sym := c.table.sym(c.cur_generic_type)
// println('IDENT T unresolved $node.name typ=$sym.name')
// Got a var with type T, return current generic type
// typ = c.cur_generic_type
// }
// } else {
if !is_sum_type_cast {
obj.typ = typ
}
@ -3052,7 +3076,7 @@ fn (mut c Checker) find_obj_definition(obj ast.ScopeObject) ?ast.Expr {
if mut expr is ast.Ident {
return c.find_definition(expr)
}
if !expr.is_lit() {
if !expr.is_pure_literal() {
return error('definition of `$name` is unknown at compile time')
}
return expr
@ -3266,9 +3290,6 @@ pub fn (mut c Checker) prefix_expr(mut node ast.PrefixExpr) ast.Type {
fn (mut c Checker) check_index(typ_sym &ast.TypeSymbol, index ast.Expr, index_type ast.Type, pos token.Pos, range_index bool, is_gated bool) {
index_type_sym := c.table.sym(index_type)
// println('index expr left=$typ_sym.name $node.pos.line_nr')
// if typ_sym.kind == .array && (!(ast.type_idx(index_type) in ast.number_type_idxs) &&
// index_type_sym.kind != .enum_) {
if typ_sym.kind in [.array, .array_fixed, .string] {
if !(index_type.is_int() || index_type_sym.kind == .enum_
|| (index_type_sym.kind == .alias
@ -3401,6 +3422,9 @@ pub fn (mut c Checker) index_expr(mut node ast.IndexExpr) ast.Type {
typ = value_type
}
}
if node.or_expr.stmts.len > 0 && node.or_expr.stmts.last() is ast.ExprStmt {
c.expected_or_type = typ
}
c.stmts_ending_with_expression(node.or_expr.stmts)
c.check_expr_opt_call(node, typ)
return typ

View File

@ -52,7 +52,8 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
c.ensure_sumtype_array_has_default_value(node)
}
c.ensure_type_exists(node.elem_type, node.elem_type_pos) or {}
if node.typ.has_flag(.generic) && c.table.cur_fn.generic_names.len == 0 {
if node.typ.has_flag(.generic) && !isnil(c.table.cur_fn)
&& c.table.cur_fn.generic_names.len == 0 {
c.error('generic struct cannot use in non-generic function', node.pos)
}
return node.typ

View File

@ -213,8 +213,8 @@ fn (mut c Checker) fn_decl(mut node ast.FnDecl) {
}
}
}
if (c.pref.translated || c.file.is_translated) && node.is_variadic
&& node.params.len == 1 && param.typ.is_ptr() {
//&& node.params.len == 1 && param.typ.is_ptr() {
if (c.pref.translated || c.file.is_translated) && node.is_variadic && param.typ.is_ptr() {
// TODO c2v hack to fix `(const char *s, ...)`
param.typ = ast.int_type.ref()
}
@ -421,8 +421,8 @@ pub fn (mut c Checker) call_expr(mut node ast.CallExpr) ast.Type {
c.expected_or_type = node.return_type.clear_flag(.optional)
c.stmts_ending_with_expression(node.or_block.stmts)
c.expected_or_type = ast.void_type
if node.or_block.kind == .propagate_option && !c.table.cur_fn.return_type.has_flag(.optional)
&& !c.inside_const {
if node.or_block.kind == .propagate_option && !isnil(c.table.cur_fn)
&& !c.table.cur_fn.return_type.has_flag(.optional) && !c.inside_const {
if !c.table.cur_fn.is_main {
c.error('to propagate the optional call, `$c.table.cur_fn.name` must return an optional',
node.or_block.pos)
@ -482,7 +482,9 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
c.error('JS.await: first argument must be a promise, got `$tsym.name`', node.pos)
return ast.void_type
}
c.table.cur_fn.has_await = true
if !isnil(c.table.cur_fn) {
c.table.cur_fn.has_await = true
}
match tsym.info {
ast.Struct {
mut ret_type := tsym.info.concrete_types[0]
@ -895,6 +897,11 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
}
continue
}
if param.typ.is_ptr() && !param.is_mut && !call_arg.typ.is_real_pointer()
&& call_arg.expr.is_literal() && func.language == .v {
c.error('literal argument cannot be passed as reference parameter `${c.table.type_to_str(param.typ)}`',
call_arg.pos)
}
c.check_expected_call_arg(arg_typ, c.unwrap_generic(param.typ), node.language,
call_arg) or {
if param.typ.has_flag(.generic) {
@ -1026,14 +1033,15 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
}
}
// resolve return generics struct to concrete type
if func.generic_names.len > 0 && func.return_type.has_flag(.generic)
if func.generic_names.len > 0 && func.return_type.has_flag(.generic) && !isnil(c.table.cur_fn)
&& c.table.cur_fn.generic_names.len == 0 {
node.return_type = c.table.unwrap_generic_type(func.return_type, func.generic_names,
concrete_types)
} else {
node.return_type = func.return_type
}
if node.concrete_types.len > 0 && func.return_type != 0 && c.table.cur_fn.generic_names.len == 0 {
if node.concrete_types.len > 0 && func.return_type != 0 && !isnil(c.table.cur_fn)
&& c.table.cur_fn.generic_names.len == 0 {
if typ := c.table.resolve_generic_to_concrete(func.return_type, func.generic_names,
concrete_types)
{
@ -1075,7 +1083,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
node.return_type = left_type
node.receiver_type = left_type
if c.table.cur_fn.generic_names.len > 0 {
if !isnil(c.table.cur_fn) && c.table.cur_fn.generic_names.len > 0 {
c.table.unwrap_generic_type(left_type, c.table.cur_fn.generic_names, c.table.cur_concrete_types)
}
unwrapped_left_type := c.unwrap_generic(left_type)
@ -1155,7 +1163,9 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if node.args.len > 0 {
c.error('wait() does not have any arguments', node.args[0].pos)
}
c.table.cur_fn.has_await = true
if !isnil(c.table.cur_fn) {
c.table.cur_fn.has_await = true
}
node.return_type = info.concrete_types[0]
node.return_type.set_flag(.optional)
return node.return_type
@ -1428,6 +1438,10 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
}
continue
}
if param.typ.is_ptr() && !arg.typ.is_real_pointer() && arg.expr.is_literal() {
c.error('literal argument cannot be passed as reference parameter `${c.table.type_to_str(param.typ)}`',
arg.pos)
}
c.check_expected_call_arg(got_arg_typ, exp_arg_typ, node.language, arg) or {
// str method, allow type with str method if fn arg is string
// Passing an int or a string array produces a c error here
@ -1454,7 +1468,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
c.warn('method `${left_sym.name}.$method_name` must be called from an `unsafe` block',
node.pos)
}
if !c.table.cur_fn.is_deprecated && method.is_deprecated {
if !isnil(c.table.cur_fn) && !c.table.cur_fn.is_deprecated && method.is_deprecated {
c.deprecate_fnmethod('method', '${left_sym.name}.$method.name', method, node)
}
c.set_node_expected_arg_types(mut node, method)
@ -1478,13 +1492,13 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
}
// resolve return generics struct to concrete type
if method.generic_names.len > 0 && method.return_type.has_flag(.generic)
&& c.table.cur_fn.generic_names.len == 0 {
&& !isnil(c.table.cur_fn) && c.table.cur_fn.generic_names.len == 0 {
node.return_type = c.table.unwrap_generic_type(method.return_type, method.generic_names,
concrete_types)
} else {
node.return_type = method.return_type
}
if node.concrete_types.len > 0 && method.return_type != 0
if node.concrete_types.len > 0 && method.return_type != 0 && !isnil(c.table.cur_fn)
&& c.table.cur_fn.generic_names.len == 0 {
if typ := c.table.resolve_generic_to_concrete(method.return_type, method.generic_names,
concrete_types)
@ -1615,7 +1629,7 @@ fn (mut c Checker) deprecate_fnmethod(kind string, name string, the_fn ast.Fn, n
if attr.name == 'deprecated_after' && attr.arg != '' {
after_time = time.parse_iso8601(attr.arg) or {
c.error('invalid time format', attr.pos)
time.now()
now
}
}
}

View File

@ -274,11 +274,7 @@ pub fn (mut c Checker) if_expr(mut node ast.IfExpr) ast.Type {
}
}
// if only untyped literals were given default to int/f64
if node.typ == ast.int_literal_type {
node.typ = ast.int_type
} else if node.typ == ast.float_literal_type {
node.typ = ast.f64_type
}
node.typ = ast.mktyp(node.typ)
if expr_required && !node.has_else {
d := if node.is_comptime { '$' } else { '' }
c.error('`$if_kind` expression needs `${d}else` clause', node.pos)

View File

@ -7,6 +7,9 @@ import v.pref
// TODO: non deferred
pub fn (mut c Checker) return_stmt(mut node ast.Return) {
if isnil(c.table.cur_fn) {
return
}
c.expected_type = c.table.cur_fn.return_type
mut expected_type := c.unwrap_generic(c.expected_type)
expected_type_sym := c.table.sym(expected_type)
@ -88,7 +91,9 @@ pub fn (mut c Checker) return_stmt(mut node ast.Return) {
}
if expected_types.len > 0 && expected_types.len != got_types.len {
arg := if expected_types.len == 1 { 'argument' } else { 'arguments' }
c.error('expected $expected_types.len $arg, but got $got_types.len', node.pos)
midx := imax(0, imin(expected_types.len, expr_idxs.len - 1))
mismatch_pos := node.exprs[expr_idxs[midx]].pos()
c.error('expected $expected_types.len $arg, but got $got_types.len', mismatch_pos)
return
}
for i, exp_type in expected_types {
@ -327,3 +332,11 @@ fn is_noreturn_callexpr(expr ast.Expr) bool {
}
return false
}
fn imin(a int, b int) int {
return if a < b { a } else { b }
}
fn imax(a int, b int) int {
return if a < b { b } else { a }
}

View File

@ -97,7 +97,7 @@ pub fn (mut c Checker) string_inter_lit(mut node ast.StringInterLiteral) ast.Typ
node.need_fmts[i] = fmt != c.get_default_fmt(ftyp, typ)
}
// check recursive str
if c.table.cur_fn.is_method && c.table.cur_fn.name == 'str'
if !isnil(c.table.cur_fn) && c.table.cur_fn.is_method && c.table.cur_fn.name == 'str'
&& c.table.cur_fn.receiver.name == expr.str() {
c.error('cannot call `str()` method recursively', expr.pos())
}

View File

@ -219,7 +219,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
&& node.generic_types.len != struct_sym.info.generic_types.len {
c.error('generic struct init expects $struct_sym.info.generic_types.len generic parameter, but got $node.generic_types.len',
node.pos)
} else if node.generic_types.len > 0 {
} else if node.generic_types.len > 0 && !isnil(c.table.cur_fn) {
for gtyp in node.generic_types {
gtyp_name := c.table.sym(gtyp).name
if gtyp_name !in c.table.cur_fn.generic_names {
@ -247,7 +247,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
}
}
// register generic struct type when current fn is generic fn
if c.table.cur_fn.generic_names.len > 0 {
if !isnil(c.table.cur_fn) && c.table.cur_fn.generic_names.len > 0 {
c.table.unwrap_generic_type(node.typ, c.table.cur_fn.generic_names, c.table.cur_concrete_types)
}
c.ensure_type_exists(node.typ, node.pos) or {}
@ -291,7 +291,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
'it cannot be initialized with `$type_sym.name{}`', node.pos)
}
}
if type_sym.name.len == 1 && c.table.cur_fn.generic_names.len == 0 {
if type_sym.name.len == 1 && !isnil(c.table.cur_fn) && c.table.cur_fn.generic_names.len == 0 {
c.error('unknown struct `$type_sym.name`', node.pos)
return 0
}

View File

@ -0,0 +1,42 @@
vlib/v/checker/tests/field_deprecations.vv:23:9: notice: field `d` will be deprecated after 2999-03-01, and will become an error after 2999-08-28; d use Xyz.a instead
21 | dump(x.c)
22 | x.c = 11
23 | dump(x.d)
| ^
24 | x.d = 45
25 | }
vlib/v/checker/tests/field_deprecations.vv:24:4: notice: field `d` will be deprecated after 2999-03-01, and will become an error after 2999-08-28; d use Xyz.a instead
22 | x.c = 11
23 | dump(x.d)
24 | x.d = 45
| ^
25 | }
26 |
vlib/v/checker/tests/field_deprecations.vv:19:9: warning: field `b` has been deprecated
17 | dump(x.a)
18 | x.a = 123
19 | dump(x.b)
| ^
20 | x.b = 456
21 | dump(x.c)
vlib/v/checker/tests/field_deprecations.vv:20:4: warning: field `b` has been deprecated
18 | x.a = 123
19 | dump(x.b)
20 | x.b = 456
| ^
21 | dump(x.c)
22 | x.c = 11
vlib/v/checker/tests/field_deprecations.vv:21:9: error: field `c` has been deprecated since 2021-03-01; c use Xyz.a instead
19 | dump(x.b)
20 | x.b = 456
21 | dump(x.c)
| ^
22 | x.c = 11
23 | dump(x.d)
vlib/v/checker/tests/field_deprecations.vv:22:4: error: field `c` has been deprecated since 2021-03-01; c use Xyz.a instead
20 | x.b = 456
21 | dump(x.c)
22 | x.c = 11
| ^
23 | dump(x.d)
24 | x.d = 45

View File

@ -0,0 +1,36 @@
import v.checker.tests.module_with_structs_with_deprecated_fields as m
struct Abc {
mut:
x int
d int [deprecated]
z int
}
fn use_m_externally() {
x := m.Xyz{}
dump(x)
}
fn use_m_externally_and_use_deprecated_fields() {
mut x := m.Xyz{}
dump(x.a)
x.a = 123
dump(x.b)
x.b = 456
dump(x.c)
x.c = 11
dump(x.d)
x.d = 45
}
fn main() {
mut a := Abc{}
a.x = 1
a.d = 1
a.z = 1
dump(a)
println(a.d)
x := a.d + 1
dump(x)
}

View File

@ -1,4 +1,4 @@
vlib/v/checker/tests/fn_call_arg_mismatch_err_c.vv:13:18: error: `os.chdir(files) ?` (no value) used as value in argument 1 to `os.ls`
vlib/v/checker/tests/fn_call_arg_mismatch_err_c.vv:13:18: error: `os.chdir(files)?` (no value) used as value in argument 1 to `os.ls`
11 | println(files)
12 | } else {
13 | println(os.ls(os.chdir(files)?)?)

View File

@ -0,0 +1,13 @@
vlib/v/checker/tests/fn_ref_arg_mismatch_err.vv:15:10: error: literal argument cannot be passed as reference parameter `&T`
13 | fn main() {
14 | foo := Foo<int>{}
15 | foo.foo(12)
| ~~
16 |
17 | bar<int>(12)
vlib/v/checker/tests/fn_ref_arg_mismatch_err.vv:17:11: error: literal argument cannot be passed as reference parameter `&T`
15 | foo.foo(12)
16 |
17 | bar<int>(12)
| ~~
18 | }

View File

@ -0,0 +1,18 @@
module main
struct Foo<T> { }
fn (f &Foo<T>) foo(a &T) {
println(a)
}
fn bar<T>(a &T) {
println(a)
}
fn main() {
foo := Foo<int>{}
foo.foo(12)
bar<int>(12)
}

Some files were not shown because too many files have changed in this diff Show More