Compare commits

...

264 Commits

Author SHA1 Message Date
Jef Roosens e493725190
ci: disable gc by default
ci/woodpecker/push/vc Pipeline failed
ci/woodpecker/push/arch unknown status
ci/woodpecker/push/docker unknown status
2022-06-22 20:12:31 +02:00
Jef Roosens 8e4b53acc4
ci: Added docker workflow & Dockerfile 2022-06-22 20:12:30 +02:00
Jef Roosens 60ff01f692
ci: bumped alpine version & added mandoc to builder image 2022-06-22 20:12:18 +02:00
Jef Roosens 3ce87c3ffc
ci: Added PKGBUILD & workflow for deploying Arch packages 2022-06-22 20:12:18 +02:00
Jef Roosens 7af1cb3ac4
ci: Added docker workflow & Dockerfile 2022-06-22 20:12:18 +02:00
yuyi 585b5145fa
cgen: fix auto string method generated for []&int{len:1} (#14829) 2022-06-22 14:54:04 +03:00
Delyan Angelov 6a4ba22eae
tests: flush test headers to ensure stable output with VJOBS>1 2022-06-22 12:46:32 +03:00
pancake 23d1c792c0
native: fix macho generation for macos11-amd64 (#14821) 2022-06-22 11:58:27 +03:00
Delyan Angelov 74fb473301
ci: use unique concurrency.group for native_backend_tests.yml 2022-06-22 11:23:40 +03:00
Delyan Angelov 6c060b76fd
ci: fix v building inside native_backend_tests.yml 2022-06-22 11:19:00 +03:00
Delyan Angelov 48b2ab157b
ci: add a separate native_backend_tests.yml, to run the native tests on all the available github actions vm environments 2022-06-22 09:11:01 +03:00
yuyi c64c4907a2
parser: check closure var name conflict (#14823) 2022-06-22 09:04:15 +03:00
Delyan Angelov e2e3992e0d
ci: remove `continue-on-error: true` for most ci jobs (#14811) 2022-06-22 00:20:57 +03:00
ghosttk 8172fecb51
vweb: remove the extra '/' in scan_static_directory (#14806) 2022-06-21 15:13:09 +03:00
Larpon 9f5e442dab
tools: add bump flags to vcomplete (#14813) 2022-06-21 15:11:42 +03:00
yuyi c160ba2a8d
checker: stricter mutable reference check (fix #14803) (#14805) 2022-06-21 13:23:21 +03:00
Delyan Angelov c6f94e9cab
tools: distribute vpm queries randomly between the available backend servers 2022-06-21 12:58:37 +03:00
yuyi cab6355a38
cgen: fix array of reference sumtype index() (#14812) 2022-06-21 12:37:54 +03:00
Delyan Angelov f08c768c8e
tools: add a new readonly VPM server mirror url, to mitigate failures of the main vpm.vlang.io 2022-06-21 12:27:41 +03:00
Ulises Jeremias Cornejo Fandos e505fcdac0
encoding.csv: update reader.v (#14807) 2022-06-21 08:31:47 +03:00
yuyi f6f77e5264
clipboard: make clipboard.Clipboard public on windows (#14810) 2022-06-21 08:21:54 +03:00
David 'Epper' Marshall 524df8da1b
math: add zpl stuff (#14543) 2022-06-21 00:17:49 +03:00
Delyan Angelov 473d26ce47 builtin: enforce linking to libgc.a with -prod on macos 2022-06-20 21:29:27 +03:00
pancake 1caff5b379
native: initial support for apple-m1 (#14795) 2022-06-20 21:25:12 +03:00
yuyi 8703e336e0
checker: cleanup in fn_decl() (#14802) 2022-06-20 17:56:02 +03:00
Delyan Angelov 1fc9e1a716
tools: build c2v in non verbose mode by default 2022-06-20 16:03:39 +03:00
Delyan Angelov fa2e8d8459
tools: use os.system for the c2v runs to monitor the progress more easily 2022-06-20 15:50:38 +03:00
Delyan Angelov 0e4198f23b
tools: fix `v vet file.v` for `return if x { y // comment } else { z }` 2022-06-20 13:29:22 +03:00
yuyi cf1fc6f090
ast: fix array of reference sumtype appending (#14797) 2022-06-20 12:23:53 +03:00
Alexander Medvednikov 924239026c pref: disable gc for translated code 2022-06-19 20:07:45 +03:00
Alexander Medvednikov bc60b0d1a3 builder: add -c when building object files 2022-06-19 19:57:52 +03:00
Alexander Medvednikov d215618f4c sokol: mark pub structs 2022-06-19 19:47:47 +03:00
yuyi de136f6baf
checker: improve pub struct check (fix #14446) (#14777) 2022-06-19 17:42:22 +03:00
Delyan Angelov 37ef1ee453
tools: do show the output of c2v, when it fails 2022-06-19 17:08:21 +03:00
Delyan Angelov 7b1ade237b
tools: fix the first run of `v translate hw.c` 2022-06-19 16:19:56 +03:00
Delyan Angelov b9cb56572f
ci: use VTEST_JUST_ESSENTIAL=1 for the -cstrict test-self task in ubuntu-clang too 2022-06-19 16:08:24 +03:00
Alexander Medvednikov 6875a173ec cmd: enable `v translate`, download and install c2v 2022-06-19 15:52:42 +03:00
Delyan Angelov 97be840a6d
ci: use VTEST_JUST_ESSENTIAL=1 for the ubuntu -cstrict gcc task (prevent 2 hour runs) 2022-06-19 13:29:36 +03:00
lemon e0310964d9
native: initial support for `defer` (#14779) 2022-06-18 23:51:31 +03:00
Alexander Medvednikov 10051e005a parser, cgen: temporary prefix ++ for translated code 2022-06-18 13:30:47 +03:00
wahur666 18dfaf6164
tools: handle fn attributes/comments more robustly, when `v missdoc` is run (#14774) 2022-06-18 11:02:39 +03:00
yuyi 01fdd5d07f
cgen: add a minor optimisation for array.push_many (#14770) 2022-06-17 09:44:13 +03:00
Alexander Medvednikov b89617726c pref: is_o 2022-06-16 20:51:21 +03:00
Louis Schmieder 5df3d8ac75
orm: mysql fixes (#14772) 2022-06-16 20:19:49 +03:00
Alexander Medvednikov fb5a40d1c8 builder: handle linker errors when building .o files 2022-06-16 15:19:13 +03:00
yuyi 26714fadc5
ast, checker, cgen: improve sorting globals and consts (#14769) 2022-06-16 14:32:41 +03:00
Alexander Medvednikov 9c72b85f72 checker: temporary c2v struct init fix 2022-06-16 14:24:17 +03:00
Danilo Lekovic df239b9208
doc: fix 'specifing' typo (#14768) 2022-06-16 11:12:58 +03:00
yuyi f2962c34dd
cgen: format module_init generated c code (#14764) 2022-06-16 11:01:17 +03:00
Delyan Angelov 205221074c tools: support `v watch -k run examples/gg/rectangles.v` (keep a program running) 2022-06-15 18:00:10 +03:00
yuyi 0c1708db23
cgen: minor cleanup in stmt() (#14763) 2022-06-15 17:44:00 +03:00
yuyi 5135952c9c
v.util: add a retry loop for tool compilation in launch_tool() (#14760) 2022-06-15 11:59:53 +03:00
yuyi 7f38b92ca8
cgen: simplify sorting globals and consts (#14761) 2022-06-15 11:56:17 +03:00
yuyi 7c50d276c7
ast: minor cleanup in is_comptime_type() (#14759) 2022-06-14 14:21:45 +03:00
lemon e4e858b132
transformer: fix a bug with string literal length (#14757) 2022-06-14 12:42:45 +03:00
yuyi 6d8a0ad15d
ast, checker, cgen: sort consts with call expr (fix #14748) (#14755) 2022-06-14 10:50:20 +03:00
Spydr 2f1a896d18
native: integer-to-string conversion (#14758) 2022-06-14 00:35:25 +03:00
Leo Developer 67716b5b59
cgen: fix comptime if attributes for `test_` functions (#14756) 2022-06-13 21:22:25 +03:00
yuyi 5efa67906c
cgen: sort const array init order (fix #14748) (#14749) 2022-06-13 21:09:24 +03:00
yuyi 3535927bcd
parser: correct error message for seclector_expr_assign.vv (#14747) 2022-06-12 20:29:54 +03:00
Delyan Angelov 139c34c07d
gg: optimise app.gg.show_fps() (cache ctx.text_size() results, round frame to int without interpolation, center text inside the background rectangle) 2022-06-12 16:07:00 +03:00
Delyan Angelov 4682e17ac1
gg: fix `v -d show_fps run examples/gg/stars.v` 2022-06-12 15:17:04 +03:00
Delyan Angelov 7e06203da8 gg: support `-d show_fps` for all gg apps 2022-06-12 14:20:32 +03:00
Delyan Angelov ff8e286c88
compress.gzip: support `gzip.decompress(data, verify_length: false, verify_checksum: false)?` 2022-06-12 11:25:01 +03:00
Spydr 5c104cf981
native: implement `neg()` for arm64 (#14745) 2022-06-12 09:42:39 +03:00
Spydr 8fa1e30dd2
native: int-to-string conversion detecting zeros and negative values (#14743) 2022-06-11 21:20:13 +03:00
Alexander Medvednikov f08266ab66
doc: autofree/gc update 2022-06-11 20:44:28 +03:00
Joe Conigliaro f3351b6a29
pref: maintain order of 'debug' in compile defines as mod cache key depends on it 2022-06-12 02:22:04 +10:00
Delyan Angelov 5cea8d30fa
ci: make crun_test.v more robust by reducing the tested gap from 5x to 4x 2022-06-11 18:44:02 +03:00
yuyi a538ab7e8c
cgen: minor cleanup for sort globals and consts (#14742) 2022-06-11 18:42:22 +03:00
yuyi cdf4ffc513
cgen: sort globals and consts definitions (#14739) 2022-06-11 17:17:49 +03:00
lemon c7a619d16e
native: support `else`, `break`, `continue` (#14738) 2022-06-11 11:50:19 +03:00
Dialga da7a166708
v.pkgconfig: fix duplication in short flags to the standalone pkgconfig binary (#14740) 2022-06-11 11:06:55 +03:00
spaceface 26d051475a
cgen: ensure closures are kept alive when using the GC (#14736) 2022-06-10 19:48:50 +03:00
Delyan Angelov b27b6b2047
tools: let `v gret` make an easily visible diff.png image after regressions, and upload it too, to make CI failures easier to diagnose 2022-06-10 15:38:50 +03:00
yuyi fcaf529228
parser, checker: check undefined ident in if expr using generic type name (#14734) 2022-06-10 11:57:26 +03:00
Larpon 690a8422d1
vcomplete: add `-no-parallel` to flag completions (#14735) 2022-06-10 11:57:12 +03:00
Delyan Angelov f4869bcdc6 ci: add `apt-get update` before `apt-get install` in gg-regressions 2022-06-10 11:50:30 +03:00
Delyan Angelov ea71ea3ec1
examples: speed up mandelbrot.v a little, increase iterations/details too 2022-06-09 20:10:00 +03:00
yuyi 922f003729
cgen: fix fixed array global variable (fix #14712) (#14730) 2022-06-09 16:53:19 +03:00
Larpon be23ddc253
ci: remove `VFLAGS: -gc none` from vab runs (#14731) 2022-06-09 15:37:53 +03:00
yuyi 784361f153
checker: fix generic method on aliases receiver type (#14729) 2022-06-09 15:36:31 +03:00
Delyan Angelov e1360ccf8c
ci: add `VTEST_JUST_ESSENTIAL=1 ./v test-self` mode. Use it for alpine-docker-musl-gcc . 2022-06-09 15:26:52 +03:00
Ben 39e54a508b
os: correct description of windows_volume function (#14726) 2022-06-09 10:56:58 +03:00
spaceface 4ed9780b80
all: enable `-gc boehm` by default (#14577) 2022-06-09 00:44:29 +03:00
yuyi e6580fefaa
ast: cleanup in generic_insts_to_concrete() (#14724) 2022-06-08 23:17:03 +03:00
Spydr 8563696476
native: added new helper functions (#14723) 2022-06-08 23:16:15 +03:00
Larpon f58e5a94c2
gg: fire resize event before init if necessary on Android (#14725) 2022-06-08 21:43:20 +03:00
Ben c6b1c8d07a
os: add windows_volume function (#14721) 2022-06-08 21:26:24 +03:00
Mikey 5ac9b5c9f1
term.ui: don't print event data in readme example (vlang#14719) (#14720) 2022-06-08 09:36:22 +03:00
Delyan Angelov 4b3c3d9082
checker: add error for `if c >= A && c <= Z {` in non generic functions 2022-06-07 20:43:31 +03:00
Larpon 96a9faf2fd
strings: add split_capital (#14716) 2022-06-07 18:43:06 +03:00
David Valdespino Pavon 1d462136bc
net.http: cookie parsing fixes (#14420) 2022-06-07 12:52:43 +03:00
Larpon 8027919285
ci: update vab install (vlang/vab#176) (#14713) 2022-06-07 12:51:43 +03:00
Delyan Angelov 82594c0156
v.vcache: improve the output of `-d trace_usecache_n` 2022-06-07 12:39:30 +03:00
Delyan Angelov a942ecf737
v.vcache: support `-d trace_usecache_n` too (less verbose tracing for just the initialisation of CacheManager) 2022-06-07 12:24:53 +03:00
Delyan Angelov 82d23dedf1
builtin: add flush_stdout and flush_stderr to builtin.js.v, for feature parity with builtin.c.v 2022-06-07 12:15:35 +03:00
yuyi 7780f56c31
cgen: fix cast to generic interface (#14708) 2022-06-07 08:32:25 +03:00
ChAoS_UnItY 73b59c7b16
cgen: fix none literal str() function calling (#14704) 2022-06-07 01:29:27 +03:00
yuyi abf35270cf
checker: fix json decoder with generic struct (#14700) 2022-06-06 19:25:02 +03:00
ChAoS_UnItY ce26d5bc5c
cgen: fix mutable receiver type calling mapping function causes C error (fix #14230) (#14696) 2022-06-06 18:34:04 +03:00
Delyan Angelov 778fe2cde0
ast: use `[direct_array_access]` for `attrs []Attr` lookup methods 2022-06-06 14:43:04 +03:00
Delyan Angelov 6398043094
cgen: fix missing function names in declarations on `[c2v_variadic][c: xyz]fn deh_fprintf(fstream &C.FILE, fmt &i8)` 2022-06-06 14:33:24 +03:00
Alexander Medvednikov 7f67981637 checker: allow literal args as references in translated code 2022-06-06 13:24:36 +03:00
yuyi 8a2236d3f8
cgen: fix generic interface with non-generic method (#14694) 2022-06-06 12:30:48 +03:00
Alexander Medvednikov e89a6269e4 checker: do not require fn main when building an object file 2022-06-06 12:29:57 +03:00
yuyi ce771876a3
cgen: fix nested map index check (fix #14683) (#14687) 2022-06-06 06:29:22 +03:00
ChAoS_UnItY df80b33dc0
cgen: fix array init with it (fix #14679) (#14680) 2022-06-05 19:41:54 +03:00
Leo Developer 3a90d8ef14
compress: add a new module `compress.gzip` too (#14686) 2022-06-05 18:53:45 +03:00
Ikko Ashimine 7b25957a26
builtin: fix typo in array.v (#14688) 2022-06-05 18:49:40 +03:00
ChAoS_UnItY b000728845
compress.gzip / deflate: rename gzip module into deflate module (#14682) 2022-06-05 08:48:38 +03:00
yuyi 4cf6abd99d
checker: check using literal argument as reference parameter (#14674) 2022-06-05 08:44:35 +03:00
ChAoS_UnItY f6ebbc99cd
cgen: fix array type as interface (fix #14677) (#14681) 2022-06-05 05:05:48 +03:00
yuyi 5d429140a4
json: fix json decode with missing map type field (#14678) 2022-06-04 20:27:11 +03:00
Wertzui123 d71fd04c81
thirdparty/sokol: apply speaker/headset bug fix from latest upstream sokol_audio.h (#14676) 2022-06-04 20:24:07 +03:00
Delyan Angelov 3c5ae41712
examples: simplify the shebang in the v_script.vsh example 2022-06-04 20:15:39 +03:00
Delyan Angelov 3ac3375b43
cgen: fix `for (int i = 0; i < 10; i++, a++) {` (multiple expressions in the inc part) 2022-06-04 20:03:59 +03:00
Delyan Angelov 82eb495617 ci: on windows-msvc, skip const_fixed_array_containing_references_to_itself_test.v 2022-06-04 13:05:27 +03:00
Delyan Angelov f2171b4148 ci: fix macos clang failures with const_fixed_array_containing_references_to_itself_test.v 2022-06-04 09:15:37 +03:00
Delyan Angelov 4cfff58fdf
checker: allow for references to fixed array consts inside their initialisation `const a = [ ... &a[0] ...]!` 2022-06-04 09:04:12 +03:00
Alexander Medvednikov a8461a900d vweb: use http.Cookie 2022-06-04 06:52:46 +03:00
yuyi 66572d5ead
ast: cleanup is_lit() (#14672) 2022-06-03 21:02:36 +03:00
Delyan Angelov c15d1c6e7e
cgen,ci: fix cast_bool_to_int_test.v on windows-tcc 2022-06-03 20:53:10 +03:00
Delyan Angelov 6f9070e06d cgen: do not initialise externally declared globals (with -cstrict with [c_extern]). 2022-06-03 19:14:01 +03:00
Delyan Angelov dbaecdc058
cgen: simplify int(bool_var) casts; support [c_extern] tag for global declarations 2022-06-03 18:48:08 +03:00
yuyi daa94de93f
cgen: fix autofree_variable() (fix #14576) (#14602) 2022-06-03 17:41:30 +03:00
yuyi dcbd8d6405
cgen: fix if expr with optional method call (#14600) 2022-06-03 15:57:39 +03:00
ChAoS_UnItY 65066098d8
compress: Add gzip module & refactor compress & compress.zlib module (#14599) 2022-06-03 09:00:11 +03:00
yuyi 251716fa0e
vrepl: fix error for exitasdfasdf in repl (fix #14593) (#14598) 2022-06-02 19:59:57 +03:00
Delyan Angelov 9f7656f328
ci: vfmt vlib/v/checker/check_types.v 2022-06-02 19:02:34 +03:00
Delyan Angelov c892b3203e
checker: speed up check_expected_call_arg, by only calling Table.type_to_str on errors 2022-06-02 18:58:20 +03:00
Hunam 41414b5d5f
vlib: add `net.http.mime` (#14516) 2022-06-02 18:07:25 +03:00
Delyan Angelov aae5b9fb95
ast.table: cache the returned values of Table.type_to_str_using_aliases/2
This results in 9% speedup of the checker stage for V itself.
2022-06-02 17:53:30 +03:00
Delyan Angelov ed759b2ec9
ci: vfmt vlib/v/parser/parser.v 2022-06-02 15:55:13 +03:00
Delyan Angelov 031629faa1
tools: add cmd/tools/measure/scanner_speed.v and cmd/tools/measure/parser_speed.v 2022-06-02 13:50:25 +03:00
Delyan Angelov 9a0ec7f367
strings: update docs for .str() and for .free() 2022-06-02 10:41:32 +03:00
yuyi 545eaae77b
ast: fix IfExpr.str() (#14595) 2022-06-02 10:28:31 +03:00
Alexander Medvednikov 8b0e843cb8 checker, cgen: c2v variadic fixes 2022-06-02 09:35:25 +03:00
yuyi 10fb16e00b
parser: fix optional with multiple statements (#14592) 2022-06-02 08:23:16 +03:00
Claudio Cesar de Sá 5bf246fce6
examples: some new graphs algorithms and improving 2 others (#14556) 2022-06-02 07:11:29 +03:00
Ben e201665e92
os: fix file_ext function (#14566) 2022-06-02 07:09:46 +03:00
Wertzui123 f971da9a93
help: add Windows to the list of supported native backends in `v help build-native` (#14589) 2022-06-02 07:08:46 +03:00
ChAoS_UnItY a95cdac635
cgen: fix type not being unaliased (fix #14568) (#14591) 2022-06-02 06:21:01 +03:00
yuyi 55951e0943
checker: minor cleanup in if_expr() (#14584) 2022-06-02 06:20:09 +03:00
Hunam d0a1608ede
ci: re-enable Go2V test suite (#14588) 2022-06-02 06:19:38 +03:00
yuyi 33a2d00445
cgen: fix fixed array of aliases struct (#14583) 2022-06-01 16:56:12 +03:00
Delyan Angelov bf70f0b436
v: add support for `v crun examples/hello_world.v`, use crun mode for .vsh files by default. (#14554) 2022-06-01 14:47:52 +03:00
Delyan Angelov c91b646372
examples: document how to produce the shared library, needed for a standalone run of examples/dynamic_library_loading/use.v 2022-06-01 13:58:49 +03:00
yuyi 786045c7da
parser: fix comptime if script mode (fix #6419) (#14578) 2022-06-01 13:27:27 +03:00
Delyan Angelov 5a2c271bd4
cgen: do not #include signal.h, on -os wasm32 and -d no_segfault_handler 2022-06-01 13:21:22 +03:00
Delyan Angelov 2fa64f1471
ci: skip embed_file_test.v for now 2022-06-01 12:06:02 +03:00
yuyi fefb9643b2
checker, cgen: fix array index optional with if expr (#14575) 2022-06-01 09:18:59 +03:00
Delyan Angelov 846ddfd728
v: always embed file data of \$embed_file(file) constructs, even without -prod, unless `-d embed_only_metadata` is given. 2022-06-01 09:08:18 +03:00
Alexander Medvednikov f40c30c3dc cgen: fix goto label 2022-06-01 06:34:04 +03:00
Alexander Medvednikov c54c9b817c cgen: c2v infix fix 2022-06-01 06:14:28 +03:00
Larpon 84e375e38a
toml: update readme with value_opt() usage (#14569) 2022-05-31 19:02:33 +03:00
yuyi 80cc88427b
scanner: minor cleanup in scanner.v (#14565) 2022-05-31 11:52:47 +03:00
playX db34adaec8
builtin.js: fix string.int method (#14564) 2022-05-31 11:52:11 +03:00
Delyan Angelov dc30089c74 v.util, v.builder: fix util.module_is_builtin on macos with -usecache 2022-05-31 09:30:45 +03:00
Ben 4ffdcf8058
os: add existing_path function (#14536) 2022-05-31 06:32:12 +03:00
Delyan Angelov 928dafeb6d
strconv: make f64_to_str_lnd1 public (fix building vlang/coreutils printf) 2022-05-30 22:14:22 +03:00
Delyan Angelov fc64f09f0b
crypto.md5: improve performance of md5.blockblock_generic 2022-05-30 21:56:39 +03:00
Delyan Angelov 0f3b2c2ae7
builtin: use C.fwrite (buffered) for _write_buf_to_fd (#14558) 2022-05-30 19:15:05 +03:00
Delyan Angelov 58ebc0680e
builtin: fix sporadic linking failures on `v -cc gcc -gc boehm examples/hello_world.v` 2022-05-30 15:17:01 +03:00
yuyi 844ba2a177
checker: vfmt overload_return_type.vv (#14557) 2022-05-30 13:49:13 +03:00
Hunam 78d1b7f4ef
net.http: `Response.text` -> `Response.body` (#14478) 2022-05-29 20:27:18 +03:00
yuyi 2c5febe25e
scanner: fix string interpolation with inner curly braces (fix #12242) (#14553) 2022-05-29 19:28:23 +03:00
yuyi 79d861ad4f
parser: fix cast or dump arguments ending with comma (#14552) 2022-05-29 09:15:55 +03:00
Delyan Angelov 63d15086e7 docs: document explicitly, that maps support `if v := m[k] {` too 2022-05-28 21:36:13 +03:00
Delyan Angelov c006d5c242
cgen: add support for `v -cmain SDL_main sdl_example.v` 2022-05-28 21:16:48 +03:00
yuyi c0ef6dbde8
cgen: fix cross assign in closure (#14549) 2022-05-28 20:47:54 +03:00
yuyi 7dcc19df55
ast: fix call_expr.str() with propagate_option or propagate_result (#14550) 2022-05-28 20:47:29 +03:00
Delyan Angelov c6a6eb9a3c
ci: temporary workaround for cross assignment in a closure leading to cgen error 2022-05-28 13:21:59 +03:00
yuyi b8e8768928
parser, cgen: fix cross assign with parentheses (#14545) 2022-05-28 12:02:17 +03:00
yuyi a46cf10e92
checker: fix declare assign literal with closure (#14544) 2022-05-28 11:50:37 +03:00
Delyan Angelov 4894f61998
toml: add `pub fn (d Doc) value_opt(key string) ?Any {` and some tests for toml.parse_dotted_key/1 2022-05-28 09:18:18 +03:00
yuyi a971b9a99a
parser: fix match expr case with struct init (#14538) 2022-05-27 18:51:40 +03:00
Larpon f3e7f24ee6
tools: implement `v missdoc --diff oldv newv` (#14537) 2022-05-27 18:19:06 +03:00
spaceface 52a3e5e780
cgen: fix a race condition in the closure implementation (#14532) 2022-05-27 17:35:02 +03:00
Delyan Angelov f7995c8916
checker: fix error position in `fn f() int { return 1,2 }` 2022-05-27 16:53:24 +03:00
Delyan Angelov 36cb552918
ci: vfmt cmd/tools/vpm.v 2022-05-27 15:22:47 +03:00
Delyan Angelov 156aa661ee
tools: fix `v install https://github.com/nedpals/vex.git` (fix #14483) 2022-05-27 15:21:01 +03:00
Louis Schmieder a83ac948a0
orm: document & fix pg (#14533) 2022-05-26 22:53:09 +03:00
yuyi b97ef09b2d
checker: cleanup checker.v (#14530) 2022-05-26 22:52:42 +03:00
Delyan Angelov bb6ef8bba8
cgen: fix parallel cgen for json encoding of struct fields that have default values 2022-05-26 16:55:44 +03:00
Delyan Angelov 8c969efe6b
tests: make json_test.v less noisy, to see errors easier 2022-05-26 15:27:54 +03:00
Wertzui123 1017335365
x.ttf: fix typo in README (#14528) 2022-05-26 11:17:15 +03:00
yuyi 3849cdcecc
fmt: fix fn return types list ending with comma (#14529) 2022-05-26 04:20:22 +03:00
Delyan Angelov 410b57b2fa
all: add support for struct field deprecation (#14527) 2022-05-26 00:44:18 +03:00
Larpon 95cc535fc7
ci: use missdoc as subcmd (#14524) 2022-05-25 19:07:07 +03:00
Larpon 9f5e999b4a
tools: add v missdoc --verify mode (#14525) 2022-05-25 19:06:11 +03:00
Larpon 6c08af63ff
embed_file: rename debug_embed_file_in_prod -> force_embed_file (#14523) 2022-05-25 18:26:17 +03:00
yuyi 59e57f0c62
fmt: fix fmt of Ok<[]Token>{[]} (#14522) 2022-05-25 15:17:30 +03:00
kahsa dd8c96f6bc
net.html: use `-d debug_html` instead of `-g`, prevent undesired output, while debugging user programs (#14521) 2022-05-25 11:23:56 +03:00
Delyan Angelov 31c234485a
cgen: split keys and values in generated new_map_init calls into separate lines 2022-05-25 10:26:17 +03:00
yuyi e19ac0c4a7
checker: check fn call with argument mismatch (#14519) 2022-05-25 09:00:26 +03:00
yuyi 79a75c5ac0
parser: fix fmt error for json decode (#14520) 2022-05-25 08:59:22 +03:00
yuyi 0eb3f8854d
fmt: fix file with just imports (fix #14267) (#14513) 2022-05-24 20:14:08 +03:00
Ben f431020764
os: minor clean ups on filepath.v (#14506) 2022-05-24 11:29:32 +03:00
yuyi f35f7fe997
fmt: fix using rand.seed() when import rand and rand.seed (#14511) 2022-05-24 11:21:49 +03:00
yuyi a5b98cb267
parser: check fn decl multi return types without parentheses (#14508) 2022-05-24 05:15:31 +03:00
yuyi 5ade39f8db
cgen: fix sizeof('str') and sizeof(r'str') (#14507) 2022-05-24 05:14:38 +03:00
Delyan Angelov 953ef1f8c9
pref: add support for `-dump-files -` and for `-dump-modules -` 2022-05-23 19:51:21 +03:00
Delyan Angelov dda49fe735
v.builder: support -dump-c-flags with -cc msvc too 2022-05-23 18:29:39 +03:00
yuyi a3c0a9b791
checker: minor optimization in fn_call() and method_call() (#14503) 2022-05-23 08:00:57 +03:00
yuyi 4ef9e2c05a
parser: fix sizeof(c'str') (fix #14499) (#14502) 2022-05-23 01:59:39 +03:00
Alexander Medvednikov 863eeca2e0 checker: vfmt checker.v 2022-05-23 00:12:37 +03:00
Daniel Däschle 5e95bdc451
checker: allow but deprecate propagating result as option (#14500) 2022-05-23 00:11:29 +03:00
yuyi 7f03b89611
checker: check method call argument type mismatch (#14496) 2022-05-22 22:16:46 +03:00
spaceface ba859c584b
builtin: add static GC support on Windows with TCC (#14497) 2022-05-22 21:25:46 +03:00
Delyan Angelov 5328dabad1
time: simplify some very commonly used t.format methods 2022-05-22 21:09:49 +03:00
spaceface e5ff2ab455
ci: run on all branches on forks (#14498) 2022-05-22 20:19:04 +03:00
yuyi 1f3336c9d3
checker: fix map get anon fn value with mut argument (fix #14479) (#14493) 2022-05-22 16:28:53 +03:00
Alexander Medvednikov 245d28d57a checker: c2v infix fix 2022-05-22 14:53:21 +03:00
Daniel Däschle d3ffd983c8
markused: add _result_ok (#14495) 2022-05-22 14:52:38 +03:00
Subhomoy Haldar 3647fb4def
rand: move dist functions to top module and PRNG interface; minor cleanup (#14481) 2022-05-22 13:21:52 +03:00
Delyan Angelov 64a686f41f examples: speed up mandelbrot.v by using a constant size thread pool, processing smaller chunks 2022-05-22 11:12:16 +03:00
yuyi 50ab2cfd1a
fmt: fix 'strings' name variable call generate auto import (fix #9713) (#14485) 2022-05-21 15:01:58 +03:00
Delyan Angelov 0ceb16f285
v.builder: use /NOLOGO, when building cached object files with msvc 2022-05-21 12:56:24 +03:00
Delyan Angelov c0dcc80e18
cgen: support `-profile -` for _test.v files too 2022-05-21 10:59:01 +03:00
Delyan Angelov a7afb2d1eb
time: remove `import math` dependency, by implementing a simpler version of mceil 2022-05-21 02:04:17 +03:00
Ben 971c55cf30
os: add norm_path and abs_path function (#14435) 2022-05-21 01:16:29 +03:00
Daniel Däschle efc5cab8c3
checker,cgen: allow result if guard (#14474) 2022-05-20 19:34:53 +03:00
Alexander Medvednikov 53c217fe5e cgen: do not generate `_vinit()` for translated .o code 2022-05-20 19:28:37 +03:00
Vincenzo Palazzo 17bba712bd
checker: ban unsafe pointer/fn comparison (#14462) 2022-05-20 18:30:16 +03:00
Daniel Däschle d81fbb1ccd
ci: only run on master (#14476) 2022-05-20 18:27:35 +03:00
Delyan Angelov dd1049f21d
thirdparty: fix typo in atomic.h, cleanup comments. 2022-05-20 14:21:31 +03:00
yuyi 28b0cbddad
parser: check error for script mode that define main function (fix #14467) (#14469) 2022-05-20 14:20:18 +03:00
yuyi 913164bc73
builtin: minor cleanup in string_interpolation.v (#14471) 2022-05-20 14:19:27 +03:00
yuyi bf44572f30
fmt: fix chain calls with comments (#14470) 2022-05-20 11:47:44 +03:00
StunxFS 11bdb04d0c
json: fix struct field default value support (#14304) 2022-05-20 11:22:17 +03:00
Delyan Angelov ca00b59b3f
tests: make potential failures in urllib_test.v more informative 2022-05-20 08:49:56 +03:00
David 'Epper' Marshall 120f31b4d9
math: update documentation (#14457) 2022-05-20 08:45:54 +03:00
David 'Epper' Marshall 23568f19da
url: fix parse (#14456) 2022-05-20 02:58:58 +03:00
crthpl 95d24e543d
checker, cgen: fix shared non-decl assignment (#14466) 2022-05-20 02:58:11 +03:00
yuyi 55e7daa2f9
cgen: simplify auto_str_methods.v (#14463) 2022-05-19 22:29:15 +03:00
crthpl 46f94e8d68
cgen: fix autostr of shared fields (#14455) 2022-05-19 17:00:27 +03:00
Daniel Däschle a52fbc5e51
checker: forbid optional variable (#14460) 2022-05-19 16:04:44 +03:00
yuyi 3291c59ebf
cgen: minor cleanup in auto_str_methods.v (#14461) 2022-05-19 14:02:07 +03:00
Delyan Angelov 634e8c3624
vfmt: keep selective imported names used for generic calls 2022-05-19 12:48:43 +03:00
yuyi 15c62bc8e8
parser: improve error messages of 'for val in array' (#14459) 2022-05-19 11:53:27 +03:00
Delyan Angelov 25812e52f0
ci: use V_CI_CSTRICT=1 consistently for every `v -cstrict test-self` 2022-05-19 11:52:31 +03:00
Delyan Angelov a52590572f
tests: fix the push_work_on_channel.vv output 2022-05-19 11:30:36 +03:00
Delyan Angelov 3d5617c4fa
tests: re-add the disambiguated `for (val in [TokenValue(`+`), TokenValue(`-`)]) {` test 2022-05-19 08:31:03 +03:00
Delyan Angelov 809b1ca3b4
Revert "parser: fix 'val in array' as condition in for stmt (fix #14440) (#14451)"
This reverts commit b482c0512b.
2022-05-19 08:29:30 +03:00
yuyi b482c0512b
parser: fix 'val in array' as condition in for stmt (fix #14440) (#14451) 2022-05-18 20:38:53 +03:00
Delyan Angelov 805a7d9713 ci: skip more .vv files on specific jobs 2022-05-18 20:04:59 +03:00
yuyi 5b96f7e8fd
checker: split up infix.v from checker.v (#14449) 2022-05-18 14:52:53 +03:00
yuyi 4cbfa884c5
cgen: fix another error for 'in array of sumtype' (#14448) 2022-05-18 14:39:35 +03:00
Delyan Angelov f2447a4bd8
tests: do not use -prod for compiling .vv files in vlib/v/checker/tests/ and vlib/v/parser/tests/ 2022-05-18 13:39:57 +03:00
Delyan Angelov 2cc3b74e19
tests: cleanup compiler_errors_test.v using language features (chan), instead of raw `sync.new_channel` and `unsafe { ch.push }` calls 2022-05-18 13:39:56 +03:00
Larpon 9de0c725f6
checker: add test for empty #flag node, (fix #14291) (#14447) 2022-05-18 13:38:58 +03:00
Adam Oates a786c58d0a
os: add `fn user_names()` (#14424) 2022-05-18 13:37:34 +03:00
yuyi 417a6dc506
cgen: fix error for 'in array of sumtype' (#14444) 2022-05-18 08:51:31 +03:00
Larpon 8eea861c93
vcomplete: improve flag completion, add missdoc (#14415) 2022-05-18 08:50:32 +03:00
Delyan Angelov ed17779434
math.big: fix Integer.bit_len() when there are no digits in the number 2022-05-18 08:29:08 +03:00
Delyan Angelov ebac3bebb1
pref: pass -v after a command, to the command only, do not set verbose mode on 2022-05-18 07:03:00 +03:00
playX a608516b82
checker: c2v fixed array fix (#14436) 2022-05-18 02:35:05 +03:00
spaceface b5fb848508
cgen: reduce the closure memory usage (#14437) 2022-05-18 02:34:51 +03:00
Delyan Angelov 65d9c8fa6f
tools: add a vet_known_failing_windows skip list to `v test-cleancode` 2022-05-17 20:19:37 +03:00
Delyan Angelov dfa2d63616
ci: use VJOBS=1 for the macos v test-cleancode task too; cleanup periodic.yml 2022-05-17 19:55:28 +03:00
Delyan Angelov 4e56147223 ci: vfmt builtin_d_use_libbacktrace.c.v 2022-05-17 19:41:57 +03:00
Alexander Medvednikov 2a06290ac7 cgen: fix eq generation for translated code 2022-05-17 15:30:05 +03:00
Ned db4b49a5ca
builtin: print libbacktrace output to stderr, on panics/segfault crash (#14434) 2022-05-17 14:56:34 +03:00
448 changed files with 24775 additions and 3395 deletions


@ -15,6 +15,7 @@ concurrency:
jobs:
ubuntu-tcc:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
env:
VFLAGS: -cc tcc -no-retry-compilation
@ -60,7 +61,7 @@ jobs:
- name: Self tests
run: ./v test-self
# - name: Self tests (-cstrict)
# run: ./v -cstrict test-self
# run: V_CI_CSTRICT=1 ./v -cstrict test-self
- name: Test time functions in a timezone UTC-12
run: TZ=Etc/GMT+12 ./v test vlib/time/
- name: Test time functions in a timezone UTC-3
@ -94,6 +95,7 @@ jobs:
ubuntu-tcc-boehm-gc:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
env:
VFLAGS: -cc tcc -no-retry-compilation
@ -149,7 +151,8 @@ jobs:
[ "$(stat -c %s leaks.txt)" = "0" ]
macos:
runs-on: macOS-latest
runs-on: macOS-12
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
env:
VFLAGS: -cc clang
@ -194,7 +197,7 @@ jobs:
./v cmd/tools/test_if_v_test_system_works.v
./cmd/tools/test_if_v_test_system_works
- name: All code is formatted
run: ./v test-cleancode
run: VJOBS=1 ./v test-cleancode
- name: Self tests
run: VJOBS=1 ./v test-self
- name: Build examples
@ -239,6 +242,7 @@ jobs:
ubuntu:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
steps:
- uses: actions/checkout@v2
@ -294,7 +298,7 @@ jobs:
- name: Self tests (-prod)
run: ./v -o vprod -prod cmd/v && ./vprod test-self
- name: Self tests (-cstrict)
run: ./v -cc gcc -cstrict test-self
run: VTEST_JUST_ESSENTIAL=1 V_CI_CSTRICT=1 ./v -cc gcc -cstrict test-self
- name: Build examples
run: ./v build-examples
- name: Build tetris.v with -autofree
@ -338,6 +342,7 @@ jobs:
ubuntu-clang:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
env:
VFLAGS: -cc clang
@ -383,12 +388,16 @@ jobs:
./cmd/tools/test_if_v_test_system_works
- name: All code is formatted
run: ./v test-cleancode
- name: Self tests
run: ./v test-self
- name: Self tests (-prod)
run: ./v -o vprod -prod cmd/v && ./vprod test-self
- name: Self tests (vprod)
run: |
./v -o vprod -prod cmd/v
./vprod test-self
- name: Self tests (-cstrict)
run: ./v -cstrict test-self
run: VTEST_JUST_ESSENTIAL=1 V_CI_CSTRICT=1 ./vprod -cstrict test-self
- name: Build examples
run: ./v build-examples
- name: Build examples with -autofree
@ -424,6 +433,7 @@ jobs:
windows-gcc:
runs-on: windows-2019
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
env:
VFLAGS: -cc gcc
@ -486,6 +496,7 @@ jobs:
windows-msvc:
runs-on: windows-2019
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
env:
VFLAGS: -cc msvc
@ -533,6 +544,7 @@ jobs:
windows-tcc:
runs-on: windows-2019
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
env:
VFLAGS: -cc tcc -no-retry-compilation
@ -582,7 +594,9 @@ jobs:
- name: Build examples
run: ./v build-examples
- name: v2 self compilation
run: .\v.exe -o v2.exe cmd/v && .\v2.exe -o v3.exe cmd/v
run: .\v.exe -o v2.exe cmd/v && .\v2.exe -o v3.exe cmd/v && .\v3.exe -o v4.exe cmd/v
- name: v2 self compilation with -gc boehm
run: .\v.exe -o v2.exe -gc boehm cmd/v && .\v2.exe -o v3.exe -gc boehm cmd/v && .\v3.exe -o v4.exe -gc boehm cmd/v
## ## tcc32
## - name: Build with make.bat -tcc32
@ -627,6 +641,7 @@ jobs:
# ubuntu-autofree-selfcompile:
# runs-on: ubuntu-20.04
# if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
# timeout-minutes: 121
# env:
# VFLAGS: -cc gcc
@ -640,6 +655,7 @@ jobs:
# ubuntu-musl:
# runs-on: ubuntu-20.04
# if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
# timeout-minutes: 121
# env:
# VFLAGS: -cc musl-gcc


@ -11,6 +11,7 @@ on:
jobs:
ubuntu:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30
env:
VFLAGS: -cc tcc -no-retry-compilation
@ -49,6 +50,7 @@ jobs:
macos:
runs-on: macos-11
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30
env:
VFLAGS: -cc clang


@ -12,6 +12,7 @@ jobs:
macos-cross:
runs-on: macOS-latest
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 25
env:
VFLAGS: -cc clang
@ -44,10 +45,11 @@ jobs:
- name: Compile to raw Android (non-graphic) compatible
run: |
# Test that V can compile non-graphic app to Android compatible code *without* using the -apk flag
./v -os android examples/toml.v
./v -os android -gc none examples/toml.v
linux-cross:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 25
env:
VFLAGS: -cc tcc -no-retry-compilation
@ -96,11 +98,12 @@ jobs:
- name: toml.v can be compiled to raw Android C
run: |
# Test that V can compile non-graphic app to Android compatible code *without* using the -apk flag
./v -os android examples/toml.v
./v -os android -gc none examples/toml.v
windows-cross:
runs-on: windows-2019
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 25
steps:
- uses: actions/checkout@v2


@ -74,6 +74,7 @@ concurrency:
jobs:
tests-sanitize-undefined-clang:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180
env:
VFLAGS: -cc clang
@ -101,6 +102,7 @@ jobs:
tests-sanitize-undefined-gcc:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180
env:
VFLAGS: -cc gcc
@ -127,6 +129,7 @@ jobs:
tests-sanitize-address-clang:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180
env:
VFLAGS: -cc clang
@ -158,6 +161,7 @@ jobs:
tests-sanitize-address-msvc:
runs-on: windows-2019
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180
env:
VFLAGS: -cc msvc
@ -189,6 +193,7 @@ jobs:
tests-sanitize-address-gcc:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180
env:
VFLAGS: -cc gcc
@ -220,9 +225,10 @@ jobs:
tests-sanitize-memory-clang:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180
env:
VFLAGS: -cc clang
VFLAGS: -cc clang -gc none
VJOBS: 1
VTEST_SHOW_START: 1
steps:


@ -12,6 +12,7 @@ jobs:
run:
name: Run
runs-on: ubuntu-latest
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
steps:
- name: Checkout
uses: actions/checkout@v2


@ -16,7 +16,8 @@ jobs:
alpine-docker-musl-gcc:
runs-on: ubuntu-20.04
timeout-minutes: 121
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 181
container:
# Alpine docker pre-built container
image: thevlang/vlang:alpine-build
@ -45,18 +46,19 @@ jobs:
- name: All code is formatted
run: ./v test-cleancode
- name: Test V fixed tests
run: ./v test-self
- name: Run only essential tests
run: VTEST_JUST_ESSENTIAL=1 ./v test-self
ubuntu-docker-musl:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
container:
image: thevlang/vlang:ubuntu-build
env:
V_CI_MUSL: 1
V_CI_UBUNTU_MUSL: 1
VFLAGS: -cc musl-gcc
VFLAGS: -cc musl-gcc -gc none
volumes:
- ${{github.workspace}}:/opt/vlang


@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-20.04
timeout-minutes: 5
env:
MOPTIONS: --no-line-numbers --relative-paths --exclude /vlib/v/ --exclude /builtin/linux_bare/ --exclude /testdata/ --exclude /tests/ vlib/
MOPTIONS: --relative-paths --exclude /vlib/v/ --exclude /builtin/linux_bare/ --exclude /testdata/ --exclude /tests/
steps:
- uses: actions/checkout@v2
- name: Build V
@ -35,14 +35,4 @@ jobs:
- name: Check against parent commit
run: |
./v run cmd/tools/missdoc.v $MOPTIONS | sort > /tmp/n_v.txt
cd pv/ && ../v run ../cmd/tools/missdoc.v $MOPTIONS | sort > /tmp/o_v.txt
count_new=$(cat /tmp/n_v.txt | wc -l)
count_old=$(cat /tmp/o_v.txt | wc -l)
echo "new pubs: $count_new | old pubs: $count_old"
echo "new head: $(head -n1 /tmp/n_v.txt)"
echo "old head: $(head -n1 /tmp/o_v.txt)"
if [[ ${count_new} -gt ${count_old} ]]; then
echo "The following $((count_new-count_old)) function(s) are introduced with no documentation:"
diff /tmp/n_v.txt /tmp/o_v.txt ## diff does exit(1) when files are different
fi
./v missdoc --diff $MOPTIONS pv/vlib vlib


@ -10,6 +10,7 @@ on:
jobs:
build-vc:
runs-on: ubuntu-latest
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
env:
VREPO: github.com/vlang/vc.git
steps:


@ -11,6 +11,7 @@ on:
jobs:
gg-regressions:
runs-on: ubuntu-18.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 10
env:
VFLAGS: -cc tcc
@ -20,25 +21,24 @@ jobs:
uses: actions/checkout@v2
- name: Build local v
run: make -j4
run: make
- uses: openrndr/setup-opengl@v1.1
- name: Setup dependencies
run: |
# imagemagick : convert, mogrify
# xvfb : xvfb (installed by openrndr/setup-opengl@v1.1)
# openimageio-tools : idiff
# libxcursor-dev libxi-dev : V gfx deps
# mesa-common-dev : For headless rendering
# freeglut3-dev : Fixes graphic apps compilation with tcc
sudo apt-get update
sudo apt-get install imagemagick openimageio-tools mesa-common-dev libxcursor-dev libxi-dev freeglut3-dev
wget https://raw.githubusercontent.com/tremby/imgur.sh/c98345d/imgur.sh
git clone https://github.com/Larpon/gg-regression-images gg-regression-images
chmod +x ./imgur.sh
- uses: openrndr/setup-opengl@v1.1
- uses: actions/checkout@v2
with:
repository: Larpon/gg-regression-images
path: gg-regression-images
- name: Sample and compare
id: compare
continue-on-error: true
@ -50,4 +50,5 @@ jobs:
if: steps.compare.outcome != 'success'
run: |
./imgur.sh /tmp/fail.png
./imgur.sh /tmp/diff.png
exit 1


@ -0,0 +1,36 @@
name: native backend CI
on:
push:
paths-ignore:
- "**.md"
pull_request:
paths-ignore:
- "**.md"
concurrency:
group: native-backend-ci-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
jobs:
native-backend:
strategy:
matrix:
os: [ubuntu-18.04, ubuntu-20.04, macos-10.15, macos-11, macos-12, windows-2016, windows-2019, windows-2022]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Build V with make.bat
if: ${{ startsWith(matrix.os, 'windows') }}
run: |
.\make.bat
.\v.exe symlink -githubci
- name: Build V with make
if: ${{ !startsWith(matrix.os, 'windows') }}
run: |
make
./v symlink -githubci
- name: Test the native backend
run: v test vlib/v/gen/native/


@ -15,6 +15,7 @@ concurrency:
jobs:
no-gpl-by-accident:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 15
steps:
- uses: actions/checkout@v2
@ -24,6 +25,7 @@ jobs:
code-formatting:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 15
env:
VFLAGS: -cc gcc
@ -40,6 +42,7 @@ jobs:
performance-regressions:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 15
env:
VFLAGS: -cc gcc
@ -64,6 +67,7 @@ jobs:
misc-tooling:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
env:
VFLAGS: -cc tcc -no-retry-compilation
@ -85,7 +89,6 @@ jobs:
- name: g++ version
run: g++-9 --version
- name: V self compilation with g++
continue-on-error: true
run: ./v -cc g++-9 -no-std -cflags -std=c++11 -o v2 cmd/v && ./v2 -cc g++-9 -no-std -cflags -std=c++11 -o v3 cmd/v
## - name: Running tests with g++
## run: ./v -cc g++-9 test-self
@ -94,7 +97,6 @@ jobs:
run: ./v -autofree -o v2 cmd/v ## NB: this does not mean it runs, but at least keeps it from regressing
- name: Shader examples can be build
continue-on-error: true
run: |
wget https://github.com/floooh/sokol-tools-bin/raw/33d2e4cc26088c6c28eaef5467990f8940d15aab/bin/linux/sokol-shdc
chmod +x ./sokol-shdc
@ -112,6 +114,7 @@ jobs:
parser-silent:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
steps:
- uses: actions/checkout@v2


@ -16,6 +16,7 @@ jobs:
space-paths-linux:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30
env:
MY_V_PATH: '你好 my $path, @с интервали'
@ -41,6 +42,7 @@ jobs:
space-paths-macos:
runs-on: macOS-latest
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30
env:
MY_V_PATH: '你好 my $path, @с интервали'
@ -69,6 +71,7 @@ jobs:
space-paths-windows:
runs-on: windows-2022
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30
env:
MY_V_PATH: 'path with some $punctuation, and some spaces'


@ -2,57 +2,56 @@ name: Periodic
on:
schedule:
- cron: '0 */2 * * *'
- cron: '0 */6 * * *'
jobs:
network-tests-ubuntu:
runs-on: ubuntu-20.04
timeout-minutes: 30
env:
V_CI_PERIODIC: 1
V_CI_PERIODIC: 1
steps:
- uses: actions/checkout@v2
- name: Install dependencies
run: sudo apt-get install --quiet -y libssl-dev sqlite3 libsqlite3-dev valgrind
- name: Build v
run: make -j4
- name: Symlink V
run: sudo ./v symlink
## - name: Run network tests
## run: ./v -d network test vlib/net
- uses: actions/checkout@v2
- name: Install dependencies 1
run: sudo apt-get install --quiet -y libssl-dev sqlite3 libsqlite3-dev
- name: Build v
run: make
- name: Symlink V
run: sudo ./v symlink
## - name: Run network tests
## run: ./v -d network test vlib/net
network-tests-macos:
runs-on: macOS-latest
timeout-minutes: 30
env:
V_CI_PERIODIC: 1
V_CI_PERIODIC: 1
steps:
- uses: actions/checkout@v2
- name: Setup openssl library path
run: export LIBRARY_PATH="$LIBRARY_PATH:/usr/local/opt/openssl/lib/"
- name: Build V
run: make -j4
- name: Symlink V
run: sudo ./v symlink
- name: Ensure thirdparty/cJSON/cJSON.o is compiled, before running tests.
run: ./v examples/json.v
## - name: Run network tests
## run: ./v -d network test vlib/net
- uses: actions/checkout@v2
- name: Setup openssl library path
run: export LIBRARY_PATH="$LIBRARY_PATH:/usr/local/opt/openssl/lib/"
- name: Build V
run: make
- name: Symlink V
run: sudo ./v symlink
- name: Ensure thirdparty/cJSON/cJSON.o is compiled, before running tests.
run: ./v examples/json.v
## - name: Run network tests
## run: ./v -d network test vlib/net
network-windows-msvc:
runs-on: windows-2019
timeout-minutes: 30
env:
V_CI_PERIODIC: 1
VFLAGS: -cc msvc
V_CI_PERIODIC: 1
VFLAGS: -cc msvc
steps:
- uses: actions/checkout@v2
- name: Build
run: |
echo %VFLAGS%
echo $VFLAGS
.\make.bat -msvc
## - name: Run network tests
## run: .\v.exe -d network test vlib/net
- uses: actions/checkout@v2
- name: Build
run: |
echo %VFLAGS%
echo $VFLAGS
.\make.bat -msvc
## - name: Run network tests
## run: .\v.exe -d network test vlib/net


@ -11,6 +11,7 @@ on:
jobs:
v-compiles-sdl-examples:
runs-on: ubuntu-18.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30
env:
VFLAGS: -cc tcc


@ -11,6 +11,7 @@ on:
jobs:
toml-module-pass-external-test-suites:
runs-on: ubuntu-18.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30
env:
TOML_BS_TESTS_PATH: vlib/toml/tests/testdata/burntsushi/toml-test


@ -9,12 +9,13 @@ on:
- "**.md"
concurrency:
group: build-other-${{ github.event.pull_request.number || github.sha }}
group: build-v-apps-and-modules-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true
jobs:
v-apps-compile:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
steps:
- uses: actions/checkout@v2
@ -24,34 +25,94 @@ jobs:
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install --quiet -y libgc-dev
sudo apt-get install --quiet -y libsodium-dev libssl-dev sqlite3 libsqlite3-dev valgrind
sudo apt-get install --quiet -y libfreetype6-dev libxi-dev libxcursor-dev libgl-dev
sudo apt-get install --quiet -y xfonts-75dpi xfonts-base
- name: Install UI through VPM
continue-on-error: true
run: |
echo "Official VPM modules should be installable"
./v install ui
sudo apt-get install --quiet -y libgc-dev libsodium-dev libssl-dev sqlite3 libsqlite3-dev valgrind libfreetype6-dev libxi-dev libxcursor-dev libgl-dev xfonts-75dpi xfonts-base
sudo apt-get install --quiet -y --no-install-recommends gfortran liblapacke-dev libopenblas-dev
- name: Build V Language Server (VLS)
continue-on-error: true
run: |
echo "Clone VLS"
git clone --depth 1 https://github.com/vlang/vls
git clone --depth 1 https://github.com/vlang/vls /tmp/vls
echo "Build VLS"
pushd vls; v cmd/vls ; popd
v /tmp/vls/cmd/vls
echo "Build VLS with -prod"
pushd vls; v -prod cmd/vls; popd
v -prod /tmp/vls/cmd/vls
echo "Build VLS with -gc boehm -skip-unused"
pushd vls; v -gc boehm -skip-unused cmd/vls; popd
v -gc boehm -skip-unused /tmp/vls/cmd/vls
- name: Build V Coreutils
run: |
echo "Clone Coreutils"
git clone --depth 1 https://github.com/vlang/coreutils /tmp/coreutils
echo "Build Coreutils"
cd /tmp/coreutils; make
- name: Build VAB
run: |
echo "Install VAB"
v install vab
echo "Build vab"
v ~/.vmodules/vab
echo "Build vab with -gc boehm -skip-unused"
v -gc boehm -skip-unused ~/.vmodules/vab
- name: Build Gitly
run: |
echo "Install markdown"
v install markdown
echo "Clone Gitly"
git clone https://github.com/vlang/gitly /tmp/gitly
echo "Build Gitly"
v /tmp/gitly
echo "Build Gitly with -autofree"
v -autofree /tmp/gitly
echo "Run first_run.v"
v run /tmp/gitly/tests/first_run.v
# /tmp/gitly/gitly -ci_run
- name: Build libsodium
run: |
echo "Install the libsodium wrapper"
v install libsodium
echo "Test libsodium"
VJOBS=1 v test ~/.vmodules/libsodium
- name: Build VEX
run: |
echo "Install Vex"
v install nedpals.vex
echo "Compile all of the Vex examples"
v should-compile-all ~/.vmodules/nedpals/vex/examples
echo "Compile the simple Vex example with -gc boehm -skip-unused"
v -gc boehm -skip-unused ~/.vmodules/nedpals/vex/examples/simple_example.v
echo "Run Vex Tests"
v test ~/.vmodules/nedpals/vex
- name: Build go2v
run: |
echo "Clone Go2V"
git clone --depth=1 https://github.com/vlang/go2v /tmp/go2v/
echo "Build Go2V"
v /tmp/go2v/
echo "Run Go2V tests"
VJOBS=1 v -stats test /tmp/go2v/
- name: Build vlang/pdf
run: |
v install pdf
echo "PDF examples should compile"
v should-compile-all ~/.vmodules/pdf/examples
- name: Install UI through VPM
run: |
echo "Official VPM modules should be installable"
v install ui
echo "Examples of UI should compile"
v ~/.vmodules/ui/examples/build_examples.vsh
- name: Build VSL
continue-on-error: true
run: |
git clone --depth 1 https://github.com/vlang/vsl ~/.vmodules/vsl
sudo apt-get install --quiet -y --no-install-recommends gfortran liblapacke-dev libopenblas-dev libgc-dev
echo "Install VSL"
v install vsl
echo "Execute Tests using Pure V Backend"
~/.vmodules/vsl/bin/test
echo "Execute Tests using Pure V Backend with Pure V Math"
@ -62,12 +123,10 @@ jobs:
~/.vmodules/vsl/bin/test --use-cblas --use-gc boehm
- name: Build VTL
continue-on-error: true
run: |
echo "Clone VTL"
git clone --depth 1 https://github.com/vlang/vtl ~/.vmodules/vtl
echo "Install VTL"
v install vtl
echo "Install dependencies"
sudo apt-get install --quiet -y --no-install-recommends gfortran liblapacke-dev libopenblas-dev libgc-dev
echo "Execute Tests using Pure V Backend"
~/.vmodules/vtl/bin/test
echo "Execute Tests using Pure V Backend with Pure V Math"
@ -76,70 +135,3 @@ jobs:
~/.vmodules/vtl/bin/test --use-gc boehm
echo "Execute Tests using Pure V Backend with Pure V Math and Garbage Collection enabled"
~/.vmodules/vtl/bin/test --use-cblas --use-gc boehm
- name: Build VAB
continue-on-error: true
run: |
echo "Clone vab"
git clone --depth 1 https://github.com/vlang/vab
echo "Build vab"
cd vab; ../v ./vab.v ; cd ..
echo "Build vab with -gc boehm -skip-unused"
cd vab; ../v -gc boehm -skip-unused ./vab.v ; cd ..
- name: Build Gitly
continue-on-error: true
run: |
echo "Clone markdown"
git clone https://github.com/vlang/markdown ~/.vmodules/markdown
echo "Clone Gitly"
git clone --depth 1 https://github.com/vlang/gitly
cd gitly
echo "Build Gitly"
../v .
echo "Build Gitly with -autofree"
../v -autofree .
echo "Run first_run.v"
../v run tests/first_run.v
# ./gitly -ci_run
- name: Build libsodium
continue-on-error: true
run: |
echo "Install libsodium-dev package"
sudo apt-get install --quiet -y libsodium-dev
echo "Clone the libsodium wrapper"
git clone https://github.com/vlang/libsodium ~/.vmodules/libsodium
echo "Test libsodium"
VJOBS=1 ./v -stats test ~/.vmodules/libsodium
- name: Build VEX
continue-on-error: true
run: |
echo "Install Vex dependencies"
sudo apt-get install --quiet -y libsodium-dev libssl-dev sqlite3 libsqlite3-dev
echo "Clone Vex"
mkdir -p ~/.vmodules/nedpals; git clone https://github.com/nedpals/vex ~/.vmodules/nedpals/vex
echo "Compile all of the Vex examples"
./v should-compile-all ~/.vmodules/nedpals/vex/examples
echo "Compile the simple Vex example with -gc boehm -skip-unused"
./v -gc boehm -skip-unused ~/.vmodules/nedpals/vex/examples/simple_example.v
echo "Run Vex Tests"
./v test ~/.vmodules/nedpals/vex
- name: Build go2v
continue-on-error: true
run: |
echo "Clone go2v"
clone --depth=1 https://github.com/vlang/go2v go2v/
echo "Build go2v"
./v go2v/
## echo "Run tests for go2v"
## VJOBS=1 ./v -stats test go2v/
- name: Build vlang/pdf
continue-on-error: true
run: |
git clone --depth=1 https://github.com/vlang/pdf ~/.vmodules/pdf/
echo "PDF examples should compile"
./v should-compile-all ~/.vmodules/pdf/examples


@ -11,6 +11,7 @@ on:
jobs:
vab-compiles-v-examples:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
env:
VAB_FLAGS: --api 30 --build-tools 29.0.0 -v 3
@ -24,20 +25,14 @@ jobs:
- name: Build V
run: make && sudo ./v symlink
- name: Checkout vab
uses: actions/checkout@v2
with:
repository: vlang/vab
path: vab
- name: Build vab
- name: Install vab
run: |
cd vab
v -g vab.v
sudo ln -s $(pwd)/vab /usr/local/bin/vab
v install vab
v -g ~/.vmodules/vab
sudo ln -s ~/.vmodules/vab/vab /usr/local/bin/vab
- name: Run tests
run: v test vab
run: v test ~/.vmodules/vab
- name: Run vab --help
run: vab --help


@ -13,6 +13,9 @@ on:
jobs:
vinix-build:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
env:
VFLAGS: -gc none
steps:
- uses: actions/checkout@v2


@ -11,6 +11,7 @@ on:
jobs:
websocket_tests:
runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121
env:
VFLAGS: -cc tcc -no-retry-compilation

.woodpecker.yml 100644 (61 lines added)

@ -0,0 +1,61 @@
platform: 'linux/amd64'
branches: ['master']
pipeline:
gen-vc:
# This is what the official CI uses as well
image: 'ubuntu:latest'
secrets:
- deploy_key
commands:
# Install necessary dependencies
- apt-get update -y && apt-get install openssh-client git build-essential -y
# Build the compiler
- make
# Run ssh-agent
- eval $(ssh-agent -s)
# Add ssh key
- echo "$DEPLOY_KEY" | tr -d '\r' | ssh-add -
# Create ssh dir with proper permissions
- mkdir -p ~/.ssh
- chmod 700 ~/.ssh
# Configure git credentials
- git config --global user.email 'vbot@rustybever.be'
- git config --global user.name 'vbot'
# Verify SSH keys
- ssh-keyscan git.rustybever.be > ~/.ssh/known_hosts
# The following is copied over from the official repo's CI
# https://github.com/vlang/v/blob/master/.github/workflows/gen_vc.yml
- export "COMMIT_HASH=$(git rev-parse --short HEAD)"
- export "COMMIT_MSG=$(git log -1 --oneline --pretty='%s' HEAD)"
- rm -rf vc
- git clone --depth=1 'git@git.rustybever.be:vieter/vc.git'
- rm -rf vc/v.c vc/v_win.c
- ./v -o vc/v.c -os cross cmd/v
- ./v -o vc/v_win.c -os windows -cc msvc cmd/v
- sed -i "1s/^/#define V_COMMIT_HASH \"$COMMIT_HASH\"\n/" vc/v.c
- sed -i "1s/^/#define V_COMMIT_HASH \"$COMMIT_HASH\"\n/" vc/v_win.c
# ensure the C files are over 5000 lines long, as a safety measure
- '[ $(wc -l < vc/v.c) -gt 5000 ]'
- '[ $(wc -l < vc/v_win.c) -gt 5000 ]'
- git -C vc add v.c v_win.c
- 'git -C vc commit -m "[v:master] $COMMIT_HASH - $COMMIT_MSG"'
# in case there are recent commits:
- git -C vc pull --rebase origin main
- git -C vc push
when:
event: push
publish:
image: woodpeckerci/plugin-docker-buildx
secrets: [ docker_username, docker_password ]
settings:
repo: chewingbever/vlang
tag: latest
dockerfile: Dockerfile.builder
platforms: [ linux/arm64/v8, linux/amd64 ]
# The build can run every time, because we should only push when there's
# actual changes
when:
event: push


@ -0,0 +1,32 @@
matrix:
PLATFORM:
- 'linux/amd64'
- 'linux/arm64'
platform: ${PLATFORM}
branches: ['master']
depends_on:
- 'vc'
pipeline:
build:
image: 'menci/archlinuxarm:base-devel'
commands:
# Update packages
- pacman -Syu --noconfirm
# Create non-root user to perform build & switch to their home
- groupadd -g 1000 builder
- useradd -mg builder builder
- chown -R builder:builder "$PWD"
- "echo 'builder ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers"
- su builder
# Build the package
- makepkg -s --noconfirm --needed
publish:
image: 'curlimages/curl'
secrets:
- 'vieter_api_key'
commands:
# Publish the package
- 'for pkg in $(ls -1 *.pkg*); do curl -f -XPOST -T "$pkg" -H "X-API-KEY: $VIETER_API_KEY" https://arch.r8r.be/vieter/publish; done'


@ -0,0 +1,18 @@
platform: 'linux/amd64'
branches: ['master']
depends_on:
- 'vc'
pipeline:
build-publish:
image: 'woodpeckerci/plugin-docker-buildx'
secrets: [ docker_username, docker_password ]
settings:
repo: chewingbever/vlang
tag: latest
dockerfile: Dockerfile.builder
platforms: [ linux/arm64/v8, linux/amd64 ]
# The build can run every time, because we should only push when there are
# actual changes
when:
event: push

View File

@ -0,0 +1,48 @@
platform: 'linux/amd64'
branches: ['master']
pipeline:
gen-vc:
# This is what the official CI uses as well
image: 'ubuntu:latest'
secrets:
- deploy_key
commands:
# Install necessary dependencies
- apt-get update -y && apt-get install openssh-client git build-essential -y
# Build the compiler
- make
# Run ssh-agent
- eval $(ssh-agent -s)
# Add ssh key
- echo "$DEPLOY_KEY" | tr -d '\r' | ssh-add -
# Create ssh dir with proper permissions
- mkdir -p ~/.ssh
- chmod 700 ~/.ssh
# Configure git credentials
- git config --global user.email 'vbot@rustybever.be'
- git config --global user.name 'vbot'
# Verify SSH keys
- ssh-keyscan git.rustybever.be > ~/.ssh/known_hosts
# The following is copied over from the official repo's CI
# https://github.com/vlang/v/blob/master/.github/workflows/gen_vc.yml
- export "COMMIT_HASH=$(git rev-parse --short HEAD)"
- export "COMMIT_MSG=$(git log -1 --oneline --pretty='%s' HEAD)"
- rm -rf vc
- git clone --depth=1 'git@git.rustybever.be:vieter/vc.git'
- rm -rf vc/v.c vc/v_win.c
- ./v -o vc/v.c -os cross cmd/v
- ./v -o vc/v_win.c -os windows -cc msvc cmd/v
- sed -i "1s/^/#define V_COMMIT_HASH \"$COMMIT_HASH\"\n/" vc/v.c
- sed -i "1s/^/#define V_COMMIT_HASH \"$COMMIT_HASH\"\n/" vc/v_win.c
# ensure the C files are over 5000 lines long, as a safety measure
- '[ $(wc -l < vc/v.c) -gt 5000 ]'
- '[ $(wc -l < vc/v_win.c) -gt 5000 ]'
- git -C vc add v.c v_win.c
- 'git -C vc commit -m "[v:master] $COMMIT_HASH - $COMMIT_MSG"'
# in case there are recent commits:
- git -C vc pull --rebase origin main
- git -C vc push
when:
event: push

View File

@ -191,7 +191,6 @@ to create a copy of the compiler rather than replacing it with `v self`.
| `debug_codegen` | Prints automatically generated V code during the scanning phase |
| `debug_interface_table` | Prints generated interfaces during C generation |
| `debug_interface_type_implements` | Prints debug information when checking that a type implements an interface |
| `debug_embed_file_in_prod` | Prints debug information about the embedded files with `$embed_file('somefile')` |
| `print_vweb_template_expansions` | Prints vweb compiled HTML files |
| `time_checking` | Prints the time spent checking files and other related information |
| `time_parsing` | Prints the time spent parsing files and other related information |
@ -204,3 +203,4 @@ to create a copy of the compiler rather than replacing it with `v self`.
| `trace_thirdparty_obj_files` | Prints details about built thirdparty obj files |
| `trace_usecache` | Prints details when -usecache is used |
| `trace_embed_file` | Prints details when $embed_file is used |
| `embed_only_metadata` | Embed only the metadata for the file(s) with `$embed_file('somefile')`; faster; for development, *not* distribution |
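Note: a minimal sketch of how the `$embed_file` related switches above come into play; the file name `message.txt` is hypothetical and must exist next to the source file. Building with `-d trace_embed_file` prints the embedding details, while `-d embed_only_metadata` skips baking the file contents into the executable during development:
```
import os

fn main() {
	// message.txt is a hypothetical file, embedded at compile time
	embedded := $embed_file('message.txt')
	// len and path are metadata, available even with -d embed_only_metadata
	println('embedded $embedded.len bytes from $embedded.path')
	os.write_file('message_copy.txt', embedded.to_string()) or { panic(err) }
}
```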

Dockerfile.builder (new file, mode 100644, 33 lines added)
View File

@ -0,0 +1,33 @@
FROM alpine:3.16
ARG TARGETPLATFORM
WORKDIR /opt/vlang
ENV VVV /opt/vlang
ENV PATH /opt/vlang:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
ENV VFLAGS -cc gcc -gc none
ENV V_PATH /opt/vlang/v
RUN ln -s /opt/vlang/v /usr/bin/v && \
apk --no-cache add \
git make gcc curl openssl \
musl-dev \
openssl-libs-static openssl-dev \
zlib-static bzip2-static xz-dev expat-static zstd-static lz4-static \
sqlite-static sqlite-dev \
libx11-dev glfw-dev freetype-dev \
libarchive-static libarchive-dev \
diffutils \
mandoc
RUN git clone https://git.rustybever.be/vieter/v /opt/vlang && \
make && \
v -version
RUN if [ "$TARGETPLATFORM" = 'linux/amd64' ]; then \
wget -O /usr/local/bin/mc https://dl.min.io/client/mc/release/linux-amd64/mc && \
chmod +x /usr/local/bin/mc ; \
fi
CMD ["v"]

View File

@ -5,7 +5,7 @@ TMPDIR ?= /tmp
VROOT ?= .
VC ?= ./vc
V ?= ./v
VCREPO ?= https://github.com/vlang/vc
VCREPO ?= https://git.rustybever.be/vieter/vc
TCCREPO ?= https://github.com/vlang/tccbin
VCFILE := v.c
@ -28,6 +28,9 @@ endif
ifeq ($(_SYS),Linux)
LINUX := 1
TCCOS := linux
ifneq ($(shell ldd /bin/ls | grep musl),)
TCCOS := linuxmusl
endif
endif
ifeq ($(_SYS),Darwin)
@ -113,7 +116,7 @@ endif
check_for_working_tcc:
@$(TMPTCC)/tcc.exe --version > /dev/null 2> /dev/null || echo "The executable '$(TMPTCC)/tcc.exe' does not work."
fresh_vc:
rm -rf $(VC)
$(GITFASTCLONE) $(VCREPO) $(VC)

View File

@ -7,7 +7,7 @@ LDFLAGS ?=
all:
rm -rf vc/
git clone --depth 1 --quiet https://github.com/vlang/vc
git clone --depth 1 --quiet https://git.rustybever.be/vieter/vc
$(CC) $(CFLAGS) -std=gnu11 -w -o v1 vc/v.c -lm -lexecinfo -lpthread $(LDFLAGS)
./v1 -no-parallel -o v2 $(VFLAGS) cmd/v
./v2 -o v $(VFLAGS) cmd/v

PKGBUILD (new file, mode 100644, 54 lines added)
View File

@ -0,0 +1,54 @@
# Maintainer: Jef Roosens
# This PKGBUILD is mostly copied over from the AUR
# https://aur.archlinux.org/packages/vlang-git
pkgname=vieter-v
pkgver=0.2.2.r796.gfbc02cbc5
pkgrel=1
pkgdesc='Simple, fast, safe, compiled language for developing maintainable software'
arch=('x86_64' 'aarch64')
url='https://vlang.io'
license=('MIT')
depends=('glibc')
makedepends=('git')
optdepends=('glfw: Needed for graphics support'
'freetype2: Needed for graphics support'
'openssl: Needed for http support')
provides=('vlang')
conflicts=('v' 'vlang' 'vlang-bin')
source=('vlang::git+https://git.rustybever.be/Chewing_Bever/v')
sha256sums=('SKIP')
pkgver() {
cd "${srcdir}/vlang"
# Weekly tags are considered older than semantic tags that are older than
# them, so to prevent version resolution problems we exclude weekly tags.
git describe --long --tags --exclude "weekly*" | sed 's/^v//;s/\([^-]*-g\)/r\1/;s/-/./g'
}
build() {
cd "${srcdir}/vlang"
# We don't require optimizations when compiling the bootstrap executable and
# -O2 actually breaks `./v self` (resulting in "cgen error:"), so we empty
# CFLAGS and LDFLAGS to ensure successful compilation.
CFLAGS="" LDFLAGS="" prod=1 make
# vpm and vdoc fail to compile with "unsupported linker option" when LDFLAGS
# is set
LDFLAGS="" ./v build-tools
}
package() {
cd "${srcdir}/vlang"
install -d "$pkgdir/usr/lib/vlang" "$pkgdir/usr/share/vlang" "$pkgdir/usr/bin"
install -Dm644 LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
install -Dm755 v "$pkgdir/usr/lib/vlang"
cp -a cmd "$pkgdir/usr/lib/vlang/"
cp -a examples "$pkgdir/usr/share/vlang/"
cp -a thirdparty "$pkgdir/usr/lib/vlang/"
cp -a vlib "$pkgdir/usr/lib/vlang/"
cp v.mod "$pkgdir/usr/lib/vlang/"
ln -s /usr/lib/vlang/v "$pkgdir/usr/bin/v"
touch "$pkgdir/usr/lib/vlang/cmd/tools/.disable_autorecompilation"
}

View File

@ -0,0 +1,67 @@
import os
import time
import v.ast
import v.pref
import v.parser
import v.errors
import v.scanner
fn main() {
files := os.args#[1..]
if files.len > 0 && files[0].starts_with('@') {
lst_path := files[0].all_after('@')
listed_files := os.read_file(lst_path)?.split('\n')
process_files(listed_files)?
return
}
process_files(files)?
}
fn process_files(files []string) ? {
mut table := ast.new_table()
mut pref := pref.new_preferences()
pref.is_fmt = true
pref.skip_warnings = true
pref.output_mode = .silent
mut sw := time.new_stopwatch()
mut total_us := i64(0)
mut total_bytes := i64(0)
mut total_tokens := i64(0)
for f in files {
if f == '' {
continue
}
if f.ends_with('_test.v') {
continue
}
// do not measure the scanning, but only the parsing:
mut p := new_parser(f, .skip_comments, table, pref)
///
sw.restart()
_ := p.parse()
f_us := sw.elapsed().microseconds()
///
total_us += f_us
total_bytes += p.scanner.text.len
total_tokens += p.scanner.all_tokens.len
println('${f_us:10}us ${p.scanner.all_tokens.len:10} ${p.scanner.text.len:10} ${(f64(p.scanner.text.len) / p.scanner.all_tokens.len):7.3} ${p.errors.len:4} $f')
}
println('${total_us:10}us ${total_tokens:10} ${total_bytes:10} ${(f64(total_tokens) / total_bytes):7.3} | speed: ${(f64(total_bytes) / total_us):2.5f} MB/s')
}
fn new_parser(path string, comments_mode scanner.CommentsMode, table &ast.Table, pref &pref.Preferences) &parser.Parser {
mut p := &parser.Parser{
scanner: scanner.new_scanner_file(path, comments_mode, pref) or { panic(err) }
comments_mode: comments_mode
table: table
pref: pref
scope: &ast.Scope{
start_pos: 0
parent: table.global_scope
}
errors: []errors.Error{}
warnings: []errors.Warning{}
}
p.set_path(path)
return p
}
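Note: the measurement tool above takes either plain file paths or a single `@list` argument pointing at a file with one path per line. A hedged sketch of driving it that way; the tool path and the temporary list location are assumptions:
```
import os

fn main() {
	// collect every .v file under vlib/ into a list file, one path per line
	files := os.walk_ext('vlib', '.v')
	os.write_file('/tmp/vfiles.lst', files.join('\n')) or { panic(err) }
	// adjust the path below to wherever parser_speed.v actually lives
	exit(os.system('v run cmd/tools/measure/parser_speed.v @/tmp/vfiles.lst'))
}
```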

View File

@ -0,0 +1,42 @@
import os
import time
import v.scanner
import v.pref
fn main() {
files := os.args#[1..]
if files.len > 0 && files[0].starts_with('@') {
lst_path := files[0].all_after('@')
listed_files := os.read_file(lst_path)?.split('\n')
process_files(listed_files)?
return
}
process_files(files)?
}
fn process_files(files []string) ? {
mut pref := pref.new_preferences()
pref.is_fmt = true
pref.skip_warnings = true
pref.output_mode = .silent
mut sw := time.new_stopwatch()
mut total_us := i64(0)
mut total_bytes := i64(0)
mut total_tokens := i64(0)
for f in files {
if f == '' {
continue
}
if f.ends_with('_test.v') {
continue
}
sw.restart()
s := scanner.new_scanner_file(f, .skip_comments, pref)?
f_us := sw.elapsed().microseconds()
total_us += f_us
total_bytes += s.text.len
total_tokens += s.all_tokens.len
println('${f_us:10}us ${s.all_tokens.len:10} ${s.text.len:10} ${(f64(s.text.len) / s.all_tokens.len):7.3f} $f')
}
println('${total_us:10}us ${total_tokens:10} ${total_bytes:10} ${(f64(total_tokens) / total_bytes):7.3f} | speed: ${(f64(total_bytes) / total_us):2.5f} MB/s')
}

View File

@ -24,6 +24,7 @@ pub fn cprint(omessage string) {
message = term.cyan(message)
}
print(message)
flush_stdout()
}
pub fn cprint_strong(omessage string) {
@ -32,16 +33,19 @@ pub fn cprint_strong(omessage string) {
message = term.bright_green(message)
}
print(message)
flush_stdout()
}
pub fn cprintln(omessage string) {
cprint(omessage)
println('')
flush_stdout()
}
pub fn cprintln_strong(omessage string) {
cprint_strong(omessage)
println('')
flush_stdout()
}
pub fn verbose_trace(label string, message string) {
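Note: the added `flush_stdout()` calls matter for progress output that is printed without a trailing newline, since stdout is usually line buffered and partial lines can otherwise linger in the buffer (especially in CI logs). A standalone sketch of the pattern:
```
import time

fn main() {
	for i in 1 .. 6 {
		// \r rewrites the same line; without flushing, the partial line may
		// never reach the terminal or the CI log until the program exits
		print('\rprogress: $i/5')
		flush_stdout()
		time.sleep(200 * time.millisecond)
	}
	println('')
}
```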

View File

@ -123,6 +123,7 @@ pub fn (mut ts TestSession) print_messages() {
// progress mode, the last line is rewritten many times:
if is_ok && !ts.silent_mode {
print('\r$empty\r$msg')
flush_stdout()
} else {
// the last \n is needed, so SKIP/FAIL messages
// will not get overwritten by the OK ones
@ -560,6 +561,7 @@ pub fn eheader(msg string) {
pub fn header(msg string) {
println(term.header_left(msg, '-'))
flush_stdout()
}
pub fn setup_new_vtmp_folder() string {

View File

@ -200,8 +200,13 @@ fn (mut context Context) parse_options() ? {
}
}
fn flushed_print(s string) {
print(s)
flush_stdout()
}
fn (mut context Context) clear_line() {
print(context.cline)
flushed_print(context.cline)
}
fn (mut context Context) expand_all_commands(commands []string) []string {
@ -247,7 +252,7 @@ fn (mut context Context) run() {
println('Series: ${si:4}/${context.series:-4}, command: $cmd')
if context.warmup > 0 && run_warmups < context.commands.len {
for i in 1 .. context.warmup + 1 {
print('${context.cgoback}warming up run: ${i:4}/${context.warmup:-4} for ${cmd:-50s} took ${duration:6} ms ...')
flushed_print('${context.cgoback}warming up run: ${i:4}/${context.warmup:-4} for ${cmd:-50s} took ${duration:6} ms ...')
mut sw := time.new_stopwatch()
res := os.execute(cmd)
if res.exit_code != 0 {
@ -260,9 +265,9 @@ fn (mut context Context) run() {
context.clear_line()
for i in 1 .. (context.count + 1) {
avg := f64(sum) / f64(i)
print('${context.cgoback}Average: ${avg:9.3f}ms | run: ${i:4}/${context.count:-4} | took ${duration:6} ms')
flushed_print('${context.cgoback}Average: ${avg:9.3f}ms | run: ${i:4}/${context.count:-4} | took ${duration:6} ms')
if context.show_output {
print(' | result: ${oldres:s}')
flushed_print(' | result: ${oldres:s}')
}
mut sw := time.new_stopwatch()
res := scripting.exec(cmd) or { continue }
@ -288,7 +293,7 @@ fn (mut context Context) run() {
context.results[icmd].atiming = new_aints(context.results[icmd].timings, context.nmins,
context.nmaxs)
context.clear_line()
print(context.cgoback)
flushed_print(context.cgoback)
mut m := map[string][]int{}
ioutputs := context.results[icmd].outputs
for o in ioutputs {
@ -358,7 +363,7 @@ fn (mut context Context) show_diff_summary() {
println('context: $context')
}
if int(base) > context.fail_on_maxtime {
print(performance_regression_label)
flushed_print(performance_regression_label)
println('average time: ${base:6.1f} ms > $context.fail_on_maxtime ms threshold.')
exit(2)
}
@ -367,7 +372,7 @@ fn (mut context Context) show_diff_summary() {
}
fail_threshold_max := f64(context.fail_on_regress_percent)
if first_cmd_percentage > fail_threshold_max {
print(performance_regression_label)
flushed_print(performance_regression_label)
println('${first_cmd_percentage:5.1f}% > ${fail_threshold_max:5.1f}% threshold.')
exit(3)
}

View File

@ -0,0 +1,45 @@
// Copyright (c) 2019-2022 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license that can be found in the LICENSE file.
module main
import os
import v.util
fn main() {
vmodules := os.vmodules_dir()
c2v_dir := os.join_path(vmodules, 'c2v_alpha')
c2v_bin := os.join_path(c2v_dir, 'c2v')
// Git clone c2v
if !os.exists(c2v_dir) {
println('C2V is not installed. Cloning C2V to $c2v_dir ...')
os.chdir(vmodules)?
res := os.execute('git clone --depth 1 git@github.com:/vlang/c2v_alpha.git')
if res.exit_code != 0 {
eprintln('Failed to download C2V. Perhaps it is not released yet? Is it June 20 yet?')
exit(1)
}
}
// Compile c2v
if !os.exists(c2v_bin) {
os.chdir(c2v_dir)?
println('Compiling c2v ...')
res2 := os.execute('v -keepc -g -experimental -o c2v .')
if res2.exit_code != 0 {
eprintln(res2.output)
eprintln('Failed to compile C2V. This should never happen, please report it via GitHub.')
exit(2)
}
}
if os.args.len < 3 {
eprintln('Wrong number of args. Use `v translate file.c`.')
exit(3)
}
passed_args := util.args_quote_paths(os.args[2..])
// println(passed_args)
os.chdir(os.wd_at_startup)?
res := os.system('$c2v_bin $passed_args')
if res != 0 {
eprintln('C2V failed to translate the C files. Please report it via GitHub.')
exit(4)
}
}

View File

@ -1358,6 +1358,7 @@ fn (t Tree) postfix_expr(node ast.PostfixExpr) &Node {
obj.add_terse('expr', t.expr(node.expr))
obj.add('auto_locked', t.string_node(node.auto_locked))
obj.add('pos', t.pos(node.pos))
obj.add('is_c2v_prefix', t.bool_node(node.is_c2v_prefix))
return obj
}

View File

@ -76,7 +76,7 @@ SUBCMD:
// Snooped from cmd/v/v.v, vlib/v/pref/pref.v
const (
auto_complete_commands = [
auto_complete_commands = [
// simple_cmd
'ast',
'doc',
@ -114,7 +114,6 @@ const (
'help',
'new',
'init',
'complete',
'translate',
'self',
'search',
@ -130,8 +129,13 @@ const (
'run',
'build',
'build-module',
'missdoc',
]
auto_complete_flags = [
// Entries in the flag arrays below should be entered as is:
// * Short flags, e.g.: "-v", should be entered: '-v'
// * Long flags, e.g.: "--version", should be entered: '--version'
// * Single-dash flags, e.g.: "-version", should be entered: '-version'
auto_complete_flags = [
'-apk',
'-show-timings',
'-check-syntax',
@ -150,6 +154,7 @@ const (
'-autofree',
'-compress',
'-freestanding',
'-no-parallel',
'-no-preludes',
'-prof',
'-profile',
@ -190,7 +195,7 @@ const (
'-version',
'--version',
]
auto_complete_flags_doc = [
auto_complete_flags_doc = [
'-all',
'-f',
'-h',
@ -209,7 +214,7 @@ const (
'-s',
'-l',
]
auto_complete_flags_fmt = [
auto_complete_flags_fmt = [
'-c',
'-diff',
'-l',
@ -217,7 +222,7 @@ const (
'-debug',
'-verify',
]
auto_complete_flags_bin2v = [
auto_complete_flags_bin2v = [
'-h',
'--help',
'-m',
@ -227,22 +232,46 @@ const (
'-w',
'--write',
]
auto_complete_flags_shader = [
'help',
'h',
'force-update',
'u',
'verbose',
'v',
'slang',
'l',
'output',
'o',
auto_complete_flags_shader = [
'--help',
'-h',
'--force-update',
'-u',
'--verbose',
'-v',
'--slang',
'-l',
'--output',
'-o',
]
auto_complete_flags_self = [
auto_complete_flags_missdoc = [
'--help',
'-h',
'--tags',
'-t',
'--deprecated',
'-d',
'--private',
'-p',
'--no-line-numbers',
'-n',
'--exclude',
'-e',
'--relative-paths',
'-r',
'--js',
'--verify',
'--diff',
]
auto_complete_flags_bump = [
'--patch',
'--minor',
'--major',
]
auto_complete_flags_self = [
'-prod',
]
auto_complete_compilers = [
auto_complete_compilers = [
'cc',
'gcc',
'tcc',
@ -372,12 +401,17 @@ fn auto_complete_request(args []string) []string {
parent_command = parts[i]
break
}
get_flags := fn (base []string, flag string) []string {
if flag.len == 1 { return base
} else { return base.filter(it.starts_with(flag))
}
}
if part.starts_with('-') { // 'v -<tab>' -> flags.
if part.starts_with('-') { // 'v [subcmd] -<tab>' or 'v [subcmd] --<tab>'-> flags.
get_flags := fn (base []string, flag string) []string {
mut results := []string{}
for entry in base {
if entry.starts_with(flag) {
results << entry
}
}
return results
}
match parent_command {
'bin2v' { // 'v bin2v -<tab>'
list = get_flags(auto_complete_flags_bin2v, part)
@ -397,6 +431,12 @@ fn auto_complete_request(args []string) []string {
'shader' { // 'v shader -<tab>' -> flags.
list = get_flags(auto_complete_flags_shader, part)
}
'missdoc' { // 'v missdoc -<tab>' -> flags.
list = get_flags(auto_complete_flags_missdoc, part)
}
'bump' { // 'v bump -<tab>' -> flags.
list = get_flags(auto_complete_flags_bump, part)
}
else {
for flag in auto_complete_flags {
if flag == part {
@ -414,6 +454,11 @@ fn auto_complete_request(args []string) []string {
}
}
}
// Clear the list if the result is identical to the part examined
// (the flag must have already been completed)
if list.len == 1 && part == list[0] {
list.clear()
}
} else {
match part {
'help' { // 'v help <tab>' -> top level commands except "help".
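Note: the reworked `get_flags` helper above simply keeps the flags of a subcommand that start with the prefix typed so far. A standalone sketch of that filtering, using the new `v bump` flag list as sample data:
```
fn get_flags(base []string, flag string) []string {
	return base.filter(it.starts_with(flag))
}

fn main() {
	bump_flags := ['--patch', '--minor', '--major']
	println(get_flags(bump_flags, '--m')) // ['--minor', '--major']
	println(get_flags(bump_flags, '-')) // all three candidates
}
```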

View File

@ -183,6 +183,7 @@ fn (foptions &FormatOptions) format_pipe() {
// checker.new_checker(table, prefs).check(file_ast)
formatted_content := fmt.fmt(file_ast, table, prefs, foptions.is_debug)
print(formatted_content)
flush_stdout()
foptions.vlog('fmt.fmt worked and $formatted_content.len bytes were written to stdout.')
}
@ -279,6 +280,7 @@ fn (mut foptions FormatOptions) post_process_file(file string, formatted_file_pa
return
}
print(formatted_fc)
flush_stdout()
}
fn (f FormatOptions) str() string {

View File

@ -37,7 +37,7 @@ import flag
import toml
const (
tool_name = os.file_name(os.executable())
tool_name = 'vgret'
tool_version = '0.0.1'
tool_description = '\n Dump and/or compare rendered frames of `gg` based apps
@ -57,7 +57,7 @@ Examples:
const (
supported_hosts = ['linux']
// External tool executables
v_exe = vexe()
v_exe = os.getenv('VEXE')
idiff_exe = os.find_abs_path_of_executable('idiff') or { '' }
)
@ -105,11 +105,27 @@ mut:
config Config
}
fn (opt Options) verbose_execute(cmd string) os.Result {
opt.verbose_eprintln('Running `$cmd`')
return os.execute(cmd)
}
fn (opt Options) verbose_eprintln(msg string) {
if opt.verbose {
eprintln(msg)
}
}
fn main() {
if os.args.len == 1 {
println('Usage: $tool_name PATH \n$tool_description\n$tool_name -h for more help...')
if runtime_os !in supported_hosts {
eprintln('$tool_name is currently only supported on $supported_hosts hosts')
exit(1)
}
if os.args.len == 1 {
eprintln('Usage: $tool_name PATH \n$tool_description\n$tool_name -h for more help...')
exit(1)
}
mut fp := flag.new_flag_parser(os.args[1..])
fp.application(tool_name)
fp.version(tool_version)
@ -131,17 +147,17 @@ fn main() {
}
toml_conf := fp.string('toml-config', `t`, default_toml, 'Path or string with TOML configuration')
ensure_env(opt) or { panic(err) }
arg_paths := fp.finalize() or { panic(err) }
arg_paths := fp.finalize()?
if arg_paths.len == 0 {
println(fp.usage())
println('\nError missing arguments')
exit(1)
}
if !os.exists(tmp_dir) {
os.mkdir_all(tmp_dir)?
}
opt.config = new_config(opt.root_path, toml_conf)?
gen_in_path := arg_paths[0]
@ -154,13 +170,15 @@ fn main() {
all_paths_in_use := [path, gen_in_path, target_path]
for path_in_use in all_paths_in_use {
if !os.is_dir(path_in_use) {
panic('`$path_in_use` is not a directory')
eprintln('`$path_in_use` is not a directory')
exit(1)
}
}
if path == target_path || gen_in_path == target_path || gen_in_path == path {
panic('Compare paths can not be the same directory `$path`/`$target_path`/`$gen_in_path`')
eprintln('Compare paths can not be the same directory `$path`/`$target_path`/`$gen_in_path`')
exit(1)
}
compare_screenshots(opt, gen_in_path, target_path) or { panic(err) }
compare_screenshots(opt, gen_in_path, target_path)?
}
}
@ -184,21 +202,15 @@ fn generate_screenshots(mut opt Options, output_path string) ? {
rel_out_path = file
}
if opt.verbose {
eprintln('Compiling shaders (if needed) for `$file`')
}
sh_result := os.execute('${os.quoted_path(v_exe)} shader ${os.quoted_path(app_path)}')
opt.verbose_eprintln('Compiling shaders (if needed) for `$file`')
sh_result := opt.verbose_execute('${os.quoted_path(v_exe)} shader ${os.quoted_path(app_path)}')
if sh_result.exit_code != 0 {
if opt.verbose {
eprintln('Skipping shader compile for `$file` v shader failed with:\n$sh_result.output')
}
opt.verbose_eprintln('Skipping shader compile for `$file` v shader failed with:\n$sh_result.output')
continue
}
if !os.exists(dst_path) {
if opt.verbose {
eprintln('Creating output path `$dst_path`')
}
opt.verbose_eprintln('Creating output path `$dst_path`')
os.mkdir_all(dst_path)?
}
@ -221,18 +233,13 @@ fn compare_screenshots(opt Options, output_path string, target_path string) ? {
mut warns := map[string]string{}
for app_config in opt.config.apps {
screenshots := app_config.screenshots
if opt.verbose {
eprintln('Comparing $screenshots.len screenshots in `$output_path` with `$target_path`')
}
opt.verbose_eprintln('Comparing $screenshots.len screenshots in `$output_path` with `$target_path`')
for screenshot in screenshots {
relative_screenshot := screenshot.all_after(output_path + os.path_separator)
src := screenshot
target := os.join_path(target_path, relative_screenshot)
if opt.verbose {
eprintln('Comparing `$src` with `$target` with $app_config.compare.method')
}
opt.verbose_eprintln('Comparing `$src` with `$target` with $app_config.compare.method')
if app_config.compare.method == 'idiff' {
if idiff_exe == '' {
@ -242,14 +249,9 @@ fn compare_screenshots(opt Options, output_path string, target_path string) ? {
'.diff.tif')
flags := app_config.compare.flags.join(' ')
diff_cmd := '${os.quoted_path(idiff_exe)} $flags -abs -od -o ${os.quoted_path(diff_file)} -abs ${os.quoted_path(src)} ${os.quoted_path(target)}'
if opt.verbose {
eprintln('Running: $diff_cmd')
}
result := os.execute(diff_cmd)
if opt.verbose && result.exit_code == 0 {
eprintln('OUTPUT: \n$result.output')
result := opt.verbose_execute(diff_cmd)
if result.exit_code == 0 {
opt.verbose_eprintln('OUTPUT: \n$result.output')
}
if result.exit_code != 0 {
eprintln('OUTPUT: \n$result.output')
@ -278,15 +280,19 @@ fn compare_screenshots(opt Options, output_path string, target_path string) ? {
}
first := fails.keys()[0]
fail_copy := os.join_path(os.temp_dir(), 'fail.' + first.all_after_last('.'))
os.cp(first, fail_copy) or { panic(err) }
os.cp(first, fail_copy)?
eprintln('First failed file `$first` is copied to `$fail_copy`')
diff_file := os.join_path(os.temp_dir(), os.file_name(first).all_before_last('.') +
'.diff.tif')
diff_copy := os.join_path(os.temp_dir(), 'diff.tif')
if os.is_file(diff_file) {
os.cp(diff_file, diff_copy) or { panic(err) }
os.cp(diff_file, diff_copy)?
eprintln('First failed diff file `$diff_file` is copied to `$diff_copy`')
eprintln('Removing alpha channel from $diff_copy ...')
final_fail_result_file := os.join_path(os.temp_dir(), 'diff.png')
opt.verbose_execute('convert ${os.quoted_path(diff_copy)} -alpha off ${os.quoted_path(final_fail_result_file)}')
eprintln('Final diff file: `$final_fail_result_file`')
}
exit(1)
}
@ -295,25 +301,16 @@ fn compare_screenshots(opt Options, output_path string, target_path string) ? {
fn take_screenshots(opt Options, app AppConfig) ?[]string {
out_path := app.screenshots_path
if !opt.compare_only {
if opt.verbose {
eprintln('Taking screenshot(s) of `$app.path` to `$out_path`')
}
opt.verbose_eprintln('Taking screenshot(s) of `$app.path` to `$out_path`')
if app.capture.method == 'gg_record' {
for k, v in app.capture.env {
rv := v.replace('\$OUT_PATH', out_path)
if opt.verbose {
eprintln('Setting ENV `$k` = $rv ...')
}
opt.verbose_eprintln('Setting ENV `$k` = $rv ...')
os.setenv('$k', rv, true)
}
mut flags := app.capture.flags.join(' ')
v_cmd := '${os.quoted_path(v_exe)} $flags -d gg_record run ${os.quoted_path(app.abs_path)}'
if opt.verbose {
eprintln('Running `$v_cmd`')
}
result := os.execute('$v_cmd')
result := opt.verbose_execute('${os.quoted_path(v_exe)} $flags -d gg_record run ${os.quoted_path(app.abs_path)}')
if result.exit_code != 0 {
return error('Failed taking screenshot of `$app.abs_path`:\n$result.output')
}
@ -329,30 +326,6 @@ fn take_screenshots(opt Options, app AppConfig) ?[]string {
return screenshots
}
// ensure_env returns nothing if everything is okay.
fn ensure_env(opt Options) ? {
if !os.exists(tmp_dir) {
os.mkdir_all(tmp_dir)?
}
if runtime_os !in supported_hosts {
return error('$tool_name is currently only supported on $supported_hosts hosts')
}
}
// vexe returns the absolute path to the V compiler.
fn vexe() string {
mut exe := os.getenv('VEXE')
if os.is_executable(exe) {
return os.real_path(exe)
}
possible_symlink := os.find_abs_path_of_executable('v') or { '' }
if os.is_executable(possible_symlink) {
exe = os.real_path(possible_symlink)
}
return exe
}
fn new_config(root_path string, toml_config string) ?Config {
doc := if os.is_file(toml_config) {
toml.parse_file(toml_config)?
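Note: the `verbose_execute`/`verbose_eprintln` methods introduced above replace the repeated `if opt.verbose { eprintln(...) }` blocks. A self-contained sketch of the same pattern (the `v version` command is just a placeholder):
```
import os

struct Options {
	verbose bool
}

fn (opt Options) verbose_eprintln(msg string) {
	if opt.verbose {
		eprintln(msg)
	}
}

fn (opt Options) verbose_execute(cmd string) os.Result {
	opt.verbose_eprintln('Running `$cmd`')
	return os.execute(cmd)
}

fn main() {
	opt := Options{
		verbose: true
	}
	res := opt.verbose_execute('v version')
	if res.exit_code == 0 {
		opt.verbose_eprintln('OUTPUT:\n$res.output')
	}
}
```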

View File

@ -6,12 +6,13 @@ import flag
const (
tool_name = 'v missdoc'
tool_version = '0.0.4'
tool_version = '0.1.0'
tool_description = 'Prints all V functions in .v files under PATH/, that do not yet have documentation comments.'
work_dir_prefix = normalise_path(os.real_path(os.wd_at_startup) + '/')
work_dir_prefix = normalise_path(os.real_path(os.wd_at_startup) + os.path_separator)
)
struct UndocumentedFN {
file string
line int
signature string
tags []string
@ -26,11 +27,15 @@ struct Options {
no_line_numbers bool
exclude []string
relative_paths bool
mut:
verify bool
diff bool
additional_args []string
}
fn (opt Options) report_undocumented_functions_in_path(path string) {
fn (opt Options) collect_undocumented_functions_in_dir(directory string) []UndocumentedFN {
mut files := []string{}
collect(path, mut files, fn (npath string, mut accumulated_paths []string) {
collect(directory, mut files, fn (npath string, mut accumulated_paths []string) {
if !npath.ends_with('.v') {
return
}
@ -39,6 +44,7 @@ fn (opt Options) report_undocumented_functions_in_path(path string) {
}
accumulated_paths << npath
})
mut undocumented_fns := []UndocumentedFN{}
for file in files {
if !opt.js && file.ends_with('.js.v') {
continue
@ -46,46 +52,64 @@ fn (opt Options) report_undocumented_functions_in_path(path string) {
if opt.exclude.len > 0 && opt.exclude.any(file.contains(it)) {
continue
}
opt.report_undocumented_functions_in_file(file)
undocumented_fns << opt.collect_undocumented_functions_in_file(file)
}
return undocumented_fns
}
fn (opt &Options) report_undocumented_functions_in_file(nfile string) {
fn (opt &Options) collect_undocumented_functions_in_file(nfile string) []UndocumentedFN {
file := os.real_path(nfile)
contents := os.read_file(file) or { panic(err) }
lines := contents.split('\n')
mut info := []UndocumentedFN{}
mut list := []UndocumentedFN{}
mut comments := []string{}
mut tags := []string{}
for i, line in lines {
if line.starts_with('pub fn') || (opt.private && (line.starts_with('fn ')
&& !(line.starts_with('fn C.') || line.starts_with('fn main')))) {
// println('Match: $line')
if i > 0 && lines.len > 0 {
mut line_above := lines[i - 1]
if !line_above.starts_with('//') {
mut tags := []string{}
mut grab := true
for j := i - 1; j >= 0; j-- {
prev_line := lines[j]
if prev_line.contains('}') { // We've looked back to the above scope, stop here
break
} else if prev_line.starts_with('[') {
tags << collect_tags(prev_line)
continue
} else if prev_line.starts_with('//') { // Single-line comment
grab = false
break
}
}
if grab {
clean_line := line.all_before_last(' {')
info << UndocumentedFN{i + 1, clean_line, tags}
}
if line.starts_with('//') {
comments << line
} else if line.trim_space().starts_with('[') {
tags << collect_tags(line)
} else if line.starts_with('pub fn')
|| (opt.private && (line.starts_with('fn ') && !(line.starts_with('fn C.')
|| line.starts_with('fn main')))) {
if comments.len == 0 {
clean_line := line.all_before_last(' {')
list << UndocumentedFN{
line: i + 1
signature: clean_line
tags: tags
file: file
}
}
tags = []
comments = []
} else {
tags = []
comments = []
}
}
if info.len > 0 {
for undocumented_fn in info {
return list
}
fn (opt &Options) collect_undocumented_functions_in_path(path string) []UndocumentedFN {
mut undocumented_functions := []UndocumentedFN{}
if os.is_file(path) {
undocumented_functions << opt.collect_undocumented_functions_in_file(path)
} else {
undocumented_functions << opt.collect_undocumented_functions_in_dir(path)
}
return undocumented_functions
}
fn (opt &Options) report_undocumented_functions_in_path(path string) int {
mut list := opt.collect_undocumented_functions_in_path(path)
opt.report_undocumented_functions(list)
return list.len
}
fn (opt &Options) report_undocumented_functions(list []UndocumentedFN) {
if list.len > 0 {
for undocumented_fn in list {
mut line_numbers := '$undocumented_fn.line:0:'
if opt.no_line_numbers {
line_numbers = ''
@ -95,10 +119,11 @@ fn (opt &Options) report_undocumented_functions_in_file(nfile string) {
} else {
''
}
file := undocumented_fn.file
ofile := if opt.relative_paths {
nfile.replace(work_dir_prefix, '')
file.replace(work_dir_prefix, '')
} else {
os.real_path(nfile)
os.real_path(file)
}
if opt.deprecated {
println('$ofile:$line_numbers$undocumented_fn.signature $tags_str')
@ -118,6 +143,54 @@ fn (opt &Options) report_undocumented_functions_in_file(nfile string) {
}
}
fn (opt &Options) diff_undocumented_functions_in_paths(path_old string, path_new string) []UndocumentedFN {
old := os.real_path(path_old)
new := os.real_path(path_new)
mut old_undocumented_functions := opt.collect_undocumented_functions_in_path(old)
mut new_undocumented_functions := opt.collect_undocumented_functions_in_path(new)
mut differs := []UndocumentedFN{}
if new_undocumented_functions.len > old_undocumented_functions.len {
for new_undoc_fn in new_undocumented_functions {
new_relative_file := new_undoc_fn.file.replace(new, '').trim_string_left(os.path_separator)
mut found := false
for old_undoc_fn in old_undocumented_functions {
old_relative_file := old_undoc_fn.file.replace(old, '').trim_string_left(os.path_separator)
if new_relative_file == old_relative_file
&& new_undoc_fn.signature == old_undoc_fn.signature {
found = true
break
}
}
if !found {
differs << new_undoc_fn
}
}
}
differs.sort_with_compare(sort_undoc_fns)
return differs
}
fn sort_undoc_fns(a &UndocumentedFN, b &UndocumentedFN) int {
if a.file < b.file {
return -1
}
if a.file > b.file {
return 1
}
// same file sort by signature
else {
if a.signature < b.signature {
return -1
}
if a.signature > b.signature {
return 1
}
return 0
}
}
fn normalise_path(path string) string {
return path.replace('\\', '/')
}
@ -145,17 +218,15 @@ fn collect_tags(line string) []string {
}
fn main() {
if os.args.len == 1 {
println('Usage: $tool_name PATH \n$tool_description\n$tool_name -h for more help...')
exit(1)
}
mut fp := flag.new_flag_parser(os.args[1..])
mut fp := flag.new_flag_parser(os.args[1..]) // skip the "v" command.
fp.application(tool_name)
fp.version(tool_version)
fp.description(tool_description)
fp.arguments_description('PATH [PATH]...')
fp.skip_executable() // skip the "missdoc" command.
// Collect tool options
opt := Options{
mut opt := Options{
show_help: fp.bool('help', `h`, false, 'Show this help text.')
deprecated: fp.bool('deprecated', `d`, false, 'Include deprecated functions in output.')
private: fp.bool('private', `p`, false, 'Include private functions in output.')
@ -164,16 +235,58 @@ fn main() {
collect_tags: fp.bool('tags', `t`, false, 'Also print function tags if any is found.')
exclude: fp.string_multi('exclude', `e`, '')
relative_paths: fp.bool('relative-paths', `r`, false, 'Use relative paths in output.')
diff: fp.bool('diff', 0, false, 'exit(1) and show difference between two PATH inputs, return 0 otherwise.')
verify: fp.bool('verify', 0, false, 'exit(1) if documentation is missing, 0 otherwise.')
}
opt.additional_args = fp.finalize() or { panic(err) }
if opt.show_help {
println(fp.usage())
exit(0)
}
for path in os.args[1..] {
if os.is_file(path) {
opt.report_undocumented_functions_in_file(path)
} else {
opt.report_undocumented_functions_in_path(path)
if opt.additional_args.len == 0 {
println(fp.usage())
eprintln('Error: $tool_name is missing PATH input')
exit(1)
}
// Allow short-long versions to prevent false positive situations, should
// the user miss a `-`. E.g.: the `-verify` flag would be ignored and missdoc
// will return 0 for success plus a list of any undocumented functions.
if '-verify' in opt.additional_args {
opt.verify = true
}
if '-diff' in opt.additional_args {
opt.diff = true
}
if opt.diff {
if opt.additional_args.len < 2 {
println(fp.usage())
eprintln('Error: $tool_name --diff needs two valid PATH inputs')
exit(1)
}
path_old := opt.additional_args[0]
path_new := opt.additional_args[1]
if !(os.is_file(path_old) || os.is_dir(path_old)) || !(os.is_file(path_new)
|| os.is_dir(path_new)) {
println(fp.usage())
eprintln('Error: $tool_name --diff needs two valid PATH inputs')
exit(1)
}
list := opt.diff_undocumented_functions_in_paths(path_old, path_new)
if list.len > 0 {
opt.report_undocumented_functions(list)
exit(1)
}
exit(0)
}
mut total := 0
for path in opt.additional_args {
if os.is_file(path) || os.is_dir(path) {
total += opt.report_undocumented_functions_in_path(path)
}
}
if opt.verify && total > 0 {
exit(1)
}
}
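Note: the `--diff` output above is ordered with a two-level comparator, first by file and then by signature. A minimal standalone sketch of that sort, with hypothetical sample data:
```
struct Rec {
	file      string
	signature string
}

fn cmp(a &Rec, b &Rec) int {
	if a.file != b.file {
		return if a.file < b.file { -1 } else { 1 }
	}
	if a.signature < b.signature {
		return -1
	}
	if a.signature > b.signature {
		return 1
	}
	return 0
}

fn main() {
	mut recs := [
		Rec{file: 'b.v', signature: 'fn x()'},
		Rec{file: 'a.v', signature: 'fn z()'},
		Rec{file: 'a.v', signature: 'fn a()'},
	]
	recs.sort_with_compare(cmp)
	for r in recs {
		println('$r.file | $r.signature')
	}
}
```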

View File

@ -4,6 +4,7 @@
module main
import os
import rand
import os.cmdline
import net.http
import net.urllib
@ -12,7 +13,8 @@ import vhelp
import v.vmod
const (
default_vpm_server_urls = ['https://vpm.vlang.io']
default_vpm_server_urls = ['https://vpm.vlang.io', 'https://vpm.url4e.com']
vpm_server_urls = rand.shuffle_clone(default_vpm_server_urls) or { [] } // ensure that all queries are distributed fairly
valid_vpm_commands = ['help', 'search', 'install', 'update', 'upgrade', 'outdated',
'list', 'remove', 'show']
excluded_dirs = ['cache', 'vlib']
@ -208,24 +210,24 @@ fn vpm_install_from_vpm(module_names []string) {
println('VPM needs `$vcs` to be installed.')
continue
}
mod_name_as_path := mod.name.replace('.', os.path_separator).replace('-', '_').to_lower()
final_module_path := os.real_path(os.join_path(settings.vmodules_path, mod_name_as_path))
if os.exists(final_module_path) {
//
minfo := mod_name_info(mod.name)
if os.exists(minfo.final_module_path) {
vpm_update([name])
continue
}
println('Installing module "$name" from "$mod.url" to "$final_module_path" ...')
println('Installing module "$name" from "$mod.url" to "$minfo.final_module_path" ...')
vcs_install_cmd := supported_vcs_install_cmds[vcs]
cmd := '$vcs_install_cmd "$mod.url" "$final_module_path"'
cmd := '$vcs_install_cmd "$mod.url" "$minfo.final_module_path"'
verbose_println(' command: $cmd')
cmdres := os.execute(cmd)
if cmdres.exit_code != 0 {
errors++
println('Failed installing module "$name" to "$final_module_path" .')
println('Failed installing module "$name" to "$minfo.final_module_path" .')
print_failed_cmd(cmd, cmdres)
continue
}
resolve_dependencies(name, final_module_path, module_names)
resolve_dependencies(name, minfo.final_module_path, module_names)
}
if errors > 0 {
exit(1)
@ -270,7 +272,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
}
repo_name := url.substr(second_cut_pos + 1, first_cut_pos)
mut name := repo_name + os.path_separator + mod_name
mut name := os.join_path(repo_name, mod_name)
mod_name_as_path := name.replace('-', '_').to_lower()
mut final_module_path := os.real_path(os.join_path(settings.vmodules_path, mod_name_as_path))
if os.exists(final_module_path) {
@ -297,20 +299,19 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
if os.exists(vmod_path) {
data := os.read_file(vmod_path) or { return }
vmod := parse_vmod(data)
mod_path := os.real_path(os.join_path(settings.vmodules_path, vmod.name.replace('.',
os.path_separator)))
println('Relocating module from "$name" to "$vmod.name" ( "$mod_path" ) ...')
if os.exists(mod_path) {
println('Warning module "$mod_path" already exists!')
println('Removing module "$mod_path" ...')
os.rmdir_all(mod_path) or {
minfo := mod_name_info(vmod.name)
println('Relocating module from "$name" to "$vmod.name" ( "$minfo.final_module_path" ) ...')
if os.exists(minfo.final_module_path) {
println('Warning module "$minfo.final_module_path" already exists!')
println('Removing module "$minfo.final_module_path" ...')
os.rmdir_all(minfo.final_module_path) or {
errors++
println('Errors while removing "$mod_path" :')
println('Errors while removing "$minfo.final_module_path" :')
println(err)
continue
}
}
os.mv(final_module_path, mod_path) or {
os.mv(final_module_path, minfo.final_module_path) or {
errors++
println('Errors while relocating module "$name" :')
println(err)
@ -323,7 +324,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
continue
}
println('Module "$name" relocated to "$vmod.name" successfully.')
final_module_path = mod_path
final_module_path = minfo.final_module_path
name = vmod.name
}
resolve_dependencies(name, final_module_path, module_names)
@ -377,10 +378,7 @@ fn vpm_update(m []string) {
}
mut errors := 0
for modulename in module_names {
mut zname := modulename
if mod := get_mod_by_url(modulename) {
zname = mod.name
}
zname := url_to_module_name(modulename)
final_module_path := valid_final_path_of_existing_module(modulename) or { continue }
os.chdir(final_module_path) or {}
println('Updating module "$zname" in "$final_module_path" ...')
@ -503,26 +501,21 @@ fn vpm_remove(module_names []string) {
}
fn valid_final_path_of_existing_module(modulename string) ?string {
mut name := modulename
if mod := get_mod_by_url(name) {
name = mod.name
}
mod_name_as_path := name.replace('.', os.path_separator).replace('-', '_').to_lower()
name_of_vmodules_folder := os.join_path(settings.vmodules_path, mod_name_as_path)
final_module_path := os.real_path(name_of_vmodules_folder)
if !os.exists(final_module_path) {
println('No module with name "$name" exists at $name_of_vmodules_folder')
name := if mod := get_mod_by_url(modulename) { mod.name } else { modulename }
minfo := mod_name_info(name)
if !os.exists(minfo.final_module_path) {
println('No module with name "$minfo.mname_normalised" exists at $minfo.final_module_path')
return none
}
if !os.is_dir(final_module_path) {
println('Skipping "$name_of_vmodules_folder", since it is not a folder.')
if !os.is_dir(minfo.final_module_path) {
println('Skipping "$minfo.final_module_path", since it is not a folder.')
return none
}
vcs_used_in_dir(final_module_path) or {
println('Skipping "$name_of_vmodules_folder", since it does not use a supported vcs.')
vcs_used_in_dir(minfo.final_module_path) or {
println('Skipping "$minfo.final_module_path", since it does not use a supported vcs.')
return none
}
return final_module_path
return minfo.final_module_path
}
fn ensure_vmodules_dir_exist() {
@ -573,6 +566,31 @@ fn get_installed_modules() []string {
return modules
}
struct ModNameInfo {
mut:
mname string // The-user.The-mod , *never* The-user.The-mod.git
mname_normalised string // the_user.the_mod
mname_as_path string // the_user/the_mod
final_module_path string // ~/.vmodules/the_user/the_mod
}
fn mod_name_info(mod_name string) ModNameInfo {
mut info := ModNameInfo{}
info.mname = if mod_name.ends_with('.git') { mod_name.replace('.git', '') } else { mod_name }
info.mname_normalised = info.mname.replace('-', '_').to_lower()
info.mname_as_path = info.mname_normalised.replace('.', os.path_separator)
info.final_module_path = os.real_path(os.join_path(settings.vmodules_path, info.mname_as_path))
return info
}
fn url_to_module_name(modulename string) string {
mut res := if mod := get_mod_by_url(modulename) { mod.name } else { modulename }
if res.ends_with('.git') {
res = res.replace('.git', '')
}
return res
}
fn get_all_modules() []string {
url := get_working_server_url()
r := http.get(url) or { panic(err) }
@ -580,7 +598,7 @@ fn get_all_modules() []string {
println('Failed to search vpm.vlang.io. Status code: $r.status_code')
exit(1)
}
s := r.text
s := r.body
mut read_len := 0
mut modules := []string{}
for read_len < s.len {
@ -648,7 +666,7 @@ fn get_working_server_url() string {
server_urls := if settings.server_urls.len > 0 {
settings.server_urls
} else {
default_vpm_server_urls
vpm_server_urls
}
for url in server_urls {
verbose_println('Trying server url: $url')
@ -709,7 +727,8 @@ fn get_module_meta_info(name string) ?Mod {
return mod
}
mut errors := []string{}
for server_url in default_vpm_server_urls {
for server_url in vpm_server_urls {
modurl := server_url + '/jsmod/$name'
verbose_println('Retrieving module metadata from: "$modurl" ...')
r := http.get(modurl) or {
@ -717,7 +736,7 @@ fn get_module_meta_info(name string) ?Mod {
errors << 'Error details: $err'
continue
}
if r.status_code == 404 || r.text.trim_space() == '404' {
if r.status_code == 404 || r.body.trim_space() == '404' {
errors << 'Skipping module "$name", since "$server_url" reported that "$name" does not exist.'
continue
}
@ -725,7 +744,7 @@ fn get_module_meta_info(name string) ?Mod {
errors << 'Skipping module "$name", since "$server_url" responded with $r.status_code http status code. Please try again later.'
continue
}
s := r.text
s := r.body
if s.len > 0 && s[0] != `{` {
errors << 'Invalid json data'
errors << s.trim_space().limit(100) + ' ...'
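Note: the new `ModNameInfo`/`mod_name_info` helper above centralises the name mangling that used to be repeated inline. A standalone sketch of the same normalisation, where 'Some-User.Some-Mod.git' ends up as a folder under `~/.vmodules`:
```
import os

fn normalise(mod_name string) (string, string) {
	// strip a trailing .git, lower-case, and replace '-' with '_'
	mname := if mod_name.ends_with('.git') { mod_name.replace('.git', '') } else { mod_name }
	normalised := mname.replace('-', '_').to_lower()
	as_path := normalised.replace('.', os.path_separator)
	return normalised, as_path
}

fn main() {
	normalised, as_path := normalise('Some-User.Some-Mod.git')
	println(normalised) // some_user.some_mod
	println(as_path) // some_user/some_mod (backslashes on windows)
	println(os.real_path(os.join_path(os.vmodules_dir(), as_path)))
}
```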

View File

@ -305,7 +305,6 @@ fn run_repl(workdir string, vrepl_prefix string) int {
return int(rc)
}
}
break
}
r.line = line
if r.line == '\n' {
@ -388,13 +387,13 @@ fn run_repl(workdir string, vrepl_prefix string) int {
'#include ',
'for ',
'or ',
'insert',
'delete',
'prepend',
'sort',
'clear',
'trim',
'as',
'insert(',
'delete(',
'prepend(',
'sort(',
'clear(',
'trim(',
' as ',
]
mut is_statement := false
if filter_line.count('=') % 2 == 1 {

View File

@ -26,6 +26,7 @@ fn main() {
spent := sw.elapsed().milliseconds()
oks := commands.filter(it.ecode == 0)
fails := commands.filter(it.ecode != 0)
flush_stdout()
println('')
println(term.header_left(term_highlight('Summary of `v test-all`:'), '-'))
println(term_highlight('Total runtime: $spent ms'))
@ -37,6 +38,7 @@ fn main() {
msg := if fcmd.errmsg != '' { fcmd.errmsg } else { fcmd.line }
println(term.failed('> Failed:') + ' $msg')
}
flush_stdout()
if fails.len > 0 {
exit(1)
}

View File

@ -5,52 +5,67 @@ import testing
import v.util
import arrays
const (
vet_known_failing_exceptions = []string{}
vet_folders = [
'vlib/sqlite',
'vlib/v',
'vlib/x/json2',
'vlib/x/ttf',
'cmd/v',
'cmd/tools',
'examples/2048',
'examples/tetris',
'examples/term.ui',
]
verify_known_failing_exceptions = [
// Handcrafted meaningful formatting of code parts (mostly arrays)
'examples/sokol/02_cubes_glsl/cube_glsl.v',
'examples/sokol/03_march_tracing_glsl/rt_glsl.v',
'examples/sokol/04_multi_shader_glsl/rt_glsl.v',
'examples/sokol/05_instancing_glsl/rt_glsl.v',
'examples/sokol/06_obj_viewer/show_obj.v',
'vlib/v/checker/tests/modules/deprecated_module/main.v' /* adds deprecated_module. module prefix to imports, even though the folder has v.mod */,
'vlib/gg/m4/graphic.v',
'vlib/gg/m4/m4_test.v',
'vlib/gg/m4/matrix.v',
'vlib/builtin/int_test.v' /* special number formatting that should be tested */,
// TODOs and unfixed vfmt bugs
'vlib/v/gen/js/tests/js.v', /* local `hello` fn, gets replaced with module `hello` aliased as `hl` */
]
vfmt_verify_list = [
'cmd/',
'examples/',
'tutorials/',
'vlib/',
]
vfmt_known_failing_exceptions = arrays.merge(verify_known_failing_exceptions, [
'vlib/regex/regex_test.v' /* contains meaningful formatting of the test case data */,
'vlib/crypto/sha512/sha512block_generic.v' /* formatting of large constant arrays wraps to too many lines */,
'vlib/crypto/aes/const.v' /* formatting of large constant arrays wraps to too many lines */,
])
)
const vet_known_failing = [
'do_not_delete_this',
]
const (
vexe = os.getenv('VEXE')
vroot = os.dir(vexe)
is_fix = '-fix' in os.args
)
const vet_known_failing_windows = [
'do_not_delete_this',
'vlib/v/gen/js/tests/testdata/byte_is_space.v',
'vlib/v/gen/js/tests/testdata/compare_ints.v',
'vlib/v/gen/js/tests/testdata/hw.v',
'vlib/v/gen/js/tests/testdata/string_methods.v',
'vlib/v/tests/project_with_modules_having_submodules/bin/main.vsh',
'vlib/v/tests/valgrind/simple_interpolation_script_mode.v',
'vlib/v/tests/valgrind/simple_interpolation_script_mode_more_scopes.v',
]
const vet_folders = [
'vlib/sqlite',
'vlib/v',
'vlib/x/json2',
'vlib/x/ttf',
'cmd/v',
'cmd/tools',
'examples/2048',
'examples/tetris',
'examples/term.ui',
]
const verify_known_failing_exceptions = [
// Handcrafted meaningful formatting of code parts (mostly arrays)
'examples/sokol/02_cubes_glsl/cube_glsl.v',
'examples/sokol/03_march_tracing_glsl/rt_glsl.v',
'examples/sokol/04_multi_shader_glsl/rt_glsl.v',
'examples/sokol/05_instancing_glsl/rt_glsl.v',
'examples/sokol/06_obj_viewer/show_obj.v',
'vlib/v/checker/tests/modules/deprecated_module/main.v' /* adds deprecated_module. module prefix to imports, even though the folder has v.mod */,
'vlib/gg/m4/graphic.v',
'vlib/gg/m4/m4_test.v',
'vlib/gg/m4/matrix.v',
'vlib/builtin/int_test.v' /* special number formatting that should be tested */,
// TODOs and unfixed vfmt bugs
'vlib/v/gen/js/tests/js.v', /* local `hello` fn, gets replaced with module `hello` aliased as `hl` */
]
const vfmt_verify_list = [
'cmd/',
'examples/',
'tutorials/',
'vlib/',
]
const vfmt_known_failing_exceptions = arrays.merge(verify_known_failing_exceptions, [
'vlib/regex/regex_test.v' /* contains meaningful formatting of the test case data */,
'vlib/crypto/sha512/sha512block_generic.v' /* formatting of large constant arrays wraps to too many lines */,
'vlib/crypto/aes/const.v' /* formatting of large constant arrays wraps to too many lines */,
])
const vexe = os.getenv('VEXE')
const vroot = os.dir(vexe)
const is_fix = '-fix' in os.args
fn main() {
args_string := os.args[1..].join(' ')
@ -76,8 +91,12 @@ fn tsession(vargs string, tool_source string, tool_cmd string, tool_args string,
fn v_test_vetting(vargs string) {
expanded_vet_list := util.find_all_v_files(vet_folders) or { return }
mut vet_known_exceptions := vet_known_failing.clone()
if os.user_os() == 'windows' {
vet_known_exceptions << vet_known_failing_windows
}
vet_session := tsession(vargs, 'vvet', '${os.quoted_path(vexe)} vet', 'vet', expanded_vet_list,
vet_known_failing_exceptions)
vet_known_exceptions)
//
fmt_cmd, fmt_args := if is_fix {
'${os.quoted_path(vexe)} fmt -w', 'fmt -w'
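Note: the split between `vet_known_failing` and `vet_known_failing_windows` above is merged back together at runtime, depending on the host OS. A condensed sketch of that selection:
```
import os

const vet_known_failing = ['do_not_delete_this']

const vet_known_failing_windows = ['vlib/v/gen/js/tests/testdata/hw.v']

fn main() {
	mut vet_known_exceptions := vet_known_failing.clone()
	if os.user_os() == 'windows' {
		vet_known_exceptions << vet_known_failing_windows
	}
	println(vet_known_exceptions)
}
```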

View File

@ -6,7 +6,83 @@ import v.pref
const github_job = os.getenv('GITHUB_JOB')
const just_essential = os.getenv('VTEST_JUST_ESSENTIAL') != ''
const (
essential_list = [
'cmd/tools/vvet/vet_test.v',
'vlib/arrays/arrays_test.v',
'vlib/bitfield/bitfield_test.v',
//
'vlib/builtin/int_test.v',
'vlib/builtin/array_test.v',
'vlib/builtin/float_test.v',
'vlib/builtin/byte_test.v',
'vlib/builtin/rune_test.v',
'vlib/builtin/builtin_test.v',
'vlib/builtin/map_of_floats_test.v',
'vlib/builtin/string_int_test.v',
'vlib/builtin/utf8_test.v',
'vlib/builtin/map_test.v',
'vlib/builtin/string_test.v',
'vlib/builtin/sorting_test.v',
'vlib/builtin/gated_array_string_test.v',
'vlib/builtin/array_shrinkage_test.v',
'vlib/builtin/isnil_test.v',
'vlib/builtin/string_match_glob_test.v',
'vlib/builtin/string_strip_margin_test.v',
//
'vlib/cli/command_test.v',
'vlib/crypto/md5/md5_test.v',
'vlib/dl/dl_test.v',
'vlib/encoding/base64/base64_test.v',
'vlib/encoding/utf8/encoding_utf8_test.v',
'vlib/encoding/utf8/utf8_util_test.v',
'vlib/flag/flag_test.v',
'vlib/json/json_decode_test.v',
'vlib/math/math_test.v',
'vlib/net/tcp_test.v',
'vlib/net/http/http_test.v',
'vlib/net/http/server_test.v',
'vlib/net/http/request_test.v',
'vlib/io/io_test.v',
'vlib/io/os_file_reader_test.v',
'vlib/os/process_test.v',
'vlib/os/file_test.v',
'vlib/os/notify/notify_test.v',
'vlib/os/filepath_test.v',
'vlib/os/environment_test.v',
'vlib/os/glob_test.v',
'vlib/os/os_test.v',
'vlib/rand/random_numbers_test.v',
'vlib/rand/wyrand/wyrand_test.v',
'vlib/runtime/runtime_test.v',
'vlib/semver/semver_test.v',
'vlib/sync/stdatomic/atomic_test.v',
'vlib/sync/thread_test.v',
'vlib/sync/waitgroup_test.v',
'vlib/sync/pool/pool_test.v',
'vlib/strings/builder_test.v',
'vlib/strconv/atof_test.v',
'vlib/strconv/atoi_test.v',
'vlib/strconv/f32_f64_to_string_test.v',
'vlib/strconv/format_test.v',
'vlib/strconv/number_to_base_test.v',
'vlib/time/time_test.v',
'vlib/toml/tests/toml_test.v',
'vlib/v/compiler_errors_test.v',
'vlib/v/doc/doc_test.v',
'vlib/v/eval/interpret_test.v',
'vlib/v/fmt/fmt_keep_test.v',
'vlib/v/fmt/fmt_test.v',
'vlib/v/gen/c/coutput_test.v',
'vlib/v/gen/js/program_test.v',
'vlib/v/gen/native/macho_test.v',
'vlib/v/gen/native/tests/native_test.v',
'vlib/v/pkgconfig/pkgconfig_test.v',
'vlib/v/tests/inout/compiler_test.v',
'vlib/x/json2/json2_test.v',
]
skip_test_files = [
'cmd/tools/vdoc/html_tag_escape_test.v', /* can't locate local module: markdown */
'cmd/tools/vdoc/tests/vdoc_file_test.v', /* fails on Windows; order of output is not as expected */
@ -66,6 +142,7 @@ const (
]
skip_with_werror = [
'do_not_remove',
'vlib/v/embed_file/tests/embed_file_test.v',
]
skip_with_asan_compiler = [
'do_not_remove',
@ -109,6 +186,10 @@ const (
skip_on_non_linux = [
'do_not_remove',
]
skip_on_windows_msvc = [
'do_not_remove',
'vlib/v/tests/const_fixed_array_containing_references_to_itself_test.v', // error C2099: initializer is not a constant
]
skip_on_windows = [
'vlib/context/cancel_test.v',
'vlib/context/deadline_test.v',
@ -172,6 +253,11 @@ fn main() {
all_test_files << os.walk_ext(os.join_path(vroot, 'cmd'), '_test.v')
test_js_files := os.walk_ext(os.join_path(vroot, 'vlib'), '_test.js.v')
all_test_files << test_js_files
if just_essential {
rooted_essential_list := essential_list.map(os.join_path(vroot, it))
all_test_files = all_test_files.filter(rooted_essential_list.contains(it))
}
testing.eheader(title)
mut tsession := testing.new_test_session(cmd_prefix, true)
tsession.files << all_test_files.filter(!it.contains('testdata' + os.path_separator))
@ -264,6 +350,9 @@ fn main() {
}
$if windows {
tsession.skip_files << skip_on_windows
$if msvc {
tsession.skip_files << skip_on_windows_msvc
}
}
$if !windows {
tsession.skip_files << skip_on_non_windows
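Note: when `VTEST_JUST_ESSENTIAL` is set, the full test list above is reduced to the `essential_list` entries, rooted at the V source tree. A condensed sketch of the filtering step; it assumes `VEXE` points at the v executable and uses a shortened essential list:
```
import os

const just_essential = os.getenv('VTEST_JUST_ESSENTIAL') != ''

fn main() {
	vroot := os.dir(os.getenv('VEXE'))
	essential_list := ['vlib/builtin/string_test.v', 'vlib/os/os_test.v']
	mut all_test_files := os.walk_ext(os.join_path(vroot, 'vlib'), '_test.v')
	if just_essential {
		rooted_essential_list := essential_list.map(os.join_path(vroot, it))
		all_test_files = all_test_files.filter(rooted_essential_list.contains(it))
	}
	println('will run $all_test_files.len test files')
}
```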

View File

@ -0,0 +1,7 @@
fn abc() int {
return if true {
0x4000 // 16KB
} else {
0x1000 // 4KB
}
}

View File

@ -89,6 +89,7 @@ mut:
v_cycles int // how many times the worker has restarted the V compiler
scan_cycles int // how many times the worker has scanned for source file changes
clear_terminal bool // whether to clear the terminal before each re-run
keep_running bool // when true, re-run the program automatically if it exits on its own. Useful for gg apps.
silent bool // when true, watch will not print a timestamp line before each re-run
add_files []string // path to additional files that have to be watched for changes
ignore_exts []string // extensions of files that will be ignored, even if they change (useful for sqlite.db files for example)
@ -207,7 +208,7 @@ fn change_detection_loop(ocontext &Context) {
}
fn (mut context Context) kill_pgroup() {
if context.child_process == 0 {
if unsafe { context.child_process == 0 } {
return
}
if context.child_process.is_alive() {
@ -260,6 +261,9 @@ fn (mut context Context) compilation_runner_loop() {
if notalive_count == 1 {
// a short lived process finished, do cleanup:
context.run_after_cmd()
if context.keep_running {
break
}
}
}
select {
@ -282,6 +286,7 @@ fn (mut context Context) compilation_runner_loop() {
}
}
if !context.child_process.is_alive() {
context.elog('> child_process is no longer alive | notalive_count: $notalive_count')
context.child_process.wait()
context.child_process.close()
if notalive_count == 0 {
@ -317,6 +322,7 @@ fn main() {
context.is_worker = fp.bool('vwatchworker', 0, false, 'Internal flag. Used to distinguish vwatch manager and worker processes.')
context.silent = fp.bool('silent', `s`, false, 'Be more silent; do not print the watch timestamp before each re-run.')
context.clear_terminal = fp.bool('clear', `c`, false, 'Clears the terminal before each re-run.')
context.keep_running = fp.bool('keep', `k`, false, 'Keep the program running. Restart it automatically, if it exits by itself. Useful for gg/ui apps.')
context.add_files = fp.string('add', `a`, '', 'Add more files to be watched. Useful with `v watch -add=/tmp/feature.v run cmd/v /tmp/feature.v`, if you change *both* the compiler, and the feature.v file.').split(',')
context.ignore_exts = fp.string('ignore', `i`, '', 'Ignore files having these extensions. Useful with `v watch -ignore=.db run server.v`, if your server writes to an sqlite.db file in the same folder.').split(',')
show_help := fp.bool('help', `h`, false, 'Show this help screen.')

View File

@ -25,7 +25,20 @@ see also `v help build`.
-cstrict
Turn on additional C warnings. This slows down compilation
slightly (~10% for gcc), but sometimes provides better diagnosis.
slightly (~10% for gcc), but sometimes provides better error diagnosis.
-cmain <MainFunctionName>
Useful with framework-like code that uses macros to redefine `main`, as SDL2 does for example.
With that option, V will always generate:
`int MainFunctionName(int ___argc, char** ___argv) {`, for the program entry point function, *no matter* the OS.
Without it, on non-Windows systems, it will generate:
`int main(int ___argc, char** ___argv) {`
... and on Windows, it will generate:
a) `int WINAPI wWinMain(HINSTANCE instance, HINSTANCE prev_instance, LPWSTR cmd_line, int show_cmd){`
when you are compiling applications that `import gg`.
... or it will generate:
b) `int wmain(int ___argc, wchar_t* ___argv[], wchar_t* ___envp[]){`
when you are compiling console apps.
-showcc
Prints the C command that is used to build the program.
@ -239,7 +252,15 @@ see also `v help build`.
-dump-c-flags file.txt
Write all C flags into `file.txt`, one flag per line.
If `file.txt` is `-`, then write the flags to stdout, one flag per line.
If `file.txt` is `-`, write to stdout instead.
-dump-modules file.txt
Write all module names used by the program in `file.txt`, one module per line.
If `file.txt` is `-`, write to stdout instead.
-dump-files file.txt
Write all V file paths used by the program into `file.txt`, one path per line.
If `file.txt` is `-`, write to stdout instead.
-no-rsp
By default, V passes all C compiler options to the backend C compiler
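Note: a hedged follow-up to the `-dump-modules` flag documented above. After producing a list with something like `v -dump-modules modules.txt examples/hello_world.v` (the file names are assumptions), the output can be post-processed like any other text file:
```
import os

fn main() {
	// modules.txt is assumed to have been produced with -dump-modules
	mut modules := os.read_lines('modules.txt') or { panic(err) }
	modules = modules.filter(it.trim_space() != '')
	modules.sort()
	for m in modules {
		println(m)
	}
}
```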

View File

@ -14,4 +14,4 @@ For more general build help, see also `v help build`.
-os <os>, -target-os <os>
Change the target OS that V compiles for.
The supported targets for the native backend are: `macos`, `linux`
The supported targets for the native backend are: `macos`, `linux` and `windows`

View File

@ -7,6 +7,7 @@ Examples:
v hello.v Compile the file `hello.v` and output it as `hello` or `hello.exe`.
v run hello.v Same as above but also run the produced executable immediately after compilation.
v -cg run hello.v Same as above, but make debugging easier (in case your program crashes).
v crun hello.v Same as above, but do not recompile if the executable already exists and is newer than the sources.
v -o h.c hello.v Translate `hello.v` to `h.c`. Do not compile further.
v -o - hello.v Translate `hello.v` and output the C source code to stdout. Do not compile further.
@ -20,7 +21,10 @@ V supports the following commands:
init Setup the file structure for an already existing V project.
* Ordinary development:
run Compile and run a V program.
run Compile and run a V program. Delete the executable after the run.
crun Compile and run a V program without deleting the executable.
If you run the same program a second time, without changing the source files,
V will just run the executable, without recompilation. Suitable for scripting.
test Run all test files in the provided directory.
fmt Format the V code provided.
vet Report suspicious code constructs.

View File

@ -1,4 +1,4 @@
v missdoc 0.0.4
v missdoc 0.1.0
-----------------------------------------------
Usage: v missdoc [options] PATH [PATH]...
@ -12,5 +12,25 @@ Options:
--js Include JavaScript functions in output.
-n, --no-line-numbers Exclude line numbers in output.
-e, --exclude <multiple strings>
-r, --relative-paths Use relative paths in output.
--verify exit(1) if documentation is missing, 0 otherwise.
--diff exit(1) and show difference between two PATH inputs, return 0 otherwise.
--version output version information and exit
-----------------------------------------------
PATH can be both files and directories.
The `--verify` flag is useful in CI setups, for checking that a V project
has all its functions and methods documented:
```
v missdoc --verify path/to/code
```
The `--diff` flag is useful if your project is not yet fully documented,
but you want to ensure that no new undocumented functions or methods are introduced
between commits or branches:
```
v missdoc --diff current/code new/code
```


@ -93,17 +93,21 @@ fn main() {
return
}
match command {
'run', 'crun', 'build', 'build-module' {
rebuild(prefs)
return
}
'help' {
invoke_help_and_exit(args)
}
'version' {
println(version.full_v_version(prefs.is_verbose))
return
}
'new', 'init' {
util.launch_tool(prefs.is_verbose, 'vcreate', os.args[1..])
return
}
'translate' {
eprintln('Translating C to V will be available in V 0.3')
exit(1)
}
'install', 'list', 'outdated', 'remove', 'search', 'show', 'update', 'upgrade' {
util.launch_tool(prefs.is_verbose, 'vpm', os.args[1..])
return
@ -118,42 +122,25 @@ fn main() {
eprintln('V Error: Use `v install` to install modules from vpm.vlang.io')
exit(1)
}
'version' {
println(version.full_v_version(prefs.is_verbose))
return
'translate' {
util.launch_tool(prefs.is_verbose, 'translate', os.args[1..])
// exit(1)
// return
}
else {}
}
if command in ['run', 'build', 'build-module'] || command.ends_with('.v') || os.exists(command) {
// println('command')
// println(prefs.path)
match prefs.backend {
.c {
$if no_bootstrapv ? {
// TODO: improve the bootstrapping with a split C backend here.
// C code generated by `VEXE=v cmd/tools/builders/c_builder -os cross -o c.c cmd/tools/builders/c_builder.v`
// is enough to bootstrap the C backend, and thus the rest, but currently bootstrapping relies on
// `v -os cross -o v.c cmd/v` having a functional C codegen inside instead.
util.launch_tool(prefs.is_verbose, 'builders/c_builder', os.args[1..])
}
builder.compile('build', prefs, cbuilder.compile_c)
}
.js_node, .js_freestanding, .js_browser {
util.launch_tool(prefs.is_verbose, 'builders/js_builder', os.args[1..])
}
.native {
util.launch_tool(prefs.is_verbose, 'builders/native_builder', os.args[1..])
}
.interpret {
util.launch_tool(prefs.is_verbose, 'builders/interpret_builder', os.args[1..])
else {
if command.ends_with('.v') || os.exists(command) {
// println('command')
// println(prefs.path)
rebuild(prefs)
return
}
}
return
}
if prefs.is_help {
invoke_help_and_exit(args)
}
eprintln('v $command: unknown command\nRun ${term.highlight_command('v help')} for usage.')
eprintln('v $command: unknown command')
eprintln('Run ${term.highlight_command('v help')} for usage.')
exit(1)
}
@ -163,7 +150,31 @@ fn invoke_help_and_exit(remaining []string) {
2 { help.print_and_exit(remaining[1]) }
else {}
}
println('${term.highlight_command('v help')}: provide only one help topic.')
println('For usage information, use ${term.highlight_command('v help')}.')
eprintln('${term.highlight_command('v help')}: provide only one help topic.')
eprintln('For usage information, use ${term.highlight_command('v help')}.')
exit(1)
}
fn rebuild(prefs &pref.Preferences) {
match prefs.backend {
.c {
$if no_bootstrapv ? {
// TODO: improve the bootstrapping with a split C backend here.
// C code generated by `VEXE=v cmd/tools/builders/c_builder -os cross -o c.c cmd/tools/builders/c_builder.v`
// is enough to bootstrap the C backend, and thus the rest, but currently bootstrapping relies on
// `v -os cross -o v.c cmd/v` having a functional C codegen inside instead.
util.launch_tool(prefs.is_verbose, 'builders/c_builder', os.args[1..])
}
builder.compile('build', prefs, cbuilder.compile_c)
}
.js_node, .js_freestanding, .js_browser {
util.launch_tool(prefs.is_verbose, 'builders/js_builder', os.args[1..])
}
.native {
util.launch_tool(prefs.is_verbose, 'builders/native_builder', os.args[1..])
}
.interpret {
util.launch_tool(prefs.is_verbose, 'builders/interpret_builder', os.args[1..])
}
}
}


@ -616,7 +616,7 @@ Also note: in most cases, it's best to leave the format type empty. Floats will
default as `g`, integers will be rendered by default as `d`, and `s` is almost always redundant.
There are only three cases where specifying a type is recommended:
- format strings are parsed at compile time, so specifing a type can help detect errors then
- format strings are parsed at compile time, so specifying a type can help detect errors then
- format strings default to using lowercase letters for hex digits and the `e` in exponents. Use an
uppercase type to force the use of uppercase hex digits and an uppercase `E` in exponents (see the
small example after this list).
- format strings are the most convenient way to get hex, binary or octal strings from an integer.
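For instance (a small illustration, not from the original docs), an uppercase format type
switches the hex digits to uppercase:
```v
n := 255
println('${n:x}') // ff
println('${n:X}') // FF
```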
@ -1295,6 +1295,16 @@ mm := map[string]int{}
val := mm['bad_key'] or { panic('key not found') }
```
You can also check if a key is present, and get its value if so, in one go:
```v
m := {
'abc': 'def'
}
if v := m['abc'] {
println('the map value for that key is: $v')
}
```
The same optional check applies to arrays:
```v
@ -4104,17 +4114,16 @@ fn (data &MyType) free() {
Just as the compiler frees C data types with C's `free()`, it will statically insert
`free()` calls for your data type at the end of each variable's lifetime.
Autofree can be enabled with an `-autofree` flag.
For developers who want more low-level control, autofree can be disabled with
`-manualfree`, or by adding a `[manualfree]` attribute to each function that wants to manage its
memory manually. (See [attributes](#attributes)).
_Note: right now autofree is hidden behind the -autofree flag. It will be enabled by
default in V 0.3. If autofree is not used, V programs will leak memory._
Note 2: Autofree is still WIP. Until it stabilises and becomes the default, please
compile your long running processes with `-gc boehm`, which will use the
Boehm-Demers-Weiser conservative garbage collector, to free the memory, that your
programs leak, at runtime.
avoid using it. Right now allocations are handled by a minimal and well-performing GC
until V's autofree engine is production ready.
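As a minimal sketch (not from the original docs) of the `[manualfree]` opt-out described above:
```v oksyntax
[manualfree]
fn concat(lines []string) {
	mut result := ''
	for line in lines {
		result += line
	}
	println(result)
	unsafe { result.free() } // freed explicitly; autofree skips this function
}
```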
### Examples
@ -5905,6 +5914,19 @@ fn main() {
}
```
Struct field deprecations:
```v oksyntax
module abc
// Note that only *direct* accesses to Xyz.d in *other modules* will produce deprecation notices/warnings:
pub struct Xyz {
pub mut:
a int
d int [deprecated: 'use Xyz.a instead'; deprecated_after: '2999-03-01'] // produce a notice, the deprecation date is in the far future
}
```
Function/method deprecations:
```v
// Calling this function will result in a deprecation warning
[deprecated]


@ -7,9 +7,9 @@ fn vlang_time(mut wg sync.WaitGroup) ?string {
data := http.get('https://vlang.io/utc_now')?
finish := time.ticks()
println('Finish getting time ${finish - start} ms')
println(data.text)
println(data.body)
wg.done()
return data.text
return data.body
}
fn remote_ip(mut wg sync.WaitGroup) ?string {
@ -17,9 +17,9 @@ fn remote_ip(mut wg sync.WaitGroup) ?string {
data := http.get('https://api.ipify.org')?
finish := time.ticks()
println('Finish getting ip ${finish - start} ms')
println(data.text)
println(data.body)
wg.done()
return data.text
return data.body
}
fn main() {


@ -1,12 +1,15 @@
module main
// Note: This program, requires that the shared library was already compiled.
// To do so, run `v -d no_backtrace -o library -shared modules/library/library.v`
// before running this program.
import os
import dl
type FNAdder = fn (int, int) int
fn main() {
library_file_path := os.join_path(os.getwd(), dl.get_libname('library'))
library_file_path := os.join_path(os.dir(@FILE), dl.get_libname('library'))
handle := dl.open_opt(library_file_path, dl.rtld_lazy)?
eprintln('handle: ${ptr_str(handle)}')
f := FNAdder(dl.sym_opt(handle, 'add_1')?)


@ -7,6 +7,6 @@ fn main() {
return
}
t := time.unix(resp.text.int())
t := time.unix(resp.body.int())
println(t.format())
}


@ -42,7 +42,7 @@ fn (mut l Layer) populate(nb_neurons int, nb_inputs int) {
}
}
struct Network {
pub struct Network {
mut:
layers []Layer
}


@ -46,7 +46,7 @@ fn main() {
return
}
weather := json.decode(Weather, resp.text) or {
weather := json.decode(Weather, resp.body) or {
println('failed to decode weather json')
return
}


@ -7,8 +7,12 @@ const pwidth = 800
const pheight = 600
const chunk_height = 2 // the image is recalculated in chunks, each chunk processed in a separate thread
const zoom_factor = 1.1
const max_iterations = 255
struct ViewRect {
mut:
x_min f64
@ -17,22 +21,48 @@ mut:
y_max f64
}
fn (v &ViewRect) width() f64 {
return v.x_max - v.x_min
}
fn (v &ViewRect) height() f64 {
return v.y_max - v.y_min
}
struct AppState {
mut:
gg &gg.Context = 0
iidx int
pixels []u32 = []u32{len: pwidth * pheight}
npixels []u32 = []u32{len: pwidth * pheight} // all drawing happens here, results are copied at the end
view ViewRect = ViewRect{-2.7610033817025625, 1.1788897130338223, -1.824584023871934, 2.1153096311072788}
pixels &u32 = unsafe { vcalloc(pwidth * pheight * sizeof(u32)) }
npixels &u32 = unsafe { vcalloc(pwidth * pheight * sizeof(u32)) } // all drawing happens here, results are swapped at the end
view ViewRect = ViewRect{-3.0773593290970673, 1.4952456603855397, -2.019938598189011, 2.3106642054225945}
scale int = 1
ntasks int = runtime.nr_jobs()
}
const colors = [gx.black, gx.blue, gx.red, gx.green, gx.yellow, gx.orange, gx.purple, gx.white,
gx.indigo, gx.violet, gx.black]
gx.indigo, gx.violet, gx.black, gx.blue, gx.orange, gx.yellow, gx.green].map(u32(it.abgr8()))
struct MandelChunk {
cview ViewRect
ymin f64
ymax f64
}
fn (mut state AppState) update() {
mut sw := time.new_stopwatch()
mut chunk_channel := chan MandelChunk{cap: state.ntasks}
mut chunk_ready_channel := chan bool{cap: 1000}
mut threads := []thread{cap: state.ntasks}
defer {
chunk_channel.close()
threads.wait()
}
for t in 0 .. state.ntasks {
threads << go state.worker(t, chunk_channel, chunk_ready_channel)
}
//
mut oview := ViewRect{}
mut sw := time.new_stopwatch()
for {
sw.restart()
cview := state.view
@ -40,39 +70,61 @@ fn (mut state AppState) update() {
time.sleep(5 * time.millisecond)
continue
}
sheight := pheight / state.ntasks
mut threads := []thread{}
for start := 0; start < pheight; start += sheight {
threads << go state.recalc_lines(cview, start, start + sheight)
// schedule chunks, describing the work:
mut nchunks := 0
for start := 0; start < pheight; start += chunk_height {
chunk_channel <- MandelChunk{
cview: cview
ymin: start
ymax: start + chunk_height
}
nchunks++
}
threads.wait()
state.pixels = state.npixels
println('$state.ntasks threads; $sw.elapsed().milliseconds() ms / frame')
// wait for all chunks to be processed:
for _ in 0 .. nchunks {
_ := <-chunk_ready_channel
}
// everything is done, swap the buffer pointers
state.pixels, state.npixels = state.npixels, state.pixels
println('${state.ntasks:2} threads; ${sw.elapsed().milliseconds():3} ms / frame; scale: ${state.scale:4}')
oview = cview
}
}
fn (mut state AppState) recalc_lines(cview ViewRect, ymin f64, ymax f64) {
for y_pixel := ymin; y_pixel < ymax && y_pixel < pheight; y_pixel++ {
y0 := (y_pixel / pheight) * (cview.y_max - cview.y_min) + cview.y_min
for x_pixel := 0.0; x_pixel < pwidth; x_pixel++ {
x0 := (x_pixel / pwidth) * (cview.x_max - cview.x_min) + cview.x_min
mut x, mut y := x0, y0
mut iter := 0
for ; iter < 80; iter++ {
x, y = x * x - y * y + x0, 2 * x * y + y0
if x * x + y * y > 4 {
break
[direct_array_access]
fn (mut state AppState) worker(id int, input chan MandelChunk, ready chan bool) {
for {
chunk := <-input or { break }
yscale := chunk.cview.height() / pheight
xscale := chunk.cview.width() / pwidth
mut x, mut y, mut iter := 0.0, 0.0, 0
mut y0 := chunk.ymin * yscale + chunk.cview.y_min
mut x0 := chunk.cview.x_min
for y_pixel := chunk.ymin; y_pixel < chunk.ymax && y_pixel < pheight; y_pixel++ {
yrow := unsafe { &state.npixels[int(y_pixel * pwidth)] }
y0 += yscale
x0 = chunk.cview.x_min
for x_pixel := 0; x_pixel < pwidth; x_pixel++ {
x0 += xscale
x, y = x0, y0
for iter = 0; iter < max_iterations; iter++ {
x, y = x * x - y * y + x0, 2 * x * y + y0
if x * x + y * y > 4 {
break
}
}
unsafe {
yrow[x_pixel] = colors[iter & 15]
}
}
state.npixels[int(y_pixel) * pwidth + int(x_pixel)] = u32(colors[iter % 8].abgr8())
}
ready <- true
}
}
fn (mut state AppState) draw() {
mut istream_image := state.gg.get_cached_image_by_idx(state.iidx)
istream_image.update_pixel_data(&state.pixels[0])
istream_image.update_pixel_data(state.pixels)
size := gg.window_size()
state.gg.draw_image(0, 0, size.width, size.height, istream_image)
}
@ -84,6 +136,7 @@ fn (mut state AppState) zoom(zoom_factor f64) {
state.view.x_max = c_x + zoom_factor * d_x
state.view.y_min = c_y - zoom_factor * d_y
state.view.y_max = c_y + zoom_factor * d_y
state.scale += if zoom_factor < 1 { 1 } else { -1 }
}
fn (mut state AppState) center(s_x f64, s_y f64) {
@ -110,8 +163,8 @@ fn graphics_frame(mut state AppState) {
fn graphics_click(x f32, y f32, btn gg.MouseButton, mut state AppState) {
if btn == .right {
size := gg.window_size()
m_x := (x / size.width) * (state.view.x_max - state.view.x_min) + state.view.x_min
m_y := (y / size.height) * (state.view.y_max - state.view.y_min) + state.view.y_min
m_x := (x / size.width) * state.view.width() + state.view.x_min
m_y := (y / size.height) * state.view.height() + state.view.y_min
state.center(m_x, m_y)
}
}
@ -119,8 +172,8 @@ fn graphics_click(x f32, y f32, btn gg.MouseButton, mut state AppState) {
fn graphics_move(x f32, y f32, mut state AppState) {
if state.gg.mouse_buttons.has(.left) {
size := gg.window_size()
d_x := (f64(state.gg.mouse_dx) / size.width) * (state.view.x_max - state.view.x_min)
d_y := (f64(state.gg.mouse_dy) / size.height) * (state.view.y_max - state.view.y_min)
d_x := (f64(state.gg.mouse_dx) / size.width) * state.view.width()
d_y := (f64(state.gg.mouse_dy) / size.height) * state.view.height()
state.view.x_min -= d_x
state.view.x_max -= d_x
state.view.y_min -= d_y
@ -133,12 +186,12 @@ fn graphics_scroll(e &gg.Event, mut state AppState) {
}
fn graphics_keydown(code gg.KeyCode, mod gg.Modifier, mut state AppState) {
s_x := (state.view.x_max - state.view.x_min) / 5
s_y := (state.view.y_max - state.view.y_min) / 5
s_x := state.view.width() / 5
s_y := state.view.height() / 5
// movement
mut d_x, mut d_y := 0.0, 0.0
if code == .enter {
println('> $state.view.x_min | $state.view.x_max | $state.view.y_min | $state.view.y_max')
println('> ViewRect{$state.view.x_min, $state.view.x_max, $state.view.y_min, $state.view.y_max}')
}
if state.gg.pressed_keys[int(gg.KeyCode.left)] {
d_x -= s_x


@ -0,0 +1,163 @@
/*
A V program for Bellman-Ford's single source
shortest path algorithm.
literally adapted from:
https://www.geeksforgeeks.org/bellman-ford-algorithm-dp-23/
// Adapted from this site... from C++ and Python codes
For a Portuguese reference:
http://rascunhointeligente.blogspot.com/2010/10/o-algoritmo-de-bellman-ford-um.html
By CCS
*/
const large = 999999 // almost infinity
// a structure to represent a weighted edge in graph
struct EDGE {
mut:
src int
dest int
weight int
}
// build a map with all the edges of a graph, represented by an adjacency matrix
// Input: adjacency matrix --> Output: list of edges (src, dest and weight)
fn build_map_edges_from_graph<T>(g [][]T) map[T]EDGE {
n := g.len // TOTAL OF NODES for this graph -- its dimension
mut edges_map := map[int]EDGE{} // a graph represented by map of edges
mut edge := 0 // a counter of edges
for i in 0 .. n {
for j in 0 .. n {
// if exist an arc ... include as new edge
if g[i][j] != 0 {
edges_map[edge] = EDGE{i, j, g[i][j]}
edge++
}
}
}
// print('${edges_map}')
return edges_map
}
fn print_sol(dist []int) {
n_vertex := dist.len
print('\n Vertex Distance from Source')
for i in 0 .. n_vertex {
print('\n $i --> ${dist[i]}')
}
}
// The main function that finds shortest distances from src
// to all other vertices using Bellman-Ford algorithm. The
// function also detects negative weight cycle
fn bellman_ford<T>(graph [][]T, src int) {
mut edges := build_map_edges_from_graph(graph)
// this function was done to adapt a graph representation
// from an adjacency matrix to an adjacency list (using a map)
n_edges := edges.len // number of EDGES
// Step 1: Initialize distances from src to all other
// vertices as INFINITE
n_vertex := graph.len // adjc matrix ... n nodes or vertex
mut dist := []int{len: n_vertex, init: large} // dist initialized with `large` instead of INFINITY
// mut path := []int{len: n , init:-1} // previous node of each shortest path
dist[src] = 0
// Step 2: Relax all edges |V| - 1 times. A simple
// shortest path from src to any other vertex can have
// at-most |V| - 1 edges
for _ in 0 .. n_vertex {
for j in 0 .. n_edges {
mut u := edges[j].src
mut v := edges[j].dest
mut weight := edges[j].weight
if (dist[u] != large) && (dist[u] + weight < dist[v]) {
dist[v] = dist[u] + weight
}
}
}
// Step 3: check for negative-weight cycles. The above
// step guarantees shortest distances if graph doesn't
// contain negative weight cycle. If we get a shorter
// path, then there is a cycle.
for j in 0 .. n_vertex {
mut u := edges[j].src
mut v := edges[j].dest
mut weight := edges[j].weight
if (dist[u] != large) && (dist[u] + weight < dist[v]) {
print('\n Graph contains negative weight cycle')
// If negative cycle is detected, simply
// return or an exit(1)
return
}
}
print_sol(dist)
}
fn main() {
// adjacency matrix = cost or weight
graph_01 := [
[0, -1, 4, 0, 0],
[0, 0, 3, 2, 2],
[0, 0, 0, 0, 0],
[0, 1, 5, 0, 0],
[0, 0, 0, -3, 0],
]
// data from https://www.geeksforgeeks.org/bellman-ford-algorithm-dp-23/
graph_02 := [
[0, 2, 0, 6, 0],
[2, 0, 3, 8, 5],
[0, 3, 0, 0, 7],
[6, 8, 0, 0, 9],
[0, 5, 7, 9, 0],
]
// data from https://www.geeksforgeeks.org/prims-minimum-spanning-tree-mst-greedy-algo-5/
/*
The graph:
2 3
(0)--(1)--(2)
| / \ |
6| 8/ \5 |7
| / \ |
(3)-------(4)
9
*/
/*
Let us create following weighted graph
From https://www.geeksforgeeks.org/kruskals-minimum-spanning-tree-algorithm-greedy-algo-2/?ref=lbp
10
0--------1
| \ |
6| 5\ |15
| \ |
2--------3
4
*/
graph_03 := [
[0, 10, 6, 5],
[10, 0, 0, 15],
[6, 0, 0, 4],
[5, 15, 4, 0],
]
// To find the number of columns
// mut cols := an_array[0].len
mut graph := [][]int{} // the graph: adjacency matrix
// for index, g_value in [graph_01, graph_02, graph_03] {
for index, g_value in [graph_01, graph_02, graph_03] {
graph = g_value.clone() // graphs_sample[g].clone() // choose your SAMPLE
// always starting from node 0
start_node := 0
println('\n\n Graph ${index + 1} using Bellman-Ford algorithm (source node: $start_node)')
bellman_ford(graph, start_node)
}
println('\n BYE -- OK')
}
//=================================================


@ -1,4 +1,4 @@
// Author: ccs
// Author: CCS
// I follow literally code in C, done many years ago
fn main() {
// Adjacency matrix as a map
@ -20,10 +20,9 @@ fn breadth_first_search_path(graph map[string][]string, start string, target str
mut path := []string{} // ONE PATH with SUCCESS = array
mut queue := []string{} // a queue ... many paths
// all_nodes := graph.keys() // get a key of this map
n_nodes := graph.len // numbers of nodes of this graph
// a map to store all the nodes visited to avoid cycles
// start all them with False, not visited yet
mut visited := a_map_nodes_bool(n_nodes) // a map fully
mut visited := visited_init(graph) // a map fully
// false ==> not visited yet: {'A': false, 'B': false, 'C': false, 'D': false, 'E': false}
queue << start // first arrival
for queue.len != 0 {
@ -51,19 +50,6 @@ fn breadth_first_search_path(graph map[string][]string, start string, target str
return path
}
// Creating a map for VISITED nodes ...
// starting by false ===> means this node was not visited yet
fn a_map_nodes_bool(size int) map[string]bool {
mut my_map := map[string]bool{} // look this map ...
base := u8(65)
mut key := base.ascii_str()
for i in 0 .. size {
key = u8(base + i).ascii_str()
my_map[key] = false
}
return my_map
}
// classical removing of a node from the start of a queue
fn departure(mut queue []string) string {
mut x := queue[0]
@ -71,6 +57,17 @@ fn departure(mut queue []string) string {
return x
}
// Creating a map of visited nodes, all initialized with false,
// so these nodes are NOT VISITED YET
fn visited_init(a_graph map[string][]string) map[string]bool {
mut array_of_keys := a_graph.keys() // get all keys of this map
mut temp := map[string]bool{} // attention in these initializations with maps
for i in array_of_keys {
temp[i] = false
}
return temp
}
// Based on the current node that is final, search for its parent, already visited, up to the root or start node
fn build_path_reverse(graph map[string][]string, start string, final string, visited map[string]bool) []string {
print('\n\n Nodes visited (true) or no (false): $visited')
@ -90,3 +87,5 @@ fn build_path_reverse(graph map[string][]string, start string, final string, vis
}
return path
}
//======================================================


@ -1,4 +1,4 @@
// Author: ccs
// Author: CCS
// I follow literally code in C, done many years ago
fn main() {
@ -35,8 +35,7 @@ fn depth_first_search_path(graph map[string][]string, start string, target strin
mut path := []string{} // ONE PATH with SUCCESS = array
mut stack := []string{} // a stack ... many nodes
// all_nodes := graph.keys() // get a key of this map
n_nodes := graph.len // numbers of nodes of this graph
mut visited := a_map_nodes_bool(n_nodes) // a map fully
mut visited := visited_init(graph) // a map fully with false in all vertex
// false ... not visited yet: {'A': false, 'B': false, 'C': false, 'D': false, 'E': false}
stack << start // first push on the stack
@ -72,14 +71,15 @@ fn depth_first_search_path(graph map[string][]string, start string, target strin
return path
}
// Creating a map for nodes not VISITED visited ...
// starting by false ===> means this node was not visited yet
fn a_map_nodes_bool(size int) map[string]bool {
mut my_map := map[string]bool{} // look this map ...
for i in 0 .. size {
my_map[u8(65 + i).ascii_str()] = false
// Creating a map of visited nodes, all initialized with false,
// so these nodes are NOT VISITED YET
fn visited_init(a_graph map[string][]string) map[string]bool {
mut array_of_keys := a_graph.keys() // get all keys of this map
mut temp := map[string]bool{} // attention in these initializations with maps
for i in array_of_keys {
temp[i] = false
}
return my_map
return temp
}
// Based on the current node that is final, search for its parent, which is already visited, up to the root or start node
@ -101,3 +101,5 @@ fn build_path_reverse(graph map[string][]string, start string, final string, vis
}
return path
}
//*****************************************************


@ -0,0 +1,241 @@
/*
Exploring Dijkstra,
The data example is from
https://www.geeksforgeeks.org/dijkstras-shortest-path-algorithm-greedy-algo-7/
by CCS
Dijkstra's single source shortest path algorithm.
The program uses an adjacency matrix representation of a graph
This Dijkstra algorithm uses a priority queue to save
the shortest paths. The queue structure has a data field,
which is the number of the node,
and a priority field, which is the shortest distance.
PS: all the pre-requisites of Dijkstra are considered
$ v run file_name.v
// Creating an executable
$ v run file_name.v -o an_executable.EXE
$ ./an_executable.EXE
Code based from : Data Structures and Algorithms Made Easy: Data Structures and Algorithmic Puzzles, Fifth Edition (English Edition)
pseudo code written in C
This idea is quite different: it uses a priority queue to store the current
shortest path evaluated.
The priority queue structure is built using a list to simulate
the queue. A heap is not used in this case.
*/
// a structure
struct NODE {
mut:
data int // NUMBER OF NODE
priority int // Lower values priority indicate ==> higher priority
}
// Function to push according to priority ... the lower priority goes ahead
// The "push" is always kept sorted in pq
fn push_pq<T>(mut prior_queue []T, data int, priority int) {
mut temp := []T{}
lenght_pq := prior_queue.len
mut i := 0
for (i < lenght_pq) && (priority > prior_queue[i].priority) {
temp << prior_queue[i]
i++
}
// INSERTING SORTED in the queue
temp << NODE{data, priority} // do the copy in the right place
// copy the another part (tail) of original prior_queue
for i < lenght_pq {
temp << prior_queue[i]
i++
}
prior_queue = temp.clone() // I am not sure if it the right way
// IS IT THE RIGHT WAY?
}
// Change the priority of a value/node ... if the value exists, change its priority
fn updating_priority<T>(mut prior_queue []T, search_data int, new_priority int) {
mut i := 0
mut lenght_pq := prior_queue.len
for i < lenght_pq {
if search_data == prior_queue[i].data {
prior_queue[i] = NODE{search_data, new_priority} // do the copy in the right place
break
}
i++
// all the list was examined
if i >= lenght_pq {
print('\n This data $search_data does not exist ... PRIORITY QUEUE problem\n')
exit(1) // panic(s string)
}
} // end for
}
// a single departure or remove from queue
fn departure_priority<T>(mut prior_queue []T) int {
mut x := prior_queue[0].data
prior_queue.delete(0) // or .delete_many(0, 1 )
return x
}
// given a NODE v, return a list with all its adjacents
// Take care, only positive EDGES
fn all_adjacents<T>(g [][]T, v int) []int {
mut temp := []int{} //
for i in 0 .. (g.len) {
if g[v][i] > 0 {
temp << i
}
}
return temp
}
// print the costs from origin up to all nodes
fn print_solution<T>(dist []T) {
print('Vertex \tDistance from Source')
for node in 0 .. (dist.len) {
print('\n $node ==> \t ${dist[node]}')
}
}
// print all paths and their cost or weight
fn print_paths_dist<T>(path []T, dist []T) {
print('\n Read the nodes from right to left (a path): \n')
for node in 1 .. (path.len) {
print('\n $node ')
mut i := node
for path[i] != -1 {
print(' <= ${path[i]} ')
i = path[i]
}
print('\t PATH COST: ${dist[node]}')
}
}
// check structure from: https://www.geeksforgeeks.org/dijkstras-shortest-path-algorithm-greedy-algo-7/
// s: source for all nodes
// Two results are obtained ... cost and paths
fn dijkstra(g [][]int, s int) {
mut pq_queue := []NODE{} // creating a priority queue
push_pq(mut pq_queue, s, 0) // goes s with priority 0
mut n := g.len
mut dist := []int{len: n, init: -1} // dist with -1 instead of INFINITY
mut path := []int{len: n, init: -1} // previous node of each shortest path
// Distance of source vertex from itself is always 0
dist[s] = 0
for pq_queue.len != 0 {
mut v := departure_priority(mut pq_queue)
// for all vertices W adjacent to v
mut adjs_of_v := all_adjacents(g, v) // all_ADJ of v ....
// print('\n ADJ ${v} is ${adjs_of_v}')
mut new_dist := 0
for w in adjs_of_v {
new_dist = dist[v] + g[v][w]
if dist[w] == -1 {
dist[w] = new_dist
push_pq(mut pq_queue, w, dist[w])
path[w] = v // collecting the previous node -- lowest weight
}
if dist[w] > new_dist {
dist[w] = new_dist
updating_priority(mut pq_queue, w, dist[w])
path[w] = v //
}
}
}
// print the constructed distance array
print_solution(dist)
// print('\n \n Previous node of shortest path: ${path}')
print_paths_dist(path, dist)
}
/*
Solution Expected
Vertex Distance from Source
0 0
1 4
2 12
3 19
4 21
5 11
6 9
7 8
8 14
*/
fn main() {
// adjacency matrix = cost or weight
graph_01 := [
[0, 4, 0, 0, 0, 0, 0, 8, 0],
[4, 0, 8, 0, 0, 0, 0, 11, 0],
[0, 8, 0, 7, 0, 4, 0, 0, 2],
[0, 0, 7, 0, 9, 14, 0, 0, 0],
[0, 0, 0, 9, 0, 10, 0, 0, 0],
[0, 0, 4, 14, 10, 0, 2, 0, 0],
[0, 0, 0, 0, 0, 2, 0, 1, 6],
[8, 11, 0, 0, 0, 0, 1, 0, 7],
[0, 0, 2, 0, 0, 0, 6, 7, 0],
]
graph_02 := [
[0, 2, 0, 6, 0],
[2, 0, 3, 8, 5],
[0, 3, 0, 0, 7],
[6, 8, 0, 0, 9],
[0, 5, 7, 9, 0],
]
// data from https://www.geeksforgeeks.org/prims-minimum-spanning-tree-mst-greedy-algo-5/
/*
The graph:
2 3
(0)--(1)--(2)
| / \ |
6| 8/ \5 |7
| / \ |
(3)-------(4)
9
*/
/*
Let us create following weighted graph
From https://www.geeksforgeeks.org/kruskals-minimum-spanning-tree-algorithm-greedy-algo-2/?ref=lbp
10
0--------1
| \ |
6| 5\ |15
| \ |
2--------3
4
*/
graph_03 := [
[0, 10, 6, 5],
[10, 0, 0, 15],
[6, 0, 0, 4],
[5, 15, 4, 0],
]
// To find the number of columns
// mut cols := an_array[0].len
mut graph := [][]int{} // the graph: adjacency matrix
// for index, g_value in [graph_01, graph_02, graph_03] {
for index, g_value in [graph_01, graph_02, graph_03] {
graph = g_value.clone() // graphs_sample[g].clone() // choose your SAMPLE
// always starting from node 0
start_node := 0
println('\n\n Graph ${index + 1} using Dijkstra algorithm (source node: $start_node)')
dijkstra(graph, start_node)
}
println('\n BYE -- OK')
}
//********************************************************************


@ -0,0 +1,230 @@
/*
Exploring PRIMS,
The data example is from
https://www.geeksforgeeks.org/prims-minimum-spanning-tree-mst-greedy-algo-5/
by CCS
PS: all the pre-requisites of Dijkstra are considered
$ v run file_name.v
Creating an executable
$ v run file_name.v -o an_executable.EXE
$ ./an_executable.EXE
Code based from : Data Structures and Algorithms Made Easy: Data Structures and Algorithmic Puzzles, Fifth Edition (English Edition)
pseudo code written in C
This idea is quite different: it uses a priority queue to store the current
shortest path evaluated.
The priority queue structure is built using a list to simulate
the queue. A heap is not used in this case.
*/
// a structure
struct NODE {
mut:
data int // number of nodes
priority int // Lower values priority indicate ==> higher priority
}
// Function to push according to priority ... the lower priority goes ahead
// The "push" is always kept sorted in pq
fn push_pq<T>(mut prior_queue []T, data int, priority int) {
mut temp := []T{}
lenght_pq := prior_queue.len
mut i := 0
for (i < lenght_pq) && (priority > prior_queue[i].priority) {
temp << prior_queue[i]
i++
}
// INSERTING SORTED in the queue
temp << NODE{data, priority} // do the copy in the right place
// copy the another part (tail) of original prior_queue
for i < lenght_pq {
temp << prior_queue[i]
i++
}
prior_queue = temp.clone()
// I am not sure if it the right way
// IS IT THE RIGHT WAY?
}
// Change the priority of a value/node ... if the value exists, change its priority
fn updating_priority<T>(mut prior_queue []T, search_data int, new_priority int) {
mut i := 0
mut lenght_pq := prior_queue.len
for i < lenght_pq {
if search_data == prior_queue[i].data {
prior_queue[i] = NODE{search_data, new_priority} // do the copy in the right place
break
}
i++
// all the list was examined
if i >= lenght_pq {
// print('\n Priority Queue: ${prior_queue}')
// print('\n These data ${search_data} and ${new_priority} do not exist ... PRIORITY QUEUE problem\n')
// if it does not find ... then push it
push_pq(mut prior_queue, search_data, new_priority)
// exit(1) // panic(s string)
}
} // end for
}
// a single departure or remove from queue
fn departure_priority<T>(mut prior_queue []T) int {
mut x := prior_queue[0].data
prior_queue.delete(0) // or .delete_many(0, 1 )
return x
}
// given a NODE v, return a list with all its adjacents
// Take care, only positive EDGES
fn all_adjacents<T>(g [][]T, v int) []int {
mut temp := []int{} //
for i in 0 .. (g.len) {
if g[v][i] > 0 {
temp << i
}
}
return temp
}
// print the costs from origin up to all nodes
// A utility function to print the
// constructed MST stored in parent[]
// print all paths and their cost or weight
fn print_solution(path []int, g [][]int) {
// print(' PATH: ${path} ==> ${path.len}')
print(' Edge \tWeight\n')
mut sum := 0
for node in 0 .. (path.len) {
if path[node] == -1 {
print('\n $node <== reference or start node')
} else {
print('\n $node <--> ${path[node]} \t${g[node][path[node]]}')
sum += g[node][path[node]]
}
}
print('\n Minimum Cost Spanning Tree: $sum\n\n')
}
// check structure from: https://www.geeksforgeeks.org/dijkstras-shortest-path-algorithm-greedy-algo-7/
// s: source for all nodes
// Two results are obtained ... cost and paths
fn prim_mst(g [][]int, s int) {
mut pq_queue := []NODE{} // creating a priority queue
push_pq(mut pq_queue, s, 0) // goes s with priority 0
mut n := g.len
mut dist := []int{len: n, init: -1} // dist with -1 instead of INFINITY
mut path := []int{len: n, init: -1} // previous node of each shortest path
// Distance of source vertex from itself is always 0
dist[s] = 0
for pq_queue.len != 0 {
mut v := departure_priority(mut pq_queue)
// for all vertices W adjacent to v
mut adjs_of_v := all_adjacents(g, v) // all_ADJ of v ....
// print('\n :${dist} :: ${pq_queue}')
// print('\n ADJ ${v} is ${adjs_of_v}')
mut new_dist := 0
for w in adjs_of_v {
new_dist = dist[v] + g[v][w]
if dist[w] == -1 {
dist[w] = g[v][w]
push_pq(mut pq_queue, w, dist[w])
path[w] = v // collecting the previous node -- lowest weight
}
if dist[w] > new_dist {
dist[w] = g[v][w] // new_dist//
updating_priority(mut pq_queue, w, dist[w])
path[w] = v // father / previous node
}
}
}
// print('\n \n Previous node of shortest path: ${path}')
// print_paths_dist(path , dist)
print_solution(path, g)
}
/*
Solution Expected graph_02
Edge Weight
0 - 1 2
1 - 2 3
0 - 3 6
1 - 4 5
*/
fn main() {
// adjacency matrix = cost or weight
graph_01 := [
[0, 4, 0, 0, 0, 0, 0, 8, 0],
[4, 0, 8, 0, 0, 0, 0, 11, 0],
[0, 8, 0, 7, 0, 4, 0, 0, 2],
[0, 0, 7, 0, 9, 14, 0, 0, 0],
[0, 0, 0, 9, 0, 10, 0, 0, 0],
[0, 0, 4, 14, 10, 0, 2, 0, 0],
[0, 0, 0, 0, 0, 2, 0, 1, 6],
[8, 11, 0, 0, 0, 0, 1, 0, 7],
[0, 0, 2, 0, 0, 0, 6, 7, 0],
]
graph_02 := [
[0, 2, 0, 6, 0],
[2, 0, 3, 8, 5],
[0, 3, 0, 0, 7],
[6, 8, 0, 0, 9],
[0, 5, 7, 9, 0],
]
// data from https://www.geeksforgeeks.org/prims-minimum-spanning-tree-mst-greedy-algo-5/
/*
The graph:
2 3
(0)--(1)--(2)
| / \ |
6| 8/ \5 |7
| / \ |
(3)-------(4)
9
*/
/*
Let us create following weighted graph
From https://www.geeksforgeeks.org/kruskals-minimum-spanning-tree-algorithm-greedy-algo-2/?ref=lbp
10
0--------1
| \ |
6| 5\ |15
| \ |
2--------3
4
*/
graph_03 := [
[0, 10, 6, 5],
[10, 0, 0, 15],
[6, 0, 0, 4],
[5, 15, 4, 0],
]
// To find the number of columns
// mut cols := an_array[0].len
mut graph := [][]int{} // the graph: adjacency matrix
// for index, g_value in [graph_01, graph_02, graph_03] {
for index, g_value in [graph_01, graph_02, graph_03] {
println('\n Minimal Spanning Tree of graph ${index + 1} using PRIM algorithm')
graph = g_value.clone() // graphs_sample[g].clone() // choose your SAMPLE
// starting from node x ... see the graph's dimension
start_node := 0
prim_mst(graph, start_node)
}
println('\n BYE -- OK')
}
//********************************************************************


@ -11,7 +11,7 @@ fn (h ExampleHandler) handle(req Request) Response {
})
}
mut status_code := 200
res.text = match req.url {
res.body = match req.url {
'/foo' {
'bar\n'
}


@ -8,7 +8,7 @@ fn send_request(mut wg sync.WaitGroup) ?string {
finish := time.ticks()
println('Finish getting time ${finish - start} ms')
wg.done()
return data.text
return data.body
}
fn main() {


@ -16,7 +16,7 @@ fn worker_fetch(p &pool.PoolProcessor, cursor int, worker_id int) voidptr {
println('failed to fetch data from /v0/item/${id}.json')
return pool.no_result
}
story := json.decode(Story, resp.text) or {
story := json.decode(Story, resp.body) or {
println('failed to decode a story')
return pool.no_result
}
@ -30,7 +30,7 @@ fn main() {
println('failed to fetch data from /v0/topstories.json')
return
}
ids := json.decode([]int, resp.text) or {
ids := json.decode([]int, resp.body) or {
println('failed to decode topstories.json')
return
}#[0..10]


@ -95,7 +95,7 @@ pub fn (mut s System) explode(x f32, y f32) {
pub fn (mut s System) free() {
for p in s.pool {
if p == 0 {
if unsafe { p == 0 } {
print(ptr_str(p) + ' ouch')
continue
}
@ -103,7 +103,7 @@ pub fn (mut s System) free() {
}
s.pool.clear()
for p in s.bin {
if p == 0 {
if unsafe { p == 0 } {
print(ptr_str(p) + ' ouch')
continue
}


@ -1,9 +1,13 @@
#!/usr/local/bin/v run
#!/usr/local/bin/v
// The shebang above associates the file to V on Unix-like systems,
// so it can be run just by specifying the path to the file
// once it's made executable using `chmod +x`.
// Note that you can also use: `#!/usr/bin/env -S v crun`, if your system supports the -S flag to env
// The benefit is that in this case, v could be anywhere in your path, while /usr/bin/env is guaranteed
// to be present on most Unix systems in that exact place.
for _ in 0 .. 3 {
println('V script')
}


@ -1611,7 +1611,12 @@ _SOKOL_PRIVATE bool _saudio_backend_init(void) {
fmtex.Format.nAvgBytesPerSec = fmtex.Format.nSamplesPerSec * fmtex.Format.nBlockAlign;
fmtex.Format.cbSize = 22; /* WORD + DWORD + GUID */
fmtex.Samples.wValidBitsPerSample = 32;
fmtex.dwChannelMask = SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT;
if (_saudio.num_channels == 1) {
fmtex.dwChannelMask = SPEAKER_FRONT_CENTER;
}
else {
fmtex.dwChannelMask = SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT;
}
fmtex.SubFormat = _saudio_KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
dur = (REFERENCE_TIME)
(((double)_saudio.buffer_frames) / (((double)_saudio.sample_rate) * (1.0/10000000.0)));


@ -1,7 +1,6 @@
/*
Compability header for stdatomic.h that works for all compilers supported
by V. For TCC libatomic from the operating system is used
Compatibility header for stdatomic.h that works for all compilers supported by V.
For TCC, we use libatomic from the OS.
*/
#ifndef __ATOMIC_H
#define __ATOMIC_H


@ -614,7 +614,7 @@ fn (mut a array) set_unsafe(i int, val voidptr) {
unsafe { vmemcpy(&u8(a.data) + u64(a.element_size) * u64(i), val, a.element_size) }
}
// Private function. Used to implement assigment to the array element.
// Private function. Used to implement assignment to the array element.
fn (mut a array) set(i int, val voidptr) {
$if !no_bounds_checking ? {
if i < 0 || i >= a.len {
@ -636,6 +636,9 @@ fn (mut a array) push(val voidptr) {
// `val` is array.data and user facing usage is `a << [1,2,3]`
[unsafe]
pub fn (mut a3 array) push_many(val voidptr, size int) {
if size <= 0 || isnil(val) {
return
}
a3.ensure_cap(a3.len + size)
if a3.data == val && a3.data != 0 {
// handle `arr << arr`


@ -227,6 +227,9 @@ fn (mut a array) push_noscan(val voidptr) {
// `val` is array.data and user facing usage is `a << [1,2,3]`
[unsafe]
fn (mut a3 array) push_many_noscan(val voidptr, size int) {
if size <= 0 || isnil(val) {
return
}
if a3.data == val && a3.data != 0 {
// handle `arr << arr`
copy := a3.clone()


@ -1590,3 +1590,14 @@ fn test_inline_array_element_access() {
a2 := [1][0]
assert a2 == 1
}
//
fn f(x int, y int) []int {
return [x, y]
}
fn test_2d_array_init_with_it() {
a := [][]int{len: 6, init: f(it, 2 * it)}
assert a == [[0, 0], [1, 2], [2, 4], [3, 6], [4, 8], [5, 10]]
}


@ -55,7 +55,7 @@ fn panic_debug(line_no int, file string, mod string, fn_name string, s string) {
C.exit(1)
}
$if use_libbacktrace ? {
print_libbacktrace(1)
eprint_libbacktrace(1)
} $else {
print_backtrace_skipping_top_frames(1)
}
@ -106,7 +106,7 @@ pub fn panic(s string) {
C.exit(1)
}
$if use_libbacktrace ? {
print_libbacktrace(1)
eprint_libbacktrace(1)
} $else {
print_backtrace_skipping_top_frames(1)
}
@ -268,13 +268,28 @@ fn _write_buf_to_fd(fd int, buf &u8, buf_len int) {
if buf_len <= 0 {
return
}
unsafe {
mut ptr := buf
mut remaining_bytes := buf_len
for remaining_bytes > 0 {
x := C.write(fd, ptr, remaining_bytes)
ptr += x
remaining_bytes -= x
mut ptr := unsafe { buf }
mut remaining_bytes := isize(buf_len)
mut x := isize(0)
$if freestanding || vinix {
unsafe {
for remaining_bytes > 0 {
x = C.write(fd, ptr, remaining_bytes)
ptr += x
remaining_bytes -= x
}
}
} $else {
mut stream := voidptr(C.stdout)
if fd == 2 {
stream = voidptr(C.stderr)
}
unsafe {
for remaining_bytes > 0 {
x = isize(C.fwrite(ptr, 1, remaining_bytes, stream))
ptr += x
remaining_bytes -= x
}
}
}
}
@ -373,6 +388,49 @@ pub fn malloc_noscan(n isize) &u8 {
return res
}
// malloc_uncollectable dynamically allocates a `n` bytes block of memory
// on the heap, which will NOT be garbage-collected (but its contents will).
[unsafe]
pub fn malloc_uncollectable(n isize) &u8 {
if n <= 0 {
panic('malloc_uncollectable($n <= 0)')
}
$if vplayground ? {
if n > 10000 {
panic('allocating more than 10 KB at once is not allowed in the V playground')
}
if total_m > 50 * 1024 * 1024 {
panic('allocating more than 50 MB is not allowed in the V playground')
}
}
$if trace_malloc ? {
total_m += n
C.fprintf(C.stderr, c'malloc_uncollectable %6d total %10d\n', n, total_m)
// print_backtrace()
}
mut res := &u8(0)
$if prealloc {
return unsafe { prealloc_malloc(n) }
} $else $if gcboehm ? {
unsafe {
res = C.GC_MALLOC_UNCOLLECTABLE(n)
}
} $else $if freestanding {
res = unsafe { __malloc(usize(n)) }
} $else {
res = unsafe { C.malloc(n) }
}
if res == 0 {
panic('malloc_uncollectable($n) failed')
}
$if debug_malloc ? {
// Fill in the memory with something != 0 i.e. `M`, so it is easier to spot
// when the calling code wrongly relies on it being zeroed.
unsafe { C.memset(res, 0x4D, n) }
}
return res
}
// v_realloc resizes the memory block `b` with `n` bytes.
// The `b byteptr` must be a pointer to an existing memory block
// previously allocated with `malloc`, `v_calloc` or `vcalloc`.
@ -540,6 +598,21 @@ pub fn memdup_noscan(src voidptr, sz int) voidptr {
}
}
// memdup_uncollectable dynamically allocates a `sz` bytes block of memory
// on the heap, which will NOT be garbage-collected (but its contents will).
// memdup_uncollectable then copies the contents of `src` into the allocated
// space and returns a pointer to the newly allocated space.
[unsafe]
pub fn memdup_uncollectable(src voidptr, sz int) voidptr {
if sz == 0 {
return vcalloc(1)
}
unsafe {
mem := malloc_uncollectable(sz)
return C.memcpy(mem, src, sz)
}
}
[inline]
fn v_fixed_index(i int, len int) int {
$if !no_bounds_checking ? {


@ -130,6 +130,10 @@ pub:
[markused]
fn v_segmentation_fault_handler(signal int) {
eprintln('signal 11: segmentation fault')
print_backtrace()
$if use_libbacktrace ? {
eprint_libbacktrace(1)
} $else {
print_backtrace()
}
exit(128 + 11)
}


@ -3,16 +3,16 @@ module builtin
$if dynamic_boehm ? {
$if windows {
$if tinyc {
#flag -I@VEXEROOT/thirdparty/libgc/include
#flag -L@VEXEROOT/thirdparty/tcc/lib
#flag -I @VEXEROOT/thirdparty/libgc/include
#flag -L @VEXEROOT/thirdparty/tcc/lib
#flag -lgc
} $else $if msvc {
#flag -DGC_BUILTIN_ATOMIC=1
#flag -I@VEXEROOT/thirdparty/libgc/include
#flag -I @VEXEROOT/thirdparty/libgc/include
} $else {
#flag -DGC_WIN32_THREADS=1
#flag -DGC_BUILTIN_ATOMIC=1
#flag -I@VEXEROOT/thirdparty/libgc
#flag -I @VEXEROOT/thirdparty/libgc
#flag @VEXEROOT/thirdparty/libgc/gc.o
}
} $else {
@ -31,21 +31,21 @@ $if dynamic_boehm ? {
#flag -DGC_BUILTIN_ATOMIC=1
$if macos || linux {
#flag -DGC_PTHREADS=1
#flag -I@VEXEROOT/thirdparty/libgc/include
#flag -lpthread
$if (prod && !tinyc && !debug) || !(amd64 || arm64 || i386 || arm32) {
#flag -I @VEXEROOT/thirdparty/libgc/include
$if (!macos && prod && !tinyc && !debug) || !(amd64 || arm64 || i386 || arm32) {
// TODO: replace the architecture check with a `!$exists("@VEXEROOT/thirdparty/tcc/lib/libgc.a")` comptime call
#flag @VEXEROOT/thirdparty/libgc/gc.o
} $else {
#flag @VEXEROOT/thirdparty/tcc/lib/libgc.a
}
#flag -ldl
#flag -lpthread
} $else $if freebsd {
// Tested on FreeBSD 13.0-RELEASE-p3, with clang, gcc and tcc:
#flag -DBUS_PAGE_FAULT=T_PAGEFLT
#flag -DGC_PTHREADS=1
$if !tinyc {
#flag -I@VEXEROOT/thirdparty/libgc/include
#flag -I @VEXEROOT/thirdparty/libgc/include
#flag @VEXEROOT/thirdparty/libgc/gc.o
}
$if tinyc {
@ -59,15 +59,14 @@ $if dynamic_boehm ? {
#flag $first_existing("/usr/local/lib/libgc.a", "/usr/lib/libgc.a")
#flag -lpthread
} $else $if windows {
#flag -DGC_NOT_DLL=1
#flag -DGC_WIN32_THREADS=1
$if tinyc {
#flag -I@VEXEROOT/thirdparty/libgc/include
#flag -L@VEXEROOT/thirdparty/tcc/lib
#flag -lgc
#flag -I @VEXEROOT/thirdparty/libgc/include
#flag @VEXEROOT/thirdparty/tcc/lib/libgc.a
#flag -luser32
} $else {
#flag -DGC_NOT_DLL=1
#flag -DGC_WIN32_THREADS=1
#flag -DGC_BUILTIN_ATOMIC=1
#flag -I@VEXEROOT/thirdparty/libgc/include
#flag -I @VEXEROOT/thirdparty/libgc/include
#flag @VEXEROOT/thirdparty/libgc/gc.o
}
} $else $if $pkgconfig('bdw-gc') {


@ -33,11 +33,11 @@ fn init_bt_state() &C.backtrace_state {
}
// for bt_error_callback
// struct BacktraceData {
// state &C.backtrace_state
// }
struct BacktraceOptions {
stdin bool = true
}
fn bt_print_callback(data voidptr, pc voidptr, filename_ptr &char, line int, fn_name_ptr &char) int {
fn bt_print_callback(data &BacktraceOptions, pc voidptr, filename_ptr &char, line int, fn_name_ptr &char) int {
filename := if isnil(filename_ptr) { '???' } else { unsafe { filename_ptr.vstring() } }
fn_name := if isnil(fn_name_ptr) {
'???'
@ -46,7 +46,12 @@ fn bt_print_callback(data voidptr, pc voidptr, filename_ptr &char, line int, fn_
}
// keep it for later
// pc_64 := u64(pc)
println('$filename:$line: by $fn_name')
bt_str := '$filename:$line: by $fn_name'
if data.stdin {
println(bt_str)
} else {
eprintln(bt_str)
}
return 0
}
@ -81,6 +86,17 @@ fn print_libbacktrace(frames_to_skip int) {
$if no_backtrace ? {
return
}
// data := &BacktraceData{bt_state}
C.backtrace_full(bt_state, frames_to_skip, bt_print_callback, bt_error_callback, 0)
data := &BacktraceOptions{}
C.backtrace_full(bt_state, frames_to_skip, bt_print_callback, bt_error_callback, data)
}
[noinline]
fn eprint_libbacktrace(frames_to_skip int) {
$if no_backtrace ? {
return
}
data := &BacktraceOptions{
stdin: false
}
C.backtrace_full(bt_state, frames_to_skip, bt_print_callback, bt_error_callback, data)
}


@ -2,3 +2,7 @@ module builtin
fn print_libbacktrace(frames_to_skip int) {
}
[noinline]
fn eprint_libbacktrace(frames_to_skip int) {
}


@ -14,6 +14,14 @@ $if js_freestanding {
#globalPrint = globalThis.print
}
pub fn flush_stdout() {
// needed for parity with builtin.c.v
}
pub fn flush_stderr() {
// needed for parity with builtin.c.v
}
pub fn println(s string) {
$if js_freestanding {
#globalPrint(s.str)


@ -205,7 +205,11 @@ pub fn (s string) hash() int {
// int returns the value of the string as an integer `'1'.int() == 1`.
pub fn (s string) int() int {
return int(JS.parseInt(s.str))
res := int(0)
#if (typeof(s) == "string") { res.val = parseInt(s) }
#else { res.val = parseInt(s.str) }
return res
}
// i64 returns the value of the string as i64 `'1'.i64() == i64(1)`.


@ -113,14 +113,6 @@ struct Option {
// derived Option_xxx types
}
fn opt_ok(data voidptr, mut option Option, size int) {
unsafe {
*option = Option{}
// use err to get the end of OptionBase and then memcpy into it
vmemcpy(&u8(&option.err) + sizeof(IError), data, size)
}
}
// option is the base of V's internal optional return system.
struct _option {
state u8
@ -130,6 +122,14 @@ struct _option {
// derived _option_xxx types
}
fn _option_ok(data voidptr, mut option _option, size int) {
unsafe {
*option = _option{}
// use err to get the end of OptionBase and then memcpy into it
vmemcpy(&u8(&option.err) + sizeof(IError), data, size)
}
}
fn opt_ok2(data voidptr, mut option _option, size int) {
unsafe {
*option = _option{}


@ -32,9 +32,10 @@ mut:
[unsafe]
fn vmemory_block_new(prev &VMemoryBlock, at_least isize) &VMemoryBlock {
mut v := unsafe { &VMemoryBlock(C.calloc(1, sizeof(VMemoryBlock))) }
if prev != 0 {
if unsafe { prev != 0 } {
v.id = prev.id + 1
}
v.previous = prev
block_size := if at_least < prealloc_block_size { prealloc_block_size } else { at_least }
v.start = unsafe { C.malloc(block_size) }
@ -79,7 +80,7 @@ fn prealloc_vcleanup() {
// The second loop however should *not* allocate at all.
mut nr_mallocs := i64(0)
mut mb := g_memory_block
for mb != 0 {
for unsafe { mb != 0 } {
nr_mallocs += mb.mallocs
eprintln('> freeing mb.id: ${mb.id:3} | cap: ${mb.cap:7} | rem: ${mb.remaining:7} | start: ${voidptr(mb.start)} | current: ${voidptr(mb.current)} | diff: ${u64(mb.current) - u64(mb.start):7} bytes | mallocs: $mb.mallocs')
mb = mb.previous


@ -37,26 +37,26 @@ pub enum StrIntpType {
}
pub fn (x StrIntpType) str() string {
match x {
.si_no_str { return 'no_str' }
.si_c { return 'c' }
.si_u8 { return 'u8' }
.si_i8 { return 'i8' }
.si_u16 { return 'u16' }
.si_i16 { return 'i16' }
.si_u32 { return 'u32' }
.si_i32 { return 'i32' }
.si_u64 { return 'u64' }
.si_i64 { return 'i64' }
.si_f32 { return 'f32' }
.si_f64 { return 'f64' }
.si_g32 { return 'f32' } // g32 format use f32 data
.si_g64 { return 'f64' } // g64 format use f64 data
.si_e32 { return 'f32' } // e32 format use f32 data
.si_e64 { return 'f64' } // e64 format use f64 data
.si_s { return 's' }
.si_p { return 'p' }
.si_vp { return 'vp' }
return match x {
.si_no_str { 'no_str' }
.si_c { 'c' }
.si_u8 { 'u8' }
.si_i8 { 'i8' }
.si_u16 { 'u16' }
.si_i16 { 'i16' }
.si_u32 { 'u32' }
.si_i32 { 'i32' }
.si_u64 { 'u64' }
.si_i64 { 'i64' }
.si_f32 { 'f32' }
.si_f64 { 'f64' }
.si_g32 { 'f32' } // g32 format use f32 data
.si_g64 { 'f64' } // g64 format use f64 data
.si_e32 { 'f32' } // e32 format use f32 data
.si_e64 { 'f64' } // e64 format use f64 data
.si_s { 's' }
.si_p { 'p' }
.si_vp { 'vp' }
}
}


@ -53,7 +53,7 @@ pub fn (cmd Command) str() string {
res << ' cb execute: $cmd.execute'
res << ' cb pre_execute: $cmd.pre_execute'
res << ' cb post_execute: $cmd.post_execute'
if cmd.parent == 0 {
if unsafe { cmd.parent == 0 } {
res << ' parent: &Command(0)'
} else {
res << ' parent: &Command{$cmd.parent.name ...}'


@ -49,7 +49,7 @@ pub fn print_help_for_command(help_cmd Command) ? {
}
print(cmd.help_message())
} else {
if help_cmd.parent != 0 {
if unsafe { help_cmd.parent != 0 } {
print(help_cmd.parent.help_message())
}
}


@ -41,7 +41,7 @@ pub fn print_manpage_for_command(man_cmd Command) ? {
}
print(cmd.manpage())
} else {
if man_cmd.parent != 0 {
if unsafe { man_cmd.parent != 0 } {
print(man_cmd.parent.manpage())
}
}
@ -55,7 +55,7 @@ pub fn (cmd Command) manpage() string {
mdoc += '.Os\n.Sh NAME\n.Nm ${cmd.full_name().replace(' ', '-')}\n.Nd $cmd.description\n'
mdoc += '.Sh SYNOPSIS\n'
mdoc += '.Nm $cmd.root().name\n'
if cmd.parent != 0 {
if unsafe { cmd.parent != 0 } {
mut parents := []Command{}
if !cmd.parent.is_root() {
parents.prepend(cmd.parent)
@ -96,7 +96,7 @@ pub fn (cmd Command) manpage() string {
}
if cmd.commands.len > 0 {
mdoc += '.Nm $cmd.root().name\n'
if cmd.parent != 0 {
if unsafe { cmd.parent != 0 } {
mut parents := []Command{}
if !cmd.parent.is_root() {
parents.prepend(cmd.parent)
@ -158,7 +158,7 @@ pub fn (cmd Command) manpage() string {
if cmd.commands.len > 0 {
mdoc += '.Sh SEE ALSO\n'
mut cmds := []string{}
if cmd.parent != 0 {
if unsafe { cmd.parent != 0 } {
cmds << cmd.parent.full_name().replace(' ', '-')
}
for c in cmd.commands {


@ -55,7 +55,7 @@ fn C.DestroyWindow(hwnd C.HWND)
//
// System "copy" and "paste" actions utilize the clipboard for temporary storage.
[heap]
struct Clipboard {
pub struct Clipboard {
max_retries int
retry_delay int
mut:


@ -1,4 +1,8 @@
## Description:
`compress` is a namespace for (multiple) compression algorithms supported by V.
At the moment, only `compress.zlib` is implemented.
At the moment, the following compression algorithms are implemented:
- `compress.deflate`
- `compress.gzip`
- `compress.zlib`
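A minimal round-trip example (a sketch, assuming `compress.zlib` exposes `compress`/`decompress` like its sibling modules):
```v
import compress.zlib

fn main() {
	uncompressed := 'Hello world!'
	compressed := zlib.compress(uncompressed.bytes()) or { panic(err) }
	decompressed := zlib.decompress(compressed) or { panic(err) }
	assert decompressed == uncompressed.bytes()
}
```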


@ -0,0 +1,44 @@
module compress
#flag -I @VEXEROOT/thirdparty/zip
#include "miniz.h"
pub const max_size = u64(1 << 31)
fn C.tdefl_compress_mem_to_heap(source_buf voidptr, source_buf_len usize, out_len &usize, flags int) voidptr
fn C.tinfl_decompress_mem_to_heap(source_buf voidptr, source_buf_len usize, out_len &usize, flags int) voidptr
// compresses an array of bytes using the provided flags and returns the compressed bytes in a new array
// NB: this is a low-level API; a high-level implementation like zlib/gzip should be preferred
[manualfree]
pub fn compress(data []u8, flags int) ?[]u8 {
if u64(data.len) > compress.max_size {
return error('data too large ($data.len > $compress.max_size)')
}
mut out_len := usize(0)
address := C.tdefl_compress_mem_to_heap(data.data, data.len, &out_len, flags)
if address == 0 {
return error('compression failed')
}
if u64(out_len) > compress.max_size {
return error('compressed data is too large ($out_len > $compress.max_size)')
}
return unsafe { address.vbytes(int(out_len)) }
}
// decompresses an array of bytes using the provided flags and returns the decompressed bytes in a new array
// NB: this is a low-level API; a high-level implementation like zlib/gzip should be preferred
[manualfree]
pub fn decompress(data []u8, flags int) ?[]u8 {
mut out_len := usize(0)
address := C.tinfl_decompress_mem_to_heap(data.data, data.len, &out_len, flags)
if address == 0 {
return error('decompression failed')
}
if u64(out_len) > compress.max_size {
return error('decompressed data is too large ($out_len > $compress.max_size)')
}
return unsafe { address.vbytes(int(out_len)) }
}


@ -0,0 +1,18 @@
## Description:
`compress.deflate` is a module that assists in the compression and
decompression of binary data using `deflate` compression.
## Examples:
```v
import compress.deflate
fn main() {
uncompressed := 'Hello world!'
compressed := deflate.compress(uncompressed.bytes())?
decompressed := deflate.decompress(compressed)?
assert decompressed == uncompressed.bytes()
}
```


@ -0,0 +1,15 @@
module deflate
import compress
// compresses an array of bytes using deflate and returns the compressed bytes in a new array
// Example: compressed := deflate.compress(b)?
pub fn compress(data []u8) ?[]u8 {
return compress.compress(data, 0)
}
// decompresses an array of bytes using deflate and returns the decompressed bytes in a new array
// Example: decompressed := deflate.decompress(b)?
pub fn decompress(data []u8) ?[]u8 {
return compress.decompress(data, 0)
}


@ -0,0 +1,12 @@
module deflate
const gzip_magic_numbers = [u8(0x1f), 0x8b]
fn test_gzip() ? {
uncompressed := 'Hello world!'
compressed := compress(uncompressed.bytes())?
first2 := compressed[0..2]
assert first2 != deflate.gzip_magic_numbers
decompressed := decompress(compressed)?
assert decompressed == uncompressed.bytes()
}


@ -0,0 +1,18 @@
## Description:
`compress.gzip` is a module that assists in the compression and
decompression of binary data using `gzip` compression.
## Examples:
```v
import compress.gzip
fn main() {
uncompressed := 'Hello world!'
compressed := gzip.compress(uncompressed.bytes())?
decompressed := gzip.decompress(compressed)?
assert decompressed == uncompressed.bytes()
}
```


@ -0,0 +1,121 @@
// [rfc1952](https://datatracker.ietf.org/doc/html/rfc1952) compliant
// gzip compression/decompression
module gzip
import compress
import hash.crc32
// compresses an array of bytes using gzip and returns the compressed bytes in a new array
// Example: compressed := gzip.compress(b)?
pub fn compress(data []u8) ?[]u8 {
compressed := compress.compress(data, 0)?
// header
mut result := [
u8(0x1f), // magic numbers (1F 8B)
0x8b,
0x08, // deflate
0x00, // header flags
0x00, // 4-byte timestamp, 0 = no timestamp (00 00 00 00)
0x00,
0x00,
0x00,
0x00, // extra flags
0xff, // operating system id (0xff = unknown)
] // 10 bytes
result << compressed
// trailer
checksum := crc32.sum(data)
length := data.len
result << [
u8(checksum >> 24),
u8(checksum >> 16),
u8(checksum >> 8),
u8(checksum),
u8(length >> 24),
u8(length >> 16),
u8(length >> 8),
u8(length),
] // 8 bytes
return result
}
[params]
pub struct DecompressParams {
verify_header_checksum bool = true
verify_length bool = true
verify_checksum bool = true
}
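// Illustrative usage (not part of the original file): since DecompressParams is a [params]
// struct, callers can override individual checks with trailing struct syntax, e.g.
// `gzip.decompress(data, verify_checksum: false)?`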
// decompresses an array of bytes using gzip and returns the decompressed bytes in a new array
// Example: decompressed := gzip.decompress(b)?
pub fn decompress(data []u8, params DecompressParams) ?[]u8 {
if data.len < 18 {
return error('data is too short, not gzip compressed?')
} else if data[0] != 0x1f || data[1] != 0x8b {
return error('wrong magic numbers, not gzip compressed?')
} else if data[2] != 0x08 {
return error('gzip data is not compressed with DEFLATE')
}
mut header_length := 10
// parse flags; we ignore most of them, but we still need to parse them
// correctly, so we don't accidentally decompress something that belongs
// to the header
if data[4] & 0b1110_0000 > 0 { // reserved bits
// rfc 1952 2.3.1.2 Compliance
// A compliant decompressor must give an error indication if any
// reserved bit is non-zero, since such a bit could indicate the
// presence of a new field that would cause subsequent data to be
// interpreted incorrectly.
return error('reserved flags are set, unsupported field detected')
}
// if data[4] & 0b0000_0001 {} // FTEXT
if data[4] & 0b0000_0100 > 0 { // FEXTRA, extra data
xlen := data[header_length]
header_length += xlen + 1
}
if data[4] & 0b0000_1000 > 0 { // FNAME, file name
// filename is zero-terminated, so skip until we hit a zero byte
for header_length < data.len && data[header_length] != 0x00 {
header_length++
}
header_length++
}
if data[4] & 0b0001_0000 > 0 { // FCOMMENT
// comment is zero-terminated, so skip until we hit a zero byte
for header_length < data.len && data[header_length] != 0x00 {
header_length++
}
header_length++
}
if data[4] & 0b0000_0010 > 0 { // FHCRC, flag header crc
if header_length + 12 > data.len {
return error('data too short')
}
checksum_header := crc32.sum(data[..header_length])
checksum_header_expected := (u32(data[header_length]) << 24) | (u32(data[header_length + 1]) << 16) | (u32(data[
header_length + 2]) << 8) | data[header_length + 3]
if params.verify_header_checksum && checksum_header != checksum_header_expected {
return error('header checksum verification failed')
}
header_length += 4
}
if header_length + 8 > data.len {
return error('data too short')
}
decompressed := compress.decompress(data[header_length..data.len - 8], 0)?
length_expected := (u32(data[data.len - 4]) << 24) | (u32(data[data.len - 3]) << 16) | (u32(data[data.len - 2]) << 8) | data[data.len - 1]
if params.verify_length && decompressed.len != length_expected {
return error('length verification failed, got $decompressed.len, expected $length_expected')
}
checksum := crc32.sum(decompressed)
checksum_expected := (u32(data[data.len - 8]) << 24) | (u32(data[data.len - 7]) << 16) | (u32(data[data.len - 6]) << 8) | data[data.len - 5]
if params.verify_checksum && checksum != checksum_expected {
return error('checksum verification failed')
}
return decompressed
}
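
The new `DecompressParams` struct above makes each of the header and trailer checks optional. A rough usage sketch, with the function and field names taken from the diff and the `or` handling added purely for illustration:

```v
import compress.gzip

// round-trips a payload while skipping the trailing CRC32/length checks,
// e.g. when inspecting a stream whose trailer is known to be damaged
fn inspect_payload(data []u8) []u8 {
	return gzip.decompress(data, verify_checksum: false, verify_length: false) or { panic(err) }
}
```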

View File

@ -0,0 +1,134 @@
module gzip
import hash.crc32
fn test_gzip() ? {
uncompressed := 'Hello world!'
compressed := compress(uncompressed.bytes())?
decompressed := decompress(compressed)?
assert decompressed == uncompressed.bytes()
}
fn assert_decompress_error(data []u8, reason string) ? {
decompress(data) or {
assert err.msg() == reason
return
}
return error('did not error')
}
fn test_gzip_invalid_too_short() ? {
assert_decompress_error([]u8{}, 'data is too short, not gzip compressed?')?
}
fn test_gzip_invalid_magic_numbers() ? {
assert_decompress_error([]u8{len: 100}, 'wrong magic numbers, not gzip compressed?')?
}
fn test_gzip_invalid_compression() ? {
mut data := []u8{len: 100}
data[0] = 0x1f
data[1] = 0x8b
assert_decompress_error(data, 'gzip data is not compressed with DEFLATE')?
}
fn test_gzip_with_ftext() ? {
uncompressed := 'Hello world!'
mut compressed := compress(uncompressed.bytes())?
compressed[4] |= 0b0000_0001 // FTEXT
decompressed := decompress(compressed)?
assert decompressed == uncompressed.bytes()
}
fn test_gzip_with_fname() ? {
uncompressed := 'Hello world!'
mut compressed := compress(uncompressed.bytes())?
compressed[4] |= 0b0000_1000
compressed.insert(10, `h`)
compressed.insert(11, `i`)
compressed.insert(12, 0x00)
decompressed := decompress(compressed)?
assert decompressed == uncompressed.bytes()
}
fn test_gzip_with_fcomment() ? {
uncompressed := 'Hello world!'
mut compressed := compress(uncompressed.bytes())?
compressed[4] |= 0b0001_0000
compressed.insert(10, `h`)
compressed.insert(11, `i`)
compressed.insert(12, 0x00)
decompressed := decompress(compressed)?
assert decompressed == uncompressed.bytes()
}
fn test_gzip_with_fname_fcomment() ? {
uncompressed := 'Hello world!'
mut compressed := compress(uncompressed.bytes())?
compressed[4] |= 0b0001_1000
compressed.insert(10, `h`)
compressed.insert(11, `i`)
compressed.insert(12, 0x00)
compressed.insert(10, `h`)
compressed.insert(11, `i`)
compressed.insert(12, 0x00)
decompressed := decompress(compressed)?
assert decompressed == uncompressed.bytes()
}
fn test_gzip_with_fextra() ? {
uncompressed := 'Hello world!'
mut compressed := compress(uncompressed.bytes())?
compressed[4] |= 0b0000_0100
compressed.insert(10, 2)
compressed.insert(11, `h`)
compressed.insert(12, `i`)
decompressed := decompress(compressed)?
assert decompressed == uncompressed.bytes()
}
fn test_gzip_with_hcrc() ? {
uncompressed := 'Hello world!'
mut compressed := compress(uncompressed.bytes())?
compressed[4] |= 0b0000_0010
checksum := crc32.sum(compressed[..10])
compressed.insert(10, u8(checksum >> 24))
compressed.insert(11, u8(checksum >> 16))
compressed.insert(12, u8(checksum >> 8))
compressed.insert(13, u8(checksum))
decompressed := decompress(compressed)?
assert decompressed == uncompressed.bytes()
}
fn test_gzip_with_invalid_hcrc() ? {
uncompressed := 'Hello world!'
mut compressed := compress(uncompressed.bytes())?
compressed[4] |= 0b0000_0010
checksum := crc32.sum(compressed[..10])
compressed.insert(10, u8(checksum >> 24))
compressed.insert(11, u8(checksum >> 16))
compressed.insert(12, u8(checksum >> 8))
compressed.insert(13, u8(checksum + 1))
assert_decompress_error(compressed, 'header checksum verification failed')?
}
fn test_gzip_with_invalid_checksum() ? {
uncompressed := 'Hello world!'
mut compressed := compress(uncompressed.bytes())?
compressed[compressed.len - 5] += 1
assert_decompress_error(compressed, 'checksum verification failed')?
}
fn test_gzip_with_invalid_length() ? {
uncompressed := 'Hello world!'
mut compressed := compress(uncompressed.bytes())?
compressed[compressed.len - 1] += 1
assert_decompress_error(compressed, 'length verification failed, got 12, expected 13')?
}
fn test_gzip_with_invalid_flags() ? {
uncompressed := 'Hello world!'
mut compressed := compress(uncompressed.bytes())?
compressed[4] |= 0b1000_0000
assert_decompress_error(compressed, 'reserved flags are set, unsupported field detected')?
}

View File

@ -1,60 +1,17 @@
module zlib
#flag -I @VEXEROOT/thirdparty/zip
#include "miniz.h"
pub const max_size = u64(1 << 31)
fn C.tdefl_compress_mem_to_heap(source_buf voidptr, source_buf_len usize, out_len &usize, flags int) voidptr
fn C.tinfl_decompress_mem_to_heap(source_buf voidptr, source_buf_len usize, out_len &usize, flags int) voidptr
import compress
// compresses an array of bytes using zlib and returns the compressed bytes in a new array
// Example: compressed := zlib.compress(b)?
[manualfree]
pub fn compress(data []u8) ?[]u8 {
if u64(data.len) > zlib.max_size {
return error('data too large ($data.len > $zlib.max_size)')
}
mut out_len := usize(0)
// flags = TDEFL_WRITE_ZLIB_HEADER (0x01000)
address := C.tdefl_compress_mem_to_heap(data.data, data.len, &out_len, 0x01000)
if address == 0 {
return error('compression failed')
}
if u64(out_len) > zlib.max_size {
return error('compressed data is too large ($out_len > $zlib.max_size)')
}
compressed := unsafe {
address.vbytes(int(out_len))
}
copy := compressed.clone()
unsafe {
free(address)
}
return copy
return compress.compress(data, 0x01000)
}
// decompresses an array of bytes using zlib and returns the decompressed bytes in a new array
// Example: decompressed := zlib.decompress(b)?
[manualfree]
pub fn decompress(data []u8) ?[]u8 {
mut out_len := usize(0)
// flags = TINFL_FLAG_PARSE_ZLIB_HEADER (0x1)
address := C.tinfl_decompress_mem_to_heap(data.data, data.len, &out_len, 0x1)
if address == 0 {
return error('decompression failed')
}
if u64(out_len) > zlib.max_size {
return error('decompressed data is too large ($out_len > $zlib.max_size)')
}
decompressed := unsafe {
address.vbytes(int(out_len))
}
copy := decompressed.clone()
unsafe {
free(address)
}
return copy
return compress.decompress(data, 0x1)
}
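
The public zlib API is unchanged by this rewrite; only the bodies now delegate to the shared `compress` module. A round trip therefore still looks roughly like the gzip example earlier (a minimal sketch, error handling simplified):

```v
import compress.zlib

// compresses the input and immediately decompresses it again,
// propagating any error from either step to the caller
fn roundtrip(input []u8) ?[]u8 {
	compressed := zlib.compress(input)?
	return zlib.decompress(compressed)
}
```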

View File

@ -9,8 +9,14 @@
module md5
import math.bits
import encoding.binary
[direct_array_access; inline]
fn get_le_u32(b []u8, start int) u32 {
return u32(b[start]) | (u32(b[1 + start]) << u32(8)) | (u32(b[2 + start]) << u32(16)) | (u32(b[
3 + start]) << u32(24))
}
[direct_array_access]
fn block_generic(mut dig Digest, p []u8) {
// load state
mut a := dig.s[0]
@ -19,8 +25,6 @@ fn block_generic(mut dig Digest, p []u8) {
mut d := dig.s[3]
for i := 0; i <= p.len - block_size; i += block_size {
mut q := p[i..]
q = q[..block_size]
// save current state
aa := a
bb := b
@ -28,22 +32,22 @@ fn block_generic(mut dig Digest, p []u8) {
dd := d
// load input block
x0 := binary.little_endian_u32(q[4 * 0x0..])
x1 := binary.little_endian_u32(q[4 * 0x1..])
x2 := binary.little_endian_u32(q[4 * 0x2..])
x3 := binary.little_endian_u32(q[4 * 0x3..])
x4 := binary.little_endian_u32(q[4 * 0x4..])
x5 := binary.little_endian_u32(q[4 * 0x5..])
x6 := binary.little_endian_u32(q[4 * 0x6..])
x7 := binary.little_endian_u32(q[4 * 0x7..])
x8 := binary.little_endian_u32(q[4 * 0x8..])
x9 := binary.little_endian_u32(q[4 * 0x9..])
xa := binary.little_endian_u32(q[4 * 0xa..])
xb := binary.little_endian_u32(q[4 * 0xb..])
xc := binary.little_endian_u32(q[4 * 0xc..])
xd := binary.little_endian_u32(q[4 * 0xd..])
xe := binary.little_endian_u32(q[4 * 0xe..])
xf := binary.little_endian_u32(q[4 * 0xf..])
x0 := get_le_u32(p, 4 * 0x0 + i)
x1 := get_le_u32(p, 4 * 0x1 + i)
x2 := get_le_u32(p, 4 * 0x2 + i)
x3 := get_le_u32(p, 4 * 0x3 + i)
x4 := get_le_u32(p, 4 * 0x4 + i)
x5 := get_le_u32(p, 4 * 0x5 + i)
x6 := get_le_u32(p, 4 * 0x6 + i)
x7 := get_le_u32(p, 4 * 0x7 + i)
x8 := get_le_u32(p, 4 * 0x8 + i)
x9 := get_le_u32(p, 4 * 0x9 + i)
xa := get_le_u32(p, 4 * 0xa + i)
xb := get_le_u32(p, 4 * 0xb + i)
xc := get_le_u32(p, 4 * 0xc + i)
xd := get_le_u32(p, 4 * 0xd + i)
xe := get_le_u32(p, 4 * 0xe + i)
xf := get_le_u32(p, 4 * 0xf + i)
// round 1
a = b + bits.rotate_left_32((((c ^ d) & b) ^ d) + a + x0 + u32(0xd76aa478), 7)
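
The replacement `get_le_u32` helper reads a little-endian `u32` directly from the full input buffer at offset `4 * n + i`, instead of first reslicing `p` into `q` for every block. A minimal illustration of what it computes (this would have to live inside the `md5` module, since the helper is not public):

```v
// 0x78 | 0x56 << 8 | 0x34 << 16 | 0x12 << 24 == 0x12345678
b := [u8(0x78), 0x56, 0x34, 0x12]
assert get_le_u32(b, 0) == u32(0x12345678)
```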

View File

@ -76,13 +76,13 @@ pub fn (mut bst BSTree<T>) insert(value T) bool {
// insert_helper walks the tree and inserts the given node.
fn (mut bst BSTree<T>) insert_helper(mut node BSTreeNode<T>, value T) bool {
if node.value < value {
if node.right != 0 && node.right.is_init {
if unsafe { node.right != 0 } && node.right.is_init {
return bst.insert_helper(mut node.right, value)
}
node.right = new_node(node, value)
return true
} else if node.value > value {
if node.left != 0 && node.left.is_init {
if unsafe { node.left != 0 } && node.left.is_init {
return bst.insert_helper(mut node.left, value)
}
node.left = new_node(node, value)
@ -99,7 +99,7 @@ pub fn (bst &BSTree<T>) contains(value T) bool {
// contains_helper is a helper function to walk the tree, and return
// the absence or presence of the `value`.
fn (bst &BSTree<T>) contains_helper(node &BSTreeNode<T>, value T) bool {
if node == 0 || !node.is_init {
if unsafe { node == 0 } || !node.is_init {
return false
}
if node.value < value {
@ -124,12 +124,12 @@ fn (mut bst BSTree<T>) remove_helper(mut node BSTreeNode<T>, value T, left bool)
return false
}
if node.value == value {
if node.left != 0 && node.left.is_init {
if unsafe { node.left != 0 } && node.left.is_init {
// In order to remove the element we need to bring up as parent the max of the
// left sub-tree.
mut max_node := bst.get_max_from_right(node.left)
node.bind(mut max_node, true)
} else if node.right != 0 && node.right.is_init {
} else if unsafe { node.right != 0 } && node.right.is_init {
// Bring up the element with the minimum value in the right sub-tree.
mut min_node := bst.get_min_from_left(node.right)
node.bind(mut min_node, false)
@ -153,11 +153,11 @@ fn (mut bst BSTree<T>) remove_helper(mut node BSTreeNode<T>, value T, left bool)
// get_max_from_right returns the max element of the BST following the right branch.
fn (bst &BSTree<T>) get_max_from_right(node &BSTreeNode<T>) &BSTreeNode<T> {
if node == 0 {
if unsafe { node == 0 } {
return new_none_node<T>(false)
}
right_node := node.right
if right_node == 0 || !right_node.is_init {
if unsafe { right_node == 0 } || !right_node.is_init {
return node
}
return bst.get_max_from_right(right_node)
@ -165,11 +165,11 @@ fn (bst &BSTree<T>) get_max_from_right(node &BSTreeNode<T>) &BSTreeNode<T> {
// get_min_from_left returns the min element of the BST by following the left branch.
fn (bst &BSTree<T>) get_min_from_left(node &BSTreeNode<T>) &BSTreeNode<T> {
if node == 0 {
if unsafe { node == 0 } {
return new_none_node<T>(false)
}
left_node := node.left
if left_node == 0 || !left_node.is_init {
if unsafe { left_node == 0 } || !left_node.is_init {
return node
}
return bst.get_min_from_left(left_node)
@ -177,7 +177,7 @@ fn (bst &BSTree<T>) get_min_from_left(node &BSTreeNode<T>) &BSTreeNode<T> {
// is_empty checks if the BST is empty
pub fn (bst &BSTree<T>) is_empty() bool {
return bst.root == 0
return unsafe { bst.root == 0 }
}
// in_order_traversal traverses the BST in order, and returns the result as an array.
@ -189,7 +189,7 @@ pub fn (bst &BSTree<T>) in_order_traversal() []T {
// in_order_traversal_helper helps traverse the BST, and accumulates the result in the `result` array.
fn (bst &BSTree<T>) in_order_traversal_helper(node &BSTreeNode<T>, mut result []T) {
if node == 0 || !node.is_init {
if unsafe { node == 0 } || !node.is_init {
return
}
bst.in_order_traversal_helper(node.left, mut result)
@ -207,7 +207,7 @@ pub fn (bst &BSTree<T>) post_order_traversal() []T {
// post_order_traversal_helper is a helper function that traverses the BST in post order,
// accumulating the result in an array.
fn (bst &BSTree<T>) post_order_traversal_helper(node &BSTreeNode<T>, mut result []T) {
if node == 0 || !node.is_init {
if unsafe { node == 0 } || !node.is_init {
return
}
@ -226,7 +226,7 @@ pub fn (bst &BSTree<T>) pre_order_traversal() []T {
// pre_order_traversal_helper is a helper function to traverse the BST
// in pre order and accumulates the results in an array.
fn (bst &BSTree<T>) pre_order_traversal_helper(node &BSTreeNode<T>, mut result []T) {
if node == 0 || !node.is_init {
if unsafe { node == 0 } || !node.is_init {
return
}
result << node.value
@ -236,7 +236,7 @@ fn (bst &BSTree<T>) pre_order_traversal_helper(node &BSTreeNode<T>, mut result [
// get_node is a helper method to get the internal representation of the node with the `value`.
fn (bst &BSTree<T>) get_node(node &BSTreeNode<T>, value T) &BSTreeNode<T> {
if node == 0 || !node.is_init {
if unsafe { node == 0 } || !node.is_init {
return new_none_node<T>(false)
}
if node.value == value {
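
Every change in this file follows the same pattern: comparisons of a reference against `0` are wrapped in `unsafe`. A hypothetical standalone illustration of that pattern (the `is_leaf` helper is not part of the diff):

```v
// checks whether a node has no children; the `unsafe` blocks mark the
// raw reference-to-0 comparisons, matching the pattern used above
fn (node &BSTreeNode<T>) is_leaf() bool {
	return unsafe { node.left == 0 } && unsafe { node.right == 0 }
}
```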

View File

@ -251,7 +251,7 @@ pub fn (mut list DoublyLinkedList<T>) delete(idx int) {
pub fn (list DoublyLinkedList<T>) str() string {
mut result_array := []T{}
mut node := list.head
for node != 0 {
for unsafe { node != 0 } {
result_array << node.data
node = node.next
}

Some files were not shown because too many files have changed in this diff.