Compare commits

...

46 Commits

Author SHA1 Message Date
kahsa 585cb9ec2b
net.html: use `-d debug_html` instead of `-g`, prevent undesired output, while debugging user programs (#14521)
ci/woodpecker/push/vc Pipeline was successful Details
ci/woodpecker/push/docker Pipeline was successful Details
ci/woodpecker/push/arch Pipeline was successful Details
2022-05-25 12:55:03 +02:00
Delyan Angelov f9515f7515
cgen: split keys and values in generated new_map_init calls into separate lines 2022-05-25 12:55:03 +02:00
yuyi fcabcfc048
checker: check fn call with argument mismatch (#14519) 2022-05-25 12:55:03 +02:00
yuyi 7c46e94b25
parser: fix fmt error for json decode (#14520) 2022-05-25 12:55:03 +02:00
yuyi 7b0dae5da7
fmt: fix file with just imports (fix #14267) (#14513) 2022-05-25 12:55:02 +02:00
Ben 95429e5cc8
os: minor clean ups on filepath.v (#14506) 2022-05-25 12:55:02 +02:00
yuyi b6fb1baadc
fmt: fix using rand.seed() when import rand and rand.seed (#14511) 2022-05-25 12:55:02 +02:00
yuyi 96b80bcf9f
parser: check fn decl multi return types without parentheses (#14508) 2022-05-25 12:55:02 +02:00
yuyi b717ef74f8
cgen: fix sizeof('str') and sizeof(r'str') (#14507) 2022-05-25 12:55:02 +02:00
Delyan Angelov 73e421cd95
pref: add support for `-dump-files -` and for `-dump-modules -` 2022-05-25 12:55:02 +02:00
Delyan Angelov 461e50fb48
v.builder: support -dump-c-flags with -cc msvc too 2022-05-25 12:55:01 +02:00
yuyi 1761b81130
checker: minor optimization in fn_call() and method_call() (#14503) 2022-05-25 12:55:01 +02:00
yuyi bf15ed95af
parser: fix sizeof(c'str') (fix #14499) (#14502) 2022-05-25 12:55:01 +02:00
Alexander Medvednikov 9133671436
checker: vfmt checker.v 2022-05-25 12:55:01 +02:00
Daniel Däschle 0329536da3
checker: allow but deprecate propagating result as option (#14500) 2022-05-25 12:55:01 +02:00
yuyi 880223c2dd
checker: check method call argument type mismatch (#14496) 2022-05-25 12:55:01 +02:00
spaceface 86ba683536
builtin: add static GC support on Windows with TCC (#14497) 2022-05-25 12:55:01 +02:00
Delyan Angelov a3516b116b
time: simplify some very commonly used t.format methods 2022-05-25 12:55:00 +02:00
spaceface 1a734a00d6
ci: run on all branches on forks (#14498) 2022-05-25 12:55:00 +02:00
yuyi 9326cb9d67
checker: fix map get anon fn value with mut argument (fix #14479) (#14493) 2022-05-25 12:55:00 +02:00
Alexander Medvednikov 1679e07619
checker: c2v infix fix 2022-05-25 12:55:00 +02:00
Daniel Däschle 38709f3e6f
markused: add _result_ok (#14495) 2022-05-25 12:55:00 +02:00
Subhomoy Haldar 534939d3df
rand: move dist functions to top module and PRNG interface; minor cleanup (#14481) 2022-05-25 12:55:00 +02:00
Delyan Angelov 1e0afc71e5
examples: speed up mandelbrot.v by using a constant size thread pool, processing smaller chunks 2022-05-25 12:54:59 +02:00
yuyi 0227573ea9
fmt: fix 'strings' name variable call generate auto import (fix #9713) (#14485) 2022-05-25 12:54:59 +02:00
Delyan Angelov 39c1ae3a43
v.builder: use /NOLOGO, when building cached object files with msvc 2022-05-25 12:54:59 +02:00
Delyan Angelov 89f0b16f23
cgen: support `-profile -` for _test.v files too 2022-05-25 12:54:59 +02:00
Delyan Angelov d2dde3b247
time: remove `import math` dependency, by implementing a simpler version of mceil 2022-05-25 12:54:59 +02:00
Ben 01f263261c
os: add norm_path and abs_path function (#14435) 2022-05-25 12:54:59 +02:00
Daniel Däschle 53bc4c80d4
checker,cgen: allow result if guard (#14474) 2022-05-25 12:54:58 +02:00
Alexander Medvednikov ec5ccb2995
cgen: do not generate `_vinit()` for translated .o code 2022-05-25 12:54:58 +02:00
Vincenzo Palazzo 3d723eb9bf
checker: ban unsafe pointer/fn comparison (#14462) 2022-05-25 12:54:58 +02:00
Daniel Däschle 9e216fbd91
ci: only run on master (#14476) 2022-05-25 12:54:58 +02:00
Delyan Angelov d71f8d336c
thirdparty: fix typo in atomic.h, cleanup comments. 2022-05-25 12:54:58 +02:00
yuyi d6fcd24e9d
parser: check error for script mode that define main function (fix #14467) (#14469) 2022-05-25 12:54:58 +02:00
yuyi b1c2a72118
builtin: minor cleanup in string_interpolation.v (#14471) 2022-05-25 12:54:57 +02:00
yuyi d79fdc075d
fmt: fix chain calls with comments (#14470) 2022-05-25 12:54:57 +02:00
StunxFS b15f50e9b1
json: fix struct field default value support (#14304) 2022-05-25 12:54:57 +02:00
Delyan Angelov 59d4e59ac6
tests: make potential failures in urllib_test.v more informative 2022-05-25 12:54:57 +02:00
David 'Epper' Marshall 801a88ad16
math: update documentation (#14457) 2022-05-25 12:54:57 +02:00
David 'Epper' Marshall b8a0315574
url: fix parse (#14456) 2022-05-25 12:54:57 +02:00
crthpl 2835a190e8
checker, cgen: fix shared non-decl assignment (#14466) 2022-05-25 12:54:56 +02:00
yuyi d5beaa0798
cgen: simplify auto_str_methods.v (#14463) 2022-05-25 12:54:56 +02:00
crthpl 63f03d0c61
cgen: fix autostr of shared fields (#14455) 2022-05-25 12:54:56 +02:00
Daniel Däschle 9ad7307e31
checker: forbid optional variable (#14460) 2022-05-25 12:54:56 +02:00
yuyi 1c0e0ec6a1
cgen: minor cleanup in auto_str_methods.v (#14461) 2022-05-25 12:54:56 +02:00
131 changed files with 1614 additions and 643 deletions

View File

@ -15,6 +15,7 @@ concurrency:
jobs: jobs:
ubuntu-tcc: ubuntu-tcc:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
env: env:
VFLAGS: -cc tcc -no-retry-compilation VFLAGS: -cc tcc -no-retry-compilation
@ -94,6 +95,7 @@ jobs:
ubuntu-tcc-boehm-gc: ubuntu-tcc-boehm-gc:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
env: env:
VFLAGS: -cc tcc -no-retry-compilation VFLAGS: -cc tcc -no-retry-compilation
@ -150,6 +152,7 @@ jobs:
macos: macos:
runs-on: macOS-latest runs-on: macOS-latest
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
env: env:
VFLAGS: -cc clang VFLAGS: -cc clang
@ -239,6 +242,7 @@ jobs:
ubuntu: ubuntu:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@ -338,6 +342,7 @@ jobs:
ubuntu-clang: ubuntu-clang:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
env: env:
VFLAGS: -cc clang VFLAGS: -cc clang
@ -428,6 +433,7 @@ jobs:
windows-gcc: windows-gcc:
runs-on: windows-2019 runs-on: windows-2019
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
env: env:
VFLAGS: -cc gcc VFLAGS: -cc gcc
@ -490,6 +496,7 @@ jobs:
windows-msvc: windows-msvc:
runs-on: windows-2019 runs-on: windows-2019
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
env: env:
VFLAGS: -cc msvc VFLAGS: -cc msvc
@ -537,6 +544,7 @@ jobs:
windows-tcc: windows-tcc:
runs-on: windows-2019 runs-on: windows-2019
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
env: env:
VFLAGS: -cc tcc -no-retry-compilation VFLAGS: -cc tcc -no-retry-compilation
@ -586,7 +594,9 @@ jobs:
- name: Build examples - name: Build examples
run: ./v build-examples run: ./v build-examples
- name: v2 self compilation - name: v2 self compilation
run: .\v.exe -o v2.exe cmd/v && .\v2.exe -o v3.exe cmd/v run: .\v.exe -o v2.exe cmd/v && .\v2.exe -o v3.exe cmd/v && .\v3.exe -o v4.exe cmd/v
- name: v2 self compilation with -gc boehm
run: .\v.exe -o v2.exe -gc boehm cmd/v && .\v2.exe -o v3.exe -gc boehm cmd/v && .\v3.exe -o v4.exe -gc boehm cmd/v
## ## tcc32 ## ## tcc32
## - name: Build with make.bat -tcc32 ## - name: Build with make.bat -tcc32
@ -631,6 +641,7 @@ jobs:
# ubuntu-autofree-selfcompile: # ubuntu-autofree-selfcompile:
# runs-on: ubuntu-20.04 # runs-on: ubuntu-20.04
# if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
# timeout-minutes: 121 # timeout-minutes: 121
# env: # env:
# VFLAGS: -cc gcc # VFLAGS: -cc gcc
@ -644,6 +655,7 @@ jobs:
# ubuntu-musl: # ubuntu-musl:
# runs-on: ubuntu-20.04 # runs-on: ubuntu-20.04
# if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
# timeout-minutes: 121 # timeout-minutes: 121
# env: # env:
# VFLAGS: -cc musl-gcc # VFLAGS: -cc musl-gcc

View File

@ -11,6 +11,7 @@ on:
jobs: jobs:
ubuntu: ubuntu:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30 timeout-minutes: 30
env: env:
VFLAGS: -cc tcc -no-retry-compilation VFLAGS: -cc tcc -no-retry-compilation
@ -49,6 +50,7 @@ jobs:
macos: macos:
runs-on: macos-11 runs-on: macos-11
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30 timeout-minutes: 30
env: env:
VFLAGS: -cc clang VFLAGS: -cc clang

View File

@ -12,6 +12,7 @@ jobs:
macos-cross: macos-cross:
runs-on: macOS-latest runs-on: macOS-latest
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 25 timeout-minutes: 25
env: env:
VFLAGS: -cc clang VFLAGS: -cc clang
@ -48,6 +49,7 @@ jobs:
linux-cross: linux-cross:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 25 timeout-minutes: 25
env: env:
VFLAGS: -cc tcc -no-retry-compilation VFLAGS: -cc tcc -no-retry-compilation
@ -101,6 +103,7 @@ jobs:
windows-cross: windows-cross:
runs-on: windows-2019 runs-on: windows-2019
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 25 timeout-minutes: 25
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2

View File

@ -74,6 +74,7 @@ concurrency:
jobs: jobs:
tests-sanitize-undefined-clang: tests-sanitize-undefined-clang:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180 timeout-minutes: 180
env: env:
VFLAGS: -cc clang VFLAGS: -cc clang
@ -101,6 +102,7 @@ jobs:
tests-sanitize-undefined-gcc: tests-sanitize-undefined-gcc:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180 timeout-minutes: 180
env: env:
VFLAGS: -cc gcc VFLAGS: -cc gcc
@ -127,6 +129,7 @@ jobs:
tests-sanitize-address-clang: tests-sanitize-address-clang:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180 timeout-minutes: 180
env: env:
VFLAGS: -cc clang VFLAGS: -cc clang
@ -158,6 +161,7 @@ jobs:
tests-sanitize-address-msvc: tests-sanitize-address-msvc:
runs-on: windows-2019 runs-on: windows-2019
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180 timeout-minutes: 180
env: env:
VFLAGS: -cc msvc VFLAGS: -cc msvc
@ -189,6 +193,7 @@ jobs:
tests-sanitize-address-gcc: tests-sanitize-address-gcc:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180 timeout-minutes: 180
env: env:
VFLAGS: -cc gcc VFLAGS: -cc gcc
@ -220,6 +225,7 @@ jobs:
tests-sanitize-memory-clang: tests-sanitize-memory-clang:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 180 timeout-minutes: 180
env: env:
VFLAGS: -cc clang VFLAGS: -cc clang

View File

@ -12,6 +12,7 @@ jobs:
run: run:
name: Run name: Run
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v2 uses: actions/checkout@v2

View File

@ -16,6 +16,7 @@ jobs:
alpine-docker-musl-gcc: alpine-docker-musl-gcc:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
container: container:
# Alpine docker pre-built container # Alpine docker pre-built container
@ -50,6 +51,7 @@ jobs:
ubuntu-docker-musl: ubuntu-docker-musl:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
container: container:
image: thevlang/vlang:ubuntu-build image: thevlang/vlang:ubuntu-build

View File

@ -10,6 +10,7 @@ on:
jobs: jobs:
build-vc: build-vc:
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
env: env:
VREPO: github.com/vlang/vc.git VREPO: github.com/vlang/vc.git
steps: steps:

View File

@ -11,6 +11,7 @@ on:
jobs: jobs:
gg-regressions: gg-regressions:
runs-on: ubuntu-18.04 runs-on: ubuntu-18.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 10 timeout-minutes: 10
env: env:
VFLAGS: -cc tcc VFLAGS: -cc tcc

View File

@ -15,6 +15,7 @@ concurrency:
jobs: jobs:
no-gpl-by-accident: no-gpl-by-accident:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 15 timeout-minutes: 15
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@ -24,6 +25,7 @@ jobs:
code-formatting: code-formatting:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 15 timeout-minutes: 15
env: env:
VFLAGS: -cc gcc VFLAGS: -cc gcc
@ -40,6 +42,7 @@ jobs:
performance-regressions: performance-regressions:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 15 timeout-minutes: 15
env: env:
VFLAGS: -cc gcc VFLAGS: -cc gcc
@ -64,6 +67,7 @@ jobs:
misc-tooling: misc-tooling:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
env: env:
VFLAGS: -cc tcc -no-retry-compilation VFLAGS: -cc tcc -no-retry-compilation
@ -112,6 +116,7 @@ jobs:
parser-silent: parser-silent:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2

View File

@ -16,6 +16,7 @@ jobs:
space-paths-linux: space-paths-linux:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30 timeout-minutes: 30
env: env:
MY_V_PATH: '你好 my $path, @с интервали' MY_V_PATH: '你好 my $path, @с интервали'
@ -41,6 +42,7 @@ jobs:
space-paths-macos: space-paths-macos:
runs-on: macOS-latest runs-on: macOS-latest
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30 timeout-minutes: 30
env: env:
MY_V_PATH: '你好 my $path, @с интервали' MY_V_PATH: '你好 my $path, @с интервали'
@ -69,6 +71,7 @@ jobs:
space-paths-windows: space-paths-windows:
runs-on: windows-2022 runs-on: windows-2022
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30 timeout-minutes: 30
env: env:
MY_V_PATH: 'path with some $punctuation, and some spaces' MY_V_PATH: 'path with some $punctuation, and some spaces'

View File

@ -11,6 +11,7 @@ on:
jobs: jobs:
v-compiles-sdl-examples: v-compiles-sdl-examples:
runs-on: ubuntu-18.04 runs-on: ubuntu-18.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30 timeout-minutes: 30
env: env:
VFLAGS: -cc tcc VFLAGS: -cc tcc

View File

@ -11,6 +11,7 @@ on:
jobs: jobs:
toml-module-pass-external-test-suites: toml-module-pass-external-test-suites:
runs-on: ubuntu-18.04 runs-on: ubuntu-18.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 30 timeout-minutes: 30
env: env:
TOML_BS_TESTS_PATH: vlib/toml/tests/testdata/burntsushi/toml-test TOML_BS_TESTS_PATH: vlib/toml/tests/testdata/burntsushi/toml-test

View File

@ -9,12 +9,13 @@ on:
- "**.md" - "**.md"
concurrency: concurrency:
group: build-other-${{ github.event.pull_request.number || github.sha }} group: build-v-apps-and-modules-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true cancel-in-progress: true
jobs: jobs:
v-apps-compile: v-apps-compile:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2

View File

@ -11,6 +11,7 @@ on:
jobs: jobs:
vab-compiles-v-examples: vab-compiles-v-examples:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
env: env:
VAB_FLAGS: --api 30 --build-tools 29.0.0 -v 3 VAB_FLAGS: --api 30 --build-tools 29.0.0 -v 3

View File

@ -13,6 +13,7 @@ on:
jobs: jobs:
vinix-build: vinix-build:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2

View File

@ -11,6 +11,7 @@ on:
jobs: jobs:
websocket_tests: websocket_tests:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
if: github.event_name != 'push' || github.event.ref == 'refs/heads/master' || github.event.repository.full_name != 'vlang/v'
timeout-minutes: 121 timeout-minutes: 121
env: env:
VFLAGS: -cc tcc -no-retry-compilation VFLAGS: -cc tcc -no-retry-compilation

View File

@ -207,7 +207,7 @@ fn change_detection_loop(ocontext &Context) {
} }
fn (mut context Context) kill_pgroup() { fn (mut context Context) kill_pgroup() {
if context.child_process == 0 { if unsafe { context.child_process == 0 } {
return return
} }
if context.child_process.is_alive() { if context.child_process.is_alive() {

View File

@ -239,7 +239,15 @@ see also `v help build`.
-dump-c-flags file.txt -dump-c-flags file.txt
Write all C flags into `file.txt`, one flag per line. Write all C flags into `file.txt`, one flag per line.
If `file.txt` is `-`, then write the flags to stdout, one flag per line. If `file.txt` is `-`, write to stdout instead.
-dump-modules file.txt
Write all module names used by the program in `file.txt`, one module per line.
If `file.txt` is `-`, write to stdout instead.
-dump-files file.txt
Write all used V file paths used by the program in `file.txt`, one module per line.
If `file.txt` is `-`, write to stdout instead.
-no-rsp -no-rsp
By default, V passes all C compiler options to the backend C compiler By default, V passes all C compiler options to the backend C compiler

View File

@ -7,6 +7,8 @@ const pwidth = 800
const pheight = 600 const pheight = 600
const chunk_height = 2 // the image is recalculated in chunks, each chunk processed in a separate thread
const zoom_factor = 1.1 const zoom_factor = 1.1
struct ViewRect { struct ViewRect {
@ -17,22 +19,47 @@ mut:
y_max f64 y_max f64
} }
fn (v &ViewRect) width() f64 {
return v.x_max - v.x_min
}
fn (v &ViewRect) height() f64 {
return v.y_max - v.y_min
}
struct AppState { struct AppState {
mut: mut:
gg &gg.Context = 0 gg &gg.Context = 0
iidx int iidx int
pixels []u32 = []u32{len: pwidth * pheight} pixels &u32 = unsafe { vcalloc(pwidth * pheight * sizeof(u32)) }
npixels []u32 = []u32{len: pwidth * pheight} // all drawing happens here, results are copied at the end npixels &u32 = unsafe { vcalloc(pwidth * pheight * sizeof(u32)) } // all drawing happens here, results are swapped at the end
view ViewRect = ViewRect{-2.7610033817025625, 1.1788897130338223, -1.824584023871934, 2.1153096311072788} view ViewRect = ViewRect{-2.7610033817025625, 1.1788897130338223, -1.824584023871934, 2.1153096311072788}
ntasks int = runtime.nr_jobs() ntasks int = runtime.nr_jobs()
} }
const colors = [gx.black, gx.blue, gx.red, gx.green, gx.yellow, gx.orange, gx.purple, gx.white, const colors = [gx.black, gx.blue, gx.red, gx.green, gx.yellow, gx.orange, gx.purple, gx.white,
gx.indigo, gx.violet, gx.black] gx.indigo, gx.violet, gx.black].map(u32(it.abgr8()))
struct MandelChunk {
cview ViewRect
ymin f64
ymax f64
}
fn (mut state AppState) update() { fn (mut state AppState) update() {
mut sw := time.new_stopwatch() mut chunk_channel := chan MandelChunk{cap: state.ntasks}
mut chunk_ready_channel := chan bool{cap: 1000}
mut threads := []thread{cap: state.ntasks}
defer {
chunk_channel.close()
threads.wait()
}
for t in 0 .. state.ntasks {
threads << go state.worker(t, chunk_channel, chunk_ready_channel)
}
//
mut oview := ViewRect{} mut oview := ViewRect{}
mut sw := time.new_stopwatch()
for { for {
sw.restart() sw.restart()
cview := state.view cview := state.view
@ -40,39 +67,56 @@ fn (mut state AppState) update() {
time.sleep(5 * time.millisecond) time.sleep(5 * time.millisecond)
continue continue
} }
sheight := pheight / state.ntasks // schedule chunks, describing the work:
mut threads := []thread{} mut nchunks := 0
for start := 0; start < pheight; start += sheight { for start := 0; start < pheight; start += chunk_height {
threads << go state.recalc_lines(cview, start, start + sheight) chunk_channel <- MandelChunk{
cview: cview
ymin: start
ymax: start + chunk_height
}
nchunks++
} }
threads.wait() // wait for all chunks to be processed:
state.pixels = state.npixels for _ in 0 .. nchunks {
_ := <-chunk_ready_channel
}
// everything is done, swap the buffer pointers
state.pixels, state.npixels = state.npixels, state.pixels
println('$state.ntasks threads; $sw.elapsed().milliseconds() ms / frame') println('$state.ntasks threads; $sw.elapsed().milliseconds() ms / frame')
oview = cview oview = cview
} }
} }
fn (mut state AppState) recalc_lines(cview ViewRect, ymin f64, ymax f64) { fn (mut state AppState) worker(id int, input chan MandelChunk, ready chan bool) {
for y_pixel := ymin; y_pixel < ymax && y_pixel < pheight; y_pixel++ { for {
y0 := (y_pixel / pheight) * (cview.y_max - cview.y_min) + cview.y_min chunk := <-input or { break }
for x_pixel := 0.0; x_pixel < pwidth; x_pixel++ { yscale := chunk.cview.height() / pheight
x0 := (x_pixel / pwidth) * (cview.x_max - cview.x_min) + cview.x_min xscale := chunk.cview.width() / pwidth
mut x, mut y := x0, y0 for y_pixel := chunk.ymin; y_pixel < chunk.ymax && y_pixel < pheight; y_pixel++ {
mut iter := 0 y0 := y_pixel * yscale + chunk.cview.y_min
for ; iter < 80; iter++ { for x_pixel := 0.0; x_pixel < pwidth; x_pixel++ {
x, y = x * x - y * y + x0, 2 * x * y + y0 x0 := x_pixel * xscale + chunk.cview.x_min
if x * x + y * y > 4 { mut x, mut y := x0, y0
break mut iter := 0
for ; iter < 80; iter++ {
x, y = x * x - y * y + x0, 2 * x * y + y0
if x * x + y * y > 4 {
break
}
}
unsafe {
state.npixels[int(y_pixel * pwidth) + int(x_pixel)] = colors[iter & 7]
} }
} }
state.npixels[int(y_pixel) * pwidth + int(x_pixel)] = u32(colors[iter % 8].abgr8())
} }
ready <- true
} }
} }
fn (mut state AppState) draw() { fn (mut state AppState) draw() {
mut istream_image := state.gg.get_cached_image_by_idx(state.iidx) mut istream_image := state.gg.get_cached_image_by_idx(state.iidx)
istream_image.update_pixel_data(&state.pixels[0]) istream_image.update_pixel_data(state.pixels)
size := gg.window_size() size := gg.window_size()
state.gg.draw_image(0, 0, size.width, size.height, istream_image) state.gg.draw_image(0, 0, size.width, size.height, istream_image)
} }
@ -110,8 +154,8 @@ fn graphics_frame(mut state AppState) {
fn graphics_click(x f32, y f32, btn gg.MouseButton, mut state AppState) { fn graphics_click(x f32, y f32, btn gg.MouseButton, mut state AppState) {
if btn == .right { if btn == .right {
size := gg.window_size() size := gg.window_size()
m_x := (x / size.width) * (state.view.x_max - state.view.x_min) + state.view.x_min m_x := (x / size.width) * state.view.width() + state.view.x_min
m_y := (y / size.height) * (state.view.y_max - state.view.y_min) + state.view.y_min m_y := (y / size.height) * state.view.height() + state.view.y_min
state.center(m_x, m_y) state.center(m_x, m_y)
} }
} }
@ -119,8 +163,8 @@ fn graphics_click(x f32, y f32, btn gg.MouseButton, mut state AppState) {
fn graphics_move(x f32, y f32, mut state AppState) { fn graphics_move(x f32, y f32, mut state AppState) {
if state.gg.mouse_buttons.has(.left) { if state.gg.mouse_buttons.has(.left) {
size := gg.window_size() size := gg.window_size()
d_x := (f64(state.gg.mouse_dx) / size.width) * (state.view.x_max - state.view.x_min) d_x := (f64(state.gg.mouse_dx) / size.width) * state.view.width()
d_y := (f64(state.gg.mouse_dy) / size.height) * (state.view.y_max - state.view.y_min) d_y := (f64(state.gg.mouse_dy) / size.height) * state.view.height()
state.view.x_min -= d_x state.view.x_min -= d_x
state.view.x_max -= d_x state.view.x_max -= d_x
state.view.y_min -= d_y state.view.y_min -= d_y
@ -133,8 +177,8 @@ fn graphics_scroll(e &gg.Event, mut state AppState) {
} }
fn graphics_keydown(code gg.KeyCode, mod gg.Modifier, mut state AppState) { fn graphics_keydown(code gg.KeyCode, mod gg.Modifier, mut state AppState) {
s_x := (state.view.x_max - state.view.x_min) / 5 s_x := state.view.width() / 5
s_y := (state.view.y_max - state.view.y_min) / 5 s_y := state.view.height() / 5
// movement // movement
mut d_x, mut d_y := 0.0, 0.0 mut d_x, mut d_y := 0.0, 0.0
if code == .enter { if code == .enter {

View File

@ -95,7 +95,7 @@ pub fn (mut s System) explode(x f32, y f32) {
pub fn (mut s System) free() { pub fn (mut s System) free() {
for p in s.pool { for p in s.pool {
if p == 0 { if unsafe { p == 0 } {
print(ptr_str(p) + ' ouch') print(ptr_str(p) + ' ouch')
continue continue
} }
@ -103,7 +103,7 @@ pub fn (mut s System) free() {
} }
s.pool.clear() s.pool.clear()
for p in s.bin { for p in s.bin {
if p == 0 { if unsafe { p == 0 } {
print(ptr_str(p) + ' ouch') print(ptr_str(p) + ' ouch')
continue continue
} }

View File

@ -1,7 +1,6 @@
/* /*
Compability header for stdatomic.h that works for all compilers supported Compatibility header for stdatomic.h that works for all compilers supported by V.
by V. For TCC libatomic from the operating system is used For TCC, we use libatomic from the OS.
*/ */
#ifndef __ATOMIC_H #ifndef __ATOMIC_H
#define __ATOMIC_H #define __ATOMIC_H

View File

@ -59,14 +59,13 @@ $if dynamic_boehm ? {
#flag $first_existing("/usr/local/lib/libgc.a", "/usr/lib/libgc.a") #flag $first_existing("/usr/local/lib/libgc.a", "/usr/lib/libgc.a")
#flag -lpthread #flag -lpthread
} $else $if windows { } $else $if windows {
#flag -DGC_NOT_DLL=1
#flag -DGC_WIN32_THREADS=1
$if tinyc { $if tinyc {
#flag -I@VEXEROOT/thirdparty/libgc/include #flag -I@VEXEROOT/thirdparty/libgc/include
#flag -L@VEXEROOT/thirdparty/tcc/lib #flag @VEXEROOT/thirdparty/tcc/lib/libgc.a
#flag -lgc #flag -luser32
} $else { } $else {
#flag -DGC_NOT_DLL=1
#flag -DGC_WIN32_THREADS=1
#flag -DGC_BUILTIN_ATOMIC=1
#flag -I@VEXEROOT/thirdparty/libgc/include #flag -I@VEXEROOT/thirdparty/libgc/include
#flag @VEXEROOT/thirdparty/libgc/gc.o #flag @VEXEROOT/thirdparty/libgc/gc.o
} }

View File

@ -113,14 +113,6 @@ struct Option {
// derived Option_xxx types // derived Option_xxx types
} }
fn opt_ok(data voidptr, mut option Option, size int) {
unsafe {
*option = Option{}
// use err to get the end of OptionBase and then memcpy into it
vmemcpy(&u8(&option.err) + sizeof(IError), data, size)
}
}
// option is the base of V's internal optional return system. // option is the base of V's internal optional return system.
struct _option { struct _option {
state u8 state u8
@ -130,6 +122,14 @@ struct _option {
// derived _option_xxx types // derived _option_xxx types
} }
fn _option_ok(data voidptr, mut option _option, size int) {
unsafe {
*option = _option{}
// use err to get the end of OptionBase and then memcpy into it
vmemcpy(&u8(&option.err) + sizeof(IError), data, size)
}
}
fn opt_ok2(data voidptr, mut option _option, size int) { fn opt_ok2(data voidptr, mut option _option, size int) {
unsafe { unsafe {
*option = _option{} *option = _option{}

View File

@ -32,9 +32,10 @@ mut:
[unsafe] [unsafe]
fn vmemory_block_new(prev &VMemoryBlock, at_least isize) &VMemoryBlock { fn vmemory_block_new(prev &VMemoryBlock, at_least isize) &VMemoryBlock {
mut v := unsafe { &VMemoryBlock(C.calloc(1, sizeof(VMemoryBlock))) } mut v := unsafe { &VMemoryBlock(C.calloc(1, sizeof(VMemoryBlock))) }
if prev != 0 { if unsafe { prev != 0 } {
v.id = prev.id + 1 v.id = prev.id + 1
} }
v.previous = prev v.previous = prev
block_size := if at_least < prealloc_block_size { prealloc_block_size } else { at_least } block_size := if at_least < prealloc_block_size { prealloc_block_size } else { at_least }
v.start = unsafe { C.malloc(block_size) } v.start = unsafe { C.malloc(block_size) }
@ -79,7 +80,7 @@ fn prealloc_vcleanup() {
// The second loop however should *not* allocate at all. // The second loop however should *not* allocate at all.
mut nr_mallocs := i64(0) mut nr_mallocs := i64(0)
mut mb := g_memory_block mut mb := g_memory_block
for mb != 0 { for unsafe { mb != 0 } {
nr_mallocs += mb.mallocs nr_mallocs += mb.mallocs
eprintln('> freeing mb.id: ${mb.id:3} | cap: ${mb.cap:7} | rem: ${mb.remaining:7} | start: ${voidptr(mb.start)} | current: ${voidptr(mb.current)} | diff: ${u64(mb.current) - u64(mb.start):7} bytes | mallocs: $mb.mallocs') eprintln('> freeing mb.id: ${mb.id:3} | cap: ${mb.cap:7} | rem: ${mb.remaining:7} | start: ${voidptr(mb.start)} | current: ${voidptr(mb.current)} | diff: ${u64(mb.current) - u64(mb.start):7} bytes | mallocs: $mb.mallocs')
mb = mb.previous mb = mb.previous

View File

@ -37,26 +37,26 @@ pub enum StrIntpType {
} }
pub fn (x StrIntpType) str() string { pub fn (x StrIntpType) str() string {
match x { return match x {
.si_no_str { return 'no_str' } .si_no_str { 'no_str' }
.si_c { return 'c' } .si_c { 'c' }
.si_u8 { return 'u8' } .si_u8 { 'u8' }
.si_i8 { return 'i8' } .si_i8 { 'i8' }
.si_u16 { return 'u16' } .si_u16 { 'u16' }
.si_i16 { return 'i16' } .si_i16 { 'i16' }
.si_u32 { return 'u32' } .si_u32 { 'u32' }
.si_i32 { return 'i32' } .si_i32 { 'i32' }
.si_u64 { return 'u64' } .si_u64 { 'u64' }
.si_i64 { return 'i64' } .si_i64 { 'i64' }
.si_f32 { return 'f32' } .si_f32 { 'f32' }
.si_f64 { return 'f64' } .si_f64 { 'f64' }
.si_g32 { return 'f32' } // g32 format use f32 data .si_g32 { 'f32' } // g32 format use f32 data
.si_g64 { return 'f64' } // g64 format use f64 data .si_g64 { 'f64' } // g64 format use f64 data
.si_e32 { return 'f32' } // e32 format use f32 data .si_e32 { 'f32' } // e32 format use f32 data
.si_e64 { return 'f64' } // e64 format use f64 data .si_e64 { 'f64' } // e64 format use f64 data
.si_s { return 's' } .si_s { 's' }
.si_p { return 'p' } .si_p { 'p' }
.si_vp { return 'vp' } .si_vp { 'vp' }
} }
} }

View File

@ -53,7 +53,7 @@ pub fn (cmd Command) str() string {
res << ' cb execute: $cmd.execute' res << ' cb execute: $cmd.execute'
res << ' cb pre_execute: $cmd.pre_execute' res << ' cb pre_execute: $cmd.pre_execute'
res << ' cb post_execute: $cmd.post_execute' res << ' cb post_execute: $cmd.post_execute'
if cmd.parent == 0 { if unsafe { cmd.parent == 0 } {
res << ' parent: &Command(0)' res << ' parent: &Command(0)'
} else { } else {
res << ' parent: &Command{$cmd.parent.name ...}' res << ' parent: &Command{$cmd.parent.name ...}'

View File

@ -49,7 +49,7 @@ pub fn print_help_for_command(help_cmd Command) ? {
} }
print(cmd.help_message()) print(cmd.help_message())
} else { } else {
if help_cmd.parent != 0 { if unsafe { help_cmd.parent != 0 } {
print(help_cmd.parent.help_message()) print(help_cmd.parent.help_message())
} }
} }

View File

@ -41,7 +41,7 @@ pub fn print_manpage_for_command(man_cmd Command) ? {
} }
print(cmd.manpage()) print(cmd.manpage())
} else { } else {
if man_cmd.parent != 0 { if unsafe { man_cmd.parent != 0 } {
print(man_cmd.parent.manpage()) print(man_cmd.parent.manpage())
} }
} }
@ -55,7 +55,7 @@ pub fn (cmd Command) manpage() string {
mdoc += '.Os\n.Sh NAME\n.Nm ${cmd.full_name().replace(' ', '-')}\n.Nd $cmd.description\n' mdoc += '.Os\n.Sh NAME\n.Nm ${cmd.full_name().replace(' ', '-')}\n.Nd $cmd.description\n'
mdoc += '.Sh SYNOPSIS\n' mdoc += '.Sh SYNOPSIS\n'
mdoc += '.Nm $cmd.root().name\n' mdoc += '.Nm $cmd.root().name\n'
if cmd.parent != 0 { if unsafe { cmd.parent != 0 } {
mut parents := []Command{} mut parents := []Command{}
if !cmd.parent.is_root() { if !cmd.parent.is_root() {
parents.prepend(cmd.parent) parents.prepend(cmd.parent)
@ -96,7 +96,7 @@ pub fn (cmd Command) manpage() string {
} }
if cmd.commands.len > 0 { if cmd.commands.len > 0 {
mdoc += '.Nm $cmd.root().name\n' mdoc += '.Nm $cmd.root().name\n'
if cmd.parent != 0 { if unsafe { cmd.parent != 0 } {
mut parents := []Command{} mut parents := []Command{}
if !cmd.parent.is_root() { if !cmd.parent.is_root() {
parents.prepend(cmd.parent) parents.prepend(cmd.parent)
@ -158,7 +158,7 @@ pub fn (cmd Command) manpage() string {
if cmd.commands.len > 0 { if cmd.commands.len > 0 {
mdoc += '.Sh SEE ALSO\n' mdoc += '.Sh SEE ALSO\n'
mut cmds := []string{} mut cmds := []string{}
if cmd.parent != 0 { if unsafe { cmd.parent != 0 } {
cmds << cmd.parent.full_name().replace(' ', '-') cmds << cmd.parent.full_name().replace(' ', '-')
} }
for c in cmd.commands { for c in cmd.commands {

View File

@ -76,13 +76,13 @@ pub fn (mut bst BSTree<T>) insert(value T) bool {
// insert_helper walks the tree and inserts the given node. // insert_helper walks the tree and inserts the given node.
fn (mut bst BSTree<T>) insert_helper(mut node BSTreeNode<T>, value T) bool { fn (mut bst BSTree<T>) insert_helper(mut node BSTreeNode<T>, value T) bool {
if node.value < value { if node.value < value {
if node.right != 0 && node.right.is_init { if unsafe { node.right != 0 } && node.right.is_init {
return bst.insert_helper(mut node.right, value) return bst.insert_helper(mut node.right, value)
} }
node.right = new_node(node, value) node.right = new_node(node, value)
return true return true
} else if node.value > value { } else if node.value > value {
if node.left != 0 && node.left.is_init { if unsafe { node.left != 0 } && node.left.is_init {
return bst.insert_helper(mut node.left, value) return bst.insert_helper(mut node.left, value)
} }
node.left = new_node(node, value) node.left = new_node(node, value)
@ -99,7 +99,7 @@ pub fn (bst &BSTree<T>) contains(value T) bool {
// contains_helper is a helper function to walk the tree, and return // contains_helper is a helper function to walk the tree, and return
// the absence or presence of the `value`. // the absence or presence of the `value`.
fn (bst &BSTree<T>) contains_helper(node &BSTreeNode<T>, value T) bool { fn (bst &BSTree<T>) contains_helper(node &BSTreeNode<T>, value T) bool {
if node == 0 || !node.is_init { if unsafe { node == 0 } || !node.is_init {
return false return false
} }
if node.value < value { if node.value < value {
@ -124,12 +124,12 @@ fn (mut bst BSTree<T>) remove_helper(mut node BSTreeNode<T>, value T, left bool)
return false return false
} }
if node.value == value { if node.value == value {
if node.left != 0 && node.left.is_init { if unsafe { node.left != 0 } && node.left.is_init {
// In order to remove the element we need to bring up as parent the max of the // In order to remove the element we need to bring up as parent the max of the
// left sub-tree. // left sub-tree.
mut max_node := bst.get_max_from_right(node.left) mut max_node := bst.get_max_from_right(node.left)
node.bind(mut max_node, true) node.bind(mut max_node, true)
} else if node.right != 0 && node.right.is_init { } else if unsafe { node.right != 0 } && node.right.is_init {
// Bring up the element with the minimum value in the right sub-tree. // Bring up the element with the minimum value in the right sub-tree.
mut min_node := bst.get_min_from_left(node.right) mut min_node := bst.get_min_from_left(node.right)
node.bind(mut min_node, false) node.bind(mut min_node, false)
@ -153,11 +153,11 @@ fn (mut bst BSTree<T>) remove_helper(mut node BSTreeNode<T>, value T, left bool)
// get_max_from_right returns the max element of the BST following the right branch. // get_max_from_right returns the max element of the BST following the right branch.
fn (bst &BSTree<T>) get_max_from_right(node &BSTreeNode<T>) &BSTreeNode<T> { fn (bst &BSTree<T>) get_max_from_right(node &BSTreeNode<T>) &BSTreeNode<T> {
if node == 0 { if unsafe { node == 0 } {
return new_none_node<T>(false) return new_none_node<T>(false)
} }
right_node := node.right right_node := node.right
if right_node == 0 || !right_node.is_init { if unsafe { right_node == 0 } || !right_node.is_init {
return node return node
} }
return bst.get_max_from_right(right_node) return bst.get_max_from_right(right_node)
@ -165,11 +165,11 @@ fn (bst &BSTree<T>) get_max_from_right(node &BSTreeNode<T>) &BSTreeNode<T> {
// get_min_from_left returns the min element of the BST by following the left branch. // get_min_from_left returns the min element of the BST by following the left branch.
fn (bst &BSTree<T>) get_min_from_left(node &BSTreeNode<T>) &BSTreeNode<T> { fn (bst &BSTree<T>) get_min_from_left(node &BSTreeNode<T>) &BSTreeNode<T> {
if node == 0 { if unsafe { node == 0 } {
return new_none_node<T>(false) return new_none_node<T>(false)
} }
left_node := node.left left_node := node.left
if left_node == 0 || !left_node.is_init { if unsafe { left_node == 0 } || !left_node.is_init {
return node return node
} }
return bst.get_min_from_left(left_node) return bst.get_min_from_left(left_node)
@ -177,7 +177,7 @@ fn (bst &BSTree<T>) get_min_from_left(node &BSTreeNode<T>) &BSTreeNode<T> {
// is_empty checks if the BST is empty // is_empty checks if the BST is empty
pub fn (bst &BSTree<T>) is_empty() bool { pub fn (bst &BSTree<T>) is_empty() bool {
return bst.root == 0 return unsafe { bst.root == 0 }
} }
// in_order_traversal traverses the BST in order, and returns the result as an array. // in_order_traversal traverses the BST in order, and returns the result as an array.
@ -189,7 +189,7 @@ pub fn (bst &BSTree<T>) in_order_traversal() []T {
// in_order_traversal_helper helps traverse the BST, and accumulates the result in the `result` array. // in_order_traversal_helper helps traverse the BST, and accumulates the result in the `result` array.
fn (bst &BSTree<T>) in_order_traversal_helper(node &BSTreeNode<T>, mut result []T) { fn (bst &BSTree<T>) in_order_traversal_helper(node &BSTreeNode<T>, mut result []T) {
if node == 0 || !node.is_init { if unsafe { node == 0 } || !node.is_init {
return return
} }
bst.in_order_traversal_helper(node.left, mut result) bst.in_order_traversal_helper(node.left, mut result)
@ -207,7 +207,7 @@ pub fn (bst &BSTree<T>) post_order_traversal() []T {
// post_order_traversal_helper is a helper function that traverses the BST in post order, // post_order_traversal_helper is a helper function that traverses the BST in post order,
// accumulating the result in an array. // accumulating the result in an array.
fn (bst &BSTree<T>) post_order_traversal_helper(node &BSTreeNode<T>, mut result []T) { fn (bst &BSTree<T>) post_order_traversal_helper(node &BSTreeNode<T>, mut result []T) {
if node == 0 || !node.is_init { if unsafe { node == 0 } || !node.is_init {
return return
} }
@ -226,7 +226,7 @@ pub fn (bst &BSTree<T>) pre_order_traversal() []T {
// pre_order_traversal_helper is a helper function to traverse the BST // pre_order_traversal_helper is a helper function to traverse the BST
// in pre order and accumulates the results in an array. // in pre order and accumulates the results in an array.
fn (bst &BSTree<T>) pre_order_traversal_helper(node &BSTreeNode<T>, mut result []T) { fn (bst &BSTree<T>) pre_order_traversal_helper(node &BSTreeNode<T>, mut result []T) {
if node == 0 || !node.is_init { if unsafe { node == 0 } || !node.is_init {
return return
} }
result << node.value result << node.value
@ -236,7 +236,7 @@ fn (bst &BSTree<T>) pre_order_traversal_helper(node &BSTreeNode<T>, mut result [
// get_node is a helper method to ge the internal rapresentation of the node with the `value`. // get_node is a helper method to ge the internal rapresentation of the node with the `value`.
fn (bst &BSTree<T>) get_node(node &BSTreeNode<T>, value T) &BSTreeNode<T> { fn (bst &BSTree<T>) get_node(node &BSTreeNode<T>, value T) &BSTreeNode<T> {
if node == 0 || !node.is_init { if unsafe { node == 0 } || !node.is_init {
return new_none_node<T>(false) return new_none_node<T>(false)
} }
if node.value == value { if node.value == value {

View File

@ -251,7 +251,7 @@ pub fn (mut list DoublyLinkedList<T>) delete(idx int) {
pub fn (list DoublyLinkedList<T>) str() string { pub fn (list DoublyLinkedList<T>) str() string {
mut result_array := []T{} mut result_array := []T{}
mut node := list.head mut node := list.head
for node != 0 { for unsafe { node != 0 } {
result_array << node.data result_array << node.data
node = node.next node = node.next
} }

View File

@ -29,11 +29,11 @@ pub fn (list LinkedList<T>) first() ?T {
// last returns the last element of the linked list // last returns the last element of the linked list
pub fn (list LinkedList<T>) last() ?T { pub fn (list LinkedList<T>) last() ?T {
if list.head == 0 { if unsafe { list.head == 0 } {
return error('Linked list is empty') return error('Linked list is empty')
} else { } else {
mut node := list.head mut node := list.head
for node.next != 0 { for unsafe { node.next != 0 } {
node = node.next node = node.next
} }
return node.data return node.data
@ -42,12 +42,12 @@ pub fn (list LinkedList<T>) last() ?T {
// index returns the element at the given index of the linked list // index returns the element at the given index of the linked list
pub fn (list LinkedList<T>) index(idx int) ?T { pub fn (list LinkedList<T>) index(idx int) ?T {
if list.head == 0 { if unsafe { list.head == 0 } {
return error('Linked list is empty') return error('Linked list is empty')
} else { } else {
mut node := list.head mut node := list.head
mut iterations := 0 mut iterations := 0
for node.next != 0 && iterations < idx { for unsafe { node.next != 0 } && iterations < idx {
node = node.next node = node.next
iterations++ iterations++
} }
@ -64,12 +64,12 @@ pub fn (mut list LinkedList<T>) push(item T) {
new_node := &ListNode<T>{ new_node := &ListNode<T>{
data: item data: item
} }
if list.head == 0 { if unsafe { list.head == 0 } {
// first node case // first node case
list.head = new_node list.head = new_node
} else { } else {
mut node := list.head mut node := list.head
for node.next != 0 { for unsafe { node.next != 0 } {
node = node.next node = node.next
} }
node.next = new_node node.next = new_node
@ -79,17 +79,17 @@ pub fn (mut list LinkedList<T>) push(item T) {
// pop removes the last element of the linked list // pop removes the last element of the linked list
pub fn (mut list LinkedList<T>) pop() ?T { pub fn (mut list LinkedList<T>) pop() ?T {
if list.head == 0 { if unsafe { list.head == 0 } {
return error('Linked list is empty') return error('Linked list is empty')
} }
mut node := list.head mut node := list.head
mut to_return := node.data mut to_return := node.data
if node.next == 0 { if unsafe { node.next == 0 } {
// first node case // first node case
// set to null // set to null
list.head = voidptr(0) list.head = voidptr(0)
} else { } else {
for node.next.next != 0 { for unsafe { node.next.next != 0 } {
node = node.next node = node.next
} }
to_return = node.next.data to_return = node.next.data
@ -102,7 +102,7 @@ pub fn (mut list LinkedList<T>) pop() ?T {
// shift removes the first element of the linked list // shift removes the first element of the linked list
pub fn (mut list LinkedList<T>) shift() ?T { pub fn (mut list LinkedList<T>) shift() ?T {
if list.head == 0 { if unsafe { list.head == 0 } {
return error('Linked list is empty') return error('Linked list is empty')
} else { } else {
list.len -= 1 list.len -= 1
@ -149,7 +149,7 @@ pub fn (mut list LinkedList<T>) prepend(item T) {
pub fn (list LinkedList<T>) str() string { pub fn (list LinkedList<T>) str() string {
mut result_array := []T{} mut result_array := []T{}
mut node := list.head mut node := list.head
for node != 0 { for unsafe { node != 0 } {
result_array << node.data result_array << node.data
node = node.next node = node.next
} }

View File

@ -71,6 +71,11 @@ fn decode_i64(root &C.cJSON) i64 {
return i64(root.valuedouble) // i64 is double in C return i64(root.valuedouble) // i64 is double in C
} }
// TODO: remove when `byte` is removed
fn decode_byte(root &C.cJSON) byte {
return byte(decode_u8(root))
}
fn decode_u8(root &C.cJSON) u8 { fn decode_u8(root &C.cJSON) u8 {
if isnil(root) { if isnil(root) {
return u8(0) return u8(0)
@ -132,8 +137,6 @@ fn decode_string(root &C.cJSON) string {
if isnil(root.valuestring) { if isnil(root.valuestring) {
return '' return ''
} }
// println('decode string valuestring="$root.valuestring"')
// return tos(root.valuestring, _strlen(root.valuestring))
return unsafe { tos_clone(&u8(root.valuestring)) } // , _strlen(root.valuestring)) return unsafe { tos_clone(&u8(root.valuestring)) } // , _strlen(root.valuestring))
} }
@ -145,6 +148,7 @@ fn decode_bool(root &C.cJSON) bool {
} }
// /////////////////// // ///////////////////
fn encode_int(val int) &C.cJSON { fn encode_int(val int) &C.cJSON {
return C.cJSON_CreateNumber(val) return C.cJSON_CreateNumber(val)
} }
@ -161,6 +165,11 @@ fn encode_i64(val i64) &C.cJSON {
return C.cJSON_CreateNumber(val) return C.cJSON_CreateNumber(val)
} }
// TODO: remove when `byte` is removed
fn encode_byte(root byte) &C.cJSON {
return encode_u8(u8(root))
}
fn encode_u8(val u8) &C.cJSON { fn encode_u8(val u8) &C.cJSON {
return C.cJSON_CreateNumber(val) return C.cJSON_CreateNumber(val)
} }

View File

@ -27,6 +27,25 @@ fn test_simple() ? {
assert y.title == .worker assert y.title == .worker
} }
const currency_id = 'cconst'
struct Price {
net f64
currency_id string [json: currencyId] = currency_id
}
fn test_field_with_default_expr() ? {
data := '[{"net":1},{"net":2,"currencyId":"cjson"}]'
prices := json.decode([]Price, data)?
assert prices == [Price{
net: 1
currency_id: 'cconst'
}, Price{
net: 2
currency_id: 'cjson'
}]
}
fn test_decode_top_level_array() { fn test_decode_top_level_array() {
s := '[{"name":"Peter", "age": 29}, {"name":"Bob", "age":31}]' s := '[{"name":"Peter", "age": 29}, {"name":"Bob", "age":31}]'
x := json.decode([]Employee, s) or { panic(err) } x := json.decode([]Employee, s) or { panic(err) }
@ -454,3 +473,11 @@ fn test_encode_sumtype_defined_ahead() {
println(ret) println(ret)
assert ret == '{"value":0,"_type":"GPScale"}' assert ret == '{"value":0,"_type":"GPScale"}'
} }
struct StByteArray {
ba []byte
}
fn test_byte_array() {
assert json.encode(StByteArray{ ba: [byte(1), 2, 3, 4, 5] }) == '{"ba":[1,2,3,4,5]}'
}

View File

@ -96,6 +96,7 @@ pub fn exp2(x f64) f64 {
return expmulti(hi, lo, k) return expmulti(hi, lo, k)
} }
// ldexp calculates frac*(2**exp)
pub fn ldexp(frac f64, exp int) f64 { pub fn ldexp(frac f64, exp int) f64 {
return scalbn(frac, exp) return scalbn(frac, exp)
} }
@ -146,6 +147,7 @@ pub fn frexp(x f64) (f64, int) {
return f64_from_bits(y), e_ return f64_from_bits(y), e_
} }
// expm1 calculates e**x - 1
// special cases are: // special cases are:
// expm1(+inf) = +inf // expm1(+inf) = +inf
// expm1(-inf) = -1 // expm1(-inf) = -1
@ -176,7 +178,6 @@ pub fn expm1(x f64) f64 {
} }
} }
// exp1 returns e**r × 2**k where r = hi - lo and |r| ≤ ln(2)/2.
fn expmulti(hi f64, lo f64, k int) f64 { fn expmulti(hi f64, lo f64, k int) f64 {
exp_p1 := 1.66666666666666657415e-01 // 0x3FC55555; 0x55555555 exp_p1 := 1.66666666666666657415e-01 // 0x3FC55555; 0x55555555
exp_p2 := -2.77777777770155933842e-03 // 0xBF66C16C; 0x16BEBD93 exp_p2 := -2.77777777770155933842e-03 // 0xBF66C16C; 0x16BEBD93

View File

@ -1,5 +1,6 @@
module math module math
// hypot returns the hypotenuse of the triangle give two sides
pub fn hypot(x f64, y f64) f64 { pub fn hypot(x f64, y f64) f64 {
if is_inf(x, 0) || is_inf(y, 0) { if is_inf(x, 0) || is_inf(y, 0) {
return inf(1) return inf(1)

View File

@ -1,6 +1,7 @@
module math module math
import math.internal import math.internal
// acosh returns the non negative area hyperbolic cosine of x
pub fn acosh(x f64) f64 { pub fn acosh(x f64) f64 {
if x == 0.0 { if x == 0.0 {
@ -19,6 +20,7 @@ pub fn acosh(x f64) f64 {
} }
} }
// asinh returns the area hyperbolic sine of x
pub fn asinh(x f64) f64 { pub fn asinh(x f64) f64 {
a := abs(x) a := abs(x)
s := if x < 0 { -1.0 } else { 1.0 } s := if x < 0 { -1.0 } else { 1.0 }
@ -34,6 +36,7 @@ pub fn asinh(x f64) f64 {
} }
} }
// atanh returns the area hyperbolic tangent of x
pub fn atanh(x f64) f64 { pub fn atanh(x f64) f64 {
a := abs(x) a := abs(x)
s := if x < 0 { -1.0 } else { 1.0 } s := if x < 0 { -1.0 } else { 1.0 }

View File

@ -1,5 +1,6 @@
module math module math
// log_n returns log base b of x
pub fn log_n(x f64, b f64) f64 { pub fn log_n(x f64, b f64) f64 {
y := log(x) y := log(x)
z := log(b) z := log(b)
@ -24,6 +25,7 @@ pub fn log2(x f64) f64 {
return log(frac) * (1.0 / ln2) + f64(exp) return log(frac) * (1.0 / ln2) + f64(exp)
} }
// log1p returns log(1+x)
pub fn log1p(x f64) f64 { pub fn log1p(x f64) f64 {
y := 1.0 + x y := 1.0 + x
z := y - 1.0 z := y - 1.0

View File

@ -155,6 +155,7 @@ pub fn signbit(x f64) bool {
return f64_bits(x) & sign_mask != 0 return f64_bits(x) & sign_mask != 0
} }
// tolerance checks if a and b difference are less than or equal to the tolerance value
pub fn tolerance(a f64, b f64, tol f64) bool { pub fn tolerance(a f64, b f64, tol f64) bool {
mut ee := tol mut ee := tol
// Multiplying by ee here can underflow denormal values to zero. // Multiplying by ee here can underflow denormal values to zero.
@ -178,14 +179,17 @@ pub fn tolerance(a f64, b f64, tol f64) bool {
return d < ee return d < ee
} }
// close checks if a and b are within 1e-14 of each other
pub fn close(a f64, b f64) bool { pub fn close(a f64, b f64) bool {
return tolerance(a, b, 1e-14) return tolerance(a, b, 1e-14)
} }
// veryclose checks if a and b are within 4e-16 of each other
pub fn veryclose(a f64, b f64) bool { pub fn veryclose(a f64, b f64) bool {
return tolerance(a, b, 4e-16) return tolerance(a, b, 4e-16)
} }
// alike checks if a and b are equal
pub fn alike(a f64, b f64) bool { pub fn alike(a f64, b f64) bool {
if is_nan(a) && is_nan(b) { if is_nan(a) && is_nan(b) {
return true return true

View File

@ -4,13 +4,13 @@ fn C.cosf(x f32) f32
fn C.sinf(x f32) f32 fn C.sinf(x f32) f32
// cosf calculates cosine. (float32) // cosf calculates cosine in radians (float32)
[inline] [inline]
pub fn cosf(a f32) f32 { pub fn cosf(a f32) f32 {
return C.cosf(a) return C.cosf(a)
} }
// sinf calculates sine. (float32) // sinf calculates sine in radians (float32)
[inline] [inline]
pub fn sinf(a f32) f32 { pub fn sinf(a f32) f32 {
return C.sinf(a) return C.sinf(a)

View File

@ -4,13 +4,13 @@ fn JS.Math.cos(x f64) f64
fn JS.Math.sin(x f64) f64 fn JS.Math.sin(x f64) f64
// cos calculates cosine. // cos calculates cosine in radians
[inline] [inline]
pub fn cos(a f64) f64 { pub fn cos(a f64) f64 {
return JS.Math.cos(a) return JS.Math.cos(a)
} }
// sin calculates sine. // sin calculates sine in radians
[inline] [inline]
pub fn sin(a f64) f64 { pub fn sin(a f64) f64 {
return JS.Math.sin(a) return JS.Math.sin(a)

View File

@ -44,6 +44,7 @@ const (
} }
) )
// sin calculates the sine of the angle in radians
pub fn sin(x f64) f64 { pub fn sin(x f64) f64 {
p1 := 7.85398125648498535156e-1 p1 := 7.85398125648498535156e-1
p2 := 3.77489470793079817668e-8 p2 := 3.77489470793079817668e-8
@ -82,6 +83,7 @@ pub fn sin(x f64) f64 {
} }
} }
// cos calculates the cosine of the angle in radians
pub fn cos(x f64) f64 { pub fn cos(x f64) f64 {
p1 := 7.85398125648498535156e-1 p1 := 7.85398125648498535156e-1
p2 := 3.77489470793079817668e-8 p2 := 3.77489470793079817668e-8
@ -122,18 +124,19 @@ pub fn cos(x f64) f64 {
} }
} }
// cosf calculates cosine. (float32). // cosf calculates cosine in radians (float32).
[inline] [inline]
pub fn cosf(a f32) f32 { pub fn cosf(a f32) f32 {
return f32(cos(a)) return f32(cos(a))
} }
// sinf calculates sine. (float32) // sinf calculates sine in radians (float32)
[inline] [inline]
pub fn sinf(a f32) f32 { pub fn sinf(a f32) f32 {
return f32(sin(a)) return f32(sin(a))
} }
// sincos calculates the sine and cosine of the angle in radians
pub fn sincos(x f64) (f64, f64) { pub fn sincos(x f64) (f64, f64) {
p1 := 7.85398125648498535156e-1 p1 := 7.85398125648498535156e-1
p2 := 3.77489470793079817668e-8 p2 := 3.77489470793079817668e-8

View File

@ -4,13 +4,13 @@ fn JS.Math.cosh(x f64) f64
fn JS.Math.sinh(x f64) f64 fn JS.Math.sinh(x f64) f64
// cosh calculates hyperbolic cosine. // cosh calculates hyperbolic cosine in radians
[inline] [inline]
pub fn cosh(a f64) f64 { pub fn cosh(a f64) f64 {
return JS.Math.cosh(a) return JS.Math.cosh(a)
} }
// sinh calculates hyperbolic sine. // sinh calculates hyperbolic sine in radians
[inline] [inline]
pub fn sinh(a f64) f64 { pub fn sinh(a f64) f64 {
return JS.Math.sinh(a) return JS.Math.sinh(a)

View File

@ -33,7 +33,7 @@ pub fn sinh(x_ f64) f64 {
return temp return temp
} }
// cosh returns the hyperbolic cosine of x. // cosh returns the hyperbolic cosine of x in radians
// //
// special cases are: // special cases are:
// cosh(±0) = 1 // cosh(±0) = 1

View File

@ -2,7 +2,7 @@ module stats
import math import math
// Measure of Occurance // freq calculates the Measure of Occurance
// Frequency of a given number // Frequency of a given number
// Based on // Based on
// https://www.mathsisfun.com/data/frequency-distribution.html // https://www.mathsisfun.com/data/frequency-distribution.html
@ -19,8 +19,8 @@ pub fn freq<T>(data []T, val T) int {
return count return count
} }
// Measure of Central Tendancy // mean calculates the average
// Mean of the given input array // of the given input array, sum(data)/data.len
// Based on // Based on
// https://www.mathsisfun.com/data/central-measures.html // https://www.mathsisfun.com/data/central-measures.html
pub fn mean<T>(data []T) T { pub fn mean<T>(data []T) T {
@ -34,8 +34,8 @@ pub fn mean<T>(data []T) T {
return sum / T(data.len) return sum / T(data.len)
} }
// Measure of Central Tendancy // geometric_mean calculates the central tendency
// Geometric Mean of the given input array // of the given input array, product(data)**1/data.len
// Based on // Based on
// https://www.mathsisfun.com/numbers/geometric-mean.html // https://www.mathsisfun.com/numbers/geometric-mean.html
pub fn geometric_mean<T>(data []T) T { pub fn geometric_mean<T>(data []T) T {
@ -49,8 +49,8 @@ pub fn geometric_mean<T>(data []T) T {
return math.pow(sum, 1.0 / T(data.len)) return math.pow(sum, 1.0 / T(data.len))
} }
// Measure of Central Tendancy // harmonic_mean calculates the reciprocal of the average of reciprocals
// Harmonic Mean of the given input array // of the given input array
// Based on // Based on
// https://www.mathsisfun.com/numbers/harmonic-mean.html // https://www.mathsisfun.com/numbers/harmonic-mean.html
pub fn harmonic_mean<T>(data []T) T { pub fn harmonic_mean<T>(data []T) T {
@ -64,8 +64,7 @@ pub fn harmonic_mean<T>(data []T) T {
return T(data.len) / sum return T(data.len) / sum
} }
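Taken together, the three means above obey harmonic <= geometric <= arithmetic for positive inputs. A minimal usage sketch, assuming the module is importable as `math.stats` (adjust the import to wherever the stats module lives in your V tree):

import math.stats

fn main() {
	data := [1.0, 2.0, 4.0, 8.0]
	am := stats.mean(data) // (1 + 2 + 4 + 8) / 4 = 3.75
	gm := stats.geometric_mean(data) // (1 * 2 * 4 * 8) ^ (1/4) ≈ 2.83
	hm := stats.harmonic_mean(data) // 4 / (1/1 + 1/2 + 1/4 + 1/8) ≈ 2.13
	println('$hm <= $gm <= $am') // harmonic <= geometric <= arithmetic
}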
// Measure of Central Tendancy // median returns the middlemost value of the given input array ( input array is assumed to be sorted )
// Median of the given input array ( input array is assumed to be sorted )
// Based on // Based on
// https://www.mathsisfun.com/data/central-measures.html // https://www.mathsisfun.com/data/central-measures.html
pub fn median<T>(sorted_data []T) T { pub fn median<T>(sorted_data []T) T {
@ -80,8 +79,7 @@ pub fn median<T>(sorted_data []T) T {
} }
} }
// Measure of Central Tendancy // mode calculates the most frequently occurring value of the given input array
// Mode of the given input array
// Based on // Based on
// https://www.mathsisfun.com/data/central-measures.html // https://www.mathsisfun.com/data/central-measures.html
pub fn mode<T>(data []T) T { pub fn mode<T>(data []T) T {
@ -101,7 +99,7 @@ pub fn mode<T>(data []T) T {
return data[max] return data[max]
} }
// Root Mean Square of the given input array // rms, Root Mean Square, calculates the sqrt of the mean of the squares of the given input array
// Based on // Based on
// https://en.wikipedia.org/wiki/Root_mean_square // https://en.wikipedia.org/wiki/Root_mean_square
pub fn rms<T>(data []T) T { pub fn rms<T>(data []T) T {
@ -115,8 +113,8 @@ pub fn rms<T>(data []T) T {
return math.sqrt(sum / T(data.len)) return math.sqrt(sum / T(data.len))
} }
// Measure of Dispersion / Spread // population_variance is the Measure of Dispersion / Spread
// Population Variance of the given input array // of the given input array
// Based on // Based on
// https://www.mathsisfun.com/data/standard-deviation.html // https://www.mathsisfun.com/data/standard-deviation.html
[inline] [inline]
@ -128,8 +126,8 @@ pub fn population_variance<T>(data []T) T {
return population_variance_mean<T>(data, data_mean) return population_variance_mean<T>(data, data_mean)
} }
// Measure of Dispersion / Spread // population_variance_mean is the Measure of Dispersion / Spread
// Population Variance of the given input array // of the given input array, with the provided mean
// Based on // Based on
// https://www.mathsisfun.com/data/standard-deviation.html // https://www.mathsisfun.com/data/standard-deviation.html
pub fn population_variance_mean<T>(data []T, mean T) T { pub fn population_variance_mean<T>(data []T, mean T) T {
@ -143,8 +141,7 @@ pub fn population_variance_mean<T>(data []T, mean T) T {
return sum / T(data.len) return sum / T(data.len)
} }
// Measure of Dispersion / Spread // sample_variance calculates the spread of the dataset around the mean
// Sample Variance of the given input array
// Based on // Based on
// https://www.mathsisfun.com/data/standard-deviation.html // https://www.mathsisfun.com/data/standard-deviation.html
[inline] [inline]
@ -156,8 +153,7 @@ pub fn sample_variance<T>(data []T) T {
return sample_variance_mean<T>(data, data_mean) return sample_variance_mean<T>(data, data_mean)
} }
// Measure of Dispersion / Spread // sample_variance_mean calculates the spread of the dataset around the provided mean
// Sample Variance of the given input array
// Based on // Based on
// https://www.mathsisfun.com/data/standard-deviation.html // https://www.mathsisfun.com/data/standard-deviation.html
pub fn sample_variance_mean<T>(data []T, mean T) T { pub fn sample_variance_mean<T>(data []T, mean T) T {
@ -171,8 +167,7 @@ pub fn sample_variance_mean<T>(data []T, mean T) T {
return sum / T(data.len - 1) return sum / T(data.len - 1)
} }
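The only difference between the population and sample variants is the denominator: population_variance divides by n, sample_variance by n - 1 (Bessel's correction). A small sketch contrasting the two, under the same `math.stats` import assumption as above:

import math.stats

fn main() {
	data := [2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0]
	pv := stats.population_variance(data) // sum((x - mean)^2) / n = 4.0
	sv := stats.sample_variance(data) // sum((x - mean)^2) / (n - 1) ≈ 4.57
	println('population: $pv, sample: $sv')
}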
// Measure of Dispersion / Spread // population_stddev calculates how spread out the dataset is
// Population Standard Deviation of the given input array
// Based on // Based on
// https://www.mathsisfun.com/data/standard-deviation.html // https://www.mathsisfun.com/data/standard-deviation.html
[inline] [inline]
@ -183,8 +178,7 @@ pub fn population_stddev<T>(data []T) T {
return math.sqrt(population_variance<T>(data)) return math.sqrt(population_variance<T>(data))
} }
// Measure of Dispersion / Spread // population_stddev_mean calculates how spread out the dataset is, with the provided mean
// Population Standard Deviation of the given input array
// Based on // Based on
// https://www.mathsisfun.com/data/standard-deviation.html // https://www.mathsisfun.com/data/standard-deviation.html
[inline] [inline]
@ -219,8 +213,7 @@ pub fn sample_stddev_mean<T>(data []T, mean T) T {
return T(math.sqrt(f64(sample_variance_mean<T>(data, mean)))) return T(math.sqrt(f64(sample_variance_mean<T>(data, mean))))
} }
// Measure of Dispersion / Spread // absdev calculates the average distance between each data point and the mean
// Mean Absolute Deviation of the given input array
// Based on // Based on
// https://en.wikipedia.org/wiki/Average_absolute_deviation // https://en.wikipedia.org/wiki/Average_absolute_deviation
[inline] [inline]
@ -232,8 +225,7 @@ pub fn absdev<T>(data []T) T {
return absdev_mean<T>(data, data_mean) return absdev_mean<T>(data, data_mean)
} }
// Measure of Dispersion / Spread // absdev_mean calculates the average distance between each data point and the provided mean
// Mean Absolute Deviation of the given input array
// Based on // Based on
// https://en.wikipedia.org/wiki/Average_absolute_deviation // https://en.wikipedia.org/wiki/Average_absolute_deviation
pub fn absdev_mean<T>(data []T, mean T) T { pub fn absdev_mean<T>(data []T, mean T) T {
@ -247,7 +239,7 @@ pub fn absdev_mean<T>(data []T, mean T) T {
return sum / T(data.len) return sum / T(data.len)
} }
// Sum of squares // tss, Sum of squares, calculates the sum over all squared differences between values and the overall mean
[inline] [inline]
pub fn tss<T>(data []T) T { pub fn tss<T>(data []T) T {
if data.len == 0 { if data.len == 0 {
@ -257,7 +249,7 @@ pub fn tss<T>(data []T) T {
return tss_mean<T>(data, data_mean) return tss_mean<T>(data, data_mean)
} }
// Sum of squares about the mean // tss_mean, Sum of squares, calculates the sum over all squared differences between values and the provided mean
pub fn tss_mean<T>(data []T, mean T) T { pub fn tss_mean<T>(data []T, mean T) T {
if data.len == 0 { if data.len == 0 {
return T(0) return T(0)
@ -269,7 +261,7 @@ pub fn tss_mean<T>(data []T, mean T) T {
return tss return tss
} }
// Minimum of the given input array // min finds the minimum value from the dataset
pub fn min<T>(data []T) T { pub fn min<T>(data []T) T {
if data.len == 0 { if data.len == 0 {
return T(0) return T(0)
@ -283,7 +275,7 @@ pub fn min<T>(data []T) T {
return min return min
} }
// Maximum of the given input array // max finds the maximum value from the dataset
pub fn max<T>(data []T) T { pub fn max<T>(data []T) T {
if data.len == 0 { if data.len == 0 {
return T(0) return T(0)
@ -297,7 +289,7 @@ pub fn max<T>(data []T) T {
return max return max
} }
// Minimum and maximum of the given input array // minmax finds the minimum and maximum value from the dataset
pub fn minmax<T>(data []T) (T, T) { pub fn minmax<T>(data []T) (T, T) {
if data.len == 0 { if data.len == 0 {
return T(0), T(0) return T(0), T(0)
@ -315,7 +307,7 @@ pub fn minmax<T>(data []T) (T, T) {
return min, max return min, max
} }
// Minimum of the given input array // min_index finds the first index of the minimum value
pub fn min_index<T>(data []T) int { pub fn min_index<T>(data []T) int {
if data.len == 0 { if data.len == 0 {
return 0 return 0
@ -331,7 +323,7 @@ pub fn min_index<T>(data []T) int {
return min_index return min_index
} }
// Maximum of the given input array // max_index finds the first index of the maximum value
pub fn max_index<T>(data []T) int { pub fn max_index<T>(data []T) int {
if data.len == 0 { if data.len == 0 {
return 0 return 0
@ -347,7 +339,7 @@ pub fn max_index<T>(data []T) int {
return max_index return max_index
} }
// Minimum and maximum of the given input array // minmax_index finds the first index of the minimum and maximum value
pub fn minmax_index<T>(data []T) (int, int) { pub fn minmax_index<T>(data []T) (int, int) {
if data.len == 0 { if data.len == 0 {
return 0, 0 return 0, 0
@ -369,7 +361,7 @@ pub fn minmax_index<T>(data []T) (int, int) {
return min_index, max_index return min_index, max_index
} }
// Measure of Dispersion / Spread // range calculates the difference between the min and max
// Range ( Maximum - Minimum ) of the given input array // Range ( Maximum - Minimum ) of the given input array
// Based on // Based on
// https://www.mathsisfun.com/data/range.html // https://www.mathsisfun.com/data/range.html
@ -381,6 +373,8 @@ pub fn range<T>(data []T) T {
return max - min return max - min
} }
// covariance calculates the directional association between two datasets
// a positive value means the variables move in the same direction, a negative value means they move in opposite directions
[inline] [inline]
pub fn covariance<T>(data1 []T, data2 []T) T { pub fn covariance<T>(data1 []T, data2 []T) T {
mean1 := mean<T>(data1) mean1 := mean<T>(data1)
@ -388,7 +382,7 @@ pub fn covariance<T>(data1 []T, data2 []T) T {
return covariance_mean<T>(data1, data2, mean1, mean2) return covariance_mean<T>(data1, data2, mean1, mean2)
} }
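A short sketch of the sign behaviour described above, again assuming the `math.stats` import path:

import math.stats

fn main() {
	x := [1.0, 2.0, 3.0, 4.0]
	up := [2.0, 4.0, 6.0, 8.0] // moves with x
	down := [8.0, 6.0, 4.0, 2.0] // moves against x
	println(stats.covariance(x, up)) // positive
	println(stats.covariance(x, down)) // negative
}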
// Compute the covariance of a dataset using // covariance_mean computes the covariance of a dataset, with the means provided, using
// the recurrence relation // the recurrence relation
pub fn covariance_mean<T>(data1 []T, data2 []T, mean1 T, mean2 T) T { pub fn covariance_mean<T>(data1 []T, data2 []T, mean1 T, mean2 T) T {
n := int(math.min(data1.len, data2.len)) n := int(math.min(data1.len, data2.len))
@ -404,13 +398,16 @@ pub fn covariance_mean<T>(data1 []T, data2 []T, mean1 T, mean2 T) T {
return covariance return covariance
} }
// lag1_autocorrelation calculates the correlation between values that are one time period apart
// of a dataset, based on the mean
[inline] [inline]
pub fn lag1_autocorrelation<T>(data []T) T { pub fn lag1_autocorrelation<T>(data []T) T {
data_mean := mean<T>(data) data_mean := mean<T>(data)
return lag1_autocorrelation_mean<T>(data, data_mean) return lag1_autocorrelation_mean<T>(data, data_mean)
} }
// Compute the lag-1 autocorrelation of a dataset using // lag1_autocorrelation_mean calculates the correlation between values that are one time period apart
// of a dataset, using
// the recurrence relation // the recurrence relation
pub fn lag1_autocorrelation_mean<T>(data []T, mean T) T { pub fn lag1_autocorrelation_mean<T>(data []T, mean T) T {
if data.len == 0 { if data.len == 0 {
@ -427,6 +424,7 @@ pub fn lag1_autocorrelation_mean<T>(data []T, mean T) T {
return q / v return q / v
} }
// kurtosis calculates the measure of the 'tailedness' of the data, by first finding its mean and standard deviation
[inline] [inline]
pub fn kurtosis<T>(data []T) T { pub fn kurtosis<T>(data []T) T {
data_mean := mean<T>(data) data_mean := mean<T>(data)
@ -434,7 +432,7 @@ pub fn kurtosis<T>(data []T) T {
return kurtosis_mean_stddev<T>(data, data_mean, sd) return kurtosis_mean_stddev<T>(data, data_mean, sd)
} }
// Takes a dataset and finds the kurtosis // kurtosis_mean_stddev calculates the measure of the 'tailedness' of the data
// using the fourth moment the deviations, normalized by the sd // using the fourth moment the deviations, normalized by the sd
pub fn kurtosis_mean_stddev<T>(data []T, mean T, sd T) T { pub fn kurtosis_mean_stddev<T>(data []T, mean T, sd T) T {
mut avg := T(0) // find the fourth moment the deviations, normalized by the sd mut avg := T(0) // find the fourth moment the deviations, normalized by the sd
@ -449,6 +447,7 @@ pub fn kurtosis_mean_stddev<T>(data []T, mean T, sd T) T {
return avg - T(3.0) return avg - T(3.0)
} }
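The `- T(3.0)` above means the function reports excess kurtosis, i.e. the value is shifted so a normal distribution scores 0; heavier tails score above 0, flatter data below. A rough sketch under the same `math.stats` import assumption:

import math.stats

fn main() {
	peaked := [1.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 9.0] // mostly centered, two far outliers
	flat := [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0] // evenly spread
	println(stats.kurtosis(peaked)) // larger (heavier tails)
	println(stats.kurtosis(flat)) // smaller, below zero for a uniform-like spread
}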
// skew calculates the mean and standard deviation of the data, then uses them to find its skewness
[inline] [inline]
pub fn skew<T>(data []T) T { pub fn skew<T>(data []T) T {
data_mean := mean<T>(data) data_mean := mean<T>(data)
@ -456,6 +455,7 @@ pub fn skew<T>(data []T) T {
return skew_mean_stddev<T>(data, data_mean, sd) return skew_mean_stddev<T>(data, data_mean, sd)
} }
// skew_mean_stddev calculates the skewness of the data, using the provided mean and standard deviation
pub fn skew_mean_stddev<T>(data []T, mean T, sd T) T { pub fn skew_mean_stddev<T>(data []T, mean T, sd T) T {
mut skew := T(0) // find the sum of the cubed deviations, normalized by the sd. mut skew := T(0) // find the sum of the cubed deviations, normalized by the sd.
/* /*

View File

@ -14,26 +14,32 @@ pub mut:
hi Uint128 = uint128_zero // upper 128 bit half hi Uint128 = uint128_zero // upper 128 bit half
} }
// uint256_from_128 creates a new `unsigned.Uint256` from the given Uint128 value
pub fn uint256_from_128(v Uint128) Uint256 { pub fn uint256_from_128(v Uint128) Uint256 {
return Uint256{v, uint128_zero} return Uint256{v, uint128_zero}
} }
// uint256_from_64 creates a new `unsigned.Uint256` from the given u64 value
pub fn uint256_from_64(v u64) Uint256 { pub fn uint256_from_64(v u64) Uint256 {
return uint256_from_128(uint128_from_64(v)) return uint256_from_128(uint128_from_64(v))
} }
// is_zero checks if specified Uint256 is zero
pub fn (u Uint256) is_zero() bool { pub fn (u Uint256) is_zero() bool {
return u.lo.is_zero() && u.hi.is_zero() return u.lo.is_zero() && u.hi.is_zero()
} }
// equals checks if the two Uint256 values match one another
pub fn (u Uint256) equals(v Uint256) bool { pub fn (u Uint256) equals(v Uint256) bool {
return u.lo.equals(v.lo) && u.hi.equals(v.hi) return u.lo.equals(v.lo) && u.hi.equals(v.hi)
} }
pub fn (u Uint256) euqals_128(v Uint128) bool { // equals_128 checks if the Uint256 value matches the Uint128 value
pub fn (u Uint256) equals_128(v Uint128) bool {
return u.lo.equals(v) && u.hi.is_zero() return u.lo.equals(v) && u.hi.is_zero()
} }
// cmp returns 1 if u is greater than v, -1 if u is less than v, or 0 if equal
pub fn (u Uint256) cmp(v Uint256) int { pub fn (u Uint256) cmp(v Uint256) int {
h := u.hi.cmp(v.hi) h := u.hi.cmp(v.hi)
if h != 0 { if h != 0 {
@ -42,6 +48,7 @@ pub fn (u Uint256) cmp(v Uint256) int {
return u.lo.cmp(v.lo) return u.lo.cmp(v.lo)
} }
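A small sketch of the comparison semantics, assuming the module is importable as `math.unsigned`:

import math.unsigned

fn main() {
	a := unsigned.uint256_from_64(5)
	b := unsigned.uint256_from_64(7)
	println(a.cmp(b)) // -1, a < b
	println(b.cmp(a)) // 1, b > a
	println(a.cmp(a)) // 0, equal
}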
// cmp_128 returns 1 if u is greater than v (Uint128), -1 if u is less than v, or 0 if equal
pub fn (u Uint256) cmp_128(v Uint128) int { pub fn (u Uint256) cmp_128(v Uint128) int {
if !u.hi.is_zero() { if !u.hi.is_zero() {
return 1 return 1
@ -49,34 +56,42 @@ pub fn (u Uint256) cmp_128(v Uint128) int {
return u.lo.cmp(v) return u.lo.cmp(v)
} }
// not returns the bitwise negation of the Uint256 value
pub fn (u Uint256) not() Uint256 { pub fn (u Uint256) not() Uint256 {
return Uint256{u.lo.not(), u.hi.not()} return Uint256{u.lo.not(), u.hi.not()}
} }
// and returns a Uint256 value that is the bitwise and of u and v
pub fn (u Uint256) and(v Uint256) Uint256 { pub fn (u Uint256) and(v Uint256) Uint256 {
return Uint256{u.lo.and(v.lo), u.hi.and(v.hi)} return Uint256{u.lo.and(v.lo), u.hi.and(v.hi)}
} }
// and_128 returns a Uint256 value that is the bitwise and of u and v, which is a Uint128
pub fn (u Uint256) and_128(v Uint128) Uint256 { pub fn (u Uint256) and_128(v Uint128) Uint256 {
return Uint256{u.lo.and(v), uint128_zero} return Uint256{u.lo.and(v), uint128_zero}
} }
// or_ returns a Uint256 value that is the bitwise or of u and v
pub fn (u Uint256) or_(v Uint256) Uint256 { pub fn (u Uint256) or_(v Uint256) Uint256 {
return Uint256{u.lo.or_(v.lo), u.hi.or_(v.hi)} return Uint256{u.lo.or_(v.lo), u.hi.or_(v.hi)}
} }
// or_128 returns a Uint256 value that is the bitwise or of u and v, which is a Uint128
pub fn (u Uint256) or_128(v Uint128) Uint256 { pub fn (u Uint256) or_128(v Uint128) Uint256 {
return Uint256{u.lo.or_(v), u.hi} return Uint256{u.lo.or_(v), u.hi}
} }
// xor returns a Uint256 value that is the bitwise xor of u and v
pub fn (u Uint256) xor(v Uint256) Uint256 { pub fn (u Uint256) xor(v Uint256) Uint256 {
return Uint256{u.lo.xor(v.lo), u.hi.xor(v.hi)} return Uint256{u.lo.xor(v.lo), u.hi.xor(v.hi)}
} }
// xor_128 returns a Uint256 value that is the bitwise xor of u and v, which is a Uint128
pub fn (u Uint256) xor_128(v Uint128) Uint256 { pub fn (u Uint256) xor_128(v Uint128) Uint256 {
return Uint256{u.lo.xor(v), u.hi} return Uint256{u.lo.xor(v), u.hi}
} }
// add_256 - untested
pub fn add_256(x Uint256, y Uint256, carry u64) (Uint256, u64) { pub fn add_256(x Uint256, y Uint256, carry u64) (Uint256, u64) {
mut sum := Uint256{} mut sum := Uint256{}
mut carry_out := u64(0) mut carry_out := u64(0)
@ -85,6 +100,7 @@ pub fn add_256(x Uint256, y Uint256, carry u64) (Uint256, u64) {
return sum, carry_out return sum, carry_out
} }
// sub_256 - untested
pub fn sub_256(x Uint256, y Uint256, borrow u64) (Uint256, u64) { pub fn sub_256(x Uint256, y Uint256, borrow u64) (Uint256, u64) {
mut diff := Uint256{} mut diff := Uint256{}
mut borrow_out := u64(0) mut borrow_out := u64(0)
@ -93,6 +109,7 @@ pub fn sub_256(x Uint256, y Uint256, borrow u64) (Uint256, u64) {
return diff, borrow_out return diff, borrow_out
} }
// mul_256 - untested
pub fn mul_256(x Uint256, y Uint256) (Uint256, Uint256) { pub fn mul_256(x Uint256, y Uint256) (Uint256, Uint256) {
mut hi := Uint256{} mut hi := Uint256{}
mut lo := Uint256{} mut lo := Uint256{}
@ -114,31 +131,37 @@ pub fn mul_256(x Uint256, y Uint256) (Uint256, Uint256) {
return hi, lo return hi, lo
} }
// add returns a Uint256 that is equal to u+v
pub fn (u Uint256) add(v Uint256) Uint256 { pub fn (u Uint256) add(v Uint256) Uint256 {
sum, _ := add_256(u, v, 0) sum, _ := add_256(u, v, 0)
return sum return sum
} }
// overflowing_add - untested
pub fn (u Uint256) overflowing_add(v Uint256) (Uint256, u64) { pub fn (u Uint256) overflowing_add(v Uint256) (Uint256, u64) {
sum, overflow := add_256(u, v, 0) sum, overflow := add_256(u, v, 0)
return sum, overflow return sum, overflow
} }
// add_128 returns a Uint256 that is equal to u+v, v being a Uint128
pub fn (u Uint256) add_128(v Uint128) Uint256 { pub fn (u Uint256) add_128(v Uint128) Uint256 {
lo, c0 := add_128(u.lo, v, 0) lo, c0 := add_128(u.lo, v, 0)
return Uint256{lo, u.hi.add_64(c0)} return Uint256{lo, u.hi.add_64(c0)}
} }
// sub returns a Uint256 that is equal to u-v
pub fn (u Uint256) sub(v Uint256) Uint256 { pub fn (u Uint256) sub(v Uint256) Uint256 {
diff, _ := sub_256(u, v, 0) diff, _ := sub_256(u, v, 0)
return diff return diff
} }
// sub_128 returns a Uint256 that is equal to u-v, v being a Uint128
pub fn (u Uint256) sub_128(v Uint128) Uint256 { pub fn (u Uint256) sub_128(v Uint128) Uint256 {
lo, b0 := sub_128(u.lo, v, 0) lo, b0 := sub_128(u.lo, v, 0)
return Uint256{lo, u.hi.sub_64(b0)} return Uint256{lo, u.hi.sub_64(b0)}
} }
// mul returns a Uint256 that is equal to u*v
pub fn (u Uint256) mul(v Uint256) Uint256 { pub fn (u Uint256) mul(v Uint256) Uint256 {
mut hi, mut lo := mul_128(u.lo, v.lo) mut hi, mut lo := mul_128(u.lo, v.lo)
hi = hi.add(u.hi.mul(v.lo)) hi = hi.add(u.hi.mul(v.lo))
@ -146,11 +169,13 @@ pub fn (u Uint256) mul(v Uint256) Uint256 {
return Uint256{lo, hi} return Uint256{lo, hi}
} }
// mul_128 returns a Uint256 that is equal to u*v, v being a Uint128
pub fn (u Uint256) mul_128(v Uint128) Uint256 { pub fn (u Uint256) mul_128(v Uint128) Uint256 {
hi, lo := mul_128(u.lo, v) hi, lo := mul_128(u.lo, v)
return Uint256{lo, hi.add(u.hi.mul(v))} return Uint256{lo, hi.add(u.hi.mul(v))}
} }
// quo_rem - untested
pub fn (u Uint256) quo_rem(v Uint256) (Uint256, Uint256) { pub fn (u Uint256) quo_rem(v Uint256) (Uint256, Uint256) {
if v.hi.is_zero() { if v.hi.is_zero() {
q, r := u.quo_rem_128(v.lo) q, r := u.quo_rem_128(v.lo)
@ -173,6 +198,7 @@ pub fn (u Uint256) quo_rem(v Uint256) (Uint256, Uint256) {
return q, r return q, r
} }
// quo_rem_128 - untested
pub fn (u Uint256) quo_rem_128(v Uint128) (Uint256, Uint128) { pub fn (u Uint256) quo_rem_128(v Uint128) (Uint256, Uint128) {
if u.hi.cmp(v) < 0 { if u.hi.cmp(v) < 0 {
lo, r := div_128(u.hi, u.lo, v) lo, r := div_128(u.hi, u.lo, v)
@ -184,6 +210,7 @@ pub fn (u Uint256) quo_rem_128(v Uint128) (Uint256, Uint128) {
return Uint256{lo, hi}, r2 return Uint256{lo, hi}, r2
} }
// quo_rem_64 - untested
pub fn (u Uint256) quo_rem_64(v u64) (Uint256, u64) { pub fn (u Uint256) quo_rem_64(v u64) (Uint256, u64) {
mut q := Uint256{} mut q := Uint256{}
mut r := u64(0) mut r := u64(0)
@ -192,6 +219,7 @@ pub fn (u Uint256) quo_rem_64(v u64) (Uint256, u64) {
return q, r return q, r
} }
// rsh returns a new Uint256 that has been right bit shifted
pub fn (u Uint256) rsh(n_ u32) Uint256 { pub fn (u Uint256) rsh(n_ u32) Uint256 {
mut n := n_ mut n := n_
if n > 128 { if n > 128 {
@ -205,6 +233,7 @@ pub fn (u Uint256) rsh(n_ u32) Uint256 {
return Uint256{Uint128{u.lo.lo >> n | u.lo.hi << (64 - n), u.lo.hi >> n | u.hi.lo << (64 - n)}, Uint128{u.hi.lo >> n | u.hi.hi << (64 - n), u.hi.hi >> n}} return Uint256{Uint128{u.lo.lo >> n | u.lo.hi << (64 - n), u.lo.hi >> n | u.hi.lo << (64 - n)}, Uint128{u.hi.lo >> n | u.hi.hi << (64 - n), u.hi.hi >> n}}
} }
// lsh returns a new Uint256 that has been left bit shifted
pub fn (u Uint256) lsh(n_ u32) Uint256 { pub fn (u Uint256) lsh(n_ u32) Uint256 {
mut n := n_ mut n := n_
if n > 128 { if n > 128 {
@ -219,36 +248,43 @@ pub fn (u Uint256) lsh(n_ u32) Uint256 {
return Uint256{Uint128{u.lo.lo << n, u.lo.hi << n | u.lo.lo >> (64 - n)}, Uint128{u.hi.lo << n | u.lo.hi >> (64 - n), u.hi.hi << n | u.hi.lo >> (64 - n)}} return Uint256{Uint128{u.lo.lo << n, u.lo.hi << n | u.lo.lo >> (64 - n)}, Uint128{u.hi.lo << n | u.lo.hi >> (64 - n), u.hi.hi << n | u.hi.lo >> (64 - n)}}
} }
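A shifting sketch, under the same `math.unsigned` import assumption: left-shifting by one doubles the value, right-shifting by one halves it, and shifts whose bits stay within 256 bits are lossless:

import math.unsigned

fn main() {
	x := unsigned.uint256_from_64(6)
	println(x.lsh(1).str()) // 12
	println(x.rsh(1).str()) // 3
	println(x.lsh(100).rsh(100).str()) // 6 again, no bits were pushed out
}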
// div - untested
pub fn (u Uint256) div(v Uint256) Uint256 { pub fn (u Uint256) div(v Uint256) Uint256 {
q, _ := u.quo_rem(v) q, _ := u.quo_rem(v)
return q return q
} }
// div_128 - untested
pub fn (u Uint256) div_128(v Uint128) Uint256 { pub fn (u Uint256) div_128(v Uint128) Uint256 {
q, _ := u.quo_rem_128(v) q, _ := u.quo_rem_128(v)
return q return q
} }
// div_64 - untested
pub fn (u Uint256) div_64(v u64) Uint256 { pub fn (u Uint256) div_64(v u64) Uint256 {
q, _ := u.quo_rem_64(v) q, _ := u.quo_rem_64(v)
return q return q
} }
// mod - untested
pub fn (u Uint256) mod(v Uint256) Uint256 { pub fn (u Uint256) mod(v Uint256) Uint256 {
_, r := u.quo_rem(v) _, r := u.quo_rem(v)
return r return r
} }
// mod_128 - untested
pub fn (u Uint256) mod_128(v Uint128) Uint128 { pub fn (u Uint256) mod_128(v Uint128) Uint128 {
_, r := u.quo_rem_128(v) _, r := u.quo_rem_128(v)
return r return r
} }
// mod_64 - untested
pub fn (u Uint256) mod_64(v u64) u64 { pub fn (u Uint256) mod_64(v u64) u64 {
_, r := u.quo_rem_64(v) _, r := u.quo_rem_64(v)
return r return r
} }
// rotate_left returns a new Uint256 that has been rotated left by `k` bits
pub fn (u Uint256) rotate_left(k int) Uint256 { pub fn (u Uint256) rotate_left(k int) Uint256 {
mut n := u32(k) & 255 mut n := u32(k) & 255
if n < 64 { if n < 64 {
@ -283,10 +319,12 @@ pub fn (u Uint256) rotate_left(k int) Uint256 {
return Uint256{Uint128{u.lo.hi << n | u.lo.lo >> (64 - n), u.hi.lo << n | u.lo.hi >> (64 - n)}, Uint128{u.hi.hi << n | u.hi.lo >> (64 - n), u.lo.lo << n | u.hi.hi >> (64 - n)}} return Uint256{Uint128{u.lo.hi << n | u.lo.lo >> (64 - n), u.hi.lo << n | u.lo.hi >> (64 - n)}, Uint128{u.hi.hi << n | u.hi.lo >> (64 - n), u.lo.lo << n | u.hi.hi >> (64 - n)}}
} }
// rotate_right returns a new Uint256 that has been rotated right by `k` bits
pub fn (u Uint256) rotate_right(k int) Uint256 { pub fn (u Uint256) rotate_right(k int) Uint256 {
return u.rotate_left(-k) return u.rotate_left(-k)
} }
// len returns the length of the binary value without the leading zeros
pub fn (u Uint256) len() int { pub fn (u Uint256) len() int {
if !u.hi.is_zero() { if !u.hi.is_zero() {
return 128 + u.hi.len() return 128 + u.hi.len()
@ -294,6 +332,7 @@ pub fn (u Uint256) len() int {
return u.lo.len() return u.lo.len()
} }
// leading_zeros returns the number of 0s at the beginning of the binary value of the Uint256 value [0, 256]
pub fn (u Uint256) leading_zeros() int { pub fn (u Uint256) leading_zeros() int {
if !u.hi.is_zero() { if !u.hi.is_zero() {
return u.hi.leading_zeros() return u.hi.leading_zeros()
@ -301,6 +340,7 @@ pub fn (u Uint256) leading_zeros() int {
return 128 + u.lo.leading_zeros() return 128 + u.lo.leading_zeros()
} }
// trailing_zeros returns the number of 0s at the end of the binary value of the Uint256 value [0,256]
pub fn (u Uint256) trailing_zeros() int { pub fn (u Uint256) trailing_zeros() int {
if !u.lo.is_zero() { if !u.lo.is_zero() {
return u.lo.trailing_zeros() return u.lo.trailing_zeros()
@ -309,10 +349,12 @@ pub fn (u Uint256) trailing_zeros() int {
return 128 + u.hi.trailing_zeros() return 128 + u.hi.trailing_zeros()
} }
// ones_count returns the number of ones in the binary value of the Uint256 value
pub fn (u Uint256) ones_count() int { pub fn (u Uint256) ones_count() int {
return u.lo.ones_count() + u.hi.ones_count() return u.lo.ones_count() + u.hi.ones_count()
} }
// str returns the decimal representation of the unsigned integer
pub fn (u_ Uint256) str() string { pub fn (u_ Uint256) str() string {
mut u := u_ mut u := u_
if u.hi.is_zero() { if u.hi.is_zero() {
@ -339,6 +381,7 @@ pub fn (u_ Uint256) str() string {
return '' return ''
} }
// uint256_from_dec_str creates a new `unsigned.Uint256` from the given string if possible
pub fn uint256_from_dec_str(value string) ?Uint256 { pub fn uint256_from_dec_str(value string) ?Uint256 {
mut res := unsigned.uint256_zero mut res := unsigned.uint256_zero
for b_ in value.bytes() { for b_ in value.bytes() {

View File

@ -21,12 +21,10 @@ mut:
debug_file os.File debug_file os.File
} }
[if debug] [if debug_html ?]
fn (mut dom DocumentObjectModel) print_debug(data string) { fn (mut dom DocumentObjectModel) print_debug(data string) {
$if debug { if data.len > 0 {
if data.len > 0 { dom.debug_file.writeln(data) or { eprintln(err) }
dom.debug_file.writeln(data) or { eprintln(err) }
}
} }
} }
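For reference, `[if debug_html ?]` means the function body (and every call to it) is only compiled in when the program is built with `v -d debug_html`; a minimal standalone sketch of the same pattern, with hypothetical names:

// build with: v -d debug_html run example.v
[if debug_html ?]
fn print_debug(data string) {
	if data.len > 0 {
		eprintln('debug: $data')
	}
}

fn main() {
	print_debug('only emitted when compiled with -d debug_html')
	println('normal output')
}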

View File

@ -32,7 +32,7 @@ fn test_access_parent() {
mut dom := parse(generate_temp_html()) mut dom := parse(generate_temp_html())
div_tags := dom.get_tag('div') div_tags := dom.get_tag('div')
parent := div_tags[0].parent parent := div_tags[0].parent
assert parent != 0 assert unsafe { parent != 0 }
for div_tag in div_tags { for div_tag in div_tags {
assert div_tag.parent == parent assert div_tag.parent == parent
} }

View File

@ -49,12 +49,10 @@ fn (parser Parser) builder_str() string {
return parser.lexical_attributes.lexeme_builder.after(0) return parser.lexical_attributes.lexeme_builder.after(0)
} }
[if debug] [if debug_html ?]
fn (mut parser Parser) print_debug(data string) { fn (mut parser Parser) print_debug(data string) {
$if debug { if data.len > 0 {
if data.len > 0 { parser.debug_file.writeln(data) or { panic(err) }
parser.debug_file.writeln(data) or { panic(err) }
}
} }
} }
@ -155,7 +153,8 @@ pub fn (mut parser Parser) split_parse(data string) {
parser.lexical_attributes.current_tag.attributes[lattr] = nval parser.lexical_attributes.current_tag.attributes[lattr] = nval
parser.lexical_attributes.current_tag.last_attribute = '' parser.lexical_attributes.current_tag.last_attribute = ''
} else { } else {
parser.lexical_attributes.current_tag.attributes[temp_lexeme.to_lower()] = '' // parser.print_debug(temp_lexeme) parser.lexical_attributes.current_tag.attributes[temp_lexeme.to_lower()] = ''
// parser.print_debug(temp_lexeme)
} }
parser.lexical_attributes.lexeme_builder.go_back_to(0) parser.lexical_attributes.lexeme_builder.go_back_to(0)
} else { } else {

View File

@ -599,7 +599,7 @@ fn parse_host(host string) ?string {
return host1 + host2 + host3 return host1 + host2 + host3
} }
if idx := host.last_index(':') { if idx := host.last_index(':') {
colon_port = host[idx..] colon_port = host[idx..i]
if !valid_optional_port(colon_port) { if !valid_optional_port(colon_port) {
return error(error_msg('parse_host: invalid port $colon_port after host ', return error(error_msg('parse_host: invalid port $colon_port after host ',
'')) ''))

View File

@ -98,3 +98,25 @@ fn test_parse_empty_query_two() ? {
assert qvalues_map == {} assert qvalues_map == {}
assert query_str == query_encode assert query_str == query_encode
} }
fn test_parse() ? {
urls := [
'jdbc:mysql://test_user:ouupppssss@localhost:3306/sakila?profileSQL=true',
'ftp://ftp.is.co.za/rfc/rfc1808.txt',
'http://www.ietf.org/rfc/rfc2396.txt#header1',
'ldap://[2001:db8::7]/c=GB?objectClass=one&objectClass=two',
'mailto:John.Doe@example.com',
'news:comp.infosystems.www.servers.unix',
'tel:+1-816-555-1212',
'telnet://192.0.2.16:80/',
'urn:oasis:names:specification:docbook:dtd:xml:4.1.2',
'foo://example.com:8042/over/there?name=ferret#nose',
]
for url in urls {
_ := urllib.parse(url) or {
eprintln(err)
assert false
return
}
}
}

View File

@ -1,13 +1,20 @@
module os module os
import strings
import strings.textscanner
// Collection of useful functions for manipulation, validation and analysis of system paths. // Collection of useful functions for manipulation, validation and analysis of system paths.
// The following functions handle paths depending on the operating system, // The following functions handle paths depending on the operating system,
// therefore results may be different for certain operating systems. // therefore results may be different for certain operating systems.
const ( const (
fslash = `/` fslash = `/`
bslash = `\\` bslash = `\\`
dot = `.` dot = `.`
qmark = `?`
dot_dot = '..'
empty_str = ''
dot_str = '.'
) )
// is_abs_path returns `true` if the given `path` is absolute. // is_abs_path returns `true` if the given `path` is absolute.
@ -16,11 +23,152 @@ pub fn is_abs_path(path string) bool {
return false return false
} }
$if windows { $if windows {
return is_device_path(path) || is_drive_rooted(path) || is_normal_path(path) return is_unc_path(path) || is_drive_rooted(path) || is_normal_path(path)
} }
return path[0] == os.fslash return path[0] == os.fslash
} }
// abs_path joins the current working directory
// with the given `path` (if the `path` is relative)
// and returns the absolute path representation.
pub fn abs_path(path string) string {
wd := getwd()
if path.len == 0 {
return wd
}
npath := norm_path(path)
if npath == os.dot_str {
return wd
}
if !is_abs_path(npath) {
mut sb := strings.new_builder(npath.len)
sb.write_string(wd)
sb.write_string(path_separator)
sb.write_string(npath)
return norm_path(sb.str())
}
return npath
}
// norm_path returns the normalized version of the given `path`
// by resolving backlinks (..), turning forward slashes into
// back slashes on a Windows system and eliminating:
// - references to current directories (.)
// - redundant path separators
// - the last path separator
[direct_array_access]
pub fn norm_path(path string) string {
if path.len == 0 {
return os.dot_str
}
rooted := is_abs_path(path)
volume := get_volume(path)
volume_len := volume.len
cpath := clean_path(path[volume_len..])
if cpath.len == 0 && volume_len == 0 {
return os.dot_str
}
spath := cpath.split(path_separator)
if os.dot_dot !in spath {
return if volume_len != 0 { volume + cpath } else { cpath }
}
// resolve backlinks (..)
spath_len := spath.len
mut sb := strings.new_builder(cpath.len)
if rooted {
sb.write_string(path_separator)
}
mut new_path := []string{cap: spath_len}
mut backlink_count := 0
for i := spath_len - 1; i >= 0; i-- {
part := spath[i]
if part == os.empty_str {
continue
}
if part == os.dot_dot {
backlink_count++
continue
}
if backlink_count != 0 {
backlink_count--
continue
}
new_path.prepend(part)
}
// append backlink(s) to the path if backtracking
// is not possible and the given path is not rooted
if backlink_count != 0 && !rooted {
for i in 0 .. backlink_count {
sb.write_string(os.dot_dot)
if new_path.len == 0 && i == backlink_count - 1 {
break
}
sb.write_string(path_separator)
}
}
sb.write_string(new_path.join(path_separator))
res := sb.str()
if res.len == 0 {
if volume_len != 0 {
return volume
}
if !rooted {
return os.dot_str
}
return path_separator
}
if volume_len != 0 {
return volume + res
}
return res
}
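A brief sketch of what the normalization above produces on a non-Windows target (on Windows the separators in the results would be backslashes):

import os

fn main() {
	println(os.norm_path('path/././to/files/../../file.v/.')) // 'path/file.v'
	println(os.norm_path('a/b/../../../c')) // '../c'
	println(os.norm_path('/../dir/..')) // '/'
}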
// clean_path returns the "cleaned" version of the given `path`
// by turning forward slashes into back slashes
// on a Windows system and eliminating:
// - references to current directories (.)
// - redundant separators
// - the last path separator
fn clean_path(path string) string {
if path.len == 0 {
return os.empty_str
}
mut sb := strings.new_builder(path.len)
mut sc := textscanner.new(path)
for sc.next() != -1 {
curr := u8(sc.current())
back := sc.peek_back()
peek := sc.peek()
// skip current path separator if last byte was a path separator
if back != -1 && is_slash(u8(back)) && is_slash(curr) {
continue
}
// skip reference to current dir (.)
if (back == -1 || is_slash(u8(back))) && curr == os.dot
&& (peek == -1 || is_slash(u8(peek))) {
// skip if the next byte is a path separator
if peek != -1 && is_slash(u8(peek)) {
sc.skip_n(1)
}
continue
}
// turn a forward slash into a back slash on a Windows system
$if windows {
if curr == os.fslash {
sb.write_u8(os.bslash)
continue
}
}
sb.write_u8(u8(sc.current()))
}
res := sb.str()
// eliminate the last path separator
if res.len > 1 && is_slash(res[res.len - 1]) {
return res[..res.len - 1]
}
return res
}
// win_volume_len returns the length of the // win_volume_len returns the length of the
// Windows volume/drive from the given `path`. // Windows volume/drive from the given `path`.
fn win_volume_len(path string) int { fn win_volume_len(path string) int {
@ -32,7 +180,7 @@ fn win_volume_len(path string) int {
return 2 return 2
} }
// its UNC path / DOS device path? // its UNC path / DOS device path?
if path.len >= 5 && starts_w_slash_slash(path) && !is_slash(path[2]) { if plen >= 5 && starts_w_slash_slash(path) && !is_slash(path[2]) {
for i := 3; i < plen; i++ { for i := 3; i < plen; i++ {
if is_slash(path[i]) { if is_slash(path[i]) {
if i + 1 >= plen || is_slash(path[i + 1]) { if i + 1 >= plen || is_slash(path[i + 1]) {
@ -51,6 +199,20 @@ fn win_volume_len(path string) int {
return 0 return 0
} }
fn get_volume(path string) string {
$if !windows {
return os.empty_str
}
volume := path[..win_volume_len(path)]
if volume.len == 0 {
return os.empty_str
}
if volume[0] == os.fslash {
return volume.replace('/', '\\')
}
return volume
}
fn is_slash(b u8) bool { fn is_slash(b u8) bool {
$if windows { $if windows {
return b == os.bslash || b == os.fslash return b == os.bslash || b == os.fslash
@ -58,7 +220,7 @@ fn is_slash(b u8) bool {
return b == os.fslash return b == os.fslash
} }
fn is_device_path(path string) bool { fn is_unc_path(path string) bool {
return win_volume_len(path) >= 5 && starts_w_slash_slash(path) return win_volume_len(path) >= 5 && starts_w_slash_slash(path)
} }

View File

@ -27,3 +27,103 @@ fn test_is_abs_path() {
assert !is_abs_path('./') assert !is_abs_path('./')
assert !is_abs_path('.') assert !is_abs_path('.')
} }
fn test_clean_path() {
$if windows {
assert clean_path(r'\\path\to\files/file.v') == r'\path\to\files\file.v'
assert clean_path(r'\/\//\/') == '\\'
assert clean_path(r'./path\\dir/\\./\/\\/file.v\.\\\.') == r'path\dir\file.v'
assert clean_path(r'\./path/dir\\file.exe') == r'\path\dir\file.exe'
assert clean_path(r'.') == ''
assert clean_path(r'./') == ''
assert clean_path(r'\./') == '\\'
assert clean_path(r'//\/\/////') == '\\'
return
}
assert clean_path('./../.././././//') == '../..'
assert clean_path('.') == ''
assert clean_path('./path/to/file.v//./') == 'path/to/file.v'
assert clean_path('./') == ''
assert clean_path('/.') == '/'
assert clean_path('//path/./to/.///files/file.v///') == '/path/to/files/file.v'
assert clean_path('path/./to/.///files/.././file.v///') == 'path/to/files/../file.v'
assert clean_path('\\') == '\\'
assert clean_path('//////////') == '/'
}
fn test_norm_path() {
$if windows {
assert norm_path(r'C:/path/to//file.v\\') == r'C:\path\to\file.v'
assert norm_path(r'C:path\.\..\\\.\to//file.v') == r'C:to\file.v'
assert norm_path(r'D:path\.\..\..\\\\.\to//dir/..\') == r'D:..\to'
assert norm_path(r'D:/path\.\..\/..\file.v') == r'D:\file.v'
assert norm_path(r'') == '.'
assert norm_path(r'/') == '\\'
assert norm_path(r'\/') == '\\'
assert norm_path(r'path\../dir\..') == '.'
assert norm_path(r'.\.\') == '.'
assert norm_path(r'G:.\.\dir\././\.\.\\\\///to/././\file.v/./\\') == r'G:dir\to\file.v'
assert norm_path(r'G:\..\..\.\.\file.v\\\.') == r'G:\file.v'
assert norm_path(r'\\Server\share\\\dir/..\file.v\./.') == r'\\Server\share\file.v'
assert norm_path(r'\\.\device\\\dir/to/./file.v\.') == r'\\.\device\dir\to\file.v'
assert norm_path(r'C:dir/../dir2/../../../file.v') == r'C:..\..\file.v'
assert norm_path(r'\\.\C:\\\Users/\Documents//..') == r'\\.\C:\Users'
assert norm_path(r'\\.\C:\Users') == r'\\.\C:\Users'
assert norm_path(r'\\') == '\\'
assert norm_path(r'//') == '\\'
assert norm_path(r'\\\') == '\\'
assert norm_path(r'.') == '.'
assert norm_path(r'\\Server') == '\\Server'
assert norm_path(r'\\Server\') == '\\Server'
return
}
assert norm_path('/path/././../to/file//file.v/.') == '/to/file/file.v'
assert norm_path('path/././to/files/../../file.v/.') == 'path/file.v'
assert norm_path('path/././/../../to/file.v/.') == '../to/file.v'
assert norm_path('/path/././/../..///.././file.v/././') == '/file.v'
assert norm_path('path/././//../../../to/dir//.././file.v/././') == '../../to/file.v'
assert norm_path('path/../dir/..') == '.'
assert norm_path('../dir/..') == '..'
assert norm_path('/../dir/..') == '/'
assert norm_path('//././dir/../files/././/file.v') == '/files/file.v'
assert norm_path('/\\../dir/////////.') == '/\\../dir'
assert norm_path('/home/') == '/home'
assert norm_path('/home/////./.') == '/home'
assert norm_path('...') == '...'
}
fn test_abs_path() {
wd := getwd()
wd_w_sep := wd + path_separator
$if windows {
assert abs_path('path/to/file.v') == '${wd_w_sep}path\\to\\file.v'
assert abs_path('path/to/file.v') == '${wd_w_sep}path\\to\\file.v'
assert abs_path('/') == r'\'
assert abs_path(r'C:\path\to\files\file.v') == r'C:\path\to\files\file.v'
assert abs_path(r'C:\/\path\.\to\../files\file.v\.\\\.\') == r'C:\path\files\file.v'
assert abs_path(r'\\Host\share\files\..\..\.') == r'\\Host\share\'
assert abs_path(r'\\.\HardDiskvolume2\files\..\..\.') == r'\\.\HardDiskvolume2\'
assert abs_path(r'\\?\share') == r'\\?\share'
assert abs_path(r'\\.\') == r'\'
assert abs_path(r'G:/\..\\..\.\.\file.v\\.\.\\\\') == r'G:\file.v'
assert abs_path('files') == '${wd_w_sep}files'
assert abs_path('') == wd
assert abs_path('.') == wd
assert abs_path('files/../file.v') == '${wd_w_sep}file.v'
assert abs_path('///') == r'\'
assert abs_path('/path/to/file.v') == r'\path\to\file.v'
assert abs_path('D:/') == r'D:\'
assert abs_path(r'\\.\HardiskVolume6') == r'\\.\HardiskVolume6'
return
}
assert abs_path('/') == '/'
assert abs_path('.') == wd
assert abs_path('files') == '${wd_w_sep}files'
assert abs_path('') == wd
assert abs_path('files/../file.v') == '${wd_w_sep}file.v'
assert abs_path('///') == '/'
assert abs_path('/path/to/file.v') == '/path/to/file.v'
assert abs_path('/path/to/file.v/../..') == '/path'
assert abs_path('path/../file.v/..') == wd
assert abs_path('///') == '/'
}

View File

@ -176,7 +176,7 @@ fn (mut p Process) win_write_string(idx int, s string) {
fn (mut p Process) win_read_string(idx int, maxbytes int) (string, int) { fn (mut p Process) win_read_string(idx int, maxbytes int) (string, int) {
mut wdata := &WProcess(p.wdata) mut wdata := &WProcess(p.wdata)
if wdata == 0 { if unsafe { wdata == 0 } {
return '', 0 return '', 0
} }
mut rhandle := &u32(0) mut rhandle := &u32(0)
@ -207,7 +207,7 @@ fn (mut p Process) win_read_string(idx int, maxbytes int) (string, int) {
fn (mut p Process) win_slurp(idx int) string { fn (mut p Process) win_slurp(idx int) string {
mut wdata := &WProcess(p.wdata) mut wdata := &WProcess(p.wdata)
if wdata == 0 { if unsafe { wdata == 0 } {
return '' return ''
} }
mut rhandle := &u32(0) mut rhandle := &u32(0)

View File

@ -0,0 +1,10 @@
// Copyright (c) 2019-2022 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module buffer
pub struct PRNGBuffer {
mut:
bytes_left int
buffer u64
}

View File

@ -1,3 +1,6 @@
// Copyright (c) 2019-2022 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module config module config
import rand.seed import rand.seed
@ -12,3 +15,35 @@ pub struct PRNGConfigStruct {
pub: pub:
seed_ []u32 = seed.time_seed_array(2) seed_ []u32 = seed.time_seed_array(2)
} }
// Configuration struct for generating normally distributed floats. The default value for
// `mu` is 0 and the default value for `sigma` is 1.
[params]
pub struct NormalConfigStruct {
pub:
mu f64 = 0.0
sigma f64 = 1.0
}
// Configuration struct for the shuffle functions.
// The start index is inclusive and the end index is exclusive.
// Set the end to 0 to shuffle until the end of the array.
[params]
pub struct ShuffleConfigStruct {
pub:
start int
end int
}
// validate_for is a helper function for validating the configuration struct for the given array.
pub fn (config ShuffleConfigStruct) validate_for<T>(a []T) ? {
if config.start < 0 || config.start >= a.len {
return error("argument 'config.start' must be in range [0, a.len)")
}
if config.end < 0 || config.end > a.len {
return error("argument 'config.end' must be in range [0, a.len]")
}
if config.end != 0 && config.end <= config.start {
return error("argument 'config.end' must be greater than 'config.start'")
}
}
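A usage sketch for the struct above: shuffling only a sub-range of an array by passing `start`/`end` as trailing struct arguments (this relies on `rand.shuffle`, shown further below, accepting a `config.ShuffleConfigStruct`):

import rand

fn main() {
	mut a := [1, 2, 3, 4, 5, 6, 7, 8]
	// only indexes 2..5 are permuted; the rest stay in place
	rand.shuffle(mut a, start: 2, end: 6) or { panic(err) }
	println(a)
}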

View File

@ -1,10 +0,0 @@
# Non-Uniform Distribution Functions
This module contains functions for sampling from non-uniform distributions.
All implementations of the `rand.PRNG` interface generate numbers from uniform
distributions. This library exists to allow the generation of pseudorandom numbers
sampled from non-uniform distributions. Additionally, it allows the user to use any
PRNG of their choice. This is because the default RNG can be reassigned to a different
generator. It can either be one of the pre-existing one (which are well-tested and
recommended) or a custom user-defined one. See `rand.set_rng()`.

85
vlib/rand/dist/dist.v vendored
View File

@ -1,85 +0,0 @@
// Copyright (c) 2019-2022 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module dist
import math
import rand
fn check_probability_range(p f64) {
if p < 0 || p > 1 {
panic('$p is not a valid probability value.')
}
}
// bernoulli returns true with a probability p. Note that 0 <= p <= 1.
pub fn bernoulli(p f64) bool {
check_probability_range(p)
return rand.f64() <= p
}
// binomial returns the number of successful trials out of n when the
// probability of success for each trial is p.
pub fn binomial(n int, p f64) int {
check_probability_range(p)
mut count := 0
for _ in 0 .. n {
if bernoulli(p) {
count++
}
}
return count
}
// Configuration struct for the `normal_pair` function. The default value for
// `mu` is 0 and the default value for `sigma` is 1.
pub struct NormalConfigStruct {
mu f64 = 0.0
sigma f64 = 1.0
}
// normal_pair returns a pair of normally distributed random numbers with the mean mu
// and standard deviation sigma. If not specified, mu is 0 and sigma is 1. Intended usage is
// `x, y := normal_pair(mu: mean, sigma: stdev)`, or `x, y := normal_pair()`.
pub fn normal_pair(config NormalConfigStruct) (f64, f64) {
if config.sigma <= 0 {
panic('The standard deviation has to be positive.')
}
// This is an implementation of the Marsaglia polar method
// See: https://doi.org/10.1137%2F1006063
// Also: https://en.wikipedia.org/wiki/Marsaglia_polar_method
for {
u := rand.f64_in_range(-1, 1) or { 0.0 }
v := rand.f64_in_range(-1, 1) or { 0.0 }
s := u * u + v * v
if s >= 1 || s == 0 {
continue
}
t := math.sqrt(-2 * math.log(s) / s)
x := config.mu + config.sigma * t * u
y := config.mu + config.sigma * t * v
return x, y
}
return config.mu, config.mu
}
// normal returns a normally distributed random number with the mean mu and standard deviation
// sigma. If not specified, mu is 0 and sigma is 1. Intended usage is
// `x := normal(mu: mean, sigma: etdev)` or `x := normal()`.
// **NOTE:** If you are generating a lot of normal variates, use `the normal_pair` function
// instead. This function discards one of the two variates generated by the `normal_pair` function.
pub fn normal(config NormalConfigStruct) f64 {
x, _ := normal_pair(config)
return x
}
// exponential returns an exponentially distributed random number with the rate paremeter
// lambda. It is expected that lambda is positive.
pub fn exponential(lambda f64) f64 {
if lambda <= 0 {
panic('The rate (lambda) must be positive.')
}
// Use the inverse transform sampling method
return -math.log(rand.f64()) / lambda
}

View File

@ -1,6 +1,5 @@
import math import math
import rand import rand
import rand.dist
const ( const (
// The sample size to be used // The sample size to be used
@ -20,7 +19,7 @@ fn test_bernoulli() {
for p in ps { for p in ps {
mut successes := 0 mut successes := 0
for _ in 0 .. count { for _ in 0 .. count {
if dist.bernoulli(p) { if rand.bernoulli(p) or { false } {
successes++ successes++
} }
} }
@ -43,7 +42,7 @@ fn test_binomial() {
mut sum := 0 mut sum := 0
mut var := 0.0 mut var := 0.0
for _ in 0 .. count { for _ in 0 .. count {
x := dist.binomial(n, p) x := rand.binomial(n, p) or { 0 }
sum += x sum += x
dist := (x - np) dist := (x - np)
var += dist * dist var += dist * dist
@ -68,7 +67,7 @@ fn test_normal_pair() {
mut sum := 0.0 mut sum := 0.0
mut var := 0.0 mut var := 0.0
for _ in 0 .. count { for _ in 0 .. count {
x, y := dist.normal_pair(mu: mu, sigma: sigma) x, y := rand.normal_pair(mu: mu, sigma: sigma) or { 0.0, 0.0 }
sum += x + y sum += x + y
dist_x := x - mu dist_x := x - mu
dist_y := y - mu dist_y := y - mu
@ -95,7 +94,7 @@ fn test_normal() {
mut sum := 0.0 mut sum := 0.0
mut var := 0.0 mut var := 0.0
for _ in 0 .. count { for _ in 0 .. count {
x := dist.normal(mu: mu, sigma: sigma) x := rand.normal(mu: mu, sigma: sigma) or { 0.0 }
sum += x sum += x
dist := x - mu dist := x - mu
var += dist * dist var += dist * dist
@ -120,7 +119,7 @@ fn test_exponential() {
mut sum := 0.0 mut sum := 0.0
mut var := 0.0 mut var := 0.0
for _ in 0 .. count { for _ in 0 .. count {
x := dist.exponential(lambda) x := rand.exponential(lambda)
sum += x sum += x
dist := x - mu dist := x - mu
var += dist * dist var += dist * dist

View File

@ -0,0 +1,130 @@
// Copyright (c) 2019-2022 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module rand
// NOTE: mini_math.v exists, so that we can avoid `import math`,
// just for the math.log and math.sqrt functions needed for the
// non-uniform random number redistribution functions.
// Importing math is relatively heavy, both in terms of compilation
// speed (more source to process), and in terms of increases in the
// generated executable sizes (if the rest of the program does not use
// math already; many programs do not need math, for example the
// compiler itself does not, while needing random number generation).
const sqrt2 = 1.41421356237309504880168872420969807856967187537694807317667974
[inline]
fn msqrt(a f64) f64 {
if a == 0 {
return a
}
mut x := a
z, ex := frexp(x)
w := x
// approximate square root of number between 0.5 and 1
// relative error of approximation = 7.47e-3
x = 4.173075996388649989089e-1 + 5.9016206709064458299663e-1 * z // adjust for odd powers of 2
if (ex & 1) != 0 {
x *= rand.sqrt2
}
x = scalbn(x, ex >> 1)
// newton iterations
x = 0.5 * (x + w / x)
x = 0.5 * (x + w / x)
x = 0.5 * (x + w / x)
return x
}
// a simplified approximation (without the edge cases), see math.log
fn mlog(a f64) f64 {
ln2_lo := 1.90821492927058770002e-10
ln2_hi := 0.693147180369123816490
l1 := 0.6666666666666735130
l2 := 0.3999999999940941908
l3 := 0.2857142874366239149
l4 := 0.2222219843214978396
l5 := 0.1818357216161805012
l6 := 0.1531383769920937332
l7 := 0.1479819860511658591
x := a
mut f1, mut ki := frexp(x)
if f1 < rand.sqrt2 / 2 {
f1 *= 2
ki--
}
f := f1 - 1
k := f64(ki)
s := f / (2 + f)
s2 := s * s
s4 := s2 * s2
t1 := s2 * (l1 + s4 * (l3 + s4 * (l5 + s4 * l7)))
t2 := s4 * (l2 + s4 * (l4 + s4 * l6))
r := t1 + t2
hfsq := 0.5 * f * f
return k * ln2_hi - ((hfsq - (s * (hfsq + r) + k * ln2_lo)) - f)
}
fn frexp(x f64) (f64, int) {
mut y := f64_bits(x)
ee := int((y >> 52) & 0x7ff)
if ee == 0 {
if x != 0.0 {
x1p64 := f64_from_bits(u64(0x43f0000000000000))
z, e_ := frexp(x * x1p64)
return z, e_ - 64
}
return x, 0
} else if ee == 0x7ff {
return x, 0
}
e_ := ee - 0x3fe
y &= u64(0x800fffffffffffff)
y |= u64(0x3fe0000000000000)
return f64_from_bits(y), e_
}
fn scalbn(x f64, n_ int) f64 {
mut n := n_
x1p1023 := f64_from_bits(u64(0x7fe0000000000000))
x1p53 := f64_from_bits(u64(0x4340000000000000))
x1p_1022 := f64_from_bits(u64(0x0010000000000000))
mut y := x
if n > 1023 {
y *= x1p1023
n -= 1023
if n > 1023 {
y *= x1p1023
n -= 1023
if n > 1023 {
n = 1023
}
}
} else if n < -1022 {
/*
make sure final n < -53 to avoid double
rounding in the subnormal range
*/
y *= x1p_1022 * x1p53
n += 1022 - 53
if n < -1022 {
y *= x1p_1022 * x1p53
n += 1022 - 53
if n < -1022 {
n = -1022
}
}
}
return y * f64_from_bits(u64((0x3ff + n)) << 52)
}
[inline]
fn f64_from_bits(b u64) f64 {
return *unsafe { &f64(&b) }
}
[inline]
fn f64_bits(f f64) u64 {
return *unsafe { &u64(&f) }
}

View File

@ -3,6 +3,7 @@
// that can be found in the LICENSE file. // that can be found in the LICENSE file.
module mt19937 module mt19937
import rand.buffer
import rand.seed import rand.seed
/* /*
@ -60,11 +61,10 @@ const (
// MT19937RNG is generator that uses the Mersenne Twister algorithm with period 2^19937. // MT19937RNG is generator that uses the Mersenne Twister algorithm with period 2^19937.
// **NOTE**: The RNG is not seeded when instantiated so remember to seed it before use. // **NOTE**: The RNG is not seeded when instantiated so remember to seed it before use.
pub struct MT19937RNG { pub struct MT19937RNG {
buffer.PRNGBuffer
mut: mut:
state []u64 = get_first_state(seed.time_seed_array(2)) state []u64 = get_first_state(seed.time_seed_array(2))
mti int = mt19937.nn mti int = mt19937.nn
bytes_left int
buffer u64
} }
fn get_first_state(seed_data []u32) []u64 { fn get_first_state(seed_data []u32) []u64 {

View File

@ -4,15 +4,15 @@
module musl module musl
import rand.seed import rand.seed
import rand.buffer
pub const seed_len = 1 pub const seed_len = 1
// MuslRNG ported from https://git.musl-libc.org/cgit/musl/tree/src/prng/rand_r.c // MuslRNG ported from https://git.musl-libc.org/cgit/musl/tree/src/prng/rand_r.c
pub struct MuslRNG { pub struct MuslRNG {
buffer.PRNGBuffer
mut: mut:
state u32 = seed.time_seed_32() state u32 = seed.time_seed_32()
bytes_left int
buffer u32
} }
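The RNG diffs above and below replace the per-generator `bytes_left`/`buffer` fields with an embedded `buffer.PRNGBuffer`. For reference, V struct embedding promotes the embedded struct's fields onto the outer struct; a minimal standalone sketch with hypothetical names:

struct PRNGBuffer {
mut:
	bytes_left int
	buffer     u64
}

struct MyRNG {
	PRNGBuffer // embedded: MyRNG gets bytes_left and buffer directly
mut:
	state u64
}

fn main() {
	mut rng := MyRNG{
		state: 42
	}
	rng.bytes_left = 8 // promoted field from the embedded struct
	rng.buffer = 0xdeadbeef
	println(rng.bytes_left)
}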
// seed sets the current random state based on `seed_data`. // seed sets the current random state based on `seed_data`.

View File

@ -4,6 +4,7 @@
module pcg32 module pcg32
import rand.seed import rand.seed
import rand.buffer
pub const seed_len = 4 pub const seed_len = 4
@ -11,11 +12,10 @@ pub const seed_len = 4
// https://github.com/imneme/pcg-c-basic/blob/master/pcg_basic.c, and // https://github.com/imneme/pcg-c-basic/blob/master/pcg_basic.c, and
// https://github.com/imneme/pcg-c-basic/blob/master/pcg_basic.h // https://github.com/imneme/pcg-c-basic/blob/master/pcg_basic.h
pub struct PCG32RNG { pub struct PCG32RNG {
buffer.PRNGBuffer
mut: mut:
state u64 = u64(0x853c49e6748fea9b) ^ seed.time_seed_64() state u64 = u64(0x853c49e6748fea9b) ^ seed.time_seed_64()
inc u64 = u64(0xda3e39cb94b95bdb) ^ seed.time_seed_64() inc u64 = u64(0xda3e39cb94b95bdb) ^ seed.time_seed_64()
bytes_left int
buffer u32
} }
// seed seeds the PCG32RNG with 4 `u32` values. // seed seeds the PCG32RNG with 4 `u32` values.

View File

@ -274,34 +274,79 @@ pub fn (mut rng PRNG) ascii(len int) string {
return internal_string_from_set(mut rng, rand.ascii_chars, len) return internal_string_from_set(mut rng, rand.ascii_chars, len)
} }
// Configuration struct for the shuffle functions. // bernoulli returns true with a probability p. Note that 0 <= p <= 1.
// The start index is inclusive and the end index is exclusive. pub fn (mut rng PRNG) bernoulli(p f64) ?bool {
// Set the end to 0 to shuffle until the end of the array. if p < 0 || p > 1 {
[params] return error('$p is not a valid probability value.')
pub struct ShuffleConfigStruct { }
pub: return rng.f64() <= p
start int
end int
} }
fn (config ShuffleConfigStruct) validate_for<T>(a []T) ? { // normal returns a normally distributed pseudorandom f64 with mean `mu` and standard deviation `sigma`.
if config.start < 0 || config.start >= a.len { // NOTE: Use normal_pair() instead if you're generating a lot of normal variates.
return error("argument 'config.start' must be in range [0, a.len)") pub fn (mut rng PRNG) normal(conf config.NormalConfigStruct) ?f64 {
x, _ := rng.normal_pair(conf)?
return x
}
// normal_pair returns a pair of normally distributed pseudorandom f64 values with mean `mu` and standard deviation `sigma`.
pub fn (mut rng PRNG) normal_pair(conf config.NormalConfigStruct) ?(f64, f64) {
if conf.sigma <= 0 {
return error('Standard deviation must be positive')
} }
if config.end < 0 || config.end > a.len { // This is an implementation of the Marsaglia polar method
return error("argument 'config.end' must be in range [0, a.len]") // See: https://doi.org/10.1137%2F1006063
// Also: https://en.wikipedia.org/wiki/Marsaglia_polar_method
for {
u := rng.f64_in_range(-1, 1) or { 0.0 }
v := rng.f64_in_range(-1, 1) or { 0.0 }
s := u * u + v * v
if s >= 1 || s == 0 {
continue
}
t := msqrt(-2 * mlog(s) / s)
x := conf.mu + conf.sigma * t * u
y := conf.mu + conf.sigma * t * v
return x, y
} }
return error('Implementation error. Please file an issue.')
}
// binomial returns the number of successful trials out of n when the
// probability of success for each trial is p.
pub fn (mut rng PRNG) binomial(n int, p f64) ?int {
if p < 0 || p > 1 {
return error('$p is not a valid probability value.')
}
mut count := 0
for _ in 0 .. n {
if rng.bernoulli(p)! {
count++
}
}
return count
}
// exponential returns an exponentially distributed random number with the rate parameter
// lambda. It is expected that lambda is positive.
pub fn (mut rng PRNG) exponential(lambda f64) f64 {
if lambda <= 0 {
panic('The rate (lambda) must be positive.')
}
// Use the inverse transform sampling method
return -mlog(rng.f64()) / lambda
} }
// shuffle randomly permutates the elements in `a`. The range for shuffling is // shuffle randomly permutates the elements in `a`. The range for shuffling is
// optional and the entire array is shuffled by default. Leave the end as 0 to // optional and the entire array is shuffled by default. Leave the end as 0 to
// shuffle all elements until the end. // shuffle all elements until the end.
[direct_array_access] [direct_array_access]
pub fn (mut rng PRNG) shuffle<T>(mut a []T, config ShuffleConfigStruct) ? { pub fn (mut rng PRNG) shuffle<T>(mut a []T, config config.ShuffleConfigStruct) ? {
config.validate_for(a)? config.validate_for(a)?
new_end := if config.end == 0 { a.len } else { config.end } new_end := if config.end == 0 { a.len } else { config.end }
for i in config.start .. new_end { for i in config.start .. new_end {
x := rng.int_in_range(i, new_end) or { config.start } x := rng.int_in_range(i, new_end) or { config.start + i }
// swap // swap
a_i := a[i] a_i := a[i]
a[i] = a[x] a[i] = a[x]
@ -311,7 +356,7 @@ pub fn (mut rng PRNG) shuffle<T>(mut a []T, config ShuffleConfigStruct) ? {
// shuffle_clone returns a random permutation of the elements in `a`. // shuffle_clone returns a random permutation of the elements in `a`.
// The permutation is done on a fresh clone of `a`, so `a` remains unchanged. // The permutation is done on a fresh clone of `a`, so `a` remains unchanged.
pub fn (mut rng PRNG) shuffle_clone<T>(a []T, config ShuffleConfigStruct) ?[]T { pub fn (mut rng PRNG) shuffle_clone<T>(a []T, config config.ShuffleConfigStruct) ?[]T {
mut res := a.clone() mut res := a.clone()
rng.shuffle(mut res, config)? rng.shuffle(mut res, config)?
return res return res
@ -541,13 +586,13 @@ pub fn ascii(len int) string {
// shuffle randomly permutates the elements in `a`. The range for shuffling is
// optional and the entire array is shuffled by default. Leave the end as 0 to
// shuffle all elements until the end.
pub fn shuffle<T>(mut a []T, config ShuffleConfigStruct) ? {
pub fn shuffle<T>(mut a []T, config config.ShuffleConfigStruct) ? {
default_rng.shuffle(mut a, config)?
}
// shuffle_clone returns a random permutation of the elements in `a`.
// The permutation is done on a fresh clone of `a`, so `a` remains unchanged.
pub fn shuffle_clone<T>(a []T, config ShuffleConfigStruct) ?[]T {
pub fn shuffle_clone<T>(a []T, config config.ShuffleConfigStruct) ?[]T {
return default_rng.shuffle_clone(a, config)
}
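A hedged usage sketch of the two module-level wrappers above, assuming the config argument may be omitted and that its `start`/`end` fields are the ones used in the implementation:
import rand

fn main() {
	mut a := [1, 2, 3, 4, 5, 6]
	// shuffle in place; `start`/`end` bound the shuffled range (end == 0 means "until the end")
	rand.shuffle(mut a, start: 2) or { panic(err) }
	println(a)
	// shuffle_clone leaves `a` untouched and returns a shuffled copy
	b := rand.shuffle_clone(a) or { panic(err) }
	println(b)
}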
@ -563,3 +608,31 @@ pub fn choose<T>(array []T, k int) ?[]T {
pub fn sample<T>(array []T, k int) []T {
return default_rng.sample(array, k)
}
// bernoulli returns true with a probability p. Note that 0 <= p <= 1.
pub fn bernoulli(p f64) ?bool {
return default_rng.bernoulli(p)
}
// normal returns a normally distributed pseudorandom f64 with mean `conf.mu` and standard deviation `conf.sigma`.
// NOTE: Use normal_pair() instead if you're generating a lot of normal variates.
pub fn normal(conf config.NormalConfigStruct) ?f64 {
return default_rng.normal(conf)
}
// normal_pair returns a pair of normally distributed pseudorandom f64 values with mean `conf.mu` and standard deviation `conf.sigma`.
pub fn normal_pair(conf config.NormalConfigStruct) ?(f64, f64) {
return default_rng.normal_pair(conf)
}
// binomial returns the number of successful trials out of n when the
// probability of success for each trial is p.
pub fn binomial(n int, p f64) ?int {
return default_rng.binomial(n, p)
}
// exponential returns an exponentially distributed random number with the rate parameter
// lambda. It is expected that lambda is positive.
pub fn exponential(lambda f64) f64 {
return default_rng.exponential(lambda)
}
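A short, hedged usage sketch of the new top-level wrappers; the `mu`/`sigma` field names are taken from the implementation above, and `or` blocks handle the optionals (only `exponential` returns a plain f64):
import rand

fn main() {
	// single biased coin flip
	heads := rand.bernoulli(0.7) or { false }
	println(heads)
	// number of successes in 10 trials with p = 0.3
	k := rand.binomial(10, 0.3) or { 0 }
	println(k)
	// one normal variate with mean 0 and standard deviation 1
	x := rand.normal(mu: 0.0, sigma: 1.0) or { 0.0 }
	println(x)
	// exponential waiting time with rate 1.5 (panics on a non-positive rate)
	println(rand.exponential(1.5))
}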


@ -4,11 +4,13 @@
module splitmix64
import rand.seed
import rand.buffer
pub const seed_len = 2
// SplitMix64RNG ported from http://xoshiro.di.unimi.it/splitmix64.c
pub struct SplitMix64RNG {
buffer.PRNGBuffer
mut:
state u64 = seed.time_seed_64()
bytes_left int


@ -4,6 +4,7 @@
module sys
import math.bits
import rand.buffer
import rand.seed
// Implementation note:
@ -36,10 +37,9 @@ fn calculate_iterations_for(bits int) int {
// SysRNG is the PRNG provided by default in the libc implementation that V uses.
pub struct SysRNG {
buffer.PRNGBuffer
mut:
seed u32 = seed.time_seed_32()
buffer int
bytes_left int
}
// r.seed() sets the seed of the accepting SysRNG to the given data.
@ -71,7 +71,7 @@ pub fn (mut r SysRNG) u8() u8 {
r.buffer >>= 8
return value
}
r.buffer = r.default_rand()
r.buffer = u64(r.default_rand())
r.bytes_left = sys.rand_bytesize - 1
value := u8(r.buffer)
r.buffer >>= 8


@ -4,6 +4,7 @@
module wyrand
import hash
import rand.buffer
import rand.seed
// Redefinition of some constants that we will need for pseudorandom number generation.
@ -16,6 +17,7 @@ pub const seed_len = 2
// WyRandRNG is a RNG based on the WyHash hashing algorithm.
pub struct WyRandRNG {
buffer.PRNGBuffer
mut:
state u64 = seed.time_seed_64()
bytes_left int


@ -645,7 +645,7 @@ pub fn channel_select(mut channels []&Channel, dir []Direction, mut objrefs []vo
unsafe {
*subscr[i].prev = subscr[i].nxt
}
if subscr[i].nxt != 0 {
if unsafe { subscr[i].nxt != 0 } {
subscr[i].nxt.prev = subscr[i].prev
// just in case we have missed a semaphore during restore
subscr[i].nxt.sem.post()
@ -659,7 +659,7 @@ pub fn channel_select(mut channels []&Channel, dir []Direction, mut objrefs []vo
unsafe {
*subscr[i].prev = subscr[i].nxt
}
if subscr[i].nxt != 0 {
if unsafe { subscr[i].nxt != 0 } {
subscr[i].nxt.prev = subscr[i].prev
subscr[i].nxt.sem.post()
}
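Both hunks apply the same pattern: comparing a reference against 0 is only accepted inside an `unsafe` block, so the comparison itself gets wrapped. A minimal illustration of the rule, with a hypothetical `check` function that is not part of this diff:
fn check(p &int) {
	// `p != 0` outside `unsafe` is rejected by the checker;
	// wrapping only the comparison keeps the surrounding code safe
	if unsafe { p != 0 } {
		println('pointer is set')
	}
}

fn main() {
	x := 123
	check(&x)
}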


@ -21,7 +21,7 @@ mut:
}
fn restore_terminal_state() {
if ui.ctx_ptr != 0 {
if unsafe { ui.ctx_ptr != 0 } {
if ui.ctx_ptr.cfg.use_alternate_buffer {
// clear the terminal and set the cursor to the origin
print('\x1b[2J\x1b[3J')
@ -84,7 +84,7 @@ pub fn init(cfg Config) &Context {
for code in ctx.cfg.reset {
os.signal_opt(code, fn (_ os.Signal) {
mut c := ui.ctx_ptr
if c != 0 {
if unsafe { c != 0 } {
c.cleanup()
}
exit(0)


@ -44,7 +44,7 @@ fn restore_terminal_state_signal(_ os.Signal) {
fn restore_terminal_state() {
termios_reset()
mut c := ctx_ptr
if c != 0 {
if unsafe { c != 0 } {
c.paused = true
load_title()
}
@ -121,7 +121,7 @@ fn (mut ctx Context) termios_setup() ? {
os.signal_opt(.tstp, restore_terminal_state_signal) or {}
os.signal_opt(.cont, fn (_ os.Signal) {
mut c := ctx_ptr
if c != 0 {
if unsafe { c != 0 } {
c.termios_setup() or { panic(err) }
c.window_height, c.window_width = get_terminal_size()
mut event := &Event{
@ -136,7 +136,7 @@ fn (mut ctx Context) termios_setup() ? {
for code in ctx.cfg.reset {
os.signal_opt(code, fn (_ os.Signal) {
mut c := ctx_ptr
if c != 0 {
if unsafe { c != 0 } {
c.cleanup()
}
exit(0)
@ -145,7 +145,7 @@ fn (mut ctx Context) termios_setup() ? {
os.signal_opt(.winch, fn (_ os.Signal) {
mut c := ctx_ptr
if c != 0 {
if unsafe { c != 0 } {
c.window_height, c.window_width = get_terminal_size()
mut event := &Event{
@ -200,7 +200,7 @@ fn termios_reset() {
C.tcsetattr(C.STDIN_FILENO, C.TCSAFLUSH, &ui.termios_at_startup)
print('\x1b[?1003l\x1b[?1006l\x1b[?25h')
c := ctx_ptr
if c != 0 && c.cfg.use_alternate_buffer {
if unsafe { c != 0 } && c.cfg.use_alternate_buffer {
print('\x1b[?1049l')
}
os.flush()
@ -267,7 +267,7 @@ fn (mut ctx Context) parse_events() {
ctx.shift(1)
}
}
if event != 0 {
if unsafe { event != 0 } {
ctx.event(event)
nr_iters = 0
}


@ -4,36 +4,35 @@
module time
import strings
import math
// format returns a date string in "YYYY-MM-DD HH:mm" format (24h).
pub fn (t Time) format() string {
return t.get_fmt_str(.hyphen, .hhmm24, .yyyymmdd)
return '${t.year:04d}-${t.month:02d}-${t.day:02d} ${t.hour:02d}:${t.minute:02d}'
}
// format_ss returns a date string in "YYYY-MM-DD HH:mm:ss" format (24h).
pub fn (t Time) format_ss() string {
return t.get_fmt_str(.hyphen, .hhmmss24, .yyyymmdd)
return '${t.year:04d}-${t.month:02d}-${t.day:02d} ${t.hour:02d}:${t.minute:02d}:${t.second:02d}'
}
// format_ss_milli returns a date string in "YYYY-MM-DD HH:mm:ss.123" format (24h).
pub fn (t Time) format_ss_milli() string {
return t.get_fmt_str(.hyphen, .hhmmss24_milli, .yyyymmdd)
return '${t.year:04d}-${t.month:02d}-${t.day:02d} ${t.hour:02d}:${t.minute:02d}:${t.second:02d}.${(t.microsecond / 1000):03d}'
}
// format_ss_micro returns a date string in "YYYY-MM-DD HH:mm:ss.123456" format (24h).
pub fn (t Time) format_ss_micro() string {
return t.get_fmt_str(.hyphen, .hhmmss24_micro, .yyyymmdd)
return '${t.year:04d}-${t.month:02d}-${t.day:02d} ${t.hour:02d}:${t.minute:02d}:${t.second:02d}.${t.microsecond:06d}'
}
// hhmm returns a date string in "HH:mm" format (24h).
pub fn (t Time) hhmm() string {
return t.get_fmt_time_str(.hhmm24)
return '${t.hour:02d}:${t.minute:02d}'
}
// hhmmss returns a date string in "HH:mm:ss" format (24h).
pub fn (t Time) hhmmss() string {
return t.get_fmt_time_str(.hhmmss24)
return '${t.hour:02d}:${t.minute:02d}:${t.second:02d}'
}
// hhmm12 returns a date string in "hh:mm" format (12h).
@ -238,15 +237,15 @@ pub fn (t Time) custom_format(s string) string {
sb.write_string('${(t.hour + 1):02}')
}
'w' {
sb.write_string('${math.ceil((t.day + days_before[t.month - 1] +
sb.write_string('${mceil((t.day + days_before[t.month - 1] +
int(is_leap_year(t.year))) / 7):.0}')
}
'ww' {
sb.write_string('${math.ceil((t.day + days_before[t.month - 1] +
sb.write_string('${mceil((t.day + days_before[t.month - 1] +
int(is_leap_year(t.year))) / 7):02.0}')
}
'wo' {
sb.write_string(ordinal_suffix(int(math.ceil((t.day + days_before[t.month - 1] +
sb.write_string(ordinal_suffix(int(mceil((t.day + days_before[t.month - 1] +
int(is_leap_year(t.year))) / 7))))
}
'Q' {
@ -439,3 +438,14 @@ pub fn (t Time) utc_string() string {
utc_string := '$day_str, $t.day $month_str $t.year ${t.hour:02d}:${t.minute:02d}:${t.second:02d} UTC'
return utc_string
}
// mceil returns the least integer value greater than or equal to x.
fn mceil(x f64) f64 {
if x > 0 {
return 1 + int(x)
}
if x < 0 {
return -int(-x)
}
return 0
}
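A quick sanity check of the simplified helper, as a hedged sketch with the function copied locally (it is private to the `time` module). For exact positive integers it rounds one step up (mceil(3.0) == 4.0), which is tolerable for the week-number arithmetic above:
fn mceil(x f64) f64 {
	if x > 0 {
		return 1 + int(x)
	}
	if x < 0 {
		return -int(-x)
	}
	return 0
}

fn main() {
	println(mceil(2.3)) // 3.
	println(mceil(-2.3)) // -2.
	println(mceil(0)) // 0.
}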


@ -79,7 +79,6 @@ pub fn pref_arch_to_table_language(pref_arch pref.Arch) Language {
// * Table.type_kind(typ) not TypeSymbol.kind.
// Each TypeSymbol is entered into `Table.types`.
// See also: Table.sym.
[minify]
pub struct TypeSymbol {
pub:
@ -548,6 +547,15 @@ pub fn (t &Table) type_kind(typ Type) Kind {
return t.sym(typ).kind
}
pub fn (t &Table) type_is_for_pointer_arithmetic(typ Type) bool {
typ_sym := t.sym(typ)
if typ_sym.kind == .struct_ {
return false
} else {
return typ.is_any_kind_of_pointer() || typ.is_int_valptr()
}
}
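The helper above feeds the infix check further down: only real pointers (and int-valued pointer types), not struct values, may take part in pointer arithmetic or comparisons against integers, and even then only inside `unsafe`. A small hedged illustration of the allowed side:
fn main() {
	arr := [10, 20, 30, 40]
	unsafe {
		p := &int(arr.data)
		// pointer arithmetic on a real pointer type, inside `unsafe`
		println(*(p + 2)) // 30
	}
}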
pub enum Kind {
placeholder
void


@ -215,6 +215,9 @@ pub fn (mut b Builder) parse_imports() {
}
exit(0)
}
if b.pref.dump_files != '' {
b.dump_files(b.parsed_files.map(it.path))
}
b.rebuild_modules()
}
@ -241,6 +244,7 @@ pub fn (mut b Builder) resolve_deps() {
for node in deps_resolved.nodes {
mods << node.name
}
b.dump_modules(mods)
if b.pref.is_verbose {
eprintln('------ imported modules: ------')
eprintln(mods.str())
@ -270,7 +274,7 @@ pub fn (b &Builder) import_graph() &depgraph.DepGraph {
deps << 'builtin'
if b.pref.backend == .c {
// TODO JavaScript backend doesn't handle os for now
if b.pref.is_vsh && p.mod.name !in ['os', 'dl'] {
if b.pref.is_vsh && p.mod.name !in ['os', 'dl', 'strings.textscanner'] {
deps << 'os'
}
}


@ -483,17 +483,6 @@ fn (mut v Builder) setup_output_name() {
v.ccoptions.o_args << '-o "$v.pref.out_name"'
}
fn (mut v Builder) dump_c_options(all_args []string) {
if v.pref.dump_c_flags != '' {
non_empty_args := all_args.filter(it != '').join('\n') + '\n'
if v.pref.dump_c_flags == '-' {
print(non_empty_args)
} else {
os.write_file(v.pref.dump_c_flags, non_empty_args) or { panic(err) }
}
}
}
pub fn (mut v Builder) cc() {
if os.executable().contains('vfmt') {
return


@ -0,0 +1,26 @@
module builder
import os
pub fn (b &Builder) dump_c_options(all_args []string) {
dump_list(b.pref.dump_c_flags, all_args)
}
pub fn (b &Builder) dump_modules(mods []string) {
dump_list(b.pref.dump_modules, mods)
}
pub fn (b &Builder) dump_files(files []string) {
dump_list(b.pref.dump_files, files)
}
fn dump_list(file_path string, list []string) {
if file_path != '' {
content := list.filter(it != '').join('\n') + '\n'
if file_path == '-' {
print(content)
} else {
os.write_file(file_path, content) or { panic(err) }
}
}
}
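A standalone sketch of the convention the new `dump_list` helper implements: an empty path disables dumping, `-` means stdout, anything else is treated as a file path (the demo names here are hypothetical):
import os

fn dump_list_demo(file_path string, list []string) {
	if file_path != '' {
		content := list.filter(it != '').join('\n') + '\n'
		if file_path == '-' {
			print(content)
		} else {
			os.write_file(file_path, content) or { panic(err) }
		}
	}
}

fn main() {
	// prints the three module names to stdout; '' would skip, a path would write a file
	dump_list_demo('-', ['builtin', 'os', 'strings'])
}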


@ -346,10 +346,7 @@ pub fn (mut v Builder) cc_msvc() {
defines := sflags.defines
other_flags := sflags.other_flags
// Include the base paths
a << '-I "$r.ucrt_include_path"'
a << '-I "$r.vs_include_path"'
a << '-I "$r.um_include_path"'
a << '-I "$r.shared_include_path"'
a << r.include_paths()
a << defines
a << inc_paths
a << other_flags
@ -358,9 +355,7 @@ pub fn (mut v Builder) cc_msvc() {
a << '/link'
a << '/NOLOGO'
a << '/OUT:"$v.pref.out_name"'
a << '/LIBPATH:"$r.ucrt_lib_path"'
a << '/LIBPATH:"$r.um_lib_path"'
a << '/LIBPATH:"$r.vs_lib_path"'
a << r.library_paths()
if !all_cflags.contains('/DEBUG') {
// only use /DEBUG, if the user *did not* provide its own:
a << '/DEBUG:FULL' // required for prod builds to generate a PDB file
@ -375,6 +370,7 @@ pub fn (mut v Builder) cc_msvc() {
if env_ldflags != '' {
a << env_ldflags
}
v.dump_c_options(a)
args := a.join(' ')
// write args to a file so that we dont smash createprocess
os.write_file(out_name_cmd_line, args) or {
@ -420,17 +416,18 @@ fn (mut v Builder) build_thirdparty_obj_file_with_msvc(path string, moduleflags
}
println('$obj_path not found, building it (with msvc)...')
cfile := '${path_without_o_postfix}.c'
// println('cfile: $cfile')
flags := msvc_string_flags(moduleflags)
inc_dirs := flags.inc_paths.join(' ')
defines := flags.defines.join(' ')
include_string := '-I "$msvc.ucrt_include_path" -I "$msvc.vs_include_path" -I "$msvc.um_include_path" -I "$msvc.shared_include_path" $inc_dirs'
//
mut oargs := []string{}
env_cflags := os.getenv('CFLAGS')
mut all_cflags := '$env_cflags $v.pref.cflags'
if all_cflags != ' ' {
oargs << all_cflags
}
oargs << '/NOLOGO'
oargs << '/volatile:ms'
//
if v.pref.is_prod {
oargs << '/O2'
@ -440,12 +437,18 @@ fn (mut v Builder) build_thirdparty_obj_file_with_msvc(path string, moduleflags
oargs << '/MDd'
oargs << '/D_DEBUG'
}
oargs << defines
oargs << msvc.include_paths()
oargs << inc_dirs
oargs << '/c "$cfile"'
oargs << '/Fo"$obj_path"'
env_ldflags := os.getenv('LDFLAGS')
if env_ldflags != '' {
oargs << env_ldflags
}
v.dump_c_options(oargs)
str_oargs := oargs.join(' ')
cmd := '"$msvc.full_cl_exe_path" /volatile:ms $str_oargs $defines $include_string /c "$cfile" /Fo"$obj_path"'
cmd := '"$msvc.full_cl_exe_path" $str_oargs'
// Note: the quotes above ARE balanced.
$if trace_thirdparty_obj_files ? {
println('>>> build_thirdparty_obj_file_with_msvc cmd: $cmd')
@ -510,7 +513,7 @@ pub fn msvc_string_flags(cflags []cflag.CFlag) MsvcStringFlags {
}
mut lpaths := []string{}
for l in lib_paths {
lpaths << '/LIBPATH:"' + os.real_path(l) + '"'
lpaths << '/LIBPATH:"${os.real_path(l)}"'
}
return MsvcStringFlags{
real_libs: real_libs
@ -520,3 +523,34 @@ pub fn msvc_string_flags(cflags []cflag.CFlag) MsvcStringFlags {
other_flags: other_flags
}
}
fn (r MsvcResult) include_paths() []string {
mut res := []string{cap: 4}
if r.ucrt_include_path != '' {
res << '-I "$r.ucrt_include_path"'
}
if r.vs_include_path != '' {
res << '-I "$r.vs_include_path"'
}
if r.um_include_path != '' {
res << '-I "$r.um_include_path"'
}
if r.shared_include_path != '' {
res << '-I "$r.shared_include_path"'
}
return res
}
fn (r MsvcResult) library_paths() []string {
mut res := []string{cap: 3}
if r.ucrt_lib_path != '' {
res << '/LIBPATH:"$r.ucrt_lib_path"'
}
if r.um_lib_path != '' {
res << '/LIBPATH:"$r.um_lib_path"'
}
if r.vs_lib_path != '' {
res << '/LIBPATH:"$r.vs_lib_path"'
}
return res
}


@ -224,7 +224,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
left_type = left_type.set_nr_muls(1)
}
} else if left_type.has_flag(.shared_f) {
left_type = left_type.clear_flag(.shared_f)
left_type = left_type.clear_flag(.shared_f).deref()
}
if ident_var_info.share == .atomic_t {
left_type = left_type.set_flag(.atomic_f)


@ -940,8 +940,13 @@ pub fn (mut c Checker) check_or_expr(node ast.OrExpr, ret_type ast.Type, expr_re
node.pos)
}
if !expr_return_type.has_flag(.optional) {
c.error('to propagate an option, the call must also return an optional type',
node.pos)
if expr_return_type.has_flag(.result) {
c.warn('propagating a result like an option is deprecated, use `foo()!` instead of `foo()?`',
node.pos)
} else {
c.error('to propagate an option, the call must also return an optional type',
node.pos)
}
}
return
}
@ -2150,25 +2155,25 @@ pub fn (mut c Checker) expr(node_ ast.Expr) ast.Type {
c.inside_if_guard = true
node.expr_type = c.expr(node.expr)
c.inside_if_guard = old_inside_if_guard
if !node.expr_type.has_flag(.optional) {
if !node.expr_type.has_flag(.optional) && !node.expr_type.has_flag(.result) {
mut no_opt := true
mut no_opt_or_res := true
match mut node.expr {
ast.IndexExpr {
no_opt = false
no_opt_or_res = false
node.expr_type = node.expr_type.set_flag(.optional)
node.expr.is_option = true
}
ast.PrefixExpr {
if node.expr.op == .arrow {
no_opt = false
no_opt_or_res = false
node.expr_type = node.expr_type.set_flag(.optional)
node.expr.is_option = true
}
}
else {}
}
if no_opt {
if no_opt_or_res {
c.error('expression should return an option', node.expr.pos())
c.error('expression should either return an option or a result', node.expr.pos())
}
}
return ast.bool_type
@ -2312,6 +2317,10 @@ pub fn (mut c Checker) cast_expr(mut node ast.CastExpr) ast.Type {
mut to_sym := c.table.sym(to_type) // type to be used as cast
mut final_to_sym := c.table.final_sym(to_type)
if to_type.has_flag(.optional) {
c.error('casting to optional type is forbidden', node.pos)
}
if (to_sym.is_number() && from_sym.name == 'JS.Number')
|| (to_sym.is_number() && from_sym.name == 'JS.BigInt')
|| (to_sym.is_string() && from_sym.name == 'JS.String')
@ -2672,7 +2681,7 @@ pub fn (mut c Checker) ident(mut node ast.Ident) ast.Type {
}
}
} else {
typ = obj.expr.expr_type.clear_flag(.optional)
typ = obj.expr.expr_type.clear_flag(.optional).clear_flag(.result)
}
} else {
typ = c.expr(obj.expr)


@ -80,8 +80,12 @@ pub fn (mut c Checker) array_init(mut node ast.ArrayInit) ast.Type {
// }
array_info := type_sym.array_info()
node.elem_type = array_info.elem_type
// clear optional flag incase of: `fn opt_arr ?[]int { return [] }`
// clear optional flag incase of: `fn opt_arr() ?[]int { return [] }`
return c.expected_type.clear_flag(.optional)
return if c.expected_type.has_flag(.shared_f) {
c.expected_type.clear_flag(.shared_f).deref()
} else {
c.expected_type
}.clear_flag(.optional)
}
// [1,2,3]
if node.exprs.len > 0 && node.elem_type == ast.void_type {
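The comment in the hunk names the case being handled; spelled out as a runnable sketch, returning a bare `[]` literal from a function with an optional array return type relies on exactly this expected-type handling:
fn opt_arr() ?[]int {
	// the empty literal takes its element type (minus the optional flag) from the expected type
	return []
}

fn main() {
	println(opt_arr() or { [0] })
}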


@ -559,12 +559,12 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
}
if !found && mut node.left is ast.IndexExpr {
c.expr(node.left)
sym := c.table.sym(node.left.left_type)
sym := c.table.final_sym(node.left.left_type)
if sym.info is ast.Array {
elem_sym := c.table.sym(sym.info.elem_type)
if elem_sym.info is ast.FnType {
node.return_type = elem_sym.info.func.return_type
return elem_sym.info.func.return_type
func = elem_sym.info.func
found = true
} else {
c.error('cannot call the element of the array, it is not a function',
node.pos)
@ -572,23 +572,21 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
} else if sym.info is ast.Map {
value_sym := c.table.sym(sym.info.value_type)
if value_sym.info is ast.FnType {
node.return_type = value_sym.info.func.return_type
return value_sym.info.func.return_type
func = value_sym.info.func
found = true
} else {
c.error('cannot call the value of the map, it is not a function', node.pos)
}
} else if sym.info is ast.ArrayFixed {
elem_sym := c.table.sym(sym.info.elem_type)
if elem_sym.info is ast.FnType {
node.return_type = elem_sym.info.func.return_type
return elem_sym.info.func.return_type
func = elem_sym.info.func
found = true
} else {
c.error('cannot call the element of the array, it is not a function',
node.pos)
}
}
found = true
return ast.string_type
}
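What this refactor checks is calling a function value stored inside an array, fixed array, or map element; the return type now comes from the element's actual fn signature instead of a placeholder. A small hedged example of the kind of code involved:
fn add(a int, b int) int {
	return a + b
}

fn mul(a int, b int) int {
	return a * b
}

fn main() {
	fns := [add, mul]
	println(fns[0](2, 3)) // 5
	println(fns[1](2, 3)) // 6
	m := {
		'add': add
		'mul': mul
	}
	println(m['add'](4, 5)) // 9
}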
if !found && mut node.left is ast.CallExpr {
c.expr(node.left)
@ -788,12 +786,7 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
c.warn('`error($arg)` can be shortened to just `$arg`', node.pos)
}
}
// TODO: typ optimize.. this node can get processed more than once
if node.expected_arg_types.len == 0 {
for param in func.params {
node.expected_arg_types << param.typ
}
}
c.set_node_expected_arg_types(mut node, func)
if !c.pref.backend.is_js() && node.args.len > 0 && func.params.len == 0 {
c.error('too many arguments in call to `$func.name` (non-js backend: $c.pref.backend)',
node.pos)
@ -904,15 +897,6 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
}
c.check_expected_call_arg(arg_typ, c.unwrap_generic(param.typ), node.language,
call_arg) or {
// str method, allow type with str method if fn arg is string
// Passing an int or a string array produces a c error here
// Deleting this condition results in propper V error messages
// if arg_typ_sym.kind == .string && typ_sym.has_method('str') {
// continue
// }
if arg_typ_sym.kind == .void && param_typ_sym.kind == .string {
continue
}
if param.typ.has_flag(.generic) {
continue
}
@ -997,7 +981,6 @@ pub fn (mut c Checker) fn_call(mut node ast.CallExpr, mut continue_check &bool)
if func.language != .c && !c.inside_unsafe && arg_typ.nr_muls() != param.typ.nr_muls()
&& !(call_arg.is_mut && param.is_mut) && !(!call_arg.is_mut && !param.is_mut)
&& param.typ !in [ast.byteptr_type, ast.charptr_type, ast.voidptr_type] {
// sym := c.table.sym(typ)
c.warn('automatic referencing/dereferencing is deprecated and will be removed soon (got: $arg_typ.nr_muls() references, expected: $param.typ.nr_muls() references)',
call_arg.pos)
}
@ -1463,10 +1446,8 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
continue
}
}
if got_arg_typ != ast.void_type {
c.error('$err.msg() in argument ${i + 1} to `${left_sym.name}.$method_name`',
arg.pos)
}
}
}
if method.is_unsafe && !c.inside_unsafe {
@ -1476,12 +1457,7 @@ pub fn (mut c Checker) method_call(mut node ast.CallExpr) ast.Type {
if !c.table.cur_fn.is_deprecated && method.is_deprecated {
c.deprecate_fnmethod('method', '${left_sym.name}.$method.name', method, node)
}
// TODO: typ optimize.. this node can get processed more than once
if node.expected_arg_types.len == 0 {
for i in 1 .. method.params.len {
node.expected_arg_types << method.params[i].typ
}
}
c.set_node_expected_arg_types(mut node, method)
if is_method_from_embed {
node.receiver_type = node.from_embed_types.last().derive(method.params[0].typ)
} else if is_generic {
@ -1619,6 +1595,15 @@ fn (mut c Checker) go_expr(mut node ast.GoExpr) ast.Type {
}
}
fn (mut c Checker) set_node_expected_arg_types(mut node ast.CallExpr, func &ast.Fn) {
if node.expected_arg_types.len == 0 {
start_idx := if func.is_method { 1 } else { 0 }
for i in start_idx .. func.params.len {
node.expected_arg_types << func.params[i].typ
}
}
}
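The helper replaces two near-identical loops in `fn_call` and `method_call`; the only difference was where the parameter list starts, since a method's `params[0]` is the receiver. The idea, sketched outside the compiler with plain strings standing in for types:
fn expected_arg_types(param_types []string, is_method bool) []string {
	start_idx := if is_method { 1 } else { 0 }
	return param_types[start_idx..]
}

fn main() {
	println(expected_arg_types(['int', 'string'], false)) // ['int', 'string']
	println(expected_arg_types(['&Foo', 'int', 'string'], true)) // ['int', 'string']
}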
fn (mut c Checker) deprecate_fnmethod(kind string, name string, the_fn ast.Fn, node ast.CallExpr) {
mut deprecation_message := ''
now := time.now()


@ -592,6 +592,14 @@ pub fn (mut c Checker) infix_expr(mut node ast.InfixExpr) ast.Type {
}
c.error('infix expr: cannot use `$right_sym.name` (right expression) as `$left_sym.name`',
left_right_pos)
} else if left_type.is_ptr() {
for_ptr_op := c.table.type_is_for_pointer_arithmetic(left_type)
if left_sym.language == .v && !c.pref.translated && !c.inside_unsafe && !for_ptr_op
&& right_type.is_int() {
sugg := ' (you can use it inside an `unsafe` block)'
c.error('infix expr: cannot use `$right_sym.name` (right expression) as `$left_sym.name` $sugg',
left_right_pos)
}
}
/*
if (node.left is ast.InfixExpr &&


@ -0,0 +1,7 @@
vlib/v/checker/tests/checker_comparison_between_obj_and_int.vv:10:5: error: infix expr: cannot use `int literal` (right expression) as `Foo` (you can use it inside an `unsafe` block)
8 |
9 | fn insert_helper(mut node Foo) {
10 | if node == 0 {
| ~~~~~~~~~
11 | }
12 | }


@ -0,0 +1,14 @@
struct Foo {}
// inside a unsafe block it is valid
fn insert_helper_unsafe(mut node Foo) {
if unsafe { node == 0 } {
}
}
fn insert_helper(mut node Foo) {
if node == 0 {
}
}
fn main() {}


@ -1,7 +1,7 @@
vlib/v/checker/tests/expression_should_return_an_option.vv:28:10: error: expression should return an option
vlib/v/checker/tests/expression_should_return_an_option.vv:28:10: error: expression should either return an option or a result
26 | }
27 | // should be an checker error:
28 | if x := return_string() {
| ~~~~~~~~~~~~~~~
29 | println('x: $x')
30 | }


@ -0,0 +1,7 @@
vlib/v/checker/tests/fn_call_arg_mismatch_err_c.vv:13:18: error: `os.chdir(files) ?` (no value) used as value in argument 1 to `os.ls`
11 | println(files)
12 | } else {
13 | println(os.ls(os.chdir(files)?)?)
| ~~~~~~~~~~~~~~~~
14 | }
15 | println(files)


@ -0,0 +1,29 @@
module main
import os
fn list_files() ?[][]string {
mut unchecked_files := os.ls('utilities/modules')?
println(unchecked_files)
for files in unchecked_files {
println(files)
if os.is_file(files) == true {
println(files)
} else {
println(os.ls(os.chdir(files)?)?)
}
println(files)
}
mut modules := [['Module:', 'Path:', 'Description']]
return modules
}
fn main() {
mut data := [
['Module:', 'Path:', 'Description'],
]
mods := list_files() or { [['null', 'null', 'null']] }
for _, mod in mods {
data << mod
}
}


@ -1,7 +1,20 @@
vlib/v/checker/tests/fn_type_mismatch.vv:11:15: error: invalid array element: expected `fn (int, int) f32`, not `fn (f32, f32) f32`
9 |
10 | fn main() {
11 | fns := [add, div]
| ~~~
12 | println(fns[0](10.0, 5.0))
13 | println(fns[1](10.0, 5.0))
vlib/v/checker/tests/fn_type_mismatch.vv:12:17: error: cannot use `float literal` as `int` in argument 1 to ``
10 | fn main() {
11 | fns := [add, div]
12 | println(fns[0](10.0, 5.0))
| ~~~~
13 | println(fns[1](10.0, 5.0))
14 | }
vlib/v/checker/tests/fn_type_mismatch.vv:13:17: error: cannot use `float literal` as `int` in argument 1 to ``
11 | fns := [add, div]
12 | println(fns[0](10.0, 5.0))
13 | println(fns[1](10.0, 5.0))
| ~~~~
14 | }


@ -1,5 +0,0 @@
vlib/v/checker/tests/main_and_script_err.vv:1:1: error: function `main` is already defined
1 | fn main() {
| ^
2 | println('main')
3 | }


@ -0,0 +1,6 @@
vlib/v/checker/tests/method_call_arg_mismatch.vv:9:10: error: `baz()` (no value) used as value in argument 1 to `Foo.bar`
7 | fn main() {
8 | foo := Foo{}
9 | foo.bar(baz())
| ~~~~~
10 | }


@ -0,0 +1,10 @@
struct Foo {}
fn (f Foo) bar(baz fn ()) {}
fn baz() {}
fn main() {
foo := Foo{}
foo.bar(baz())
}


@ -0,0 +1,5 @@
vlib/v/checker/tests/optional_variable_err.vv:2:7: error: casting to optional type is forbidden
1 | fn main() {
2 | _ := ?bool(false)
| ~~~~~~~~~~~~
3 | }


@ -0,0 +1,3 @@
fn main() {
_ := ?bool(false)
}


@ -1,4 +1,4 @@
vlib/v/checker/tests/propagate_option_with_result_err.vv:6:7: error: to propagate an option, the call must also return an optional type
vlib/v/checker/tests/propagate_option_with_result_err.vv:6:7: warning: propagating a result like an option is deprecated, use `foo()!` instead of `foo()?`
4 |
5 | fn bar() ?string {
6 | foo()?


@ -7,7 +7,6 @@ import strings
import v.ast
import v.util
import v.pref
import v.mathutil
const (
bs = '\\'
@ -41,6 +40,7 @@ pub mut:
used_imports []string // to remove unused imports
import_syms_used map[string]bool // to remove unused import symbols.
mod2alias map[string]string // for `import time as t`, will contain: 'time'=>'t'
mod2syms map[string]string // import time { now } 'time.now'=>'now'
use_short_fn_args bool
single_line_fields bool // should struct fields be on a single line
it_name string // the name to replace `it` with
@ -71,8 +71,16 @@ pub fn fmt(file ast.File, table &ast.Table, pref &pref.Preferences, is_debug boo
if res.len == 1 {
return f.out_imports.str().trim_space() + '\n'
}
bounded_import_pos := mathutil.min(res.len, f.import_pos)
return res[..bounded_import_pos] + f.out_imports.str() + res[bounded_import_pos..]
if res.len <= f.import_pos {
imp_str := f.out_imports.str().trim_space()
if imp_str.len > 0 {
return res + '\n' + imp_str + '\n'
} else {
return res
}
} else {
return res[..f.import_pos] + f.out_imports.str() + res[f.import_pos..]
}
}
pub fn (mut f Fmt) process_file_imports(file &ast.File) {
@ -82,6 +90,9 @@ pub fn (mut f Fmt) process_file_imports(file &ast.File) {
f.mod2alias['${imp.mod}.$sym.name'] = sym.name
f.mod2alias['${imp.mod.all_after_last('.')}.$sym.name'] = sym.name
f.mod2alias[sym.name] = sym.name
f.mod2syms['${imp.mod}.$sym.name'] = sym.name
f.mod2syms['${imp.mod.all_after_last('.')}.$sym.name'] = sym.name
f.mod2syms[sym.name] = sym.name
f.import_syms_used[sym.name] = false
}
}
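`mod2syms` only contains symbols that were explicitly listed in an import, which is what lets `short_module` below keep them unqualified. The kind of source the field comment describes (the `time { now }` case):
import time { now }

fn main() {
	// `now()` stays short when formatted, because `now` was imported selectively
	println(now())
}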
@ -208,8 +219,8 @@ pub fn (mut f Fmt) short_module(name string) string {
if !name.contains('.') || name.starts_with('JS.') {
return name
}
if name in f.mod2alias {
return f.mod2alias[name]
if name in f.mod2syms {
return f.mod2syms[name]
}
if name.ends_with('>') {
generic_levels := name.trim_string_right('>').split('<')
@ -1639,6 +1650,7 @@ pub fn (mut f Fmt) call_expr(node ast.CallExpr) {
for arg in node.args {
f.comments(arg.comments)
}
mut is_method_newline := false
if node.is_method {
if node.name in ['map', 'filter', 'all', 'any'] {
f.in_lambda_depth++
@ -1650,7 +1662,8 @@ pub fn (mut f Fmt) call_expr(node ast.CallExpr) {
// `time.now()` without `time imported` is processed as a method call with `time` being
// a `node.left` expression. Import `time` automatically.
// TODO fetch all available modules
if node.left.name in ['time', 'os', 'strings', 'math', 'json', 'base64'] {
if node.left.name in ['time', 'os', 'strings', 'math', 'json', 'base64']
&& !node.left.scope.known_var(node.left.name) {
f.file.imports << ast.Import{
mod: node.left.name
alias: node.left.name
@ -1658,6 +1671,11 @@ pub fn (mut f Fmt) call_expr(node ast.CallExpr) {
}
}
f.expr(node.left)
is_method_newline = node.left.pos().last_line != node.name_pos.line_nr
if is_method_newline {
f.indent++
f.writeln('')
}
f.write('.' + node.name)
} else {
f.write_language_prefix(node.language)
@ -1680,6 +1698,9 @@ pub fn (mut f Fmt) call_expr(node ast.CallExpr) {
f.write(')')
f.or_expr(node.or_block)
f.comments(node.comments, has_nl: false)
if is_method_newline {
f.indent--
}
}
fn (mut f Fmt) write_generic_call_if_require(node ast.CallExpr) {


@ -0,0 +1,10 @@
fn main() {
options := cmdline
.only_options(args)
.filter(it != '-') // options, not including '-'
.map(if it.bytes().len > 1 {
1
})
println(options)
}


@ -0,0 +1,10 @@
fn main() {
options := cmdline
.only_options(args)
.filter(it != '-') // options, not including '-'
.map(if it.bytes().len>1{
1
})
println(options)
}


@ -1,3 +1,3 @@
module proto
import emily33901.vproto
import emily33901.vproto


@ -0,0 +1,6 @@
import rand
import rand.seed
fn main() {
rand.seed(seed.time_seed_array(2))
}


@ -0,0 +1,19 @@
import json
struct Request {
a int
}
fn parse(s string) ?Request {
return json.decode(Request, s)
}
fn parse2(s string) ?Request {
req := json.decode(Request, s)?
return req
}
fn main() {
println(parse('{"a": 22} ')?)
println(parse2('{"a": 22} ')?)
}


@ -0,0 +1,5 @@
fn main() {
strings := 'hello'
a := strings.repeat(2)
println(a)
}


@ -389,6 +389,9 @@ fn (mut g Gen) gen_assign_stmt(node_ ast.AssignStmt) {
g.write('*')
}
g.expr(left)
if !is_decl && var_type.has_flag(.shared_f) {
g.write('->val') // don't reset the mutex, just change the value
}
}
}
}
@ -432,7 +435,6 @@ fn (mut g Gen) gen_assign_stmt(node_ ast.AssignStmt) {
}
*/
}
g.is_shared = var_type.has_flag(.shared_f)
if !cloned {
if is_fixed_array_var {
// TODO Instead of the translated check, check if it's a pointer already
@ -445,6 +447,7 @@ fn (mut g Gen) gen_assign_stmt(node_ ast.AssignStmt) {
g.expr(val)
g.write(', sizeof($typ_str))')
} else if is_decl {
g.is_shared = var_type.has_flag(.shared_f)
if is_fixed_array_init && !has_val {
if val is ast.ArrayInit {
g.array_init(val, ident.name)
@ -476,11 +479,11 @@ fn (mut g Gen) gen_assign_stmt(node_ ast.AssignStmt) {
if op_overloaded {
g.op_arg(val, op_expected_right, val_type)
} else {
exp_type := if left.is_auto_deref_var() {
exp_type := if left.is_auto_deref_var() || var_type.has_flag(.shared_f) {
var_type.deref()
} else {
var_type
}
}.clear_flag(.shared_f) // don't reset the mutex, just change the value
g.expr_with_cast(val, val_type, exp_type)
}
}
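The codegen change (and the matching checker tweak earlier) is about assigning a whole new value to a `shared` variable: the generated C writes through `->val`, so the mutex carried by the shared wrapper is left untouched. A hedged sketch of the V-level pattern being compiled:
fn main() {
	shared a := []int{}
	lock a {
		// replaces the value; the mutex belonging to `a` is not reset
		a = [1, 2, 3]
	}
	rlock a {
		println(a.len) // 3
	}
}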

Some files were not shown because too many files have changed in this diff.