v test-fmt: reformat some skipped files, comment on the remaining ones

pull/6615/head
Delyan Angelov 2020-10-15 00:39:09 +03:00
parent e36f11750b
commit 3795aaab5c
19 changed files with 262 additions and 347 deletions
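The changes below are the output of V's own formatter on files that `v test-fmt` previously skipped. As a rough sketch of how such a change can be reproduced and re-checked (the exact invocation is an assumption and is not recorded in this commit):

    v fmt -w cmd/tools/gen_vc.v    # rewrite one previously skipped file in place
    v test-fmt                     # re-check formatting across the tree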


@@ -2,10 +2,8 @@ module main
// This tool regenerates V's bootstrap .c files
// every time the V master branch is updated.
// if run with the --serve flag it will run in webhook
// server mode awaiting a request to http://host:port/genhook
// available command line flags:
// --work-dir gen_vc's working directory
// --purge force purge the local repositories
@@ -15,7 +13,6 @@ module main
// --log-file path to log file used when --log-to is 'file'
// --dry-run dont push anything to remote repo
// --force force update even if already up to date
import os
import log
import flag
@@ -46,13 +43,13 @@ const(
// version
app_version = '0.1.2'
// description
-app_description = 'This tool regenerates V\'s bootstrap .c files every time the V master branch is updated.'
+app_description = "This tool regenerates V\'s bootstrap .c files every time the V master branch is updated."
// assume something went wrong if file size less than this
too_short_file_limit = 5000
// create a .c file for these os's
vc_build_oses = [
'nix', // all nix based os
-'windows'
+'windows',
]
)
@@ -109,23 +106,21 @@ struct FlagOptions {
fn main() {
mut fp := flag.new_flag_parser(os.args.clone())
fp.application(app_name)
fp.version(app_version)
fp.description(app_description)
fp.skip_executable()
show_help := fp.bool('help', 0, false, 'Show this help screen\n')
flag_options := parse_flags(mut fp)
-if show_help { println( fp.usage() ) exit(0) }
+if show_help {
+println(fp.usage())
+exit(0)
+}
fp.finalize() or {
eprintln(err)
println(fp.usage())
return
}
// webhook server mode
if flag_options.serve {
vweb.run<WebhookServer>(flag_options.port)
@@ -185,7 +180,6 @@ pub fn (mut ws WebhookServer) genhook() {
pub fn (ws &WebhookServer) reset() {
}
// parse flags to FlagOptions struct
fn parse_flags(mut fp flag.FlagParser) FlagOptions {
return FlagOptions{
@@ -193,8 +187,8 @@ fn parse_flags(mut fp flag.FlagParser) FlagOptions {
work_dir: fp.string('work-dir', 0, work_dir, 'gen_vc working directory')
purge: fp.bool('purge', 0, false, 'force purge the local repositories')
port: fp.int('port', 0, server_port, 'port for web server to listen on')
-log_to : fp.string('log-to', 0, log_to, 'log to is \'file\' or \'terminal\'')
+log_to: fp.string('log-to', 0, log_to, "log to is \'file\' or \'terminal\'")
-log_file : fp.string('log-file', 0, log_file, 'log file to use when log-to is \'file\'')
+log_file: fp.string('log-file', 0, log_file, "log file to use when log-to is \'file\'")
dry_run: fp.bool('dry-run', 0, dry_run, 'when specified dont push anything to remote repo')
force: fp.bool('force', 0, false, 'force update even if already up to date')
}
@@ -212,11 +206,12 @@ fn (mut gen_vc GenVC) init() {
fn (mut gen_vc GenVC) generate() {
// set errors to false
gen_vc.gen_error = false
// check if gen_vc dir exists
if !os.is_dir(gen_vc.options.work_dir) {
// try create
-os.mkdir(gen_vc.options.work_dir) or { panic(err) }
+os.mkdir(gen_vc.options.work_dir) or {
+panic(err)
+}
// still dosen't exist... we have a problem
if !os.is_dir(gen_vc.options.work_dir) {
gen_vc.logger.error('error creating directory: $gen_vc.options.work_dir')
@@ -224,10 +219,8 @@ fn (mut gen_vc GenVC) generate() {
return
}
}
// cd to gen_vc dir
os.chdir(gen_vc.options.work_dir)
// if we are not running with the --serve flag (webhook server)
// rather than deleting and re-downloading the repo each time
// first check to see if the local v repo is behind master
@@ -242,22 +235,17 @@ fn (mut gen_vc GenVC) generate() {
return
}
}
// delete repos
gen_vc.purge_repos()
// clone repos
gen_vc.cmd_exec('git clone --depth 1 https://$git_repo_v $git_repo_dir_v')
gen_vc.cmd_exec('git clone --depth 1 https://$git_repo_vc $git_repo_dir_vc')
// get output of git log -1 (last commit)
git_log_v := gen_vc.cmd_exec('git -C $git_repo_dir_v log -1 --format="commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
git_log_vc := gen_vc.cmd_exec('git -C $git_repo_dir_vc log -1 --format="Commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
// date of last commit in each repo
ts_v := git_log_v.find_between('Date:', '\n').trim_space()
ts_vc := git_log_vc.find_between('Date:', '\n').trim_space()
// parse time as string to time.Time
last_commit_time_v := time.parse(ts_v) or {
panic(err)
@@ -265,37 +253,31 @@ fn (mut gen_vc GenVC) generate() {
last_commit_time_vc := time.parse(ts_vc) or {
panic(err)
}
// git dates are in users local timezone and v time.parse does not parse
// timezones at the moment, so for now get unix timestamp from output also
t_unix_v := git_log_v.find_between('Date Unix:', '\n').trim_space().int()
t_unix_vc := git_log_vc.find_between('Date Unix:', '\n').trim_space().int()
// last commit hash in v repo
last_commit_hash_v := git_log_v.find_between('commit', '\n').trim_space()
last_commit_hash_v_short := last_commit_hash_v[..7]
// subject
-last_commit_subject := git_log_v.find_between('Subject:', '\n').trim_space().replace('"', '\\"')
+last_commit_subject := git_log_v.find_between('Subject:', '\n').trim_space().replace('"',
+'\\"')
// log some info
gen_vc.logger.debug('last commit time ($git_repo_v): ' + last_commit_time_v.format_ss())
gen_vc.logger.debug('last commit time ($git_repo_vc): ' + last_commit_time_vc.format_ss())
gen_vc.logger.debug('last commit hash ($git_repo_v): $last_commit_hash_v')
gen_vc.logger.debug('last commit subject ($git_repo_v): $last_commit_subject')
// if vc repo already has a newer commit than the v repo, assume it's up to date
if t_unix_vc >= t_unix_v && !gen_vc.options.force {
gen_vc.logger.warn('vc repository is already up to date.')
return
}
// try build v for current os (linux in this case)
gen_vc.cmd_exec('make -C $git_repo_dir_v')
v_exec := '$git_repo_dir_v/v'
// check if make was successful
gen_vc.assert_file_exists_and_is_not_too_short(v_exec, err_msg_make)
// build v.c for each os
for os_name in vc_build_oses {
vc_suffix := if os_name == 'nix' { '' } else { '_${os_name[..3]}' }
@@ -312,7 +294,6 @@ fn (mut gen_vc GenVC) generate() {
// add new .c file to local vc repo
gen_vc.cmd_exec('git -C $git_repo_dir_vc add $c_file')
}
// check if the vc repo actually changed
git_status := gen_vc.cmd_exec('git -C $git_repo_dir_vc status')
if git_status.contains('nothing to commit') {
@@ -383,7 +364,7 @@ fn (mut gen_vc GenVC) purge_repos() {
}
// check if file size is too short
-fn (mut gen_vc GenVC) assert_file_exists_and_is_not_too_short(f string, emsg string){
+fn (mut gen_vc GenVC) assert_file_exists_and_is_not_too_short(f, emsg string) {
if !os.exists(f) {
gen_vc.logger.error('$err_msg_build: $emsg .')
gen_vc.gen_error = true


@@ -39,30 +39,30 @@ pub fn line_to_timestamp_and_commit(line string) (int,string) {
return parts[0].int(), parts[1]
}
-pub fn normalized_workpath_for_commit(workdir string, commit string) string {
+pub fn normalized_workpath_for_commit(workdir, commit string) string {
nc := 'v_at_' + commit.replace('^', '_').replace('-', '_').replace('/', '_')
return os.real_path(workdir + os.path_separator + nc)
}
-pub fn prepare_vc_source(vcdir string, cdir string, commit string) (string,string) {
+pub fn prepare_vc_source(vcdir, cdir, commit string) (string, string) {
scripting.chdir(cdir)
// Building a historic v with the latest vc is not always possible ...
// It is more likely, that the vc *at the time of the v commit*,
// or slightly before that time will be able to build the historic v:
vline := scripting.run('git rev-list -n1 --timestamp "$commit" ')
-v_timestamp,v_commithash := vgit.line_to_timestamp_and_commit(vline)
+v_timestamp, v_commithash := line_to_timestamp_and_commit(vline)
-vgit.check_v_commit_timestamp_before_self_rebuilding(v_timestamp)
+check_v_commit_timestamp_before_self_rebuilding(v_timestamp)
scripting.chdir(vcdir)
scripting.run('git checkout master')
vcbefore := scripting.run('git rev-list HEAD -n1 --timestamp --before=$v_timestamp ')
-_,vccommit_before := vgit.line_to_timestamp_and_commit(vcbefore)
+_, vccommit_before := line_to_timestamp_and_commit(vcbefore)
scripting.run('git checkout "$vccommit_before" ')
scripting.run('wc *.c')
scripting.chdir(cdir)
return v_commithash, vccommit_before
}
-pub fn clone_or_pull( remote_git_url string, local_worktree_path string ) {
+pub fn clone_or_pull(remote_git_url, local_worktree_path string) {
// NB: after clone_or_pull, the current repo branch is === HEAD === master
if os.is_dir(local_worktree_path) && os.is_dir(os.join_path(local_worktree_path, '.git')) {
// Already existing ... Just pulling in this case is faster usually.
@@ -100,18 +100,17 @@ pub fn (mut vgit_context VGitContext) compile_oldv_if_needed() {
if 'windows' == os.user_os() {
command_for_building_v_from_c_source = '$vgit_context.cc -std=c99 -municode -w -o cv.exe "$vgit_context.path_vc/v_win.c" '
command_for_selfbuilding = './cv.exe -o $vgit_context.vexename {SOURCE}'
-}
-else {
+} else {
command_for_building_v_from_c_source = '$vgit_context.cc -std=gnu11 -w -o cv "$vgit_context.path_vc/v.c" -lm -lpthread'
command_for_selfbuilding = './cv -o $vgit_context.vexename {SOURCE}'
}
scripting.chdir(vgit_context.workdir)
clone_or_pull(vgit_context.v_repo_url, vgit_context.path_v)
clone_or_pull(vgit_context.vc_repo_url, vgit_context.path_vc)
scripting.chdir(vgit_context.path_v)
scripting.run('git checkout $vgit_context.commit_v')
-v_commithash,vccommit_before := vgit.prepare_vc_source(vgit_context.path_vc, vgit_context.path_v, vgit_context.commit_v)
+v_commithash, vccommit_before := prepare_vc_source(vgit_context.path_vc, vgit_context.path_v,
+vgit_context.commit_v)
vgit_context.commit_v__hash = v_commithash
vgit_context.commit_vc_hash = vccommit_before
if os.exists('cmd/v') {
@@ -128,7 +127,6 @@ pub fn (mut vgit_context VGitContext) compile_oldv_if_needed() {
scripting.run(command_for_building_v_from_c_source)
build_cmd := command_for_selfbuilding.replace('{SOURCE}', vgit_context.vvlocation)
scripting.run(build_cmd)
// At this point, there exists a file vgit_context.vexepath
// which should be a valid working V executable.
}
@@ -145,8 +143,8 @@ pub mut:
pub fn add_common_tool_options(mut context VGitOptions, mut fp flag.FlagParser) []string {
tdir := os.temp_dir()
context.workdir = os.real_path(fp.string('workdir', `w`, tdir, 'A writable base folder. Default: $tdir'))
-context.v_repo_url = fp.string('vrepo', 0, vgit.remote_v_repo_url, 'The url of the V repository. You can clone it locally too. See also --vcrepo below.')
+context.v_repo_url = fp.string('vrepo', 0, remote_v_repo_url, 'The url of the V repository. You can clone it locally too. See also --vcrepo below.')
-context.vc_repo_url = fp.string('vcrepo', 0, vgit.remote_vc_repo_url, 'The url of the vc repository. You can clone it
+context.vc_repo_url = fp.string('vcrepo', 0, remote_vc_repo_url, 'The url of the vc repository. You can clone it
${flag.space}beforehand, and then just give the local folder
${flag.space}path here. That will eliminate the network ops
${flag.space}done by this tool, which is useful, if you want
@@ -154,31 +152,25 @@ ${flag.space}to script it/run it in a restrictive vps/docker.
')
context.show_help = fp.bool('help', `h`, false, 'Show this help screen.')
context.verbose = fp.bool('verbose', `v`, false, 'Be more verbose.')
if context.show_help {
println(fp.usage())
exit(0)
}
if context.verbose {
scripting.set_verbose(true)
}
if os.is_dir(context.v_repo_url) {
context.v_repo_url = os.real_path(context.v_repo_url)
}
if os.is_dir(context.vc_repo_url) {
context.vc_repo_url = os.real_path(context.vc_repo_url)
}
commits := fp.finalize() or {
eprintln('Error: ' + err)
exit(1)
}
for commit in commits {
-vgit.validate_commit_exists(commit)
+validate_commit_exists(commit)
}
return commits
}


@@ -69,10 +69,8 @@ fn main() {
fp.arguments_description('VCOMMIT')
fp.skip_executable()
fp.limit_free_args(1, 1)
context.cleanup = fp.bool('clean', 0, true, 'Clean before running (slower).')
context.cmd_to_run = fp.string('command', `c`, '', 'Command to run in the old V repo.\n')
commits := vgit.add_common_tool_options(mut context.vgo, mut fp)
if commits.len > 0 {
context.commit_v = commits[0]
@@ -83,7 +81,7 @@ fn main() {
context.path_v = vgit.normalized_workpath_for_commit(context.vgo.workdir, context.commit_v)
context.path_vc = vgit.normalized_workpath_for_commit(context.vgo.workdir, 'vc')
if !os.is_dir(context.vgo.workdir) {
-eprintln('Work folder: ${context.vgo.workdir} , does not exist.')
+eprintln('Work folder: $context.vgo.workdir , does not exist.')
exit(2)
}
ecc := os.getenv('CC')
@@ -94,9 +92,7 @@ fn main() {
scripting.rmrf(context.path_v)
scripting.rmrf(context.path_vc)
}
context.compile_oldv_if_needed()
scripting.chdir(context.path_v)
scripting.cprintln('# v commit hash: $context.commit_v_hash')
scripting.cprintln('# checkout folder: $context.path_v')
@@ -108,5 +104,4 @@ fn main() {
println(cmdres.output)
exit(cmdres.exit_code)
}
}


@@ -5,10 +5,10 @@ import vgit
const (
tool_version = '0.0.5'
-tool_description = ' Compares V executable size and performance,
+tool_description = " Compares V executable size and performance,
between 2 commits from V\'s local git history.
When only one commit is given, it is compared to master.
-'
+"
)
struct Context {
@@ -44,45 +44,27 @@ fn (c Context) compare_versions() {
c.prepare_v(c.b, c.commit_before)
c.prepare_v(c.a, c.commit_after)
scripting.chdir(c.vgo.workdir)
if c.vflags.len > 0 {
os.setenv('VFLAGS', c.vflags, true)
}
// The first is the baseline, against which all the others will be compared.
// It is the fastest, since hello_world.v has only a single println in it,
mut perf_files := []string{}
-perf_files << c.compare_v_performance('source_hello', [
-'vprod @DEBUG@ -o source.c examples/hello_world.v',
-'vprod -o source.c examples/hello_world.v',
-'v @DEBUG@ -o source.c examples/hello_world.v',
-'v -o source.c examples/hello_world.v',
-])
-perf_files << c.compare_v_performance('source_v', [
-'vprod @DEBUG@ -o source.c @COMPILER@',
-'vprod -o source.c @COMPILER@',
-'v @DEBUG@ -o source.c @COMPILER@',
-'v -o source.c @COMPILER@',
-])
-perf_files << c.compare_v_performance('binary_hello', [
-'vprod -o hello examples/hello_world.v',
-'v -o hello examples/hello_world.v',
-])
-perf_files << c.compare_v_performance('binary_v', [
-'vprod -o binary @COMPILER@',
-'v -o binary @COMPILER@',
-])
+perf_files <<
+c.compare_v_performance('source_hello', ['vprod @DEBUG@ -o source.c examples/hello_world.v', 'vprod -o source.c examples/hello_world.v', 'v @DEBUG@ -o source.c examples/hello_world.v', 'v -o source.c examples/hello_world.v'])
+perf_files <<
+c.compare_v_performance('source_v', ['vprod @DEBUG@ -o source.c @COMPILER@', 'vprod -o source.c @COMPILER@', 'v @DEBUG@ -o source.c @COMPILER@', 'v -o source.c @COMPILER@'])
+perf_files <<
+c.compare_v_performance('binary_hello', ['vprod -o hello examples/hello_world.v', 'v -o hello examples/hello_world.v'])
+perf_files <<
+c.compare_v_performance('binary_v', ['vprod -o binary @COMPILER@', 'v -o binary @COMPILER@'])
println('All performance files:')
for f in perf_files {
println(' $f')
}
}
-fn (c &Context) prepare_v(cdir string, commit string) {
+fn (c &Context) prepare_v(cdir, commit string) {
mut cc := os.getenv('CC')
if cc == '' {
cc = 'cc'
@@ -99,9 +81,9 @@ fn (c &Context) prepare_v(cdir string, commit string) {
vgit_context.compile_oldv_if_needed()
scripting.chdir(cdir)
println('Making a v compiler in $cdir')
-scripting.run('./v -cc ${cc} -o v $vgit_context.vvlocation')
+scripting.run('./v -cc $cc -o v $vgit_context.vvlocation')
println('Making a vprod compiler in $cdir')
-scripting.run('./v -cc ${cc} -prod -o vprod $vgit_context.vvlocation')
+scripting.run('./v -cc $cc -prod -o vprod $vgit_context.vvlocation')
println('Stripping and compressing cv v and vprod binaries in $cdir')
scripting.run('cp cv cv_stripped')
scripting.run('cp v v_stripped')
@@ -118,12 +100,14 @@ fn (c &Context) prepare_v(cdir string, commit string) {
scripting.show_sizes_of_files(['$cdir/vprod', '$cdir/vprod_stripped', '$cdir/vprod_stripped_upxed'])
vversion := scripting.run('$cdir/v -version')
vcommit := scripting.run('git rev-parse --short --verify HEAD')
-println('V version is: ${vversion} , local source commit: ${vcommit}')
+println('V version is: $vversion , local source commit: $vcommit')
if vgit_context.vvlocation == 'cmd/v' {
if os.exists('vlib/v/ast/ast.v') {
-println('Source lines of the compiler: ' + scripting.run('find cmd/v/ vlib/v/ -name "*.v" | grep -v /tests/ | xargs wc | tail -n -1'))
+println('Source lines of the compiler: ' +
+scripting.run('find cmd/v/ vlib/v/ -name "*.v" | grep -v /tests/ | xargs wc | tail -n -1'))
} else {
-println('Source lines of the compiler: ' + scripting.run('wc cmd/v/*.v vlib/compiler/*.v | tail -n -1'))
+println('Source lines of the compiler: ' +
+scripting.run('wc cmd/v/*.v vlib/compiler/*.v | tail -n -1'))
}
} else if vgit_context.vvlocation == 'v.v' {
println('Source lines of the compiler: ' + scripting.run('wc v.v vlib/compiler/*.v | tail -n -1'))
@@ -156,14 +140,17 @@ fn (c Context) compare_v_performance(label string, commands []string) string {
println(cmd)
}
for cmd in commands {
-hyperfine_commands_arguments << " \'cd ${c.b:-34s} ; ./$cmd \' ".replace_each(['@COMPILER@', source_location_b, '@DEBUG@', debug_option_b])
+hyperfine_commands_arguments <<
+" \'cd ${c.b:-34s} ; ./$cmd \' ".replace_each(['@COMPILER@', source_location_b, '@DEBUG@', debug_option_b])
}
for cmd in commands {
-hyperfine_commands_arguments << " \'cd ${c.a:-34s} ; ./$cmd \' ".replace_each(['@COMPILER@', source_location_a, '@DEBUG@', debug_option_a])
+hyperfine_commands_arguments <<
+" \'cd ${c.a:-34s} ; ./$cmd \' ".replace_each(['@COMPILER@', source_location_a, '@DEBUG@', debug_option_a])
}
// /////////////////////////////////////////////////////////////////////////////
cmd_stats_file := os.real_path([c.vgo.workdir, 'v_performance_stats_${label}.json'].join(os.path_separator))
-comparison_cmd := 'hyperfine $c.hyperfineopts ' + '--export-json ${cmd_stats_file} ' + '--time-unit millisecond ' + '--style full --warmup $c.warmups ' + hyperfine_commands_arguments.join(' ')
+comparison_cmd := 'hyperfine $c.hyperfineopts ' + '--export-json $cmd_stats_file ' + '--time-unit millisecond ' +
+'--style full --warmup $c.warmups ' + hyperfine_commands_arguments.join(' ')
// /////////////////////////////////////////////////////////////////////////////
if c.vgo.verbose {
println(comparison_cmd)
@@ -175,7 +162,8 @@ fn (c Context) compare_v_performance(label string, commands []string) string {
}
fn main() {
-scripting.used_tools_must_exist(['cp', 'rm', 'strip', 'make', 'git', 'upx', 'cc', 'wc', 'tail', 'find', 'xargs', 'hyperfine'])
+scripting.used_tools_must_exist(['cp', 'rm', 'strip', 'make', 'git', 'upx', 'cc', 'wc', 'tail',
+'find', 'xargs', 'hyperfine'])
mut context := new_context()
mut fp := flag.new_flag_parser(os.args)
fp.application(os.file_name(os.executable()))
@@ -184,12 +172,10 @@ fn main() {
fp.arguments_description('COMMIT_BEFORE [COMMIT_AFTER]')
fp.skip_executable()
fp.limit_free_args(1, 2)
context.vflags = fp.string('vflags', 0, '', 'Additional options to pass to the v commands, for example "-cc tcc"')
-context.hyperfineopts = fp.string('hyperfine_options', 0, '',
-'Additional options passed to hyperfine.
+context.hyperfineopts = fp.string('hyperfine_options', 0, '', 'Additional options passed to hyperfine.
${flag.space}For example on linux, you may want to pass:
-${flag.space}--hyperfine_options "--prepare \'sync; echo 3 | sudo tee /proc/sys/vm/drop_caches\'"
+$flag.space--hyperfine_options "--prepare \'sync; echo 3 | sudo tee /proc/sys/vm/drop_caches\'"
')
commits := vgit.add_common_tool_options(mut context.vgo, mut fp)
context.commit_before = commits[0]
@@ -204,6 +190,5 @@ ${flag.space}--hyperfine_options "--prepare \'sync; echo 3 | sudo tee /proc/sys/
eprintln(msg)
exit(2)
}
context.compare_versions()
}


@@ -2,6 +2,7 @@ module main
import os
import term
// //////////////////////////////////////////////////////////////////
// / This file will get compiled as part of the main program,
// / for a _test.v file.
@@ -14,29 +15,26 @@ import term
fn cb_assertion_failed(i &VAssertMetaInfo) {
use_color := term.can_show_color_on_stderr()
use_relative_paths := match os.getenv('VERROR_PATHS') {
-'absolute' {
-false
-} else {
-true
-}
+'absolute' { false }
+else { true }
}
final_filename := if use_relative_paths { i.fpath } else { os.real_path(i.fpath) }
final_funcname := i.fn_name.replace('main.', '').replace('__', '.')
final_src := if use_color { term.bold(i.src) } else { i.src }
eprintln('')
-eprintln('$final_filename:${i.line_nr+1}: failed assert in function ${final_funcname}')
+eprintln('$final_filename:${i.line_nr+1}: failed assert in function $final_funcname')
-eprintln('Source : `${final_src}`')
+eprintln('Source : `$final_src`')
if i.op.len > 0 && i.op != 'call' {
-mut slvalue := '${i.lvalue}'
+mut slvalue := '$i.lvalue'
-mut srvalue := '${i.rvalue}'
+mut srvalue := '$i.rvalue'
-lpostfix := if slvalue == i.llabel { '.' } else { '<= `${i.llabel}`' }
+lpostfix := if slvalue == i.llabel { '.' } else { '<= `$i.llabel`' }
-rpostfix := if srvalue == i.rlabel { '.' } else { '<= `${i.rlabel}`' }
+rpostfix := if srvalue == i.rlabel { '.' } else { '<= `$i.rlabel`' }
if use_color {
slvalue = term.bold(term.yellow(slvalue))
srvalue = term.bold(term.yellow(srvalue))
}
-eprintln(' left value: ${slvalue} ${lpostfix}')
+eprintln(' left value: $slvalue $lpostfix')
-eprintln(' right value: ${srvalue} ${rpostfix}')
+eprintln(' right value: $srvalue $rpostfix')
}
}


@@ -1,4 +1,5 @@
module main
// /////////////////////////////////////////////////////////////////////
// / This file will get compiled as a part of the same module,
// / in which a given _test.v file is, when v is given -stats argument
@@ -77,7 +78,8 @@ fn (b &BenchedTests) fn_name() string {
// Called at the end of the test program produced by `v -stats file_test.v`
fn (mut b BenchedTests) end_testing() {
b.bench.stop()
-println(inner_indent + b.bench.total_message('running V tests in "' + os.file_name(b.test_suit_file) + '"'))
+println(inner_indent + b.bench.total_message('running V tests in "' + os.file_name(b.test_suit_file) +
+'"'))
}
// ///////////////////////////////////////////////////////////////////


@@ -5,37 +5,19 @@ import testing
const (
known_failing_exceptions = [
-'examples/vweb/vweb_example.v',
-'cmd/tools/gen_vc.v',
-'cmd/tools/modules/vgit/vgit.v', // generics
-'cmd/tools/preludes/live_main.v',
-'cmd/tools/preludes/live_shared.v',
-'cmd/tools/preludes/tests_assertions.v',
-'cmd/tools/preludes/tests_with_stats.v',
-'cmd/tools/performance_compare.v', // generics
-'cmd/tools/oldv.v', // generics
-'tutorials/code/blog/article.v',
-'tutorials/code/blog/blog.v',
-'vlib/arrays/arrays.v',
-'vlib/arrays/arrays_test.v',
-'vlib/builtin/js/hashmap.v',
-'vlib/v/tests/fn_variadic_test.v',
-'vlib/v/tests/generic_test.v',
-'vlib/crypto/aes/aes.v',
-'vlib/crypto/aes/aes_cbc.v',
-'vlib/crypto/aes/block_generic.v',
-'vlib/crypto/aes/const.v',
-'vlib/crypto/aes/cypher_generic.v',
-'vlib/crypto/rc4/rc4.v',
-'vlib/eventbus/eventbus_test.v',
-'vlib/os/bare/bare_example_linux.v',
-'vlib/szip/szip.v',
-'vlib/uiold/examples/users_gui/users.v',
-'vlib/vweb/assets/assets.v',
-'vlib/vweb/vweb.v',
-'vlib/v/gen/js/tests/life.v',
-'vlib/builtin/bare/linuxsys_bare.v',
-'vlib/os/os.v',
+'vlib/v/tests/generics_test.v', // struct Repo<T, U> { => struct Repo {
+'vlib/crypto/aes/aes.v', // pub fn (c &AesCipher) encrypt(mut dst, mut src []byte) {
+'vlib/crypto/aes/block_generic.v', // fn expand_key_generic(key []byte, mut enc, mut dec []u32) {
+'vlib/crypto/aes/const.v', // multiple narrow columns of []string turned to 1 long single column, otherwise works
+'vlib/crypto/rc4/rc4.v', // pub fn (mut c Cipher) xor_key_stream(mut dst, mut src []byte) {
+'vlib/vweb/vweb.v', // $for method in T.methods { => $for method in T(methods) { , `return // xx` => parse expr error
+'vlib/v/gen/js/tests/life.v', // error: unexpected `,`, expecting ), on JS.setInterval(fn () { show(game) game = step(game) }, 500)
+'vlib/builtin/js/builtin.v', // JS.console.error(s) => JS.error(s), JS.process.exit(c) => JS.exit(c)
+'vlib/builtin/js/jsfns_node.js.v',
+'vlib/builtin/js/jsfns.js.v',
+'vlib/builtin/js/jsfns_browser.js.v',
+'vlib/builtin/bare/linuxsys_bare.v', // error: expr(): bad token `asm`, on `asm {}`
+'vlib/os/os.v', // embeded comments, mib := [1/* CTL_KERN */, 14/* KERN_PROC */, 12/* KERN_PROC_PATHNAME */, -1] => comment the rest of the line
]
)


@@ -42,6 +42,9 @@ pub fn (mut app App) text() vweb.Result {
}
pub fn (mut app App) cookie() vweb.Result {
-app.vweb.set_cookie(name:'cookie', value:'test')
+app.vweb.set_cookie({
+name: 'cookie'
+value: 'test'
+})
return app.vweb.text('Headers: $app.vweb.headers')
}


@@ -26,7 +26,6 @@ pub fn (app &App) index_html() vweb.Result {
return $vweb.html()
}
*/
pub fn (app &App) index() vweb.Result {
articles := app.find_all_articles()
return $vweb.html()


@@ -5,13 +5,14 @@ module arrays
// - idx_min / idx_max - return the index of the first minumum / maximum
// - shuffle - randomize array items order in place (allowing exit after n items)
// - merge - combine two sorted arrays and maintain sorted order
import rand
// min returns the minimum
[direct_array_access]
pub fn min<T>(a []T) T {
-if a.len==0 { panic('.min called on an empty array') }
+if a.len == 0 {
+panic('.min called on an empty array')
+}
mut val := a[0]
for i in 0 .. a.len {
if a[i] < val {
@@ -24,7 +25,9 @@ pub fn min<T>(a []T) T {
// max returns the maximum
[direct_array_access]
pub fn max<T>(a []T) T {
-if a.len==0 { panic('.max called on an empty array') }
+if a.len == 0 {
+panic('.max called on an empty array')
+}
mut val := a[0]
for i in 0 .. a.len {
if a[i] > val {
@@ -37,7 +40,9 @@ pub fn max<T>(a []T) T {
// idx_min returns the index of the first minimum
[direct_array_access]
pub fn idx_min<T>(a []T) int {
-if a.len==0 { panic('.idxmin called on an empty array') }
+if a.len == 0 {
+panic('.idxmin called on an empty array')
+}
mut idx := 0
mut val := a[0]
for i in 0 .. a.len {
@@ -52,7 +57,9 @@ pub fn idx_min<T>(a []T) int {
// idx_max returns the index of the first maximum
[direct_array_access]
pub fn idx_max<T>(a []T) int {
-if a.len==0 { panic('.idxmax called on an empty array') }
+if a.len == 0 {
+panic('.idxmax called on an empty array')
+}
mut idx := 0
mut val := a[0]
for i in 0 .. a.len {
@@ -67,7 +74,9 @@ pub fn idx_max<T>(a []T) int {
// shuffle randomizes the first n items of an array in place (all if n=0)
[direct_array_access]
pub fn shuffle<T>(mut a []T, n int) {
-if n < 0 || n > a.len { panic("shuffle's argument 'n' must be in range [0,a.len]") }
+if n < 0 || n > a.len {
+panic("shuffle's argument 'n' must be in range [0,a.len]")
+}
cnt := if n == 0 { a.len - 1 } else { n }
for i in 0 .. cnt {
x := rand.int_in_range(i, a.len)
@@ -78,15 +87,13 @@ pub fn shuffle<T>(mut a []T, n int) {
}
}
// merge two sorted arrays (ascending) and maintain sorted order
[direct_array_access]
-pub fn merge<T>(a []T, b []T) []T {
+pub fn merge<T>(a, b []T) []T {
mut m := []T{len: a.len + b.len}
mut ia := 0
mut ib := 0
mut j := 0
// TODO efficient approach to merge_desc where: a[ia] >= b[ib]
for ia < a.len && ib < b.len {
if a[ia] <= b[ib] {
@@ -98,21 +105,17 @@ pub fn merge<T>(a []T, b []T) []T {
}
j++
}
// a leftovers
for ia < a.len {
m[j] = a[ia]
ia++
j++
}
// b leftovers
for ib < b.len {
m[j] = b[ib]
ib++
j++
}
return m
}


@@ -6,59 +6,46 @@ fn test_min() {
a := [8, 2, 6, 4]
assert min<int>(a) == 2
assert min<int>(a[2..]) == 4
b := [f32(5.1), 3.1, 1.1, 9.1]
assert min<f32>(b) == f32(1.1)
assert min<f32>(b[..2]) == f32(3.1)
c := [byte(4), 9, 3, 1]
assert min<byte>(c) == byte(1)
assert min<byte>(c[..3]) == byte(3)
}
fn test_max() {
a := [8, 2, 6, 4]
assert max<int>(a) == 8
assert max<int>(a[1..]) == 6
b := [f32(5.1), 3.1, 1.1, 9.1]
assert max<f32>(b) == f32(9.1)
assert max<f32>(b[..3]) == f32(5.1)
c := [byte(4), 9, 3, 1]
assert max<byte>(c) == byte(9)
assert max<byte>(c[2..]) == byte(3)
}
fn test_idx_min() {
a := [8, 2, 6, 4]
assert idx_min<int>(a) == 1
b := [f32(5.1), 3.1, 1.1, 9.1]
assert idx_min<f32>(b) == 2
c := [byte(4), 9, 3, 1]
assert idx_min<byte>(c) == 3
}
fn test_idx_max() {
a := [8, 2, 6, 4]
assert idx_max<int>(a) == 0
b := [f32(5.1), 3.1, 1.1, 9.1]
assert idx_max<f32>(b) == 3
c := [byte(4), 9, 3, 1]
assert idx_max<byte>(c) == 1
}
fn test_shuffle() {
rand.seed([u32(1), 2]) // set seed to produce same results in order
a := [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
mut b := a.clone()
mut c := a.clone()
@@ -66,19 +53,16 @@ fn test_shuffle() {
shuffle<int>(mut c, 0)
assert b == [6, 4, 5, 1, 9, 2, 10, 3, 8, 7]
assert c == [1, 6, 5, 8, 7, 2, 10, 9, 3, 4]
// test shuffling a slice
mut d := a.clone()
shuffle<int>(mut d[..5], 0)
assert d == [5, 2, 1, 3, 4, 6, 7, 8, 9, 10]
assert d[5..] == a[5..]
// test shuffling n items
mut e := a.clone()
shuffle<int>(mut e, 5)
assert e[..5] == [10, 3, 1, 8, 4]
assert e[5..] == [6, 7, 5, 9, 2]
// test shuffling empty array
mut f := a[..0]
shuffle<int>(mut f, 0)


@@ -1,16 +1,11 @@
// Copyright (c) 2019-2020 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
// Cipher block chaining (CBC) mode.
// CBC provides confidentiality by xoring (chaining) each plaintext block
// with the previous ciphertext block before applying the block cipher.
// See NIST SP 800-38A, pp 10-11
// NOTE this will be moved to crypto.cipher interface (joe-c)
module aes
import crypto.cipher
@@ -27,10 +22,10 @@ mut:
// internal
fn new_aes_cbc(b AesCipher, iv []byte) AesCbc {
return AesCbc{
-b: b,
+b: b
-block_size: b.block_size(),
+block_size: b.block_size()
-iv: iv.clone(),
+iv: iv.clone()
-tmp: []byte{len:(b.block_size()),}
+tmp: []byte{len: (b.block_size())}
}
}
@@ -44,7 +39,9 @@ pub fn new_cbc(b AesCipher, iv []byte) AesCbc {
return new_aes_cbc(b, iv)
}
-pub fn (x &AesCbc) block_size() int { return x.block_size }
+pub fn (x &AesCbc) block_size() int {
+return x.block_size
+}
pub fn (x &AesCbc) encrypt_blocks(mut dst_ []byte, src_ []byte) {
mut dst := *dst_
@@ -58,14 +55,11 @@ pub fn (x &AesCbc) encrypt_blocks(mut dst_ []byte, src_ []byte) {
if subtle.inexact_overlap(dst[..src.len], src_) {
panic('crypto.cipher: invalid buffer overlap')
}
mut iv := x.iv
for src.len > 0 {
// Write the xor to dst, then encrypt in place.
cipher.xor_bytes(mut dst[..x.block_size], src[..x.block_size], iv)
x.b.encrypt(mut dst[..x.block_size], mut dst[..x.block_size])
// Move to the next block with this block as the next iv.
iv = dst[..x.block_size]
if x.block_size >= src.len {
@@ -75,7 +69,6 @@ pub fn (x &AesCbc) encrypt_blocks(mut dst_ []byte, src_ []byte) {
}
dst = dst[x.block_size..]
}
// Save the iv for the next crypt_blocks call.
copy(x.iv, iv)
}
@@ -93,33 +86,27 @@ pub fn (mut x AesCbc) decrypt_blocks(mut dst []byte, src []byte) {
if src.len == 0 {
return
}
// For each block, we need to xor the decrypted data with the previous block's ciphertext (the iv).
// To avoid making a copy each time, we loop over the blocks BACKWARDS.
mut end := src.len
mut start := end - x.block_size
mut prev := start - x.block_size
// Copy the last block of ciphertext in preparation as the new iv.
copy(x.tmp, src.slice(start, end))
// Loop over all but the first block.
for start > 0 {
mut src_chunk := src.slice(start, end)
x.b.decrypt(mut (*dst).slice(start, end), mut src_chunk)
-cipher.xor_bytes(mut (*dst).slice(start, end), (*dst).slice(start, end), src.slice(prev, start))
+cipher.xor_bytes(mut (*dst).slice(start, end), (*dst).slice(start, end), src.slice(prev,
+start))
end = start
start = prev
prev -= x.block_size
}
// The first block is special because it uses the saved iv.
mut src_chunk := src.slice(start, end)
x.b.decrypt(mut (*dst).slice(start, end), mut src_chunk)
cipher.xor_bytes(mut (*dst).slice(start, end), (*dst).slice(start, end), x.iv)
// Set the new iv to the first block we copied earlier.
x.iv = x.tmp
x.tmp = x.iv


@@ -1,7 +1,6 @@
// Copyright (c) 2019-2020 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
import crypto.aes
fn test_crypto_aes() {
@@ -23,6 +22,6 @@ fn test_crypto_aes() {
mode := aes.new_cbc(block, iv)
cipher_clone := ciphertext.clone()
mode.encrypt_blocks(mut ciphertext, cipher_clone)
-assert ciphertext.hex() == 'c210459b514668ddc44674885e4979215265a6c44431a248421254ef357a8c2a308a8bddf5623af9df91737562041cf1'
+assert ciphertext.hex() ==
+'c210459b514668ddc44674885e4979215265a6c44431a248421254ef357a8c2a308a8bddf5623af9df91737562041cf1'
}


@@ -1,7 +1,6 @@
// Copyright (c) 2019-2020 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module aes
// new_cipher_generic creates and returns a new cipher.Block


@@ -7,29 +7,22 @@ struct EventData {
fn test_eventbus() {
ev_data := &EventData{'hello'}
mut eb := eventbus.new()
-eb.subscriber.subscribe_once("on_test", on_test)
-assert eb.has_subscriber("on_test")
-assert eb.subscriber.is_subscribed("on_test")
-eb.publish("on_test", eb, ev_data)
-assert !eb.has_subscriber("on_test")
-assert !eb.subscriber.is_subscribed("on_test")
-eb.subscriber.subscribe("on_test", on_test)
-assert eb.has_subscriber("on_test")
-assert eb.subscriber.is_subscribed("on_test")
+eb.subscriber.subscribe_once('on_test', on_test)
+assert eb.has_subscriber('on_test')
+assert eb.subscriber.is_subscribed('on_test')
+eb.publish('on_test', eb, ev_data)
+assert !eb.has_subscriber('on_test')
+assert !eb.subscriber.is_subscribed('on_test')
+eb.subscriber.subscribe('on_test', on_test)
+assert eb.has_subscriber('on_test')
+assert eb.subscriber.is_subscribed('on_test')
eb.clear_all()
-assert !eb.has_subscriber("on_test")
-assert !eb.subscriber.is_subscribed("on_test")
+assert !eb.has_subscriber('on_test')
+assert !eb.subscriber.is_subscribed('on_test')
}
fn on_test(receiver voidptr, ev &EventData, sender voidptr) {
assert receiver == 0
assert sender != 0
-assert ev.data == "hello"
+assert ev.data == 'hello'
}


@@ -3,5 +3,5 @@ fn main() {
s := 'test string\n'
sys_write(1, s.str, u64(s.len))
a := s[0]
-println("Hello freestanding!")
+println('Hello freestanding!')
}


@@ -3,24 +3,37 @@ module szip
#flag -I @VROOT/thirdparty/zip
#include "zip.c"
#include "zip.h"
-struct C.zip_t {}
+struct C.zip_t {
+}
type Zip = C.zip_t
fn C.zip_open(byteptr, int, byte) &Zip
fn C.zip_close(&Zip)
fn C.zip_entry_open(&Zip, byteptr) int
fn C.zip_entry_close(&Zip) int
fn C.zip_entry_name(&Zip) byteptr
fn C.zip_entry_index(&Zip) int
fn C.zip_entry_isdir(&Zip) int
fn C.zip_entry_size(&Zip) u64
fn C.zip_entry_crc32(&Zip) u32
fn C.zip_entry_write(&Zip, voidptr, int) int
fn C.zip_entry_fwrite(&Zip, byteptr) int
fn C.zip_entry_read(&Zip, byteptr, int) int
fn C.zip_entry_fread(&Zip, byteptr) int
fn C.zip_total_entries(&Zip) int
// Ref - miniz.h


@@ -64,7 +64,8 @@ fn test_variadic_only_with_no_vargs() {
fn_variadic_only_with_no_vargs()
}
-struct VaTestStruct {}
+struct VaTestStruct {
+}
fn (a VaTestStruct) variadic_method(name string, groups ...VaTestGroup) {
assert groups.len == 2


@@ -2,7 +2,6 @@ module assets
// this module provides an AssetManager for combining
// and caching javascript & css.
import os
import time
import crypto.md5
@@ -101,7 +100,9 @@ fn (am AssetManager) combine(asset_type string, to_file bool) string {
return out
}
if !os.is_dir(am.cache_dir) {
-os.mkdir(am.cache_dir) or { panic(err) }
+os.mkdir(am.cache_dir) or {
+panic(err)
+}
}
mut file := os.create(out_file) or {
panic(err)
@@ -157,7 +158,9 @@ fn (mut am AssetManager) add(asset_type, file string) bool {
}
asset := Asset{
file_path: file
-last_modified: time.Time{unix: u64(os.file_last_mod_unix(file))}
+last_modified: time.Time{
+unix: u64(os.file_last_mod_unix(file))
+}
}
if asset_type == 'css' {
am.css << asset
@@ -183,11 +186,7 @@ fn (am AssetManager) get_assets(asset_type string) []Asset {
if asset_type != 'css' && asset_type != 'js' {
panic('$unknown_asset_type_error ($asset_type).')
}
-assets := if asset_type == 'css' {
-am.css
-} else {
-am.js
-}
+assets := if asset_type == 'css' { am.css } else { am.js }
return assets
}