v test-fmt: reformat some skipped files, comment on the remaining ones

Branch: pull/6615/head
Author: Delyan Angelov, 2020-10-15 00:39:09 +03:00
Parent: e36f11750b
Commit: 3795aaab5c
19 changed files with 262 additions and 347 deletions
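
For readers skimming the hunks below: a minimal sketch, not taken from the diff itself, of the main vfmt conventions this commit applies to the previously skipped files, namely a space after "const", one statement per line, "or {}" bodies expanded onto their own lines, and a trailing comma on multi-line array literals. All names and paths in it are illustrative.

module main

import os

const (
	app_name = 'example'
	build_oses = [
		'nix',
		'windows',
	]
)

fn main() {
	if !os.is_dir('/tmp/example') {
		// vfmt expands `or` bodies onto their own lines
		os.mkdir('/tmp/example') or {
			panic(err)
		}
	}
	println(app_name)
	println(build_oses)
}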

View File

@ -2,10 +2,8 @@ module main
// This tool regenerates V's bootstrap .c files
// every time the V master branch is updated.
// if run with the --serve flag it will run in webhook
// server mode awaiting a request to http://host:port/genhook
// available command line flags:
// --work-dir gen_vc's working directory
// --purge force purge the local repositories
@ -15,7 +13,6 @@ module main
// --log-file path to log file used when --log-to is 'file'
// --dry-run don't push anything to remote repo
// --force force update even if already up to date
import os
import log
import flag
@ -24,54 +21,54 @@ import vweb
import net.urllib
// git credentials
const(
const (
git_username = os.getenv('GITUSER')
git_password = os.getenv('GITPASS')
)
// repository
const(
const (
// git repo
git_repo_v = 'github.com/vlang/v'
git_repo_vc = 'github.com/vlang/vc'
git_repo_v = 'github.com/vlang/v'
git_repo_vc = 'github.com/vlang/vc'
// local repo directories
git_repo_dir_v = 'v'
git_repo_dir_vc = 'vc'
)
// gen_vc
const(
const (
// name
app_name = 'gen_vc'
app_name = 'gen_vc'
// version
app_version = '0.1.2'
app_version = '0.1.2'
// description
app_description = 'This tool regenerates V\'s bootstrap .c files every time the V master branch is updated.'
app_description = "This tool regenerates V\'s bootstrap .c files every time the V master branch is updated."
// assume something went wrong if file size less than this
too_short_file_limit = 5000
// create a .c file for these os's
vc_build_oses = [
vc_build_oses = [
'nix', // all nix based os
'windows'
'windows',
]
)
// default options (overridden by flags)
const(
const (
// gen_vc working directory
work_dir = '/tmp/gen_vc'
work_dir = '/tmp/gen_vc'
// don't push anything to remote repo
dry_run = false
dry_run = false
// server port
server_port = 7171
// log file
log_file = '$work_dir/log.txt'
log_file = '$work_dir/log.txt'
// log_to is either 'file' or 'terminal'
log_to = 'terminal'
log_to = 'terminal'
)
// errors
const(
const (
err_msg_build = 'error building'
err_msg_make = 'make failed'
err_msg_gen_c = 'failed to generate .c file'
@ -81,9 +78,9 @@ const(
struct GenVC {
// logger
// flag options
options FlagOptions
options FlagOptions
mut:
logger &log.Log
logger &log.Log
// true if error was experienced running generate
gen_error bool
}
@ -109,23 +106,21 @@ struct FlagOptions {
fn main() {
mut fp := flag.new_flag_parser(os.args.clone())
fp.application(app_name)
fp.version(app_version)
fp.description(app_description)
fp.skip_executable()
show_help:=fp.bool('help', 0, false, 'Show this help screen\n')
show_help := fp.bool('help', 0, false, 'Show this help screen\n')
flag_options := parse_flags(mut fp)
if show_help { println( fp.usage() ) exit(0) }
if show_help {
println(fp.usage())
exit(0)
}
fp.finalize() or {
eprintln(err)
println(fp.usage())
return
}
eprintln(err)
println(fp.usage())
return
}
// webhook server mode
if flag_options.serve {
vweb.run<WebhookServer>(flag_options.port)
@ -142,7 +137,7 @@ fn new_gen_vc(flag_options FlagOptions) &GenVC {
mut logger := &log.Log{}
logger.set_level(.debug)
if flag_options.log_to == 'file' {
logger.set_full_logpath( flag_options.log_file )
logger.set_full_logpath(flag_options.log_file)
}
return &GenVC{
options: flag_options
@ -156,11 +151,11 @@ pub fn (mut ws WebhookServer) init_once() {
flag_options := parse_flags(mut fp)
ws.gen_vc = new_gen_vc(flag_options)
ws.gen_vc.init()
//ws.gen_vc = new_gen_vc(flag_options)
// ws.gen_vc = new_gen_vc(flag_options)
}
pub fn (mut ws WebhookServer) init() {
//ws.init_once()
// ws.init_once()
}
pub fn (mut ws WebhookServer) index() {
@ -185,18 +180,17 @@ pub fn (mut ws WebhookServer) genhook() {
pub fn (ws &WebhookServer) reset() {
}
// parse flags to FlagOptions struct
fn parse_flags(mut fp flag.FlagParser) FlagOptions {
return FlagOptions{
serve : fp.bool('serve', 0, false, 'run in webhook server mode')
work_dir : fp.string('work-dir', 0, work_dir, 'gen_vc working directory')
purge : fp.bool('purge', 0, false, 'force purge the local repositories')
port : fp.int('port', 0, server_port, 'port for web server to listen on')
log_to : fp.string('log-to', 0, log_to, 'log to is \'file\' or \'terminal\'')
log_file : fp.string('log-file', 0, log_file, 'log file to use when log-to is \'file\'')
dry_run : fp.bool('dry-run', 0, dry_run, 'when specified dont push anything to remote repo')
force : fp.bool('force', 0, false, 'force update even if already up to date')
serve: fp.bool('serve', 0, false, 'run in webhook server mode')
work_dir: fp.string('work-dir', 0, work_dir, 'gen_vc working directory')
purge: fp.bool('purge', 0, false, 'force purge the local repositories')
port: fp.int('port', 0, server_port, 'port for web server to listen on')
log_to: fp.string('log-to', 0, log_to, "log to is \'file\' or \'terminal\'")
log_file: fp.string('log-file', 0, log_file, "log file to use when log-to is \'file\'")
dry_run: fp.bool('dry-run', 0, dry_run, 'when specified dont push anything to remote repo')
force: fp.bool('force', 0, false, 'force update even if already up to date')
}
}
@ -212,11 +206,12 @@ fn (mut gen_vc GenVC) init() {
fn (mut gen_vc GenVC) generate() {
// set errors to false
gen_vc.gen_error = false
// check if gen_vc dir exists
if !os.is_dir(gen_vc.options.work_dir) {
// try create
os.mkdir(gen_vc.options.work_dir) or { panic(err) }
os.mkdir(gen_vc.options.work_dir) or {
panic(err)
}
// still doesn't exist... we have a problem
if !os.is_dir(gen_vc.options.work_dir) {
gen_vc.logger.error('error creating directory: $gen_vc.options.work_dir')
@ -224,10 +219,8 @@ fn (mut gen_vc GenVC) generate() {
return
}
}
// cd to gen_vc dir
os.chdir(gen_vc.options.work_dir)
// if we are not running with the --serve flag (webhook server)
// rather than deleting and re-downloading the repo each time
// first check to see if the local v repo is behind master
@ -242,60 +235,49 @@ fn (mut gen_vc GenVC) generate() {
return
}
}
// delete repos
gen_vc.purge_repos()
// clone repos
gen_vc.cmd_exec('git clone --depth 1 https://$git_repo_v $git_repo_dir_v')
gen_vc.cmd_exec('git clone --depth 1 https://$git_repo_vc $git_repo_dir_vc')
// get output of git log -1 (last commit)
git_log_v := gen_vc.cmd_exec('git -C $git_repo_dir_v log -1 --format="commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
git_log_vc := gen_vc.cmd_exec('git -C $git_repo_dir_vc log -1 --format="Commit %H%nDate: %ci%nDate Unix: %ct%nSubject: %s"')
// date of last commit in each repo
ts_v := git_log_v.find_between('Date:', '\n').trim_space()
ts_vc := git_log_vc.find_between('Date:', '\n').trim_space()
// parse time as string to time.Time
last_commit_time_v := time.parse(ts_v) or {
last_commit_time_v := time.parse(ts_v) or {
panic(err)
}
last_commit_time_vc := time.parse(ts_vc) or {
panic(err)
}
// git dates are in users local timezone and v time.parse does not parse
// timezones at the moment, so for now get unix timestamp from output also
t_unix_v := git_log_v.find_between('Date Unix:', '\n').trim_space().int()
t_unix_vc := git_log_vc.find_between('Date Unix:', '\n').trim_space().int()
// last commit hash in v repo
last_commit_hash_v := git_log_v.find_between('commit', '\n').trim_space()
last_commit_hash_v_short := last_commit_hash_v[..7]
// subject
last_commit_subject := git_log_v.find_between('Subject:', '\n').trim_space().replace('"', '\\"')
last_commit_subject := git_log_v.find_between('Subject:', '\n').trim_space().replace('"',
'\\"')
// log some info
gen_vc.logger.debug('last commit time ($git_repo_v): ' + last_commit_time_v.format_ss())
gen_vc.logger.debug('last commit time ($git_repo_vc): ' + last_commit_time_vc.format_ss())
gen_vc.logger.debug('last commit hash ($git_repo_v): $last_commit_hash_v')
gen_vc.logger.debug('last commit subject ($git_repo_v): $last_commit_subject')
// if vc repo already has a newer commit than the v repo, assume it's up to date
if t_unix_vc >= t_unix_v && !gen_vc.options.force {
gen_vc.logger.warn('vc repository is already up to date.')
return
}
// try build v for current os (linux in this case)
gen_vc.cmd_exec('make -C $git_repo_dir_v')
v_exec := '$git_repo_dir_v/v'
// check if make was successful
gen_vc.assert_file_exists_and_is_not_too_short(v_exec, err_msg_make)
// build v.c for each os
for os_name in vc_build_oses {
vc_suffix := if os_name == 'nix' { '' } else { '_${os_name[..3]}' }
@ -312,7 +294,6 @@ fn (mut gen_vc GenVC) generate() {
// add new .c file to local vc repo
gen_vc.cmd_exec('git -C $git_repo_dir_vc add $c_file')
}
// check if the vc repo actually changed
git_status := gen_vc.cmd_exec('git -C $git_repo_dir_vc status')
if git_status.contains('nothing to commit') {
@ -344,7 +325,7 @@ fn (mut gen_vc GenVC) command_execute(cmd string, dry bool) string {
gen_vc.logger.info('cmd: $cmd')
r := os.exec(cmd) or {
gen_vc.logger.error('$err_msg_cmd_x: "$cmd" could not start.')
gen_vc.logger.error( err )
gen_vc.logger.error(err)
// something went wrong, better start fresh next time
gen_vc.purge_repos()
gen_vc.gen_error = true
@ -383,7 +364,7 @@ fn (mut gen_vc GenVC) purge_repos() {
}
// check if file size is too short
fn (mut gen_vc GenVC) assert_file_exists_and_is_not_too_short(f string, emsg string){
fn (mut gen_vc GenVC) assert_file_exists_and_is_not_too_short(f, emsg string) {
if !os.exists(f) {
gen_vc.logger.error('$err_msg_build: $emsg .')
gen_vc.gen_error = true
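
Much of the gen_vc.v hunk above is vfmt tidying the tool's flag handling. As a self-contained reference, this is a minimal sketch of that flag-parsing pattern; the tool name, defaults and flag set here are illustrative, not part of the commit.

module main

import flag
import os

fn main() {
	mut fp := flag.new_flag_parser(os.args)
	fp.application('mytool') // illustrative name
	fp.version('0.0.1')
	fp.description('Example of the flag-parsing pattern used by gen_vc.v.')
	fp.skip_executable()
	show_help := fp.bool('help', 0, false, 'Show this help screen')
	dry_run := fp.bool('dry-run', 0, false, 'Do not push anything to the remote repo')
	work_dir := fp.string('work-dir', 0, '/tmp/mytool', 'Working directory')
	if show_help {
		println(fp.usage())
		exit(0)
	}
	fp.finalize() or {
		eprintln(err)
		println(fp.usage())
		return
	}
	println('work_dir: $work_dir, dry_run: $dry_run')
}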

View File

@ -5,7 +5,7 @@ import flag
import scripting
const (
remote_v_repo_url = 'https://github.com/vlang/v'
remote_v_repo_url = 'https://github.com/vlang/v'
remote_vc_repo_url = 'https://github.com/vlang/vc'
)
@ -34,35 +34,35 @@ pub fn validate_commit_exists(commit string) {
}
}
pub fn line_to_timestamp_and_commit(line string) (int,string) {
pub fn line_to_timestamp_and_commit(line string) (int, string) {
parts := line.split(' ')
return parts[0].int(),parts[1]
return parts[0].int(), parts[1]
}
pub fn normalized_workpath_for_commit(workdir string, commit string) string {
pub fn normalized_workpath_for_commit(workdir, commit string) string {
nc := 'v_at_' + commit.replace('^', '_').replace('-', '_').replace('/', '_')
return os.real_path(workdir + os.path_separator + nc)
}
pub fn prepare_vc_source(vcdir string, cdir string, commit string) (string,string) {
pub fn prepare_vc_source(vcdir, cdir, commit string) (string, string) {
scripting.chdir(cdir)
// Building a historic v with the latest vc is not always possible ...
// It is more likely, that the vc *at the time of the v commit*,
// or slightly before that time will be able to build the historic v:
vline := scripting.run('git rev-list -n1 --timestamp "$commit" ')
v_timestamp,v_commithash := vgit.line_to_timestamp_and_commit(vline)
vgit.check_v_commit_timestamp_before_self_rebuilding(v_timestamp)
v_timestamp, v_commithash := line_to_timestamp_and_commit(vline)
check_v_commit_timestamp_before_self_rebuilding(v_timestamp)
scripting.chdir(vcdir)
scripting.run('git checkout master')
vcbefore := scripting.run('git rev-list HEAD -n1 --timestamp --before=$v_timestamp ')
_,vccommit_before := vgit.line_to_timestamp_and_commit(vcbefore)
_, vccommit_before := line_to_timestamp_and_commit(vcbefore)
scripting.run('git checkout "$vccommit_before" ')
scripting.run('wc *.c')
scripting.chdir(cdir)
return v_commithash,vccommit_before
return v_commithash, vccommit_before
}
pub fn clone_or_pull( remote_git_url string, local_worktree_path string ) {
pub fn clone_or_pull(remote_git_url, local_worktree_path string) {
// NB: after clone_or_pull, the current repo branch is === HEAD === master
if os.is_dir(local_worktree_path) && os.is_dir(os.join_path(local_worktree_path, '.git')) {
// Already existing ... Just pulling in this case is faster usually.
@ -76,20 +76,20 @@ pub fn clone_or_pull( remote_git_url string, local_worktree_path string ) {
pub struct VGitContext {
pub:
cc string = 'cc' // what compiler to use
workdir string = '/tmp' // the base working folder
commit_v string = 'master' // the commit-ish that needs to be prepared
path_v string // where is the local working copy v repo
path_vc string // where is the local working copy vc repo
v_repo_url string // the remote v repo URL
vc_repo_url string // the remote vc repo URL
cc string = 'cc' // what compiler to use
workdir string = '/tmp' // the base working folder
commit_v string = 'master' // the commit-ish that needs to be prepared
path_v string // where is the local working copy v repo
path_vc string // where is the local working copy vc repo
v_repo_url string // the remote v repo URL
vc_repo_url string // the remote vc repo URL
pub mut:
// these will be filled by vgitcontext.compile_oldv_if_needed()
commit_v__hash string // the git commit of the v repo that should be prepared
commit_vc_hash string // the git commit of the vc repo, corresponding to commit_v__hash
vexename string // v or v.exe
vexepath string // the full absolute path to the prepared v/v.exe
vvlocation string // v.v or compiler/ or cmd/v, depending on v version
vexename string // v or v.exe
vexepath string // the full absolute path to the prepared v/v.exe
vvlocation string // v.v or compiler/ or cmd/v, depending on v version
}
pub fn (mut vgit_context VGitContext) compile_oldv_if_needed() {
@ -100,18 +100,17 @@ pub fn (mut vgit_context VGitContext) compile_oldv_if_needed() {
if 'windows' == os.user_os() {
command_for_building_v_from_c_source = '$vgit_context.cc -std=c99 -municode -w -o cv.exe "$vgit_context.path_vc/v_win.c" '
command_for_selfbuilding = './cv.exe -o $vgit_context.vexename {SOURCE}'
}
else {
} else {
command_for_building_v_from_c_source = '$vgit_context.cc -std=gnu11 -w -o cv "$vgit_context.path_vc/v.c" -lm -lpthread'
command_for_selfbuilding = './cv -o $vgit_context.vexename {SOURCE}'
}
scripting.chdir(vgit_context.workdir)
clone_or_pull( vgit_context.v_repo_url, vgit_context.path_v )
clone_or_pull( vgit_context.vc_repo_url, vgit_context.path_vc )
clone_or_pull(vgit_context.v_repo_url, vgit_context.path_v)
clone_or_pull(vgit_context.vc_repo_url, vgit_context.path_vc)
scripting.chdir(vgit_context.path_v)
scripting.run('git checkout $vgit_context.commit_v')
v_commithash,vccommit_before := vgit.prepare_vc_source(vgit_context.path_vc, vgit_context.path_v, vgit_context.commit_v)
v_commithash, vccommit_before := prepare_vc_source(vgit_context.path_vc, vgit_context.path_v,
vgit_context.commit_v)
vgit_context.commit_v__hash = v_commithash
vgit_context.commit_vc_hash = vccommit_before
if os.exists('cmd/v') {
@ -128,25 +127,24 @@ pub fn (mut vgit_context VGitContext) compile_oldv_if_needed() {
scripting.run(command_for_building_v_from_c_source)
build_cmd := command_for_selfbuilding.replace('{SOURCE}', vgit_context.vvlocation)
scripting.run(build_cmd)
// At this point, there exists a file vgit_context.vexepath
// which should be a valid working V executable.
}
pub struct VGitOptions {
pub mut:
workdir string // the working folder (typically /tmp), where the tool will write
v_repo_url string // the url of the V repository. It can be a local folder path, if you want to eliminate network operations...
vc_repo_url string // the url of the vc repository. It can be a local folder path, if you want to eliminate network operations...
show_help bool // whether to show the usage screen
verbose bool // should the tool be much more verbose
workdir string // the working folder (typically /tmp), where the tool will write
v_repo_url string // the url of the V repository. It can be a local folder path, if you want to eliminate network operations...
vc_repo_url string // the url of the vc repository. It can be a local folder path, if you want to eliminate network operations...
show_help bool // whether to show the usage screen
verbose bool // should the tool be much more verbose
}
pub fn add_common_tool_options(mut context VGitOptions, mut fp flag.FlagParser) []string {
tdir := os.temp_dir()
context.workdir = os.real_path(fp.string('workdir', `w`, tdir, 'A writable base folder. Default: $tdir'))
context.v_repo_url = fp.string('vrepo', 0, vgit.remote_v_repo_url, 'The url of the V repository. You can clone it locally too. See also --vcrepo below.')
context.vc_repo_url = fp.string('vcrepo', 0, vgit.remote_vc_repo_url, 'The url of the vc repository. You can clone it
context.v_repo_url = fp.string('vrepo', 0, remote_v_repo_url, 'The url of the V repository. You can clone it locally too. See also --vcrepo below.')
context.vc_repo_url = fp.string('vcrepo', 0, remote_vc_repo_url, 'The url of the vc repository. You can clone it
${flag.space}beforehand, and then just give the local folder
${flag.space}path here. That will eliminate the network ops
${flag.space}done by this tool, which is useful, if you want
@ -154,31 +152,25 @@ ${flag.space}to script it/run it in a restrictive vps/docker.
')
context.show_help = fp.bool('help', `h`, false, 'Show this help screen.')
context.verbose = fp.bool('verbose', `v`, false, 'Be more verbose.')
if context.show_help {
println(fp.usage())
exit(0)
}
if context.verbose {
scripting.set_verbose(true)
}
if os.is_dir(context.v_repo_url) {
context.v_repo_url = os.real_path( context.v_repo_url )
context.v_repo_url = os.real_path(context.v_repo_url)
}
if os.is_dir(context.vc_repo_url) {
context.vc_repo_url = os.real_path( context.vc_repo_url )
context.vc_repo_url = os.real_path(context.vc_repo_url)
}
commits := fp.finalize() or {
eprintln('Error: ' + err)
exit(1)
}
for commit in commits {
vgit.validate_commit_exists(commit)
validate_commit_exists(commit)
}
return commits
}
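
line_to_timestamp_and_commit above relies on git rev-list --timestamp printing the commit timestamp before the hash on a single line. A small sketch with a made-up line shows the shape of the parsing; the timestamp and hash values are illustrative only.

module main

fn line_to_timestamp_and_commit(line string) (int, string) {
	parts := line.split(' ')
	return parts[0].int(), parts[1]
}

fn main() {
	// illustrative rev-list output line, not a real commit
	line := '1602712749 0123456789abcdef0123456789abcdef01234567'
	ts, hash := line_to_timestamp_and_commit(line)
	println('timestamp: $ts')
	println('commit:    $hash')
}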

View File

@ -4,7 +4,7 @@ import scripting
import vgit
const (
tool_version = '0.0.3'
tool_version = '0.0.3'
tool_description = ' Checkout an old V and compile it as it was on specific commit.
This tool is useful, when you want to discover when something broke.
It is also useful, when you just want to experiment with an older historic V.
@ -30,25 +30,25 @@ const (
struct Context {
mut:
vgo vgit.VGitOptions
commit_v string='master' // the commit from which you want to produce a working v compiler (this may be a commit-ish too)
commit_vc string='master' // this will be derived from commit_v
commit_v string = 'master' // the commit from which you want to produce a working v compiler (this may be a commit-ish too)
commit_vc string = 'master' // this will be derived from commit_v
commit_v_hash string // this will be filled from the commit-ish commit_v using rev-list. It IS a commit hash.
path_v string // the full path to the v folder inside workdir.
path_vc string // the full path to the vc folder inside workdir.
cmd_to_run string // the command that you want to run *in* the oldv repo
cc string='cc' // the C compiler to use for bootstrapping.
cc string = 'cc' // the C compiler to use for bootstrapping.
cleanup bool // should the tool run a cleanup first
}
fn (mut c Context) compile_oldv_if_needed() {
mut vgit_context := vgit.VGitContext{
workdir: c.vgo.workdir
v_repo_url: c.vgo.v_repo_url
workdir: c.vgo.workdir
v_repo_url: c.vgo.v_repo_url
vc_repo_url: c.vgo.vc_repo_url
cc: c.cc
commit_v: c.commit_v
path_v: c.path_v
path_vc: c.path_vc
cc: c.cc
commit_v: c.commit_v
path_v: c.path_v
path_vc: c.path_vc
}
vgit_context.compile_oldv_if_needed()
c.commit_v_hash = vgit_context.commit_v__hash
@ -69,10 +69,8 @@ fn main() {
fp.arguments_description('VCOMMIT')
fp.skip_executable()
fp.limit_free_args(1, 1)
context.cleanup = fp.bool('clean', 0, true, 'Clean before running (slower).')
context.cmd_to_run = fp.string('command', `c`, '', 'Command to run in the old V repo.\n')
commits := vgit.add_common_tool_options(mut context.vgo, mut fp)
if commits.len > 0 {
context.commit_v = commits[0]
@ -83,7 +81,7 @@ fn main() {
context.path_v = vgit.normalized_workpath_for_commit(context.vgo.workdir, context.commit_v)
context.path_vc = vgit.normalized_workpath_for_commit(context.vgo.workdir, 'vc')
if !os.is_dir(context.vgo.workdir) {
eprintln('Work folder: ${context.vgo.workdir} , does not exist.')
eprintln('Work folder: $context.vgo.workdir , does not exist.')
exit(2)
}
ecc := os.getenv('CC')
@ -94,9 +92,7 @@ fn main() {
scripting.rmrf(context.path_v)
scripting.rmrf(context.path_vc)
}
context.compile_oldv_if_needed()
scripting.chdir(context.path_v)
scripting.cprintln('# v commit hash: $context.commit_v_hash')
scripting.cprintln('# checkout folder: $context.path_v')
@ -108,5 +104,4 @@ fn main() {
println(cmdres.output)
exit(cmdres.exit_code)
}
}

View File

@ -4,11 +4,11 @@ import scripting
import vgit
const (
tool_version = '0.0.5'
tool_description = ' Compares V executable size and performance,
tool_version = '0.0.5'
tool_description = " Compares V executable size and performance,
between 2 commits from V\'s local git history.
When only one commit is given, it is compared to master.
'
"
)
struct Context {
@ -44,64 +44,46 @@ fn (c Context) compare_versions() {
c.prepare_v(c.b, c.commit_before)
c.prepare_v(c.a, c.commit_after)
scripting.chdir(c.vgo.workdir)
if c.vflags.len > 0 {
os.setenv('VFLAGS', c.vflags, true)
}
// The first is the baseline, against which all the others will be compared.
// It is the fastest, since hello_world.v has only a single println in it,
mut perf_files := []string{}
perf_files << c.compare_v_performance('source_hello', [
'vprod @DEBUG@ -o source.c examples/hello_world.v',
'vprod -o source.c examples/hello_world.v',
'v @DEBUG@ -o source.c examples/hello_world.v',
'v -o source.c examples/hello_world.v',
])
perf_files << c.compare_v_performance('source_v', [
'vprod @DEBUG@ -o source.c @COMPILER@',
'vprod -o source.c @COMPILER@',
'v @DEBUG@ -o source.c @COMPILER@',
'v -o source.c @COMPILER@',
])
perf_files << c.compare_v_performance('binary_hello', [
'vprod -o hello examples/hello_world.v',
'v -o hello examples/hello_world.v',
])
perf_files << c.compare_v_performance('binary_v', [
'vprod -o binary @COMPILER@',
'v -o binary @COMPILER@',
])
perf_files <<
c.compare_v_performance('source_hello', ['vprod @DEBUG@ -o source.c examples/hello_world.v', 'vprod -o source.c examples/hello_world.v', 'v @DEBUG@ -o source.c examples/hello_world.v', 'v -o source.c examples/hello_world.v'])
perf_files <<
c.compare_v_performance('source_v', ['vprod @DEBUG@ -o source.c @COMPILER@', 'vprod -o source.c @COMPILER@', 'v @DEBUG@ -o source.c @COMPILER@', 'v -o source.c @COMPILER@'])
perf_files <<
c.compare_v_performance('binary_hello', ['vprod -o hello examples/hello_world.v', 'v -o hello examples/hello_world.v'])
perf_files <<
c.compare_v_performance('binary_v', ['vprod -o binary @COMPILER@', 'v -o binary @COMPILER@'])
println('All performance files:')
for f in perf_files {
println(' $f')
}
}
fn (c &Context) prepare_v(cdir string, commit string) {
fn (c &Context) prepare_v(cdir, commit string) {
mut cc := os.getenv('CC')
if cc == '' {
cc = 'cc'
}
mut vgit_context := vgit.VGitContext{
cc: cc
commit_v: commit
path_v: cdir
path_vc: c.vc
workdir: c.vgo.workdir
v_repo_url: c.vgo.v_repo_url
cc: cc
commit_v: commit
path_v: cdir
path_vc: c.vc
workdir: c.vgo.workdir
v_repo_url: c.vgo.v_repo_url
vc_repo_url: c.vgo.vc_repo_url
}
vgit_context.compile_oldv_if_needed()
scripting.chdir(cdir)
println('Making a v compiler in $cdir')
scripting.run('./v -cc ${cc} -o v $vgit_context.vvlocation')
scripting.run('./v -cc $cc -o v $vgit_context.vvlocation')
println('Making a vprod compiler in $cdir')
scripting.run('./v -cc ${cc} -prod -o vprod $vgit_context.vvlocation')
scripting.run('./v -cc $cc -prod -o vprod $vgit_context.vvlocation')
println('Stripping and compressing cv v and vprod binaries in $cdir')
scripting.run('cp cv cv_stripped')
scripting.run('cp v v_stripped')
@ -117,17 +99,19 @@ fn (c &Context) prepare_v(cdir string, commit string) {
scripting.show_sizes_of_files(['$cdir/v', '$cdir/v_stripped', '$cdir/v_stripped_upxed'])
scripting.show_sizes_of_files(['$cdir/vprod', '$cdir/vprod_stripped', '$cdir/vprod_stripped_upxed'])
vversion := scripting.run('$cdir/v -version')
vcommit := scripting.run('git rev-parse --short --verify HEAD')
println('V version is: ${vversion} , local source commit: ${vcommit}')
vcommit := scripting.run('git rev-parse --short --verify HEAD')
println('V version is: $vversion , local source commit: $vcommit')
if vgit_context.vvlocation == 'cmd/v' {
if os.exists('vlib/v/ast/ast.v') {
println('Source lines of the compiler: ' + scripting.run('find cmd/v/ vlib/v/ -name "*.v" | grep -v /tests/ | xargs wc | tail -n -1'))
println('Source lines of the compiler: ' +
scripting.run('find cmd/v/ vlib/v/ -name "*.v" | grep -v /tests/ | xargs wc | tail -n -1'))
} else {
println('Source lines of the compiler: ' + scripting.run('wc cmd/v/*.v vlib/compiler/*.v | tail -n -1'))
println('Source lines of the compiler: ' +
scripting.run('wc cmd/v/*.v vlib/compiler/*.v | tail -n -1'))
}
} else if vgit_context.vvlocation == 'v.v' {
println('Source lines of the compiler: ' + scripting.run('wc v.v vlib/compiler/*.v | tail -n -1'))
}else{
} else {
println('Source lines of the compiler: ' + scripting.run('wc compiler/*.v | tail -n -1'))
}
}
@ -147,8 +131,8 @@ fn (c Context) compare_v_performance(label string, commands []string) string {
} else {
source_location_b = if os.exists('$c.b/v.v') { 'v.v ' } else { 'compiler/ ' }
}
timestamp_a,_ := vgit.line_to_timestamp_and_commit(scripting.run('cd $c.a/ ; git rev-list -n1 --timestamp HEAD'))
timestamp_b,_ := vgit.line_to_timestamp_and_commit(scripting.run('cd $c.b/ ; git rev-list -n1 --timestamp HEAD'))
timestamp_a, _ := vgit.line_to_timestamp_and_commit(scripting.run('cd $c.a/ ; git rev-list -n1 --timestamp HEAD'))
timestamp_b, _ := vgit.line_to_timestamp_and_commit(scripting.run('cd $c.b/ ; git rev-list -n1 --timestamp HEAD'))
debug_option_a := if timestamp_a > 1570877641 { '-cg ' } else { '-debug ' }
debug_option_b := if timestamp_b > 1570877641 { '-cg ' } else { '-debug ' }
mut hyperfine_commands_arguments := []string{}
@ -156,14 +140,17 @@ fn (c Context) compare_v_performance(label string, commands []string) string {
println(cmd)
}
for cmd in commands {
hyperfine_commands_arguments << " \'cd ${c.b:-34s} ; ./$cmd \' ".replace_each(['@COMPILER@', source_location_b, '@DEBUG@', debug_option_b])
hyperfine_commands_arguments <<
" \'cd ${c.b:-34s} ; ./$cmd \' ".replace_each(['@COMPILER@', source_location_b, '@DEBUG@', debug_option_b])
}
for cmd in commands {
hyperfine_commands_arguments << " \'cd ${c.a:-34s} ; ./$cmd \' ".replace_each(['@COMPILER@', source_location_a, '@DEBUG@', debug_option_a])
hyperfine_commands_arguments <<
" \'cd ${c.a:-34s} ; ./$cmd \' ".replace_each(['@COMPILER@', source_location_a, '@DEBUG@', debug_option_a])
}
// /////////////////////////////////////////////////////////////////////////////
cmd_stats_file := os.real_path([c.vgo.workdir, 'v_performance_stats_${label}.json'].join(os.path_separator))
comparison_cmd := 'hyperfine $c.hyperfineopts ' + '--export-json ${cmd_stats_file} ' + '--time-unit millisecond ' + '--style full --warmup $c.warmups ' + hyperfine_commands_arguments.join(' ')
comparison_cmd := 'hyperfine $c.hyperfineopts ' + '--export-json $cmd_stats_file ' + '--time-unit millisecond ' +
'--style full --warmup $c.warmups ' + hyperfine_commands_arguments.join(' ')
// /////////////////////////////////////////////////////////////////////////////
if c.vgo.verbose {
println(comparison_cmd)
@ -175,7 +162,8 @@ fn (c Context) compare_v_performance(label string, commands []string) string {
}
fn main() {
scripting.used_tools_must_exist(['cp', 'rm', 'strip', 'make', 'git', 'upx', 'cc', 'wc', 'tail', 'find', 'xargs', 'hyperfine'])
scripting.used_tools_must_exist(['cp', 'rm', 'strip', 'make', 'git', 'upx', 'cc', 'wc', 'tail',
'find', 'xargs', 'hyperfine'])
mut context := new_context()
mut fp := flag.new_flag_parser(os.args)
fp.application(os.file_name(os.executable()))
@ -184,12 +172,10 @@ fn main() {
fp.arguments_description('COMMIT_BEFORE [COMMIT_AFTER]')
fp.skip_executable()
fp.limit_free_args(1, 2)
context.vflags = fp.string('vflags', 0, '', 'Additional options to pass to the v commands, for example "-cc tcc"')
context.hyperfineopts = fp.string('hyperfine_options', 0, '',
'Additional options passed to hyperfine.
context.hyperfineopts = fp.string('hyperfine_options', 0, '', 'Additional options passed to hyperfine.
${flag.space}For example on linux, you may want to pass:
${flag.space}--hyperfine_options "--prepare \'sync; echo 3 | sudo tee /proc/sys/vm/drop_caches\'"
$flag.space--hyperfine_options "--prepare \'sync; echo 3 | sudo tee /proc/sys/vm/drop_caches\'"
')
commits := vgit.add_common_tool_options(mut context.vgo, mut fp)
context.commit_before = commits[0]
@ -204,6 +190,5 @@ ${flag.space}--hyperfine_options "--prepare \'sync; echo 3 | sudo tee /proc/sys/
eprintln(msg)
exit(2)
}
context.compare_versions()
}
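
The @COMPILER@ and @DEBUG@ placeholders in the command lists above are substituted via replace_each before the commands are handed to hyperfine. A tiny illustrative sketch, with made-up substitution values:

module main

fn main() {
	template := 'vprod @DEBUG@ -o source.c @COMPILER@'
	// fill in a compiler source location and a debug flag, as compare_v_performance does
	cmd := template.replace_each(['@COMPILER@', 'cmd/v ', '@DEBUG@', '-cg '])
	println(cmd) // vprod -cg  -o source.c cmd/v
}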

View File

@ -2,6 +2,7 @@ module main
import os
import term
// //////////////////////////////////////////////////////////////////
// / This file will get compiled as part of the main program,
// / for a _test.v file.
@ -14,29 +15,26 @@ import term
fn cb_assertion_failed(i &VAssertMetaInfo) {
use_color := term.can_show_color_on_stderr()
use_relative_paths := match os.getenv('VERROR_PATHS') {
'absolute' {
false
} else {
true
}
'absolute' { false }
else { true }
}
final_filename := if use_relative_paths { i.fpath } else { os.real_path(i.fpath) }
final_funcname := i.fn_name.replace('main.', '').replace('__', '.')
final_src := if use_color { term.bold(i.src) } else { i.src }
eprintln('')
eprintln('$final_filename:${i.line_nr+1}: failed assert in function ${final_funcname}')
eprintln('Source : `${final_src}`')
eprintln('$final_filename:${i.line_nr+1}: failed assert in function $final_funcname')
eprintln('Source : `$final_src`')
if i.op.len > 0 && i.op != 'call' {
mut slvalue := '${i.lvalue}'
mut srvalue := '${i.rvalue}'
lpostfix := if slvalue == i.llabel { '.' } else { '<= `${i.llabel}`' }
rpostfix := if srvalue == i.rlabel { '.' } else { '<= `${i.rlabel}`' }
mut slvalue := '$i.lvalue'
mut srvalue := '$i.rvalue'
lpostfix := if slvalue == i.llabel { '.' } else { '<= `$i.llabel`' }
rpostfix := if srvalue == i.rlabel { '.' } else { '<= `$i.rlabel`' }
if use_color {
slvalue = term.bold(term.yellow(slvalue))
srvalue = term.bold(term.yellow(srvalue))
}
eprintln(' left value: ${slvalue} ${lpostfix}')
eprintln(' right value: ${srvalue} ${rpostfix}')
eprintln(' left value: $slvalue $lpostfix')
eprintln(' right value: $srvalue $rpostfix')
}
}

View File

@ -1,4 +1,5 @@
module main
// /////////////////////////////////////////////////////////////////////
// / This file will get compiled as a part of the same module,
// / in which a given _test.v file is, when v is given -stats argument
@ -77,7 +78,8 @@ fn (b &BenchedTests) fn_name() string {
// Called at the end of the test program produced by `v -stats file_test.v`
fn (mut b BenchedTests) end_testing() {
b.bench.stop()
println(inner_indent + b.bench.total_message('running V tests in "' + os.file_name(b.test_suit_file) + '"'))
println(inner_indent + b.bench.total_message('running V tests in "' + os.file_name(b.test_suit_file) +
'"'))
}
// ///////////////////////////////////////////////////////////////////

View File

@ -5,37 +5,19 @@ import testing
const (
known_failing_exceptions = [
'examples/vweb/vweb_example.v',
'cmd/tools/gen_vc.v',
'cmd/tools/modules/vgit/vgit.v', // generics
'cmd/tools/preludes/live_main.v',
'cmd/tools/preludes/live_shared.v',
'cmd/tools/preludes/tests_assertions.v',
'cmd/tools/preludes/tests_with_stats.v',
'cmd/tools/performance_compare.v', // generics
'cmd/tools/oldv.v', // generics
'tutorials/code/blog/article.v',
'tutorials/code/blog/blog.v',
'vlib/arrays/arrays.v',
'vlib/arrays/arrays_test.v',
'vlib/builtin/js/hashmap.v',
'vlib/v/tests/fn_variadic_test.v',
'vlib/v/tests/generic_test.v',
'vlib/crypto/aes/aes.v',
'vlib/crypto/aes/aes_cbc.v',
'vlib/crypto/aes/block_generic.v',
'vlib/crypto/aes/const.v',
'vlib/crypto/aes/cypher_generic.v',
'vlib/crypto/rc4/rc4.v',
'vlib/eventbus/eventbus_test.v',
'vlib/os/bare/bare_example_linux.v',
'vlib/szip/szip.v',
'vlib/uiold/examples/users_gui/users.v',
'vlib/vweb/assets/assets.v',
'vlib/vweb/vweb.v',
'vlib/v/gen/js/tests/life.v',
'vlib/builtin/bare/linuxsys_bare.v',
'vlib/os/os.v',
'vlib/v/tests/generics_test.v', // struct Repo<T, U> { => struct Repo {
'vlib/crypto/aes/aes.v', // pub fn (c &AesCipher) encrypt(mut dst, mut src []byte) {
'vlib/crypto/aes/block_generic.v', // fn expand_key_generic(key []byte, mut enc, mut dec []u32) {
'vlib/crypto/aes/const.v', // multiple narrow columns of []string turned to 1 long single column, otherwise works
'vlib/crypto/rc4/rc4.v', // pub fn (mut c Cipher) xor_key_stream(mut dst, mut src []byte) {
'vlib/vweb/vweb.v', // $for method in T.methods { => $for method in T(methods) { , `return // xx` => parse expr error
'vlib/v/gen/js/tests/life.v', // error: unexpected `,`, expecting ), on JS.setInterval(fn () { show(game) game = step(game) }, 500)
'vlib/builtin/js/builtin.v', // JS.console.error(s) => JS.error(s), JS.process.exit(c) => JS.exit(c)
'vlib/builtin/js/jsfns_node.js.v',
'vlib/builtin/js/jsfns.js.v',
'vlib/builtin/js/jsfns_browser.js.v',
'vlib/builtin/bare/linuxsys_bare.v', // error: expr(): bad token `asm`, on `asm {}`
'vlib/os/os.v', // embedded comments, mib := [1/* CTL_KERN */, 14/* KERN_PROC */, 12/* KERN_PROC_PATHNAME */, -1] => comment the rest of the line
]
)
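
Several entries above cite constructs that vfmt could not yet round-trip. The one noted for vlib/vweb/vweb.v is V's compile-time iteration over a type's methods, which looks roughly like the sketch below; the App struct and its methods are made up for illustration and are not code from vweb.

module main

struct App {
}

fn (a App) index() string {
	return 'index'
}

fn (a App) about() string {
	return 'about'
}

fn main() {
	// compile-time loop over App's methods, the construct referenced in the vweb.v note above
	$for method in App.methods {
		println(method.name)
	}
}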

View File

@ -9,7 +9,7 @@ const (
struct App {
pub mut:
vweb vweb.Context // TODO embed
cnt int
cnt int
}
fn main() {
@ -31,9 +31,9 @@ pub fn (mut app App) json_endpoint() vweb.Result {
pub fn (mut app App) index() vweb.Result {
app.cnt++
show := true
//app.vweb.text('Hello world from vweb')
// app.vweb.text('Hello world from vweb')
hello := 'Hello world from vweb'
numbers := [1,2,3]
numbers := [1, 2, 3]
return $vweb.html()
}
@ -42,6 +42,9 @@ pub fn (mut app App) text() vweb.Result {
}
pub fn (mut app App) cookie() vweb.Result {
app.vweb.set_cookie(name:'cookie', value:'test')
app.vweb.set_cookie({
name: 'cookie'
value: 'test'
})
return app.vweb.text('Headers: $app.vweb.headers')
}

View File

@ -26,7 +26,6 @@ pub fn (app &App) index_html() vweb.Result {
return $vweb.html()
}
*/
pub fn (app &App) index() vweb.Result {
articles := app.find_all_articles()
return $vweb.html()
@ -53,7 +52,7 @@ pub fn (mut app App) new_article() vweb.Result {
app.vweb.text('Empty text/title')
return vweb.Result{}
}
article := Article {
article := Article{
title: title
text: text
}

View File

@ -5,15 +5,16 @@ module arrays
// - idx_min / idx_max - return the index of the first minimum / maximum
// - shuffle - randomize array items order in place (allowing exit after n items)
// - merge - combine two sorted arrays and maintain sorted order
import rand
// min returns the minimum
[direct_array_access]
pub fn min<T>(a []T) T {
if a.len==0 { panic('.min called on an empty array') }
if a.len == 0 {
panic('.min called on an empty array')
}
mut val := a[0]
for i in 0..a.len {
for i in 0 .. a.len {
if a[i] < val {
val = a[i]
}
@ -24,9 +25,11 @@ pub fn min<T>(a []T) T {
// max returns the maximum
[direct_array_access]
pub fn max<T>(a []T) T {
if a.len==0 { panic('.max called on an empty array') }
if a.len == 0 {
panic('.max called on an empty array')
}
mut val := a[0]
for i in 0..a.len {
for i in 0 .. a.len {
if a[i] > val {
val = a[i]
}
@ -37,10 +40,12 @@ pub fn max<T>(a []T) T {
// idx_min returns the index of the first minimum
[direct_array_access]
pub fn idx_min<T>(a []T) int {
if a.len==0 { panic('.idxmin called on an empty array') }
if a.len == 0 {
panic('.idxmin called on an empty array')
}
mut idx := 0
mut val := a[0]
for i in 0..a.len {
for i in 0 .. a.len {
if a[i] < val {
val = a[i]
idx = i
@ -52,10 +57,12 @@ pub fn idx_min<T>(a []T) int {
// idx_max returns the index of the first maximum
[direct_array_access]
pub fn idx_max<T>(a []T) int {
if a.len==0 { panic('.idxmax called on an empty array') }
if a.len == 0 {
panic('.idxmax called on an empty array')
}
mut idx := 0
mut val := a[0]
for i in 0..a.len {
for i in 0 .. a.len {
if a[i] > val {
val = a[i]
idx = i
@ -67,10 +74,12 @@ pub fn idx_max<T>(a []T) int {
// shuffle randomizes the first n items of an array in place (all if n=0)
[direct_array_access]
pub fn shuffle<T>(mut a []T, n int) {
if n < 0 || n > a.len { panic("shuffle's argument 'n' must be in range [0,a.len]") }
cnt := if n==0 { a.len-1 } else { n }
for i in 0..cnt {
x := rand.int_in_range(i,a.len)
if n < 0 || n > a.len {
panic("shuffle's argument 'n' must be in range [0,a.len]")
}
cnt := if n == 0 { a.len - 1 } else { n }
for i in 0 .. cnt {
x := rand.int_in_range(i, a.len)
// swap
a_i := a[i]
a[i] = a[x]
@ -78,17 +87,15 @@ pub fn shuffle<T>(mut a []T, n int) {
}
}
// merge two sorted arrays (ascending) and maintain sorted order
[direct_array_access]
pub fn merge<T>(a []T, b []T) []T {
mut m := []T{len:a.len + b.len}
pub fn merge<T>(a, b []T) []T {
mut m := []T{len: a.len + b.len}
mut ia := 0
mut ib := 0
mut j := 0
// TODO efficient approach to merge_desc where: a[ia] >= b[ib]
for ia<a.len && ib<b.len {
for ia < a.len && ib < b.len {
if a[ia] <= b[ib] {
m[j] = a[ia]
ia++
@ -98,21 +105,17 @@ pub fn merge<T>(a []T, b []T) []T {
}
j++
}
// a leftovers
for ia < a.len {
m[j] = a[ia]
ia++
j++
}
// b leftovers
for ib < b.len {
m[j] = b[ib]
ib++
j++
}
return m
}

View File

@ -4,94 +4,78 @@ import rand
fn test_min() {
a := [8, 2, 6, 4]
assert min<int>(a)==2
assert min<int>(a[2..])==4
assert min<int>(a) == 2
assert min<int>(a[2..]) == 4
b := [f32(5.1), 3.1, 1.1, 9.1]
assert min<f32>(b) == f32(1.1)
assert min<f32>(b[..2]) == f32(3.1)
c := [byte(4), 9, 3, 1]
assert min<byte>(c) == byte(1)
assert min<byte>(c[..3]) == byte(3)
}
fn test_max() {
a := [8, 2, 6, 4]
assert max<int>(a)==8
assert max<int>(a[1..])==6
assert max<int>(a) == 8
assert max<int>(a[1..]) == 6
b := [f32(5.1), 3.1, 1.1, 9.1]
assert max<f32>(b) == f32(9.1)
assert max<f32>(b[..3]) == f32(5.1)
c := [byte(4), 9, 3, 1]
assert max<byte>(c) == byte(9)
assert max<byte>(c[2..]) == byte(3)
}
fn test_idx_min() {
a := [8, 2, 6, 4]
assert idx_min<int>(a)==1
assert idx_min<int>(a) == 1
b := [f32(5.1), 3.1, 1.1, 9.1]
assert idx_min<f32>(b) == 2
c := [byte(4), 9, 3, 1]
assert idx_min<byte>(c) == 3
}
fn test_idx_max() {
a := [8, 2, 6, 4]
assert idx_max<int>(a)==0
assert idx_max<int>(a) == 0
b := [f32(5.1), 3.1, 1.1, 9.1]
assert idx_max<f32>(b) == 3
c := [byte(4), 9, 3, 1]
assert idx_max<byte>(c) == 1
}
fn test_shuffle() {
rand.seed([u32(1),2]) // set seed to produce same results in order
a := [1,2,3,4,5,6,7,8,9,10]
rand.seed([u32(1), 2]) // set seed to produce same results in order
a := [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
mut b := a.clone()
mut c := a.clone()
shuffle<int>(mut b, 0)
shuffle<int>(mut c, 0)
assert b == [6, 4, 5, 1, 9, 2, 10, 3, 8, 7]
assert c == [1, 6, 5, 8, 7, 2, 10, 9, 3, 4]
// test shuffling a slice
mut d := a.clone()
shuffle<int>(mut d[..5], 0)
assert d == [5, 2, 1, 3, 4, 6, 7, 8, 9, 10]
assert d[5..] == a[5..]
// test shuffling n items
mut e := a.clone()
shuffle<int>(mut e, 5)
assert e[..5] == [10, 3, 1, 8, 4]
assert e[5..] == [6, 7, 5, 9, 2]
// test shuffling empty array
mut f := a[..0]
shuffle<int>(mut f,0)
shuffle<int>(mut f, 0)
assert f == []int{}
}
fn test_merge() {
a := [1,3,5,5,7]
b := [2,4,4,5,6,8]
a := [1, 3, 5, 5, 7]
b := [2, 4, 4, 5, 6, 8]
c := []int{}
d := []int{}
assert merge<int>(a,b) == [1,2,3,4,4,5,5,5,6,7,8]
assert merge<int>(c,d) == []
assert merge<int>(a,c) == a
assert merge<int>(d,b) == b
assert merge<int>(a, b) == [1, 2, 3, 4, 4, 5, 5, 5, 6, 7, 8]
assert merge<int>(c, d) == []
assert merge<int>(a, c) == a
assert merge<int>(d, b) == b
}

View File

@ -1,16 +1,11 @@
// Copyright (c) 2019-2020 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
// Cipher block chaining (CBC) mode.
// CBC provides confidentiality by xoring (chaining) each plaintext block
// with the previous ciphertext block before applying the block cipher.
// See NIST SP 800-38A, pp 10-11
// NOTE this will be moved to crypto.cipher interface (joe-c)
module aes
import crypto.cipher
@ -27,10 +22,10 @@ mut:
// internal
fn new_aes_cbc(b AesCipher, iv []byte) AesCbc {
return AesCbc{
b: b,
block_size: b.block_size(),
iv: iv.clone(),
tmp: []byte{len:(b.block_size()),}
b: b
block_size: b.block_size()
iv: iv.clone()
tmp: []byte{len: (b.block_size())}
}
}
@ -44,12 +39,14 @@ pub fn new_cbc(b AesCipher, iv []byte) AesCbc {
return new_aes_cbc(b, iv)
}
pub fn (x &AesCbc) block_size() int { return x.block_size }
pub fn (x &AesCbc) block_size() int {
return x.block_size
}
pub fn (x &AesCbc) encrypt_blocks(mut dst_ []byte, src_ []byte) {
mut dst := *dst_
mut src := src_
if src.len%x.block_size != 0 {
if src.len % x.block_size != 0 {
panic('crypto.cipher: input not full blocks')
}
if dst.len < src.len {
@ -58,14 +55,11 @@ pub fn (x &AesCbc) encrypt_blocks(mut dst_ []byte, src_ []byte) {
if subtle.inexact_overlap(dst[..src.len], src_) {
panic('crypto.cipher: invalid buffer overlap')
}
mut iv := x.iv
for src.len > 0 {
// Write the xor to dst, then encrypt in place.
cipher.xor_bytes(mut dst[..x.block_size], src[..x.block_size], iv)
x.b.encrypt(mut dst[..x.block_size], mut dst[..x.block_size])
// Move to the next block with this block as the next iv.
iv = dst[..x.block_size]
if x.block_size >= src.len {
@ -75,13 +69,12 @@ pub fn (x &AesCbc) encrypt_blocks(mut dst_ []byte, src_ []byte) {
}
dst = dst[x.block_size..]
}
// Save the iv for the next crypt_blocks call.
copy(x.iv, iv)
}
pub fn (mut x AesCbc) decrypt_blocks(mut dst []byte, src []byte) {
if src.len%x.block_size != 0 {
if src.len % x.block_size != 0 {
panic('crypto.cipher: input not full blocks')
}
if dst.len < src.len {
@ -93,33 +86,27 @@ pub fn (mut x AesCbc) decrypt_blocks(mut dst []byte, src []byte) {
if src.len == 0 {
return
}
// For each block, we need to xor the decrypted data with the previous block's ciphertext (the iv).
// To avoid making a copy each time, we loop over the blocks BACKWARDS.
mut end := src.len
mut start := end - x.block_size
mut prev := start - x.block_size
// Copy the last block of ciphertext in preparation as the new iv.
copy(x.tmp, src.slice(start, end))
// Loop over all but the first block.
for start > 0 {
mut src_chunk := src.slice(start, end)
x.b.decrypt(mut (*dst).slice(start, end), mut src_chunk)
cipher.xor_bytes(mut (*dst).slice(start, end), (*dst).slice(start, end), src.slice(prev, start))
cipher.xor_bytes(mut (*dst).slice(start, end), (*dst).slice(start, end), src.slice(prev,
start))
end = start
start = prev
prev -= x.block_size
}
// The first block is special because it uses the saved iv.
mut src_chunk := src.slice(start, end)
x.b.decrypt(mut (*dst).slice(start, end), mut src_chunk)
cipher.xor_bytes(mut (*dst).slice(start, end), (*dst).slice(start, end), x.iv)
// Set the new iv to the first block we copied earlier.
x.iv = x.tmp
x.tmp = x.iv
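
For reference, the chaining described in the comments above is: on encryption, ciphertext block C[i] = encrypt(P[i] xor C[i-1]) with C[0] = iv, and on decryption, P[i] = decrypt(C[i]) xor C[i-1]. That is why decrypt_blocks can walk the blocks backwards: each decrypted block only needs the previous ciphertext block as its xor mask, and only the last ciphertext block has to be saved as the iv for the next call.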

View File

@ -1,7 +1,6 @@
// Copyright (c) 2019-2020 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
import crypto.aes
fn test_crypto_aes() {
@ -17,12 +16,12 @@ fn test_crypto_aes() {
iv := ciphertext[..aes.block_size]
ciphertext = ciphertext[aes.block_size..]
// CBC mode always works in whole blocks.
if ciphertext.len%aes.block_size != 0 {
if ciphertext.len % aes.block_size != 0 {
panic('ciphertext is not a multiple of the block size')
}
mode := aes.new_cbc(block, iv)
cipher_clone := ciphertext.clone()
mode.encrypt_blocks(mut ciphertext, cipher_clone)
assert ciphertext.hex() == 'c210459b514668ddc44674885e4979215265a6c44431a248421254ef357a8c2a308a8bddf5623af9df91737562041cf1'
assert ciphertext.hex() ==
'c210459b514668ddc44674885e4979215265a6c44431a248421254ef357a8c2a308a8bddf5623af9df91737562041cf1'
}

View File

@ -1,7 +1,6 @@
// Copyright (c) 2019-2020 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module aes
// new_cipher_generic creates and returns a new cipher.Block
@ -9,8 +8,8 @@ module aes
fn new_cipher_generic(key []byte) AesCipher {
n := key.len + 28
mut c := AesCipher{
enc: []u32{len:(n)}
dec: []u32{len:(n)}
enc: []u32{len: (n)}
dec: []u32{len: (n)}
}
expand_key_generic(key, mut c.enc, mut c.dec)
return c

View File

@ -1,35 +1,28 @@
import eventbus
struct EventData {
data string
data string
}
fn test_eventbus(){
fn test_eventbus() {
ev_data := &EventData{'hello'}
mut eb := eventbus.new()
eb.subscriber.subscribe_once("on_test", on_test)
assert eb.has_subscriber("on_test")
assert eb.subscriber.is_subscribed("on_test")
eb.publish("on_test", eb, ev_data)
assert !eb.has_subscriber("on_test")
assert !eb.subscriber.is_subscribed("on_test")
eb.subscriber.subscribe("on_test", on_test)
assert eb.has_subscriber("on_test")
assert eb.subscriber.is_subscribed("on_test")
eb.subscriber.subscribe_once('on_test', on_test)
assert eb.has_subscriber('on_test')
assert eb.subscriber.is_subscribed('on_test')
eb.publish('on_test', eb, ev_data)
assert !eb.has_subscriber('on_test')
assert !eb.subscriber.is_subscribed('on_test')
eb.subscriber.subscribe('on_test', on_test)
assert eb.has_subscriber('on_test')
assert eb.subscriber.is_subscribed('on_test')
eb.clear_all()
assert !eb.has_subscriber("on_test")
assert !eb.subscriber.is_subscribed("on_test")
assert !eb.has_subscriber('on_test')
assert !eb.subscriber.is_subscribed('on_test')
}
fn on_test(receiver voidptr, ev &EventData, sender voidptr) {
assert receiver == 0
assert sender != 0
assert ev.data == "hello"
assert ev.data == 'hello'
}

View File

@ -3,5 +3,5 @@ fn main() {
s := 'test string\n'
sys_write(1, s.str, u64(s.len))
a := s[0]
println("Hello freestanding!")
println('Hello freestanding!')
}

View File

@ -3,24 +3,37 @@ module szip
#flag -I @VROOT/thirdparty/zip
#include "zip.c"
#include "zip.h"
struct C.zip_t {}
struct C.zip_t {
}
type Zip = C.zip_t
fn C.zip_open(byteptr, int, byte) &Zip
fn C.zip_close(&Zip)
fn C.zip_entry_open(&Zip, byteptr) int
fn C.zip_entry_close(&Zip) int
fn C.zip_entry_name(&Zip) byteptr
fn C.zip_entry_index(&Zip) int
fn C.zip_entry_isdir(&Zip) int
fn C.zip_entry_size(&Zip) u64
fn C.zip_entry_crc32(&Zip) u32
fn C.zip_entry_write(&Zip, voidptr, int) int
fn C.zip_entry_fwrite(&Zip, byteptr) int
fn C.zip_entry_read(&Zip, byteptr, int) int
fn C.zip_entry_fread(&Zip, byteptr) int
fn C.zip_total_entries(&Zip) int
// Ref - miniz.h

View File

@ -64,7 +64,8 @@ fn test_variadic_only_with_no_vargs() {
fn_variadic_only_with_no_vargs()
}
struct VaTestStruct {}
struct VaTestStruct {
}
fn (a VaTestStruct) variadic_method(name string, groups ...VaTestGroup) {
assert groups.len == 2

View File

@ -2,7 +2,6 @@ module assets
// this module provides an AssetManager for combining
// and caching javascript & css.
import os
import time
import crypto.md5
@ -101,7 +100,9 @@ fn (am AssetManager) combine(asset_type string, to_file bool) string {
return out
}
if !os.is_dir(am.cache_dir) {
os.mkdir(am.cache_dir) or { panic(err) }
os.mkdir(am.cache_dir) or {
panic(err)
}
}
mut file := os.create(out_file) or {
panic(err)
@ -157,7 +158,9 @@ fn (mut am AssetManager) add(asset_type, file string) bool {
}
asset := Asset{
file_path: file
last_modified: time.Time{unix: u64(os.file_last_mod_unix(file))}
last_modified: time.Time{
unix: u64(os.file_last_mod_unix(file))
}
}
if asset_type == 'css' {
am.css << asset
@ -183,11 +186,7 @@ fn (am AssetManager) get_assets(asset_type string) []Asset {
if asset_type != 'css' && asset_type != 'js' {
panic('$unknown_asset_type_error ($asset_type).')
}
assets := if asset_type == 'css' {
am.css
} else {
am.js
}
assets := if asset_type == 'css' { am.css } else { am.js }
return assets
}