ci: fix warnings/errors due to the vfmt change
parent 50a2b033b7
commit 31ef921ef2
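All of the changes below are mechanical re-formatting and signature updates to satisfy the new vfmt defaults; no behavior is intended to change. As a rough, hypothetical illustration (this snippet is not taken from the commit itself), the old style:

// old style, before the vfmt change (hypothetical example)
fn greet(name, greeting string) {
	println('${greeting}, ${name}!')
	for i in 0..3 { println(i) }
}

becomes the form the new vfmt produces, where every parameter carries its own type, plain identifiers are interpolated as $name, and ranges get spaces around `..`:

// new style, as produced by the new vfmt (hypothetical example)
fn greet(name string, greeting string) {
	println('$greeting, $name!')
	for i in 0 .. 3 {
		println(i)
	}
}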
@@ -27,5 +27,4 @@ fn main() {
exit(1)
}
*/

}
@@ -11,13 +11,19 @@ const (
base_os = 'linux'
os_names = ['linux', 'macos', 'windows']
skip_modules = [
'builtin.bare', 'builtin.js',
'strconv', 'strconv.ftoa', 'hash', 'strings',
'builtin.bare',
'builtin.js',
'strconv',
'strconv.ftoa',
'hash',
'strings',
'crypto.rand',
'os.bare', 'os2',
'picohttpparser', 'picoev',
'os.bare',
'os2',
'picohttpparser',
'picoev',
'szip',
'v.eval'
'v.eval',
]
)
@@ -44,7 +50,7 @@ fn main() {
}
for mname in app.modules {
if !app.is_verbose {
eprintln('Checking module: ${mname} ...')
eprintln('Checking module: $mname ...')
}
api_base := app.gen_api_for_module_in_os(mname, base_os)
for oname in os_names {
@@ -85,7 +91,7 @@ fn all_vlib_modules() []string {
return modules
}

fn (app App) gen_api_for_module_in_os(mod_name, os_name string) string {
fn (app App) gen_api_for_module_in_os(mod_name string, os_name string) string {
if app.is_verbose {
eprintln('Checking module: ${mod_name:-30} for OS: ${os_name:-10} ...')
}
@@ -102,7 +108,7 @@ fn (app App) gen_api_for_module_in_os(mod_name, os_name string) string {
fn_signature := s.stringify(b.table, mod_name)
fn_mod := s.modname()
if fn_mod == mod_name {
fline := '${fn_mod}: $fn_signature'
fline := '$fn_mod: $fn_signature'
res << fline
}
}
@@ -113,10 +119,10 @@ fn (app App) gen_api_for_module_in_os(mod_name, os_name string) string {
return res.join('\n')
}

fn (mut app App) compare_api(api_base, api_os, mod_name, os_base, os_target string) {
fn (mut app App) compare_api(api_base string, api_os string, mod_name string, os_base string, os_target string) {
res := util.color_compare_strings(app.diff_cmd, api_base, api_os)
if res.len > 0 {
summary := 'Different APIs found for module: `${mod_name}`, between OS base: `${os_base}` and OS: `${os_target}`'
summary := 'Different APIs found for module: `$mod_name`, between OS base: `$os_base` and OS: `$os_target`'
eprintln(term.header(summary, '-'))
eprintln(res)
eprintln(term.h_divider('-'))
@@ -1,7 +1,7 @@
fn main() {
println('fn print(a int) {}')
//println('fn println(a string) {}')
for i in 0..100000 {
// println('fn println(a string) {}')
for i in 0 .. 100000 {
println('
fn foo${i}() {
x := $i
@@ -13,7 +13,6 @@ fn foo${i}() {
}
')
}
//println('fn main() {foo1()} ')
// println('fn main() {foo1()} ')
println('fn main() { println("1m DONE") } ')

}
@@ -364,7 +364,7 @@ fn (mut gen_vc GenVC) purge_repos() {
}

// check if file size is too short
fn (mut gen_vc GenVC) assert_file_exists_and_is_not_too_short(f, emsg string) {
fn (mut gen_vc GenVC) assert_file_exists_and_is_not_too_short(f string, emsg string) {
if !os.exists(f) {
gen_vc.logger.error('$err_msg_build: $emsg .')
gen_vc.gen_error = true
@@ -39,12 +39,12 @@ pub fn line_to_timestamp_and_commit(line string) (int, string) {
return parts[0].int(), parts[1]
}

pub fn normalized_workpath_for_commit(workdir, commit string) string {
pub fn normalized_workpath_for_commit(workdir string, commit string) string {
nc := 'v_at_' + commit.replace('^', '_').replace('-', '_').replace('/', '_')
return os.real_path(workdir + os.path_separator + nc)
}

pub fn prepare_vc_source(vcdir, cdir, commit string) (string, string) {
pub fn prepare_vc_source(vcdir string, cdir string, commit string) (string, string) {
scripting.chdir(cdir)
// Building a historic v with the latest vc is not always possible ...
// It is more likely, that the vc *at the time of the v commit*,
@@ -62,7 +62,7 @@ pub fn prepare_vc_source(vcdir, cdir, commit string) (string, string) {
return v_commithash, vccommit_before
}

pub fn clone_or_pull(remote_git_url, local_worktree_path string) {
pub fn clone_or_pull(remote_git_url string, local_worktree_path string) {
// NB: after clone_or_pull, the current repo branch is === HEAD === master
if os.is_dir(local_worktree_path) && os.is_dir(os.join_path(local_worktree_path, '.git')) {
// Already existing ... Just pulling in this case is faster usually.
@@ -64,7 +64,7 @@ fn (c Context) compare_versions() {
}
}

fn (c &Context) prepare_v(cdir, commit string) {
fn (c &Context) prepare_v(cdir string, commit string) {
mut cc := os.getenv('CC')
if cc == '' {
cc = 'cc'
@ -10,45 +10,49 @@ import v.util
|
|||
|
||||
struct CmdResult {
|
||||
mut:
|
||||
runs int
|
||||
cmd string
|
||||
icmd int
|
||||
runs int
|
||||
cmd string
|
||||
icmd int
|
||||
outputs []string
|
||||
oms map[string][]int
|
||||
oms map[string][]int
|
||||
summary map[string]Aints
|
||||
timings []int
|
||||
atiming Aints
|
||||
}
|
||||
|
||||
struct Context {
|
||||
mut:
|
||||
count int
|
||||
series int
|
||||
warmup int
|
||||
show_help bool
|
||||
show_output bool
|
||||
count int
|
||||
series int
|
||||
warmup int
|
||||
show_help bool
|
||||
show_output bool
|
||||
fail_on_regress_percent int
|
||||
fail_on_maxtime int // in ms
|
||||
verbose bool
|
||||
commands []string
|
||||
results []CmdResult
|
||||
cline string // a terminal clearing line
|
||||
fail_on_maxtime int // in ms
|
||||
verbose bool
|
||||
commands []string
|
||||
results []CmdResult
|
||||
cline string // a terminal clearing line
|
||||
}
|
||||
|
||||
struct Aints {
|
||||
values []int
|
||||
values []int
|
||||
mut:
|
||||
imin int
|
||||
imax int
|
||||
imin int
|
||||
imax int
|
||||
average f64
|
||||
stddev f64
|
||||
stddev f64
|
||||
}
|
||||
|
||||
fn new_aints(vals []int) Aints {
|
||||
mut res := Aints{ values: vals }
|
||||
mut res := Aints{
|
||||
values: vals
|
||||
}
|
||||
mut sum := i64(0)
|
||||
mut imin := math.max_i32
|
||||
mut imin := math.max_i32
|
||||
mut imax := -math.max_i32
|
||||
for i in vals {
|
||||
sum+=i
|
||||
sum += i
|
||||
if i < imin {
|
||||
imin = i
|
||||
}
|
||||
|
@ -67,17 +71,22 @@ fn new_aints(vals []int) Aints {
|
|||
x := f64(i) - res.average
|
||||
devsum += (x * x)
|
||||
}
|
||||
res.stddev = math.sqrt( devsum / f64(vals.len) )
|
||||
res.stddev = math.sqrt(devsum / f64(vals.len))
|
||||
return res
|
||||
}
|
||||
fn (a Aints) str() string { return util.bold('${a.average:9.3f}') + 'ms ± σ: ${a.stddev:-5.1f}ms, min … max: ${a.imin}ms … ${a.imax}ms' }
|
||||
|
||||
fn (a Aints) str() string {
|
||||
return util.bold('${a.average:9.3f}') +
|
||||
'ms ± σ: ${a.stddev:-5.1f}ms, min … max: ${a.imin}ms … ${a.imax}ms'
|
||||
}
|
||||
|
||||
const (
|
||||
max_fail_percent = 100000
|
||||
max_time = 60*1000 // ms
|
||||
max_fail_percent = 100000
|
||||
max_time = 60 * 1000 // ms
|
||||
performance_regression_label = 'Performance regression detected, failing since '
|
||||
)
|
||||
fn main(){
|
||||
|
||||
fn main() {
|
||||
mut context := Context{}
|
||||
context.parse_options()
|
||||
context.run()
|
||||
|
@ -111,7 +120,7 @@ fn (mut context Context) parse_options() {
|
|||
eprintln('Error: ' + err)
|
||||
exit(1)
|
||||
}
|
||||
context.results = []CmdResult{ len: context.commands.len, init: CmdResult{} }
|
||||
context.results = []CmdResult{len: context.commands.len, init: CmdResult{}}
|
||||
context.cline = '\r' + term.h_divider('')
|
||||
}
|
||||
|
||||
|
@ -121,7 +130,7 @@ fn (mut context Context) clear_line() {
|
|||
|
||||
fn (mut context Context) run() {
|
||||
mut run_warmups := 0
|
||||
for si in 1..context.series+1 {
|
||||
for si in 1 .. context.series + 1 {
|
||||
for icmd, cmd in context.commands {
|
||||
mut runs := 0
|
||||
mut duration := 0
|
||||
|
@ -129,20 +138,22 @@ fn (mut context Context) run() {
|
|||
mut oldres := ''
|
||||
println('Series: ${si:4}/${context.series:-4}, command: $cmd')
|
||||
if context.warmup > 0 && run_warmups < context.commands.len {
|
||||
for i in 1..context.warmup+1 {
|
||||
for i in 1 .. context.warmup + 1 {
|
||||
print('\r warming up run: ${i:4}/${context.warmup:-4} for ${cmd:-50s} took ${duration:6} ms ...')
|
||||
mut sw := time.new_stopwatch({})
|
||||
os.exec(cmd) or { continue }
|
||||
os.exec(cmd) or {
|
||||
continue
|
||||
}
|
||||
duration = int(sw.elapsed().milliseconds())
|
||||
}
|
||||
run_warmups++
|
||||
}
|
||||
context.clear_line()
|
||||
for i in 1..(context.count+1) {
|
||||
avg := f64(sum)/f64(i)
|
||||
for i in 1 .. (context.count + 1) {
|
||||
avg := f64(sum) / f64(i)
|
||||
print('\rAverage: ${avg:9.3f}ms | run: ${i:4}/${context.count:-4} | took ${duration:6} ms')
|
||||
if context.show_output {
|
||||
print(' | result: ${oldres:-s}')
|
||||
print(' | result: ${oldres:s}')
|
||||
}
|
||||
mut sw := time.new_stopwatch({})
|
||||
res := scripting.exec(cmd) or {
|
||||
|
@ -153,7 +164,8 @@ fn (mut context Context) run() {
|
|||
eprintln('${i:10} non 0 exit code for cmd: $cmd')
|
||||
continue
|
||||
}
|
||||
context.results[icmd].outputs << res.output.trim_right('\r\n').replace('\r\n', '\n').split("\n")
|
||||
context.results[icmd].outputs <<
|
||||
res.output.trim_right('\r\n').replace('\r\n', '\n').split('\n')
|
||||
context.results[icmd].timings << duration
|
||||
sum += duration
|
||||
runs++
|
||||
|
@ -165,7 +177,7 @@ fn (mut context Context) run() {
|
|||
context.results[icmd].atiming = new_aints(context.results[icmd].timings)
|
||||
context.clear_line()
|
||||
print('\r')
|
||||
mut m := map[string][]int
|
||||
mut m := map[string][]int{}
|
||||
for o in context.results[icmd].outputs {
|
||||
x := o.split(':')
|
||||
if x.len > 1 {
|
||||
|
@ -175,7 +187,7 @@ fn (mut context Context) run() {
|
|||
}
|
||||
}
|
||||
mut summary := map[string]Aints{}
|
||||
for k,v in m {
|
||||
for k, v in m {
|
||||
// show a temporary summary for the current series/cmd cycle
|
||||
s := new_aints(v)
|
||||
println(' $k: $s')
|
||||
|
@ -183,8 +195,8 @@ fn (mut context Context) run() {
|
|||
}
|
||||
// merge current raw results to the previous ones
|
||||
old_oms := context.results[icmd].oms
|
||||
mut new_oms := map[string][]int
|
||||
for k,v in m {
|
||||
mut new_oms := map[string][]int{}
|
||||
for k, v in m {
|
||||
if old_oms[k].len == 0 {
|
||||
new_oms[k] = v
|
||||
} else {
|
||||
|
@ -193,20 +205,21 @@ fn (mut context Context) run() {
|
|||
}
|
||||
}
|
||||
context.results[icmd].oms = new_oms
|
||||
//println('')
|
||||
// println('')
|
||||
}
|
||||
}
|
||||
// create full summaries, taking account of all runs
|
||||
for icmd in 0..context.results.len {
|
||||
for icmd in 0 .. context.results.len {
|
||||
mut new_full_summary := map[string]Aints{}
|
||||
for k,v in context.results[icmd].oms {
|
||||
for k, v in context.results[icmd].oms {
|
||||
new_full_summary[k] = new_aints(v)
|
||||
}
|
||||
context.results[icmd].summary = new_full_summary
|
||||
}
|
||||
}
|
||||
|
||||
fn (mut context Context) show_diff_summary() {
|
||||
context.results.sort_with_compare(fn (a, b &CmdResult) int {
|
||||
context.results.sort_with_compare(fn (a &CmdResult, b &CmdResult) int {
|
||||
if a.atiming.average < b.atiming.average {
|
||||
return -1
|
||||
}
|
||||
|
@ -224,7 +237,7 @@ fn (mut context Context) show_diff_summary() {
|
|||
if r.icmd == 0 {
|
||||
first_cmd_percentage = cpercent
|
||||
}
|
||||
println(' ${first_marker}${(i+1):3} | ${cpercent:6.1f}% slower | ${r.cmd:-55s} | ${r.atiming}')
|
||||
println(' $first_marker${(i+1):3} | ${cpercent:6.1f}% slower | ${r.cmd:-55s} | $r.atiming')
|
||||
}
|
||||
$if debugcontext ? {
|
||||
println('context: $context')
|
||||
|
@ -232,7 +245,7 @@ fn (mut context Context) show_diff_summary() {
|
|||
eprintln('base: $base | context.fail_on_maxtime: $context.fail_on_maxtime')
|
||||
if int(base) > context.fail_on_maxtime {
|
||||
print(performance_regression_label)
|
||||
println('average time: ${base:6.1f} ms > ${context.fail_on_maxtime} ms threshold.')
|
||||
println('average time: ${base:6.1f} ms > $context.fail_on_maxtime ms threshold.')
|
||||
exit(2)
|
||||
}
|
||||
if context.fail_on_regress_percent == max_fail_percent || context.results.len < 2 {
@ -5,7 +5,7 @@ import flag
|
|||
import strings
|
||||
|
||||
const (
|
||||
tool_version = '0.0.4'
|
||||
tool_version = '0.0.4'
|
||||
tool_description = 'Converts a list of arbitrary files into a single v module file.'
|
||||
)
|
||||
|
||||
|
@ -20,29 +20,26 @@ mut:
|
|||
|
||||
fn (context Context) header() string {
|
||||
mut header_s := ''
|
||||
header_s += 'module ${context.module_name}\n'
|
||||
header_s += 'module $context.module_name\n'
|
||||
header_s += '\n'
|
||||
allfiles := context.files.join(' ')
|
||||
|
||||
mut options := []string{}
|
||||
if context.prefix.len > 0 {
|
||||
options << '-p ${context.prefix}'
|
||||
options << '-p $context.prefix'
|
||||
}
|
||||
if context.module_name.len > 0 {
|
||||
options << '-m ${context.module_name}'
|
||||
options << '-m $context.module_name'
|
||||
}
|
||||
if context.write_file.len > 0 {
|
||||
options << '-w ${context.write_file}'
|
||||
options << '-w $context.write_file'
|
||||
}
|
||||
soptions := options.join(' ')
|
||||
|
||||
header_s += '// File generated by:\n'
|
||||
header_s += '// v bin2v ${allfiles} ${soptions}\n'
|
||||
header_s += '// v bin2v $allfiles $soptions\n'
|
||||
header_s += '// Please, do not edit this file.\n'
|
||||
header_s += '// Your changes may be overwritten.\n'
|
||||
header_s += '\n'
|
||||
header_s += 'const (\n'
|
||||
|
||||
return header_s
|
||||
}
|
||||
|
||||
|
@ -54,14 +51,14 @@ fn (context Context) file2v(file string) string {
|
|||
mut sb := strings.new_builder(1000)
|
||||
fname := os.file_name(file)
|
||||
fname_no_dots := fname.replace('.', '_')
|
||||
byte_name := '${context.prefix}${fname_no_dots}'.to_lower()
|
||||
byte_name := '$context.prefix$fname_no_dots'.to_lower()
|
||||
fbytes := os.read_bytes(file) or {
|
||||
eprintln('Error: $err')
|
||||
return ''
|
||||
}
|
||||
fbyte := fbytes[0]
|
||||
sb.write(' ${byte_name}_len = ${fbytes.len}\n')
|
||||
sb.write(' ${byte_name} = [ byte(${fbyte}), \n ')
|
||||
sb.write(' ${byte_name}_len = $fbytes.len\n')
|
||||
sb.write(' $byte_name = [ byte($fbyte), \n ')
|
||||
for i := 1; i < fbytes.len; i++ {
|
||||
b := int(fbytes[i]).str()
|
||||
sb.write('${b:4s}, ')
|
||||
|
@ -71,7 +68,6 @@ fn (context Context) file2v(file string) string {
|
|||
}
|
||||
sb.write('\n]!!\n')
|
||||
sb.write('\n')
|
||||
|
||||
return sb.str()
|
||||
}
|
||||
|
||||
|
@ -100,19 +96,19 @@ fn main() {
|
|||
exit(0)
|
||||
}
|
||||
context.files = real_files
|
||||
|
||||
if !context.write_file.ends_with('.v') {
|
||||
context.write_file += '.v'
|
||||
}
|
||||
if context.write_file.len > 0 {
|
||||
mut out_file := os.create(context.write_file) or { panic(err) }
|
||||
mut out_file := os.create(context.write_file) or {
|
||||
panic(err)
|
||||
}
|
||||
out_file.write(context.header())
|
||||
for file in real_files {
|
||||
out_file.write(context.file2v(file))
|
||||
}
|
||||
out_file.write(context.footer())
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
println(context.header())
|
||||
for file in real_files {
|
||||
println(context.file2v(file))
@@ -9,7 +9,8 @@ fn main() {
args_string := args[1..].join(' ')
skips := []string{}
util.ensure_modules_for_all_tools_are_installed('-v' in args)
if testing.v_build_failing_skipped(args_string.all_before('build-tools'), 'cmd/tools', skips) {
if testing.v_build_failing_skipped(args_string.all_before('build-tools'), 'cmd/tools',
skips) {
exit(1)
}
}
@@ -19,7 +19,7 @@ fn cerror(e string) {
eprintln('\nerror: $e')
}

fn vmod_content(name, desc string) string {
fn vmod_content(name string, desc string) string {
return [
'Module {',
" name: '$name'",
@ -207,7 +207,7 @@ fn handle_http_connection(mut con net.Socket, ctx &VdocHttpServerContext) {
|
|||
send_http_response(mut con, 200, ctx.content_type, ctx.docs[filename])
|
||||
}
|
||||
|
||||
fn send_http_response(mut con net.Socket, http_code int, content_type, html string) {
|
||||
fn send_http_response(mut con net.Socket, http_code int, content_type string, html string) {
|
||||
content_length := html.len.str()
|
||||
shttp_code := http_code.str()
|
||||
mut http_response := strings.new_builder(20000)
|
||||
|
@ -230,7 +230,7 @@ fn send_http_response(mut con net.Socket, http_code int, content_type, html stri
|
|||
}
|
||||
}
|
||||
|
||||
fn get_src_link(repo_url, file_name string, line_nr int) string {
|
||||
fn get_src_link(repo_url string, file_name string, line_nr int) string {
|
||||
mut url := urllib.parse(repo_url) or {
|
||||
return ''
|
||||
}
|
||||
|
@ -516,10 +516,13 @@ fn (cfg DocConfig) gen_html(idx int) string {
|
|||
}
|
||||
return html_content.replace('{{ title }}', dcs.head.name).replace('{{ head_name }}',
|
||||
header_name).replace('{{ version }}', version).replace('{{ light_icon }}', cfg.assets['light_icon']).replace('{{ dark_icon }}',
|
||||
cfg.assets['dark_icon']).replace('{{ menu_icon }}', cfg.assets['menu_icon']).replace('{{ head_assets }}', if cfg.inline_assets {
|
||||
'\n <style>'+ cfg.assets['doc_css'] + '</style>\n <style>'+ cfg.assets['normalize_css'] +'</style>'
|
||||
cfg.assets['dark_icon']).replace('{{ menu_icon }}', cfg.assets['menu_icon']).replace('{{ head_assets }}',
|
||||
if cfg.inline_assets {
|
||||
'\n <style>' + cfg.assets['doc_css'] + '</style>\n <style>' + cfg.assets['normalize_css'] +
|
||||
'</style>'
|
||||
} else {
|
||||
'\n <link rel="stylesheet" href="'+cfg.assets['doc_css']+'" />\n <link rel="stylesheet" href="'+cfg.assets['normalize_css']+'" />'
|
||||
'\n <link rel="stylesheet" href="' + cfg.assets['doc_css'] + '" />\n <link rel="stylesheet" href="' +
|
||||
cfg.assets['normalize_css'] + '" />'
|
||||
}).replace('{{ toc_links }}', if cfg.is_multi || cfg.docs.len > 1 {
|
||||
toc2.str()
|
||||
} else {
|
||||
|
@ -531,9 +534,9 @@ fn (cfg DocConfig) gen_html(idx int) string {
|
|||
''
|
||||
}).replace('{{ footer_content }}', 'Powered by vdoc. Generated on: $time_gen').replace('{{ footer_assets }}',
|
||||
if cfg.inline_assets {
|
||||
'<script>'+cfg.assets['doc_js']+'</script>'
|
||||
'<script>' + cfg.assets['doc_js'] + '</script>'
|
||||
} else {
|
||||
'<script src="'+cfg.assets['doc_js']+'"></script>'
|
||||
'<script src="' + cfg.assets['doc_js'] + '"></script>'
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -604,7 +607,7 @@ fn (cfg DocConfig) render_doc(doc doc.Doc, i int) (string, string) {
|
|||
return name, output
|
||||
}
|
||||
|
||||
fn (cfg DocConfig)work_processor(mut work sync.Channel, mut wg sync.WaitGroup) {
|
||||
fn (cfg DocConfig) work_processor(mut work sync.Channel, mut wg sync.WaitGroup) {
|
||||
for {
|
||||
mut pdoc := ParallelDoc{}
|
||||
if !work.pop(&pdoc) {
|
||||
|
@ -622,7 +625,6 @@ fn (cfg DocConfig) render_parallel() {
|
|||
vjobs := runtime.nr_jobs()
|
||||
mut work := sync.new_channel<ParallelDoc>(cfg.docs.len)
|
||||
mut wg := sync.new_waitgroup()
|
||||
|
||||
for i in 0 .. cfg.docs.len {
|
||||
p_doc := ParallelDoc{cfg.docs[i], i}
|
||||
work.push(&p_doc)
|
||||
|
@ -637,7 +639,6 @@ fn (cfg DocConfig) render_parallel() {
|
|||
|
||||
fn (cfg DocConfig) render() map[string]string {
|
||||
mut docs := map[string]string{}
|
||||
|
||||
for i, doc in cfg.docs {
|
||||
name, output := cfg.render_doc(doc, i)
|
||||
docs[name] = output.trim_space()
|
||||
|
@ -649,12 +650,12 @@ fn (cfg DocConfig) render() map[string]string {
|
|||
fn (mut cfg DocConfig) render_static() {
|
||||
if cfg.output_type == .html {
|
||||
cfg.assets = {
|
||||
'doc_css': cfg.get_resource(css_js_assets[0], true),
|
||||
'normalize_css': cfg.get_resource(css_js_assets[1], true),
|
||||
'doc_js': cfg.get_resource(css_js_assets[2], !cfg.serve_http),
|
||||
'light_icon': cfg.get_resource('light.svg', true),
|
||||
'dark_icon': cfg.get_resource('dark.svg', true),
|
||||
'menu_icon': cfg.get_resource('menu.svg', true),
|
||||
'doc_css': cfg.get_resource(css_js_assets[0], true)
|
||||
'normalize_css': cfg.get_resource(css_js_assets[1], true)
|
||||
'doc_js': cfg.get_resource(css_js_assets[2], !cfg.serve_http)
|
||||
'light_icon': cfg.get_resource('light.svg', true)
|
||||
'dark_icon': cfg.get_resource('dark.svg', true)
|
||||
'menu_icon': cfg.get_resource('menu.svg', true)
|
||||
'arrow_icon': cfg.get_resource('arrow.svg', true)
|
||||
}
|
||||
}
@@ -1,5 +1,4 @@
import os
import term
import time
import v.util
import runtime
@@ -130,7 +129,7 @@ fn (mut a App) line(label string, value string) {
a.println('$label: ${util.bold(value)}')
}

fn (app &App) parse(config, sep string) map[string]string {
fn (app &App) parse(config string, sep string) map[string]string {
mut m := map[string]string
for line in config.split_into_lines() {
sline := line.trim_space()
@@ -187,7 +187,7 @@ fn print_compiler_options(compiler_params &pref.Preferences) {
eprintln(' is_script: $compiler_params.is_script ')
}

fn (foptions &FormatOptions) post_process_file(file, formatted_file_path string) {
fn (foptions &FormatOptions) post_process_file(file string, formatted_file_path string) {
if formatted_file_path.len == 0 {
return
}
@ -1,87 +0,0 @@
|
|||
module main
|
||||
|
||||
/*
|
||||
QTODO
|
||||
import os
|
||||
import flag
|
||||
import strings
|
||||
import compiler
|
||||
import v.pref
|
||||
|
||||
const (
|
||||
tool_version = '0.0.1'
|
||||
tool_description = ' Extracts the function names declared in a v file.'
|
||||
)
|
||||
|
||||
fn f_to_string(fmod string, f compiler.Fn) ?string {
|
||||
svisibility := if f.is_public {
|
||||
'public'
|
||||
}else{
|
||||
'private'
|
||||
}
|
||||
if fmod != f.v_fn_module() { return none }
|
||||
if fmod == 'builtin' {
|
||||
return '$svisibility\t' + f.v_fn_name()
|
||||
}
|
||||
return '$svisibility\t' + f.v_fn_module() + '.' + f.v_fn_name()
|
||||
}
|
||||
|
||||
fn analyze_v_file(file string) {
|
||||
println('')
|
||||
hash := strings.repeat(`#`, (76 - file.len) / 2)
|
||||
println('$hash $file $hash')
|
||||
|
||||
// main work:
|
||||
mut pref := &pref.Preferences{
|
||||
path: file
|
||||
}
|
||||
pref.fill_with_defaults()
|
||||
mut v := compiler.new_v(pref)
|
||||
v.add_v_files_to_compile()
|
||||
for f in v.files { v.parse(f, .decl) }
|
||||
fi := v.get_file_parser_index( file ) or { panic(err) }
|
||||
fmod := v.parsers[fi].mod
|
||||
|
||||
// output:
|
||||
mut fns :=[]string
|
||||
for _, f in v.table.fns {
|
||||
fname := f_to_string(fmod, f) or { continue }
|
||||
fns << fname
|
||||
}
|
||||
fns.sort()
|
||||
for f in fns { println(f) }
|
||||
|
||||
}
|
||||
|
||||
fn main(){
|
||||
toolexe := os.executable()
|
||||
compiler.set_vroot_folder(os.dir(os.dir(os.dir(toolexe))))
|
||||
|
||||
mut fp := flag.new_flag_parser(os.args)
|
||||
fp.application(os.file_name(toolexe))
|
||||
fp.version( tool_version )
|
||||
fp.description( tool_description )
|
||||
fp.arguments_description('FILE.v/FOLDER [FILE.v/FOLDER]...')
|
||||
fp.limit_free_args_to_at_least(1)
|
||||
fp.skip_executable()
|
||||
show_help:=fp.bool('help', `h`, false, 'Show this help screen\n')
|
||||
if( show_help ){
|
||||
println( fp.usage() )
|
||||
exit(0)
|
||||
}
|
||||
|
||||
mut files := []string{}
|
||||
locations := fp.finalize() or { eprintln('Error: ' + err) exit(1) }
|
||||
for xloc in locations {
|
||||
loc := os.real_path(xloc)
|
||||
xfiles := if os.is_dir(loc){ os.walk_ext(loc,'.v') } else { [loc] }
|
||||
filtered_files := xfiles.filter(!it.ends_with('_js.v'))
|
||||
files << filtered_files
|
||||
}
|
||||
|
||||
for file in files {
|
||||
analyze_v_file(file)
|
||||
}
|
||||
}
|
||||
*/
|
||||
fn main() {}
@@ -12,7 +12,8 @@ import v.vmod

const (
default_vpm_server_urls = ['https://vpm.vlang.io']
valid_vpm_commands = ['help', 'search', 'install', 'update', 'upgrade', 'outdated', 'list', 'remove']
valid_vpm_commands = ['help', 'search', 'install', 'update', 'upgrade', 'outdated',
'list', 'remove']
excluded_dirs = ['cache', 'vlib']
supported_vcs_systems = ['git', 'hg']
supported_vcs_folders = ['.git', '.hg']
@@ -281,7 +282,9 @@ fn get_outdated() ?[]string {
}

fn vpm_upgrade() {
outdated := get_outdated() or { exit(1) }
outdated := get_outdated() or {
exit(1)
}
if outdated.len > 0 {
vpm_update(outdated)
} else {
@@ -290,7 +293,9 @@ fn vpm_upgrade() {
}

fn vpm_outdated() {
outdated := get_outdated() or { exit(1) }
outdated := get_outdated() or {
exit(1)
}
if outdated.len > 0 {
println('Outdated modules:')
for m in outdated {
@@ -452,7 +457,7 @@ fn get_all_modules() []string {
return modules
}

fn resolve_dependencies(name, module_path string, module_names []string) {
fn resolve_dependencies(name string, module_path string, module_names []string) {
vmod_path := os.join_path(module_path, 'v.mod')
if !os.exists(vmod_path) {
return
@@ -95,7 +95,7 @@ fn repl_help() {
')
}

fn run_repl(workdir, vrepl_prefix string) {
fn run_repl(workdir string, vrepl_prefix string) {
if !is_stdin_a_pipe {
println(util.full_v_version(false))
println('Use Ctrl-C or `exit` to exit, or `help` to see other available commands')
@ -8,15 +8,15 @@ fn main() {
|
|||
vroot := os.dir(vexe)
|
||||
os.chdir(vroot)
|
||||
os.setenv('VCOLORS', 'always', true)
|
||||
|
||||
self_idx := os.args.index('self')
|
||||
args := os.args[1..self_idx]
|
||||
args_str := args.join(' ')
|
||||
options := if args.len > 0 { '($args_str)' } else { '' }
|
||||
println('V self compiling ${options}...')
|
||||
|
||||
cmd := '$vexe -o v2 $args_str cmd/v'
|
||||
result := os.exec(cmd) or { panic(err) }
|
||||
result := os.exec(cmd) or {
|
||||
panic(err)
|
||||
}
|
||||
if result.exit_code != 0 {
|
||||
mut err := 'Permission denied'
|
||||
if !result.output.contains('Permission denied') {
|
||||
|
@ -28,11 +28,9 @@ fn main() {
|
|||
if result.output.len > 0 {
|
||||
println(result.output.trim_space())
|
||||
}
|
||||
|
||||
v_file := if os.user_os() == 'windows' { 'v.exe' } else { 'v' }
|
||||
v2_file := if os.user_os() == 'windows' { 'v2.exe' } else { 'v2' }
|
||||
bak_file := if os.user_os() == 'windows' { 'v_old.exe' } else { 'v_old' }
|
||||
|
||||
if os.exists(bak_file) {
|
||||
os.rm(bak_file)
|
||||
}
@ -8,19 +8,16 @@ fn main() {
|
|||
println('Setup freetype...')
|
||||
vroot := os.dir(pref.vexe_path())
|
||||
os.chdir(vroot)
|
||||
|
||||
if os.is_dir('./thirdparty/freetype') {
|
||||
println('Thirdparty "freetype" is already installed.')
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
s := os.exec('git clone --depth=1 https://github.com/ubawurinna/freetype-windows-binaries ./thirdparty/freetype/') or {
|
||||
panic(err)
|
||||
}
|
||||
println(s.output)
|
||||
println('Thirdparty "freetype" installed successfully.')
|
||||
}
|
||||
}
|
||||
$else {
|
||||
} $else {
|
||||
println('It is only for Windows to setup thirdparty "freetype".')
|
||||
}
|
||||
}
@ -7,8 +7,7 @@ $if windows {
|
|||
#flag -lUser32
|
||||
}
|
||||
}
|
||||
|
||||
fn main(){
|
||||
fn main() {
|
||||
vexe := pref.vexe_path()
|
||||
$if windows {
|
||||
setup_symlink_windows(vexe)
|
||||
|
@ -17,7 +16,7 @@ fn main(){
|
|||
}
|
||||
}
|
||||
|
||||
fn setup_symlink(vexe string){
|
||||
fn setup_symlink(vexe string) {
|
||||
mut link_path := '/usr/local/bin/v'
|
||||
mut ret := os.exec('ln -sf $vexe $link_path') or {
|
||||
panic(err)
|
||||
|
@ -40,23 +39,19 @@ fn setup_symlink(vexe string){
|
|||
}
|
||||
}
|
||||
|
||||
fn setup_symlink_windows(vexe string){
|
||||
fn setup_symlink_windows(vexe string) {
|
||||
$if windows {
|
||||
// Create a symlink in a new local folder (.\.bin\.v.exe)
|
||||
// Puts `v` in %PATH% without polluting it with anything else (like make.bat).
|
||||
// This will make `v` available on cmd.exe, PowerShell, and MinGW(MSYS)/WSL/Cygwin
|
||||
|
||||
vdir := os.real_path(os.dir(vexe))
|
||||
vsymlinkdir := os.join_path(vdir, '.bin')
|
||||
|
||||
vdir := os.real_path(os.dir(vexe))
|
||||
vsymlinkdir := os.join_path(vdir, '.bin')
|
||||
mut vsymlink := os.join_path(vsymlinkdir, 'v.exe')
|
||||
|
||||
if !os.exists(vsymlinkdir) {
|
||||
os.mkdir_all(vsymlinkdir) // will panic if fails
|
||||
} else {
|
||||
os.rm(vsymlink)
|
||||
}
|
||||
|
||||
// try to create a native symlink at .\.bin\v.exe
|
||||
os.symlink(vsymlink, vexe) or {
|
||||
// typically only fails if you're on a network drive (VirtualBox)
|
||||
|
@ -67,41 +62,35 @@ fn setup_symlink_windows(vexe string){
|
|||
if os.exists(vsymlink) {
|
||||
os.rm(vsymlink)
|
||||
}
|
||||
os.write_file(vsymlink, '@echo off\n${vexe} %*')
|
||||
os.write_file(vsymlink, '@echo off\n$vexe %*')
|
||||
}
|
||||
|
||||
if !os.exists(vsymlink) {
|
||||
warn_and_exit('Could not create $vsymlink')
|
||||
}
|
||||
|
||||
print('Symlink $vsymlink to $vexe created.\n\nChecking system %PATH%...')
|
||||
|
||||
reg_sys_env_handle := get_reg_sys_env_handle() or {
|
||||
reg_sys_env_handle := get_reg_sys_env_handle() or {
|
||||
warn_and_exit(err)
|
||||
return
|
||||
}
|
||||
// TODO: Fix defers inside ifs
|
||||
// defer {
|
||||
// C.RegCloseKey(reg_sys_env_handle)
|
||||
// C.RegCloseKey(reg_sys_env_handle)
|
||||
// }
|
||||
|
||||
// if the above succeeded, and we cannot get the value, it may simply be empty
|
||||
sys_env_path := get_reg_value(reg_sys_env_handle, 'Path') or { '' }
|
||||
|
||||
sys_env_path := get_reg_value(reg_sys_env_handle, 'Path') or {
|
||||
''
|
||||
}
|
||||
current_sys_paths := sys_env_path.split(os.path_delimiter).map(it.trim('/$os.path_separator'))
|
||||
mut new_paths := [ vsymlinkdir ]
|
||||
mut new_paths := [vsymlinkdir]
|
||||
for p in current_sys_paths {
|
||||
if p !in new_paths {
|
||||
new_paths << p
|
||||
}
|
||||
}
|
||||
|
||||
new_sys_env_path := new_paths.join(';')
|
||||
|
||||
if new_sys_env_path == sys_env_path {
|
||||
println('configured.')
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
print('not configured.\nAdding symlink directory to system %PATH%...')
|
||||
set_reg_value(reg_sys_env_handle, 'Path', new_sys_env_path) or {
|
||||
warn_and_exit(err)
|
||||
|
@ -110,7 +99,6 @@ fn setup_symlink_windows(vexe string){
|
|||
}
|
||||
println('done.')
|
||||
}
|
||||
|
||||
print('Letting running process know to update their Environment...')
|
||||
send_setting_change_msg('Environment') or {
|
||||
eprintln('\n' + err)
|
||||
|
@ -118,7 +106,6 @@ fn setup_symlink_windows(vexe string){
|
|||
C.RegCloseKey(reg_sys_env_handle)
|
||||
return
|
||||
}
|
||||
|
||||
C.RegCloseKey(reg_sys_env_handle)
|
||||
println('finished.\n\nNote: restart your shell/IDE to load the new %PATH%.')
|
||||
println('After restarting your shell/IDE, give `v version` a try in another dir!')
|
||||
|
@ -134,8 +121,8 @@ fn warn_and_exit(err string) {
|
|||
fn get_reg_sys_env_handle() ?voidptr {
|
||||
$if windows { // wrap for cross-compile compat
|
||||
// open the registry key
|
||||
reg_key_path := 'Environment'
|
||||
reg_env_key := voidptr(0) // or HKEY (HANDLE)
|
||||
reg_key_path := 'Environment'
|
||||
reg_env_key := voidptr(0) // or HKEY (HANDLE)
|
||||
if C.RegOpenKeyEx(os.hkey_current_user, reg_key_path.to_wide(), 0, 1 | 2, ®_env_key) != 0 {
|
||||
return error('Could not open "$reg_key_path" in the registry')
|
||||
}
|
||||
|
@ -149,11 +136,10 @@ fn get_reg_value(reg_env_key voidptr, key string) ?string {
|
|||
$if windows {
|
||||
// query the value (shortcut the sizing step)
|
||||
reg_value_size := 4095 // this is the max length (not for the registry, but for the system %PATH%)
|
||||
mut reg_value := &u16(malloc(reg_value_size))
|
||||
mut reg_value := &u16(malloc(reg_value_size))
|
||||
if C.RegQueryValueEx(reg_env_key, key.to_wide(), 0, 0, reg_value, ®_value_size) != 0 {
|
||||
return error('Unable to get registry value for "$key", try rerunning as an Administrator')
|
||||
}
|
||||
|
||||
return string_from_wide(reg_value)
|
||||
}
|
||||
return error('not on windows')
|
||||
|
@ -165,7 +151,6 @@ fn set_reg_value(reg_key voidptr, key string, value string) ?bool {
|
|||
if C.RegSetValueEx(reg_key, key.to_wide(), 0, 1, value.to_wide(), 4095) != 0 {
|
||||
return error('Unable to set registry value for "$key", are you running as an Administrator?')
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
return error('not on windows')
|
||||
|
@ -175,7 +160,8 @@ fn set_reg_value(reg_key voidptr, key string, value string) ?bool {
|
|||
// letting them know that the system environment has changed and should be reloaded
|
||||
fn send_setting_change_msg(message_data string) ?bool {
|
||||
$if windows {
|
||||
if C.SendMessageTimeout(os.hwnd_broadcast, os.wm_settingchange, 0, message_data.to_wide(), os.smto_abortifhung, 5000, 0) == 0 {
|
||||
if C.SendMessageTimeout(os.hwnd_broadcast, os.wm_settingchange, 0, message_data.to_wide(), os.smto_abortifhung, 5000, 0) ==
|
||||
0 {
|
||||
return error('Could not broadcast WM_SETTINGCHANGE')
|
||||
}
|
||||
return true
@ -6,16 +6,16 @@ import testing
|
|||
const (
|
||||
known_failing_exceptions = [
|
||||
'vlib/v/tests/generics_test.v', // struct Repo<T, U> { => struct Repo {
|
||||
'vlib/crypto/aes/aes.v', // pub fn (c &AesCipher) encrypt(mut dst, mut src []byte) {
|
||||
'vlib/crypto/aes/aes.v', // pub fn (c &AesCipher) encrypt(mut dst, mut src []byte) {
|
||||
'vlib/crypto/aes/block_generic.v', // fn expand_key_generic(key []byte, mut enc, mut dec []u32) {
|
||||
'vlib/crypto/aes/const.v', // multiple narrow columns of []string turned to 1 long single column, otherwise works
|
||||
'vlib/crypto/rc4/rc4.v', // pub fn (mut c Cipher) xor_key_stream(mut dst, mut src []byte) {
|
||||
'vlib/vweb/vweb.v', // $for method in T.methods { => $for method in T(methods) { , `return // xx` => parse expr error
|
||||
'vlib/v/gen/js/tests/life.v', // error: unexpected `,`, expecting ), on JS.setInterval(fn () { show(game) game = step(game) }, 500)
|
||||
'vlib/builtin/js/builtin.v', // JS.console.error(s) => JS.error(s), JS.process.exit(c) => JS.exit(c)
|
||||
'vlib/builtin/js/jsfns_node.js.v',
|
||||
'vlib/builtin/js/jsfns.js.v',
|
||||
'vlib/builtin/js/jsfns_browser.js.v',
|
||||
'vlib/builtin/js/jsfns_node.js.v',
|
||||
'vlib/builtin/js/jsfns.js.v',
|
||||
'vlib/builtin/js/jsfns_browser.js.v',
|
||||
'vlib/builtin/bare/linuxsys_bare.v', // error: expr(): bad token `asm`, on `asm {}`
|
||||
'vlib/os/os.v', // embeded comments, mib := [1/* CTL_KERN */, 14/* KERN_PROC */, 12/* KERN_PROC_PATHNAME */, -1] => comment the rest of the line
|
||||
]
@ -18,17 +18,14 @@ fn main() {
|
|||
println('')
|
||||
return
|
||||
}
|
||||
|
||||
args_to_executable := args[1..]
|
||||
args_before := cmdline.options_before(args_to_executable, ['test'])
|
||||
args_after := cmdline.options_after(args_to_executable, ['test'])
|
||||
|
||||
if args_after.join(' ') == 'v' {
|
||||
eprintln('`v test v` has been deprecated.')
|
||||
eprintln('Use `v test-compiler` instead.')
|
||||
exit(1)
|
||||
}
|
||||
|
||||
mut ts := testing.new_test_session(args_before.join(' '))
|
||||
for targ in args_after {
|
||||
if os.exists(targ) && targ.ends_with('_test.v') {
|
||||
|
@ -37,16 +34,14 @@ fn main() {
|
|||
}
|
||||
if os.is_dir(targ) {
|
||||
// Fetch all tests from the directory
|
||||
ts.files << os.walk_ext( targ.trim_right(os.path_separator), '_test.v')
|
||||
ts.files << os.walk_ext(targ.trim_right(os.path_separator), '_test.v')
|
||||
continue
|
||||
}
|
||||
println('Unrecognized test file $targ .')
|
||||
}
|
||||
|
||||
testing.header('Testing...')
|
||||
ts.test()
|
||||
|
||||
println( ts.benchmark.total_message('running V _test.v files') )
|
||||
println(ts.benchmark.total_message('running V _test.v files'))
|
||||
if ts.failed {
|
||||
exit(1)
|
||||
}
181
vlib/flag/flag.v
@ -2,22 +2,19 @@ module flag
|
|||
|
||||
// data object storing information about a defined flag
|
||||
pub struct Flag {
|
||||
pub:
|
||||
pub:
|
||||
name string // name as it appears on command line
|
||||
abbr byte // shortcut
|
||||
abbr byte // shortcut
|
||||
usage string // help message
|
||||
val_desc string // something like '<arg>' that appears in usage,
|
||||
// and also the default value, when the flag is not given
|
||||
}
|
||||
|
||||
pub fn (f Flag) str() string {
|
||||
return ''
|
||||
+' flag:\n'
|
||||
+' name: $f.name\n'
|
||||
+' abbr: $f.abbr\n'
|
||||
+' usag: $f.usage\n'
|
||||
+' desc: $f.val_desc'
|
||||
return '' + ' flag:\n' + ' name: $f.name\n' + ' abbr: $f.abbr\n' +
|
||||
' usag: $f.usage\n' + ' desc: $f.val_desc'
|
||||
}
|
||||
|
||||
pub fn (af []Flag) str() string {
|
||||
mut res := []string{}
|
||||
res << '\n []Flag = ['
|
||||
|
@ -27,32 +24,34 @@ pub fn (af []Flag) str() string {
|
|||
res << ' ]'
|
||||
return res.join('\n')
|
||||
}
|
||||
|
||||
//
|
||||
pub struct FlagParser {
|
||||
pub mut:
|
||||
args []string // the arguments to be parsed
|
||||
max_free_args int
|
||||
flags []Flag // registered flags
|
||||
|
||||
pub mut:
|
||||
args []string // the arguments to be parsed
|
||||
max_free_args int
|
||||
flags []Flag // registered flags
|
||||
application_name string
|
||||
application_version string
|
||||
application_description string
|
||||
|
||||
min_free_args int
|
||||
min_free_args int
|
||||
args_description string
|
||||
}
|
||||
|
||||
pub const (
|
||||
// used for formating usage message
|
||||
space = ' '
|
||||
underline = '-----------------------------------------------'
|
||||
space = ' '
|
||||
underline = '-----------------------------------------------'
|
||||
max_args_number = 4048
|
||||
)
|
||||
|
||||
// create a new flag set for parsing command line arguments
|
||||
// TODO use INT_MAX some how
|
||||
pub fn new_flag_parser(args []string) &FlagParser {
|
||||
return &FlagParser{args: args.clone(), max_free_args: max_args_number}
|
||||
return &FlagParser{
|
||||
args: args.clone()
|
||||
max_free_args: max_args_number
|
||||
}
|
||||
}
|
||||
|
||||
// change the application name to be used in 'usage' output
|
||||
|
@ -78,22 +77,22 @@ pub fn (mut fs FlagParser) skip_executable() {
|
|||
// private helper to register a flag
|
||||
fn (mut fs FlagParser) add_flag(name string, abbr byte, usage string, desc string) {
|
||||
fs.flags << Flag{
|
||||
name: name,
|
||||
abbr: abbr,
|
||||
usage: usage,
|
||||
name: name
|
||||
abbr: abbr
|
||||
usage: usage
|
||||
val_desc: desc
|
||||
}
|
||||
}
|
||||
|
||||
// private: general parsing a single argument
|
||||
// - search args for existence
|
||||
// if true
|
||||
// extract the defined value as string
|
||||
// else
|
||||
// return an (dummy) error -> argument is not defined
|
||||
// - search args for existence
|
||||
// if true
|
||||
// extract the defined value as string
|
||||
// else
|
||||
// return an (dummy) error -> argument is not defined
|
||||
//
|
||||
// - the name, usage are registered
|
||||
// - found arguments and corresponding values are removed from args list
|
||||
// - the name, usage are registered
|
||||
// - found arguments and corresponding values are removed from args list
|
||||
fn (mut fs FlagParser) parse_value(longhand string, shorthand byte) []string {
|
||||
full := '--$longhand'
|
||||
mut found_entries := []string{}
|
||||
|
@ -105,36 +104,36 @@ fn (mut fs FlagParser) parse_value(longhand string, shorthand byte) []string {
|
|||
continue
|
||||
}
|
||||
if arg == '--' {
|
||||
//End of input. We're done here.
|
||||
// End of input. We're done here.
|
||||
break
|
||||
}
|
||||
if arg[0] != `-` {
|
||||
continue
|
||||
}
|
||||
if (arg.len == 2 && arg[0] == `-` && arg[1] == shorthand ) || arg == full {
|
||||
if i+1 >= fs.args.len {
|
||||
if (arg.len == 2 && arg[0] == `-` && arg[1] == shorthand) || arg == full {
|
||||
if i + 1 >= fs.args.len {
|
||||
return []
|
||||
}
|
||||
nextarg := fs.args[i+1]
|
||||
nextarg := fs.args[i + 1]
|
||||
if nextarg.len > 2 && nextarg[..2] == '--' {
|
||||
//It could be end of input (--) or another argument (--abc).
|
||||
//Both are invalid so die.
|
||||
// It could be end of input (--) or another argument (--abc).
|
||||
// Both are invalid so die.
|
||||
return []
|
||||
}
|
||||
found_entries << fs.args[i+1]
|
||||
found_entries << fs.args[i + 1]
|
||||
to_delete << i
|
||||
to_delete << i+1
|
||||
to_delete << i + 1
|
||||
should_skip_one = true
|
||||
continue
|
||||
}
|
||||
if arg.len > full.len+1 && arg[..full.len+1] == '$full=' {
|
||||
found_entries << arg[full.len+1..]
|
||||
if arg.len > full.len + 1 && arg[..full.len + 1] == '$full=' {
|
||||
found_entries << arg[full.len + 1..]
|
||||
to_delete << i
|
||||
continue
|
||||
}
|
||||
}
|
||||
for i, del in to_delete {
|
||||
//i entrys are deleted so it's shifted left i times.
|
||||
// i entrys are deleted so it's shifted left i times.
|
||||
fs.args.delete(del - i)
|
||||
}
|
||||
return found_entries
|
||||
|
@ -150,7 +149,7 @@ fn (mut fs FlagParser) parse_bool_value(longhand string, shorthand byte) ?string
|
|||
full := '--$longhand'
|
||||
for i, arg in fs.args {
|
||||
if arg == '--' {
|
||||
//End of input. We're done.
|
||||
// End of input. We're done.
|
||||
break
|
||||
}
|
||||
if arg.len == 0 {
|
||||
|
@ -159,10 +158,10 @@ fn (mut fs FlagParser) parse_bool_value(longhand string, shorthand byte) ?string
|
|||
if arg[0] != `-` {
|
||||
continue
|
||||
}
|
||||
if ( arg.len == 2 && arg[0] == `-` && arg[1] == shorthand ) || arg == full {
|
||||
if fs.args.len > i+1 && (fs.args[i+1] in ['true', 'false']) {
|
||||
val := fs.args[i+1]
|
||||
fs.args.delete(i+1)
|
||||
if (arg.len == 2 && arg[0] == `-` && arg[1] == shorthand) || arg == full {
|
||||
if fs.args.len > i + 1 && (fs.args[i + 1] in ['true', 'false']) {
|
||||
val := fs.args[i + 1]
|
||||
fs.args.delete(i + 1)
|
||||
fs.args.delete(i)
|
||||
return val
|
||||
} else {
|
||||
|
@ -170,9 +169,9 @@ fn (mut fs FlagParser) parse_bool_value(longhand string, shorthand byte) ?string
|
|||
return 'true'
|
||||
}
|
||||
}
|
||||
if arg.len > full.len+1 && arg[..full.len+1] == '$full=' {
|
||||
if arg.len > full.len + 1 && arg[..full.len + 1] == '$full=' {
|
||||
// Flag abc=true
|
||||
val := arg[full.len+1..]
|
||||
val := arg[full.len + 1..]
|
||||
fs.args.delete(i)
|
||||
return val
|
||||
}
|
||||
|
@ -195,12 +194,12 @@ pub fn (mut fs FlagParser) bool_opt(name string, abbr byte, usage string) ?bool
|
|||
}
|
||||
|
||||
// defining and parsing a bool flag
|
||||
// if defined
|
||||
// the value is returned (true/false)
|
||||
// else
|
||||
// the default value is returned
|
||||
// if defined
|
||||
// the value is returned (true/false)
|
||||
// else
|
||||
// the default value is returned
|
||||
// version with abbr
|
||||
//TODO error handling for invalid string to bool conversion
|
||||
// TODO error handling for invalid string to bool conversion
|
||||
pub fn (mut fs FlagParser) bool(name string, abbr byte, bdefault bool, usage string) bool {
|
||||
value := fs.bool_opt(name, abbr, usage) or {
|
||||
return bdefault
|
||||
|
@ -232,12 +231,12 @@ pub fn (mut fs FlagParser) int_opt(name string, abbr byte, usage string) ?int {
|
|||
}
|
||||
|
||||
// defining and parsing an int flag
|
||||
// if defined
|
||||
// the value is returned (int)
|
||||
// else
|
||||
// the default value is returned
|
||||
// if defined
|
||||
// the value is returned (int)
|
||||
// else
|
||||
// the default value is returned
|
||||
// version with abbr
|
||||
//TODO error handling for invalid string to int conversion
|
||||
// TODO error handling for invalid string to int conversion
|
||||
pub fn (mut fs FlagParser) int(name string, abbr byte, idefault int, usage string) int {
|
||||
value := fs.int_opt(name, abbr, usage) or {
|
||||
return idefault
|
||||
|
@ -269,12 +268,12 @@ pub fn (mut fs FlagParser) float_opt(name string, abbr byte, usage string) ?f64
|
|||
}
|
||||
|
||||
// defining and parsing a float flag
|
||||
// if defined
|
||||
// the value is returned (float)
|
||||
// else
|
||||
// the default value is returned
|
||||
// if defined
|
||||
// the value is returned (float)
|
||||
// else
|
||||
// the default value is returned
|
||||
// version with abbr
|
||||
//TODO error handling for invalid string to float conversion
|
||||
// TODO error handling for invalid string to float conversion
|
||||
pub fn (mut fs FlagParser) float(name string, abbr byte, fdefault f64, usage string) f64 {
|
||||
value := fs.float_opt(name, abbr, usage) or {
|
||||
return fdefault
|
||||
|
@ -301,10 +300,10 @@ pub fn (mut fs FlagParser) string_opt(name string, abbr byte, usage string) ?str
|
|||
}
|
||||
|
||||
// defining and parsing a string flag
|
||||
// if defined
|
||||
// the value is returned (string)
|
||||
// else
|
||||
// the default value is returned
|
||||
// if defined
|
||||
// the value is returned (string)
|
||||
// else
|
||||
// the default value is returned
|
||||
// version with abbr
|
||||
pub fn (mut fs FlagParser) string(name string, abbr byte, sdefault string, usage string) string {
|
||||
value := fs.string_opt(name, abbr, usage) or {
|
||||
|
@ -336,7 +335,7 @@ pub fn (mut fs FlagParser) limit_free_args_to_exactly(n int) {
|
|||
|
||||
// this will cause an error in finalize() if free args are out of range
|
||||
// (min, ..., max)
|
||||
pub fn (mut fs FlagParser) limit_free_args(min, max int) {
|
||||
pub fn (mut fs FlagParser) limit_free_args(min int, max int) {
|
||||
if min > max {
|
||||
panic('flag.limit_free_args expect min < max, got $min >= $max')
|
||||
}
|
||||
|
@ -344,42 +343,44 @@ pub fn (mut fs FlagParser) limit_free_args(min, max int) {
|
|||
fs.max_free_args = max
|
||||
}
|
||||
|
||||
pub fn (mut fs FlagParser) arguments_description(description string){
|
||||
pub fn (mut fs FlagParser) arguments_description(description string) {
|
||||
fs.args_description = description
|
||||
}
|
||||
|
||||
// collect all given information and
|
||||
pub fn (fs FlagParser) usage() string {
|
||||
|
||||
positive_min_arg := ( fs.min_free_args > 0 )
|
||||
positive_max_arg := ( fs.max_free_args > 0 && fs.max_free_args != max_args_number )
|
||||
no_arguments := ( fs.min_free_args == 0 && fs.max_free_args == 0 )
|
||||
|
||||
positive_min_arg := (fs.min_free_args > 0)
|
||||
positive_max_arg := (fs.max_free_args > 0 && fs.max_free_args != max_args_number)
|
||||
no_arguments := (fs.min_free_args == 0 && fs.max_free_args == 0)
|
||||
mut adesc := if fs.args_description.len > 0 { fs.args_description } else { '[ARGS]' }
|
||||
if no_arguments { adesc = '' }
|
||||
|
||||
if no_arguments {
|
||||
adesc = ''
|
||||
}
|
||||
mut use := ''
|
||||
if fs.application_version != '' {
|
||||
use += '$fs.application_name $fs.application_version\n'
|
||||
use += '$underline\n'
|
||||
}
|
||||
use += 'Usage: ${fs.application_name} [options] $adesc\n'
|
||||
use += 'Usage: $fs.application_name [options] $adesc\n'
|
||||
use += '\n'
|
||||
if fs.application_description != '' {
|
||||
use += 'Description:\n'
|
||||
use += '$fs.application_description'
|
||||
use += '\n\n'
|
||||
}
|
||||
|
||||
// show a message about the [ARGS]:
|
||||
if positive_min_arg || positive_max_arg || no_arguments {
|
||||
if no_arguments {
|
||||
use += 'This application does not expect any arguments\n\n'
|
||||
goto end_of_arguments_handling
|
||||
}
|
||||
mut s:= []string{}
|
||||
if positive_min_arg { s << 'at least $fs.min_free_args' }
|
||||
if positive_max_arg { s << 'at most $fs.max_free_args' }
|
||||
mut s := []string{}
|
||||
if positive_min_arg {
|
||||
s << 'at least $fs.min_free_args'
|
||||
}
|
||||
if positive_max_arg {
|
||||
s << 'at most $fs.max_free_args'
|
||||
}
|
||||
if positive_min_arg && positive_max_arg && fs.min_free_args == fs.max_free_args {
|
||||
s = ['exactly $fs.min_free_args']
|
||||
}
|
||||
|
@ -387,32 +388,30 @@ pub fn (fs FlagParser) usage() string {
|
|||
use += 'The arguments should be $sargs in number.\n\n'
|
||||
}
|
||||
end_of_arguments_handling:
|
||||
|
||||
if fs.flags.len > 0 {
|
||||
use += 'Options:\n'
|
||||
for f in fs.flags {
|
||||
mut onames := []string{}
|
||||
if f.abbr != 0 {
|
||||
onames << '-${f.abbr.str()}'
|
||||
onames << '-$f.abbr.str()'
|
||||
}
|
||||
if f.name != '' {
|
||||
if !f.val_desc.contains('<bool>') {
|
||||
onames << '--${f.name} $f.val_desc'
|
||||
}else{
|
||||
onames << '--${f.name}'
|
||||
onames << '--$f.name $f.val_desc'
|
||||
} else {
|
||||
onames << '--$f.name'
|
||||
}
|
||||
}
|
||||
option_names := ' ' + onames.join(', ')
|
||||
mut xspace := ''
|
||||
if option_names.len > space.len-2 {
|
||||
xspace = '\n${space}'
|
||||
if option_names.len > space.len - 2 {
|
||||
xspace = '\n$space'
|
||||
} else {
|
||||
xspace = space[option_names.len..]
|
||||
}
|
||||
use += '${option_names}${xspace}${f.usage}\n'
|
||||
use += '$option_names$xspace$f.usage\n'
|
||||
}
|
||||
}
|
||||
|
||||
return use
|
||||
}
|
||||
|
||||
|
@ -426,14 +425,14 @@ pub fn (fs FlagParser) usage() string {
|
|||
pub fn (fs FlagParser) finalize() ?[]string {
|
||||
for a in fs.args {
|
||||
if a.len >= 2 && a[..2] == '--' {
|
||||
return error('Unknown argument \'${a[2..]}\'')
|
||||
return error("Unknown argument \'${a[2..]}\'")
|
||||
}
|
||||
}
|
||||
if fs.args.len < fs.min_free_args && fs.min_free_args > 0 {
|
||||
return error('Expected at least ${fs.min_free_args} arguments, but given $fs.args.len')
|
||||
return error('Expected at least $fs.min_free_args arguments, but given $fs.args.len')
|
||||
}
|
||||
if fs.args.len > fs.max_free_args && fs.max_free_args > 0 {
|
||||
return error('Expected at most ${fs.max_free_args} arguments, but given $fs.args.len')
|
||||
return error('Expected at most $fs.max_free_args arguments, but given $fs.args.len')
|
||||
}
|
||||
if fs.args.len > 0 && fs.max_free_args == 0 && fs.min_free_args == 0 {
|
||||
return error('Expected no arguments, but given $fs.args.len')
@@ -355,7 +355,7 @@ fn test_single_dash() {

fn test_optional_flags() {
mut fp := flag.new_flag_parser(['-a', '10', '-b'])
a := fp.int_opt('some-flag', `a`, '') or {
fp.int_opt('some-flag', `a`, '') or {
assert false
return
}
@@ -10,7 +10,7 @@ const (
is_used = openssl.is_used
)

fn (req &Request) ssl_do(port int, method Method, host_name, path string) ?Response {
fn (req &Request) ssl_do(port int, method Method, host_name string, path string) ?Response {
// ssl_method := C.SSLv23_method()
ssl_method := C.TLSv1_2_method()
ctx := C.SSL_CTX_new(ssl_method)
@@ -5,7 +5,7 @@ module http

import os

pub fn download_file(url, out string) bool {
pub fn download_file(url string, out string) bool {
$if debug_http? {
println('download file url=$url out=$out')
}
|
|
@ -13,7 +13,7 @@ mut:
|
|||
cb DownloadFn
|
||||
}
|
||||
*/
|
||||
fn download_cb(ptr voidptr, size, nmemb size_t, userp voidptr) {
|
||||
fn download_cb(ptr voidptr, size size_t, nmemb size_t, userp voidptr) {
|
||||
/*
|
||||
mut data := &DownloadStruct(userp)
|
||||
written := C.fwrite(ptr, size, nmemb, data.stream)
|
||||
|
@ -24,7 +24,7 @@ fn download_cb(ptr voidptr, size, nmemb size_t, userp voidptr) {
|
|||
*/
|
||||
}
|
||||
|
||||
pub fn download_file_with_progress(url, out string, cb DownloadFn, cb_finished fn()) {
|
||||
pub fn download_file_with_progress(url string, out string, cb DownloadFn, cb_finished fn()) {
|
||||
/*
|
||||
curl := C.curl_easy_init()
|
||||
if isnil(curl) {
|
||||
|
|
|
@ -11,7 +11,7 @@ import net
|
|||
const (
|
||||
max_redirects = 4
|
||||
content_type_default = 'text/plain'
|
||||
bufsize = 1536
|
||||
bufsize = 1536
|
||||
)
|
||||
|
||||
pub struct Request {
|
||||
|
@ -47,18 +47,17 @@ pub:
|
|||
status_code int
|
||||
}
|
||||
|
||||
pub fn new_request(method Method, url_, data string) ?Request {
|
||||
pub fn new_request(method Method, url_ string, data string) ?Request {
|
||||
url := if method == .get { url_ + '?' + data } else { url_ }
|
||||
//println('new req() method=$method url="$url" dta="$data"')
|
||||
// println('new req() method=$method url="$url" dta="$data"')
|
||||
return Request{
|
||||
method: method
|
||||
url: url
|
||||
data: data
|
||||
/*
|
||||
headers: {
|
||||
data: data /*
|
||||
headers: {
|
||||
'Accept-Encoding': 'compress'
|
||||
}
|
||||
*/
|
||||
*/
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -75,7 +74,7 @@ pub fn get(url string) ?Response {
|
|||
return fetch_with_method(.get, url, FetchConfig{})
|
||||
}
|
||||
|
||||
pub fn post(url, data string) ?Response {
|
||||
pub fn post(url string, data string) ?Response {
|
||||
return fetch_with_method(.post, url, {
|
||||
data: data
|
||||
headers: {
|
||||
|
@ -84,7 +83,7 @@ pub fn post(url, data string) ?Response {
|
|||
})
|
||||
}
|
||||
|
||||
pub fn post_json(url, data string) ?Response {
|
||||
pub fn post_json(url string, data string) ?Response {
|
||||
return fetch_with_method(.post, url, {
|
||||
data: data
|
||||
headers: {
|
||||
|
@ -102,7 +101,7 @@ pub fn post_form(url string, data map[string]string) ?Response {
|
|||
})
|
||||
}
|
||||
|
||||
pub fn put(url, data string) ?Response {
|
||||
pub fn put(url string, data string) ?Response {
|
||||
return fetch_with_method(.put, url, {
|
||||
data: data
|
||||
headers: {
|
||||
|
@ -111,7 +110,7 @@ pub fn put(url, data string) ?Response {
|
|||
})
|
||||
}
|
||||
|
||||
pub fn patch(url, data string) ?Response {
|
||||
pub fn patch(url string, data string) ?Response {
|
||||
return fetch_with_method(.patch, url, {
|
||||
data: data
|
||||
headers: {
|
||||
|
@ -133,7 +132,7 @@ pub fn fetch(_url string, config FetchConfig) ?Response {
|
|||
return error('http.fetch: empty url')
|
||||
}
|
||||
url := build_url_from_fetch(_url, config) or {
|
||||
return error('http.fetch: invalid url ${_url}')
|
||||
return error('http.fetch: invalid url $_url')
|
||||
}
|
||||
data := config.data
|
||||
req := Request{
|
||||
|
@ -147,7 +146,7 @@ pub fn fetch(_url string, config FetchConfig) ?Response {
|
|||
user_ptr: 0
|
||||
verbose: config.verbose
|
||||
}
|
||||
res := req.do()?
|
||||
res := req.do() ?
|
||||
return res
|
||||
}
|
||||
|
||||
|
@@ -177,14 +176,14 @@ fn fetch_with_method(method Method, url string, _config FetchConfig) ?Response {
}

fn build_url_from_fetch(_url string, config FetchConfig) ?string {
mut url := urllib.parse(_url)?
mut url := urllib.parse(_url) ?
params := config.params
if params.keys().len == 0 {
return url.str()
}
mut pieces := []string{}
for key in params.keys() {
pieces << '${key}=${params[key]}'
pieces << '$key=${params[key]}'
}
mut query := pieces.join('&')
if url.raw_query.len > 1 {
@@ -195,19 +194,15 @@ fn build_url_from_fetch(_url string, config FetchConfig) ?string {
}

fn (mut req Request) free() {
unsafe {
req.headers.free()
}
unsafe {req.headers.free()}
}

fn (mut resp Response) free() {
unsafe {
resp.headers.free()
}
unsafe {resp.headers.free()}
}

// add_header adds the key and value of an HTTP request header
pub fn (mut req Request) add_header(key, val string) {
pub fn (mut req Request) add_header(key string, val string) {
req.headers[key] = val
}

@@ -229,7 +224,7 @@ pub fn parse_headers(lines []string) map[string]string {
// do will send the HTTP request and returns `http.Response` as soon as the response is recevied
pub fn (req &Request) do() ?Response {
mut url := urllib.parse(req.url) or {
return error('http.Request.do: invalid url ${req.url}')
return error('http.Request.do: invalid url $req.url')
}
mut rurl := url
mut resp := Response{}
@@ -238,7 +233,7 @@ pub fn (req &Request) do() ?Response {
if no_redirects == max_redirects {
return error('http.request.do: maximum number of redirects reached ($max_redirects)')
}
qresp := req.method_and_url_to_response(req.method, rurl)?
qresp := req.method_and_url_to_response(req.method, rurl) ?
resp = qresp
if resp.status_code !in [301, 302, 303, 307, 308] {
break
@@ -264,7 +259,7 @@ fn (req &Request) method_and_url_to_response(method Method, url urllib.URL) ?Res
host_name := url.hostname()
scheme := url.scheme
p := url.path.trim_left('/')
path := if url.query().len > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
path := if url.query().len > 0 { '/$p?$url.query().encode()' } else { '/$p' }
mut nport := url.port().int()
if nport == 0 {
if scheme == 'http' {
@@ -277,11 +272,11 @@ fn (req &Request) method_and_url_to_response(method Method, url urllib.URL) ?Res
// println('fetch $method, $scheme, $host_name, $nport, $path ')
if scheme == 'https' {
// println('ssl_do( $nport, $method, $host_name, $path )')
res := req.ssl_do(nport, method, host_name, path)?
res := req.ssl_do(nport, method, host_name, path) ?
return res
} else if scheme == 'http' {
// println('http_do( $nport, $method, $host_name, $path )')
res := req.http_do(nport, method, host_name, path)?
res := req.http_do(nport, method, host_name, path) ?
return res
}
return error('http.request.method_and_url_to_response: unsupported scheme: "$scheme"')
@@ -322,7 +317,6 @@ fn parse_response(resp string) Response {
// if h.contains('Content-Type') {
// continue
// }

mut key := h[..pos]
lkey := key.to_lower()
val := h[pos + 2..]
@@ -346,7 +340,7 @@ fn parse_response(resp string) Response {
}
}

fn (req &Request) build_request_headers(method Method, host_name, path string) string {
fn (req &Request) build_request_headers(method Method, host_name string, path string) string {
ua := req.user_agent
mut uheaders := []string{}
if 'Host' !in req.headers {
@@ -356,17 +350,16 @@ fn (req &Request) build_request_headers(method Method, host_name, path string) s
uheaders << 'User-Agent: $ua\r\n'
}
if req.data.len > 0 && 'Content-Length' !in req.headers {
uheaders << 'Content-Length: ${req.data.len}\r\n'
uheaders << 'Content-Length: $req.data.len\r\n'
}
for key, val in req.headers {
if key == 'Cookie' {
continue
}
uheaders << '${key}: ${val}\r\n'
uheaders << '$key: $val\r\n'
}
uheaders << req.build_request_cookies_header()
return '$method $path HTTP/1.1\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' +
req.data
return '$method $path HTTP/1.1\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' + req.data
}

fn (req &Request) build_request_cookies_header() string {
@@ -399,13 +392,12 @@ pub fn escape(s string) string {
panic('http.escape() was replaced with http.escape_url()')
}

fn (req &Request) http_do(port int, method Method, host_name, path string) ?Response {
fn (req &Request) http_do(port int, method Method, host_name string, path string) ?Response {
rbuffer := [bufsize]byte{}
mut sb := strings.new_builder(100)
s := req.build_request_headers(method, host_name, path)
client := net.dial(host_name, port)?
client.send(s.str, s.len) or {
}
client := net.dial(host_name, port) ?
client.send(s.str, s.len) or { }
for {
readbytes := client.crecv(rbuffer, bufsize)
if readbytes < 0 {
@@ -416,8 +408,7 @@ fn (req &Request) http_do(port int, method Method, host_name, path string) ?Resp
}
sb.write(tos(rbuffer, readbytes))
}
client.close() or {
}
client.close() or { }
return parse_response(sb.str())
}

@@ -1,48 +1,62 @@
import net.http

fn test_http_get() {
$if !network ? { return }
$if !network ? {
return
}
assert http.get_text('https://vlang.io/version') == '0.1.5'
println('http ok')
}

fn test_http_get_from_vlang_utc_now() {
$if !network ? { return }
$if !network ? {
return
}
urls := ['http://vlang.io/utc_now', 'https://vlang.io/utc_now']
for url in urls {
println('Test getting current time from $url by http.get')
res := http.get(url) or { panic(err) }
res := http.get(url) or {
panic(err)
}
assert 200 == res.status_code
assert res.text.len > 0
assert res.text.int() > 1566403696
println('Current time is: ${res.text.int()}')
println('Current time is: $res.text.int()')
}
}

fn test_public_servers() {
$if !network ? { return }
$if !network ? {
return
}
urls := [
'http://github.com/robots.txt',
'http://google.com/robots.txt',
'https://github.com/robots.txt',
'https://google.com/robots.txt',
// 'http://yahoo.com/robots.txt',
// 'https://yahoo.com/robots.txt',
// 'http://yahoo.com/robots.txt',
// 'https://yahoo.com/robots.txt',
]
for url in urls {
println('Testing http.get on public url: $url ')
res := http.get( url ) or { panic(err) }
res := http.get(url) or {
panic(err)
}
assert 200 == res.status_code
assert res.text.len > 0
}
}

fn test_relative_redirects() {
$if !network ? { return }
$else { return } // tempfix periodic: httpbin relative redirects are broken
res := http.get('https://httpbin.org/relative-redirect/3?abc=xyz') or { panic(err) }
$if !network ? {
return
} $else {
return
} // tempfix periodic: httpbin relative redirects are broken
res := http.get('https://httpbin.org/relative-redirect/3?abc=xyz') or {
panic(err)
}
assert 200 == res.status_code
assert res.text.len > 0
assert res.text.contains('"abc": "xyz"')
}

@@ -4,10 +4,10 @@ import os

pub struct Socket {
pub:
sockfd int
family int
typ int
proto int
sockfd int
family int
typ int
proto int
pub mut:
max_single_send_size int = 64000
}
@@ -75,12 +75,12 @@ fn C.inet_ntop(af int, src voidptr, dst charptr, dst_size int) charptr
fn C.getpeername(sockfd int, addr &C.sockaddr_in, addrsize &int) int

// create socket
pub fn new_socket(family, typ, proto int) ?Socket {
pub fn new_socket(family int, typ int, proto int) ?Socket {
sockfd := C.socket(family, typ, proto)
one := 1
// This is needed so that there are no problems with reusing the
// same port after the application exits.
C.setsockopt(sockfd, C.SOL_SOCKET, C.SO_REUSEADDR, &one, sizeof(int))
C.setsockopt(sockfd, C.SOL_SOCKET, C.SO_REUSEADDR, &one, sizeof(voidptr))
if sockfd == -1 {
return error('net.socket: failed')
}
@@ -98,8 +98,8 @@ pub fn socket_udp() ?Socket {
}

// set socket options
pub fn (s Socket) setsockopt(level, optname int, optvalue &int) ?int {
res := C.setsockopt(s.sockfd, level, optname, optvalue, sizeof(&int))
pub fn (s Socket) setsockopt(level int, optname int, optvalue &int) ?int {
res := C.setsockopt(s.sockfd, level, optname, optvalue, sizeof(int))
if res < 0 {
return error('net.setsocketopt: failed with $res')
}
@@ -153,9 +153,9 @@ pub fn listen(port int) ?Socket {
$if debug {
println('net.listen($port)')
}
s := new_socket(C.AF_INET, C.SOCK_STREAM, 0)?
s.bind(port)?
s.listen()?
s := new_socket(C.AF_INET, C.SOCK_STREAM, 0) ?
s.bind(port) ?
s.listen() ?
return s
}

@@ -222,8 +222,8 @@ pub fn (s Socket) connect(address string, port int) ?int {

// helper method to create socket and connect
pub fn dial(address string, port int) ?Socket {
s := new_socket(C.AF_INET, C.SOCK_STREAM, 0)?
s.connect(address, port)?
s := new_socket(C.AF_INET, C.SOCK_STREAM, 0) ?
s.connect(address, port) ?
return s
}

@@ -22,10 +22,10 @@ enum EncodingMode {

const (
err_msg_escape = 'unescape: invalid URL escape'
err_msg_parse = 'parse: failed parsing url'
err_msg_parse = 'parse: failed parsing url'
)

fn error_msg(message, val string) string {
fn error_msg(message string, val string) string {
mut msg := 'net.urllib.$message'
if val != '' {
msg = '$msg ($val)'
@@ -53,7 +53,8 @@ fn should_escape(c byte, mode EncodingMode) bool {
// we could possibly allow, and parse will reject them if we
// escape them (because hosts can`t use %-encoding for
// ASCII bytes).
if c in [`!`, `$`, `&`, `\\`, `(`, `)`, `*`, `+`, `,`, `;`, `=`, `:`, `[`, `]`, `<`, `>`, `"`] {
if c in
[`!`, `$`, `&`, `\\`, `(`, `)`, `*`, `+`, `,`, `;`, `=`, `:`, `[`, `]`, `<`, `>`, `"`] {
return false
}
}
@@ -100,11 +101,11 @@ fn should_escape(c byte, mode EncodingMode) bool {
// everything, so escape nothing.
return false
}
else {
}}
else {}
}
}
else {
}}
else {}
}
if mode == .encode_fragment {
// RFC 3986 §2.2 allows not escaping sub-delims. A subset of sub-delims are
// included in reserved from RFC 2396 §2.2. The remaining sub-delims do not
@@ -113,11 +114,9 @@ fn should_escape(c byte, mode EncodingMode) bool {
// escape single quote to avoid breaking callers that had previously assumed that
// single quotes would be escaped. See issue #19917.
match c {
`!`, `(`, `)`, `*` {
return false
}
else {
}}
`!`, `(`, `)`, `*` { return false }
else {}
}
}
// Everything else must be escaped.
return true
@@ -150,55 +149,58 @@ fn unescape(s_ string, mode EncodingMode) ?string {
// Count %, check that they're well-formed.
mut n := 0
mut has_plus := false
for i := 0; i < s.len; {
for i := 0; i < s.len; {
x := s[i]
match x {
`%` {
if s == '' {
break
}
n++
if i + 2 >= s.len || !ishex(s[i + 1]) || !ishex(s[i + 2]) {
s = s[i..]
if s.len > 3 {
s = s[..3]
`%` {
if s == '' {
break
}
return error(error_msg(err_msg_escape, s))
}
// Per https://tools.ietf.org/html/rfc3986#page-21
// in the host component %-encoding can only be used
// for non-ASCII bytes.
// But https://tools.ietf.org/html/rfc6874#section-2
// introduces %25 being allowed to escape a percent sign
// in IPv6 scoped-address literals. Yay.
if mode == .encode_host && unhex(s[i + 1]) < 8 && s[i..i + 3] != '%25' {
return error(error_msg(err_msg_escape, s[i..i + 3]))
}
if mode == .encode_zone {
// RFC 6874 says basically 'anything goes' for zone identifiers
// and that even non-ASCII can be redundantly escaped,
// but it seems prudent to restrict %-escaped bytes here to those
// that are valid host name bytes in their unescaped form.
// That is, you can use escaping in the zone identifier but not
// to introduce bytes you couldn't just write directly.
// But Windows puts spaces here! Yay.
v := ( (unhex(s[i + 1])<<byte(4)) | unhex(s[i + 2]))
if s[i..i + 3] != '%25' && v != ` ` && should_escape(v, .encode_host) {
error(error_msg(err_msg_escape, s[i..i + 3]))
n++
if i + 2 >= s.len || !ishex(s[i + 1]) || !ishex(s[i + 2]) {
s = s[i..]
if s.len > 3 {
s = s[..3]
}
return error(error_msg(err_msg_escape, s))
}
// Per https://tools.ietf.org/html/rfc3986#page-21
// in the host component %-encoding can only be used
// for non-ASCII bytes.
// But https://tools.ietf.org/html/rfc6874#section-2
// introduces %25 being allowed to escape a percent sign
// in IPv6 scoped-address literals. Yay.
if mode == .encode_host && unhex(s[i + 1]) < 8 && s[i..i + 3] != '%25' {
return error(error_msg(err_msg_escape, s[i..i + 3]))
}
if mode == .encode_zone {
// RFC 6874 says basically 'anything goes' for zone identifiers
// and that even non-ASCII can be redundantly escaped,
// but it seems prudent to restrict %-escaped bytes here to those
// that are valid host name bytes in their unescaped form.
// That is, you can use escaping in the zone identifier but not
// to introduce bytes you couldn't just write directly.
// But Windows puts spaces here! Yay.
v := ((unhex(s[i + 1]) << byte(4)) | unhex(s[i + 2]))
if s[i..i + 3] != '%25' && v != ` ` && should_escape(v, .encode_host) {
error(error_msg(err_msg_escape, s[i..i + 3]))
}
}
i += 3
}
i += 3
}
`+` {
has_plus = mode == .encode_query_component
i++
}
else {
if (mode == .encode_host || mode == .encode_zone) && s[i] < 0x80 && should_escape(s[i], mode) {
error(error_msg('unescape: invalid character in host name', s[i..i + 1]))
`+` {
has_plus = mode == .encode_query_component
i++
}
i++
}}
else {
if (mode == .encode_host ||
mode == .encode_zone) &&
s[i] < 0x80 && should_escape(s[i], mode) {
error(error_msg('unescape: invalid character in host name', s[i..i + 1]))
}
i++
}
}
}
if n == 0 && !has_plus {
return s
@@ -208,20 +210,20 @@ fn unescape(s_ string, mode EncodingMode) ?string {
x := s[i]
match x {
`%` {
t.write( ((unhex(s[i + 1])<<byte(4)) | unhex(s[i + 2])).str() )
t.write(((unhex(s[i + 1]) << byte(4)) | unhex(s[i + 2])).str())
i += 2
}
`+` {
if mode == .encode_query_component {
t.write(' ')
}
else {
} else {
t.write('+')
}
}
else {
t.write(s[i].str())
}}
}
}
}
return t.str()
}
@@ -242,13 +244,12 @@ fn escape(s string, mode EncodingMode) string {
mut space_count := 0
mut hex_count := 0
mut c := byte(0)
for i in 0..s.len {
for i in 0 .. s.len {
c = s[i]
if should_escape(c, mode) {
if c == ` ` && mode == .encode_query_component {
space_count++
}
else {
} else {
hex_count++
}
}
@@ -256,18 +257,17 @@ fn escape(s string, mode EncodingMode) string {
if space_count == 0 && hex_count == 0 {
return s
}
buf := []byte{len:(64)}
buf := []byte{len: (64)}
mut t := []byte{}
required := s.len + 2 * hex_count
if required <= buf.len {
t = buf[..required]
}
else {
t = []byte{len:(required)}
} else {
t = []byte{len: (required)}
}
if hex_count == 0 {
copy(t, s.bytes())
for i in 0..s.len {
for i in 0 .. s.len {
if s[i] == ` ` {
t[i] = `+`
}
@@ -276,19 +276,17 @@ fn escape(s string, mode EncodingMode) string {
}
upperhex := '0123456789ABCDEF'
mut j := 0
for i in 0..s.len {
for i in 0 .. s.len {
c1 := s[i]
if c1 == ` ` && mode == .encode_query_component {
t[j] = `+`
j++
}
else if should_escape(c1, mode) {
} else if should_escape(c1, mode) {
t[j] = `%`
t[j + 1] = upperhex[c1>>4]
t[j + 1] = upperhex[c1 >> 4]
t[j + 2] = upperhex[c1 & 15]
j += 3
}
else {
} else {
t[j] = s[i]
j++
}
@@ -345,9 +343,8 @@ pub fn user(username string) &Userinfo {
// ``is NOT RECOMMENDED, because the passing of authentication
// information in clear text (such as URI) has proven to be a
// security risk in almost every case where it has been used.''
fn user_password(username, password string) &Userinfo {
return &Userinfo{
username,password,true}
fn user_password(username string, password string) &Userinfo {
return &Userinfo{username, password, true}
}

// The Userinfo type is an immutable encapsulation of username and
@@ -382,23 +379,20 @@ fn (u &Userinfo) str() string {
// (scheme must be [a-zA-Z][a-zA-Z0-9+-.]*)
// If so, return [scheme, path]; else return ['', rawurl]
fn split_by_scheme(rawurl string) ?[]string {
for i in 0..rawurl.len {
for i in 0 .. rawurl.len {
c := rawurl[i]
if (`a` <= c && c <= `z`) || (`A` <= c && c <= `Z`) {
// do nothing
}
else if (`0` <= c && c <= `9`) || (c == `+` || c == `-` || c == `.`) {
} else if (`0` <= c && c <= `9`) || (c == `+` || c == `-` || c == `.`) {
if i == 0 {
return ['', rawurl]
}
}
else if c == `:` {
} else if c == `:` {
if i == 0 {
return error(error_msg('split_by_scheme: missing protocol scheme', ''))
}
return [rawurl[..i], rawurl[i + 1..]]
}
else {
} else {
// we have encountered an invalid character,
// so there is no valid scheme
return ['', rawurl]
@@ -417,15 +411,15 @@ fn get_scheme(rawurl string) ?string {
// split slices s into two substrings separated by the first occurence of
// sep. If cutc is true then sep is included with the second substring.
// If sep does not occur in s then s and the empty string is returned.
fn split(s string, sep byte, cutc bool) (string,string) {
fn split(s string, sep byte, cutc bool) (string, string) {
i := s.index_byte(sep)
if i < 0 {
return s,''
return s, ''
}
if cutc {
return s[..i],s[i + 1..]
return s[..i], s[i + 1..]
}
return s[..i],s[i..]
return s[..i], s[i..]
}

// parse parses rawurl into a URL structure.
@@ -436,7 +430,7 @@ fn split(s string, sep byte, cutc bool) (string,string) {
// error, due to parsing ambiguities.
pub fn parse(rawurl string) ?URL {
// Cut off #frag
u,frag := split(rawurl, `#`, true)
u, frag := split(rawurl, `#`, true)
mut url := parse_url(u, false) or {
return error(error_msg(err_msg_parse, u))
}
@@ -479,7 +473,7 @@ fn parse_url(rawurl string, via_request bool) ?URL {
}
// Split off possible leading 'http:', 'mailto:', etc.
// Cannot contain escaped characters.
p := split_by_scheme(rawurl)?
p := split_by_scheme(rawurl) ?
url.scheme = p[0]
mut rest := p[1]
url.scheme = url.scheme.to_lower()
@@ -487,9 +481,8 @@ fn parse_url(rawurl string, via_request bool) ?URL {
if rest.ends_with('?') && !rest[..1].contains('?') {
url.force_query = true
rest = rest[..rest.len - 1]
}
else {
r,raw_query := split(rest, `?`, true)
} else {
r, raw_query := split(rest, `?`, true)
rest = r
url.raw_query = raw_query
}
@@ -516,13 +509,14 @@ fn parse_url(rawurl string, via_request bool) ?URL {
}
if colon >= 0 && (slash < 0 || colon < slash) {
// First path segment has colon. Not allowed in relative URL.
return error(error_msg('parse_url: first path segment in URL cannot contain colon', ''))
return error(error_msg('parse_url: first path segment in URL cannot contain colon',
''))
}
}
if ((url.scheme != '' || !via_request) && !rest.starts_with('///')) && rest.starts_with('//') {
authority,r := split(rest[2..], `/`, false)
authority, r := split(rest[2..], `/`, false)
rest = r
a := parse_authority(authority)?
a := parse_authority(authority) ?
url.user = a.user
url.host = a.host
}
@@ -530,7 +524,7 @@ fn parse_url(rawurl string, via_request bool) ?URL {
// raw_path is a hint of the encoding of path. We don't want to set it if
// the default escaping of path is equivalent, to help make sure that people
// don't rely on it in general.
url.set_path(rest)?
url.set_path(rest) ?
return url
}

@@ -546,11 +540,10 @@ fn parse_authority(authority string) ?ParseAuthorityRes {
mut host := ''
mut zuser := user('')
if i < 0 {
h := parse_host(authority)?
h := parse_host(authority) ?
host = h
}
else {
h := parse_host(authority[i + 1..])?
} else {
h := parse_host(authority[i + 1..]) ?
host = h
}
if i < 0 {
@@ -564,15 +557,14 @@ fn parse_authority(authority string) ?ParseAuthorityRes {
return error(error_msg('parse_authority: invalid userinfo', ''))
}
if !userinfo.contains(':') {
u := unescape(userinfo, .encode_user_password)?
u := unescape(userinfo, .encode_user_password) ?
userinfo = u
zuser = user(userinfo)
}
else {
mut username,mut password := split(userinfo, `:`, true)
u := unescape(username, .encode_user_password)?
} else {
mut username, mut password := split(userinfo, `:`, true)
u := unescape(username, .encode_user_password) ?
username = u
p := unescape(password, .encode_user_password)?
p := unescape(password, .encode_user_password) ?
password = p
zuser = user_password(username, password)
}
@@ -593,7 +585,8 @@ fn parse_host(host string) ?string {
}
mut colon_port := host[i + 1..]
if !valid_optional_port(colon_port) {
return error(error_msg('parse_host: invalid port $colon_port after host ', ''))
return error(error_msg('parse_host: invalid port $colon_port after host ',
''))
}
// RFC 6874 defines that %25 (%-encoded percent) introduces
// the zone identifier, and the zone identifier can use basically
@@ -601,7 +594,7 @@ fn parse_host(host string) ?string {
// can only %-encode non-ASCII bytes.
// We do impose some restrictions on the zone, to avoid stupidity
// like newlines.
if zone:=host[..i].index('%25'){
if zone := host[..i].index('%25') {
host1 := unescape(host[..zone], .encode_host) or {
return err
}
@@ -613,10 +606,11 @@ fn parse_host(host string) ?string {
}
return host1 + host2 + host3
}
if idx:=host.last_index(':'){
if idx := host.last_index(':') {
colon_port = host[idx..]
if !valid_optional_port(colon_port) {
return error(error_msg('parse_host: invalid port $colon_port after host ', ''))
return error(error_msg('parse_host: invalid port $colon_port after host ',
''))
}
}
}
@@ -627,6 +621,7 @@ fn parse_host(host string) ?string {
// host = h
// return host
}

// set_path sets the path and raw_path fields of the URL based on the provided
// escaped path p. It maintains the invariant that raw_path is only specified
// when it differs from the default encoding of the path.
@@ -636,14 +631,13 @@ fn parse_host(host string) ?string {
// set_path will return an error only if the provided path contains an invalid
// escaping.
pub fn (mut u URL) set_path(p string) ?bool {
path := unescape(p, .encode_path)?
path := unescape(p, .encode_path) ?
u.path = path
escp := escape(path, .encode_path)
if p == escp {
// Default encoding is fine.
u.raw_path = ''
}
else {
} else {
u.raw_path = p
}
return true
@@ -674,7 +668,7 @@ fn (u &URL) escaped_path() string {
// valid_encoded_path reports whether s is a valid encoded path.
// It must not contain any bytes that require escaping during path encoding.
fn valid_encoded_path(s string) bool {
for i in 0..s.len {
for i in 0 .. s.len {
// RFC 3986, Appendix A.
// pchar = unreserved / pct-encoded / sub-delims / ':' / '@'.
// should_escape is not quite compliant with the RFC,
@@ -695,7 +689,8 @@ fn valid_encoded_path(s string) bool {
if should_escape(s[i], .encode_path) {
return false
}
}}
}
}
}
return true
}
@@ -746,8 +741,7 @@ pub fn (u URL) str() string {
}
if u.opaque != '' {
buf.write(u.opaque)
}
else {
} else {
if u.scheme != '' || u.host != '' || (u.user != 0 && !u.user.empty()) {
if u.host != '' || u.path != '' || !u.user.empty() {
buf.write('//')
@@ -804,7 +798,7 @@ pub fn (u URL) str() string {
// interpreted as a key set to an empty value.
pub fn parse_query(query string) ?Values {
mut m := new_values()
parse_query_values(mut m, query)?
parse_query_values(mut m, query) ?
return m
}

@@ -825,15 +819,14 @@ fn parse_query_values(mut m Values, query string) ?bool {
if i >= 0 {
q = key[i + 1..]
key = key[..i]
}
else {
} else {
q = ''
}
if key == '' {
continue
}
mut value := ''
if idx:=key.index('='){
if idx := key.index('=') {
i = idx
value = key[i + 1..]
key = key[..i]
@@ -885,18 +878,16 @@ pub fn (v Values) encode() string {

// resolve_path applies special path segments from refs and applies
// them to base, per RFC 3986.
fn resolve_path(base, ref string) string {
fn resolve_path(base string, ref string) string {
mut full := ''
if ref == '' {
full = base
}
else if ref[0] != `/` {
} else if ref[0] != `/` {
i := base.last_index('/') or {
-1
}
full = base[..i + 1] + ref
}
else {
} else {
full = ref
}
if full == '' {
@@ -916,7 +907,8 @@ fn resolve_path(base, ref string) string {
}
else {
dst << elem
}}
}
}
}
last := src[src.len - 1]
if last == '.' || last == '..' {
@@ -936,7 +928,7 @@ pub fn (u &URL) is_abs() bool {
// may be relative or absolute. parse returns nil, err on parse
// failure, otherwise its return value is the same as resolve_reference.
pub fn (u &URL) parse(ref string) ?URL {
refurl := parse(ref)?
refurl := parse(ref) ?
return u.resolve_reference(refurl)
}

@@ -955,7 +947,7 @@ pub fn (u &URL) resolve_reference(ref &URL) ?URL {
// The 'absoluteURI' or 'net_path' cases.
// We can ignore the error from set_path since we know we provided a
// validly-escaped path.
url.set_path(resolve_path(ref.escaped_path(), ''))?
url.set_path(resolve_path(ref.escaped_path(), '')) ?
return url
}
if ref.opaque != '' {
@@ -973,7 +965,7 @@ pub fn (u &URL) resolve_reference(ref &URL) ?URL {
// The 'abs_path' or 'rel_path' cases.
url.host = u.host
url.user = u.user
url.set_path(resolve_path(u.escaped_path(), ref.escaped_path()))?
url.set_path(resolve_path(u.escaped_path(), ref.escaped_path())) ?
return url
}

@@ -994,8 +986,7 @@ pub fn (u &URL) request_uri() string {
if result == '' {
result = '/'
}
}
else {
} else {
if result.starts_with('//') {
result = u.scheme + ':' + result
}
@@ -1011,21 +1002,21 @@ pub fn (u &URL) request_uri() string {
// If the result is enclosed in square brackets, as literal IPv6 addresses are,
// the square brackets are removed from the result.
pub fn (u &URL) hostname() string {
host,_ := split_host_port(u.host)
host, _ := split_host_port(u.host)
return host
}

// port returns the port part of u.host, without the leading colon.
// If u.host doesn't contain a port, port returns an empty string.
pub fn (u &URL) port() string {
_,port := split_host_port(u.host)
_, port := split_host_port(u.host)
return port
}

// split_host_port separates host and port. If the port is not valid, it returns
// the entire input as host, and it doesn't check the validity of the host.
// Per RFC 3986, it requires ports to be numeric.
fn split_host_port(hostport string) (string,string) {
fn split_host_port(hostport string) (string, string) {
mut host := hostport
mut port := ''
colon := host.last_index_byte(`:`)
@@ -1036,7 +1027,7 @@ fn split_host_port(hostport string) (string,string) {
if host.starts_with('[') && host.ends_with(']') {
host = host[1..host.len - 1]
}
return host,port
return host, port
}

// valid_userinfo reports whether s is a valid userinfo string per RFC 3986
@@ -1059,19 +1050,16 @@ pub fn valid_userinfo(s string) bool {
continue
}
match r {
`-`, `.`, `_`, `:`, `~`, `!`, `$`, `&`, `\\`, `(`, `)`, `*`, `+`, `,`, `;`, `=`, `%`, `@` {
continue
}
else {
return false
}}
`-`, `.`, `_`, `:`, `~`, `!`, `$`, `&`, `\\`, `(`, `)`, `*`, `+`, `,`, `;`, `=`, `%`, `@` { continue }
else { return false }
}
}
return true
}

// string_contains_ctl_byte reports whether s contains any ASCII control character.
fn string_contains_ctl_byte(s string) bool {
for i in 0..s.len {
for i in 0 .. s.len {
b := s[i]
if b < ` ` || b == 0x7f {
return true
@@ -1083,11 +1071,9 @@ fn string_contains_ctl_byte(s string) bool {
pub fn ishex(c byte) bool {
if `0` <= c && c <= `9` {
return true
}
else if `a` <= c && c <= `f` {
} else if `a` <= c && c <= `f` {
return true
}
else if `A` <= c && c <= `F` {
} else if `A` <= c && c <= `F` {
return true
}
return false
@@ -1096,11 +1082,9 @@ pub fn ishex(c byte) bool {
fn unhex(c byte) byte {
if `0` <= c && c <= `9` {
return c - `0`
}
else if `a` <= c && c <= `f` {
} else if `a` <= c && c <= `f` {
return c - `a` + 10
}
else if `A` <= c && c <= `F` {
} else if `A` <= c && c <= `F` {
return c - `A` + 10
}
return 0

@@ -11,7 +11,7 @@ pub mut:
struct Values {
pub mut:
data map[string]Value
len int
len int
}

// new_values returns a new Values struct for creating
@@ -20,7 +20,7 @@ pub mut:
// values.encode() will return the encoded data
pub fn new_values() Values {
return Values{
data: map[string]Value
data: map[string]Value{}
}
}

@@ -61,7 +61,7 @@ pub fn (v &Values) get_all(key string) []string {

// set sets the key to value. It replaces any existing
// values.
pub fn (mut v Values) set(key, value string) {
pub fn (mut v Values) set(key string, value string) {
mut a := v.data[key]
a.data = [value]
v.data[key] = a
@@ -70,7 +70,7 @@ pub fn (mut v Values) set(key, value string) {

// add adds the value to key. It appends to any existing
// values associated with key.
pub fn (mut v Values) add(key, value string) {
pub fn (mut v Values) add(key string, value string) {
mut a := v.data[key]
if a.data.len == 0 {
a.data = []

@@ -191,13 +191,13 @@ pub fn (mut nodes []DocNode) sort_by_category() {
nodes.sort_with_compare(compare_nodes_by_category)
}

fn compare_nodes_by_name(a, b &DocNode) int {
fn compare_nodes_by_name(a &DocNode, b &DocNode) int {
al := a.name.to_lower()
bl := b.name.to_lower()
return compare_strings(al, bl)
}

fn compare_nodes_by_category(a, b &DocNode) int {
fn compare_nodes_by_category(a &DocNode, b &DocNode) int {
al := a.attrs['category']
bl := b.attrs['category']
return compare_strings(al, bl)
@@ -217,7 +217,7 @@ pub fn (nodes []DocNode) find_children_of(parent string) []DocNode {
return nodes.find_nodes_with_attr('parent', parent)
}

pub fn (nodes []DocNode) find_nodes_with_attr(attr_name, value string) []DocNode {
pub fn (nodes []DocNode) find_nodes_with_attr(attr_name string, value string) []DocNode {
mut subgroup := []DocNode{}
if attr_name.len == 0 {
return subgroup
@@ -509,7 +509,7 @@ fn (mut d Doc) generate() ?Doc {
return *d
}

pub fn generate_from_pos(input_path, filename string, pos int) ?Doc {
pub fn generate_from_pos(input_path string, filename string, pos int) ?Doc {
mut doc := new(input_path)
doc.pub_only = false
doc.with_comments = true
@@ -519,7 +519,7 @@ pub fn generate_from_pos(input_path, filename string, pos int) ?Doc {
return doc.generate()
}

pub fn generate(input_path string, pub_only, with_comments bool) ?Doc {
pub fn generate(input_path string, pub_only bool, with_comments bool) ?Doc {
mut doc := new(input_path)
doc.pub_only = pub_only
doc.with_comments = with_comments