refactor: apply new vfmt defaults
parent 53f5b68d08
commit 5f21e256ee
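The diff below is purely mechanical: the new vfmt default drops the space before the error-propagation operator, turning every `call() ?` into `call()?`. A minimal sketch of the rule, using a hypothetical helper function that is not part of this commit:

import os

fn read_config(path string) ?string {
	// Old vfmt default kept a space before the operator:
	//     text := os.read_file(path) ?
	// The new default attaches it directly to the call:
	text := os.read_file(path)?

	return text
}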
@@ -46,14 +46,14 @@ pub fn create_build_image(base_image string) ?string {
 	image_tag := if image_parts.len > 1 { image_parts[1] } else { 'latest' }

 	// We pull the provided image
-	docker.pull_image(image_name, image_tag) ?
+	docker.pull_image(image_name, image_tag)?

-	id := docker.create_container(c) ?
-	docker.start_container(id) ?
+	id := docker.create_container(c)?
+	docker.start_container(id)?

 	// This loop waits until the container has stopped, so we can remove it after
 	for {
-		data := docker.inspect_container(id) ?
+		data := docker.inspect_container(id)?

 		if !data.state.running {
 			break
@@ -67,8 +67,8 @@ pub fn create_build_image(base_image string) ?string {
 	// TODO also add the base image's name into the image name to prevent
 	// conflicts.
 	tag := time.sys_mono_now().str()
-	image := docker.create_image_from_container(id, 'vieter-build', tag) ?
-	docker.remove_container(id) ?
+	image := docker.create_image_from_container(id, 'vieter-build', tag)?
+	docker.remove_container(id)?

 	return image.id
 }
@@ -112,21 +112,21 @@ pub fn build_repo(address string, api_key string, base_image_id string, repo &db
 		user: 'builder:builder'
 	}

-	id := docker.create_container(c) ?
-	docker.start_container(id) ?
+	id := docker.create_container(c)?
+	docker.start_container(id)?

-	mut data := docker.inspect_container(id) ?
+	mut data := docker.inspect_container(id)?

 	// This loop waits until the container has stopped, so we can remove it after
 	for data.state.running {
 		time.sleep(1 * time.second)

-		data = docker.inspect_container(id) ?
+		data = docker.inspect_container(id)?
 	}

-	logs := docker.get_container_logs(id) ?
+	logs := docker.get_container_logs(id)?

-	docker.remove_container(id) ?
+	docker.remove_container(id)?

 	return BuildResult{
 		start_time: data.state.start_time
@@ -139,20 +139,20 @@ pub fn build_repo(address string, api_key string, base_image_id string, repo &db
 // build builds every Git repo in the server's list.
 fn build(conf Config, repo_id int) ? {
 	c := client.new(conf.address, conf.api_key)
-	repo := c.get_git_repo(repo_id) ?
+	repo := c.get_git_repo(repo_id)?

 	build_arch := os.uname().machine

 	println('Creating base image...')
-	image_id := create_build_image(conf.base_image) ?
+	image_id := create_build_image(conf.base_image)?

 	println('Running build...')
-	res := build_repo(conf.address, conf.api_key, image_id, repo) ?
+	res := build_repo(conf.address, conf.api_key, image_id, repo)?

 	println('Removing build image...')
-	docker.remove_image(image_id) ?
+	docker.remove_image(image_id)?

 	println('Uploading logs to Vieter...')
 	c.add_build_log(repo.id, res.start_time, res.end_time, build_arch, res.exit_code,
-		res.logs) ?
+		res.logs)?
 }
@@ -18,12 +18,12 @@ pub fn cmd() cli.Command {
 		usage: 'id'
 		description: 'Build the repository with the given ID.'
 		execute: fn (cmd cli.Command) ? {
-			config_file := cmd.flags.get_string('config-file') ?
-			conf := env.load<Config>(config_file) ?
+			config_file := cmd.flags.get_string('config-file')?
+			conf := env.load<Config>(config_file)?

 			id := cmd.args[0].int()

-			build(conf, id) ?
+			build(conf, id)?
 		}
 	}
 }
@@ -37,10 +37,10 @@ fn (c &Client) send_request_raw(method Method, url string, params map[string]str
 		full_url = '$full_url?$params_str'
 	}

-	mut req := http.new_request(method, full_url, body) ?
-	req.add_custom_header('X-Api-Key', c.api_key) ?
+	mut req := http.new_request(method, full_url, body)?
+	req.add_custom_header('X-Api-Key', c.api_key)?

-	res := req.do() ?
+	res := req.do()?

 	return res
 }
@@ -53,15 +53,15 @@ fn (c &Client) send_request<T>(method Method, url string, params map[string]stri
 // send_request_with_body<T> calls send_request_raw_response & parses its
 // output as a Response<T> object.
 fn (c &Client) send_request_with_body<T>(method Method, url string, params map[string]string, body string) ?Response<T> {
-	res_text := c.send_request_raw_response(method, url, params, body) ?
-	data := json.decode(Response<T>, res_text) ?
+	res_text := c.send_request_raw_response(method, url, params, body)?
+	data := json.decode(Response<T>, res_text)?

 	return data
 }

 // send_request_raw_response returns the raw text response for an HTTP request.
 fn (c &Client) send_request_raw_response(method Method, url string, params map[string]string, body string) ?string {
-	res := c.send_request_raw(method, url, params, body) ?
+	res := c.send_request_raw(method, url, params, body)?

 	return res.text
 }
@@ -6,14 +6,14 @@ import response { Response }

 // get_git_repos returns the current list of repos.
 pub fn (c &Client) get_git_repos() ?[]GitRepo {
-	data := c.send_request<[]GitRepo>(Method.get, '/api/repos', {}) ?
+	data := c.send_request<[]GitRepo>(Method.get, '/api/repos', {})?

 	return data.data
 }

 // get_git_repo returns the repo for a specific ID.
 pub fn (c &Client) get_git_repo(id int) ?GitRepo {
-	data := c.send_request<GitRepo>(Method.get, '/api/repos/$id', {}) ?
+	data := c.send_request<GitRepo>(Method.get, '/api/repos/$id', {})?

 	return data.data
 }
@@ -30,14 +30,14 @@ pub fn (c &Client) add_git_repo(url string, branch string, repo string, arch []s
 		params['arch'] = arch.join(',')
 	}

-	data := c.send_request<string>(Method.post, '/api/repos', params) ?
+	data := c.send_request<string>(Method.post, '/api/repos', params)?

 	return data
 }

 // remove_git_repo removes the repo with the given ID from the server.
 pub fn (c &Client) remove_git_repo(id int) ?Response<string> {
-	data := c.send_request<string>(Method.delete, '/api/repos/$id', {}) ?
+	data := c.send_request<string>(Method.delete, '/api/repos/$id', {})?

 	return data
 }
@@ -45,7 +45,7 @@ pub fn (c &Client) remove_git_repo(id int) ?Response<string> {
 // patch_git_repo sends a PATCH request to the given repo with the params as
 // payload.
 pub fn (c &Client) patch_git_repo(id int, params map[string]string) ?Response<string> {
-	data := c.send_request<string>(Method.patch, '/api/repos/$id', params) ?
+	data := c.send_request<string>(Method.patch, '/api/repos/$id', params)?

 	return data
 }
@@ -7,7 +7,7 @@ import time

 // get_build_logs returns all build logs.
 pub fn (c &Client) get_build_logs() ?Response<[]BuildLog> {
-	data := c.send_request<[]BuildLog>(Method.get, '/api/logs', {}) ?
+	data := c.send_request<[]BuildLog>(Method.get, '/api/logs', {})?

 	return data
 }
@@ -18,21 +18,21 @@ pub fn (c &Client) get_build_logs_for_repo(repo_id int) ?Response<[]BuildLog> {
 		'repo': repo_id.str()
 	}

-	data := c.send_request<[]BuildLog>(Method.get, '/api/logs', params) ?
+	data := c.send_request<[]BuildLog>(Method.get, '/api/logs', params)?

 	return data
 }

 // get_build_log returns a specific build log.
 pub fn (c &Client) get_build_log(id int) ?Response<BuildLog> {
-	data := c.send_request<BuildLog>(Method.get, '/api/logs/$id', {}) ?
+	data := c.send_request<BuildLog>(Method.get, '/api/logs/$id', {})?

 	return data
 }

 // get_build_log_content returns the contents of the build log file.
 pub fn (c &Client) get_build_log_content(id int) ?string {
-	data := c.send_request_raw_response(Method.get, '/api/logs/$id/content', {}, '') ?
+	data := c.send_request_raw_response(Method.get, '/api/logs/$id/content', {}, '')?

 	return data
 }
@@ -47,7 +47,7 @@ pub fn (c &Client) add_build_log(repo_id int, start_time time.Time, end_time tim
 		'exitCode': exit_code.str()
 	}

-	data := c.send_request_with_body<string>(Method.post, '/api/logs', params, content) ?
+	data := c.send_request_with_body<string>(Method.post, '/api/logs', params, content)?

 	return data
 }
@@ -20,10 +20,10 @@ pub fn cmd() cli.Command {
 				name: 'list'
 				description: 'List the current repos.'
 				execute: fn (cmd cli.Command) ? {
-					config_file := cmd.flags.get_string('config-file') ?
-					conf := env.load<Config>(config_file) ?
+					config_file := cmd.flags.get_string('config-file')?
+					conf := env.load<Config>(config_file)?

-					list(conf) ?
+					list(conf)?
 				}
 			},
 			cli.Command{
@@ -32,10 +32,10 @@ pub fn cmd() cli.Command {
 				usage: 'url branch repo'
 				description: 'Add a new repository.'
 				execute: fn (cmd cli.Command) ? {
-					config_file := cmd.flags.get_string('config-file') ?
-					conf := env.load<Config>(config_file) ?
+					config_file := cmd.flags.get_string('config-file')?
+					conf := env.load<Config>(config_file)?

-					add(conf, cmd.args[0], cmd.args[1], cmd.args[2]) ?
+					add(conf, cmd.args[0], cmd.args[1], cmd.args[2])?
 				}
 			},
 			cli.Command{
@@ -44,10 +44,10 @@ pub fn cmd() cli.Command {
 				usage: 'id'
 				description: 'Remove a repository that matches the given ID prefix.'
 				execute: fn (cmd cli.Command) ? {
-					config_file := cmd.flags.get_string('config-file') ?
-					conf := env.load<Config>(config_file) ?
+					config_file := cmd.flags.get_string('config-file')?
+					conf := env.load<Config>(config_file)?

-					remove(conf, cmd.args[0]) ?
+					remove(conf, cmd.args[0])?
 				}
 			},
 			cli.Command{
@@ -56,10 +56,10 @@ pub fn cmd() cli.Command {
 				usage: 'id'
 				description: 'Show detailed information for the repo matching the ID prefix.'
 				execute: fn (cmd cli.Command) ? {
-					config_file := cmd.flags.get_string('config-file') ?
-					conf := env.load<Config>(config_file) ?
+					config_file := cmd.flags.get_string('config-file')?
+					conf := env.load<Config>(config_file)?

-					info(conf, cmd.args[0]) ?
+					info(conf, cmd.args[0])?
 				}
 			},
 			cli.Command{
@@ -95,8 +95,8 @@ pub fn cmd() cli.Command {
 					},
 				]
 				execute: fn (cmd cli.Command) ? {
-					config_file := cmd.flags.get_string('config-file') ?
-					conf := env.load<Config>(config_file) ?
+					config_file := cmd.flags.get_string('config-file')?
+					conf := env.load<Config>(config_file)?

 					found := cmd.flags.get_all_found()

@@ -104,11 +104,11 @@ pub fn cmd() cli.Command {

 					for f in found {
 						if f.name != 'config-file' {
-							params[f.name] = f.get_string() ?
+							params[f.name] = f.get_string()?
 						}
 					}

-					patch(conf, cmd.args[0], params) ?
+					patch(conf, cmd.args[0], params)?
 				}
 			},
 		]
@@ -121,7 +121,7 @@ pub fn cmd() cli.Command {
 // list prints out a list of all repositories.
 fn list(conf Config) ? {
 	c := client.new(conf.address, conf.api_key)
-	repos := c.get_git_repos() ?
+	repos := c.get_git_repos()?

 	for repo in repos {
 		println('$repo.id\t$repo.url\t$repo.branch\t$repo.repo')
@@ -131,7 +131,7 @@ fn list(conf Config) ? {
 // add adds a new repository to the server's list.
 fn add(conf Config, url string, branch string, repo string) ? {
 	c := client.new(conf.address, conf.api_key)
-	res := c.add_git_repo(url, branch, repo, []) ?
+	res := c.add_git_repo(url, branch, repo, [])?

 	println(res.message)
 }
@@ -143,7 +143,7 @@ fn remove(conf Config, id string) ? {

 	if id_int != 0 {
 		c := client.new(conf.address, conf.api_key)
-		res := c.remove_git_repo(id_int) ?
+		res := c.remove_git_repo(id_int)?
 		println(res.message)
 	}
 }
@@ -161,7 +161,7 @@ fn patch(conf Config, id string, params map[string]string) ? {
 	id_int := id.int()
 	if id_int != 0 {
 		c := client.new(conf.address, conf.api_key)
-		res := c.patch_git_repo(id_int, params) ?
+		res := c.patch_git_repo(id_int, params)?

 		println(res.message)
 	}
@@ -176,6 +176,6 @@ fn info(conf Config, id string) ? {
 	}

 	c := client.new(conf.address, conf.api_key)
-	repo := c.get_git_repo(id_int) ?
+	repo := c.get_git_repo(id_int)?
 	println(repo)
 }
@@ -27,12 +27,12 @@ pub fn cmd() cli.Command {
 					},
 				]
 				execute: fn (cmd cli.Command) ? {
-					config_file := cmd.flags.get_string('config-file') ?
-					conf := env.load<Config>(config_file) ?
+					config_file := cmd.flags.get_string('config-file')?
+					conf := env.load<Config>(config_file)?

-					repo_id := cmd.flags.get_int('repo') ?
+					repo_id := cmd.flags.get_int('repo')?

-					if repo_id == 0 { list(conf) ? } else { list_for_repo(conf, repo_id) ? }
+					if repo_id == 0 { list(conf)? } else { list_for_repo(conf, repo_id)? }
 				}
 			},
 			cli.Command{
@@ -41,11 +41,11 @@ pub fn cmd() cli.Command {
 				usage: 'id'
 				description: 'Show all info for a specific build log.'
 				execute: fn (cmd cli.Command) ? {
-					config_file := cmd.flags.get_string('config-file') ?
-					conf := env.load<Config>(config_file) ?
+					config_file := cmd.flags.get_string('config-file')?
+					conf := env.load<Config>(config_file)?

 					id := cmd.args[0].int()
-					info(conf, id) ?
+					info(conf, id)?
 				}
 			},
 			cli.Command{
@@ -54,11 +54,11 @@ pub fn cmd() cli.Command {
 				usage: 'id'
 				description: 'Output the content of a build log to stdout.'
 				execute: fn (cmd cli.Command) ? {
-					config_file := cmd.flags.get_string('config-file') ?
-					conf := env.load<Config>(config_file) ?
+					config_file := cmd.flags.get_string('config-file')?
+					conf := env.load<Config>(config_file)?

 					id := cmd.args[0].int()
-					content(conf, id) ?
+					content(conf, id)?
 				}
 			},
 		]
@@ -75,7 +75,7 @@ fn print_log_list(logs []db.BuildLog) {
 // list prints a list of all build logs.
 fn list(conf Config) ? {
 	c := client.new(conf.address, conf.api_key)
-	logs := c.get_build_logs() ?.data
+	logs := c.get_build_logs()?.data

 	print_log_list(logs)
 }
@@ -83,7 +83,7 @@ fn list(conf Config) ? {
 // list prints a list of all build logs for a given repo.
 fn list_for_repo(conf Config, repo_id int) ? {
 	c := client.new(conf.address, conf.api_key)
-	logs := c.get_build_logs_for_repo(repo_id) ?.data
+	logs := c.get_build_logs_for_repo(repo_id)?.data

 	print_log_list(logs)
 }
@@ -91,7 +91,7 @@ fn list_for_repo(conf Config, repo_id int) ? {
 // info print the detailed info for a given build log.
 fn info(conf Config, id int) ? {
 	c := client.new(conf.address, conf.api_key)
-	log := c.get_build_log(id) ?.data
+	log := c.get_build_log(id)?.data

 	print(log)
 }
@@ -100,7 +100,7 @@ fn info(conf Config, id int) ? {
 // stdout.
 fn content(conf Config, id int) ? {
 	c := client.new(conf.address, conf.api_key)
-	content := c.get_build_log_content(id) ?
+	content := c.get_build_log_content(id)?

 	println(content)
 }
@@ -23,10 +23,10 @@ pub fn cmd() cli.Command {
 		name: 'cron'
 		description: 'Start the cron service that periodically runs builds.'
 		execute: fn (cmd cli.Command) ? {
-			config_file := cmd.flags.get_string('config-file') ?
-			conf := env.load<Config>(config_file) ?
+			config_file := cmd.flags.get_string('config-file')?
+			conf := env.load<Config>(config_file)?

-			cron(conf) ?
+			cron(conf)?
 		}
 	}
 }
@@ -27,7 +27,7 @@ pub fn cron(conf Config) ? {
 	}

 	mut d := daemon.init_daemon(logger, conf.address, conf.api_key, conf.base_image, ce,
-		conf.max_concurrent_builds, conf.api_update_frequency, conf.image_rebuild_frequency) ?
+		conf.max_concurrent_builds, conf.api_update_frequency, conf.image_rebuild_frequency)?

 	d.run()
 }
@@ -218,7 +218,7 @@ fn parse_part(s string, min int, max int) ?[]int {
 	mut bitv := []bool{len: max - min + 1, init: false}

 	for range in s.split(',') {
-		parse_range(range, min, max, mut bitv) ?
+		parse_range(range, min, max, mut bitv)?
 	}

 	return bitv_to_ints(bitv, min)
@@ -13,14 +13,14 @@ fn parse_range_error(s string, min int, max int) string {
 // =====parse_range=====
 fn test_range_star_range() ? {
 	mut bitv := []bool{len: 6, init: false}
-	parse_range('*', 0, 5, mut bitv) ?
+	parse_range('*', 0, 5, mut bitv)?

 	assert bitv == [true, true, true, true, true, true]
 }

 fn test_range_number() ? {
 	mut bitv := []bool{len: 6, init: false}
-	parse_range('4', 0, 5, mut bitv) ?
+	parse_range('4', 0, 5, mut bitv)?

 	assert bitv_to_ints(bitv, 0) == [4]
 }
@@ -39,14 +39,14 @@ fn test_range_number_invalid() ? {

 fn test_range_step_star_1() ? {
 	mut bitv := []bool{len: 21, init: false}
-	parse_range('*/4', 0, 20, mut bitv) ?
+	parse_range('*/4', 0, 20, mut bitv)?

 	assert bitv_to_ints(bitv, 0) == [0, 4, 8, 12, 16, 20]
 }

 fn test_range_step_star_2() ? {
 	mut bitv := []bool{len: 8, init: false}
-	parse_range('*/3', 1, 8, mut bitv) ?
+	parse_range('*/3', 1, 8, mut bitv)?

 	assert bitv_to_ints(bitv, 1) == [1, 4, 7]
 }
@@ -61,7 +61,7 @@ fn test_range_step_zero() ? {

 fn test_range_step_number() ? {
 	mut bitv := []bool{len: 21, init: false}
-	parse_range('5/4', 2, 22, mut bitv) ?
+	parse_range('5/4', 2, 22, mut bitv)?

 	assert bitv_to_ints(bitv, 2) == [5, 9, 13, 17, 21]
 }
@@ -76,23 +76,23 @@ fn test_range_step_number_too_small() ? {

 fn test_range_dash() ? {
 	mut bitv := []bool{len: 10, init: false}
-	parse_range('4-8', 0, 9, mut bitv) ?
+	parse_range('4-8', 0, 9, mut bitv)?

 	assert bitv_to_ints(bitv, 0) == [4, 5, 6, 7, 8]
 }

 fn test_range_dash_step() ? {
 	mut bitv := []bool{len: 10, init: false}
-	parse_range('4-8/2', 0, 9, mut bitv) ?
+	parse_range('4-8/2', 0, 9, mut bitv)?

 	assert bitv_to_ints(bitv, 0) == [4, 6, 8]
 }

 // =====parse_part=====
 fn test_part_single() ? {
-	assert parse_part('*', 0, 5) ? == [0, 1, 2, 3, 4, 5]
+	assert parse_part('*', 0, 5)? == [0, 1, 2, 3, 4, 5]
 }

 fn test_part_multiple() ? {
-	assert parse_part('*/2,2/3', 1, 8) ? == [1, 2, 3, 5, 7, 8]
+	assert parse_part('*/2,2/3', 1, 8)? == [1, 2, 3, 5, 7, 8]
 }
@@ -3,11 +3,11 @@ module expression
 import time { parse }

 fn util_test_time(exp string, t1_str string, t2_str string) ? {
-	ce := parse_expression(exp) ?
-	t1 := parse(t1_str) ?
-	t2 := parse(t2_str) ?
+	ce := parse_expression(exp)?
+	t1 := parse(t1_str)?
+	t2 := parse(t2_str)?

-	t3 := ce.next(t1) ?
+	t3 := ce.next(t1)?

 	assert t2.year == t3.year
 	assert t2.month == t3.month
@@ -18,17 +18,17 @@ fn util_test_time(exp string, t1_str string, t2_str string) ? {

 fn test_next_simple() ? {
 	// Very simple
-	util_test_time('0 3', '2002-01-01 00:00:00', '2002-01-01 03:00:00') ?
+	util_test_time('0 3', '2002-01-01 00:00:00', '2002-01-01 03:00:00')?

 	// Overlap to next day
-	util_test_time('0 3', '2002-01-01 03:00:00', '2002-01-02 03:00:00') ?
-	util_test_time('0 3', '2002-01-01 04:00:00', '2002-01-02 03:00:00') ?
+	util_test_time('0 3', '2002-01-01 03:00:00', '2002-01-02 03:00:00')?
+	util_test_time('0 3', '2002-01-01 04:00:00', '2002-01-02 03:00:00')?

-	util_test_time('0 3/4', '2002-01-01 04:00:00', '2002-01-01 07:00:00') ?
+	util_test_time('0 3/4', '2002-01-01 04:00:00', '2002-01-01 07:00:00')?

 	// Overlap to next month
-	util_test_time('0 3', '2002-11-31 04:00:00', '2002-12-01 03:00:00') ?
+	util_test_time('0 3', '2002-11-31 04:00:00', '2002-12-01 03:00:00')?

 	// Overlap to next year
-	util_test_time('0 3', '2002-12-31 04:00:00', '2003-01-01 03:00:00') ?
+	util_test_time('0 3', '2002-12-31 04:00:00', '2003-01-01 03:00:00')?
 }
@@ -8,7 +8,7 @@ struct VieterDb {

 // init initializes a database & adds the correct tables.
 pub fn init(db_path string) ?VieterDb {
-	conn := sqlite.connect(db_path) ?
+	conn := sqlite.connect(db_path)?

 	sql conn {
 		create table GitRepo
@@ -11,7 +11,7 @@ struct Container {

 // containers returns a list of all currently running containers
 pub fn containers() ?[]Container {
-	res := request('GET', urllib.parse('/v1.41/containers/json') ?) ?
+	res := request('GET', urllib.parse('/v1.41/containers/json')?)?

 	return json.decode([]Container, res.text) or {}
 }
@@ -32,19 +32,19 @@ struct CreatedContainer {
 // create_container creates a container defined by the given configuration. If
 // successful, it returns the ID of the newly created container.
 pub fn create_container(c &NewContainer) ?string {
-	res := request_with_json('POST', urllib.parse('/v1.41/containers/create') ?, c) ?
+	res := request_with_json('POST', urllib.parse('/v1.41/containers/create')?, c)?

 	if res.status_code != 201 {
 		return error('Failed to create container.')
 	}

-	return json.decode(CreatedContainer, res.text) ?.id
+	return json.decode(CreatedContainer, res.text)?.id
 }

 // start_container starts a container with a given ID. It returns whether the
 // container was started or not.
 pub fn start_container(id string) ?bool {
-	res := request('POST', urllib.parse('/v1.41/containers/$id/start') ?) ?
+	res := request('POST', urllib.parse('/v1.41/containers/$id/start')?)?

 	return res.status_code == 204
 }
@@ -70,18 +70,18 @@ pub mut:
 // inspect_container returns the result of inspecting a container with a given
 // ID.
 pub fn inspect_container(id string) ?ContainerInspect {
-	res := request('GET', urllib.parse('/v1.41/containers/$id/json') ?) ?
+	res := request('GET', urllib.parse('/v1.41/containers/$id/json')?)?

 	if res.status_code != 200 {
 		return error('Failed to inspect container.')
 	}

-	mut data := json.decode(ContainerInspect, res.text) ?
+	mut data := json.decode(ContainerInspect, res.text)?

-	data.state.start_time = time.parse_rfc3339(data.state.start_time_str) ?
+	data.state.start_time = time.parse_rfc3339(data.state.start_time_str)?

 	if data.state.status == 'exited' {
-		data.state.end_time = time.parse_rfc3339(data.state.end_time_str) ?
+		data.state.end_time = time.parse_rfc3339(data.state.end_time_str)?
 	}

 	return data
@@ -89,7 +89,7 @@ pub fn inspect_container(id string) ?ContainerInspect {

 // remove_container removes a container with a given ID.
 pub fn remove_container(id string) ?bool {
-	res := request('DELETE', urllib.parse('/v1.41/containers/$id') ?) ?
+	res := request('DELETE', urllib.parse('/v1.41/containers/$id')?)?

 	return res.status_code == 204
 }
@@ -97,7 +97,7 @@ pub fn remove_container(id string) ?bool {
 // get_container_logs retrieves the logs for a Docker container, both stdout &
 // stderr.
 pub fn get_container_logs(id string) ?string {
-	res := request('GET', urllib.parse('/v1.41/containers/$id/logs?stdout=true&stderr=true') ?) ?
+	res := request('GET', urllib.parse('/v1.41/containers/$id/logs?stdout=true&stderr=true')?)?
 	mut res_bytes := res.text.bytes()

 	// Docker uses a special "stream" format for their logs, so we have to
@@ -27,7 +27,7 @@ fn send(req &string) ?http.Response {
 	// Write the request to the socket
 	s.write_string(req) or { return error('Failed to write request to socket ${docker.socket}.') }

-	s.wait_for_write() ?
+	s.wait_for_write()?

 	mut c := 0
 	mut buf := []u8{len: docker.buf_len}
@@ -56,7 +56,7 @@ fn send(req &string) ?http.Response {
 	// A chunked HTTP response always ends with '0\r\n\r\n'.
 	for res.len < 5 || res#[-5..] != [u8(`0`), `\r`, `\n`, `\r`, `\n`] {
 		// Wait for the server to respond
-		s.wait_for_write() ?
+		s.wait_for_write()?

 		for {
 			c = s.read(mut buf) or {
@@ -11,13 +11,13 @@ pub:

 // pull_image pulls tries to pull the image for the given image & tag
 pub fn pull_image(image string, tag string) ?http.Response {
-	return request('POST', urllib.parse('/v1.41/images/create?fromImage=$image&tag=$tag') ?)
+	return request('POST', urllib.parse('/v1.41/images/create?fromImage=$image&tag=$tag')?)
 }

 // create_image_from_container creates a new image from a container with the
 // given repo & tag, given the container's ID.
 pub fn create_image_from_container(id string, repo string, tag string) ?Image {
-	res := request('POST', urllib.parse('/v1.41/commit?container=$id&repo=$repo&tag=$tag') ?) ?
+	res := request('POST', urllib.parse('/v1.41/commit?container=$id&repo=$repo&tag=$tag')?)?

 	if res.status_code != 201 {
 		return error('Failed to create image from container.')
@@ -28,7 +28,7 @@ pub fn create_image_from_container(id string, repo string, tag string) ?Image {

 // remove_image removes the image with the given ID.
 pub fn remove_image(id string) ?bool {
-	res := request('DELETE', urllib.parse('/v1.41/images/$id') ?) ?
+	res := request('DELETE', urllib.parse('/v1.41/images/$id')?)?

 	return res.status_code == 200
 }
@@ -50,7 +50,7 @@ pub fn load<T>(path string) ?T {
 	if os.exists(path) {
 		// We don't use reflect here because reflect also sets any fields not
 		// in the toml back to their zero value, which we don't want
-		doc := toml.parse_file(path) ?
+		doc := toml.parse_file(path)?

 		$for field in T.fields {
 			s := doc.value(field.name)
@@ -66,7 +66,7 @@ pub fn load<T>(path string) ?T {
 	}

 	$for field in T.fields {
-		env_value := get_env_var(field.name) ?
+		env_value := get_env_var(field.name)?

 		// The value of an env var will always take precedence over the toml
 		// file.
@@ -159,7 +159,7 @@ pub fn read_pkg_archive(pkg_path string) ?Pkg {

 			pkg_text := unsafe { buf.vstring_with_len(size).clone() }

-			pkg_info = parse_pkg_info_string(pkg_text) ?
+			pkg_info = parse_pkg_info_string(pkg_text)?
 		} else {
 			C.archive_read_data_skip(a)
 		}
@@ -53,22 +53,22 @@ pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string) ?Re
 		return error('Failed to read package file: $err.msg()')
 	}

-	added := r.add_pkg_in_repo(repo, pkg) ?
+	added := r.add_pkg_in_repo(repo, pkg)?

 	// If the add was successful, we move the file to the packages directory
 	for arch in added {
 		repo_pkg_path := os.real_path(os.join_path(r.pkg_dir, repo, arch))
 		dest_path := os.join_path_single(repo_pkg_path, pkg.filename())

-		os.mkdir_all(repo_pkg_path) ?
+		os.mkdir_all(repo_pkg_path)?

 		// We create hard links so that "any" arch packages aren't stored
 		// multiple times
-		os.link(pkg_path, dest_path) ?
+		os.link(pkg_path, dest_path)?
 	}

 	// After linking, we can remove the original file
-	os.rm(pkg_path) ?
+	os.rm(pkg_path)?

 	return RepoAddResult{
 		added: added.len > 0
@@ -87,7 +87,7 @@ fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ?[]strin
 	// A package not of arch 'any' can be handled easily by adding it to the
 	// respective repo
 	if pkg.info.arch != 'any' {
-		if r.add_pkg_in_arch_repo(repo, pkg.info.arch, pkg) ? {
+		if r.add_pkg_in_arch_repo(repo, pkg.info.arch, pkg)? {
 			return [pkg.info.arch]
 		} else {
 			return []
@@ -104,7 +104,7 @@ fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ?[]strin
 	// If this is the first package that's added to the repo, the directory
 	// won't exist yet
 	if os.exists(repo_dir) {
-		arch_repos = os.ls(repo_dir) ?
+		arch_repos = os.ls(repo_dir)?
 	}

 	// The default_arch should always be updated when a package with arch 'any'
@@ -118,7 +118,7 @@ fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ?[]strin
 	// We add the package to each repository. If any of the repositories
 	// return true, the result of the function is also true.
 	for arch in arch_repos {
-		if r.add_pkg_in_arch_repo(repo, arch, pkg) ? {
+		if r.add_pkg_in_arch_repo(repo, arch, pkg)? {
 			added << arch
 		}
 	}
@@ -135,22 +135,22 @@ fn (r &RepoGroupManager) add_pkg_in_arch_repo(repo string, arch string, pkg &pac
 	pkg_dir := os.join_path(r.repos_dir, repo, arch, '$pkg.info.name-$pkg.info.version')

 	// Remove the previous version of the package, if present
-	r.remove_pkg_from_arch_repo(repo, arch, pkg.info.name, false) ?
+	r.remove_pkg_from_arch_repo(repo, arch, pkg.info.name, false)?

 	os.mkdir_all(pkg_dir) or { return error('Failed to create package directory.') }

 	os.write_file(os.join_path_single(pkg_dir, 'desc'), pkg.to_desc()) or {
-		os.rmdir_all(pkg_dir) ?
+		os.rmdir_all(pkg_dir)?

 		return error('Failed to write desc file.')
 	}
 	os.write_file(os.join_path_single(pkg_dir, 'files'), pkg.to_files()) or {
-		os.rmdir_all(pkg_dir) ?
+		os.rmdir_all(pkg_dir)?

 		return error('Failed to write files file.')
 	}

-	r.sync(repo, arch) ?
+	r.sync(repo, arch)?

 	return true
 }
@@ -168,7 +168,7 @@ fn (r &RepoGroupManager) remove_pkg_from_arch_repo(repo string, arch string, pkg

 	// We iterate over every directory in the repo dir
 	// TODO filter so we only check directories
-	for d in os.ls(repo_dir) ? {
+	for d in os.ls(repo_dir)? {
 		// Because a repository only allows a single version of each package,
 		// we need only compare whether the name of the package is the same,
 		// not the version.
@@ -178,22 +178,22 @@ fn (r &RepoGroupManager) remove_pkg_from_arch_repo(repo string, arch string, pkg
 			// We lock the mutex here to prevent other routines from creating a
 			// new archive while we remove an entry
 			lock r.mutex {
-				os.rmdir_all(os.join_path_single(repo_dir, d)) ?
+				os.rmdir_all(os.join_path_single(repo_dir, d))?
 			}

 			// Also remove the package archive
 			repo_pkg_dir := os.join_path(r.pkg_dir, repo, arch)

-			archives := os.ls(repo_pkg_dir) ?.filter(it.split('-')#[..-3].join('-') == name)
+			archives := os.ls(repo_pkg_dir)?.filter(it.split('-')#[..-3].join('-') == name)

 			for archive_name in archives {
 				full_path := os.join_path_single(repo_pkg_dir, archive_name)
-				os.rm(full_path) ?
+				os.rm(full_path)?
 			}

 			// Sync the db archives if requested
 			if sync {
-				r.sync(repo, arch) ?
+				r.sync(repo, arch)?
 			}

 			return true
@@ -54,7 +54,7 @@ fn (r &RepoGroupManager) sync(repo string, arch string) ? {
 	C.archive_write_open_filename(a_files, &char(files_path.str))

 	// Iterate over each directory
-	for d in os.ls(subrepo_path) ?.filter(os.is_dir(os.join_path_single(subrepo_path,
+	for d in os.ls(subrepo_path)?.filter(os.is_dir(os.join_path_single(subrepo_path,
 		it))) {
 		// desc
 		mut inner_path := os.join_path_single(d, 'desc')
@@ -18,10 +18,10 @@ pub fn cmd() cli.Command {
 		name: 'server'
 		description: 'Start the Vieter server.'
 		execute: fn (cmd cli.Command) ? {
-			config_file := cmd.flags.get_string('config-file') ?
-			conf := env.load<Config>(config_file) ?
+			config_file := cmd.flags.get_string('config-file')?
+			conf := env.load<Config>(config_file)?

-			server(conf) ?
+			server(conf)?
 		}
 	}
 }
@@ -56,8 +56,8 @@ fn (mut app App) get_log_content(id int) web.Result {
 // parse_query_time unescapes an HTTP query parameter & tries to parse it as a
 // time.Time struct.
 fn parse_query_time(query string) ?time.Time {
-	unescaped := urllib.query_unescape(query) ?
-	t := time.parse(unescaped) ?
+	unescaped := urllib.query_unescape(query)?
+	t := time.parse(unescaped)?

 	return t
 }
@@ -25,7 +25,7 @@ pub fn exit_with_message(code int, msg string) {

 // reader_to_file writes the contents of a BufferedReader to a file
 pub fn reader_to_file(mut reader io.BufferedReader, length int, path string) ? {
-	mut file := os.create(path) ?
+	mut file := os.create(path)?
 	defer {
 		file.close()
 	}
@@ -190,7 +190,7 @@ pub fn (ctx Context) before_request() {}

 // send_string
 fn send_string(mut conn net.TcpConn, s string) ? {
-	conn.write(s.bytes()) ?
+	conn.write(s.bytes())?
 }

 // send_response_to_client sends a response to the client