Compare commits

..

26 Commits

Author SHA1 Message Date
Jef Roosens 48e2ae7645
feat(build): show shell commands in build logs
ci/woodpecker/pr/docs Pipeline was successful Details
ci/woodpecker/pr/lint Pipeline was successful Details
ci/woodpecker/pr/build Pipeline was successful Details
ci/woodpecker/pr/docker Pipeline was successful Details
ci/woodpecker/pr/test Pipeline was successful Details
2022-06-01 20:34:36 +02:00
Jef Roosens 9f753f9c93
feat(build): add target repo to builds; update system for every build 2022-06-01 17:08:18 +02:00
Jef Roosens ec92b16a73 Merge pull request 'Use local timezone with CLI' (#210) from Chewing_Bever/vieter:utc-only into dev
ci/woodpecker/push/lint Pipeline was successful Details
ci/woodpecker/push/docs Pipeline was successful Details
ci/woodpecker/push/arch Pipeline was successful Details
ci/woodpecker/push/build Pipeline was successful Details
ci/woodpecker/push/test Pipeline was successful Details
ci/woodpecker/push/docker Pipeline was successful Details
ci/woodpecker/push/deploy Pipeline was successful Details
Reviewed-on: vieter/vieter#210
2022-05-31 12:55:16 +02:00
Jef Roosens aded6d438a
feat(cli): use correct timezone strings for log info; show build duration
ci/woodpecker/pr/docs Pipeline was successful Details
ci/woodpecker/pr/lint Pipeline was successful Details
ci/woodpecker/pr/build Pipeline was successful Details
ci/woodpecker/pr/docker Pipeline was successful Details
ci/woodpecker/pr/test Pipeline was successful Details
2022-05-31 12:46:39 +02:00
Jef Roosens edd71b41c2
feat(cli): interpret input dates & print dates as local timezone 2022-05-31 12:31:44 +02:00
Jef Roosens f0565c4168 Merge pull request 'Better API: BuildLog API & CLI' (#206) from Chewing_Bever/vieter:better-api into dev
ci/woodpecker/push/lint Pipeline was successful Details
ci/woodpecker/push/docs Pipeline was successful Details
ci/woodpecker/push/arch Pipeline was successful Details
ci/woodpecker/push/build Pipeline was successful Details
ci/woodpecker/push/test Pipeline was successful Details
ci/woodpecker/push/docker Pipeline was successful Details
ci/woodpecker/push/deploy Pipeline was successful Details
Reviewed-on: vieter/vieter#206
2022-05-30 23:40:58 +02:00
Jef Roosens a4ffc2c0e3
feat(cli): added more advanced date flags for BuildLog CLI
ci/woodpecker/pr/docs Pipeline was successful Details
ci/woodpecker/pr/lint Pipeline was successful Details
ci/woodpecker/pr/build Pipeline was successful Details
ci/woodpecker/pr/docker Pipeline was successful Details
ci/woodpecker/pr/test Pipeline was successful Details
2022-05-30 23:27:09 +02:00
Jef Roosens 401e0291e3
feat(cli): added some filter flags to GitRepo CLI 2022-05-29 21:59:21 +02:00
Jef Roosens a39c1aa5eb
feat(server): added proper filtering to the BuildLog API 2022-05-29 20:15:54 +02:00
Jef Roosens 4f32dec5b5
feat(db): added function to convert sqlite output to struct 2022-05-29 20:15:54 +02:00
Jef Roosens 31e903ebeb
feat(server): partial implementation of BuildLog API filter 2022-05-29 20:15:54 +02:00
Jef Roosens 596da100b6 Merge pull request 'migrations' (#196) from Chewing_Bever/vieter:migrations into dev
ci/woodpecker/push/docs Pipeline was successful Details
ci/woodpecker/push/lint Pipeline was successful Details
ci/woodpecker/push/arch Pipeline was successful Details
ci/woodpecker/push/build Pipeline was successful Details
ci/woodpecker/push/test Pipeline was successful Details
ci/woodpecker/push/docker Pipeline was successful Details
ci/woodpecker/push/deploy Pipeline was successful Details
Reviewed-on: vieter/vieter#196
2022-05-28 19:51:40 +02:00
Jef Roosens 0d5704ba15
feat(server): initial implementation of migrations
ci/woodpecker/pr/test Pipeline is pending Details
ci/woodpecker/pr/docs Pipeline was successful Details
ci/woodpecker/pr/lint Pipeline failed Details
ci/woodpecker/pr/docker unknown status Details
ci/woodpecker/pr/build Pipeline failed Details
2022-05-28 19:49:49 +02:00
Jef Roosens cdb88e1620 Merge pull request 'Add vieter schedule command' (#201) from Chewing_Bever/vieter:schedule-cli into dev
ci/woodpecker/push/docs Pipeline was successful Details
ci/woodpecker/push/lint Pipeline was successful Details
ci/woodpecker/push/arch Pipeline was successful Details
ci/woodpecker/push/build Pipeline was successful Details
ci/woodpecker/push/test Pipeline was successful Details
ci/woodpecker/push/docker Pipeline was successful Details
ci/woodpecker/push/deploy Pipeline was successful Details
Reviewed-on: vieter/vieter#201
2022-05-26 13:50:11 +02:00
Jef Roosens 768da5b790
refactor: added CronExpression.next_n function
ci/woodpecker/pr/docs Pipeline was successful Details
ci/woodpecker/pr/lint Pipeline was successful Details
ci/woodpecker/pr/build Pipeline was successful Details
ci/woodpecker/pr/docker Pipeline was successful Details
ci/woodpecker/pr/test Pipeline was successful Details
2022-05-26 13:41:28 +02:00
Jef Roosens bd4bb9a9fb
feat: added cli command for previewing cron schedules 2022-05-26 09:15:49 +02:00
Jef Roosens c0b739035b Merge pull request 'fix(cron): retrieve all GitRepo's instead of first 25' (#198) from Chewing_Bever/vieter:cron-25-bug into dev
ci/woodpecker/push/lint Pipeline was successful Details
ci/woodpecker/push/docs Pipeline was successful Details
ci/woodpecker/push/arch Pipeline was successful Details
ci/woodpecker/push/build Pipeline was successful Details
ci/woodpecker/push/test Pipeline was successful Details
ci/woodpecker/push/docker Pipeline was successful Details
ci/woodpecker/push/deploy Pipeline was successful Details
Reviewed-on: vieter/vieter#198
2022-05-25 09:36:08 +02:00
Jef Roosens 7f6e9e636c
fix(cron): retrieve all GitRepo's instead of first 25
ci/woodpecker/pr/docs Pipeline was successful Details
ci/woodpecker/pr/lint Pipeline was successful Details
ci/woodpecker/pr/build Pipeline was successful Details
ci/woodpecker/pr/docker Pipeline was successful Details
ci/woodpecker/pr/test Pipeline was successful Details
2022-05-25 09:24:01 +02:00
Jef Roosens 96d0c2f1eb Merge pull request 'Better API & CLI: GitRepo' (#188) from Chewing_Bever/vieter:better-api into dev
ci/woodpecker/push/docs Pipeline was successful Details
ci/woodpecker/push/lint Pipeline was successful Details
ci/woodpecker/push/arch Pipeline was successful Details
ci/woodpecker/push/build Pipeline was successful Details
ci/woodpecker/push/test Pipeline was successful Details
ci/woodpecker/push/docker Pipeline was successful Details
ci/woodpecker/push/deploy Pipeline was successful Details
Reviewed-on: vieter/vieter#188
2022-05-19 22:26:47 +02:00
Jef Roosens 0233b8559d
doc: added some missing docstrings
ci/woodpecker/pr/docs Pipeline was successful Details
ci/woodpecker/pr/lint Pipeline was successful Details
ci/woodpecker/pr/build Pipeline was successful Details
ci/woodpecker/pr/docker Pipeline was successful Details
ci/woodpecker/pr/test Pipeline was successful Details
2022-05-19 22:14:41 +02:00
Jef Roosens 2fc25f1afe
refactor: moved BuildLog to models 2022-05-19 22:11:48 +02:00
Jef Roosens 6bd5b7cb48
refactor: separated GitRepo types into own module
feat: added more query params for GitRepo API
2022-05-19 22:11:48 +02:00
Jef Roosens 5e81dadce3
feat: partially added filters to GitRepo CLI 2022-05-19 22:11:35 +02:00
Jef Roosens 1e079143cd
feat(server): added better query params to GitRepo API 2022-05-18 16:05:42 +02:00
Jef Roosens 7627b28bcf Merge pull request 'Release 0.3.0-alpha.2' (#185) from release-0.3.0-alpha.2 into main
ci/woodpecker/tag/docs Pipeline was successful Details
ci/woodpecker/tag/lint Pipeline was successful Details
ci/woodpecker/tag/arch-rel Pipeline was successful Details
ci/woodpecker/tag/build Pipeline was successful Details
ci/woodpecker/tag/test Pipeline was successful Details
ci/woodpecker/tag/gitea Pipeline was successful Details
ci/woodpecker/tag/docker Pipeline was successful Details
ci/woodpecker/push/docs Pipeline was successful Details
ci/woodpecker/push/lint Pipeline was successful Details
ci/woodpecker/push/arch Pipeline was successful Details
ci/woodpecker/push/build Pipeline was successful Details
ci/woodpecker/push/test Pipeline was successful Details
ci/woodpecker/push/docker Pipeline was successful Details
ci/woodpecker/push/deploy Pipeline was successful Details
Reviewed-on: vieter/vieter#185
2022-05-18 07:56:03 +02:00
Jef Roosens 0de5ffb45d
chore: bumped versions
ci/woodpecker/push/arch unknown status Details
ci/woodpecker/push/deploy unknown status Details
ci/woodpecker/push/docker unknown status Details
ci/woodpecker/push/docs Pipeline was successful Details
ci/woodpecker/push/lint Pipeline was successful Details
ci/woodpecker/push/build Pipeline was successful Details
ci/woodpecker/push/test Pipeline was successful Details
ci/woodpecker/pr/docs unknown status Details
ci/woodpecker/pr/lint unknown status Details
ci/woodpecker/pr/test unknown status Details
ci/woodpecker/pr/build Pipeline was successful Details
ci/woodpecker/pr/docker Pipeline was successful Details
2022-05-16 17:34:51 +02:00
27 changed files with 677 additions and 173 deletions

View File

@@ -9,6 +9,26 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
* Database migrations
* Improved GitRepo & BuildLog API
* Pagination using `limit` & `offset` query params
* GitRepo: filter by repo
* BuildLog: filter by start & end date, repo, exit code & arch
* CLI flags to take advantage of above API improvements
### Changed
* Packages from target repo are available during builds
* This can be used as a basic way to support AUR dependencies, by adding
the dependencies to the same repository
* Every build now updates its packages first instead of solely relying on the
updated builder image
* Build logs now show commands being executed
## [0.3.0-alpha.2](https://git.rustybever.be/vieter/vieter/src/tag/0.3.0-alpha.2)
### Added
* Web API for adding & querying build logs
* CLI commands to access build logs API
* Cron build logs are uploaded to above API
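As a rough illustration of these additions, the new client module further down in this compare can be driven as in the sketch below; the address and API key are placeholders, and the named arguments map directly onto the GitRepoFilter & BuildLogFilter fields:

import client

fn example() ? {
	c := client.new('https://example.com', 'secret-api-key')
	// GitRepo API: paginate & only list repos publishing to the 'vieter' repo
	repos := c.get_git_repos(repo: 'vieter', limit: u64(10), offset: u64(20))?
	println(repos.len)
	// BuildLog API: only failed builds for the Git repo with ID 1
	logs := c.get_build_logs(repo: 1, exit_codes: ['!0'])?
	println(logs.data.len)
}

The CLI flags added in the console diffs below are thin wrappers that populate exactly these filter fields.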

View File

@@ -3,7 +3,7 @@
pkgbase='vieter'
pkgname='vieter'
pkgver='0.3.0_alpha.1'
pkgver='0.3.0_alpha.2'
pkgrel=1
depends=('glibc' 'openssl' 'libarchive' 'sqlite')
makedepends=('git' 'vieter-v')

View File

@@ -4,9 +4,9 @@ import docker
import encoding.base64
import time
import os
import db
import strings
import util
import models { GitRepo }
const (
container_build_dir = '/build'
@@ -93,7 +93,7 @@ pub:
// build_repo builds, packages & publishes a given Arch package based on the
// provided GitRepo. The base image ID should be of an image previously created
// by create_build_image. It returns the logs of the container.
pub fn build_repo(address string, api_key string, base_image_id string, repo &db.GitRepo) ?BuildResult {
pub fn build_repo(address string, api_key string, base_image_id string, repo &GitRepo) ?BuildResult {
mut dd := docker.new_conn()?
defer {
@@ -101,40 +101,19 @@ pub fn build_repo(address string, api_key string, base_image_id string, repo &db
}
build_arch := os.uname().machine
build_script := create_build_script(address, repo, build_arch)
repo_url := '$address/$repo.repo'
// TODO what to do with PKGBUILDs that build multiple packages?
commands := [
// This will later be replaced by a proper setting for changing the
// mirrorlist
"echo -e '[$repo.repo]\nServer = $address/\$repo/\$arch\nSigLevel = Optional' >> /etc/pacman.conf"
// We need to update the package list of the repo we just added above.
// This should however not pull in a lot of packages as long as the
// builder image is rebuilt frequently.
'pacman -Syu --needed --noconfirm',
'su builder',
'git clone --single-branch --depth 1 --branch $repo.branch $repo.url repo',
'cd repo',
'makepkg --nobuild --syncdeps --needed --noconfirm',
'source PKGBUILD',
// The build container checks whether the package is already
// present on the server
'curl -s --head --fail $repo_url/$build_arch/\$pkgname-\$pkgver-\$pkgrel && exit 0',
'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $repo_url/publish; done',
]
// We convert the list of commands into a base64 string, which then gets
// passed to the container as an env var
cmds_str := base64.encode_str(commands.join('\n'))
// We convert the build script into a base64 string, which then gets passed
// to the container as an env var
base64_script := base64.encode_str(build_script)
c := docker.NewContainer{
image: '$base_image_id'
env: ['BUILD_SCRIPT=$cmds_str', 'API_KEY=$api_key']
env: ['BUILD_SCRIPT=$base64_script', 'API_KEY=$api_key']
entrypoint: ['/bin/sh', '-c']
cmd: ['echo \$BUILD_SCRIPT | base64 -d | /bin/bash -e']
work_dir: '/build'
// user: 'builder:builder'
user: '0:0'
}
id := dd.create_container(c)?.id
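The base64 step above is purely transport: the generated script travels to the container in a single environment variable and is unpacked again by the entrypoint. A minimal sketch of that round trip, with a made-up script, using the same encoding.base64 module:

import encoding.base64

fn main() {
	script := 'echo hello\necho world'
	encoded := base64.encode_str(script)
	println(encoded)
	// Inside the container, the entrypoint reverses this step:
	//   echo $BUILD_SCRIPT | base64 -d | /bin/bash -e
	assert base64.decode_str(encoded) == script
}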

View File

@@ -0,0 +1,20 @@
echo -e '+ echo -e '\''[vieter]\\nServer = https://example.com/$repo/$arch\\nSigLevel = Optional'\'' >> /etc/pacman.conf'
echo -e '[vieter]\nServer = https://example.com/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf
echo -e '+ pacman -Syu --needed --noconfirm'
pacman -Syu --needed --noconfirm
echo -e '+ su builder'
su builder
echo -e '+ git clone --single-branch --depth 1 --branch main https://examplerepo.com repo'
git clone --single-branch --depth 1 --branch main https://examplerepo.com repo
echo -e '+ cd repo'
cd repo
echo -e '+ makepkg --nobuild --syncdeps --needed --noconfirm'
makepkg --nobuild --syncdeps --needed --noconfirm
echo -e '+ source PKGBUILD'
source PKGBUILD
echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0'
curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0
echo -e '+ [ "$(id -u)" == 0 ] && exit 0'
[ "$(id -u)" == 0 ] && exit 0
echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done'
MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done

src/build/shell.v 100644 (new file, 55 lines)
View File

@@ -0,0 +1,55 @@
module build
import models { GitRepo }
// escape_shell_string escapes any characters that could be interpreted
// incorrectly by a shell. The resulting value should be safe to use inside an
// echo statement.
fn escape_shell_string(s string) string {
return s.replace(r'\', r'\\').replace("'", r"'\''")
}
// echo_commands takes a list of shell commands & prepends each one with
// an echo call displaying said command.
pub fn echo_commands(cmds []string) []string {
mut out := []string{cap: 2 * cmds.len}
for cmd in cmds {
out << "echo -e '+ ${escape_shell_string(cmd)}'"
out << cmd
}
return out
}
// create_build_script generates a shell script that builds a given GitRepo.
fn create_build_script(address string, repo &GitRepo, build_arch string) string {
repo_url := '$address/$repo.repo'
commands := echo_commands([
// This will later be replaced by a proper setting for changing the
// mirrorlist
"echo -e '[$repo.repo]\\nServer = $address/\$repo/\$arch\\nSigLevel = Optional' >> /etc/pacman.conf"
// We need to update the package list of the repo we just added above.
// This should however not pull in a lot of packages as long as the
// builder image is rebuilt frequently.
'pacman -Syu --needed --noconfirm',
// makepkg can't run as root
'su builder',
'git clone --single-branch --depth 1 --branch $repo.branch $repo.url repo',
'cd repo',
'makepkg --nobuild --syncdeps --needed --noconfirm',
'source PKGBUILD',
// The build container checks whether the package is already present on
// the server.
'curl -s --head --fail $repo_url/$build_arch/\$pkgname-\$pkgver-\$pkgrel && exit 0',
// If the above curl command succeeds, we don't need to rebuild the
// package. However, because we're in a su shell, the exit command will
// drop us back into the root shell. Therefore, we must check whether
// we're in root so we don't proceed.
'[ "\$(id -u)" == 0 ] && exit 0',
'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $repo_url/publish; done',
])
return commands.join('\n')
}
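A hypothetical snippet inside the same build module (escape_shell_string is not public) showing what the two helpers above produce; the expected values follow from the replace rules and match the build_script.sh fixture:

fn example_echo_commands() {
	// A single quote becomes the usual '\'' shell idiom.
	assert escape_shell_string("it's") == r"it'\''s"
	// Every command is prefixed with an echo of itself, which is what makes
	// the executed commands show up in the build logs.
	cmds := echo_commands(['pacman -Syu --needed --noconfirm'])
	assert cmds == [
		"echo -e '+ pacman -Syu --needed --noconfirm'",
		'pacman -Syu --needed --noconfirm',
	]
}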

View File

@@ -0,0 +1,16 @@
module build
import models { GitRepo }
fn test_create_build_script() {
repo := GitRepo{
id: 1
url: 'https://examplerepo.com'
branch: 'main'
repo: 'vieter'
}
build_script := create_build_script('https://example.com', repo, 'x86_64')
expected := $embed_file('build_script.sh')
assert build_script == expected.to_string().trim_space()
}

View File

@@ -29,7 +29,10 @@ fn (c &Client) send_request_raw(method Method, url string, params map[string]str
// Escape each query param
for k, v in params {
params_escaped[k] = urllib.query_escape(v)
// An empty parameter should be the same as not providing it at all
if v != '' {
params_escaped[k] = urllib.query_escape(v)
}
}
params_str := params_escaped.keys().map('$it=${params[it]}').join('&')

View File

@@ -1,16 +1,38 @@
module client
import db { GitRepo }
import models { GitRepo, GitRepoFilter }
import net.http { Method }
import response { Response }
// get_git_repos returns the current list of repos.
pub fn (c &Client) get_git_repos() ?[]GitRepo {
data := c.send_request<[]GitRepo>(Method.get, '/api/repos', {})?
// get_git_repos returns a list of GitRepo's, given a filter object.
pub fn (c &Client) get_git_repos(filter GitRepoFilter) ?[]GitRepo {
params := models.params_from(filter)
data := c.send_request<[]GitRepo>(Method.get, '/api/repos', params)?
return data.data
}
// get_all_git_repos retrieves *all* GitRepo's from the API using the default
// limit.
pub fn (c &Client) get_all_git_repos() ?[]GitRepo {
mut repos := []GitRepo{}
mut offset := u64(0)
for {
sub_repos := c.get_git_repos(offset: offset)?
if sub_repos.len == 0 {
break
}
repos << sub_repos
offset += u64(sub_repos.len)
}
return repos
}
// get_git_repo returns the repo for a specific ID.
pub fn (c &Client) get_git_repo(id int) ?GitRepo {
data := c.send_request<GitRepo>(Method.get, '/api/repos/$id', {})?
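With the default limit of 25, get_all_git_repos simply pages through the API: for, say, 60 repos on the server it requests offsets 0, 25 and 50 (returning 25, 25 and 10 entries) and stops once the request at offset 60 comes back empty.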

View File

@@ -1,13 +1,14 @@
module client
import db { BuildLog }
import models { BuildLog, BuildLogFilter }
import net.http { Method }
import response { Response }
import time
// get_build_logs returns all build logs.
pub fn (c &Client) get_build_logs() ?Response<[]BuildLog> {
data := c.send_request<[]BuildLog>(Method.get, '/api/logs', {})?
pub fn (c &Client) get_build_logs(filter BuildLogFilter) ?Response<[]BuildLog> {
params := models.params_from(filter)
data := c.send_request<[]BuildLog>(Method.get, '/api/logs', params)?
return data
}

View File

@@ -5,6 +5,7 @@ import env
import cron.expression { parse_expression }
import client
import console
import models { GitRepoFilter }
struct Config {
address string [required]
@@ -21,11 +22,50 @@ pub fn cmd() cli.Command {
cli.Command{
name: 'list'
description: 'List the current repos.'
flags: [
cli.Flag{
name: 'limit'
description: 'How many results to return.'
flag: cli.FlagType.int
},
cli.Flag{
name: 'offset'
description: 'Minimum index to return.'
flag: cli.FlagType.int
},
cli.Flag{
name: 'repo'
description: 'Only return Git repos that publish to this repo.'
flag: cli.FlagType.string
},
cli.Flag{
name: 'arch'
description: 'Only return repos enabled for this architecture.'
flag: cli.FlagType.string
},
]
execute: fn (cmd cli.Command) ? {
config_file := cmd.flags.get_string('config-file')?
conf := env.load<Config>(config_file)?
list(conf)?
mut filter := GitRepoFilter{}
limit := cmd.flags.get_int('limit')?
if limit != 0 {
filter.limit = u64(limit)
}
offset := cmd.flags.get_int('offset')?
if offset != 0 {
filter.offset = u64(offset)
}
repo := cmd.flags.get_string('repo')?
if repo != '' {
filter.repo = repo
}
list(conf, filter)?
}
},
cli.Command{
@@ -133,9 +173,9 @@ pub fn cmd() cli.Command {
// ID. If multiple or none are found, an error is raised.
// list prints out a list of all repositories.
fn list(conf Config) ? {
fn list(conf Config, filter GitRepoFilter) ? {
c := client.new(conf.address, conf.api_key)
repos := c.get_git_repos()?
repos := c.get_git_repos(filter)?
data := repos.map([it.id.str(), it.url, it.branch, it.repo])
println(console.pretty_table(['id', 'url', 'branch', 'repo'], data)?)

View File

@@ -3,8 +3,9 @@ module logs
import cli
import env
import client
import db
import console
import time
import models { BuildLog, BuildLogFilter }
struct Config {
address string [required]
@@ -19,21 +20,120 @@ pub fn cmd() cli.Command {
commands: [
cli.Command{
name: 'list'
description: 'List the build logs. If a repo ID is provided, only list the build logs for that repo.'
description: 'List build logs. All date strings in the output are converted to the local timezone. Any time strings provided as input should be in the local timezone as well.'
flags: [
cli.Flag{
name: 'repo'
description: 'ID of the Git repo to restrict list to.'
name: 'limit'
description: 'How many results to return.'
flag: cli.FlagType.int
},
cli.Flag{
name: 'offset'
description: 'Minimum index to return.'
flag: cli.FlagType.int
},
cli.Flag{
name: 'repo'
description: 'Only return logs for this repo id.'
flag: cli.FlagType.int
},
cli.Flag{
name: 'today'
description: 'Only list logs started today.'
flag: cli.FlagType.bool
},
cli.Flag{
name: 'failed'
description: 'Only list logs with non-zero exit codes.'
flag: cli.FlagType.bool
},
cli.Flag{
name: 'day'
description: 'Only list logs started on this day. (format: YYYY-MM-DD)'
flag: cli.FlagType.string
},
cli.Flag{
name: 'before'
description: 'Only list logs started before this timestamp. (format: YYYY-MM-DD HH:mm:ss)'
flag: cli.FlagType.string
},
cli.Flag{
name: 'after'
description: 'Only list logs started after this timestamp. (format: YYYY-MM-DD HH:mm:ss)'
flag: cli.FlagType.string
},
]
execute: fn (cmd cli.Command) ? {
config_file := cmd.flags.get_string('config-file')?
conf := env.load<Config>(config_file)?
repo_id := cmd.flags.get_int('repo')?
mut filter := BuildLogFilter{}
if repo_id == 0 { list(conf)? } else { list_for_repo(conf, repo_id)? }
limit := cmd.flags.get_int('limit')?
if limit != 0 {
filter.limit = u64(limit)
}
offset := cmd.flags.get_int('offset')?
if offset != 0 {
filter.offset = u64(offset)
}
repo_id := cmd.flags.get_int('repo')?
if repo_id != 0 {
filter.repo = repo_id
}
tz_offset := time.offset()
if cmd.flags.get_bool('today')? {
today := time.now()
filter.after = time.new_time(time.Time{
year: today.year
month: today.month
day: today.day
}).add_seconds(-tz_offset)
filter.before = filter.after.add_days(1)
}
// The -today flag overrides any of the other date flags.
else {
day_str := cmd.flags.get_string('day')?
before_str := cmd.flags.get_string('before')?
after_str := cmd.flags.get_string('after')?
if day_str != '' {
day := time.parse_rfc3339(day_str)?
day_utc := time.new_time(time.Time{
year: day.year
month: day.month
day: day.day
}).add_seconds(-tz_offset)
// The extra -1 is so we also return logs that
// started at exactly midnight (filter bounds are
// exclusive). Therefore, we have to request logs
// started after 23:59:59 the previous day.
filter.after = day_utc.add_seconds(-1)
filter.before = day_utc.add_days(1)
} else {
if before_str != '' {
filter.before = time.parse(before_str)?.add_seconds(-tz_offset)
}
if after_str != '' {
filter.after = time.parse(after_str)?.add_seconds(-tz_offset)
}
}
}
if cmd.flags.get_bool('failed')? {
filter.exit_codes = [
'!0',
]
}
list(conf, filter)?
}
},
cli.Command{
@@ -67,17 +167,17 @@ pub fn cmd() cli.Command {
}
// print_log_list prints a list of logs.
fn print_log_list(logs []db.BuildLog) ? {
data := logs.map([it.id.str(), it.repo_id.str(), it.start_time.str(),
fn print_log_list(logs []BuildLog) ? {
data := logs.map([it.id.str(), it.repo_id.str(), it.start_time.local().str(),
it.exit_code.str()])
println(console.pretty_table(['id', 'repo', 'start time', 'exit code'], data)?)
}
// list prints a list of all build logs.
fn list(conf Config) ? {
fn list(conf Config, filter BuildLogFilter) ? {
c := client.new(conf.address, conf.api_key)
logs := c.get_build_logs()?.data
logs := c.get_build_logs(filter)?.data
print_log_list(logs)?
}
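To make the date handling above concrete: with a local offset of +02:00 (tz_offset = 7200) and the day flag set to 2022-06-01, day_utc becomes 2022-05-31 22:00:00 UTC, so the filter requests logs started after 2022-05-31 21:59:59 and before 2022-06-01 22:00:00 UTC, i.e. the full local calendar day. The today flag does essentially the same for the current date, and the before/after flags are shifted by the same offset before being sent to the server.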

View File

@@ -0,0 +1,30 @@
module schedule
import cli
import cron.expression { parse_expression }
import time
// cmd returns the cli submodule for previewing a cron schedule.
pub fn cmd() cli.Command {
return cli.Command{
name: 'schedule'
usage: 'schedule'
description: 'Preview the behavior of a cron schedule.'
flags: [
cli.Flag{
name: 'count'
description: 'How many scheduled times to show.'
flag: cli.FlagType.int
default_value: ['5']
},
]
execute: fn (cmd cli.Command) ? {
ce := parse_expression(cmd.args.join(' '))?
count := cmd.flags.get_int('count')?
for t in ce.next_n(time.now(), count)? {
println(t)
}
}
}
}

View File

@@ -7,9 +7,9 @@ import cron.expression { CronExpression, parse_expression }
import math
import build
import docker
import db
import os
import client
import models { GitRepo }
const (
// How many seconds to wait before retrying to update API if failed
@@ -20,7 +20,7 @@ const (
struct ScheduledBuild {
pub:
repo db.GitRepo
repo GitRepo
timestamp time.Time
}
@@ -38,7 +38,7 @@ mut:
api_update_frequency int
image_rebuild_frequency int
// Repos currently loaded from API.
repos []db.GitRepo
repos []GitRepo
// At what point to update the list of repositories.
api_update_timestamp time.Time
image_build_timestamp time.Time
@@ -149,7 +149,7 @@ pub fn (mut d Daemon) run() {
}
// schedule_build adds the next occurrence of the given repo build to the queue.
fn (mut d Daemon) schedule_build(repo db.GitRepo) {
fn (mut d Daemon) schedule_build(repo GitRepo) {
ce := if repo.schedule != '' {
parse_expression(repo.schedule) or {
// TODO This shouldn't return an error if the expression is empty.
@@ -178,7 +178,7 @@ fn (mut d Daemon) schedule_build(repo db.GitRepo) {
fn (mut d Daemon) renew_repos() {
d.linfo('Renewing repos...')
mut new_repos := d.client.get_git_repos() or {
mut new_repos := d.client.get_all_git_repos() or {
d.lerror('Failed to renew repos. Retrying in ${daemon.api_update_retry_timeout}s...')
d.api_update_timestamp = time.now().add_seconds(daemon.api_update_retry_timeout)

View File

@@ -121,6 +121,20 @@ pub fn (ce &CronExpression) next_from_now() ?time.Time {
return ce.next(time.now())
}
// next_n returns the n next occurrences of the expression, given a starting
// time.
pub fn (ce &CronExpression) next_n(ref time.Time, n int) ?[]time.Time {
mut times := []time.Time{cap: n}
times << ce.next(ref)?
for i in 1 .. n {
times << ce.next(times[i - 1])?
}
return times
}
// parse_range parses a given string into a range of sorted integers, if
// possible.
fn parse_range(s string, min int, max int, mut bitv []bool) ? {

View File

@@ -1,21 +1,89 @@
module db
import sqlite
import time
struct VieterDb {
conn sqlite.DB
}
struct MigrationVersion {
id int [primary]
version int
}
const (
migrations_up = [$embed_file('migrations/001-initial/up.sql')]
migrations_down = [$embed_file('migrations/001-initial/down.sql')]
)
// init initializes a database & adds the correct tables.
pub fn init(db_path string) ?VieterDb {
conn := sqlite.connect(db_path)?
sql conn {
create table GitRepo
create table BuildLog
create table MigrationVersion
}
cur_version := sql conn {
select from MigrationVersion limit 1
}
// If there's no row yet, we add it here
if cur_version == MigrationVersion{} {
sql conn {
insert cur_version into MigrationVersion
}
}
// Apply each migration in order
for i in cur_version.version .. db.migrations_up.len {
migration := db.migrations_up[i].to_string()
version_num := i + 1
// vfmt does not like these dots
println('Applying migration $version_num' + '...')
// The sqlite library seems to not like it when multiple statements are
// passed in a single exec. Therefore, we split them & run them all
// separately.
for part in migration.split(';').map(it.trim_space()).filter(it != '') {
res := conn.exec_none(part)
if res != sqlite.sqlite_done {
return error('An error occurred while applying migration $version_num')
}
}
// The where clause doesn't really matter, as there will always only be
// one entry anyways.
sql conn {
update MigrationVersion set version = version_num where id > 0
}
}
return VieterDb{
conn: conn
}
}
// row_into<T> converts an sqlite.Row into a given type T by parsing each field
// from a string according to its type.
pub fn row_into<T>(row sqlite.Row) T {
mut i := 0
mut out := T{}
$for field in T.fields {
$if field.typ is string {
out.$(field.name) = row.vals[i]
} $else $if field.typ is int {
out.$(field.name) = row.vals[i].int()
} $else $if field.typ is time.Time {
out.$(field.name) = time.unix(row.vals[i].int())
}
i += 1
}
return out
}
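Given the version bookkeeping above, shipping a later migration should only require growing both embedded lists; the 002-example directory name below is hypothetical. Any database still at an older version then has the remaining files applied on the next start-up:

const (
	migrations_up = [
		$embed_file('migrations/001-initial/up.sql'),
		$embed_file('migrations/002-example/up.sql'),
	]
	migrations_down = [
		$embed_file('migrations/001-initial/down.sql'),
		$embed_file('migrations/002-example/down.sql'),
	]
)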

View File

@@ -1,85 +1,21 @@
module db
pub struct GitRepoArch {
pub:
id int [primary; sql: serial]
repo_id int [nonull]
value string [nonull]
}
// str returns a string representation.
pub fn (gra &GitRepoArch) str() string {
return gra.value
}
pub struct GitRepo {
pub mut:
id int [optional; primary; sql: serial]
// URL of the Git repository
url string [nonull]
// Branch of the Git repository to use
branch string [nonull]
// Which repo the builder should publish packages to
repo string [nonull]
// Cron schedule describing how frequently to build the repo.
schedule string [optional]
// On which architectures the package is allowed to be built. In reality,
// this controls which builders will periodically build the image.
arch []GitRepoArch [fkey: 'repo_id']
}
// str returns a string representation.
pub fn (gr &GitRepo) str() string {
mut parts := [
'id: $gr.id',
'url: $gr.url',
'branch: $gr.branch',
'repo: $gr.repo',
'schedule: $gr.schedule',
'arch: ${gr.arch.map(it.value).join(', ')}',
]
str := parts.join('\n')
return str
}
// patch_from_params patches a GitRepo from a map[string]string, usually
// provided from a web.App's params
pub fn (mut r GitRepo) patch_from_params(params map[string]string) {
$for field in GitRepo.fields {
if field.name in params {
$if field.typ is string {
r.$(field.name) = params[field.name]
// This specific type check is needed for the compiler to ensure
// our types are correct
} $else $if field.typ is []GitRepoArch {
r.$(field.name) = params[field.name].split(',').map(GitRepoArch{ value: it })
}
}
}
}
// git_repo_from_params creates a GitRepo from a map[string]string, usually
// provided from a web.App's params
pub fn git_repo_from_params(params map[string]string) ?GitRepo {
mut repo := GitRepo{}
// If we're creating a new GitRepo, we want all fields to be present before
// "patching".
$for field in GitRepo.fields {
if field.name !in params && !field.attrs.contains('optional') {
return error('Missing parameter: ${field.name}.')
}
}
repo.patch_from_params(params)
return repo
}
import models { GitRepo, GitRepoArch, GitRepoFilter }
// get_git_repos returns all GitRepo's in the database.
pub fn (db &VieterDb) get_git_repos() []GitRepo {
pub fn (db &VieterDb) get_git_repos(filter GitRepoFilter) []GitRepo {
// This seems to currently be blocked by a bug in the ORM, I'll have to ask
// around.
if filter.repo != '' {
res := sql db.conn {
select from GitRepo where repo == filter.repo order by id limit filter.limit offset filter.offset
}
return res
}
res := sql db.conn {
select from GitRepo order by id
select from GitRepo order by id limit filter.limit offset filter.offset
}
return res
@@ -130,6 +66,7 @@ pub fn (db &VieterDb) update_git_repo(repo_id int, params map[string]string) {
}
}
values_str := values.join(', ')
// I think this is actual SQL & not the ORM language
query := 'update GitRepo set $values_str where id == $repo_id'
db.conn.exec_none(query)

View File

@@ -1,38 +1,57 @@
module db
import models { BuildLog, BuildLogFilter }
import time
pub struct BuildLog {
pub:
id int [primary; sql: serial]
repo_id int [nonull]
start_time time.Time [nonull]
end_time time.Time [nonull]
arch string [nonull]
exit_code int [nonull]
}
// str returns a string representation.
pub fn (bl &BuildLog) str() string {
mut parts := [
'id: $bl.id',
'repo id: $bl.repo_id',
'start time: $bl.start_time',
'end time: $bl.end_time',
'arch: $bl.arch',
'exit code: $bl.exit_code',
]
str := parts.join('\n')
return str
}
// get_build_logs returns all BuildLog's in the database.
pub fn (db &VieterDb) get_build_logs() []BuildLog {
res := sql db.conn {
select from BuildLog order by id
pub fn (db &VieterDb) get_build_logs(filter BuildLogFilter) []BuildLog {
mut where_parts := []string{}
if filter.repo != 0 {
where_parts << 'repo_id == $filter.repo'
}
if filter.before != time.Time{} {
where_parts << 'start_time < $filter.before.unix_time()'
}
if filter.after != time.Time{} {
where_parts << 'start_time > $filter.after.unix_time()'
}
// NOTE: possible SQL injection
if filter.arch != '' {
where_parts << "arch == '$filter.arch'"
}
mut parts := []string{}
for exp in filter.exit_codes {
if exp[0] == `!` {
code := exp[1..].int()
parts << 'exit_code != $code'
} else {
code := exp.int()
parts << 'exit_code == $code'
}
}
if parts.len > 0 {
where_parts << parts.map('($it)').join(' or ')
}
mut where_str := ''
if where_parts.len > 0 {
where_str = 'where ' + where_parts.map('($it)').join(' and ')
}
query := 'select * from BuildLog $where_str limit $filter.limit offset $filter.offset'
rows, _ := db.conn.exec(query)
res := rows.map(row_into<BuildLog>(it))
return res
}
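For example, a BuildLogFilter with repo: 3, arch: 'x86_64' and exit_codes: ['!0'] (and the default limit of 25) produces roughly: select * from BuildLog where (repo_id == 3) and (arch == 'x86_64') and ((exit_code != 0)) limit 25 offset 0.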

View File

@ -0,0 +1,3 @@
DROP TABLE IF EXISTS BuildLog;
DROP TABLE IF EXISTS GitRepoArch;
DROP TABLE IF EXISTS GitRepo;

View File

@@ -0,0 +1,22 @@
CREATE TABLE IF NOT EXISTS GitRepo (
id INTEGER PRIMARY KEY,
url TEXT NOT NULL,
branch TEXT NOT NULL,
repo TEXT NOT NULL,
schedule TEXT
);
CREATE TABLE IF NOT EXISTS GitRepoArch (
id INTEGER PRIMARY KEY,
repo_id INTEGER NOT NULL,
value TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS BuildLog (
id INTEGER PRIMARY KEY,
repo_id INTEGER NOT NULL,
start_time INTEGER NOT NULL,
end_time INTEGER NOT NULL,
arch TEXT NOT NULL,
exit_code INTEGER NOT NULL
);

View File

@@ -83,6 +83,7 @@ pub fn (mut d DockerConn) inspect_container(id string) ?ContainerInspect {
mut data := json.decode(ContainerInspect, body)?
// The Docker engine API *should* always return UTC time.
data.state.start_time = time.parse_rfc3339(data.state.start_time_str)?
if data.state.status == 'exited' {

View File

@@ -5,13 +5,14 @@ import server
import cli
import console.git
import console.logs
import console.schedule
import cron
fn main() {
mut app := cli.Command{
name: 'vieter'
description: 'Vieter is a lightweight implementation of an Arch repository server.'
version: '0.3.0-alpha.1'
version: '0.3.0-alpha.2'
flags: [
cli.Flag{
flag: cli.FlagType.string
@@ -27,6 +28,7 @@ fn main() {
git.cmd(),
cron.cmd(),
logs.cmd(),
schedule.cmd(),
]
}
app.setup()

src/models/git.v 100644 (new file, 52 lines)
View File

@@ -0,0 +1,52 @@
module models
pub struct GitRepoArch {
pub:
id int [primary; sql: serial]
repo_id int [nonull]
value string [nonull]
}
// str returns a string representation.
pub fn (gra &GitRepoArch) str() string {
return gra.value
}
pub struct GitRepo {
pub mut:
id int [primary; sql: serial]
// URL of the Git repository
url string [nonull]
// Branch of the Git repository to use
branch string [nonull]
// Which repo the builder should publish packages to
repo string [nonull]
// Cron schedule describing how frequently to build the repo.
schedule string
// On which architectures the package is allowed to be built. In reality,
// this controls which builders will periodically build the image.
arch []GitRepoArch [fkey: 'repo_id']
}
// str returns a string representation.
pub fn (gr &GitRepo) str() string {
mut parts := [
'id: $gr.id',
'url: $gr.url',
'branch: $gr.branch',
'repo: $gr.repo',
'schedule: $gr.schedule',
'arch: ${gr.arch.map(it.value).join(', ')}',
]
str := parts.join('\n')
return str
}
[params]
pub struct GitRepoFilter {
pub mut:
limit u64 = 25
offset u64
repo string
}

src/models/logs.v 100644 (new file, 41 lines)
View File

@@ -0,0 +1,41 @@
module models
import time
pub struct BuildLog {
pub mut:
id int [primary; sql: serial]
repo_id int [nonull]
start_time time.Time [nonull]
end_time time.Time [nonull]
arch string [nonull]
exit_code int [nonull]
}
// str returns a string representation.
pub fn (bl &BuildLog) str() string {
mut parts := [
'id: $bl.id',
'repo id: $bl.repo_id',
'start time: $bl.start_time.local()',
'end time: $bl.end_time.local()',
'duration: ${bl.end_time - bl.start_time}',
'arch: $bl.arch',
'exit code: $bl.exit_code',
]
str := parts.join('\n')
return str
}
[params]
pub struct BuildLogFilter {
pub mut:
limit u64 = 25
offset u64
repo int
before time.Time
after time.Time
arch string
exit_codes []string
}
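The exit_codes entries use the small prefix syntax handled by the query builder in get_build_logs shown earlier: ['!0'] matches any failed build, while e.g. ['1', '2'] matches builds that exited with code 1 or 2; the listed codes are OR'ed together and then AND'ed with the rest of the filter.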

View File

@@ -0,0 +1,53 @@
module models
import time
// from_params<T> creates a new instance of T from the given map by parsing all
// of its fields from the map.
pub fn from_params<T>(params map[string]string) ?T {
mut o := T{}
patch_from_params<T>(mut o, params)?
return o
}
// patch_from_params<T> updates the given T object with the params defined in
// the map.
pub fn patch_from_params<T>(mut o T, params map[string]string) ? {
$for field in T.fields {
if field.name in params && params[field.name] != '' {
$if field.typ is string {
o.$(field.name) = params[field.name]
} $else $if field.typ is int {
o.$(field.name) = params[field.name].int()
} $else $if field.typ is u64 {
o.$(field.name) = params[field.name].u64()
} $else $if field.typ is []GitRepoArch {
o.$(field.name) = params[field.name].split(',').map(GitRepoArch{ value: it })
} $else $if field.typ is time.Time {
o.$(field.name) = time.unix(params[field.name].int())
} $else $if field.typ is []string {
o.$(field.name) = params[field.name].split(',')
}
} else if field.attrs.contains('nonull') {
return error('Missing parameter: ${field.name}.')
}
}
}
// params_from<T> converts a given T struct into a map of strings.
pub fn params_from<T>(o &T) map[string]string {
mut out := map[string]string{}
$for field in T.fields {
$if field.typ is time.Time {
out[field.name] = o.$(field.name).unix_time().str()
} $else $if field.typ is []string {
out[field.name] = o.$(field.name).join(',')
} $else {
out[field.name] = o.$(field.name).str()
}
}
return out
}
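A small sketch of the round trip these generics enable, using GitRepoFilter from the same module; the values in the comments follow from the field-by-field conversion above:

import models { GitRepoFilter }

fn example_roundtrip() ? {
	filter := GitRepoFilter{
		limit: u64(10)
		repo: 'vieter'
	}
	params := models.params_from(filter)
	// params == {'limit': '10', 'offset': '0', 'repo': 'vieter'}
	same := models.from_params<GitRepoFilter>(params)?
	assert same == filter
}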

View File

@@ -4,6 +4,7 @@ import web
import net.http
import response { new_data_response, new_response }
import db
import models { GitRepo, GitRepoArch, GitRepoFilter }
// get_repos returns the current list of repos.
['/api/repos'; get]
@@ -12,7 +13,10 @@ fn (mut app App) get_repos() web.Result {
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
}
repos := app.db.get_git_repos()
filter := models.from_params<GitRepoFilter>(app.query) or {
return app.json(http.Status.bad_request, new_response('Invalid query parameters.'))
}
repos := app.db.get_git_repos(filter)
return app.json(http.Status.ok, new_data_response(repos))
}
@@ -44,7 +48,7 @@ fn (mut app App) post_repo() web.Result {
params['arch'] = app.conf.default_arch
}
new_repo := db.git_repo_from_params(params) or {
new_repo := models.from_params<GitRepo>(params) or {
return app.json(http.Status.bad_request, new_response(err.msg()))
}
@@ -75,7 +79,7 @@ fn (mut app App) patch_repo(id int) web.Result {
app.db.update_git_repo(id, app.query)
if 'arch' in app.query {
arch_objs := app.query['arch'].split(',').map(db.GitRepoArch{ value: it })
arch_objs := app.query['arch'].split(',').map(GitRepoArch{ value: it })
app.db.update_git_repo_archs(id, arch_objs)
}
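In practice the route now accepts requests such as GET /api/repos?repo=vieter&limit=10&offset=20 (authenticated as before); parameters that are left out or empty keep the filter's defaults, and an unparsable set of parameters results in the 400 response above.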

View File

@@ -8,6 +8,7 @@ import db
import time
import os
import util
import models { BuildLog, BuildLogFilter }
// get_logs returns all build logs in the database. A 'repo' query param can
// optionally be added to limit the list of build logs to that repository.
@@ -17,11 +18,10 @@ fn (mut app App) get_logs() web.Result {
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
}
logs := if 'repo' in app.query {
app.db.get_build_logs_for_repo(app.query['repo'].int())
} else {
app.db.get_build_logs()
filter := models.from_params<BuildLogFilter>(app.query) or {
return app.json(http.Status.bad_request, new_response('Invalid query parameters.'))
}
logs := app.db.get_build_logs(filter)
return app.json(http.Status.ok, new_data_response(logs))
}
@@ -97,7 +97,7 @@ fn (mut app App) post_log() web.Result {
}
// Store log in db
log := db.BuildLog{
log := BuildLog{
repo_id: repo_id
start_time: start_time
end_time: end_time

View File

@@ -68,7 +68,9 @@ pub fn server(conf Config) ? {
}
db_file := os.join_path_single(conf.data_dir, server.db_file_name)
db := db.init(db_file) or { util.exit_with_message(1, 'Failed to initialize database.') }
db := db.init(db_file) or {
util.exit_with_message(1, 'Failed to initialize database: $err.msg()')
}
web.run(&App{
logger: logger