forked from vieter-v/vieter
Merge pull request 'Add direct PKGBUILD link as target option' (#254) from Chewing_Bever/vieter:direct-pkgbuild-target into dev
Reviewed-on: vieter/vieter#254
commit 424b0651e9
@ -10,12 +10,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added

* Server port can now be configured
* Targets now have a 'kind' field describing whether it's a Git repository or a
  URL to a PKGBUILD
* Targets with kind 'url' can provide a direct URL to a PKGBUILD instead of
  providing a Git repository

### Changed

* Moved all API routes under `/v1` namespace
* Renamed `vieter repos` to `vieter targets`
* Renamed `/api/v1/repos` namespace to `/api/v1/targets`
* Branch name for 'git' targets is now optional; if not provided, the
  repository will be cloned with the default branch

### Removed
@ -24,6 +24,7 @@ curl \
"data": [
  {
    "id": 1,
    "kind": "git",
    "url": "https://aur.archlinux.org/discord-ptb.git",
    "branch": "master",
    "repo": "bur",
@ -69,6 +70,7 @@ curl \
"message": "",
"data": {
  "id": 1,
  "kind": "git",
  "url": "https://aur.archlinux.org/discord-ptb.git",
  "branch": "master",
  "repo": "bur",
@ -108,6 +110,7 @@ Create a new target with the given data.

Parameter | Description
--------- | -----------
kind | Kind of target to add; one of 'git', 'url'.
url | URL of the Git repository.
branch | Branch of the Git repository.
repo | Vieter repository to publish built packages to.
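As an illustrative sketch (not part of this change set): assuming the parameters are passed as query-string values and authentication uses the same `X-API-KEY` header as the publish calls elsewhere in these docs, creating a target of kind 'url' could look roughly like this:

```
curl \
  -XPOST \
  -H 'X-API-KEY: secret' \
  'https://example.com/api/v1/targets?kind=url&url=https://examplerepo.com&repo=vieter'
```

URL-encode the `url` value if it contains characters such as `&`.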
@ -132,6 +135,7 @@ id | id of target to modify

Parameter | Description
--------- | -----------
kind | Kind of target; one of 'git', 'url'.
url | URL of the Git repository.
branch | Branch of the Git repository.
repo | Vieter repository to publish built packages to.
@ -20,24 +20,24 @@ pages](https://rustybever.be/man/vieter/vieter-targets.1.html) describe this in
greater detail, but the basic usage is as follows:

```
vieter targets add some-url some-branch some-repository
vieter targets add some-url some-repository
```

Here, `some-url` is the URL of the Git repository containing the PKGBUILD. This
URL is passed to `git clone`, meaning the repository should be public. Vieter
expects the same format as an AUR Git repository, so you can directly use AUR
URLs here.
URLs here. Alternatively, you can also provide the URL to a PKGBUILD file
instead. See
[vieter-targets-add(1)](https://rustybever.be/man/vieter/vieter-targets-add.1.html)
for more information.

`some-branch` is the branch of the Git repository the build should check out.
If you're using an AUR package, this should be `master`.

Finally, `some-repo` is the repository to which the built package archives
should be published.
`some-repo` is the repository to which the built package archives should be
published.

The above command intentionally leaves out a few parameters to make the CLI
more usable. For information on how to modify all parameters using the CLI,
see
[vieter-targets-edit(1)](https://rustybever.be/man/vieter/vieter-targets-edit.1.html).
[vieter-targets(1)](https://rustybever.be/man/vieter/vieter-targets.1.html).
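For example, here is a sketch based on the `--kind` and `--branch` flags introduced in this change (the exact flag syntax is assumed to follow the usual `--flag value` form; URLs and repository names are reused from the examples above):

```
# 'git' target with an explicit branch
vieter targets add --branch master https://aur.archlinux.org/discord-ptb.git bur

# 'url' target pointing straight at a PKGBUILD file
vieter targets add --kind url https://examplerepo.com vieter
```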

## Reading logs
@ -90,10 +90,10 @@ pub:
logs string
}

// build_repo builds, packages & publishes a given Arch package based on the
// build_target builds, packages & publishes a given Arch package based on the
// provided target. The base image ID should be of an image previously created
// by create_build_image. It returns the logs of the container.
pub fn build_repo(address string, api_key string, base_image_id string, repo &Target) ?BuildResult {
pub fn build_target(address string, api_key string, base_image_id string, target &Target) ?BuildResult {
mut dd := docker.new_conn()?

defer {
@ -101,7 +101,7 @@ pub fn build_repo(address string, api_key string, base_image_id string, repo &Ta
}

build_arch := os.uname().machine
build_script := create_build_script(address, repo, build_arch)
build_script := create_build_script(address, target, build_arch)

// We convert the build script into a base64 string, which then gets passed
// to the container as an env var
@ -4,8 +4,8 @@ echo -e '+ pacman -Syu --needed --noconfirm'
pacman -Syu --needed --noconfirm
echo -e '+ su builder'
su builder
echo -e '+ git clone --single-branch --depth 1 --branch main https://examplerepo.com repo'
git clone --single-branch --depth 1 --branch main https://examplerepo.com repo
echo -e '+ git clone --single-branch --depth 1 '\''https://examplerepo.com'\'' repo'
git clone --single-branch --depth 1 'https://examplerepo.com' repo
echo -e '+ cd repo'
cd repo
echo -e '+ makepkg --nobuild --syncdeps --needed --noconfirm'
@ -0,0 +1,20 @@
echo -e '+ echo -e '\''[vieter]\\nServer = https://example.com/$repo/$arch\\nSigLevel = Optional'\'' >> /etc/pacman.conf'
echo -e '[vieter]\nServer = https://example.com/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf
echo -e '+ pacman -Syu --needed --noconfirm'
pacman -Syu --needed --noconfirm
echo -e '+ su builder'
su builder
echo -e '+ git clone --single-branch --depth 1 --branch main '\''https://examplerepo.com'\'' repo'
git clone --single-branch --depth 1 --branch main 'https://examplerepo.com' repo
echo -e '+ cd repo'
cd repo
echo -e '+ makepkg --nobuild --syncdeps --needed --noconfirm'
makepkg --nobuild --syncdeps --needed --noconfirm
echo -e '+ source PKGBUILD'
source PKGBUILD
echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0'
curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0
echo -e '+ [ "$(id -u)" == 0 ] && exit 0'
[ "$(id -u)" == 0 ] && exit 0
echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done'
MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done
@ -0,0 +1,22 @@
echo -e '+ echo -e '\''[vieter]\\nServer = https://example.com/$repo/$arch\\nSigLevel = Optional'\'' >> /etc/pacman.conf'
echo -e '[vieter]\nServer = https://example.com/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf
echo -e '+ pacman -Syu --needed --noconfirm'
pacman -Syu --needed --noconfirm
echo -e '+ su builder'
su builder
echo -e '+ mkdir repo'
mkdir repo
echo -e '+ curl -o repo/PKGBUILD -L '\''https://examplerepo.com'\'''
curl -o repo/PKGBUILD -L 'https://examplerepo.com'
echo -e '+ cd repo'
cd repo
echo -e '+ makepkg --nobuild --syncdeps --needed --noconfirm'
makepkg --nobuild --syncdeps --needed --noconfirm
echo -e '+ source PKGBUILD'
source PKGBUILD
echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0'
curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0
echo -e '+ [ "$(id -u)" == 0 ] && exit 0'
[ "$(id -u)" == 0 ] && exit 0
echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done'
MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done
@ -23,20 +23,45 @@ pub fn echo_commands(cmds []string) []string {
}

// create_build_script generates a shell script that builds a given Target.
fn create_build_script(address string, repo &Target, build_arch string) string {
repo_url := '$address/$repo.repo'
fn create_build_script(address string, target &Target, build_arch string) string {
repo_url := '$address/$target.repo'

commands := echo_commands([
mut commands := [
// This will later be replaced by a proper setting for changing the
// mirrorlist
"echo -e '[$repo.repo]\\nServer = $address/\$repo/\$arch\\nSigLevel = Optional' >> /etc/pacman.conf"
"echo -e '[$target.repo]\\nServer = $address/\$repo/\$arch\\nSigLevel = Optional' >> /etc/pacman.conf"
// We need to update the package list of the repo we just added above.
// This should however not pull in a lot of packages as long as the
// builder image is rebuilt frequently.
'pacman -Syu --needed --noconfirm',
// makepkg can't run as root
'su builder',
'git clone --single-branch --depth 1 --branch $repo.branch $repo.url repo',
]

commands << match target.kind {
'git' {
if target.branch == '' {
[
"git clone --single-branch --depth 1 '$target.url' repo",
]
} else {
[
"git clone --single-branch --depth 1 --branch $target.branch '$target.url' repo",
]
}
}
'url' {
[
'mkdir repo',
"curl -o repo/PKGBUILD -L '$target.url'",
]
}
else {
panic("Invalid kind. This shouldn't be possible.")
}
}

commands << [
'cd repo',
'makepkg --nobuild --syncdeps --needed --noconfirm',
'source PKGBUILD',
@ -49,7 +74,7 @@ fn create_build_script(address string, repo &Target, build_arch string) string {
// we're in root so we don't proceed.
'[ "\$(id -u)" == 0 ] && exit 0',
'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $repo_url/publish; done',
])
]

return commands.join('\n')
return echo_commands(commands).join('\n')
}
@ -2,15 +2,42 @@ module build
import models { Target }

fn test_create_build_script() {
fn test_create_build_script_git_branch() {
target := Target{
id: 1
kind: 'git'
url: 'https://examplerepo.com'
branch: 'main'
repo: 'vieter'
}
build_script := create_build_script('https://example.com', target, 'x86_64')
expected := $embed_file('build_script.sh')
expected := $embed_file('build_script_git_branch.sh')

assert build_script == expected.to_string().trim_space()
}

fn test_create_build_script_git() {
target := Target{
id: 1
kind: 'git'
url: 'https://examplerepo.com'
repo: 'vieter'
}
build_script := create_build_script('https://example.com', target, 'x86_64')
expected := $embed_file('build_script_git.sh')

assert build_script == expected.to_string().trim_space()
}

fn test_create_build_script_url() {
target := Target{
id: 1
kind: 'url'
url: 'https://examplerepo.com'
repo: 'vieter'
}
build_script := create_build_script('https://example.com', target, 'x86_64')
expected := $embed_file('build_script_url.sh')

assert build_script == expected.to_string().trim_space()
}
@ -40,18 +40,17 @@ pub fn (c &Client) get_target(id int) ?Target {
return data.data
}

pub struct NewTarget {
kind string
url string
branch string
repo string
arch []string
}

// add_target adds a new target to the server.
pub fn (c &Client) add_target(url string, branch string, repo string, arch []string) ?Response<string> {
mut params := {
'url': url
'branch': branch
'repo': repo
}

if arch.len > 0 {
params['arch'] = arch.join(',')
}

pub fn (c &Client) add_target(t NewTarget) ?Response<string> {
params := models.params_from<NewTarget>(t)
data := c.send_request<string>(Method.post, '/api/v1/targets', params)?

return data
@ -6,9 +6,9 @@ import os
import build

// build locally builds the target with the given id.
fn build(conf Config, repo_id int) ? {
fn build(conf Config, target_id int) ? {
c := client.new(conf.address, conf.api_key)
repo := c.get_target(repo_id)?
target := c.get_target(target_id)?

build_arch := os.uname().machine
@ -16,7 +16,7 @@ fn build(conf Config, repo_id int) ? {
image_id := build.create_build_image(conf.base_image)?

println('Running build...')
res := build.build_repo(conf.address, conf.api_key, image_id, repo)?
res := build.build_target(conf.address, conf.api_key, image_id, target)?

println('Removing build image...')
@ -29,6 +29,6 @@ fn build(conf Config, repo_id int) ? {
dd.remove_image(image_id)?

println('Uploading logs to Vieter...')
c.add_build_log(repo.id, res.start_time, res.end_time, build_arch, res.exit_code,
c.add_build_log(target.id, res.start_time, res.end_time, build_arch, res.exit_code,
res.logs)?
}
@ -3,7 +3,7 @@ module targets
import cli
import vieter.vconf
import cron.expression { parse_expression }
import client
import client { NewTarget }
import console
import models { TargetFilter }
@ -65,14 +65,34 @@ pub fn cmd() cli.Command {
},
cli.Command{
name: 'add'
required_args: 3
usage: 'url branch repo'
description: 'Add a new Git repository target.'
required_args: 2
usage: 'url repo'
description: 'Add a new target with the given URL & target repo.'
flags: [
cli.Flag{
name: 'kind'
description: "Kind of target to add. Defaults to 'git' if not specified. One of 'git', 'url'."
flag: cli.FlagType.string
default_value: ['git']
},
cli.Flag{
name: 'branch'
description: "Which branch to clone; only applies to kind 'git'."
flag: cli.FlagType.string
},
]
execute: fn (cmd cli.Command) ? {
config_file := cmd.flags.get_string('config-file')?
conf := vconf.load<Config>(prefix: 'VIETER_', default_path: config_file)?

add(conf, cmd.args[0], cmd.args[1], cmd.args[2])?
t := NewTarget{
kind: cmd.flags.get_string('kind')?
url: cmd.args[0]
repo: cmd.args[1]
branch: cmd.flags.get_string('branch') or { '' }
}

add(conf, t)?
}
},
cli.Command{
@ -103,11 +123,11 @@ pub fn cmd() cli.Command {
name: 'edit'
required_args: 1
usage: 'id'
description: 'Edit the Git repository target that matches the given id.'
description: 'Edit the target that matches the given id.'
flags: [
cli.Flag{
name: 'url'
description: 'URL of the Git repository.'
description: 'URL value. Meaning depends on kind of target.'
flag: cli.FlagType.string
},
cli.Flag{
@ -130,6 +150,11 @@ pub fn cmd() cli.Command {
description: 'Cron schedule for repository.'
flag: cli.FlagType.string
},
cli.Flag{
name: 'kind'
description: 'Kind of target.'
flag: cli.FlagType.string
},
]
execute: fn (cmd cli.Command) ? {
config_file := cmd.flags.get_string('config-file')?
@ -171,22 +196,21 @@ pub fn cmd() cli.Command {
fn list(conf Config, filter TargetFilter) ? {
c := client.new(conf.address, conf.api_key)
repos := c.get_targets(filter)?
data := repos.map([it.id.str(), it.url, it.branch, it.repo])
data := repos.map([it.id.str(), it.kind, it.url, it.repo])

println(console.pretty_table(['id', 'url', 'branch', 'repo'], data)?)
println(console.pretty_table(['id', 'kind', 'url', 'repo'], data)?)
}

// add adds a new repository to the server's list.
fn add(conf Config, url string, branch string, repo string) ? {
fn add(conf Config, t &NewTarget) ? {
c := client.new(conf.address, conf.api_key)
res := c.add_target(url, branch, repo, [])?
res := c.add_target(t)?

println(res.message)
}

// remove removes a repository from the server's list.
fn remove(conf Config, id string) ? {
// id, _ := get_repo_by_prefix(conf, id_prefix) ?
id_int := id.int()

if id_int != 0 {
@ -71,29 +71,31 @@ fn (mut d Daemon) start_build(sb ScheduledBuild) bool {
return false
}

// run_build actually starts the build process for a given repo.
// run_build actually starts the build process for a given target.
fn (mut d Daemon) run_build(build_index int, sb ScheduledBuild) {
d.linfo('started build: $sb.repo.url $sb.repo.branch')
d.linfo('started build: $sb.target.url -> $sb.target.repo')

// 0 means success, 1 means failure
mut status := 0

res := build.build_repo(d.client.address, d.client.api_key, d.builder_images.last(),
&sb.repo) or {
d.ldebug('build_repo error: $err.msg()')
res := build.build_target(d.client.address, d.client.api_key, d.builder_images.last(),
&sb.target) or {
d.ldebug('build_target error: $err.msg()')
status = 1

build.BuildResult{}
}

if status == 0 {
d.linfo('finished build: $sb.repo.url $sb.repo.branch; uploading logs...')
d.linfo('finished build: $sb.target.url -> $sb.target.repo; uploading logs...')

build_arch := os.uname().machine
d.client.add_build_log(sb.repo.id, res.start_time, res.end_time, build_arch, res.exit_code,
res.logs) or { d.lerror('Failed to upload logs for $sb.repo.url $sb.repo.arch') }
d.client.add_build_log(sb.target.id, res.start_time, res.end_time, build_arch,
res.exit_code, res.logs) or {
d.lerror('Failed to upload logs for build: $sb.target.url -> $sb.target.repo')
}
} else {
d.linfo('failed build: $sb.repo.url $sb.repo.branch')
d.linfo('an error occurred during build: $sb.target.url -> $sb.target.repo')
}

stdatomic.store_u64(&d.atomics[build_index], daemon.build_done)
@ -20,7 +20,7 @@ const (
struct ScheduledBuild {
pub:
repo Target
target Target
timestamp time.Time
}
@ -37,9 +37,9 @@ mut:
global_schedule CronExpression
api_update_frequency int
image_rebuild_frequency int
// Repos currently loaded from API.
repos []Target
// At what point to update the list of repositories.
// Targets currently loaded from API.
targets []Target
// At what point to update the list of targets.
api_update_timestamp time.Time
image_build_timestamp time.Time
queue MinHeap<ScheduledBuild>
@ -51,7 +51,7 @@ mut:
logger shared log.Log
}

// init_daemon initializes a new Daemon object. It renews the repositories &
// init_daemon initializes a new Daemon object. It renews the targets &
// populates the build queue for the first time.
pub fn init_daemon(logger log.Log, address string, api_key string, base_image string, global_schedule CronExpression, max_concurrent_builds int, api_update_frequency int, image_rebuild_frequency int) ?Daemon {
mut d := Daemon{
@ -65,8 +65,8 @@ pub fn init_daemon(logger log.Log, address string, api_key string, base_image st
logger: logger
}

// Initialize the repos & queue
d.renew_repos()
// Initialize the targets & queue
d.renew_targets()
d.renew_queue()
if !d.rebuild_base_image() {
return error('The base image failed to build. The Vieter cron daemon cannot run without an initial builder image.')
@ -76,21 +76,21 @@ pub fn init_daemon(logger log.Log, address string, api_key string, base_image st
}

// run starts the actual daemon process. It runs builds when possible &
// periodically refreshes the list of repositories to ensure we stay in sync.
// periodically refreshes the list of targets to ensure we stay in sync.
pub fn (mut d Daemon) run() {
for {
finished_builds := d.clean_finished_builds()

// Update the API's contents if needed & renew the queue
if time.now() >= d.api_update_timestamp {
d.renew_repos()
d.renew_targets()
d.renew_queue()
}
// The finished builds should only be rescheduled if the API contents
// haven't been renewed.
else {
for sb in finished_builds {
d.schedule_build(sb.repo)
d.schedule_build(sb.target)
}
}
@ -114,7 +114,7 @@ pub fn (mut d Daemon) run() {
// every second to clean up any finished builds & start new ones.
mut delay := time.Duration(1 * time.second)

// Sleep either until we have to refresh the repos or when the next
// Sleep either until we have to refresh the targets or when the next
// build has to start, with a minimum of 1 second.
if d.current_build_count() == 0 {
now := time.now()
@ -148,12 +148,13 @@ pub fn (mut d Daemon) run() {
}
}

// schedule_build adds the next occurrence of the given repo build to the queue.
fn (mut d Daemon) schedule_build(repo Target) {
ce := if repo.schedule != '' {
parse_expression(repo.schedule) or {
// schedule_build adds the next occurrence of the given target's build to the
// queue.
fn (mut d Daemon) schedule_build(target Target) {
ce := if target.schedule != '' {
parse_expression(target.schedule) or {
// TODO This shouldn't return an error if the expression is empty.
d.lerror("Error while parsing cron expression '$repo.schedule' (id $repo.id): $err.msg()")
d.lerror("Error while parsing cron expression '$target.schedule' (id $target.id): $err.msg()")

d.global_schedule
}
@ -161,41 +162,41 @@ fn (mut d Daemon) schedule_build(repo Target) {
d.global_schedule
}

// A repo that can't be scheduled will just be skipped for now
// A target that can't be scheduled will just be skipped for now
timestamp := ce.next_from_now() or {
d.lerror("Couldn't calculate next timestamp from '$repo.schedule'; skipping")
d.lerror("Couldn't calculate next timestamp from '$target.schedule'; skipping")
return
}

d.queue.insert(ScheduledBuild{
repo: repo
target: target
timestamp: timestamp
})
}

// renew_repos requests the newest list of Git repos from the server & replaces
// renew_targets requests the newest list of targets from the server & replaces
// the old one.
fn (mut d Daemon) renew_repos() {
d.linfo('Renewing repos...')
fn (mut d Daemon) renew_targets() {
d.linfo('Renewing targets...')

mut new_repos := d.client.get_all_targets() or {
d.lerror('Failed to renew repos. Retrying in ${daemon.api_update_retry_timeout}s...')
mut new_targets := d.client.get_all_targets() or {
d.lerror('Failed to renew targets. Retrying in ${daemon.api_update_retry_timeout}s...')
d.api_update_timestamp = time.now().add_seconds(daemon.api_update_retry_timeout)

return
}

// Filter out any repos that shouldn't run on this architecture
// Filter out any targets that shouldn't run on this architecture
cur_arch := os.uname().machine
new_repos = new_repos.filter(it.arch.any(it.value == cur_arch))
new_targets = new_targets.filter(it.arch.any(it.value == cur_arch))

d.repos = new_repos
d.targets = new_targets

d.api_update_timestamp = time.now().add_seconds(60 * d.api_update_frequency)
}

// renew_queue replaces the old queue with a new one that reflects the newest
// values in repos_map.
// values in targets.
fn (mut d Daemon) renew_queue() {
d.linfo('Renewing queue...')
mut new_queue := MinHeap<ScheduledBuild>{}
@ -225,10 +226,10 @@ fn (mut d Daemon) renew_queue() {
d.queue = new_queue

// For each repository in repos_map, parse their cron expression (or use
// the default one if not present) & add them to the queue
for repo in d.repos {
d.schedule_build(repo)
// For each target in targets, parse their cron expression (or use the
// default one if not present) & add them to the queue
for target in d.targets {
d.schedule_build(target)
}
}
@ -16,9 +16,13 @@ const (
migrations_up = [
$embed_file('migrations/001-initial/up.sql'),
$embed_file('migrations/002-rename-to-targets/up.sql'),
$embed_file('migrations/003-target-url-type/up.sql'),
]
migrations_down = [
$embed_file('migrations/001-initial/down.sql'),
$embed_file('migrations/002-rename-to-targets/down.sql'),
$embed_file('migrations/003-target-url-type/down.sql'),
]
migrations_down = [$embed_file('migrations/001-initial/down.sql'),
$embed_file('migrations/002-rename-to-targets/down.sql')]
)

// init initializes a database & adds the correct tables.
@ -0,0 +1,4 @@
-- I'm not sure whether I should remove any non-git targets here. Keeping them
-- will result in invalid targets, but removing them means losing data.
ALTER TABLE Target DROP COLUMN kind;
@ -0,0 +1 @@
ALTER TABLE Target ADD COLUMN kind TEXT NOT NULL DEFAULT 'git';
@ -1,5 +1,7 @@
module models

pub const valid_kinds = ['git', 'url']

pub struct TargetArch {
pub:
id int [primary; sql: serial]
@ -15,10 +17,13 @@ pub fn (gra &TargetArch) str() string {
pub struct Target {
pub mut:
id int [primary; sql: serial]
// URL of the Git repository
kind string [nonull]
// If kind is git: URL of the Git repository
// If kind is url: URL to PKGBUILD file
url string [nonull]
// Branch of the Git repository to use
branch string [nonull]
// Branch of the Git repository to use; only applicable when kind is git.
// If not provided, the repository is cloned with the default branch.
branch string
// Which repo the builder should publish packages to
repo string [nonull]
// Cron schedule describing how frequently to build the repo.
@ -32,6 +37,7 @@ pub mut:
pub fn (gr &Target) str() string {
mut parts := [
'id: $gr.id',
'kind: $gr.kind',
'url: $gr.url',
'branch: $gr.branch',
'repo: $gr.repo',
@ -52,6 +52,11 @@ fn (mut app App) v1_post_target() web.Result {
return app.json(http.Status.bad_request, new_response(err.msg()))
}

// Ensure someone doesn't submit an invalid kind
if new_repo.kind !in models.valid_kinds {
return app.json(http.Status.bad_request, new_response('Invalid kind.'))
}

app.db.add_target(new_repo)

return app.json(http.Status.ok, new_response('Repo added successfully.'))