feat(agent): partially wrote daemon code

web-stuff
Jef Roosens 2022-12-12 22:09:57 +01:00 committed by Chewing_Bever
parent 7ef8d4b846
commit 6f23d690a7
Signed by: Jef Roosens
GPG Key ID: B75D4F293C7052DB
6 changed files with 116 additions and 13 deletions

View File

@@ -6,6 +6,8 @@ import conf as vconf
struct Config {
pub:
log_level string = 'WARN'
// Architecture that the agent represents
arch string
api_key string
address string
data_dir string

View File

@@ -4,6 +4,8 @@ import log
import sync.stdatomic
import build { BuildConfig }
import client
import time
import os
const (
build_empty = 0
@@ -14,6 +16,7 @@ const (
struct AgentDaemon {
logger shared log.Log
conf Config
mut:
images ImageManager
// Which builds are currently running; length is same as
// conf.max_concurrent_builds
@@ -41,13 +44,33 @@ pub fn (mut d AgentDaemon) run() {
for {
free_builds := d.update_atomics()
// All build slots are taken, so there's nothing to be done
if free_builds == 0 {
time.sleep(1 * time.second)
continue
}
// Builds have finished, so old builder images might have freed up.
d.images.clean_old_images()
// Poll for new jobs
new_configs := d.client.poll_jobs(free_builds) or {
d.lerror('Failed to poll jobs: $err.msg()')
time.sleep(1 * time.second)
continue
}
// Schedule new jobs
for config in new_configs {
d.start_build(config)
}
}
}
// update_atomics checks for each build whether it's completed, and sets it to
// free again if so. The return value is how many build slots are currently
// free.
fn (mut d AgentDaemon) update_atomics() int {
mut count := 0
@@ -62,3 +85,53 @@ fn (mut d AgentDaemon) update_atomics() int {
return count
}
// start_build starts a build for the given BuildConfig object.
fn (mut d AgentDaemon) start_build(config BuildConfig) bool {
for i in 0 .. d.atomics.len {
if stdatomic.load_u64(&d.atomics[i]) == agent.build_empty {
stdatomic.store_u64(&d.atomics[i], agent.build_running)
d.builds[i] = config
go d.run_build(i, config)
return true
}
}
return false
}
// run_build actually starts the build process for a given target.
fn (mut d AgentDaemon) run_build(build_index int, config BuildConfig) {
d.linfo('started build: $config.url -> $config.repo')
// 0 means success, 1 means failure
mut status := 0
new_config := BuildConfig{
...config
base_image: d.images.get(config.base_image)
}
res := build.build_config(d.client.address, d.client.api_key, new_config) or {
d.ldebug('build_config error: $err.msg()')
status = 1
build.BuildResult{}
}
if status == 0 {
d.linfo('finished build: $config.url -> $config.repo; uploading logs...')
build_arch := os.uname().machine
d.client.add_build_log(config.target_id, res.start_time, res.end_time, build_arch,
res.exit_code, res.logs) or {
d.lerror('Failed to upload logs for build: $config.url -> $config.repo')
}
} else {
d.linfo('an error occurred during build: $config.url -> $config.repo')
}
stdatomic.store_u64(&d.atomics[build_index], agent.build_done)
}
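
The body of update_atomics is collapsed in the hunk above. Going by the build_empty/build_running/build_done constants and its docstring, a minimal sketch of what it most likely does (not taken from this commit):

fn (mut d AgentDaemon) update_atomics() int {
	mut count := 0

	for i in 0 .. d.atomics.len {
		// A build that marked itself as done frees its slot up again
		if stdatomic.load_u64(&d.atomics[i]) == agent.build_done {
			stdatomic.store_u64(&d.atomics[i], agent.build_empty)
			count++
		} else if stdatomic.load_u64(&d.atomics[i]) == agent.build_empty {
			// Slots that were never taken are also free
			count++
		}
	}

	return count
}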

View File

@@ -19,6 +19,10 @@ fn new_image_manager(refresh_frequency int) ImageManager {
}
}
pub fn (m &ImageManager) get(base_image string) string {
return m.images[base_image].last()
}
fn (mut m ImageManager) refresh_image(base_image string) ! {
// No need to refresh the image if the previous one is still new enough
if base_image in m.timestamps if base_image in m.timestamps

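get and refresh_image only make sense if the manager keeps a list of built images plus a refresh timestamp per base image. A rough sketch of the assumed struct, which is not shown in this diff (field names besides images and timestamps are guesses, and the timestamp type is assumed to be time.Time):

struct ImageManager {
	refresh_frequency int
mut:
	// For every base image, the builder images built from it, newest last
	images map[string][]string
	// When every base image was last refreshed
	timestamps map[string]time.Time
}
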
View File

@@ -103,10 +103,23 @@ pub:
logs string
}
pub fn build_target(address string, api_key string, base_image_id string, target &Target) !BuildResult {
config := BuildConfig{
target_id: target.id
kind: target.kind
url: target.url
branch: target.branch
repo: target.repo
base_image: base_image_id
}
return build_config(address, api_key, config)
}
// build_target builds, packages & publishes a given Arch package based on the
// provided target. The base image ID should be of an image previously created
// by create_build_image. It returns the logs of the container.
pub fn build_config(address string, api_key string, config BuildConfig) !BuildResult {
mut dd := docker.new_conn()!
defer {
@@ -114,14 +127,14 @@ pub fn build_target(address string, api_key string, base_image_id string, target
}
build_arch := os.uname().machine
build_script := create_build_script(address, config, build_arch)
// We convert the build script into a base64 string, which then gets passed
// to the container as an env var
base64_script := base64.encode_str(build_script)
c := docker.NewContainer{
image: '$config.base_image'
env: [
'BUILD_SCRIPT=$base64_script',
'API_KEY=$api_key',

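BuildConfig itself is not part of this diff; the fields assigned in build_target above suggest a definition roughly like the following (field types are assumptions):

pub struct BuildConfig {
pub:
	target_id  int
	kind       string
	url        string
	branch     string
	repo       string
	base_image string
}
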
View File

@@ -23,13 +23,13 @@ pub fn echo_commands(cmds []string) []string {
}
// create_build_script generates a shell script that builds a given Target.
fn create_build_script(address string, config BuildConfig, build_arch string) string {
repo_url := '$address/$config.repo'
mut commands := [
// This will later be replaced by a proper setting for changing the
// mirrorlist
"echo -e '[$config.repo]\\nServer = $address/\$repo/\$arch\\nSigLevel = Optional' >> /etc/pacman.conf"
// We need to update the package list of the repo we just added above.
// This should however not pull in a lot of packages as long as the
// builder image is rebuilt frequently.
@@ -38,22 +38,22 @@ fn create_build_script(address string, target &Target, build_arch string) string
'su builder',
]
commands << match config.kind {
'git' {
if config.branch == '' {
[
"git clone --single-branch --depth 1 '$config.url' repo",
]
} else {
[
"git clone --single-branch --depth 1 --branch $config.branch '$config.url' repo",
]
}
}
'url' {
[
'mkdir repo',
"curl -o repo/PKGBUILD -L '$config.url'",
]
}
else {

src/client/jobs.v 100644 (11 additions)
View File

@@ -0,0 +1,11 @@
module client
import build { BuildConfig }
pub fn (c &Client) poll_jobs(max int) ![]BuildConfig {
data := c.send_request<[]BuildConfig>(.get, '/api/v1/jobs/poll', {
'max': max.str()
})!
return data.data
}
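
A hypothetical standalone use of the new poll_jobs call, mirroring what the agent loop above does; client.new as the constructor name is an assumption:

fn poll_example(address string, api_key string) ! {
	c := client.new(address, api_key)

	// Ask the server for at most three queued jobs
	configs := c.poll_jobs(3)!

	for config in configs {
		println('received job: $config.url -> $config.repo')
	}
}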