Merge pull request 'Address some small problems' (#306) from Chewing_Bever/vieter:image-fixes into dev
ci/woodpecker/push/docs Pipeline was successful
ci/woodpecker/push/lint Pipeline was successful
ci/woodpecker/push/arch Pipeline was successful
ci/woodpecker/push/build Pipeline was successful
ci/woodpecker/push/man Pipeline was successful
ci/woodpecker/push/test Pipeline was successful
ci/woodpecker/push/docker Pipeline was successful
ci/woodpecker/push/deploy Pipeline was successful
Reviewed-on: #306

commit 894323ddcb

CHANGELOG.md
@@ -7,24 +7,30 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased](https://git.rustybever.be/vieter-v/vieter/src/branch/dev)
 
+### Added
+
+* Allow specifying subdirectory inside Git repository
+* Added option to deploy using agent-server architecture instead of cron daemon
+* Allow scheduling builds on the server from the CLI tool instead of building
+  them locally
+* Allow force-building packages, meaning the build won't check if the
+  repository is already up to date
+
 ### Changed
 
 * Migrated codebase to V 0.3.2
 * Cron expression parser now uses bitfields instead of bool arrays
-* Added option to deploy using agent-server architecture instead of cron daemon
-* Allow force-building packages, meaning the build won't check if the
-  repository is already up to date
-* Allow scheduling builds on the server from the CLI tool instead of building
-  them locally
 
 ### Fixed
 
 * Arch value for target is now properly set if not provided
-* All API endpoints now return proper JSON on success
-* CLI no longer exits with non-zero status code when removing/patching
-  target
 * Allow NULL values for branch in database
 * Endpoint for adding targets now returns the correct id
+* CLI now correctly errors and doesn't error when sending requests
+* Fixed possible infinite loop when removing old build images
+* Check whether build image still exists before starting build
+* Don't run makepkg `prepare()` function twice
+* Don't buffer stdout in Docker containers
 
 ## [0.4.0](https://git.rustybever.be/vieter-v/vieter/src/tag/0.4.0)
 
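The headline change is the new `path` option: a target can now point at a subdirectory of its Git repository, and the generated build script changes into that directory before running makepkg. A minimal, hypothetical sketch of that behaviour (the real logic is the `create_build_script` change further down; the `cd_command` helper here is made up for illustration):

module main

// Illustrative stand-in for the real BuildConfig; only the new path field matters here.
struct BuildConfig {
	path string
}

// cd_command mirrors the new branch in create_build_script: quote the path so
// subdirectories containing spaces keep working, and fall back to the repo root.
fn cd_command(config BuildConfig) string {
	return if config.path != '' {
		"cd 'repo/$config.path'"
	} else {
		'cd repo'
	}
}

fn main() {
	println(cd_command(BuildConfig{})) // cd repo
	println(cd_command(BuildConfig{ path: 'example/path with spaces' })) // cd 'repo/example/path with spaces'
}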
@@ -23,6 +23,7 @@ RUN if [ -n "${CI_COMMIT_SHA}" ]; then \
 "https://s3.rustybever.be/vieter/commits/${CI_COMMIT_SHA}/vieter-$(echo "${TARGETPLATFORM}" | sed 's:/:-:g')" && \
 chmod +x vieter ; \
 else \
+cd src && v install && cd .. && \
 LDFLAGS='-lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3 -static' make prod && \
 mv pvieter vieter ; \
 fi
@@ -80,13 +80,24 @@ pub fn (mut d AgentDaemon) run() {
 last_poll_time = time.now()
 
 for config in new_configs {
-// TODO handle this better than to just skip the config
 // Make sure a recent build base image is available for
 // building the config
+if !d.images.up_to_date(config.base_image) {
+d.linfo('Building builder image from base image $config.base_image')
+
+// TODO handle this better than to just skip the config
 d.images.refresh_image(config.base_image) or {
 d.lerror(err.msg())
 continue
 }
+}
+
+// It's technically still possible that the build image is
+// removed in the very short period between building the
+// builder image and starting a build container with it. If
+// this happens, faith really just didn't want you to do this
+// build.
+
 d.start_build(config)
 }
 
@@ -33,16 +33,42 @@ pub fn (m &ImageManager) get(base_image string) string {
 return m.images[base_image].last()
 }
 
-// refresh_image builds a new builder image from the given base image if the
-// previous builder image is too old or non-existent. This function will do
-// nothing if these conditions aren't met, so it's safe to call it every time
-// you want to ensure an image is up to date.
-fn (mut m ImageManager) refresh_image(base_image string) ! {
-if base_image in m.timestamps
-&& m.timestamps[base_image].add_seconds(m.max_image_age) > time.now() {
-return
+// up_to_date returns true if the last known builder image exists and is up to
+// date. If this function returns true, the last builder image may be used to
+// perform a build.
+pub fn (mut m ImageManager) up_to_date(base_image string) bool {
+if base_image !in m.timestamps
+|| m.timestamps[base_image].add_seconds(m.max_image_age) <= time.now() {
+return false
 }
 
+// It's possible the image has been removed by some external event, so we
+// check whether it actually exists as well.
+mut dd := docker.new_conn() or { return false }
+
+defer {
+dd.close() or {}
+}
+
+dd.image_inspect(m.images[base_image].last()) or {
+// Image doesn't exist, so we stop tracking it
+if err.code() == 404 {
+m.images[base_image].delete_last()
+m.timestamps.delete(base_image)
+}
+
+// If the inspect fails, it's either because the image doesn't exist or
+// because of some other error. Either way, we can't know *for certain*
+// that the image exists, so we return false.
+return false
+}
+
+return true
+}
+
+// refresh_image builds a new builder image from the given base image. This
+// function should only be called if `up_to_date` returned false.
+fn (mut m ImageManager) refresh_image(base_image string) ! {
 // TODO use better image tags for built images
 new_image := build.create_build_image(base_image) or {
 return error('Failed to build builder image from base image $base_image')
@@ -73,7 +99,21 @@ fn (mut m ImageManager) clean_old_images() {
 // wasn't deleted. Therefore, we move the index over. If the function
 // returns true, the array's length has decreased by one so we don't
 // move the index.
-dd.remove_image(m.images[image][i]) or { i += 1 }
+dd.remove_image(m.images[image][i]) or {
+// The image was removed by an external event
+if err.code() == 404 {
+m.images[image].delete(i)
+}
+// The image couldn't be removed, so we need to keep track of
+// it
+else {
+i += 1
+}
+
+continue
+}
+
+m.images[image].delete(i)
 }
 }
 }
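For readers skimming the diff: the freshness half of the new `up_to_date` check is plain timestamp bookkeeping, as in the self-contained mock below (assumed semantics only; the real ImageManager lives in the agent module and additionally asks Docker whether the image still exists):

module main

import time

// MockImageManager is a stripped-down stand-in for the agent's ImageManager,
// keeping only what is needed to show when a builder image counts as fresh.
struct MockImageManager {
mut:
	timestamps    map[string]time.Time
	max_image_age int
}

// up_to_date reports whether the builder image for base_image was built
// recently enough. The real implementation also inspects the image in Docker.
fn (m MockImageManager) up_to_date(base_image string) bool {
	return base_image in m.timestamps
		&& m.timestamps[base_image].add_seconds(m.max_image_age) > time.now()
}

fn main() {
	mut m := MockImageManager{
		max_image_age: 3600
	}
	println(m.up_to_date('archlinux:base-devel')) // false: never built
	m.timestamps['archlinux:base-devel'] = time.now()
	println(m.up_to_date('archlinux:base-devel')) // true: built just now
}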
@@ -22,6 +22,7 @@ pub:
 kind string
 url string
 branch string
+path string
 repo string
 base_image string
 force bool
@@ -29,7 +30,7 @@ pub:
 
 // str return a single-line string representation of a build log
 pub fn (c BuildConfig) str() string {
-return '{ target: $c.target_id, kind: $c.kind, url: $c.url, branch: $c.branch, repo: $c.repo, base_image: $c.base_image, force: $c.force }'
+return '{ target: $c.target_id, kind: $c.kind, url: $c.url, branch: $c.branch, path: $c.path, repo: $c.repo, base_image: $c.base_image, force: $c.force }'
 }
 
 // create_build_image creates a builder image given some base image which can
@@ -116,6 +117,7 @@ pub fn build_target(address string, api_key string, base_image_id string, target
 kind: target.kind
 url: target.url
 branch: target.branch
+path: target.path
 repo: target.repo
 base_image: base_image_id
 force: force
@@ -16,5 +16,5 @@ echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkg
 curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0
 echo -e '+ [ "$(id -u)" == 0 ] && exit 0'
 [ "$(id -u)" == 0 ] && exit 0
-echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done'
-MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done
+echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done'
+MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done
@@ -16,5 +16,5 @@ echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkg
 curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0
 echo -e '+ [ "$(id -u)" == 0 ] && exit 0'
 [ "$(id -u)" == 0 ] && exit 0
-echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done'
-MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done
+echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done'
+MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done
@@ -0,0 +1,20 @@
+echo -e '+ echo -e '\''[vieter]\\nServer = https://example.com/$repo/$arch\\nSigLevel = Optional'\'' >> /etc/pacman.conf'
+echo -e '[vieter]\nServer = https://example.com/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf
+echo -e '+ pacman -Syu --needed --noconfirm'
+pacman -Syu --needed --noconfirm
+echo -e '+ su builder'
+su builder
+echo -e '+ git clone --single-branch --depth 1 '\''https://examplerepo.com'\'' repo'
+git clone --single-branch --depth 1 'https://examplerepo.com' repo
+echo -e '+ cd '\''repo/example/path'\'''
+cd 'repo/example/path'
+echo -e '+ makepkg --nobuild --syncdeps --needed --noconfirm'
+makepkg --nobuild --syncdeps --needed --noconfirm
+echo -e '+ source PKGBUILD'
+source PKGBUILD
+echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0'
+curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0
+echo -e '+ [ "$(id -u)" == 0 ] && exit 0'
+[ "$(id -u)" == 0 ] && exit 0
+echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done'
+MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done
@@ -0,0 +1,20 @@
+echo -e '+ echo -e '\''[vieter]\\nServer = https://example.com/$repo/$arch\\nSigLevel = Optional'\'' >> /etc/pacman.conf'
+echo -e '[vieter]\nServer = https://example.com/$repo/$arch\nSigLevel = Optional' >> /etc/pacman.conf
+echo -e '+ pacman -Syu --needed --noconfirm'
+pacman -Syu --needed --noconfirm
+echo -e '+ su builder'
+su builder
+echo -e '+ git clone --single-branch --depth 1 '\''https://examplerepo.com'\'' repo'
+git clone --single-branch --depth 1 'https://examplerepo.com' repo
+echo -e '+ cd '\''repo/example/path with spaces'\'''
+cd 'repo/example/path with spaces'
+echo -e '+ makepkg --nobuild --syncdeps --needed --noconfirm'
+makepkg --nobuild --syncdeps --needed --noconfirm
+echo -e '+ source PKGBUILD'
+source PKGBUILD
+echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0'
+curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0
+echo -e '+ [ "$(id -u)" == 0 ] && exit 0'
+[ "$(id -u)" == 0 ] && exit 0
+echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done'
+MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done
@@ -18,5 +18,5 @@ echo -e '+ curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkg
 curl -s --head --fail https://example.com/vieter/x86_64/$pkgname-$pkgver-$pkgrel && exit 0
 echo -e '+ [ "$(id -u)" == 0 ] && exit 0'
 [ "$(id -u)" == 0 ] && exit 0
-echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done'
-MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done
+echo -e '+ MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done'
+MAKEFLAGS="-j$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $API_KEY" https://example.com/vieter/publish; done
@@ -59,8 +59,13 @@ fn create_build_script(address string, config BuildConfig, build_arch string) st
 }
 }
 
+commands << if config.path != '' {
+"cd 'repo/$config.path'"
+} else {
+'cd repo'
+}
+
 commands << [
-'cd repo',
 'makepkg --nobuild --syncdeps --needed --noconfirm',
 'source PKGBUILD',
 ]
@@ -79,7 +84,7 @@ fn create_build_script(address string, config BuildConfig, build_arch string) st
 }
 
 commands << [
-'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $repo_url/publish; done',
+'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed --noextract && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $repo_url/publish; done',
 ]
 
 return echo_commands(commands).join('\n')
@@ -1,5 +1,46 @@
 module build
 
+fn test_create_build_script_git() {
+config := BuildConfig{
+target_id: 1
+kind: 'git'
+url: 'https://examplerepo.com'
+repo: 'vieter'
+base_image: 'not-used:latest'
+}
+
+build_script := create_build_script('https://example.com', config, 'x86_64')
+expected := $embed_file('scripts/git.sh')
+
+assert build_script == expected.to_string().trim_space()
+}
+
+fn test_create_build_script_git_path() {
+mut config := BuildConfig{
+target_id: 1
+kind: 'git'
+url: 'https://examplerepo.com'
+repo: 'vieter'
+path: 'example/path'
+base_image: 'not-used:latest'
+}
+
+mut build_script := create_build_script('https://example.com', config, 'x86_64')
+mut expected := $embed_file('scripts/git_path.sh')
+
+assert build_script == expected.to_string().trim_space()
+
+config = BuildConfig{
+...config
+path: 'example/path with spaces'
+}
+
+build_script = create_build_script('https://example.com', config, 'x86_64')
+expected = $embed_file('scripts/git_path_spaces.sh')
+
+assert build_script == expected.to_string().trim_space()
+}
+
 fn test_create_build_script_git_branch() {
 config := BuildConfig{
 target_id: 1
@@ -11,22 +52,7 @@ fn test_create_build_script_git_branch() {
 }
 
 build_script := create_build_script('https://example.com', config, 'x86_64')
-expected := $embed_file('build_script_git_branch.sh')
+expected := $embed_file('scripts/git_branch.sh')
 
-assert build_script == expected.to_string().trim_space()
-}
-
-fn test_create_build_script_git() {
-config := BuildConfig{
-target_id: 1
-kind: 'git'
-url: 'https://examplerepo.com'
-repo: 'vieter'
-base_image: 'not-used:latest'
-}
-
-build_script := create_build_script('https://example.com', config, 'x86_64')
-expected := $embed_file('build_script_git.sh')
-
 assert build_script == expected.to_string().trim_space()
 }
@@ -41,7 +67,7 @@ fn test_create_build_script_url() {
 }
 
 build_script := create_build_script('https://example.com', config, 'x86_64')
-expected := $embed_file('build_script_url.sh')
+expected := $embed_file('scripts/url.sh')
 
 assert build_script == expected.to_string().trim_space()
 }
@@ -2,7 +2,7 @@ module client
 
 import net.http { Method }
 import net.urllib
-import web.response { Response }
+import web.response { Response, new_data_response }
 import json
 
 pub struct Client {
@@ -56,8 +56,28 @@ fn (c &Client) send_request<T>(method Method, url string, params map[string]stri
 // send_request_with_body<T> calls send_request_raw_response & parses its
 // output as a Response<T> object.
 fn (c &Client) send_request_with_body<T>(method Method, url string, params map[string]string, body string) !Response<T> {
-res_text := c.send_request_raw_response(method, url, params, body)!
-data := json.decode(Response<T>, res_text)!
+res := c.send_request_raw(method, url, params, body)!
+status := res.status()
 
+// Non-successful requests are expected to return either an empty body or
+// Response<string>
+if status.is_error() {
+// A non-successful status call will have an empty body
+if res.body == '' {
+return error('Error $res.status_code ($status.str()): (empty response)')
+}
+
+data := json.decode(Response<string>, res.body)!
+
+return error('Status $res.status_code ($status.str()): $data.message')
+}
+
+// Just return an empty successful response
+if res.body == '' {
+return new_data_response(T{})
+}
+
+data := json.decode(Response<T>, res.body)!
+
 return data
 }
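The practical effect of this client change: a failed API call now surfaces as a V error carrying the server's message (or a generic one when the body is empty), and an empty successful body becomes an empty data response. A self-contained sketch of just the error branch, assuming a plain non-generic Response struct for brevity:

module main

import json

// Response mimics the shape of web.response.Response<string> for this sketch.
struct Response {
	message string
	data    string
}

// error_from_body reproduces the new error path of send_request_with_body:
// an empty body yields a generic error, otherwise the decoded message is used.
fn error_from_body(status_code int, body string) !string {
	if body == '' {
		return error('Error $status_code: (empty response)')
	}
	data := json.decode(Response, body)!
	return error('Status $status_code: $data.message')
}

fn main() {
	msg1 := error_from_body(400, '{"message": "Unknown target.", "data": ""}') or { err.msg() }
	println(msg1) // Status 400: Unknown target.
	msg2 := error_from_body(500, '') or { err.msg() }
	println(msg2) // Error 500: (empty response)
}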
@@ -1,7 +1,6 @@
 module client
 
 import build { BuildConfig }
-import web.response { Response }
 
 // poll_jobs requests a list of new build jobs from the server.
 pub fn (c &Client) poll_jobs(arch string, max int) ![]BuildConfig {
@@ -15,12 +14,10 @@ pub fn (c &Client) poll_jobs(arch string, max int) ![]BuildConfig {
 
 // queue_job adds a new one-time build job for the given target to the job
 // queue.
-pub fn (c &Client) queue_job(target_id int, arch string, force bool) !Response<string> {
-data := c.send_request<string>(.post, '/api/v1/jobs/queue', {
+pub fn (c &Client) queue_job(target_id int, arch string, force bool) ! {
+c.send_request<string>(.post, '/api/v1/jobs/queue', {
 'target': target_id.str()
 'arch': arch
 'force': force.str()
 })!
-
-return data
 }
@@ -6,29 +6,18 @@ import web.response { Response }
 import time
 
 // get_build_logs returns all build logs.
-pub fn (c &Client) get_build_logs(filter BuildLogFilter) !Response<[]BuildLog> {
+pub fn (c &Client) get_build_logs(filter BuildLogFilter) ![]BuildLog {
 params := models.params_from(filter)
 data := c.send_request<[]BuildLog>(Method.get, '/api/v1/logs', params)!
 
-return data
-}
-
-// get_build_logs_for_target returns all build logs for a given target.
-pub fn (c &Client) get_build_logs_for_target(target_id int) !Response<[]BuildLog> {
-params := {
-'repo': target_id.str()
-}
-
-data := c.send_request<[]BuildLog>(Method.get, '/api/v1/logs', params)!
-
-return data
+return data.data
 }
 
 // get_build_log returns a specific build log.
-pub fn (c &Client) get_build_log(id int) !Response<BuildLog> {
+pub fn (c &Client) get_build_log(id int) !BuildLog {
 data := c.send_request<BuildLog>(Method.get, '/api/v1/logs/$id', {})!
 
-return data
+return data.data
 }
 
 // get_build_log_content returns the contents of the build log file.
@@ -2,7 +2,6 @@ module client
 
 import models { Target, TargetFilter }
 import net.http { Method }
-import web.response { Response }
 
 // get_targets returns a list of targets, given a filter object.
 pub fn (c &Client) get_targets(filter TargetFilter) ![]Target {
@@ -45,28 +44,29 @@ pub struct NewTarget {
 url string
 branch string
 repo string
+path string
 arch []string
 }
 
 // add_target adds a new target to the server.
-pub fn (c &Client) add_target(t NewTarget) !Response<int> {
+pub fn (c &Client) add_target(t NewTarget) !int {
 params := models.params_from<NewTarget>(t)
 data := c.send_request<int>(Method.post, '/api/v1/targets', params)!
 
-return data
+return data.data
 }
 
 // remove_target removes the target with the given id from the server.
-pub fn (c &Client) remove_target(id int) !Response<string> {
+pub fn (c &Client) remove_target(id int) !string {
 data := c.send_request<string>(Method.delete, '/api/v1/targets/$id', {})!
 
-return data
+return data.data
 }
 
 // patch_target sends a PATCH request to the given target with the params as
 // payload.
-pub fn (c &Client) patch_target(id int, params map[string]string) !Response<string> {
+pub fn (c &Client) patch_target(id int, params map[string]string) !string {
 data := c.send_request<string>(Method.patch, '/api/v1/targets/$id', params)!
 
-return data
+return data.data
 }
@@ -183,15 +183,7 @@ fn print_log_list(logs []BuildLog, raw bool) ! {
 // list prints a list of all build logs.
 fn list(conf Config, filter BuildLogFilter, raw bool) ! {
 c := client.new(conf.address, conf.api_key)
-logs := c.get_build_logs(filter)!.data
+logs := c.get_build_logs(filter)!
 
-print_log_list(logs, raw)!
-}
-
-// list prints a list of all build logs for a given target.
-fn list_for_target(conf Config, target_id int, raw bool) ! {
-c := client.new(conf.address, conf.api_key)
-logs := c.get_build_logs_for_target(target_id)!.data
-
 print_log_list(logs, raw)!
 }
@@ -199,7 +191,7 @@ fn list_for_target(conf Config, target_id int, raw bool) ! {
 // info print the detailed info for a given build log.
 fn info(conf Config, id int) ! {
 c := client.new(conf.address, conf.api_key)
-log := c.get_build_log(id)!.data
+log := c.get_build_log(id)!
 
 print(log)
 }
@@ -13,7 +13,7 @@ struct Config {
 base_image string = 'archlinux:base-devel'
 }
 
-// cmd returns the cli submodule that handles the repos API interaction
+// cmd returns the cli submodule that handles the targets API interaction
 pub fn cmd() cli.Command {
 return cli.Command{
 name: 'targets'
@@ -82,6 +82,11 @@ pub fn cmd() cli.Command {
 description: "Which branch to clone; only applies to kind 'git'."
 flag: cli.FlagType.string
 },
+cli.Flag{
+name: 'path'
+description: 'Subdirectory inside Git repository to use.'
+flag: cli.FlagType.string
+},
 ]
 execute: fn (cmd cli.Command) ! {
 config_file := cmd.flags.get_string('config-file')!
@@ -92,6 +97,7 @@ pub fn cmd() cli.Command {
 url: cmd.args[0]
 repo: cmd.args[1]
 branch: cmd.flags.get_string('branch') or { '' }
+path: cmd.flags.get_string('path') or { '' }
 }
 
 raw := cmd.flags.get_bool('raw')!
@@ -159,6 +165,11 @@ pub fn cmd() cli.Command {
 description: 'Kind of target.'
 flag: cli.FlagType.string
 },
+cli.Flag{
+name: 'path'
+description: 'Subdirectory inside Git repository to use.'
+flag: cli.FlagType.string
+},
 ]
 execute: fn (cmd cli.Command) ! {
 config_file := cmd.flags.get_string('config-file')!
@@ -215,8 +226,7 @@ pub fn cmd() cli.Command {
 }
 
 c := client.new(conf.address, conf.api_key)
-res := c.queue_job(target_id, arch, force)!
-println(res.message)
+c.queue_job(target_id, arch, force)!
 } else {
 build(conf, target_id, force)!
 }
@@ -226,14 +236,11 @@ pub fn cmd() cli.Command {
 }
 }
 
-// get_repo_by_prefix tries to find the repo with the given prefix in its
-// ID. If multiple or none are found, an error is raised.
-
 // list prints out a list of all repositories.
 fn list(conf Config, filter TargetFilter, raw bool) ! {
 c := client.new(conf.address, conf.api_key)
-repos := c.get_targets(filter)!
-data := repos.map([it.id.str(), it.kind, it.url, it.repo])
+targets := c.get_targets(filter)!
+data := targets.map([it.id.str(), it.kind, it.url, it.repo])
 
 if raw {
 println(console.tabbed_table(data))
@@ -242,29 +249,25 @@ fn list(conf Config, filter TargetFilter, raw bool) ! {
 }
 }
 
-// add adds a new repository to the server's list.
+// add adds a new target to the server's list.
 fn add(conf Config, t &NewTarget, raw bool) ! {
 c := client.new(conf.address, conf.api_key)
-res := c.add_target(t)!
+target_id := c.add_target(t)!
 
 if raw {
-println(res.data)
+println(target_id)
 } else {
-println('Target added with id $res.data')
+println('Target added with id $target_id')
 }
 }
 
-// remove removes a repository from the server's list.
+// remove removes a target from the server's list.
 fn remove(conf Config, id string) ! {
-id_int := id.int()
-
-if id_int != 0 {
 c := client.new(conf.address, conf.api_key)
-c.remove_target(id_int)!
-}
+c.remove_target(id.int())!
 }
 
-// patch patches a given repository with the provided params.
+// patch patches a given target with the provided params.
 fn patch(conf Config, id string, params map[string]string) ! {
 // We check the cron expression first because it's useless to send an
 // invalid one to the server.
@@ -274,22 +277,13 @@ fn patch(conf Config, id string, params map[string]string) ! {
 }
 }
 
-id_int := id.int()
-if id_int != 0 {
 c := client.new(conf.address, conf.api_key)
-c.patch_target(id_int, params)!
-}
+c.patch_target(id.int(), params)!
 }
 
-// info shows detailed information for a given repo.
+// info shows detailed information for a given target.
 fn info(conf Config, id string) ! {
-id_int := id.int()
-
-if id_int == 0 {
-return
-}
-
 c := client.new(conf.address, conf.api_key)
-repo := c.get_target(id_int)!
-println(repo)
+target := c.get_target(id.int())!
+println(target)
 }
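One detail worth knowing about the removed `id_int != 0` guards above: V's `string.int()` returns 0 for non-numeric input, so an invalid ID used to make the CLI silently skip the request; with the guards gone, the request is sent as-is and any failure now surfaces through the client's error handling. A tiny illustration (not project code):

module main

// string.int() falls back to 0 when the input is not a number, which is why
// the old guards treated a typo like 'abc' the same as no target at all.
fn main() {
	println('5'.int()) // 5
	println('abc'.int()) // 0
	println(''.int()) // 0
}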
@@ -18,12 +18,14 @@ const (
 $embed_file('migrations/002-rename-to-targets/up.sql'),
 $embed_file('migrations/003-target-url-type/up.sql'),
 $embed_file('migrations/004-nullable-branch/up.sql'),
+$embed_file('migrations/005-repo-path/up.sql'),
 ]
 migrations_down = [
 $embed_file('migrations/001-initial/down.sql'),
 $embed_file('migrations/002-rename-to-targets/down.sql'),
 $embed_file('migrations/003-target-url-type/down.sql'),
 $embed_file('migrations/004-nullable-branch/down.sql'),
+$embed_file('migrations/005-repo-path/down.sql'),
 ]
 )
@@ -0,0 +1 @@
+ALTER TABLE Target DROP COLUMN path;
@@ -0,0 +1 @@
+ALTER TABLE Target ADD COLUMN path TEXT;
@@ -12,6 +12,11 @@ import cron
 import agent
 
 fn main() {
+// Stop buffering output so logs always show up immediately
+unsafe {
+C.setbuf(C.stdout, 0)
+}
+
 mut app := cli.Command{
 name: 'vieter'
 description: 'Vieter is a lightweight implementation of an Arch repository server.'
@@ -28,21 +28,24 @@ pub mut:
 repo string [nonull]
 // Cron schedule describing how frequently to build the repo.
 schedule string
+// Subdirectory in the Git repository to cd into
+path string
 // On which architectures the package is allowed to be built. In reality,
-// this controls which builders will periodically build the image.
+// this controls which agents will build this package when scheduled.
 arch []TargetArch [fkey: 'target_id']
 }
 
 // str returns a string representation.
-pub fn (gr &Target) str() string {
+pub fn (t &Target) str() string {
 mut parts := [
-'id: $gr.id',
-'kind: $gr.kind',
-'url: $gr.url',
-'branch: $gr.branch',
-'repo: $gr.repo',
-'schedule: $gr.schedule',
-'arch: ${gr.arch.map(it.value).join(', ')}',
+'id: $t.id',
+'kind: $t.kind',
+'url: $t.url',
+'branch: $t.branch',
+'path: $t.path',
+'repo: $t.repo',
+'schedule: $t.schedule',
+'arch: ${t.arch.map(it.value).join(', ')}',
 ]
 str := parts.join('\n')
 
@@ -1,7 +1,6 @@
 module server
 
 import web
-import net.http
 import net.urllib
 import web.response { new_data_response, new_response }
 import db
@@ -15,7 +14,7 @@ import models { BuildLog, BuildLogFilter }
 ['/api/v1/logs'; auth; get]
 fn (mut app App) v1_get_logs() web.Result {
 filter := models.from_params<BuildLogFilter>(app.query) or {
-return app.json(http.Status.bad_request, new_response('Invalid query parameters.'))
+return app.json(.bad_request, new_response('Invalid query parameters.'))
 }
 logs := app.db.get_build_logs(filter)
 
@@ -25,7 +24,7 @@ fn (mut app App) v1_get_logs() web.Result {
 // v1_get_single_log returns the build log with the given id.
 ['/api/v1/logs/:id'; auth; get]
 fn (mut app App) v1_get_single_log(id int) web.Result {
-log := app.db.get_build_log(id) or { return app.not_found() }
+log := app.db.get_build_log(id) or { return app.status(.not_found) }
 
 return app.json(.ok, new_data_response(log))
 }
@@ -33,7 +32,7 @@ fn (mut app App) v1_get_single_log(id int) web.Result {
 // v1_get_log_content returns the actual build log file for the given id.
 ['/api/v1/logs/:id/content'; auth; get]
 fn (mut app App) v1_get_log_content(id int) web.Result {
-log := app.db.get_build_log(id) or { return app.not_found() }
+log := app.db.get_build_log(id) or { return app.status(.not_found) }
 file_name := log.start_time.custom_format('YYYY-MM-DD_HH-mm-ss')
 full_path := os.join_path(app.conf.data_dir, logs_dir_name, log.target_id.str(), log.arch,
 file_name)
@@ -57,25 +56,25 @@ fn (mut app App) v1_post_log() web.Result {
 start_time_int := app.query['startTime'].int()
 
 if start_time_int == 0 {
-return app.json(http.Status.bad_request, new_response('Invalid or missing start time.'))
+return app.json(.bad_request, new_response('Invalid or missing start time.'))
 }
 start_time := time.unix(start_time_int)
 
 end_time_int := app.query['endTime'].int()
 
 if end_time_int == 0 {
-return app.json(http.Status.bad_request, new_response('Invalid or missing end time.'))
+return app.json(.bad_request, new_response('Invalid or missing end time.'))
 }
 end_time := time.unix(end_time_int)
 
 if 'exitCode' !in app.query {
-return app.json(http.Status.bad_request, new_response('Missing exit code.'))
+return app.json(.bad_request, new_response('Missing exit code.'))
 }
 
 exit_code := app.query['exitCode'].int()
 
 if 'arch' !in app.query {
-return app.json(http.Status.bad_request, new_response("Missing parameter 'arch'."))
+return app.json(.bad_request, new_response("Missing parameter 'arch'."))
 }
 
 arch := app.query['arch']
@@ -83,7 +82,7 @@ fn (mut app App) v1_post_log() web.Result {
 target_id := app.query['target'].int()
 
 if !app.db.target_exists(target_id) {
-return app.json(http.Status.bad_request, new_response('Unknown target.'))
+return app.json(.bad_request, new_response('Unknown target.'))
 }
 
 // Store log in db
@@ -105,7 +104,7 @@ fn (mut app App) v1_post_log() web.Result {
 os.mkdir_all(repo_logs_dir) or {
 app.lerror("Couldn't create dir '$repo_logs_dir'.")
 
-return app.json(http.Status.internal_server_error, new_response('An error occured while processing the request.'))
+return app.status(.internal_server_error)
 }
 }
 
@@ -117,10 +116,10 @@ fn (mut app App) v1_post_log() web.Result {
 util.reader_to_file(mut app.reader, length.int(), full_path) or {
 app.lerror('An error occured while receiving logs: $err.msg()')
 
-return app.json(http.Status.internal_server_error, new_response('Failed to upload logs.'))
+return app.status(.internal_server_error)
 }
 } else {
-return app.status(http.Status.length_required)
+return app.status(.length_required)
 }
 
 return app.json(.ok, new_data_response(log_id))
@@ -1,7 +1,6 @@
 module server
 
 import web
-import net.http
 import web.response { new_data_response, new_response }
 import db
 import models { Target, TargetArch, TargetFilter }
@@ -10,7 +9,7 @@ import models { Target, TargetArch, TargetFilter }
 ['/api/v1/targets'; auth; get]
 fn (mut app App) v1_get_targets() web.Result {
 filter := models.from_params<TargetFilter>(app.query) or {
-return app.json(http.Status.bad_request, new_response('Invalid query parameters.'))
+return app.json(.bad_request, new_response('Invalid query parameters.'))
 }
 targets := app.db.get_targets(filter)
 
@@ -20,7 +19,7 @@ fn (mut app App) v1_get_targets() web.Result {
 // v1_get_single_target returns the information for a single target.
 ['/api/v1/targets/:id'; auth; get]
 fn (mut app App) v1_get_single_target(id int) web.Result {
-target := app.db.get_target(id) or { return app.not_found() }
+target := app.db.get_target(id) or { return app.status(.not_found) }
 
 return app.json(.ok, new_data_response(target))
 }
@@ -37,12 +36,12 @@ fn (mut app App) v1_post_target() web.Result {
 }
 
 mut new_target := models.from_params<Target>(params) or {
-return app.json(http.Status.bad_request, new_response(err.msg()))
+return app.json(.bad_request, new_response(err.msg()))
 }
 
 // Ensure someone doesn't submit an invalid kind
 if new_target.kind !in models.valid_kinds {
-return app.json(http.Status.bad_request, new_response('Invalid kind.'))
+return app.json(.bad_request, new_response('Invalid kind.'))
 }
 
 id := app.db.add_target(new_target)
@@ -61,7 +60,7 @@ fn (mut app App) v1_delete_target(id int) web.Result {
 app.db.delete_target(id)
 app.job_queue.invalidate(id)
 
-return app.json(.ok, new_response(''))
+return app.status(.ok)
 }
 
 // v1_patch_target updates a target's data with the given query params.
@@ -260,13 +260,6 @@ pub fn (mut ctx Context) redirect(url string) Result {
 return Result{}
 }
 
-// not_found Send an not_found response
-pub fn (mut ctx Context) not_found() Result {
-ctx.send_custom_response(http_404) or {}
-
-return Result{}
-}
-
 interface DbInterface {
 db voidptr
 }