forked from vieter-v/vieter
Compare commits
36 Commits
03318586ed
...
78fc3afcd3
Author | SHA1 | Date |
---|---|---|
Jef Roosens | 78fc3afcd3 | |
Jef Roosens | cae44fb593 | |
Jef Roosens | 3821ed29fd | |
Jef Roosens | 5a5f7f8346 | |
Jef Roosens | ea4c4fce16 | |
Jef Roosens | e79d18100f | |
Jef Roosens | 4b172cb5d8 | |
Jef Roosens | 27aa215eff | |
Jef Roosens | 7e5f0c5a53 | |
Jef Roosens | 30cce4fa72 | |
Jef Roosens | 5f7d7c4780 | |
Jef Roosens | 5b016df85d | |
Jef Roosens | fa6603bd45 | |
Jef Roosens | 407b226955 | |
Jef Roosens | f42d3fd8b0 | |
Jef Roosens | 139142fcec | |
Jef Roosens | 393e641a76 | |
Jef Roosens | 7e01dbafec | |
Jef Roosens | 58c1ecd25e | |
Jef Roosens | 230920576d | |
Jef Roosens | 356a34ab01 | |
Jef Roosens | 1156e896f7 | |
Jef Roosens | a3b6680153 | |
Jef Roosens | 7fdbcdf3e7 | |
Jef Roosens | d4306133e0 | |
Jef Roosens | 1990ade089 | |
Jef Roosens | e008133981 | |
Jef Roosens | 1a076a7a8c | |
Jef Roosens | 8c5652c230 | |
Jef Roosens | b6d5bd3228 | |
Jef Roosens | 5781796e99 | |
Jef Roosens | 204144cee8 | |
Jef Roosens | c818273790 | |
Jef Roosens | 7419144f97 | |
Jef Roosens | 0a2488a4df | |
Jef Roosens | 891a206116 |
|
@ -23,8 +23,10 @@ pipeline:
|
||||||
- su builder
|
- su builder
|
||||||
# Due to a bug with the V compiler, we can't just use the PKGBUILD from
|
# Due to a bug with the V compiler, we can't just use the PKGBUILD from
|
||||||
# inside the repo
|
# inside the repo
|
||||||
- curl -OL https://git.rustybever.be/Chewing_Bever/vieter/raw/branch/dev/PKGBUILD
|
- curl -OL https://git.rustybever.be/vieter/vieter/raw/branch/dev/PKGBUILD
|
||||||
- makepkg -s --noconfirm --needed
|
- makepkg -s --noconfirm --needed
|
||||||
|
when:
|
||||||
|
event: push
|
||||||
|
|
||||||
publish:
|
publish:
|
||||||
image: 'curlimages/curl'
|
image: 'curlimages/curl'
|
||||||
|
@ -33,3 +35,5 @@ pipeline:
|
||||||
- 'for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $VIETER_API_KEY" https://arch.r8r.be/vieter/publish; done'
|
- 'for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $VIETER_API_KEY" https://arch.r8r.be/vieter/publish; done'
|
||||||
secrets:
|
secrets:
|
||||||
- vieter_api_key
|
- vieter_api_key
|
||||||
|
when:
|
||||||
|
event: push
|
||||||
|
|
|
@ -1,11 +1,8 @@
|
||||||
matrix:
|
matrix:
|
||||||
PLATFORM:
|
PLATFORM:
|
||||||
- linux/amd64
|
- 'linux/amd64'
|
||||||
- linux/arm64
|
- 'linux/arm64'
|
||||||
# I just don't have a performant enough runner for this platform
|
|
||||||
# - linux/arm/v7
|
|
||||||
|
|
||||||
# These checks already get performed on the feature branches
|
|
||||||
platform: ${PLATFORM}
|
platform: ${PLATFORM}
|
||||||
|
|
||||||
pipeline:
|
pipeline:
|
||||||
|
@ -15,15 +12,15 @@ pipeline:
|
||||||
commands:
|
commands:
|
||||||
- make
|
- make
|
||||||
when:
|
when:
|
||||||
event: push
|
event: [pull_request]
|
||||||
branch:
|
branch:
|
||||||
exclude: [main, dev]
|
exclude: [main]
|
||||||
|
|
||||||
prod:
|
prod:
|
||||||
image: 'chewingbever/vlang:latest'
|
image: 'chewingbever/vlang:latest'
|
||||||
pull: true
|
pull: true
|
||||||
environment:
|
environment:
|
||||||
- LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -static
|
- LDFLAGS=-lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3 -static
|
||||||
commands:
|
commands:
|
||||||
# Apparently this -D is *very* important
|
# Apparently this -D is *very* important
|
||||||
- CFLAGS='-DGC_THREADS=1' make prod
|
- CFLAGS='-DGC_THREADS=1' make prod
|
||||||
|
@ -35,7 +32,7 @@ pipeline:
|
||||||
- strip -s pvieter
|
- strip -s pvieter
|
||||||
- du -h pvieter
|
- du -h pvieter
|
||||||
when:
|
when:
|
||||||
event: push
|
event: [push, pull_request]
|
||||||
|
|
||||||
upload:
|
upload:
|
||||||
image: 'chewingbever/vlang:latest'
|
image: 'chewingbever/vlang:latest'
|
||||||
|
@ -52,6 +49,7 @@ pipeline:
|
||||||
- >
|
- >
|
||||||
curl
|
curl
|
||||||
--silent
|
--silent
|
||||||
|
--fail
|
||||||
-XPUT
|
-XPUT
|
||||||
-T pvieter
|
-T pvieter
|
||||||
-H "Host: $URL"
|
-H "Host: $URL"
|
||||||
|
@ -60,4 +58,4 @@ pipeline:
|
||||||
-H "Authorization: AWS $S3_USERNAME:$SIGNATURE"
|
-H "Authorization: AWS $S3_USERNAME:$SIGNATURE"
|
||||||
https://$URL$OBJ_PATH
|
https://$URL$OBJ_PATH
|
||||||
when:
|
when:
|
||||||
event: push
|
event: [push, pull_request]
|
||||||
|
|
|
@ -1,29 +0,0 @@
|
||||||
# These builds are not important for the project, but might be valuable for
|
|
||||||
# fixing bugs in the V compiler.
|
|
||||||
|
|
||||||
platform: linux/amd64
|
|
||||||
branches:
|
|
||||||
exclude: [master, dev]
|
|
||||||
|
|
||||||
pipeline:
|
|
||||||
autofree:
|
|
||||||
image: 'chewingbever/vlang:latest'
|
|
||||||
pull: true
|
|
||||||
group: 'build'
|
|
||||||
commands:
|
|
||||||
- make autofree
|
|
||||||
- readelf -d afvieter
|
|
||||||
- du -h afvieter
|
|
||||||
when:
|
|
||||||
event: push
|
|
||||||
|
|
||||||
skip-unused:
|
|
||||||
image: 'chewingbever/vlang:latest'
|
|
||||||
pull: true
|
|
||||||
group: 'build'
|
|
||||||
commands:
|
|
||||||
- make skip-unused
|
|
||||||
- readelf -d suvieter
|
|
||||||
- du -h suvieter
|
|
||||||
when:
|
|
||||||
event: push
|
|
|
@ -1,4 +1,4 @@
|
||||||
branches: 'dev'
|
branches: [ 'dev' ]
|
||||||
platform: 'linux/amd64'
|
platform: 'linux/amd64'
|
||||||
depends_on:
|
depends_on:
|
||||||
- 'docker'
|
- 'docker'
|
||||||
|
@ -14,3 +14,5 @@ pipeline:
|
||||||
commands:
|
commands:
|
||||||
- 'curl -XPOST -s --fail $WEBHOOK_APP'
|
- 'curl -XPOST -s --fail $WEBHOOK_APP'
|
||||||
- 'curl -XPOST -s --fail $WEBHOOK_CRON'
|
- 'curl -XPOST -s --fail $WEBHOOK_CRON'
|
||||||
|
when:
|
||||||
|
event: push
|
||||||
|
|
|
@ -1,30 +1,36 @@
|
||||||
branches: [main, dev]
|
branches: [main, dev]
|
||||||
platform: linux/amd64
|
platform: 'linux/amd64'
|
||||||
depends_on:
|
depends_on:
|
||||||
- build
|
- build
|
||||||
|
|
||||||
pipeline:
|
pipeline:
|
||||||
dev:
|
dev:
|
||||||
image: woodpeckerci/plugin-docker-buildx
|
image: 'woodpeckerci/plugin-docker-buildx'
|
||||||
secrets: [ docker_username, docker_password ]
|
secrets:
|
||||||
|
- 'docker_username'
|
||||||
|
- 'docker_password'
|
||||||
settings:
|
settings:
|
||||||
repo: chewingbever/vieter
|
repo: 'chewingbever/vieter'
|
||||||
tag: dev
|
tags:
|
||||||
platforms: [ linux/arm64/v8, linux/amd64 ]
|
- 'dev'
|
||||||
|
- ${CI_COMMIT_SHA}
|
||||||
|
platforms: [ 'linux/arm64/v8', 'linux/amd64' ]
|
||||||
build_args_from_env:
|
build_args_from_env:
|
||||||
- CI_COMMIT_SHA
|
- 'CI_COMMIT_SHA'
|
||||||
when:
|
when:
|
||||||
event: push
|
event: push
|
||||||
branch: dev
|
branch: dev
|
||||||
|
|
||||||
release:
|
release:
|
||||||
image: woodpeckerci/plugin-docker-buildx
|
image: 'woodpeckerci/plugin-docker-buildx'
|
||||||
secrets: [ docker_username, docker_password ]
|
secrets:
|
||||||
|
- 'docker_username'
|
||||||
|
- 'docker_password'
|
||||||
settings:
|
settings:
|
||||||
repo: chewingbever/vieter
|
repo: 'chewingbever/vieter'
|
||||||
auto_tag: true
|
auto_tag: true
|
||||||
platforms: [ linux/arm64/v8, linux/amd64 ]
|
platforms: [ 'linux/arm64/v8', 'linux/amd64' ]
|
||||||
build_args_from_env:
|
build_args_from_env:
|
||||||
- CI_COMMIT_SHA
|
- 'CI_COMMIT_SHA'
|
||||||
when:
|
when:
|
||||||
event: tag
|
event: tag
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
# Yeah so this only works on tags so we'll worry about this later
|
platform: 'linux/amd64'
|
||||||
platform: linux/amd64
|
branches: [ 'main' ]
|
||||||
branches: main
|
|
||||||
depends_on:
|
depends_on:
|
||||||
- build
|
- build
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
# These checks already get performed on the feature branches
|
# These checks already get performed on the feature branches
|
||||||
branches:
|
branches:
|
||||||
exclude: [ main, dev ]
|
exclude: [ main ]
|
||||||
platform: linux/amd64
|
platform: 'linux/amd64'
|
||||||
|
|
||||||
pipeline:
|
pipeline:
|
||||||
lint:
|
lint:
|
||||||
|
@ -9,3 +9,5 @@ pipeline:
|
||||||
pull: true
|
pull: true
|
||||||
commands:
|
commands:
|
||||||
- make lint
|
- make lint
|
||||||
|
when:
|
||||||
|
event: [ pull_request ]
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
matrix:
|
matrix:
|
||||||
PLATFORM:
|
PLATFORM:
|
||||||
- linux/amd64
|
- 'linux/amd64'
|
||||||
- linux/arm64
|
- 'linux/arm64'
|
||||||
|
|
||||||
branches:
|
branches:
|
||||||
exclude: [main, dev]
|
exclude: [ main ]
|
||||||
platform: ${PLATFORM}
|
platform: ${PLATFORM}
|
||||||
|
|
||||||
pipeline:
|
pipeline:
|
||||||
|
@ -14,4 +14,4 @@ pipeline:
|
||||||
commands:
|
commands:
|
||||||
- make test
|
- make test
|
||||||
when:
|
when:
|
||||||
event: push
|
event: [pull_request]
|
||||||
|
|
18
CHANGELOG.md
18
CHANGELOG.md
|
@ -5,11 +5,21 @@ All notable changes to this project will be documented in this file.
|
||||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
## [Unreleased](https://git.rustybever.be/Chewing_Bever/vieter)
|
## [Unreleased](https://git.rustybever.be/vieter/vieter/src/branch/dev)
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
* Web API for adding & querying build logs
|
||||||
|
* CLI commands to access build logs API
|
||||||
|
|
||||||
|
## [0.3.0-alpha.1](https://git.rustybever.be/vieter/vieter/src/tag/0.3.0-alpha.1)
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
|
|
||||||
* Switched from compiler fork to fully vanilla compiler mirror
|
* Switched from compiler fork to fully vanilla compiler mirror
|
||||||
|
* `download_dir`, `repos_file` & `repos_dir` config values have been replaced
|
||||||
|
with `data_dir`
|
||||||
|
* Storage of metadata (e.g. Git repositories) is now done using Sqlite
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|
||||||
|
@ -21,7 +31,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||||
|
|
||||||
* Binary no longer panics when an env var is missing
|
* Binary no longer panics when an env var is missing
|
||||||
|
|
||||||
## [0.2.0](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.2.0)
|
## [0.2.0](https://git.rustybever.be/vieter/vieter/src/tag/0.2.0)
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
|
|
||||||
|
@ -55,13 +65,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||||
* Packages with unknown fields in .PKGINFO are now allowed
|
* Packages with unknown fields in .PKGINFO are now allowed
|
||||||
* Old packages are now properly removed
|
* Old packages are now properly removed
|
||||||
|
|
||||||
## [0.1.0](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.1.0)
|
## [0.1.0](https://git.rustybever.be/vieter/vieter/src/tag/0.1.0)
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
|
|
||||||
* Improved logging
|
* Improved logging
|
||||||
|
|
||||||
## [0.1.0-rc.1](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.1.0-rc.1)
|
## [0.1.0-rc.1](https://git.rustybever.be/vieter/vieter/src/tag/0.1.0-rc.1)
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|
||||||
|
|
|
@ -23,7 +23,7 @@ RUN if [ -n "${CI_COMMIT_SHA}" ]; then \
|
||||||
"https://s3.rustybever.be/vieter/commits/${CI_COMMIT_SHA}/vieter-$(echo "${TARGETPLATFORM}" | sed 's:/:-:g')" && \
|
"https://s3.rustybever.be/vieter/commits/${CI_COMMIT_SHA}/vieter-$(echo "${TARGETPLATFORM}" | sed 's:/:-:g')" && \
|
||||||
chmod +x vieter ; \
|
chmod +x vieter ; \
|
||||||
else \
|
else \
|
||||||
LDFLAGS='-lz -lbz2 -llzma -lexpat -lzstd -llz4 -static' make prod && \
|
LDFLAGS='-lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3 -static' make prod && \
|
||||||
mv pvieter vieter ; \
|
mv pvieter vieter ; \
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
@ -31,10 +31,8 @@ RUN if [ -n "${CI_COMMIT_SHA}" ]; then \
|
||||||
FROM busybox:1.35.0
|
FROM busybox:1.35.0
|
||||||
|
|
||||||
ENV PATH=/bin \
|
ENV PATH=/bin \
|
||||||
VIETER_REPOS_DIR=/data/repos \
|
VIETER_DATA_DIR=/data \
|
||||||
VIETER_PKG_DIR=/data/pkgs \
|
VIETER_PKG_DIR=/data/pkgs
|
||||||
VIETER_DOWNLOAD_DIR=/data/downloads \
|
|
||||||
VIETER_REPOS_FILE=/data/repos.json
|
|
||||||
|
|
||||||
COPY --from=builder /app/dumb-init /app/vieter /bin/
|
COPY --from=builder /app/dumb-init /app/vieter /bin/
|
||||||
|
|
||||||
|
|
8
PKGBUILD
8
PKGBUILD
|
@ -4,12 +4,12 @@ pkgbase='vieter'
|
||||||
pkgname='vieter'
|
pkgname='vieter'
|
||||||
pkgver=0.2.0.r25.g20112b8
|
pkgver=0.2.0.r25.g20112b8
|
||||||
pkgrel=1
|
pkgrel=1
|
||||||
depends=('glibc' 'openssl' 'libarchive' 'gc')
|
depends=('glibc' 'openssl' 'libarchive' 'gc' 'sqlite')
|
||||||
makedepends=('git' 'gcc' 'vieter-v')
|
makedepends=('git' 'gcc' 'vieter-v')
|
||||||
arch=('x86_64' 'aarch64' 'armv7')
|
arch=('x86_64' 'aarch64')
|
||||||
url='https://git.rustybever.be/Chewing_Bever/vieter'
|
url='https://git.rustybever.be/vieter/vieter'
|
||||||
license=('AGPL3')
|
license=('AGPL3')
|
||||||
source=($pkgname::git+https://git.rustybever.be/Chewing_Bever/vieter#branch=dev)
|
source=($pkgname::git+https://git.rustybever.be/vieter/vieter#branch=dev)
|
||||||
md5sums=('SKIP')
|
md5sums=('SKIP')
|
||||||
|
|
||||||
pkgver() {
|
pkgver() {
|
||||||
|
|
|
@ -35,18 +35,10 @@ passed to them. Each mode requires a different configuration.
|
||||||
* `log_file`: log file to write logs to. Defaults to `vieter.log` in the
|
* `log_file`: log file to write logs to. Defaults to `vieter.log` in the
|
||||||
current directory.
|
current directory.
|
||||||
* `pkg_dir`: where Vieter should store the actual package archives.
|
* `pkg_dir`: where Vieter should store the actual package archives.
|
||||||
* `download_dir`: where Vieter should initially download uploaded files.
|
* `data_dir`: where Vieter stores the repositories, log file & database.
|
||||||
* `api_key`: the API key to use when authenticating requests.
|
* `api_key`: the API key to use when authenticating requests.
|
||||||
* `repo_dir`: where Vieter should store the contents of the repository.
|
|
||||||
* `repos_file`: JSON file where the list of Git repositories is saved
|
|
||||||
* `default_arch`: architecture to always add packages of arch `any` to.
|
* `default_arch`: architecture to always add packages of arch `any` to.
|
||||||
|
|
||||||
{{< hint info >}}
|
|
||||||
**Note**
|
|
||||||
Because Vieter hard links files between `download_dir` & `pkg_dir`, they need
|
|
||||||
to be on the same file system.
|
|
||||||
{{< /hint >}}
|
|
||||||
|
|
||||||
### Builder
|
### Builder
|
||||||
|
|
||||||
* `api_key`: the API key to use when authenticating requests.
|
* `api_key`: the API key to use when authenticating requests.
|
||||||
|
@ -62,3 +54,18 @@ to be on the same file system.
|
||||||
|
|
||||||
* `api_key`: the API key to use when authenticating requests.
|
* `api_key`: the API key to use when authenticating requests.
|
||||||
* `address`: Base your URL of your Vieter instance, e.g. https://example.com
|
* `address`: Base your URL of your Vieter instance, e.g. https://example.com
|
||||||
|
|
||||||
|
### Cron
|
||||||
|
|
||||||
|
* `log_level`: defines how much logs to show. Valid values are one of `FATAL`,
|
||||||
|
`ERROR`, `WARN`, `INFO` or `DEBUG`. Defaults to `WARN`
|
||||||
|
* `api_key`: the API key to use when authenticating requests.
|
||||||
|
* `address`: Base your URL of your Vieter instance, e.g. https://example.com.
|
||||||
|
This *must* be the publicly facing URL of your Vieter instance.
|
||||||
|
* `data_dir`: where Vieter stores the log file.
|
||||||
|
* `base_image`: Docker image from which to create the builder images.
|
||||||
|
* `max_concurrent_builds`: amount of builds to run at once.
|
||||||
|
* `api_update_frequency`: how frequenty to check for changes in the repo list.
|
||||||
|
* `image_rebuild+frequency`: how frequently to rebuild the builder image
|
||||||
|
* `global_schedule`: cron schedule to use for any repo without an individual
|
||||||
|
schedule
|
||||||
|
|
|
@ -3,8 +3,9 @@ module build
|
||||||
import docker
|
import docker
|
||||||
import encoding.base64
|
import encoding.base64
|
||||||
import time
|
import time
|
||||||
import git
|
|
||||||
import os
|
import os
|
||||||
|
import db
|
||||||
|
import client
|
||||||
|
|
||||||
const container_build_dir = '/build'
|
const container_build_dir = '/build'
|
||||||
|
|
||||||
|
@ -72,10 +73,18 @@ pub fn create_build_image(base_image string) ?string {
|
||||||
return image.id
|
return image.id
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct BuildResult {
|
||||||
|
pub:
|
||||||
|
start_time time.Time
|
||||||
|
end_time time.Time
|
||||||
|
exit_code int
|
||||||
|
logs string
|
||||||
|
}
|
||||||
|
|
||||||
// build_repo builds, packages & publishes a given Arch package based on the
|
// build_repo builds, packages & publishes a given Arch package based on the
|
||||||
// provided GitRepo. The base image ID should be of an image previously created
|
// provided GitRepo. The base image ID should be of an image previously created
|
||||||
// by create_build_image.
|
// by create_build_image. It returns the logs of the container.
|
||||||
pub fn build_repo(address string, api_key string, base_image_id string, repo &git.GitRepo) ? {
|
pub fn build_repo(address string, api_key string, base_image_id string, repo &db.GitRepo) ?BuildResult {
|
||||||
build_arch := os.uname().machine
|
build_arch := os.uname().machine
|
||||||
|
|
||||||
// TODO what to do with PKGBUILDs that build multiple packages?
|
// TODO what to do with PKGBUILDs that build multiple packages?
|
||||||
|
@ -86,7 +95,7 @@ pub fn build_repo(address string, api_key string, base_image_id string, repo &gi
|
||||||
'source PKGBUILD',
|
'source PKGBUILD',
|
||||||
// The build container checks whether the package is already
|
// The build container checks whether the package is already
|
||||||
// present on the server
|
// present on the server
|
||||||
'curl --head --fail $address/$repo.repo/$build_arch/\$pkgname-\$pkgver-\$pkgrel && exit 0',
|
'curl -s --head --fail $address/$repo.repo/$build_arch/\$pkgname-\$pkgver-\$pkgrel && exit 0',
|
||||||
'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $address/$repo.repo/publish; done',
|
'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $address/$repo.repo/publish; done',
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@ -106,43 +115,44 @@ pub fn build_repo(address string, api_key string, base_image_id string, repo &gi
|
||||||
id := docker.create_container(c) ?
|
id := docker.create_container(c) ?
|
||||||
docker.start_container(id) ?
|
docker.start_container(id) ?
|
||||||
|
|
||||||
|
mut data := docker.inspect_container(id) ?
|
||||||
|
|
||||||
// This loop waits until the container has stopped, so we can remove it after
|
// This loop waits until the container has stopped, so we can remove it after
|
||||||
for {
|
for data.state.running {
|
||||||
data := docker.inspect_container(id) ?
|
|
||||||
|
|
||||||
if !data.state.running {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
time.sleep(1 * time.second)
|
time.sleep(1 * time.second)
|
||||||
|
|
||||||
|
data = docker.inspect_container(id) ?
|
||||||
}
|
}
|
||||||
|
|
||||||
|
logs := docker.get_container_logs(id) ?
|
||||||
|
|
||||||
docker.remove_container(id) ?
|
docker.remove_container(id) ?
|
||||||
|
|
||||||
|
return BuildResult{
|
||||||
|
start_time: data.state.start_time
|
||||||
|
end_time: data.state.end_time
|
||||||
|
exit_code: data.state.exit_code
|
||||||
|
logs: logs
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// build builds every Git repo in the server's list.
|
// build builds every Git repo in the server's list.
|
||||||
fn build(conf Config) ? {
|
fn build(conf Config, repo_id int) ? {
|
||||||
|
c := client.new(conf.address, conf.api_key)
|
||||||
|
repo := c.get_git_repo(repo_id) ?
|
||||||
|
|
||||||
build_arch := os.uname().machine
|
build_arch := os.uname().machine
|
||||||
|
|
||||||
// We get the repos map from the Vieter instance
|
println('Creating base image...')
|
||||||
repos_map := git.get_repos(conf.address, conf.api_key) ?
|
|
||||||
|
|
||||||
// We filter out any repos that aren't allowed to be built on this
|
|
||||||
// architecture
|
|
||||||
filtered_repos := repos_map.keys().map(repos_map[it]).filter(it.arch.contains(build_arch))
|
|
||||||
|
|
||||||
// No point in doing work if there's no repos present
|
|
||||||
if filtered_repos.len == 0 {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// First, we create a base image which has updated repos n stuff
|
|
||||||
image_id := create_build_image(conf.base_image) ?
|
image_id := create_build_image(conf.base_image) ?
|
||||||
|
|
||||||
for repo in filtered_repos {
|
println('Running build...')
|
||||||
build_repo(conf.address, conf.api_key, image_id, repo) ?
|
res := build_repo(conf.address, conf.api_key, image_id, repo) ?
|
||||||
}
|
|
||||||
|
|
||||||
// Finally, we remove the builder image
|
println('Removing build image...')
|
||||||
docker.remove_image(image_id) ?
|
docker.remove_image(image_id) ?
|
||||||
|
|
||||||
|
println('Uploading logs to Vieter...')
|
||||||
|
c.add_build_log(repo.id, res.start_time, res.end_time, build_arch, res.exit_code,
|
||||||
|
res.logs) ?
|
||||||
}
|
}
|
||||||
|
|
|
@ -14,12 +14,16 @@ pub:
|
||||||
pub fn cmd() cli.Command {
|
pub fn cmd() cli.Command {
|
||||||
return cli.Command{
|
return cli.Command{
|
||||||
name: 'build'
|
name: 'build'
|
||||||
description: 'Run the build process.'
|
required_args: 1
|
||||||
|
usage: 'id'
|
||||||
|
description: 'Build the repository with the given ID.'
|
||||||
execute: fn (cmd cli.Command) ? {
|
execute: fn (cmd cli.Command) ? {
|
||||||
config_file := cmd.flags.get_string('config-file') ?
|
config_file := cmd.flags.get_string('config-file') ?
|
||||||
conf := env.load<Config>(config_file) ?
|
conf := env.load<Config>(config_file) ?
|
||||||
|
|
||||||
build(conf) ?
|
id := cmd.args[0].int()
|
||||||
|
|
||||||
|
build(conf, id) ?
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,67 @@
|
||||||
|
module client
|
||||||
|
|
||||||
|
import net.http { Method }
|
||||||
|
import net.urllib
|
||||||
|
import response { Response }
|
||||||
|
import json
|
||||||
|
|
||||||
|
pub struct Client {
|
||||||
|
pub:
|
||||||
|
address string
|
||||||
|
api_key string
|
||||||
|
}
|
||||||
|
|
||||||
|
// new creates a new Client instance.
|
||||||
|
pub fn new(address string, api_key string) Client {
|
||||||
|
return Client{
|
||||||
|
address: address
|
||||||
|
api_key: api_key
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// send_request_raw sends an HTTP request, returning the http.Response object.
|
||||||
|
// It encodes the params so that they're safe to pass as HTTP query parameters.
|
||||||
|
fn (c &Client) send_request_raw(method Method, url string, params map[string]string, body string) ?http.Response {
|
||||||
|
mut full_url := '$c.address$url'
|
||||||
|
|
||||||
|
if params.len > 0 {
|
||||||
|
mut params_escaped := map[string]string{}
|
||||||
|
|
||||||
|
// Escape each query param
|
||||||
|
for k, v in params {
|
||||||
|
params_escaped[k] = urllib.query_escape(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
params_str := params_escaped.keys().map('$it=${params[it]}').join('&')
|
||||||
|
|
||||||
|
full_url = '$full_url?$params_str'
|
||||||
|
}
|
||||||
|
|
||||||
|
mut req := http.new_request(method, full_url, body) ?
|
||||||
|
req.add_custom_header('X-Api-Key', c.api_key) ?
|
||||||
|
|
||||||
|
res := req.do() ?
|
||||||
|
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
// send_request<T> just calls send_request_with_body<T> with an empty body.
|
||||||
|
fn (c &Client) send_request<T>(method Method, url string, params map[string]string) ?Response<T> {
|
||||||
|
return c.send_request_with_body<T>(method, url, params, '')
|
||||||
|
}
|
||||||
|
|
||||||
|
// send_request_with_body<T> calls send_request_raw_response & parses its
|
||||||
|
// output as a Response<T> object.
|
||||||
|
fn (c &Client) send_request_with_body<T>(method Method, url string, params map[string]string, body string) ?Response<T> {
|
||||||
|
res_text := c.send_request_raw_response(method, url, params, body) ?
|
||||||
|
data := json.decode(Response<T>, res_text) ?
|
||||||
|
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
// send_request_raw_response returns the raw text response for an HTTP request.
|
||||||
|
fn (c &Client) send_request_raw_response(method Method, url string, params map[string]string, body string) ?string {
|
||||||
|
res := c.send_request_raw(method, url, params, body) ?
|
||||||
|
|
||||||
|
return res.text
|
||||||
|
}
|
|
@ -0,0 +1,51 @@
|
||||||
|
module client
|
||||||
|
|
||||||
|
import db { GitRepo }
|
||||||
|
import net.http { Method }
|
||||||
|
import response { Response }
|
||||||
|
|
||||||
|
// get_git_repos returns the current list of repos.
|
||||||
|
pub fn (c &Client) get_git_repos() ?[]GitRepo {
|
||||||
|
data := c.send_request<[]GitRepo>(Method.get, '/api/repos', {}) ?
|
||||||
|
|
||||||
|
return data.data
|
||||||
|
}
|
||||||
|
|
||||||
|
// get_git_repo returns the repo for a specific ID.
|
||||||
|
pub fn (c &Client) get_git_repo(id int) ?GitRepo {
|
||||||
|
data := c.send_request<GitRepo>(Method.get, '/api/repos/$id', {}) ?
|
||||||
|
|
||||||
|
return data.data
|
||||||
|
}
|
||||||
|
|
||||||
|
// add_git_repo adds a new repo to the server.
|
||||||
|
pub fn (c &Client) add_git_repo(url string, branch string, repo string, arch []string) ?Response<string> {
|
||||||
|
mut params := {
|
||||||
|
'url': url
|
||||||
|
'branch': branch
|
||||||
|
'repo': repo
|
||||||
|
}
|
||||||
|
|
||||||
|
if arch.len > 0 {
|
||||||
|
params['arch'] = arch.join(',')
|
||||||
|
}
|
||||||
|
|
||||||
|
data := c.send_request<string>(Method.post, '/api/repos', params) ?
|
||||||
|
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove_git_repo removes the repo with the given ID from the server.
|
||||||
|
pub fn (c &Client) remove_git_repo(id int) ?Response<string> {
|
||||||
|
data := c.send_request<string>(Method.delete, '/api/repos/$id', {}) ?
|
||||||
|
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
// patch_git_repo sends a PATCH request to the given repo with the params as
|
||||||
|
// payload.
|
||||||
|
pub fn (c &Client) patch_git_repo(id int, params map[string]string) ?Response<string> {
|
||||||
|
data := c.send_request<string>(Method.patch, '/api/repos/$id', params) ?
|
||||||
|
|
||||||
|
return data
|
||||||
|
}
|
|
@ -0,0 +1,53 @@
|
||||||
|
module client
|
||||||
|
|
||||||
|
import db { BuildLog }
|
||||||
|
import net.http { Method }
|
||||||
|
import response { Response }
|
||||||
|
import time
|
||||||
|
|
||||||
|
// get_build_logs returns all build logs.
|
||||||
|
pub fn (c &Client) get_build_logs() ?Response<[]BuildLog> {
|
||||||
|
data := c.send_request<[]BuildLog>(Method.get, '/api/logs', {}) ?
|
||||||
|
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
// get_build_logs_for_repo returns all build logs for a given repo.
|
||||||
|
pub fn (c &Client) get_build_logs_for_repo(repo_id int) ?Response<[]BuildLog> {
|
||||||
|
params := {
|
||||||
|
'repo': repo_id.str()
|
||||||
|
}
|
||||||
|
|
||||||
|
data := c.send_request<[]BuildLog>(Method.get, '/api/logs', params) ?
|
||||||
|
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
// get_build_log returns a specific build log.
|
||||||
|
pub fn (c &Client) get_build_log(id int) ?Response<BuildLog> {
|
||||||
|
data := c.send_request<BuildLog>(Method.get, '/api/logs/$id', {}) ?
|
||||||
|
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
// get_build_log_content returns the contents of the build log file.
|
||||||
|
pub fn (c &Client) get_build_log_content(id int) ?string {
|
||||||
|
data := c.send_request_raw_response(Method.get, '/api/logs/$id/content', {}, '') ?
|
||||||
|
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
// add_build_log adds a new build log to the server.
|
||||||
|
pub fn (c &Client) add_build_log(repo_id int, start_time time.Time, end_time time.Time, arch string, exit_code int, content string) ?Response<string> {
|
||||||
|
params := {
|
||||||
|
'repo': repo_id.str()
|
||||||
|
'startTime': start_time.str()
|
||||||
|
'endTime': end_time.str()
|
||||||
|
'arch': arch
|
||||||
|
'exitCode': exit_code.str()
|
||||||
|
}
|
||||||
|
|
||||||
|
data := c.send_request_with_body<string>(Method.post, '/api/logs', params, content) ?
|
||||||
|
|
||||||
|
return data
|
||||||
|
}
|
|
@ -0,0 +1 @@
|
||||||
|
module console
|
|
@ -3,6 +3,7 @@ module git
|
||||||
import cli
|
import cli
|
||||||
import env
|
import env
|
||||||
import cron.expression { parse_expression }
|
import cron.expression { parse_expression }
|
||||||
|
import client
|
||||||
|
|
||||||
struct Config {
|
struct Config {
|
||||||
address string [required]
|
address string [required]
|
||||||
|
@ -116,54 +117,39 @@ pub fn cmd() cli.Command {
|
||||||
|
|
||||||
// get_repo_by_prefix tries to find the repo with the given prefix in its
|
// get_repo_by_prefix tries to find the repo with the given prefix in its
|
||||||
// ID. If multiple or none are found, an error is raised.
|
// ID. If multiple or none are found, an error is raised.
|
||||||
fn get_repo_by_prefix(conf Config, id_prefix string) ?(string, GitRepo) {
|
|
||||||
repos := get_repos(conf.address, conf.api_key) ?
|
|
||||||
|
|
||||||
mut res := map[string]GitRepo{}
|
|
||||||
|
|
||||||
for id, repo in repos {
|
|
||||||
if id.starts_with(id_prefix) {
|
|
||||||
res[id] = repo
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if res.len == 0 {
|
|
||||||
return error('No repo found for given prefix.')
|
|
||||||
}
|
|
||||||
|
|
||||||
if res.len > 1 {
|
|
||||||
return error('Multiple repos found for given prefix.')
|
|
||||||
}
|
|
||||||
|
|
||||||
return res.keys()[0], res[res.keys()[0]]
|
|
||||||
}
|
|
||||||
|
|
||||||
// list prints out a list of all repositories.
|
// list prints out a list of all repositories.
|
||||||
fn list(conf Config) ? {
|
fn list(conf Config) ? {
|
||||||
repos := get_repos(conf.address, conf.api_key) ?
|
c := client.new(conf.address, conf.api_key)
|
||||||
|
repos := c.get_git_repos() ?
|
||||||
|
|
||||||
for id, details in repos {
|
for repo in repos {
|
||||||
println('${id[..8]}\t$details.url\t$details.branch\t$details.repo')
|
println('$repo.id\t$repo.url\t$repo.branch\t$repo.repo')
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// add adds a new repository to the server's list.
|
// add adds a new repository to the server's list.
|
||||||
fn add(conf Config, url string, branch string, repo string) ? {
|
fn add(conf Config, url string, branch string, repo string) ? {
|
||||||
res := add_repo(conf.address, conf.api_key, url, branch, repo, []) ?
|
c := client.new(conf.address, conf.api_key)
|
||||||
|
res := c.add_git_repo(url, branch, repo, []) ?
|
||||||
|
|
||||||
println(res.message)
|
println(res.message)
|
||||||
}
|
}
|
||||||
|
|
||||||
// remove removes a repository from the server's list.
|
// remove removes a repository from the server's list.
|
||||||
fn remove(conf Config, id_prefix string) ? {
|
fn remove(conf Config, id string) ? {
|
||||||
id, _ := get_repo_by_prefix(conf, id_prefix) ?
|
// id, _ := get_repo_by_prefix(conf, id_prefix) ?
|
||||||
res := remove_repo(conf.address, conf.api_key, id) ?
|
id_int := id.int()
|
||||||
|
|
||||||
|
if id_int != 0 {
|
||||||
|
c := client.new(conf.address, conf.api_key)
|
||||||
|
res := c.remove_git_repo(id_int) ?
|
||||||
println(res.message)
|
println(res.message)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// patch patches a given repository with the provided params.
|
// patch patches a given repository with the provided params.
|
||||||
fn patch(conf Config, id_prefix string, params map[string]string) ? {
|
fn patch(conf Config, id string, params map[string]string) ? {
|
||||||
// We check the cron expression first because it's useless to send an
|
// We check the cron expression first because it's useless to send an
|
||||||
// invalid one to the server.
|
// invalid one to the server.
|
||||||
if 'schedule' in params && params['schedule'] != '' {
|
if 'schedule' in params && params['schedule'] != '' {
|
||||||
|
@ -172,20 +158,24 @@ fn patch(conf Config, id_prefix string, params map[string]string) ? {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
id, _ := get_repo_by_prefix(conf, id_prefix) ?
|
id_int := id.int()
|
||||||
res := patch_repo(conf.address, conf.api_key, id, params) ?
|
if id_int != 0 {
|
||||||
|
c := client.new(conf.address, conf.api_key)
|
||||||
|
res := c.patch_git_repo(id_int, params) ?
|
||||||
|
|
||||||
println(res.message)
|
println(res.message)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// info shows detailed information for a given repo.
|
// info shows detailed information for a given repo.
|
||||||
fn info(conf Config, id_prefix string) ? {
|
fn info(conf Config, id string) ? {
|
||||||
id, repo := get_repo_by_prefix(conf, id_prefix) ?
|
id_int := id.int()
|
||||||
|
|
||||||
println('id: $id')
|
if id_int == 0 {
|
||||||
|
return
|
||||||
$for field in GitRepo.fields {
|
|
||||||
val := repo.$(field.name)
|
|
||||||
println('$field.name: $val')
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
c := client.new(conf.address, conf.api_key)
|
||||||
|
repo := c.get_git_repo(id_int) ?
|
||||||
|
println(repo)
|
||||||
}
|
}
|
|
@ -0,0 +1,106 @@
|
||||||
|
module logs
|
||||||
|
|
||||||
|
import cli
|
||||||
|
import env
|
||||||
|
import client
|
||||||
|
import db
|
||||||
|
|
||||||
|
struct Config {
|
||||||
|
address string [required]
|
||||||
|
api_key string [required]
|
||||||
|
}
|
||||||
|
|
||||||
|
// cmd returns the cli module that handles the build repos API.
|
||||||
|
pub fn cmd() cli.Command {
|
||||||
|
return cli.Command{
|
||||||
|
name: 'logs'
|
||||||
|
description: 'Interact with the build logs API.'
|
||||||
|
commands: [
|
||||||
|
cli.Command{
|
||||||
|
name: 'list'
|
||||||
|
description: 'List the build logs. If a repo ID is provided, only list the build logs for that repo.'
|
||||||
|
flags: [
|
||||||
|
cli.Flag{
|
||||||
|
name: 'repo'
|
||||||
|
description: 'ID of the Git repo to restrict list to.'
|
||||||
|
flag: cli.FlagType.int
|
||||||
|
},
|
||||||
|
]
|
||||||
|
execute: fn (cmd cli.Command) ? {
|
||||||
|
config_file := cmd.flags.get_string('config-file') ?
|
||||||
|
conf := env.load<Config>(config_file) ?
|
||||||
|
|
||||||
|
repo_id := cmd.flags.get_int('repo') ?
|
||||||
|
|
||||||
|
if repo_id == 0 { list(conf) ? } else { list_for_repo(conf, repo_id) ? }
|
||||||
|
}
|
||||||
|
},
|
||||||
|
cli.Command{
|
||||||
|
name: 'info'
|
||||||
|
required_args: 1
|
||||||
|
usage: 'id'
|
||||||
|
description: 'Show all info for a specific build log.'
|
||||||
|
execute: fn (cmd cli.Command) ? {
|
||||||
|
config_file := cmd.flags.get_string('config-file') ?
|
||||||
|
conf := env.load<Config>(config_file) ?
|
||||||
|
|
||||||
|
id := cmd.args[0].int()
|
||||||
|
info(conf, id) ?
|
||||||
|
}
|
||||||
|
},
|
||||||
|
cli.Command{
|
||||||
|
name: 'content'
|
||||||
|
required_args: 1
|
||||||
|
usage: 'id'
|
||||||
|
description: 'Output the content of a build log to stdout.'
|
||||||
|
execute: fn (cmd cli.Command) ? {
|
||||||
|
config_file := cmd.flags.get_string('config-file') ?
|
||||||
|
conf := env.load<Config>(config_file) ?
|
||||||
|
|
||||||
|
id := cmd.args[0].int()
|
||||||
|
content(conf, id) ?
|
||||||
|
}
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// print_log_list prints a list of logs.
|
||||||
|
fn print_log_list(logs []db.BuildLog) {
|
||||||
|
for log in logs {
|
||||||
|
println('$log.id\t$log.start_time\t$log.exit_code')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// list prints a list of all build logs.
|
||||||
|
fn list(conf Config) ? {
|
||||||
|
c := client.new(conf.address, conf.api_key)
|
||||||
|
logs := c.get_build_logs() ?.data
|
||||||
|
|
||||||
|
print_log_list(logs)
|
||||||
|
}
|
||||||
|
|
||||||
|
// list prints a list of all build logs for a given repo.
|
||||||
|
fn list_for_repo(conf Config, repo_id int) ? {
|
||||||
|
c := client.new(conf.address, conf.api_key)
|
||||||
|
logs := c.get_build_logs_for_repo(repo_id) ?.data
|
||||||
|
|
||||||
|
print_log_list(logs)
|
||||||
|
}
|
||||||
|
|
||||||
|
// info print the detailed info for a given build log.
|
||||||
|
fn info(conf Config, id int) ? {
|
||||||
|
c := client.new(conf.address, conf.api_key)
|
||||||
|
log := c.get_build_log(id) ?.data
|
||||||
|
|
||||||
|
print(log)
|
||||||
|
}
|
||||||
|
|
||||||
|
// content outputs the contents of the log file for a given build log to
|
||||||
|
// stdout.
|
||||||
|
fn content(conf Config, id int) ? {
|
||||||
|
c := client.new(conf.address, conf.api_key)
|
||||||
|
content := c.get_build_log_content(id) ?
|
||||||
|
|
||||||
|
println(content)
|
||||||
|
}
|
|
@ -6,9 +6,9 @@ import env
|
||||||
struct Config {
|
struct Config {
|
||||||
pub:
|
pub:
|
||||||
log_level string = 'WARN'
|
log_level string = 'WARN'
|
||||||
log_file string = 'vieter.log'
|
|
||||||
api_key string
|
api_key string
|
||||||
address string
|
address string
|
||||||
|
data_dir string
|
||||||
base_image string = 'archlinux:base-devel'
|
base_image string = 'archlinux:base-devel'
|
||||||
max_concurrent_builds int = 1
|
max_concurrent_builds int = 1
|
||||||
api_update_frequency int = 15
|
api_update_frequency int = 15
|
||||||
|
|
|
@ -3,6 +3,9 @@ module cron
|
||||||
import log
|
import log
|
||||||
import cron.daemon
|
import cron.daemon
|
||||||
import cron.expression
|
import cron.expression
|
||||||
|
import os
|
||||||
|
|
||||||
|
const log_file_name = 'vieter.cron.log'
|
||||||
|
|
||||||
// cron starts a cron daemon & starts periodically scheduling builds.
|
// cron starts a cron daemon & starts periodically scheduling builds.
|
||||||
pub fn cron(conf Config) ? {
|
pub fn cron(conf Config) ? {
|
||||||
|
@ -15,7 +18,8 @@ pub fn cron(conf Config) ? {
|
||||||
level: log_level
|
level: log_level
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.set_full_logpath(conf.log_file)
|
log_file := os.join_path_single(conf.data_dir, cron.log_file_name)
|
||||||
|
logger.set_full_logpath(log_file)
|
||||||
logger.log_to_console_too()
|
logger.log_to_console_too()
|
||||||
|
|
||||||
ce := expression.parse_expression(conf.global_schedule) or {
|
ce := expression.parse_expression(conf.global_schedule) or {
|
||||||
|
|
|
@ -3,12 +3,13 @@ module daemon
|
||||||
import time
|
import time
|
||||||
import sync.stdatomic
|
import sync.stdatomic
|
||||||
import build
|
import build
|
||||||
|
import os
|
||||||
|
|
||||||
const build_empty = 0
|
const (
|
||||||
|
build_empty = 0
|
||||||
const build_running = 1
|
build_running = 1
|
||||||
|
build_done = 2
|
||||||
const build_done = 2
|
)
|
||||||
|
|
||||||
// clean_finished_builds removes finished builds from the build slots & returns
|
// clean_finished_builds removes finished builds from the build slots & returns
|
||||||
// them.
|
// them.
|
||||||
|
@ -77,13 +78,20 @@ fn (mut d Daemon) run_build(build_index int, sb ScheduledBuild) {
|
||||||
// 0 means success, 1 means failure
|
// 0 means success, 1 means failure
|
||||||
mut status := 0
|
mut status := 0
|
||||||
|
|
||||||
build.build_repo(d.address, d.api_key, d.builder_images.last(), &sb.repo) or {
|
res := build.build_repo(d.client.address, d.client.api_key, d.builder_images.last(),
|
||||||
|
&sb.repo) or {
|
||||||
d.ldebug('build_repo error: $err.msg()')
|
d.ldebug('build_repo error: $err.msg()')
|
||||||
status = 1
|
status = 1
|
||||||
|
|
||||||
|
build.BuildResult{}
|
||||||
}
|
}
|
||||||
|
|
||||||
if status == 0 {
|
if status == 0 {
|
||||||
d.linfo('finished build: $sb.repo.url $sb.repo.branch')
|
d.linfo('finished build: $sb.repo.url $sb.repo.branch; uploading logs...')
|
||||||
|
|
||||||
|
build_arch := os.uname().machine
|
||||||
|
d.client.add_build_log(sb.repo.id, res.start_time, res.end_time, build_arch, res.exit_code,
|
||||||
|
res.logs) or { d.lerror('Failed to upload logs for $sb.repo.url $sb.repo.arch') }
|
||||||
} else {
|
} else {
|
||||||
d.linfo('failed build: $sb.repo.url $sb.repo.branch')
|
d.linfo('failed build: $sb.repo.url $sb.repo.branch')
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
module daemon
|
module daemon
|
||||||
|
|
||||||
import git
|
|
||||||
import time
|
import time
|
||||||
import log
|
import log
|
||||||
import datatypes { MinHeap }
|
import datatypes { MinHeap }
|
||||||
|
@ -8,17 +7,20 @@ import cron.expression { CronExpression, parse_expression }
|
||||||
import math
|
import math
|
||||||
import build
|
import build
|
||||||
import docker
|
import docker
|
||||||
|
import db
|
||||||
|
import os
|
||||||
|
import client
|
||||||
|
|
||||||
// How many seconds to wait before retrying to update API if failed
|
const (
|
||||||
const api_update_retry_timeout = 5
|
// How many seconds to wait before retrying to update API if failed
|
||||||
|
api_update_retry_timeout = 5
|
||||||
// How many seconds to wait before retrying to rebuild image if failed
|
// How many seconds to wait before retrying to rebuild image if failed
|
||||||
const rebuild_base_image_retry_timout = 30
|
rebuild_base_image_retry_timout = 30
|
||||||
|
)
|
||||||
|
|
||||||
struct ScheduledBuild {
|
struct ScheduledBuild {
|
||||||
pub:
|
pub:
|
||||||
repo_id string
|
repo db.GitRepo
|
||||||
repo git.GitRepo
|
|
||||||
timestamp time.Time
|
timestamp time.Time
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -29,15 +31,14 @@ fn (r1 ScheduledBuild) < (r2 ScheduledBuild) bool {
|
||||||
|
|
||||||
pub struct Daemon {
|
pub struct Daemon {
|
||||||
mut:
|
mut:
|
||||||
address string
|
client client.Client
|
||||||
api_key string
|
|
||||||
base_image string
|
base_image string
|
||||||
builder_images []string
|
builder_images []string
|
||||||
global_schedule CronExpression
|
global_schedule CronExpression
|
||||||
api_update_frequency int
|
api_update_frequency int
|
||||||
image_rebuild_frequency int
|
image_rebuild_frequency int
|
||||||
// Repos currently loaded from API.
|
// Repos currently loaded from API.
|
||||||
repos_map map[string]git.GitRepo
|
repos []db.GitRepo
|
||||||
// At what point to update the list of repositories.
|
// At what point to update the list of repositories.
|
||||||
api_update_timestamp time.Time
|
api_update_timestamp time.Time
|
||||||
image_build_timestamp time.Time
|
image_build_timestamp time.Time
|
||||||
|
@ -54,8 +55,7 @@ mut:
|
||||||
// populates the build queue for the first time.
|
// populates the build queue for the first time.
|
||||||
pub fn init_daemon(logger log.Log, address string, api_key string, base_image string, global_schedule CronExpression, max_concurrent_builds int, api_update_frequency int, image_rebuild_frequency int) ?Daemon {
|
pub fn init_daemon(logger log.Log, address string, api_key string, base_image string, global_schedule CronExpression, max_concurrent_builds int, api_update_frequency int, image_rebuild_frequency int) ?Daemon {
|
||||||
mut d := Daemon{
|
mut d := Daemon{
|
||||||
address: address
|
client: client.new(address, api_key)
|
||||||
api_key: api_key
|
|
||||||
base_image: base_image
|
base_image: base_image
|
||||||
global_schedule: global_schedule
|
global_schedule: global_schedule
|
||||||
api_update_frequency: api_update_frequency
|
api_update_frequency: api_update_frequency
|
||||||
|
@ -90,7 +90,7 @@ pub fn (mut d Daemon) run() {
|
||||||
// haven't been renewed.
|
// haven't been renewed.
|
||||||
else {
|
else {
|
||||||
for sb in finished_builds {
|
for sb in finished_builds {
|
||||||
d.schedule_build(sb.repo_id, sb.repo)
|
d.schedule_build(sb.repo)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -149,11 +149,11 @@ pub fn (mut d Daemon) run() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// schedule_build adds the next occurence of the given repo build to the queue.
|
// schedule_build adds the next occurence of the given repo build to the queue.
|
||||||
fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) {
|
fn (mut d Daemon) schedule_build(repo db.GitRepo) {
|
||||||
ce := if repo.schedule != '' {
|
ce := if repo.schedule != '' {
|
||||||
parse_expression(repo.schedule) or {
|
parse_expression(repo.schedule) or {
|
||||||
// TODO This shouldn't return an error if the expression is empty.
|
// TODO This shouldn't return an error if the expression is empty.
|
||||||
d.lerror("Error while parsing cron expression '$repo.schedule' ($repo_id): $err.msg()")
|
d.lerror("Error while parsing cron expression '$repo.schedule' (id $repo.id): $err.msg()")
|
||||||
|
|
||||||
d.global_schedule
|
d.global_schedule
|
||||||
}
|
}
|
||||||
|
@ -168,7 +168,6 @@ fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) {
|
||||||
}
|
}
|
||||||
|
|
||||||
d.queue.insert(ScheduledBuild{
|
d.queue.insert(ScheduledBuild{
|
||||||
repo_id: repo_id
|
|
||||||
repo: repo
|
repo: repo
|
||||||
timestamp: timestamp
|
timestamp: timestamp
|
||||||
})
|
})
|
||||||
|
@ -179,14 +178,18 @@ fn (mut d Daemon) schedule_build(repo_id string, repo git.GitRepo) {
|
||||||
fn (mut d Daemon) renew_repos() {
|
fn (mut d Daemon) renew_repos() {
|
||||||
d.linfo('Renewing repos...')
|
d.linfo('Renewing repos...')
|
||||||
|
|
||||||
mut new_repos := git.get_repos(d.address, d.api_key) or {
|
mut new_repos := d.client.get_git_repos() or {
|
||||||
d.lerror('Failed to renew repos. Retrying in ${daemon.api_update_retry_timeout}s...')
|
d.lerror('Failed to renew repos. Retrying in ${daemon.api_update_retry_timeout}s...')
|
||||||
d.api_update_timestamp = time.now().add_seconds(daemon.api_update_retry_timeout)
|
d.api_update_timestamp = time.now().add_seconds(daemon.api_update_retry_timeout)
|
||||||
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
d.repos_map = new_repos.move()
|
// Filter out any repos that shouldn't run on this architecture
|
||||||
|
cur_arch := os.uname().machine
|
||||||
|
new_repos = new_repos.filter(it.arch.any(it.value == cur_arch))
|
||||||
|
|
||||||
|
d.repos = new_repos
|
||||||
|
|
||||||
d.api_update_timestamp = time.now().add_seconds(60 * d.api_update_frequency)
|
d.api_update_timestamp = time.now().add_seconds(60 * d.api_update_frequency)
|
||||||
}
|
}
|
||||||
|
@ -224,8 +227,8 @@ fn (mut d Daemon) renew_queue() {
|
||||||
|
|
||||||
// For each repository in repos_map, parse their cron expression (or use
|
// For each repository in repos_map, parse their cron expression (or use
|
||||||
// the default one if not present) & add them to the queue
|
// the default one if not present) & add them to the queue
|
||||||
for id, repo in d.repos_map {
|
for repo in d.repos {
|
||||||
d.schedule_build(id, repo)
|
d.schedule_build(repo)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,21 @@
|
||||||
|
module db
|
||||||
|
|
||||||
|
import sqlite
|
||||||
|
|
||||||
|
struct VieterDb {
|
||||||
|
conn sqlite.DB
|
||||||
|
}
|
||||||
|
|
||||||
|
// init initializes a database & adds the correct tables.
|
||||||
|
pub fn init(db_path string) ?VieterDb {
|
||||||
|
conn := sqlite.connect(db_path) ?
|
||||||
|
|
||||||
|
sql conn {
|
||||||
|
create table GitRepo
|
||||||
|
create table BuildLog
|
||||||
|
}
|
||||||
|
|
||||||
|
return VieterDb{
|
||||||
|
conn: conn
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,162 @@
|
||||||
|
module db
|
||||||
|
|
||||||
|
pub struct GitRepoArch {
|
||||||
|
pub:
|
||||||
|
id int [primary; sql: serial]
|
||||||
|
repo_id int [nonull]
|
||||||
|
value string [nonull]
|
||||||
|
}
|
||||||
|
|
||||||
|
// str returns a string representation.
|
||||||
|
pub fn (gra &GitRepoArch) str() string {
|
||||||
|
return gra.value
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct GitRepo {
|
||||||
|
pub mut:
|
||||||
|
id int [optional; primary; sql: serial]
|
||||||
|
// URL of the Git repository
|
||||||
|
url string [nonull]
|
||||||
|
// Branch of the Git repository to use
|
||||||
|
branch string [nonull]
|
||||||
|
// Which repo the builder should publish packages to
|
||||||
|
repo string [nonull]
|
||||||
|
// Cron schedule describing how frequently to build the repo.
|
||||||
|
schedule string [optional]
|
||||||
|
// On which architectures the package is allowed to be built. In reality,
|
||||||
|
// this controls which builders will periodically build the image.
|
||||||
|
arch []GitRepoArch [fkey: 'repo_id']
|
||||||
|
}
|
||||||
|
|
||||||
|
// str returns a string representation.
|
||||||
|
pub fn (gr &GitRepo) str() string {
|
||||||
|
mut parts := [
|
||||||
|
'id: $gr.id',
|
||||||
|
'url: $gr.url',
|
||||||
|
'branch: $gr.branch',
|
||||||
|
'repo: $gr.repo',
|
||||||
|
'schedule: $gr.schedule',
|
||||||
|
'arch: ${gr.arch.map(it.value).join(', ')}',
|
||||||
|
]
|
||||||
|
str := parts.join('\n')
|
||||||
|
|
||||||
|
return str
|
||||||
|
}
|
||||||
|
|
||||||
|
// patch_from_params patches a GitRepo from a map[string]string, usually
|
||||||
|
// provided from a web.App's params
|
||||||
|
pub fn (mut r GitRepo) patch_from_params(params map[string]string) {
|
||||||
|
$for field in GitRepo.fields {
|
||||||
|
if field.name in params {
|
||||||
|
$if field.typ is string {
|
||||||
|
r.$(field.name) = params[field.name]
|
||||||
|
// This specific type check is needed for the compiler to ensure
|
||||||
|
// our types are correct
|
||||||
|
} $else $if field.typ is []GitRepoArch {
|
||||||
|
r.$(field.name) = params[field.name].split(',').map(GitRepoArch{ value: it })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// git_repo_from_params creates a GitRepo from a map[string]string, usually
|
||||||
|
// provided from a web.App's params
|
||||||
|
pub fn git_repo_from_params(params map[string]string) ?GitRepo {
|
||||||
|
mut repo := GitRepo{}
|
||||||
|
|
||||||
|
// If we're creating a new GitRepo, we want all fields to be present before
|
||||||
|
// "patching".
|
||||||
|
$for field in GitRepo.fields {
|
||||||
|
if field.name !in params && !field.attrs.contains('optional') {
|
||||||
|
return error('Missing parameter: ${field.name}.')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
repo.patch_from_params(params)
|
||||||
|
|
||||||
|
return repo
|
||||||
|
}
|
||||||
|
|
||||||
|
// get_git_repos returns all GitRepo's in the database.
|
||||||
|
pub fn (db &VieterDb) get_git_repos() []GitRepo {
|
||||||
|
res := sql db.conn {
|
||||||
|
select from GitRepo order by id
|
||||||
|
}
|
||||||
|
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
// get_git_repo tries to return a specific GitRepo.
|
||||||
|
pub fn (db &VieterDb) get_git_repo(repo_id int) ?GitRepo {
|
||||||
|
res := sql db.conn {
|
||||||
|
select from GitRepo where id == repo_id
|
||||||
|
}
|
||||||
|
|
||||||
|
// If a select statement fails, it returns a zeroed object. By
|
||||||
|
// checking one of the required fields, we can see whether the query
|
||||||
|
// returned a result or not.
|
||||||
|
if res.id == 0 {
|
||||||
|
return none
|
||||||
|
}
|
||||||
|
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
// add_git_repo inserts the given GitRepo into the database.
|
||||||
|
pub fn (db &VieterDb) add_git_repo(repo GitRepo) {
|
||||||
|
sql db.conn {
|
||||||
|
insert repo into GitRepo
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// delete_git_repo deletes the repo with the given ID from the database.
|
||||||
|
pub fn (db &VieterDb) delete_git_repo(repo_id int) {
|
||||||
|
sql db.conn {
|
||||||
|
delete from GitRepo where id == repo_id
|
||||||
|
delete from GitRepoArch where repo_id == repo_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// update_git_repo updates any non-array values for a given GitRepo.
|
||||||
|
pub fn (db &VieterDb) update_git_repo(repo_id int, params map[string]string) {
|
||||||
|
mut values := []string{}
|
||||||
|
|
||||||
|
// TODO does this allow for SQL injection?
|
||||||
|
$for field in GitRepo.fields {
|
||||||
|
if field.name in params {
|
||||||
|
// Any fields that are array types require their own update method
|
||||||
|
$if field.typ is string {
|
||||||
|
values << "$field.name = '${params[field.name]}'"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
values_str := values.join(', ')
|
||||||
|
query := 'update GitRepo set $values_str where id == $repo_id'
|
||||||
|
|
||||||
|
db.conn.exec_none(query)
|
||||||
|
}
|
||||||
|
|
||||||
|
// update_git_repo_archs updates a given GitRepo's arch value.
|
||||||
|
pub fn (db &VieterDb) update_git_repo_archs(repo_id int, archs []GitRepoArch) {
|
||||||
|
archs_with_id := archs.map(GitRepoArch{
|
||||||
|
...it
|
||||||
|
repo_id: repo_id
|
||||||
|
})
|
||||||
|
|
||||||
|
sql db.conn {
|
||||||
|
delete from GitRepoArch where repo_id == repo_id
|
||||||
|
}
|
||||||
|
|
||||||
|
for arch in archs_with_id {
|
||||||
|
sql db.conn {
|
||||||
|
insert arch into GitRepoArch
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// git_repo_exists is a utility function that checks whether a repo with the
|
||||||
|
// given id exists.
|
||||||
|
pub fn (db &VieterDb) git_repo_exists(repo_id int) bool {
|
||||||
|
db.get_git_repo(repo_id) or { return false }
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
|
@ -0,0 +1,74 @@
|
||||||
|
module db
|
||||||
|
|
||||||
|
import time
|
||||||
|
|
||||||
|
pub struct BuildLog {
|
||||||
|
pub:
|
||||||
|
id int [primary; sql: serial]
|
||||||
|
repo_id int [nonull]
|
||||||
|
start_time time.Time [nonull]
|
||||||
|
end_time time.Time [nonull]
|
||||||
|
arch string [nonull]
|
||||||
|
exit_code int [nonull]
|
||||||
|
}
|
||||||
|
|
||||||
|
// str returns a string representation.
|
||||||
|
pub fn (bl &BuildLog) str() string {
|
||||||
|
mut parts := [
|
||||||
|
'id: $bl.id',
|
||||||
|
'repo id: $bl.repo_id',
|
||||||
|
'start time: $bl.start_time',
|
||||||
|
'end time: $bl.end_time',
|
||||||
|
'arch: $bl.arch',
|
||||||
|
'exit code: $bl.exit_code',
|
||||||
|
]
|
||||||
|
str := parts.join('\n')
|
||||||
|
|
||||||
|
return str
|
||||||
|
}
|
||||||
|
|
||||||
|
// get_build_logs returns all BuildLog's in the database.
|
||||||
|
pub fn (db &VieterDb) get_build_logs() []BuildLog {
|
||||||
|
res := sql db.conn {
|
||||||
|
select from BuildLog order by id
|
||||||
|
}
|
||||||
|
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
// get_build_logs_for_repo returns all BuildLog's in the database for a given
|
||||||
|
// repo.
|
||||||
|
pub fn (db &VieterDb) get_build_logs_for_repo(repo_id int) []BuildLog {
|
||||||
|
res := sql db.conn {
|
||||||
|
select from BuildLog where repo_id == repo_id order by id
|
||||||
|
}
|
||||||
|
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
// get_build_log tries to return a specific BuildLog.
|
||||||
|
pub fn (db &VieterDb) get_build_log(id int) ?BuildLog {
|
||||||
|
res := sql db.conn {
|
||||||
|
select from BuildLog where id == id
|
||||||
|
}
|
||||||
|
|
||||||
|
if res.id == 0 {
|
||||||
|
return none
|
||||||
|
}
|
||||||
|
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
// add_build_log inserts the given BuildLog into the database.
|
||||||
|
pub fn (db &VieterDb) add_build_log(log BuildLog) {
|
||||||
|
sql db.conn {
|
||||||
|
insert log into BuildLog
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// delete_build_log delete the BuildLog with the given ID from the database.
|
||||||
|
pub fn (db &VieterDb) delete_build_log(id int) {
|
||||||
|
sql db.conn {
|
||||||
|
delete from BuildLog where id == id
|
||||||
|
}
|
||||||
|
}
|
|
@ -2,6 +2,7 @@ module docker
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import net.urllib
|
import net.urllib
|
||||||
|
import time
|
||||||
|
|
||||||
struct Container {
|
struct Container {
|
||||||
id string [json: Id]
|
id string [json: Id]
|
||||||
|
@ -49,13 +50,21 @@ pub fn start_container(id string) ?bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
struct ContainerInspect {
|
struct ContainerInspect {
|
||||||
pub:
|
pub mut:
|
||||||
state ContainerState [json: State]
|
state ContainerState [json: State]
|
||||||
}
|
}
|
||||||
|
|
||||||
struct ContainerState {
|
struct ContainerState {
|
||||||
pub:
|
pub:
|
||||||
running bool [json: Running]
|
running bool [json: Running]
|
||||||
|
status string [json: Status]
|
||||||
|
exit_code int [json: ExitCode]
|
||||||
|
// These use a rather specific format so they have to be parsed later
|
||||||
|
start_time_str string [json: StartedAt]
|
||||||
|
end_time_str string [json: FinishedAt]
|
||||||
|
pub mut:
|
||||||
|
start_time time.Time [skip]
|
||||||
|
end_time time.Time [skip]
|
||||||
}
|
}
|
||||||
|
|
||||||
// inspect_container returns the result of inspecting a container with a given
|
// inspect_container returns the result of inspecting a container with a given
|
||||||
|
@ -67,7 +76,15 @@ pub fn inspect_container(id string) ?ContainerInspect {
|
||||||
return error('Failed to inspect container.')
|
return error('Failed to inspect container.')
|
||||||
}
|
}
|
||||||
|
|
||||||
return json.decode(ContainerInspect, res.text) or {}
|
mut data := json.decode(ContainerInspect, res.text) ?
|
||||||
|
|
||||||
|
data.state.start_time = time.parse_rfc3339(data.state.start_time_str) ?
|
||||||
|
|
||||||
|
if data.state.status == 'exited' {
|
||||||
|
data.state.end_time = time.parse_rfc3339(data.state.end_time_str) ?
|
||||||
|
}
|
||||||
|
|
||||||
|
return data
|
||||||
}
|
}
|
||||||
|
|
||||||
// remove_container removes a container with a given ID.
|
// remove_container removes a container with a given ID.
|
||||||
|
@ -76,3 +93,25 @@ pub fn remove_container(id string) ?bool {
|
||||||
|
|
||||||
return res.status_code == 204
|
return res.status_code == 204
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// get_container_logs retrieves the logs for a Docker container, both stdout &
|
||||||
|
// stderr.
|
||||||
|
pub fn get_container_logs(id string) ?string {
|
||||||
|
res := request('GET', urllib.parse('/v1.41/containers/$id/logs?stdout=true&stderr=true') ?) ?
|
||||||
|
mut res_bytes := res.text.bytes()
|
||||||
|
|
||||||
|
// Docker uses a special "stream" format for their logs, so we have to
|
||||||
|
// clean up the data.
|
||||||
|
mut index := 0
|
||||||
|
|
||||||
|
for index < res_bytes.len {
|
||||||
|
// The reverse is required because V reads in the bytes differently
|
||||||
|
t := res_bytes[index + 4..index + 8].reverse()
|
||||||
|
len_length := unsafe { *(&u32(&t[0])) }
|
||||||
|
|
||||||
|
res_bytes.delete_many(index, 8)
|
||||||
|
index += int(len_length)
|
||||||
|
}
|
||||||
|
|
||||||
|
return res_bytes.bytestr()
|
||||||
|
}
|
||||||
|
|
|
@ -1,68 +0,0 @@
|
||||||
module git
|
|
||||||
|
|
||||||
import json
|
|
||||||
import response { Response }
|
|
||||||
import net.http
|
|
||||||
|
|
||||||
// send_request<T> is a convenience method for sending requests to the repos
|
|
||||||
// API. It mostly does string manipulation to create a query string containing
|
|
||||||
// the provided params.
|
|
||||||
fn send_request<T>(method http.Method, address string, url string, api_key string, params map[string]string) ?Response<T> {
|
|
||||||
mut full_url := '$address$url'
|
|
||||||
|
|
||||||
if params.len > 0 {
|
|
||||||
params_str := params.keys().map('$it=${params[it]}').join('&')
|
|
||||||
|
|
||||||
full_url = '$full_url?$params_str'
|
|
||||||
}
|
|
||||||
|
|
||||||
mut req := http.new_request(method, full_url, '') ?
|
|
||||||
req.add_custom_header('X-API-Key', api_key) ?
|
|
||||||
|
|
||||||
res := req.do() ?
|
|
||||||
data := json.decode(Response<T>, res.text) ?
|
|
||||||
|
|
||||||
return data
|
|
||||||
}
|
|
||||||
|
|
||||||
// get_repos returns the current list of repos.
|
|
||||||
pub fn get_repos(address string, api_key string) ?map[string]GitRepo {
|
|
||||||
data := send_request<map[string]GitRepo>(http.Method.get, address, '/api/repos', api_key,
|
|
||||||
{}) ?
|
|
||||||
|
|
||||||
return data.data
|
|
||||||
}
|
|
||||||
|
|
||||||
// add_repo adds a new repo to the server.
|
|
||||||
pub fn add_repo(address string, api_key string, url string, branch string, repo string, arch []string) ?Response<string> {
|
|
||||||
mut params := {
|
|
||||||
'url': url
|
|
||||||
'branch': branch
|
|
||||||
'repo': repo
|
|
||||||
}
|
|
||||||
|
|
||||||
if arch.len > 0 {
|
|
||||||
params['arch'] = arch.join(',')
|
|
||||||
}
|
|
||||||
|
|
||||||
data := send_request<string>(http.Method.post, address, '/api/repos', api_key, params) ?
|
|
||||||
|
|
||||||
return data
|
|
||||||
}
|
|
||||||
|
|
||||||
// remove_repo removes the repo with the given ID from the server.
|
|
||||||
pub fn remove_repo(address string, api_key string, id string) ?Response<string> {
|
|
||||||
data := send_request<string>(http.Method.delete, address, '/api/repos/$id', api_key,
|
|
||||||
{}) ?
|
|
||||||
|
|
||||||
return data
|
|
||||||
}
|
|
||||||
|
|
||||||
// patch_repo sends a PATCH request to the given repo with the params as
|
|
||||||
// payload.
|
|
||||||
pub fn patch_repo(address string, api_key string, id string, params map[string]string) ?Response<string> {
|
|
||||||
data := send_request<string>(http.Method.patch, address, '/api/repos/$id', api_key,
|
|
||||||
params) ?
|
|
||||||
|
|
||||||
return data
|
|
||||||
}
|
|
|
@ -1,84 +0,0 @@
|
||||||
module git
|
|
||||||
|
|
||||||
import os
|
|
||||||
import json
|
|
||||||
|
|
||||||
pub struct GitRepo {
|
|
||||||
pub mut:
|
|
||||||
// URL of the Git repository
|
|
||||||
url string
|
|
||||||
// Branch of the Git repository to use
|
|
||||||
branch string
|
|
||||||
// On which architectures the package is allowed to be built. In reality,
|
|
||||||
// this controls which builders will periodically build the image.
|
|
||||||
arch []string
|
|
||||||
// Which repo the builder should publish packages to
|
|
||||||
repo string
|
|
||||||
// Cron schedule describing how frequently to build the repo.
|
|
||||||
schedule string [optional]
|
|
||||||
}
|
|
||||||
|
|
||||||
// patch_from_params patches a GitRepo from a map[string]string, usually
|
|
||||||
// provided from a web.App's params
|
|
||||||
pub fn (mut r GitRepo) patch_from_params(params map[string]string) {
|
|
||||||
$for field in GitRepo.fields {
|
|
||||||
if field.name in params {
|
|
||||||
$if field.typ is string {
|
|
||||||
r.$(field.name) = params[field.name]
|
|
||||||
// This specific type check is needed for the compiler to ensure
|
|
||||||
// our types are correct
|
|
||||||
} $else $if field.typ is []string {
|
|
||||||
r.$(field.name) = params[field.name].split(',')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// read_repos reads the provided path & parses it into a map of GitRepo's.
|
|
||||||
pub fn read_repos(path string) ?map[string]GitRepo {
|
|
||||||
if !os.exists(path) {
|
|
||||||
mut f := os.create(path) ?
|
|
||||||
|
|
||||||
defer {
|
|
||||||
f.close()
|
|
||||||
}
|
|
||||||
|
|
||||||
f.write_string('{}') ?
|
|
||||||
|
|
||||||
return {}
|
|
||||||
}
|
|
||||||
|
|
||||||
content := os.read_file(path) ?
|
|
||||||
res := json.decode(map[string]GitRepo, content) ?
|
|
||||||
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
|
|
||||||
// write_repos writes a map of GitRepo's back to disk given the provided path.
|
|
||||||
pub fn write_repos(path string, repos &map[string]GitRepo) ? {
|
|
||||||
mut f := os.create(path) ?
|
|
||||||
|
|
||||||
defer {
|
|
||||||
f.close()
|
|
||||||
}
|
|
||||||
|
|
||||||
value := json.encode(repos)
|
|
||||||
f.write_string(value) ?
|
|
||||||
}
|
|
||||||
|
|
||||||
// repo_from_params creates a GitRepo from a map[string]string, usually
// provided from a web.App's params
pub fn repo_from_params(params map[string]string) ?GitRepo {
	mut res := GitRepo{}

	// A brand-new GitRepo requires every non-optional field to be present
	// up front, so validate the params before "patching".
	$for field in GitRepo.fields {
		is_optional := field.attrs.contains('optional')

		if !is_optional && field.name !in params {
			return error('Missing parameter: ${field.name}.')
		}
	}
	res.patch_from_params(params)

	return res
}
|
|
|
@ -4,14 +4,15 @@ import os
|
||||||
import server
|
import server
|
||||||
import cli
|
import cli
|
||||||
import build
|
import build
|
||||||
import git
|
import console.git
|
||||||
|
import console.logs
|
||||||
import cron
|
import cron
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
mut app := cli.Command{
|
mut app := cli.Command{
|
||||||
name: 'vieter'
|
name: 'vieter'
|
||||||
description: 'Vieter is a lightweight implementation of an Arch repository server.'
|
description: 'Vieter is a lightweight implementation of an Arch repository server.'
|
||||||
version: '0.2.0'
|
version: '0.3.0-alpha.1'
|
||||||
flags: [
|
flags: [
|
||||||
cli.Flag{
|
cli.Flag{
|
||||||
flag: cli.FlagType.string
|
flag: cli.FlagType.string
|
||||||
|
@ -27,6 +28,7 @@ fn main() {
|
||||||
build.cmd(),
|
build.cmd(),
|
||||||
git.cmd(),
|
git.cmd(),
|
||||||
cron.cmd(),
|
cron.cmd(),
|
||||||
|
logs.cmd(),
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -6,12 +6,9 @@ import env
|
||||||
struct Config {
|
struct Config {
|
||||||
pub:
|
pub:
|
||||||
log_level string = 'WARN'
|
log_level string = 'WARN'
|
||||||
log_file string = 'vieter.log'
|
|
||||||
pkg_dir string
|
pkg_dir string
|
||||||
download_dir string
|
data_dir string
|
||||||
api_key string
|
api_key string
|
||||||
repos_dir string
|
|
||||||
repos_file string
|
|
||||||
default_arch string
|
default_arch string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,12 +1,9 @@
|
||||||
module server
|
module server
|
||||||
|
|
||||||
import web
|
import web
|
||||||
import git
|
|
||||||
import net.http
|
import net.http
|
||||||
import rand
|
|
||||||
import response { new_data_response, new_response }
|
import response { new_data_response, new_response }
|
||||||
|
import db
|
||||||
const repos_file = 'repos.json'
|
|
||||||
|
|
||||||
// get_repos returns the current list of repos.
|
// get_repos returns the current list of repos.
|
||||||
['/api/repos'; get]
|
['/api/repos'; get]
|
||||||
|
@ -15,37 +12,19 @@ fn (mut app App) get_repos() web.Result {
|
||||||
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
||||||
}
|
}
|
||||||
|
|
||||||
repos := rlock app.git_mutex {
|
repos := app.db.get_git_repos()
|
||||||
git.read_repos(app.conf.repos_file) or {
|
|
||||||
app.lerror('Failed to read repos file: $err.msg()')
|
|
||||||
|
|
||||||
return app.status(http.Status.internal_server_error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return app.json(http.Status.ok, new_data_response(repos))
|
return app.json(http.Status.ok, new_data_response(repos))
|
||||||
}
|
}
|
||||||
|
|
||||||
// get_single_repo returns the information for a single repo.
|
// get_single_repo returns the information for a single repo.
|
||||||
['/api/repos/:id'; get]
|
['/api/repos/:id'; get]
|
||||||
fn (mut app App) get_single_repo(id string) web.Result {
|
fn (mut app App) get_single_repo(id int) web.Result {
|
||||||
if !app.is_authorized() {
|
if !app.is_authorized() {
|
||||||
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
||||||
}
|
}
|
||||||
|
|
||||||
repos := rlock app.git_mutex {
|
repo := app.db.get_git_repo(id) or { return app.not_found() }
|
||||||
git.read_repos(app.conf.repos_file) or {
|
|
||||||
app.lerror('Failed to read repos file.')
|
|
||||||
|
|
||||||
return app.status(http.Status.internal_server_error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if id !in repos {
|
|
||||||
return app.not_found()
|
|
||||||
}
|
|
||||||
|
|
||||||
repo := repos[id]
|
|
||||||
|
|
||||||
return app.json(http.Status.ok, new_data_response(repo))
|
return app.json(http.Status.ok, new_data_response(repo))
|
||||||
}
|
}
|
||||||
|
@ -65,89 +44,40 @@ fn (mut app App) post_repo() web.Result {
|
||||||
params['arch'] = app.conf.default_arch
|
params['arch'] = app.conf.default_arch
|
||||||
}
|
}
|
||||||
|
|
||||||
new_repo := git.repo_from_params(params) or {
|
new_repo := db.git_repo_from_params(params) or {
|
||||||
return app.json(http.Status.bad_request, new_response(err.msg()))
|
return app.json(http.Status.bad_request, new_response(err.msg()))
|
||||||
}
|
}
|
||||||
|
|
||||||
id := rand.uuid_v4()
|
app.db.add_git_repo(new_repo)
|
||||||
|
|
||||||
mut repos := rlock app.git_mutex {
|
|
||||||
git.read_repos(app.conf.repos_file) or {
|
|
||||||
app.lerror('Failed to read repos file.')
|
|
||||||
|
|
||||||
return app.status(http.Status.internal_server_error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// We need to check for duplicates
|
|
||||||
for _, repo in repos {
|
|
||||||
if repo == new_repo {
|
|
||||||
return app.json(http.Status.bad_request, new_response('Duplicate repository.'))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
repos[id] = new_repo
|
|
||||||
|
|
||||||
lock app.git_mutex {
|
|
||||||
git.write_repos(app.conf.repos_file, &repos) or {
|
|
||||||
return app.status(http.Status.internal_server_error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return app.json(http.Status.ok, new_response('Repo added successfully.'))
|
return app.json(http.Status.ok, new_response('Repo added successfully.'))
|
||||||
}
|
}
|
||||||
|
|
||||||
// delete_repo removes a given repo from the server's list.
|
// delete_repo removes a given repo from the server's list.
|
||||||
['/api/repos/:id'; delete]
|
['/api/repos/:id'; delete]
|
||||||
fn (mut app App) delete_repo(id string) web.Result {
|
fn (mut app App) delete_repo(id int) web.Result {
|
||||||
if !app.is_authorized() {
|
if !app.is_authorized() {
|
||||||
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
||||||
}
|
}
|
||||||
|
|
||||||
mut repos := rlock app.git_mutex {
|
app.db.delete_git_repo(id)
|
||||||
git.read_repos(app.conf.repos_file) or {
|
|
||||||
app.lerror('Failed to read repos file.')
|
|
||||||
|
|
||||||
return app.status(http.Status.internal_server_error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if id !in repos {
|
|
||||||
return app.not_found()
|
|
||||||
}
|
|
||||||
|
|
||||||
repos.delete(id)
|
|
||||||
|
|
||||||
lock app.git_mutex {
|
|
||||||
git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) }
|
|
||||||
}
|
|
||||||
|
|
||||||
return app.json(http.Status.ok, new_response('Repo removed successfully.'))
|
return app.json(http.Status.ok, new_response('Repo removed successfully.'))
|
||||||
}
|
}
|
||||||
|
|
||||||
// patch_repo updates a repo's data with the given query params.
|
// patch_repo updates a repo's data with the given query params.
|
||||||
['/api/repos/:id'; patch]
|
['/api/repos/:id'; patch]
|
||||||
fn (mut app App) patch_repo(id string) web.Result {
|
fn (mut app App) patch_repo(id int) web.Result {
|
||||||
if !app.is_authorized() {
|
if !app.is_authorized() {
|
||||||
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
||||||
}
|
}
|
||||||
|
|
||||||
mut repos := rlock app.git_mutex {
|
app.db.update_git_repo(id, app.query)
|
||||||
git.read_repos(app.conf.repos_file) or {
|
|
||||||
app.lerror('Failed to read repos file.')
|
|
||||||
|
|
||||||
return app.status(http.Status.internal_server_error)
|
if 'arch' in app.query {
|
||||||
}
|
arch_objs := app.query['arch'].split(',').map(db.GitRepoArch{ value: it })
|
||||||
}
|
|
||||||
|
|
||||||
if id !in repos {
|
app.db.update_git_repo_archs(id, arch_objs)
|
||||||
return app.not_found()
|
|
||||||
}
|
|
||||||
|
|
||||||
repos[id].patch_from_params(app.query)
|
|
||||||
|
|
||||||
lock app.git_mutex {
|
|
||||||
git.write_repos(app.conf.repos_file, &repos) or { return app.server_error(500) }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return app.json(http.Status.ok, new_response('Repo updated successfully.'))
|
return app.json(http.Status.ok, new_response('Repo updated successfully.'))
|
||||||
|
|
|
@ -0,0 +1,136 @@
|
||||||
|
module server
|
||||||
|
|
||||||
|
import web
|
||||||
|
import net.http
|
||||||
|
import net.urllib
|
||||||
|
import response { new_data_response, new_response }
|
||||||
|
import db
|
||||||
|
import time
|
||||||
|
import os
|
||||||
|
import util
|
||||||
|
|
||||||
|
// get_logs returns all build logs in the database. A 'repo' query param can
|
||||||
|
// optionally be added to limit the list of build logs to that repository.
|
||||||
|
['/api/logs'; get]
|
||||||
|
fn (mut app App) get_logs() web.Result {
|
||||||
|
if !app.is_authorized() {
|
||||||
|
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
||||||
|
}
|
||||||
|
|
||||||
|
logs := if 'repo' in app.query {
|
||||||
|
app.db.get_build_logs_for_repo(app.query['repo'].int())
|
||||||
|
} else {
|
||||||
|
app.db.get_build_logs()
|
||||||
|
}
|
||||||
|
|
||||||
|
return app.json(http.Status.ok, new_data_response(logs))
|
||||||
|
}
|
||||||
|
|
||||||
|
// get_single_log returns the build log with the given id.
|
||||||
|
['/api/logs/:id'; get]
|
||||||
|
fn (mut app App) get_single_log(id int) web.Result {
|
||||||
|
if !app.is_authorized() {
|
||||||
|
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
||||||
|
}
|
||||||
|
|
||||||
|
log := app.db.get_build_log(id) or { return app.not_found() }
|
||||||
|
|
||||||
|
return app.json(http.Status.ok, new_data_response(log))
|
||||||
|
}
|
||||||
|
|
||||||
|
// get_log_content returns the actual build log file for the given id.
|
||||||
|
['/api/logs/:id/content'; get]
|
||||||
|
fn (mut app App) get_log_content(id int) web.Result {
|
||||||
|
if !app.is_authorized() {
|
||||||
|
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
||||||
|
}
|
||||||
|
|
||||||
|
log := app.db.get_build_log(id) or { return app.not_found() }
|
||||||
|
file_name := log.start_time.custom_format('YYYY-MM-DD_HH-mm-ss')
|
||||||
|
full_path := os.join_path(app.conf.data_dir, logs_dir_name, log.repo_id.str(), log.arch,
|
||||||
|
file_name)
|
||||||
|
|
||||||
|
return app.file(full_path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// parse_query_time unescapes an HTTP query parameter & tries to parse it as a
|
||||||
|
// time.Time struct.
|
||||||
|
fn parse_query_time(query string) ?time.Time {
|
||||||
|
unescaped := urllib.query_unescape(query) ?
|
||||||
|
t := time.parse(unescaped) ?
|
||||||
|
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
// post_log adds a new log to the database.
|
||||||
|
['/api/logs'; post]
|
||||||
|
fn (mut app App) post_log() web.Result {
|
||||||
|
if !app.is_authorized() {
|
||||||
|
return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse query params
|
||||||
|
start_time := parse_query_time(app.query['startTime']) or {
|
||||||
|
return app.json(http.Status.bad_request, new_response('Invalid or missing start time.'))
|
||||||
|
}
|
||||||
|
|
||||||
|
end_time := parse_query_time(app.query['endTime']) or {
|
||||||
|
return app.json(http.Status.bad_request, new_response('Invalid or missing end time.'))
|
||||||
|
}
|
||||||
|
|
||||||
|
if 'exitCode' !in app.query {
|
||||||
|
return app.json(http.Status.bad_request, new_response('Missing exit code.'))
|
||||||
|
}
|
||||||
|
|
||||||
|
exit_code := app.query['exitCode'].int()
|
||||||
|
|
||||||
|
if 'arch' !in app.query {
|
||||||
|
return app.json(http.Status.bad_request, new_response("Missing parameter 'arch'."))
|
||||||
|
}
|
||||||
|
|
||||||
|
arch := app.query['arch']
|
||||||
|
|
||||||
|
repo_id := app.query['repo'].int()
|
||||||
|
|
||||||
|
if !app.db.git_repo_exists(repo_id) {
|
||||||
|
return app.json(http.Status.bad_request, new_response('Unknown Git repo.'))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store log in db
|
||||||
|
log := db.BuildLog{
|
||||||
|
repo_id: repo_id
|
||||||
|
start_time: start_time
|
||||||
|
end_time: end_time
|
||||||
|
arch: arch
|
||||||
|
exit_code: exit_code
|
||||||
|
}
|
||||||
|
|
||||||
|
app.db.add_build_log(log)
|
||||||
|
|
||||||
|
repo_logs_dir := os.join_path(app.conf.data_dir, logs_dir_name, repo_id.str(), arch)
|
||||||
|
|
||||||
|
// Create the logs directory of it doesn't exist
|
||||||
|
if !os.exists(repo_logs_dir) {
|
||||||
|
os.mkdir_all(repo_logs_dir) or {
|
||||||
|
app.lerror("Couldn't create dir '$repo_logs_dir'.")
|
||||||
|
|
||||||
|
return app.json(http.Status.internal_server_error, new_response('An error occured while processing the request.'))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stream log contents to correct file
|
||||||
|
file_name := start_time.custom_format('YYYY-MM-DD_HH-mm-ss')
|
||||||
|
full_path := os.join_path_single(repo_logs_dir, file_name)
|
||||||
|
|
||||||
|
if length := app.req.header.get(.content_length) {
|
||||||
|
util.reader_to_file(mut app.reader, length.int(), full_path) or {
|
||||||
|
app.lerror('An error occured while receiving logs: $err.msg()')
|
||||||
|
|
||||||
|
return app.json(http.Status.internal_server_error, new_response('Failed to upload logs.'))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return app.status(http.Status.length_required)
|
||||||
|
}
|
||||||
|
|
||||||
|
return app.json(http.Status.ok, new_response('Logs added successfully.'))
|
||||||
|
}
|
|
@ -68,7 +68,7 @@ fn (mut app App) put_package(repo string) web.Result {
|
||||||
|
|
||||||
if length := app.req.header.get(.content_length) {
|
if length := app.req.header.get(.content_length) {
|
||||||
// Generate a random filename for the temp file
|
// Generate a random filename for the temp file
|
||||||
pkg_path = os.join_path_single(app.conf.download_dir, rand.uuid_v4())
|
pkg_path = os.join_path_single(app.repo.pkg_dir, rand.uuid_v4())
|
||||||
|
|
||||||
app.ldebug("Uploading $length bytes (${util.pretty_bytes(length.int())}) to '$pkg_path'.")
|
app.ldebug("Uploading $length bytes (${util.pretty_bytes(length.int())}) to '$pkg_path'.")
|
||||||
|
|
||||||
|
|
|
@ -5,8 +5,15 @@ import os
|
||||||
import log
|
import log
|
||||||
import repo
|
import repo
|
||||||
import util
|
import util
|
||||||
|
import db
|
||||||
|
|
||||||
const port = 8000
|
const (
|
||||||
|
port = 8000
|
||||||
|
log_file_name = 'vieter.log'
|
||||||
|
repo_dir_name = 'repos'
|
||||||
|
db_file_name = 'vieter.sqlite'
|
||||||
|
logs_dir_name = 'logs'
|
||||||
|
)
|
||||||
|
|
||||||
struct App {
|
struct App {
|
||||||
web.Context
|
web.Context
|
||||||
|
@ -14,8 +21,7 @@ pub:
|
||||||
conf Config [required; web_global]
|
conf Config [required; web_global]
|
||||||
pub mut:
|
pub mut:
|
||||||
repo repo.RepoGroupManager [required; web_global]
|
repo repo.RepoGroupManager [required; web_global]
|
||||||
// This is used to claim the file lock on the repos file
|
db db.VieterDb
|
||||||
git_mutex shared util.Dummy
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// server starts the web server & starts listening for requests
|
// server starts the web server & starts listening for requests
|
||||||
|
@ -30,11 +36,22 @@ pub fn server(conf Config) ? {
|
||||||
util.exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.')
|
util.exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
os.mkdir_all(conf.data_dir) or { util.exit_with_message(1, 'Failed to create data directory.') }
|
||||||
|
|
||||||
|
logs_dir := os.join_path_single(conf.data_dir, server.logs_dir_name)
|
||||||
|
|
||||||
|
if !os.exists(logs_dir) {
|
||||||
|
os.mkdir(os.join_path_single(conf.data_dir, server.logs_dir_name)) or {
|
||||||
|
util.exit_with_message(1, 'Failed to create logs directory.')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
mut logger := log.Log{
|
mut logger := log.Log{
|
||||||
level: log_level
|
level: log_level
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.set_full_logpath(conf.log_file)
|
log_file := os.join_path_single(conf.data_dir, server.log_file_name)
|
||||||
|
logger.set_full_logpath(log_file)
|
||||||
logger.log_to_console_too()
|
logger.log_to_console_too()
|
||||||
|
|
||||||
defer {
|
defer {
|
||||||
|
@ -43,19 +60,20 @@ pub fn server(conf Config) ? {
|
||||||
logger.close()
|
logger.close()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
repo_dir := os.join_path_single(conf.data_dir, server.repo_dir_name)
|
||||||
// This also creates the directories if needed
|
// This also creates the directories if needed
|
||||||
repo := repo.new(conf.repos_dir, conf.pkg_dir, conf.default_arch) or {
|
repo := repo.new(repo_dir, conf.pkg_dir, conf.default_arch) or {
|
||||||
logger.error(err.msg())
|
logger.error(err.msg())
|
||||||
exit(1)
|
exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
os.mkdir_all(conf.download_dir) or {
|
db_file := os.join_path_single(conf.data_dir, server.db_file_name)
|
||||||
util.exit_with_message(1, 'Failed to create download directory.')
|
db := db.init(db_file) or { util.exit_with_message(1, 'Failed to initialize database.') }
|
||||||
}
|
|
||||||
|
|
||||||
web.run(&App{
|
web.run(&App{
|
||||||
logger: logger
|
logger: logger
|
||||||
conf: conf
|
conf: conf
|
||||||
repo: repo
|
repo: repo
|
||||||
|
db: db
|
||||||
}, server.port)
|
}, server.port)
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,10 +1,8 @@
|
||||||
# This file contains settings used during development
|
# This file contains settings used during development
|
||||||
api_key = "test"
|
api_key = "test"
|
||||||
download_dir = "data/downloads"
|
data_dir = "data"
|
||||||
repos_dir = "data/repos"
|
|
||||||
pkg_dir = "data/pkgs"
|
pkg_dir = "data/pkgs"
|
||||||
log_level = "DEBUG"
|
log_level = "DEBUG"
|
||||||
repos_file = "data/repos.json"
|
|
||||||
default_arch = "x86_64"
|
default_arch = "x86_64"
|
||||||
|
|
||||||
address = "http://localhost:8000"
|
address = "http://localhost:8000"
|
||||||
|
|
Loading…
Reference in New Issue