Merge pull request 'multi-repo & multi-arch support' (#112) from multi-arch-repos-v2 into dev
Reviewed-on: Chewing_Bever/vieter#112
commit e074af64da
@@ -1,4 +1,3 @@
-# top-most EditorConfig file
 root = true
 
 # Unix-style newlines with a newline ending every file
@@ -3,10 +3,10 @@ branches: [dev]
 
 pipeline:
   build:
-    image: 'archlinux:latest'
+    image: 'archlinux:base-devel'
     commands:
       # Update packages
-      - pacman -Syu --needed --noconfirm base-devel
+      - pacman -Syu
       # Create non-root user to perform build & switch to their home
       - groupadd -g 1000 builder
       - useradd -mg builder builder
@@ -17,9 +17,9 @@ pipeline:
       - makepkg -s --noconfirm --needed
 
   publish:
-    image: 'archlinux:latest'
+    image: 'curlimages/curl:latest'
    commands:
       # Publish the package
-      - 'for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $VIETER_API_KEY" https://arch.r8r.be/publish; done'
+      - 'for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $VIETER_API_KEY" https://arch.r8r.be/vieter/publish; done'
     secrets:
       - vieter_api_key
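
The publish step now posts to a repository-scoped endpoint ('vieter' being the target repo). As a rough V sketch of what the curl loop does for a single package file (the file name and error handling here are illustrative only):

```v
// Hypothetical single-file equivalent of the curl publish loop above.
import net.http
import os

fn main() {
	pkg := 'vieter-0.2.0-1-x86_64.pkg.tar.zst' // made-up file name
	data := os.read_file(pkg) or { panic(err) }

	mut req := http.new_request(http.Method.post, 'https://arch.r8r.be/vieter/publish',
		data) or { panic(err) }
	req.add_custom_header('X-API-KEY', os.getenv('VIETER_API_KEY')) or { panic(err) }

	res := req.do() or { panic(err) }
	println(res.text)
}
```
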
@@ -22,16 +22,21 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 * Very basic build system
     * Build is triggered by separate cron container
     * Packages build on cron container's system
+    * A HEAD request is used to determine whether a package should be rebuilt
+      or not
     * Hardcoded planning of builds
     * Builds are sequential
 * API for managing Git repositories to build
 * CLI to list, add & remove Git repos to build
 * Published packages on my Vieter instance
+* Support for multiple repositories
+* Support for multiple architectures per repository
 
 ## Fixed
 
 * Each package can now only have one version in the repository at once
   (required by Pacman)
+* Packages with unknown fields in .PKGINFO are now allowed
 
 ## [0.1.0](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.1.0)
 
@@ -31,7 +31,7 @@ RUN if [ -n "${CI_COMMIT_SHA}" ]; then \
 FROM busybox:1.35.0
 
 ENV PATH=/bin \
-    VIETER_REPO_DIR=/data/repo \
+    VIETER_REPOS_DIR=/data/repos \
     VIETER_PKG_DIR=/data/pkgs \
     VIETER_DOWNLOAD_DIR=/data/downloads \
     VIETER_REPOS_FILE=/data/repos.json
@@ -4,11 +4,14 @@ import docker
 import encoding.base64
 import time
 import git
+import os
 
 const container_build_dir = '/build'
 
 const build_image_repo = 'vieter-build'
 
+const base_image = 'archlinux:latest'
+
 fn create_build_image() ?string {
	commands := [
		// Update repos & install required packages
@@ -26,7 +29,7 @@ fn create_build_image() ?string {
	cmds_str := base64.encode_str(commands.join('\n'))
 
	c := docker.NewContainer{
-		image: 'archlinux:latest'
+		image: build.base_image
		env: ['BUILD_SCRIPT=$cmds_str']
		entrypoint: ['/bin/sh', '-c']
		cmd: ['echo \$BUILD_SCRIPT | base64 -d | /bin/sh -e']
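
For reference, the BUILD_SCRIPT round trip this container definition relies on can be sketched in a few lines of V (the command list is hypothetical):

```v
// The command list is joined with newlines, base64-encoded into an env
// var, and the container's entrypoint decodes & pipes it into a shell.
import encoding.base64

fn main() {
	commands := ['echo hello', 'uname -m'] // made-up commands
	cmds_str := base64.encode_str(commands.join('\n'))

	println(cmds_str)
	// The container effectively runs:
	//   echo $BUILD_SCRIPT | base64 -d | /bin/sh -e
	println(base64.decode_str(cmds_str))
}
```
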
@@ -60,27 +63,34 @@ fn create_build_image() ?string {
 }
 
 fn build(conf Config) ? {
-	// We get the repos list from the Vieter instance
-	repos := git.get_repos(conf.address, conf.api_key) ?
+	build_arch := os.uname().machine
+
+	// We get the repos map from the Vieter instance
+	repos_map := git.get_repos(conf.address, conf.api_key) ?
+
+	// We filter out any repos that aren't allowed to be built on this
+	// architecture
+	filtered_repos := repos_map.keys().map(repos_map[it]).filter(it.arch.contains(build_arch))
 
	// No point in doing work if there's no repos present
-	if repos.len == 0 {
+	if filtered_repos.len == 0 {
		return
	}
 
	// First, we create a base image which has updated repos n stuff
	image_id := create_build_image() ?
 
-	for _, repo in repos {
+	for repo in filtered_repos {
		// TODO what to do with PKGBUILDs that build multiple packages?
		commands := [
			'git clone --single-branch --depth 1 --branch $repo.branch $repo.url repo',
			'cd repo',
			'makepkg --nobuild --nodeps',
			'source PKGBUILD',
-			// The build container checks whether the package is already present on the server
-			'curl --head --fail $conf.address/\$pkgname-\$pkgver-\$pkgrel-\$(uname -m).pkg.tar.zst && exit 0',
-			'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $conf.address/publish; done',
+			// The build container checks whether the package is already
+			// present on the server
+			'curl --head --fail $conf.address/$repo.repo/$build_arch/\$pkgname-\$pkgver-\$pkgrel-${build_arch}.pkg.tar.zst && exit 0',
+			'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $conf.address/$repo.repo/publish; done',
		]
 
		// We convert the list of commands into a base64 string, which then gets
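
A minimal sketch of the new filtering step, using a hypothetical two-entry repos map; GitRepo is reduced here to the fields this diff actually touches:

```v
struct GitRepo {
	url    string
	branch string
	repo   string
	arch   []string
}

fn main() {
	build_arch := 'x86_64' // what os.uname().machine would return
	repos_map := {
		'a1': GitRepo{
			url: 'https://example.com/a.git'
			branch: 'main'
			repo: 'vieter'
			arch: ['x86_64']
		}
		'b2': GitRepo{
			url: 'https://example.com/b.git'
			branch: 'main'
			repo: 'vieter'
			arch: ['aarch64']
		}
	}

	// Same expression as in build(): keep only repos whose arch list
	// contains the builder's architecture
	filtered := repos_map.keys().map(repos_map[it]).filter(it.arch.contains(build_arch))
	println(filtered.len) // 1; only the x86_64 repo survives
}
```
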
src/git/cli.v:
@@ -26,14 +26,14 @@ pub fn cmd() cli.Command {
			},
			cli.Command{
				name: 'add'
-				required_args: 2
-				usage: 'url branch arch...'
+				required_args: 4
+				usage: 'url branch repo arch...'
				description: 'Add a new repository.'
				execute: fn (cmd cli.Command) ? {
					config_file := cmd.flags.get_string('config-file') ?
					conf := env.load<Config>(config_file) ?
 
-					add(conf, cmd.args[0], cmd.args[1], cmd.args[2..]) ?
+					add(conf, cmd.args[0], cmd.args[1], cmd.args[2], cmd.args[3..]) ?
				}
			},
			cli.Command{
@@ -48,46 +48,100 @@ pub fn cmd() cli.Command {
					remove(conf, cmd.args[0]) ?
				}
			},
+			cli.Command{
+				name: 'edit'
+				required_args: 1
+				usage: 'id'
+				description: 'Edit the repository that matches the given ID prefix.'
+				flags: [
+					cli.Flag{
+						name: 'url'
+						description: 'URL of the Git repository.'
+						flag: cli.FlagType.string
+					},
+					cli.Flag{
+						name: 'branch'
+						description: 'Branch of the Git repository.'
+						flag: cli.FlagType.string
+					},
+					cli.Flag{
+						name: 'repo'
+						description: 'Repo to publish builds to.'
+						flag: cli.FlagType.string
+					},
+					cli.Flag{
+						name: 'arch'
+						description: 'Comma-separated list of architectures to build on.'
+						flag: cli.FlagType.string
+					},
+				]
+				execute: fn (cmd cli.Command) ? {
+					config_file := cmd.flags.get_string('config-file') ?
+					conf := env.load<Config>(config_file) ?
+
+					found := cmd.flags.get_all_found()
+
+					mut params := map[string]string{}
+
+					for f in found {
+						if f.name != 'config-file' {
+							params[f.name] = f.get_string() ?
+						}
+					}
+
+					patch(conf, cmd.args[0], params) ?
+				}
+			},
		]
	}
 }
 
+fn get_repo_id_by_prefix(conf Config, id_prefix string) ?string {
+	repos := get_repos(conf.address, conf.api_key) ?
+
+	mut res := []string{}
+
+	for id, _ in repos {
+		if id.starts_with(id_prefix) {
+			res << id
+		}
+	}
+
+	if res.len == 0 {
+		return error('No repo found for given prefix.')
+	}
+
+	if res.len > 1 {
+		return error('Multiple repos found for given prefix.')
+	}
+
+	return res[0]
+}
+
 fn list(conf Config) ? {
	repos := get_repos(conf.address, conf.api_key) ?
 
	for id, details in repos {
-		println('${id[..8]}\t$details.url\t$details.branch\t$details.arch')
+		println('${id[..8]}\t$details.url\t$details.branch\t$details.repo\t$details.arch')
	}
 }
 
-fn add(conf Config, url string, branch string, arch []string) ? {
-	res := add_repo(conf.address, conf.api_key, url, branch, arch) ?
+fn add(conf Config, url string, branch string, repo string, arch []string) ? {
+	res := add_repo(conf.address, conf.api_key, url, branch, repo, arch) ?
 
	println(res.message)
 }
 
 fn remove(conf Config, id_prefix string) ? {
-	repos := get_repos(conf.address, conf.api_key) ?
-
-	mut to_remove := []string{}
-
-	for id, _ in repos {
-		if id.starts_with(id_prefix) {
-			to_remove << id
-		}
-	}
-
-	if to_remove.len == 0 {
-		eprintln('No repo found for given prefix.')
-		exit(1)
-	}
-
-	if to_remove.len > 1 {
-		eprintln('Multiple repos found for given prefix.')
-		exit(1)
-	}
-
-	res := remove_repo(conf.address, conf.api_key, to_remove[0]) ?
+	id := get_repo_id_by_prefix(conf, id_prefix) ?
+	res := remove_repo(conf.address, conf.api_key, id) ?
 
	println(res.message)
 }
+
+fn patch(conf Config, id_prefix string, params map[string]string) ? {
+	id := get_repo_id_by_prefix(conf, id_prefix) ?
+	res := patch_repo(conf.address, conf.api_key, id, params) ?
+
+	println(res.message)
+}
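
The prefix matching that get_repo_id_by_prefix introduces can be tried in isolation; the IDs below are made-up placeholders:

```v
fn main() {
	// Hypothetical repo IDs as returned by the API
	ids := ['40ed0fd8-0c7f-4a44-ac61-4ed5a9e2e204', '41b0ccb0-30c5-4fc1-a2cd-e9f5fd3e0bf1']

	for prefix in ['40', '4', 'ff'] {
		matches := ids.filter(it.starts_with(prefix))

		match matches.len {
			0 { eprintln('No repo found for given prefix.') }
			1 { println(matches[0]) }
			else { eprintln('Multiple repos found for given prefix.') }
		}
	}
}
```
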
@@ -4,36 +4,58 @@ import json
 import response { Response }
 import net.http
 
-// get_repos returns the current list of repos.
-pub fn get_repos(address string, api_key string) ?map[string]GitRepo {
-	mut req := http.new_request(http.Method.get, '$address/api/repos', '') ?
+fn send_request<T>(method http.Method, address string, url string, api_key string, params map[string]string) ?Response<T> {
+	mut full_url := '$address$url'
+
+	if params.len > 0 {
+		params_str := params.keys().map('$it=${params[it]}').join('&')
+
+		full_url = '$full_url?$params_str'
+	}
+
+	mut req := http.new_request(method, full_url, '') ?
	req.add_custom_header('X-API-Key', api_key) ?
 
	res := req.do() ?
-	data := json.decode(Response<map[string]GitRepo>, res.text) ?
+	data := json.decode(Response<T>, res.text) ?
+
+	return data
+}
+
+// get_repos returns the current list of repos.
+pub fn get_repos(address string, api_key string) ?map[string]GitRepo {
+	data := send_request<map[string]GitRepo>(http.Method.get, address, '/api/repos', api_key,
+		{}) ?
 
	return data.data
 }
 
 // add_repo adds a new repo to the server.
-pub fn add_repo(address string, api_key string, url string, branch string, arch []string) ?Response<string> {
-	mut req := http.new_request(http.Method.post, '$address/api/repos?url=$url&branch=$branch&arch=${arch.join(',')}',
-		'') ?
-	req.add_custom_header('X-API-Key', api_key) ?
-
-	res := req.do() ?
-	data := json.decode(Response<string>, res.text) ?
+pub fn add_repo(address string, api_key string, url string, branch string, repo string, arch []string) ?Response<string> {
+	params := {
+		'url': url
+		'branch': branch
+		'repo': repo
+		'arch': arch.join(',')
+	}
+	data := send_request<string>(http.Method.post, address, '/api/repos', api_key, params) ?
 
	return data
 }
 
 // remove_repo removes the repo with the given ID from the server.
 pub fn remove_repo(address string, api_key string, id string) ?Response<string> {
-	mut req := http.new_request(http.Method.delete, '$address/api/repos/$id', '') ?
-	req.add_custom_header('X-API-Key', api_key) ?
-
-	res := req.do() ?
-	data := json.decode(Response<string>, res.text) ?
+	data := send_request<string>(http.Method.delete, address, '/api/repos/$id', api_key,
+		{}) ?
+
+	return data
+}
+
+// patch_repo sends a PATCH request to the given repo with the params as
+// payload.
+pub fn patch_repo(address string, api_key string, id string, params map[string]string) ?Response<string> {
+	data := send_request<string>(http.Method.patch, address, '/api/repos/$id', api_key,
+		params) ?
 
	return data
 }
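
Since send_request<T> now centralises URL building, the X-API-Key header and JSON decoding, each endpoint wrapper reduces to a single call. As a sketch of how a further wrapper could look inside this module (the GET /api/repos/:id endpoint shown is hypothetical, not part of this PR):

```v
// Hypothetical wrapper; only demonstrates the send_request<T> pattern.
pub fn get_repo(address string, api_key string, id string) ?GitRepo {
	data := send_request<GitRepo>(http.Method.get, address, '/api/repos/$id', api_key,
		{}) ?

	return data.data
}
```
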
@@ -12,6 +12,8 @@ pub mut:
	// On which architectures the package is allowed to be built. In reality,
	// this controls which builders will periodically build the image.
	arch []string
+	// Which repo the builder should publish packages to
+	repo string
 }
 
 // patch_from_params patches a GitRepo from a map[string]string, usually
@@ -72,14 +72,11 @@ fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo {
			'pkgbase' { pkg_info.base = value }
			'pkgver' { pkg_info.version = value }
			'pkgdesc' { pkg_info.description = value }
-			'csize' { continue }
			'size' { pkg_info.size = value.int() }
			'url' { pkg_info.url = value }
			'arch' { pkg_info.arch = value }
			'builddate' { pkg_info.build_date = value.int() }
			'packager' { pkg_info.packager = value }
-			'md5sum' { continue }
-			'sha256sum' { continue }
			'pgpsig' { pkg_info.pgpsig = value }
			'pgpsigsize' { pkg_info.pgpsigsize = value.int() }
			// Array values
@@ -92,16 +89,19 @@ fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo {
			'optdepend' { pkg_info.optdepends << value }
			'makedepend' { pkg_info.makedepends << value }
			'checkdepend' { pkg_info.checkdepends << value }
-			else { return error("Invalid key '$key'.") }
+			// There's no real point in trying to exactly manage which fields
+			// are allowed, so we just ignore any we don't explicitely need for
+			// in the db file
+			else { continue }
		}
	}
 
	return pkg_info
 }
 
-// read_pkg extracts the file list & .PKGINFO contents from an archive
-// NOTE: this command currently only supports zstd-compressed tarballs
-pub fn read_pkg(pkg_path string) ?Pkg {
+// read_pkg_archive extracts the file list & .PKGINFO contents from an archive
+// NOTE: this command only supports zstd- & gzip-compressed tarballs
+pub fn read_pkg_archive(pkg_path string) ?Pkg {
	if !os.is_file(pkg_path) {
		return error("'$pkg_path' doesn't exist or isn't a file.")
	}
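
The effect of the new else branch is that parsing keeps going over unknown keys instead of failing. A self-contained sketch with made-up input:

```v
fn main() {
	// Hypothetical .PKGINFO lines, including an unknown field
	lines := ['pkgname = mypkg', 'pkgver = 1.0.0-1', 'newfangledfield = 42']

	mut name := ''
	mut version := ''

	for line in lines {
		parts := line.split(' = ')
		key := parts[0]
		value := parts[1]

		match key {
			'pkgname' { name = value }
			'pkgver' { version = value }
			// Unknown fields no longer produce an error
			else { continue }
		}
	}

	println('$name $version') // mypkg 1.0.0-1
}
```
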
src/repo/repo.v:
@@ -4,15 +4,20 @@ import os
 import package
 import util
 
-// This struct manages a single repository.
-pub struct Repo {
+// Manages a group of repositories. Each repository contains one or more
+// arch-repositories, each of which represent a specific architecture.
+pub struct RepoGroupManager {
 mut:
	mutex shared util.Dummy
 pub:
-	// Where to store repository files
-	repo_dir string [required]
-	// Where to find packages; packages are expected to all be in the same directory
+	// Where to store repositories' files
+	repos_dir string [required]
+	// Where packages are stored; each architecture gets its own subdirectory
	pkg_dir string [required]
+	// The default architecture to use for a repository. In reality, this value
+	// is only required when a package with architecture "any" is added as the
+	// first package of a repository.
+	default_arch string [required]
 }
 
 pub struct RepoAddResult {
@@ -21,28 +26,33 @@ pub:
	pkg &package.Pkg [required]
 }
 
-// new creates a new Repo & creates the directories as needed
-pub fn new(repo_dir string, pkg_dir string) ?Repo {
-	if !os.is_dir(repo_dir) {
-		os.mkdir_all(repo_dir) or { return error('Failed to create repo directory: $err.msg') }
+// new creates a new RepoGroupManager & creates the directories as needed
+pub fn new(repos_dir string, pkg_dir string, default_arch string) ?RepoGroupManager {
+	if !os.is_dir(repos_dir) {
+		os.mkdir_all(repos_dir) or { return error('Failed to create repos directory: $err.msg') }
	}
 
	if !os.is_dir(pkg_dir) {
		os.mkdir_all(pkg_dir) or { return error('Failed to create package directory: $err.msg') }
	}
 
-	return Repo{
-		repo_dir: repo_dir
+	return RepoGroupManager{
+		repos_dir: repos_dir
		pkg_dir: pkg_dir
+		default_arch: default_arch
	}
 }
 
-// add_from_path adds a package from an arbitrary path & moves it into the pkgs
-// directory if necessary.
-pub fn (r &Repo) add_from_path(pkg_path string) ?RepoAddResult {
-	pkg := package.read_pkg(pkg_path) or { return error('Failed to read package file: $err.msg') }
+// add_pkg_from_path adds a package to a given repo, given the file path to the
+// pkg archive. It's a wrapper around add_pkg_in_repo that parses the archive
+// file, passes the result to add_pkg_in_repo, and moves the archive to
+// r.pkg_dir if it was successfully added.
+pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string) ?RepoAddResult {
+	pkg := package.read_pkg_archive(pkg_path) or {
+		return error('Failed to read package file: $err.msg')
+	}
 
-	added := r.add(pkg) ?
+	added := r.add_pkg_in_repo(repo, pkg) ?
 
	// If the add was successful, we move the file to the packages directory
	if added {
@@ -60,9 +70,55 @@ pub fn (r &Repo) add_from_path(pkg_path string) ?RepoAddResult {
	}
 }
 
-// add adds a given Pkg to the repository
-fn (r &Repo) add(pkg &package.Pkg) ?bool {
-	pkg_dir := r.pkg_path(pkg)
+// add_pkg_in_repo adds a package to a given repo. This function is responsible
+// for inspecting the package architecture. If said architecture is 'any', the
+// package is added to each arch-repository within the given repo. A package of
+// architecture 'any' will always be added to the arch-repo defined by
+// r.default_arch. If this arch-repo doesn't exist yet, it will be created. If
+// the architecture isn't 'any', the package is only added to the specific
+// architecture.
+fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ?bool {
+	// A package without arch 'any' can be handled without any further checks
+	if pkg.info.arch != 'any' {
+		return r.add_pkg_in_arch_repo(repo, pkg.info.arch, pkg)
+	}
+
+	repo_dir := os.join_path_single(r.repos_dir, repo)
+
+	mut arch_repos := []string{}
+
+	// If this is the first package that's added to the repo, the directory
+	// won't exist yet
+	if os.exists(repo_dir) {
+		// We get a listing of all currently present arch-repos in the given repo
+		arch_repos = os.ls(repo_dir) ?.filter(os.is_dir(os.join_path_single(repo_dir,
+			it)))
+	}
+
+	// The default_arch should always be updated when a package with arch 'any'
+	// is added.
+	if !arch_repos.contains(r.default_arch) {
+		arch_repos << r.default_arch
+	}
+
+	mut added := false
+
+	// We add the package to each repository. If any of the repositories
+	// return true, the result of the function is also true.
+	for arch in arch_repos {
+		added = added || r.add_pkg_in_arch_repo(repo, arch, pkg) ?
+	}
+
+	return added
+}
+
+// add_pkg_in_arch_repo is the function that actually adds a package to a given
+// arch-repo. It records the package's data in the arch-repo's desc & files
+// files, and afterwards updates the db & files archives to reflect these
+// changes. The function returns false if the package was already present in
+// the repo, and true otherwise.
+fn (r &RepoGroupManager) add_pkg_in_arch_repo(repo string, arch string, pkg &package.Pkg) ?bool {
+	pkg_dir := os.join_path(r.repos_dir, repo, arch, '$pkg.info.name-$pkg.info.version')
 
	// We can't add the same package twice
	if os.exists(pkg_dir) {
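
To illustrate the 'any' fan-out: assuming a repo that already has x86_64 & aarch64 arch-repos, the same package gets a desc/files directory in each of them (all paths below are hypothetical):

```v
import os

fn main() {
	repos_dir := 'data/repos'
	repo := 'vieter'
	arch_repos := ['x86_64', 'aarch64']

	for arch in arch_repos {
		// One directory per arch-repo for the same 'any' package
		println(os.join_path(repos_dir, repo, arch, 'mypkg-1.0.0-1'))
	}
	// data/repos/vieter/x86_64/mypkg-1.0.0-1
	// data/repos/vieter/aarch64/mypkg-1.0.0-1
}
```
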
@@ -70,9 +126,9 @@ fn (r &Repo) add(pkg &package.Pkg) ?bool {
	}
 
	// We remove the older package version first, if present
-	r.remove(pkg.info.name, false) ?
+	r.remove_pkg_from_arch_repo(repo, arch, pkg, false) ?
 
-	os.mkdir(pkg_dir) or { return error('Failed to create package directory.') }
+	os.mkdir_all(pkg_dir) or { return error('Failed to create package directory.') }
 
	os.write_file(os.join_path_single(pkg_dir, 'desc'), pkg.to_desc()) or {
		os.rmdir_all(pkg_dir) ?
@@ -85,27 +141,39 @@ fn (r &Repo) add(pkg &package.Pkg) ?bool {
		return error('Failed to write files file.')
	}
 
-	r.sync() ?
+	r.sync(repo, arch) ?
 
	return true
 }
 
-// remove removes a package from the database. It returns false if the package
-// wasn't present in the database.
-fn (r &Repo) remove(pkg_name string, sync bool) ?bool {
+// remove_pkg_from_arch_repo removes a package from an arch-repo's database. It
+// returns false if the package wasn't present in the database. It also
+// optionally re-syncs the repo archives.
+fn (r &RepoGroupManager) remove_pkg_from_arch_repo(repo string, arch string, pkg &package.Pkg, sync bool) ?bool {
+	repo_dir := os.join_path(r.repos_dir, repo, arch)
+
+	// If the repository doesn't exist yet, the result is automatically false
+	if !os.exists(repo_dir) {
+		return false
+	}
+
	// We iterate over every directory in the repo dir
-	for d in os.ls(r.repo_dir) ? {
+	// TODO filter so we only check directories
+	for d in os.ls(repo_dir) ? {
+		// Because a repository only allows a single version of each package,
+		// we need only compare whether the name of the package is the same,
+		// not the version.
		name := d.split('-')#[..-2].join('-')
 
-		if name == pkg_name {
+		if name == pkg.info.name {
			// We lock the mutex here to prevent other routines from creating a
-			// new archive while we removed an entry
+			// new archive while we remove an entry
			lock r.mutex {
-				os.rmdir_all(os.join_path_single(r.repo_dir, d)) ?
+				os.rmdir_all(os.join_path_single(repo_dir, d)) ?
			}
 
			if sync {
-				r.sync() ?
+				r.sync(repo, arch) ?
			}
 
			return true
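
The name comparison relies on V's gated negative slicing: dropping the last two dash-separated parts (pkgver & pkgrel) of a directory name recovers the package name, even when the name itself contains dashes. A standalone sketch with hypothetical directory names:

```v
fn main() {
	// Directory names follow $pkgname-$pkgver-$pkgrel
	dirs := ['mypkg-1.0.0-1', 'my-dashed-pkg-2.3.1-2']

	for d in dirs {
		// '#[..-2]' is a gated slice: it drops the last two elements
		// (version & release) without panicking on short inputs
		name := d.split('-')#[..-2].join('-')
		println(name)
		// mypkg
		// my-dashed-pkg
	}
}
```
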
@@ -114,8 +182,3 @@ fn (r &Repo) remove(pkg_name string, sync bool) ?bool {
 
	return false
 }
-
-// Returns the path where the given package's desc & files files are stored
-fn (r &Repo) pkg_path(pkg &package.Pkg) string {
-	return os.join_path(r.repo_dir, '$pkg.info.name-$pkg.info.version')
-}
@@ -30,8 +30,9 @@ fn archive_add_entry(archive &C.archive, entry &C.archive_entry, file_path &stri
 }
 
 // Re-generate the repo archive files
-fn (r &Repo) sync() ? {
-	// TODO also write files archive
+fn (r &RepoGroupManager) sync(repo string, arch string) ? {
+	subrepo_path := os.join_path(r.repos_dir, repo, arch)
+
	lock r.mutex {
		a_db := C.archive_write_new()
		a_files := C.archive_write_new()
@@ -44,18 +45,18 @@ fn (r &Repo) sync() ? {
		C.archive_write_add_filter_gzip(a_files)
		C.archive_write_set_format_pax_restricted(a_files)
 
-		db_path := os.join_path_single(r.repo_dir, 'vieter.db.tar.gz')
-		files_path := os.join_path_single(r.repo_dir, 'vieter.files.tar.gz')
+		db_path := os.join_path_single(subrepo_path, '${repo}.db.tar.gz')
+		files_path := os.join_path_single(subrepo_path, '${repo}.files.tar.gz')
 
		C.archive_write_open_filename(a_db, &char(db_path.str))
		C.archive_write_open_filename(a_files, &char(files_path.str))
 
		// Iterate over each directory
-		for d in os.ls(r.repo_dir) ?.filter(os.is_dir(os.join_path_single(r.repo_dir,
+		for d in os.ls(subrepo_path) ?.filter(os.is_dir(os.join_path_single(subrepo_path,
			it))) {
			// desc
			mut inner_path := os.join_path_single(d, 'desc')
-			mut actual_path := os.join_path_single(r.repo_dir, inner_path)
+			mut actual_path := os.join_path_single(subrepo_path, inner_path)
 
			archive_add_entry(a_db, entry, actual_path, inner_path)
			archive_add_entry(a_files, entry, actual_path, inner_path)
@@ -64,7 +65,7 @@ fn (r &Repo) sync() ? {
 
			// files
			inner_path = os.join_path_single(d, 'files')
-			actual_path = os.join_path_single(r.repo_dir, inner_path)
+			actual_path = os.join_path_single(subrepo_path, inner_path)
 
			archive_add_entry(a_files, entry, actual_path, inner_path)
 
@@ -10,8 +10,9 @@ pub:
	pkg_dir string
	download_dir string
	api_key string
-	repo_dir string
+	repos_dir string
	repos_file string
+	default_arch string
 }
 
 // cmd returns the cli submodule that handles starting the server
@@ -16,15 +16,20 @@ pub fn (mut app App) healthcheck() web.Result {
	return app.json(http.Status.ok, new_response('Healthy.'))
 }
 
-// get_root handles a GET request for a file on the root
-['/:filename'; get; head]
-fn (mut app App) get_root(filename string) web.Result {
+['/:repo/:arch/:filename'; get; head]
+fn (mut app App) get_repo_file(repo string, arch string, filename string) web.Result {
	mut full_path := ''
 
-	if filename.ends_with('.db') || filename.ends_with('.files') {
-		full_path = os.join_path_single(app.repo.repo_dir, '${filename}.tar.gz')
-	} else if filename.ends_with('.db.tar.gz') || filename.ends_with('.files.tar.gz') {
-		full_path = os.join_path_single(app.repo.repo_dir, '$filename')
+	db_exts := ['.db', '.files', '.db.tar.gz', '.files.tar.gz']
+
+	if db_exts.any(filename.ends_with(it)) {
+		full_path = os.join_path(app.repo.repos_dir, repo, arch, filename)
+
+		// repo-add does this using symlinks, but we just change the requested
+		// path
+		if !full_path.ends_with('.tar.gz') {
+			full_path += '.tar.gz'
+		}
	} else {
		full_path = os.join_path_single(app.repo.pkg_dir, filename)
	}
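
The resulting lookup behaviour can be sketched standalone: database filenames are routed (and '.tar.gz'-suffixed where needed) into the per-repo, per-arch directory, while anything else is served from the shared package directory (the directories below are the dev-config defaults; the package name is made up):

```v
import os

fn main() {
	db_exts := ['.db', '.files', '.db.tar.gz', '.files.tar.gz']

	for filename in ['vieter.db', 'vieter.files.tar.gz', 'mypkg-1.0.0-1-x86_64.pkg.tar.zst'] {
		mut full_path := ''

		if db_exts.any(filename.ends_with(it)) {
			full_path = os.join_path('data/repos', 'vieter', 'x86_64', filename)

			// Clients may request either the short or the full archive name
			if !full_path.ends_with('.tar.gz') {
				full_path += '.tar.gz'
			}
		} else {
			full_path = os.join_path_single('data/pkgs', filename)
		}

		println(full_path)
		// data/repos/vieter/x86_64/vieter.db.tar.gz
		// data/repos/vieter/x86_64/vieter.files.tar.gz
		// data/pkgs/mypkg-1.0.0-1-x86_64.pkg.tar.zst
	}
}
```
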
@@ -41,8 +46,8 @@ fn (mut app App) get_root(filename string) web.Result {
	return app.file(full_path)
 }
 
-['/publish'; post]
-fn (mut app App) put_package() web.Result {
+['/:repo/publish'; post]
+fn (mut app App) put_package(repo string) web.Result {
	if !app.is_authorized() {
		return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
	}
@@ -53,10 +58,6 @@ fn (mut app App) put_package() web.Result {
		// Generate a random filename for the temp file
		pkg_path = os.join_path_single(app.conf.download_dir, rand.uuid_v4())
 
-		for os.exists(pkg_path) {
-			pkg_path = os.join_path_single(app.conf.download_dir, rand.uuid_v4())
-		}
-
		app.ldebug("Uploading $length bytes (${util.pretty_bytes(length.int())}) to '$pkg_path'.")
 
		// This is used to time how long it takes to upload a file
@@ -77,22 +78,23 @@ fn (mut app App) put_package() web.Result {
		return app.status(http.Status.length_required)
	}
 
-	res := app.repo.add_from_path(pkg_path) or {
+	res := app.repo.add_pkg_from_path(repo, pkg_path) or {
		app.lerror('Error while adding package: $err.msg')
 
		os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg") }
 
		return app.json(http.Status.internal_server_error, new_response('Failed to add package.'))
	}
 
	if !res.added {
		os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg") }
 
-		app.lwarn("Duplicate package '$res.pkg.full_name()'.")
+		app.lwarn("Duplicate package '$res.pkg.full_name()' in repo '$repo ($res.pkg.info.arch)'.")
 
		return app.json(http.Status.bad_request, new_response('File already exists.'))
	}
 
-	app.linfo("Added '$res.pkg.full_name()' to repository.")
+	app.linfo("Added '$res.pkg.full_name()' to repo '$repo ($res.pkg.info.arch)'.")
 
	return app.json(http.Status.ok, new_response('Package added successfully.'))
 }
@@ -13,13 +13,18 @@ struct App {
 pub:
	conf Config [required; web_global]
 pub mut:
-	repo repo.Repo [required; web_global]
+	repo repo.RepoGroupManager [required; web_global]
	// This is used to claim the file lock on the repos file
	git_mutex shared util.Dummy
 }
 
 // server starts the web server & starts listening for requests
 pub fn server(conf Config) ? {
+	// Prevent using 'any' as the default arch
+	if conf.default_arch == 'any' {
+		util.exit_with_message(1, "'any' is not allowed as the value for default_arch.")
+	}
+
	// Configure logger
	log_level := log.level_from_tag(conf.log_level) or {
		util.exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.')
@@ -39,7 +44,7 @@ pub fn server(conf Config) ? {
	}
 
	// This also creates the directories if needed
-	repo := repo.new(conf.repo_dir, conf.pkg_dir) or {
+	repo := repo.new(conf.repos_dir, conf.pkg_dir, conf.default_arch) or {
		logger.error(err.msg)
		exit(1)
	}
test.py:
@@ -46,7 +46,7 @@ def create_random_pkginfo(words, name_min_len, name_max_len):
         "pkgname": name,
         "pkgbase": name,
         "pkgver": ver,
-        "arch": "x86_64"
+        "arch": "any"
     }
 
     return "\n".join(f"{key} = {value}" for key, value in data.items())
@@ -97,7 +97,7 @@ async def upload_random_package(tar_path, sem):
     async with sem:
         with open(tar_path, 'rb') as f:
             async with aiohttp.ClientSession() as s:
-                async with s.post("http://localhost:8000/publish", data=f.read(), headers={"x-api-key": "test"}) as r:
+                async with s.post("http://localhost:8000/vieter/publish", data=f.read(), headers={"x-api-key": "test"}) as r:
                     return await check_output(r)
@@ -1,9 +1,10 @@
 # This file contains settings used during development
 api_key = "test"
 download_dir = "data/downloads"
-repo_dir = "data/repo"
+repos_dir = "data/repos"
 pkg_dir = "data/pkgs"
 log_level = "DEBUG"
 repos_file = "data/repos.json"
+default_arch = "x86_64"
 
 address = "http://localhost:8000"