Merge branch 'dev' into cron
commit 5c38071998
@@ -7,7 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased](https://git.rustybever.be/Chewing_Bever/vieter)

## Changed

## [0.2.0](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.2.0)

### Changed

* Better config system
    * Support for both a config file & environment variables
@@ -17,7 +19,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

* All routes now return proper JSON where applicable & the correct status
  codes

## Added

### Added

* Very basic build system
    * Build is triggered by separate cron container
@@ -32,11 +34,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

* Support for multiple repositories
* Support for multiple architectures per repository

## Fixed

### Fixed

* Each package can now only have one version in the repository at once
  (required by Pacman)
* Packages with unknown fields in .PKGINFO are now allowed
* Old packages are now properly removed

## [0.1.0](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.1.0)
README.md | 52
@@ -1,10 +1,17 @@

# Vieter

Vieter is a re-implementation of the Pieter project. The goal is to create a
simple PKGBUILD-based build system, combined with a self-hosted Arch
repository. This would allow me to periodically re-build AUR packages (or
PKGBUILDs I created myself), & make sure I never have to compile anything on my
own systems, making my updates a lot quicker.

## Documentation

I host documentation for Vieter over at https://rustybever.be/docs/vieter/.

## Overview

Vieter is a restart of the Pieter project. The goal is to create a simple,
lightweight self-hostable Arch repository server, paired with a system that
periodically builds & publishes select Arch packages. This would allow me to
build AUR packages (or PKGBUILDs I created myself) "in the cloud" & make sure I
never have to compile anything on my own systems, making my updates a lot
quicker.

## Why V?
@@ -20,27 +27,26 @@ V standard library, and therefore the compiler. The source code for this fork

can be found [here](https://git.rustybever.be/Chewing_Bever/vieter-v). You can
obtain this modified version of the compiler by running `make v`, which will
clone & build the compiler. Afterwards, all make commands that require the V
compiler will use this new binary.
compiler will use this new binary. I try to keep this fork as up to date with
upstream as possible.

## Features

The project will consist of a server-agent model, where one or more builder
nodes can register with the server. These agents communicate with the Docker
daemon to start builds, which are then uploaded to the server's repository. The
server also allows for non-agents to upload packages, as long as they have the
required secrets. This allows me to also develop non-git packages, such as my
terminal, & upload them to the servers using CI.

* Arch repository server
    * Support for multiple repositories & multiple architectures
    * Endpoints for publishing new packages
    * API for managing repositories to build
* Build system
    * Periodic rebuilding of packages
    * Prevent unnecessary rebuilds

## Directory Structure

## Building

The data directory consists of three main directories:

In order to build Vieter, you'll need a couple of libraries:

* `downloads` - This is where packages are initially downloaded. Because vieter
  moves files from this folder to the `pkgs` folder, these two folders should
  best be on the same drive
* `pkgs` - This is where approved package files are stored.
* `repos` - Each repository gets a subfolder here. The subfolder contains the
  uncompressed contents of the db file.
* Each repo subdirectory contains the compressed db & files archive for the
  repository, alongside a directory called `files` which contains the
  uncompressed contents.
* gc
* libarchive
* openssl

Before building Vieter, you'll have to build the compiler using `make v`.
Afterwards, run `make` to build the debug binary.
@@ -93,7 +93,7 @@ fn build(conf Config) ? {

'source PKGBUILD',
// The build container checks whether the package is already
// present on the server
'curl --head --fail $conf.address/$repo.repo/$build_arch/\$pkgname-\$pkgver-\$pkgrel-${build_arch}.pkg.tar.zst && exit 0',
'curl --head --fail $conf.address/$repo.repo/$build_arch/\$pkgname-\$pkgver-\$pkgrel && exit 0',
'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $conf.address/$repo.repo/publish; done',
]
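The early-exit check above now HEADs the bare `$pkgname-$pkgver-$pkgrel` path instead of the full archive filename; the server answers that path by serving the package's `desc` file (see the routes change further down). A minimal Python sketch of the same check, assuming the server returns an error status when the package is absent; the `already_published` helper and the example URL are illustrative only:

```python
from urllib.request import Request, urlopen
from urllib.error import HTTPError

def already_published(address: str, repo: str, arch: str, pkg: str) -> bool:
    """HEAD the package's repo entry; 200 means a desc file already exists."""
    req = Request(f'{address}/{repo}/{arch}/{pkg}', method='HEAD')
    try:
        urlopen(req)
        return True
    except HTTPError:
        return False

# e.g. already_published('https://arch.example.com', 'vieter', 'x86_64', 'vieter-0.2.0-1')
```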
@@ -11,7 +11,7 @@ fn main() {

mut app := cli.Command{
name: 'vieter'
description: 'Vieter is a lightweight implementation of an Arch repository server.'
version: '0.1.0'
version: '0.2.0'
flags: [
cli.Flag{
flag: cli.FlagType.string
@@ -12,11 +12,12 @@ mut:

pub:
// Where to store repositories' files
repos_dir string [required]
// Where packages are stored; each architecture gets its own subdirectory
// Where packages are stored; each arch-repository gets its own
// subdirectory
pkg_dir string [required]
// The default architecture to use for a repository. In reality, this value
// is only required when a package with architecture "any" is added as the
// first package of a repository.
// The default architecture to use for a repository. Whenever a package of
// arch "any" is added to a repo, it will also be added to this
// architecture.
default_arch string [required]
}
@@ -45,8 +46,8 @@ pub fn new(repos_dir string, pkg_dir string, default_arch string) ?RepoGroupMana

// add_pkg_from_path adds a package to a given repo, given the file path to the
// pkg archive. It's a wrapper around add_pkg_in_repo that parses the archive
// file, passes the result to add_pkg_in_repo, and moves the archive to
// r.pkg_dir if it was successfully added.
// file, passes the result to add_pkg_in_repo, and hard links the archive to
// the right subdirectories in r.pkg_dir if it was successfully added.
pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string) ?RepoAddResult {
pkg := package.read_pkg_archive(pkg_path) or {
return error('Failed to read package file: $err.msg')
@@ -55,17 +56,22 @@ pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string) ?Re

added := r.add_pkg_in_repo(repo, pkg) ?

// If the add was successful, we move the file to the packages directory
if added {
dest_path := os.real_path(os.join_path_single(r.pkg_dir, pkg.filename()))
for arch in added {
repo_pkg_path := os.real_path(os.join_path(r.pkg_dir, repo, arch))
dest_path := os.join_path_single(repo_pkg_path, pkg.filename())

// Only move the file if it's not already in the package directory
if dest_path != os.real_path(pkg_path) {
os.mv(pkg_path, dest_path) ?
}
os.mkdir_all(repo_pkg_path) ?

// We create hard links so that "any" arch packages aren't stored
// multiple times
os.link(pkg_path, dest_path) ?
}

// After linking, we can remove the original file
os.rm(pkg_path) ?

return RepoAddResult{
added: added
added: added.len > 0
pkg: &pkg
}
}
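The new version links one uploaded archive into every relevant arch subdirectory instead of moving it once. A rough Python sketch of that idea; the function and directory names here are illustrative, not part of Vieter:

```python
import os

def link_into_arch_repos(pkg_path: str, pkg_dir: str, repo: str, archs: list[str]) -> None:
    """Hard link one uploaded archive into every arch subdirectory, then drop the original."""
    filename = os.path.basename(pkg_path)

    for arch in archs:
        dest_dir = os.path.join(pkg_dir, repo, arch)
        os.makedirs(dest_dir, exist_ok=True)
        # Hard links share one inode, so an "any" package only occupies disk
        # space once no matter how many arch-repos list it.
        os.link(pkg_path, os.path.join(dest_dir, filename))

    # Every arch-repo now has its own link, so the original download can go.
    os.remove(pkg_path)
```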
@@ -73,26 +79,32 @@ pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string) ?Re

// add_pkg_in_repo adds a package to a given repo. This function is responsible
// for inspecting the package architecture. If said architecture is 'any', the
// package is added to each arch-repository within the given repo. A package of
// architecture 'any' will always be added to the arch-repo defined by
// r.default_arch. If this arch-repo doesn't exist yet, it will be created. If
// the architecture isn't 'any', the package is only added to the specific
// architecture 'any' is always added to the arch-repo defined by
// r.default_arch. If this arch-repo doesn't exist yet, it is created. If the
// architecture isn't 'any', the package is only added to the specific
// architecture.
fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ?bool {
// A package without arch 'any' can be handled without any further checks
fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ?[]string {
// A package not of arch 'any' can be handled easily by adding it to the
// respective repo
if pkg.info.arch != 'any' {
return r.add_pkg_in_arch_repo(repo, pkg.info.arch, pkg)
if r.add_pkg_in_arch_repo(repo, pkg.info.arch, pkg) ? {
return [pkg.info.arch]
} else {
return []
}
}

repo_dir := os.join_path_single(r.repos_dir, repo)

mut arch_repos := []string{}

// If it is an "any" package, the package gets added to every currently
// present arch-repo. It will always get added to the r.default_arch repo,
// even if no or multiple others are present.
repo_dir := os.join_path_single(r.repos_dir, repo)

// If this is the first package that's added to the repo, the directory
// won't exist yet
if os.exists(repo_dir) {
// We get a listing of all currently present arch-repos in the given repo
arch_repos = os.ls(repo_dir) ?.filter(os.is_dir(os.join_path_single(repo_dir,
it)))
arch_repos = os.ls(repo_dir) ?
}

// The default_arch should always be updated when a package with arch 'any'
@@ -101,12 +113,14 @@ fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ?bool {

arch_repos << r.default_arch
}

mut added := false
mut added := []string{}

// We add the package to each repository. If any of the repositories
// return true, the result of the function is also true.
for arch in arch_repos {
added = added || r.add_pkg_in_arch_repo(repo, arch, pkg) ?
if r.add_pkg_in_arch_repo(repo, arch, pkg) ? {
added << arch
}
}

return added
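Taken together, the two hunks above change add_pkg_in_repo from returning a single bool to returning the list of arch-repos the package actually landed in. A condensed Python sketch of the selection logic; the helper name is illustrative:

```python
def target_archs(pkg_arch: str, existing_archs: list[str], default_arch: str) -> list[str]:
    """Return the arch-repos a new package should be added to."""
    if pkg_arch != 'any':
        # A concrete architecture only ever goes into its own arch-repo.
        return [pkg_arch]

    # An "any" package goes into every arch-repo that already exists, and is
    # always added to the default arch, even for a completely empty repo.
    archs = set(existing_archs)
    archs.add(default_arch)
    return sorted(archs)
```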
@@ -120,13 +134,8 @@ fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ?bool {

fn (r &RepoGroupManager) add_pkg_in_arch_repo(repo string, arch string, pkg &package.Pkg) ?bool {
pkg_dir := os.join_path(r.repos_dir, repo, arch, '$pkg.info.name-$pkg.info.version')

// We can't add the same package twice
if os.exists(pkg_dir) {
return false
}

// We remove the older package version first, if present
r.remove_pkg_from_arch_repo(repo, arch, pkg, false) ?
// Remove the previous version of the package, if present
r.remove_pkg_from_arch_repo(repo, arch, pkg.info.name, false) ?

os.mkdir_all(pkg_dir) or { return error('Failed to create package directory.') }
@@ -149,7 +158,7 @@ fn (r &RepoGroupManager) add_pkg_in_arch_repo(repo string, arch string, pkg &pac

// remove_pkg_from_arch_repo removes a package from an arch-repo's database. It
// returns false if the package wasn't present in the database. It also
// optionally re-syncs the repo archives.
fn (r &RepoGroupManager) remove_pkg_from_arch_repo(repo string, arch string, pkg &package.Pkg, sync bool) ?bool {
fn (r &RepoGroupManager) remove_pkg_from_arch_repo(repo string, arch string, pkg_name string, sync bool) ?bool {
repo_dir := os.join_path(r.repos_dir, repo, arch)

// If the repository doesn't exist yet, the result is automatically false
@@ -165,13 +174,24 @@ fn (r &RepoGroupManager) remove_pkg_from_arch_repo(repo string, arch string, pkg

// not the version.
name := d.split('-')#[..-2].join('-')

if name == pkg.info.name {
if name == pkg_name {
// We lock the mutex here to prevent other routines from creating a
// new archive while we remove an entry
lock r.mutex {
os.rmdir_all(os.join_path_single(repo_dir, d)) ?
}

// Also remove the package archive
repo_pkg_dir := os.join_path(r.pkg_dir, repo, arch)

archives := os.ls(repo_pkg_dir) ?.filter(it.split('-')#[..-3].join('-') == name)

for archive_name in archives {
full_path := os.join_path_single(repo_pkg_dir, archive_name)
os.rm(full_path) ?
}

// Sync the db archives if requested
if sync {
r.sync(repo, arch) ?
}
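The `#[..-2]` and `#[..-3]` slices above strip the version (and, for archive filenames, the architecture) from the end of a hyphen-separated name. The same trimming expressed in Python, with a made-up package name as an example:

```python
def db_entry_name(entry: str) -> str:
    # Database directories are named "<pkgname>-<pkgver>-<pkgrel>";
    # dropping the last two parts leaves the package name.
    return '-'.join(entry.split('-')[:-2])

def archive_name(filename: str) -> str:
    # Package archives add the arch: "<pkgname>-<pkgver>-<pkgrel>-<arch>.pkg.tar.zst".
    return '-'.join(filename.split('-')[:-3])

assert db_entry_name('python-requests-2.27.1-1') == 'python-requests'
assert archive_name('python-requests-2.27.1-1-any.pkg.tar.zst') == 'python-requests'
```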
@@ -22,7 +22,9 @@ fn (mut app App) get_repo_file(repo string, arch string, filename string) web.Re

db_exts := ['.db', '.files', '.db.tar.gz', '.files.tar.gz']

if db_exts.any(filename.ends_with(it)) {
// There's no point in having the ability to serve db archives with wrong
// filenames
if db_exts.any(filename == '$repo$it') {
full_path = os.join_path(app.repo.repos_dir, repo, arch, filename)

// repo-add does this using symlinks, but we just change the requested
@@ -30,8 +32,14 @@ fn (mut app App) get_repo_file(repo string, arch string, filename string) web.Re

if !full_path.ends_with('.tar.gz') {
full_path += '.tar.gz'
}
} else {
full_path = os.join_path_single(app.repo.pkg_dir, filename)
} else if filename.contains('.pkg') {
full_path = os.join_path(app.repo.pkg_dir, repo, arch, filename)
}
// Default behavior is to return the desc file for the package, if present.
// This can then also be used by the build system to properly check whether
// a package is present in an arch-repo.
else {
full_path = os.join_path(app.repo.repos_dir, repo, arch, filename, 'desc')
}

// Scuffed way to respond to HEAD requests
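After this change, get_repo_file resolves a request in three steps: exact db-archive names, then package archives, then the package's desc file. A small Python sketch of that lookup order; the paths and the helper name are illustrative:

```python
import os

def resolve_repo_file(repos_dir, pkg_dir, repo, arch, filename):
    """Rough equivalent of the lookup order used by get_repo_file."""
    db_exts = ['.db', '.files', '.db.tar.gz', '.files.tar.gz']

    # Database archives are only served under their canonical "<repo><ext>" names.
    if any(filename == f'{repo}{ext}' for ext in db_exts):
        path = os.path.join(repos_dir, repo, arch, filename)
        return path if path.endswith('.tar.gz') else path + '.tar.gz'

    # Package archives live in the per-repo, per-arch package directory.
    if '.pkg' in filename:
        return os.path.join(pkg_dir, repo, arch, filename)

    # Anything else is treated as a package name; its desc file is served so
    # the build container can HEAD it to check for an existing version.
    return os.path.join(repos_dir, repo, arch, filename, 'desc')
```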
@@ -89,7 +97,7 @@ fn (mut app App) put_package(repo string) web.Result {

if !res.added {
os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg") }

app.lwarn("Duplicate package '$res.pkg.full_name()' in repo '$repo ($res.pkg.info.arch)'.")
app.lwarn("Duplicate package '$res.pkg.full_name()' in repo '$repo'.")

return app.json(http.Status.bad_request, new_response('File already exists.'))
}
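For context, this is roughly what a client (such as the CI mentioned in the README) does when publishing a package. The helper name is made up; the endpoint and the X-API-KEY header come from the build commands shown earlier, and the 400 duplicate response from the hunk above:

```python
from urllib.request import Request, urlopen

def publish_package(address: str, repo: str, api_key: str, pkg_path: str) -> bytes:
    """POST a built package archive to the repo's publish endpoint."""
    with open(pkg_path, 'rb') as f:
        req = Request(
            f'{address}/{repo}/publish',
            data=f.read(),
            method='POST',
            headers={'X-API-KEY': api_key},
        )
    # A duplicate version is rejected with a 400 and a JSON error body.
    with urlopen(req) as resp:
        return resp.read()
```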
test.py | 4
@@ -38,7 +38,7 @@ def create_random_pkginfo(words, name_min_len, name_max_len):

Generates a random .PKGINFO
"""
name = "-".join(random_words(words, name_min_len, name_max_len))
ver = "0.1.0-1" # doesn't matter what it is anyways
ver = "0.1.0-3" # doesn't matter what it is anyways

# TODO add random dependencies (all types)
@@ -46,7 +46,7 @@ def create_random_pkginfo(words, name_min_len, name_max_len):

"pkgname": name,
"pkgbase": name,
"pkgver": ver,
"arch": "any"
"arch": "x86_64"
}

return "\n".join(f"{key} = {value}" for key, value in data.items())