diff --git a/.editorconfig b/.editorconfig
index 630e4fa7..e23a3c76 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -1,4 +1,3 @@
-# top-most EditorConfig file
 root = true
 
 # Unix-style newlines with a newline ending every file
diff --git a/.woodpecker/.arch.yml b/.woodpecker/.arch.yml
index d6463532..23c34083 100644
--- a/.woodpecker/.arch.yml
+++ b/.woodpecker/.arch.yml
@@ -3,10 +3,10 @@ branches: [dev]
 
 pipeline:
   build:
-    image: 'archlinux:latest'
+    image: 'archlinux:base-devel'
     commands:
       # Update packages
-      - pacman -Syu --needed --noconfirm base-devel
+      - pacman -Syu
       # Create non-root user to perform build & switch to their home
       - groupadd -g 1000 builder
       - useradd -mg builder builder
@@ -17,9 +17,9 @@ pipeline:
       - makepkg -s --noconfirm --needed
   publish:
-    image: 'archlinux:latest'
+    image: 'curlimages/curl:latest'
     commands:
       # Publish the package
-      - 'for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $VIETER_API_KEY" https://arch.r8r.be/publish; done'
+      - 'for pkg in $(ls -1 *.pkg*); do curl -XPOST -T "$pkg" -H "X-API-KEY: $VIETER_API_KEY" https://arch.r8r.be/vieter/publish; done'
     secrets:
       - vieter_api_key
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 96f7dcf1..e1daaec4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -22,16 +22,21 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 * Very basic build system
     * Build is triggered by separate cron container
     * Packages build on cron container's system
+    * A HEAD request is used to determine whether a package should be rebuilt
+      or not
     * Hardcoded planning of builds
     * Builds are sequential
 * API for managing Git repositories to build
 * CLI to list, add & remove Git repos to build
 * Published packages on my Vieter instance
+* Support for multiple repositories
+* Support for multiple architectures per repository
 
 ## Fixed
 
 * Each package can now only have one version in the repository at once
   (required by Pacman)
+* Packages with unknown fields in .PKGINFO are now allowed
 
 ## [0.1.0](https://git.rustybever.be/Chewing_Bever/vieter/src/tag/0.1.0)
diff --git a/Dockerfile b/Dockerfile
index 8b625217..58087ad2 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -31,7 +31,7 @@ RUN if [ -n "${CI_COMMIT_SHA}" ]; then \
 FROM busybox:1.35.0
 
 ENV PATH=/bin \
-    VIETER_REPO_DIR=/data/repo \
+    VIETER_REPOS_DIR=/data/repos \
     VIETER_PKG_DIR=/data/pkgs \
     VIETER_DOWNLOAD_DIR=/data/downloads \
     VIETER_REPOS_FILE=/data/repos.json
diff --git a/src/archive.v b/src/archive.c.v
similarity index 100%
rename from src/archive.v
rename to src/archive.c.v
diff --git a/src/build/build.v b/src/build/build.v
index c42c98d8..4270e9d7 100644
--- a/src/build/build.v
+++ b/src/build/build.v
@@ -4,11 +4,14 @@ import docker
 import encoding.base64
 import time
 import git
+import os
 
 const container_build_dir = '/build'
 
 const build_image_repo = 'vieter-build'
 
+const base_image = 'archlinux:latest'
+
 fn create_build_image() ?string {
 	commands := [
 		// Update repos & install required packages
@@ -26,7 +29,7 @@ fn create_build_image() ?string {
 	cmds_str := base64.encode_str(commands.join('\n'))
 
 	c := docker.NewContainer{
-		image: 'archlinux:latest'
+		image: build.base_image
 		env: ['BUILD_SCRIPT=$cmds_str']
 		entrypoint: ['/bin/sh', '-c']
 		cmd: ['echo \$BUILD_SCRIPT | base64 -d | /bin/sh -e']
@@ -60,27 +63,34 @@ fn create_build_image() ?string {
 }
 
 fn build(conf Config) ? {
-	// We get the repos list from the Vieter instance
-	repos := git.get_repos(conf.address, conf.api_key) ?
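+	// The builder's native architecture, as reported by uname; this replaces
+	// the `uname -m` call the build container used to perform itself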
+	build_arch := os.uname().machine
+
+	// We get the repos map from the Vieter instance
+	repos_map := git.get_repos(conf.address, conf.api_key) ?
+
+	// We filter out any repos that aren't allowed to be built on this
+	// architecture
+	filtered_repos := repos_map.keys().map(repos_map[it]).filter(it.arch.contains(build_arch))
 
 	// No point in doing work if there's no repos present
-	if repos.len == 0 {
+	if filtered_repos.len == 0 {
 		return
 	}
 
 	// First, we create a base image which has updated repos n stuff
 	image_id := create_build_image() ?
 
-	for _, repo in repos {
+	for repo in filtered_repos {
 		// TODO what to do with PKGBUILDs that build multiple packages?
 		commands := [
 			'git clone --single-branch --depth 1 --branch $repo.branch $repo.url repo',
 			'cd repo',
 			'makepkg --nobuild --nodeps',
 			'source PKGBUILD',
-			// The build container checks whether the package is already present on the server
-			'curl --head --fail $conf.address/\$pkgname-\$pkgver-\$pkgrel-\$(uname -m).pkg.tar.zst && exit 0',
-			'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $conf.address/publish; done',
+			// The build container checks whether the package is already
+			// present on the server
+			'curl --head --fail $conf.address/$repo.repo/$build_arch/\$pkgname-\$pkgver-\$pkgrel-${build_arch}.pkg.tar.zst && exit 0',
+			'MAKEFLAGS="-j\$(nproc)" makepkg -s --noconfirm --needed && for pkg in \$(ls -1 *.pkg*); do curl -XPOST -T "\$pkg" -H "X-API-KEY: \$API_KEY" $conf.address/$repo.repo/publish; done',
 		]
 
 		// We convert the list of commands into a base64 string, which then gets
diff --git a/src/git/cli.v b/src/git/cli.v
index 4a066d51..463f1ba1 100644
--- a/src/git/cli.v
+++ b/src/git/cli.v
@@ -26,14 +26,14 @@ pub fn cmd() cli.Command {
 		},
 		cli.Command{
 			name: 'add'
-			required_args: 2
-			usage: 'url branch arch...'
+			required_args: 4
+			usage: 'url branch repo arch...'
 			description: 'Add a new repository.'
 			execute: fn (cmd cli.Command) ? {
 				config_file := cmd.flags.get_string('config-file') ?
 				conf := env.load(config_file) ?
 
-				add(conf, cmd.args[0], cmd.args[1], cmd.args[2..]) ?
+				add(conf, cmd.args[0], cmd.args[1], cmd.args[2], cmd.args[3..]) ?
 			}
 		},
 		cli.Command{
@@ -48,46 +48,100 @@ pub fn cmd() cli.Command {
 				remove(conf, cmd.args[0]) ?
 			}
 		},
+		cli.Command{
+			name: 'edit'
+			required_args: 1
+			usage: 'id'
+			description: 'Edit the repository that matches the given ID prefix.'
+			flags: [
+				cli.Flag{
+					name: 'url'
+					description: 'URL of the Git repository.'
+					flag: cli.FlagType.string
+				},
+				cli.Flag{
+					name: 'branch'
+					description: 'Branch of the Git repository.'
+					flag: cli.FlagType.string
+				},
+				cli.Flag{
+					name: 'repo'
+					description: 'Repo to publish builds to.'
+					flag: cli.FlagType.string
+				},
+				cli.Flag{
+					name: 'arch'
+					description: 'Comma-separated list of architectures to build on.'
+					flag: cli.FlagType.string
+				},
+			]
+			execute: fn (cmd cli.Command) ? {
+				config_file := cmd.flags.get_string('config-file') ?
+				conf := env.load(config_file) ?
+
+				found := cmd.flags.get_all_found()
+
+				mut params := map[string]string{}
+
+				for f in found {
+					if f.name != 'config-file' {
+						params[f.name] = f.get_string() ?
+					}
+				}
+
+				patch(conf, cmd.args[0], params) ?
+			}
+		},
 	]
 }
 
+fn get_repo_id_by_prefix(conf Config, id_prefix string) ?string {
+	repos := get_repos(conf.address, conf.api_key) ?
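+	// get_repos returns a map of ID -> repo, so finding a match is a simple
+	// scan over the keys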
+
+	mut res := []string{}
+
+	for id, _ in repos {
+		if id.starts_with(id_prefix) {
+			res << id
+		}
+	}
+
+	if res.len == 0 {
+		return error('No repo found for given prefix.')
+	}
+
+	if res.len > 1 {
+		return error('Multiple repos found for given prefix.')
+	}
+
+	return res[0]
+}
+
 fn list(conf Config) ? {
 	repos := get_repos(conf.address, conf.api_key) ?
 
 	for id, details in repos {
-		println('${id[..8]}\t$details.url\t$details.branch\t$details.arch')
+		println('${id[..8]}\t$details.url\t$details.branch\t$details.repo\t$details.arch')
 	}
 }
 
-fn add(conf Config, url string, branch string, arch []string) ? {
-	res := add_repo(conf.address, conf.api_key, url, branch, arch) ?
+fn add(conf Config, url string, branch string, repo string, arch []string) ? {
+	res := add_repo(conf.address, conf.api_key, url, branch, repo, arch) ?
 
 	println(res.message)
 }
 
 fn remove(conf Config, id_prefix string) ? {
-	repos := get_repos(conf.address, conf.api_key) ?
-
-	mut to_remove := []string{}
-
-	for id, _ in repos {
-		if id.starts_with(id_prefix) {
-			to_remove << id
-		}
-	}
-
-	if to_remove.len == 0 {
-		eprintln('No repo found for given prefix.')
-		exit(1)
-	}
-
-	if to_remove.len > 1 {
-		eprintln('Multiple repos found for given prefix.')
-		exit(1)
-	}
-
-	res := remove_repo(conf.address, conf.api_key, to_remove[0]) ?
+	id := get_repo_id_by_prefix(conf, id_prefix) ?
+	res := remove_repo(conf.address, conf.api_key, id) ?
+
+	println(res.message)
+}
+
+fn patch(conf Config, id_prefix string, params map[string]string) ? {
+	id := get_repo_id_by_prefix(conf, id_prefix) ?
+	res := patch_repo(conf.address, conf.api_key, id, params) ?
 
 	println(res.message)
 }
diff --git a/src/git/client.v b/src/git/client.v
index 97fe9fb5..e4a39acd 100644
--- a/src/git/client.v
+++ b/src/git/client.v
@@ -4,36 +4,58 @@ import json
 import response { Response }
 import net.http
 
-// get_repos returns the current list of repos.
-pub fn get_repos(address string, api_key string) ?map[string]GitRepo {
-	mut req := http.new_request(http.Method.get, '$address/api/repos', '') ?
+fn send_request(method http.Method, address string, url string, api_key string, params map[string]string) ?Response {
+	mut full_url := '$address$url'
+
+	if params.len > 0 {
+		params_str := params.keys().map('$it=${params[it]}').join('&')
+
+		full_url = '$full_url?$params_str'
+	}
+
+	mut req := http.new_request(method, full_url, '') ?
 	req.add_custom_header('X-API-Key', api_key) ?
 
 	res := req.do() ?
-	data := json.decode(Response, res.text) ?
+	data := json.decode(Response, res.text) ?
+
+	return data
+}
+
+// get_repos returns the current list of repos.
+pub fn get_repos(address string, api_key string) ?map[string]GitRepo {
+	data := send_request(http.Method.get, address, '/api/repos', api_key,
+		{}) ?
 
 	return data.data
 }
 
 // add_repo adds a new repo to the server.
-pub fn add_repo(address string, api_key string, url string, branch string, arch []string) ?Response {
-	mut req := http.new_request(http.Method.post, '$address/api/repos?url=$url&branch=$branch&arch=${arch.join(',')}',
-		'') ?
-	req.add_custom_header('X-API-Key', api_key) ?
-
-	res := req.do() ?
-	data := json.decode(Response, res.text) ?
+pub fn add_repo(address string, api_key string, url string, branch string, repo string, arch []string) ?Response {
+	params := {
+		'url':    url
+		'branch': branch
+		'repo':   repo
+		'arch':   arch.join(',')
+	}
+	data := send_request(http.Method.post, address, '/api/repos', api_key, params) ?
 
 	return data
 }
 
 // remove_repo removes the repo with the given ID from the server.
 pub fn remove_repo(address string, api_key string, id string) ?Response {
-	mut req := http.new_request(http.Method.delete, '$address/api/repos/$id', '') ?
-	req.add_custom_header('X-API-Key', api_key) ?
-
-	res := req.do() ?
-	data := json.decode(Response, res.text) ?
+	data := send_request(http.Method.delete, address, '/api/repos/$id', api_key,
+		{}) ?
+
+	return data
+}
+
+// patch_repo sends a PATCH request to the given repo with the params as
+// payload.
+pub fn patch_repo(address string, api_key string, id string, params map[string]string) ?Response {
+	data := send_request(http.Method.patch, address, '/api/repos/$id', api_key,
+		params) ?
 
 	return data
 }
diff --git a/src/git/git.v b/src/git/git.v
index c5390b60..eaec8959 100644
--- a/src/git/git.v
+++ b/src/git/git.v
@@ -12,6 +12,8 @@ pub mut:
 	// On which architectures the package is allowed to be built. In reality,
 	// this controls which builders will periodically build the image.
 	arch []string
+	// Which repo the builder should publish packages to
+	repo string
 }
 
 // patch_from_params patches a GitRepo from a map[string]string, usually
diff --git a/src/package.v b/src/package.v
index 4e3b97f8..8bd30d04 100644
--- a/src/package.v
+++ b/src/package.v
@@ -72,14 +72,11 @@ fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo {
 			'pkgbase' { pkg_info.base = value }
 			'pkgver' { pkg_info.version = value }
 			'pkgdesc' { pkg_info.description = value }
-			'csize' { continue }
 			'size' { pkg_info.size = value.int() }
 			'url' { pkg_info.url = value }
 			'arch' { pkg_info.arch = value }
 			'builddate' { pkg_info.build_date = value.int() }
 			'packager' { pkg_info.packager = value }
-			'md5sum' { continue }
-			'sha256sum' { continue }
 			'pgpsig' { pkg_info.pgpsig = value }
 			'pgpsigsize' { pkg_info.pgpsigsize = value.int() }
 			// Array values
@@ -92,16 +89,19 @@ fn parse_pkg_info_string(pkg_info_str &string) ?PkgInfo {
 			'optdepend' { pkg_info.optdepends << value }
 			'makedepend' { pkg_info.makedepends << value }
 			'checkdepend' { pkg_info.checkdepends << value }
-			else { return error("Invalid key '$key'.") }
+			// There's no real point in trying to exactly manage which fields
+			// are allowed, so we just ignore any we don't explicitly need
+			// for the db file
+			else { continue }
 		}
 	}
 
 	return pkg_info
 }
 
-// read_pkg extracts the file list & .PKGINFO contents from an archive
-// NOTE: this command currently only supports zstd-compressed tarballs
-pub fn read_pkg(pkg_path string) ?Pkg {
+// read_pkg_archive extracts the file list & .PKGINFO contents from an archive
+// NOTE: this function only supports zstd- & gzip-compressed tarballs
+pub fn read_pkg_archive(pkg_path string) ?Pkg {
 	if !os.is_file(pkg_path) {
 		return error("'$pkg_path' doesn't exist or isn't a file.")
 	}
diff --git a/src/repo/repo.v b/src/repo/repo.v
index f1419aca..228b17e7 100644
--- a/src/repo/repo.v
+++ b/src/repo/repo.v
@@ -4,15 +4,20 @@ import os
 import package
 import util
 
-// This struct manages a single repository.
-pub struct Repo {
+// Manages a group of repositories. Each repository contains one or more
+// arch-repositories, each of which represents a specific architecture.
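+// On disk, an arch-repository lives at repos_dir/<repo>/<arch> & contains one
+// directory per package version, holding that package's desc & files entries.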
+pub struct RepoGroupManager {
 mut:
 	mutex shared util.Dummy
 pub:
-	// Where to store repository files
-	repo_dir string [required]
-	// Where to find packages; packages are expected to all be in the same directory
+	// Where to store repositories' files
+	repos_dir string [required]
+	// Where packages are stored; each architecture gets its own subdirectory
 	pkg_dir string [required]
+	// The default architecture to use for a repository. In reality, this value
+	// is only required when a package with architecture "any" is added as the
+	// first package of a repository.
+	default_arch string [required]
 }
 
 pub struct RepoAddResult {
@@ -21,28 +26,33 @@ pub:
 	pkg &package.Pkg [required]
 }
 
-// new creates a new Repo & creates the directories as needed
-pub fn new(repo_dir string, pkg_dir string) ?Repo {
-	if !os.is_dir(repo_dir) {
-		os.mkdir_all(repo_dir) or { return error('Failed to create repo directory: $err.msg') }
+// new creates a new RepoGroupManager & creates the directories as needed
+pub fn new(repos_dir string, pkg_dir string, default_arch string) ?RepoGroupManager {
+	if !os.is_dir(repos_dir) {
+		os.mkdir_all(repos_dir) or { return error('Failed to create repos directory: $err.msg') }
 	}
 
 	if !os.is_dir(pkg_dir) {
 		os.mkdir_all(pkg_dir) or { return error('Failed to create package directory: $err.msg') }
 	}
 
-	return Repo{
-		repo_dir: repo_dir
+	return RepoGroupManager{
+		repos_dir: repos_dir
 		pkg_dir: pkg_dir
+		default_arch: default_arch
 	}
 }
 
-// add_from_path adds a package from an arbitrary path & moves it into the pkgs
-// directory if necessary.
-pub fn (r &Repo) add_from_path(pkg_path string) ?RepoAddResult {
-	pkg := package.read_pkg(pkg_path) or { return error('Failed to read package file: $err.msg') }
+// add_pkg_from_path adds a package to a given repo, given the file path to the
+// pkg archive. It's a wrapper around add_pkg_in_repo that parses the archive
+// file, passes the result to add_pkg_in_repo, and moves the archive to
+// r.pkg_dir if it was successfully added.
+pub fn (r &RepoGroupManager) add_pkg_from_path(repo string, pkg_path string) ?RepoAddResult {
+	pkg := package.read_pkg_archive(pkg_path) or {
+		return error('Failed to read package file: $err.msg')
+	}
 
-	added := r.add(pkg) ?
+	added := r.add_pkg_in_repo(repo, pkg) ?
 
 	// If the add was successful, we move the file to the packages directory
 	if added {
@@ -60,9 +70,55 @@
 	}
 }
 
-// add adds a given Pkg to the repository
-fn (r &Repo) add(pkg &package.Pkg) ?bool {
-	pkg_dir := r.pkg_path(pkg)
+// add_pkg_in_repo adds a package to a given repo. This function is responsible
+// for inspecting the package architecture. If said architecture is 'any', the
+// package is added to each arch-repository within the given repo. A package of
+// architecture 'any' will always be added to the arch-repo defined by
+// r.default_arch. If this arch-repo doesn't exist yet, it will be created. If
+// the architecture isn't 'any', the package is only added to the specific
+// architecture.
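+// For example, with default_arch set to 'x86_64', an 'any' package uploaded
+// to repo 'vieter' ends up in vieter/x86_64, as well as in any other
+// arch-repos that already exist for 'vieter'.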
+fn (r &RepoGroupManager) add_pkg_in_repo(repo string, pkg &package.Pkg) ?bool {
+	// A package without arch 'any' can be handled without any further checks
+	if pkg.info.arch != 'any' {
+		return r.add_pkg_in_arch_repo(repo, pkg.info.arch, pkg)
+	}
+
+	repo_dir := os.join_path_single(r.repos_dir, repo)
+
+	mut arch_repos := []string{}
+
+	// If this is the first package that's added to the repo, the directory
+	// won't exist yet
+	if os.exists(repo_dir) {
+		// We get a listing of all currently present arch-repos in the given repo
+		arch_repos = os.ls(repo_dir) ?.filter(os.is_dir(os.join_path_single(repo_dir,
+			it)))
+	}
+
+	// The default_arch should always be updated when a package with arch 'any'
+	// is added.
+	if !arch_repos.contains(r.default_arch) {
+		arch_repos << r.default_arch
+	}
+
+	mut added := false
+
+	// We add the package to each arch-repo; the result of the function is
+	// true if any of these adds returned true. The separate variable avoids
+	// short-circuiting the call, which would skip the remaining arch-repos
+	// once one add has succeeded.
+	for arch in arch_repos {
+		added_in_arch := r.add_pkg_in_arch_repo(repo, arch, pkg) ?
+		added = added || added_in_arch
+	}
+
+	return added
+}
+
+// add_pkg_in_arch_repo is the function that actually adds a package to a given
+// arch-repo. It records the package's data in the arch-repo's desc & files
+// files, and afterwards updates the db & files archives to reflect these
+// changes. The function returns false if the package was already present in
+// the repo, and true otherwise.
+fn (r &RepoGroupManager) add_pkg_in_arch_repo(repo string, arch string, pkg &package.Pkg) ?bool {
+	pkg_dir := os.join_path(r.repos_dir, repo, arch, '$pkg.info.name-$pkg.info.version')
 
 	// We can't add the same package twice
 	if os.exists(pkg_dir) {
@@ -70,9 +126,9 @@
 	}
 
 	// We remove the older package version first, if present
-	r.remove(pkg.info.name, false) ?
+	r.remove_pkg_from_arch_repo(repo, arch, pkg, false) ?
 
-	os.mkdir(pkg_dir) or { return error('Failed to create package directory.') }
+	os.mkdir_all(pkg_dir) or { return error('Failed to create package directory.') }
 
 	os.write_file(os.join_path_single(pkg_dir, 'desc'), pkg.to_desc()) or {
 		os.rmdir_all(pkg_dir) ?
@@ -85,27 +141,39 @@
 		return error('Failed to write files file.')
 	}
 
-	r.sync() ?
+	r.sync(repo, arch) ?
 
 	return true
 }
 
-// remove removes a package from the database. It returns false if the package
-// wasn't present in the database.
-fn (r &Repo) remove(pkg_name string, sync bool) ?bool {
+// remove_pkg_from_arch_repo removes a package from an arch-repo's database. It
+// returns false if the package wasn't present in the database. It also
+// optionally re-syncs the repo archives.
+fn (r &RepoGroupManager) remove_pkg_from_arch_repo(repo string, arch string, pkg &package.Pkg, sync bool) ?bool {
+	repo_dir := os.join_path(r.repos_dir, repo, arch)
+
+	// If the repository doesn't exist yet, the result is automatically false
+	if !os.exists(repo_dir) {
+		return false
+	}
+
 	// We iterate over every directory in the repo dir
-	for d in os.ls(r.repo_dir) ? {
+	// TODO filter so we only check directories
+	for d in os.ls(repo_dir) ? {
+		// Because a repository only allows a single version of each package,
+		// we need only compare whether the name of the package is the same,
+		// not the version.
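+		// e.g. 'some-package-0.2.0-3' is reduced to 'some-package', as
+		// dropping the last two '-'-separated parts strips pkgver & pkgrel.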
 		name := d.split('-')#[..-2].join('-')
 
-		if name == pkg_name {
+		if name == pkg.info.name {
 			// We lock the mutex here to prevent other routines from creating a
-			// new archive while we removed an entry
+			// new archive while we remove an entry
 			lock r.mutex {
-				os.rmdir_all(os.join_path_single(r.repo_dir, d)) ?
+				os.rmdir_all(os.join_path_single(repo_dir, d)) ?
 			}
 
 			if sync {
-				r.sync() ?
+				r.sync(repo, arch) ?
 			}
 
 			return true
@@ -114,8 +182,3 @@
 
 	return false
 }
-
-// Returns the path where the given package's desc & files files are stored
-fn (r &Repo) pkg_path(pkg &package.Pkg) string {
-	return os.join_path(r.repo_dir, '$pkg.info.name-$pkg.info.version')
-}
diff --git a/src/repo/sync.v b/src/repo/sync.v
index d6080e07..e2b7aac7 100644
--- a/src/repo/sync.v
+++ b/src/repo/sync.v
@@ -30,8 +30,9 @@ fn archive_add_entry(archive &C.archive, entry &C.archive_entry, file_path &stri
 }
 
 // Re-generate the repo archive files
-fn (r &Repo) sync() ? {
-	// TODO also write files archive
+fn (r &RepoGroupManager) sync(repo string, arch string) ? {
+	subrepo_path := os.join_path(r.repos_dir, repo, arch)
+
 	lock r.mutex {
 		a_db := C.archive_write_new()
 		a_files := C.archive_write_new()
@@ -44,18 +45,18 @@ fn (r &Repo) sync() ? {
 		C.archive_write_add_filter_gzip(a_files)
 		C.archive_write_set_format_pax_restricted(a_files)
 
-		db_path := os.join_path_single(r.repo_dir, 'vieter.db.tar.gz')
-		files_path := os.join_path_single(r.repo_dir, 'vieter.files.tar.gz')
+		db_path := os.join_path_single(subrepo_path, '${repo}.db.tar.gz')
+		files_path := os.join_path_single(subrepo_path, '${repo}.files.tar.gz')
 
 		C.archive_write_open_filename(a_db, &char(db_path.str))
 		C.archive_write_open_filename(a_files, &char(files_path.str))
 
 		// Iterate over each directory
-		for d in os.ls(r.repo_dir) ?.filter(os.is_dir(os.join_path_single(r.repo_dir,
+		for d in os.ls(subrepo_path) ?.filter(os.is_dir(os.join_path_single(subrepo_path,
 			it))) {
 			// desc
 			mut inner_path := os.join_path_single(d, 'desc')
-			mut actual_path := os.join_path_single(r.repo_dir, inner_path)
+			mut actual_path := os.join_path_single(subrepo_path, inner_path)
 
 			archive_add_entry(a_db, entry, actual_path, inner_path)
 			archive_add_entry(a_files, entry, actual_path, inner_path)
@@ -64,7 +65,7 @@ fn (r &Repo) sync() ? {
 			// files
 			inner_path = os.join_path_single(d, 'files')
-			actual_path = os.join_path_single(r.repo_dir, inner_path)
+			actual_path = os.join_path_single(subrepo_path, inner_path)
 
 			archive_add_entry(a_files, entry, actual_path, inner_path)
diff --git a/src/server/cli.v b/src/server/cli.v
index f0dc0b12..bea223d4 100644
--- a/src/server/cli.v
+++ b/src/server/cli.v
@@ -10,8 +10,9 @@ pub:
 	pkg_dir string
 	download_dir string
 	api_key string
-	repo_dir string
+	repos_dir string
 	repos_file string
+	default_arch string
 }
 
 // cmd returns the cli submodule that handles starting the server
diff --git a/src/server/routes.v b/src/server/routes.v
index 07279cbe..a9264256 100644
--- a/src/server/routes.v
+++ b/src/server/routes.v
@@ -16,15 +16,20 @@ pub fn (mut app App) healthcheck() web.Result {
 	return app.json(http.Status.ok, new_response('Healthy.'))
 }
 
-// get_root handles a GET request for a file on the root
-['/:filename'; get; head]
-fn (mut app App) get_root(filename string) web.Result {
+['/:repo/:arch/:filename'; get; head]
+fn (mut app App) get_repo_file(repo string, arch string, filename string) web.Result {
 	mut full_path := ''
 
-	if filename.ends_with('.db') || filename.ends_with('.files') {
-		full_path = os.join_path_single(app.repo.repo_dir, '${filename}.tar.gz')
-	} else if filename.ends_with('.db.tar.gz') || filename.ends_with('.files.tar.gz') {
-		full_path = os.join_path_single(app.repo.repo_dir, '$filename')
+	db_exts := ['.db', '.files', '.db.tar.gz', '.files.tar.gz']
+
+	if db_exts.any(filename.ends_with(it)) {
+		full_path = os.join_path(app.repo.repos_dir, repo, arch, filename)
+
+		// repo-add does this using symlinks, but we just change the requested
+		// path
+		if !full_path.ends_with('.tar.gz') {
+			full_path += '.tar.gz'
+		}
 	} else {
 		full_path = os.join_path_single(app.repo.pkg_dir, filename)
 	}
@@ -41,8 +46,8 @@ fn (mut app App) get_root(filename string) web.Result {
 	return app.file(full_path)
 }
 
-['/publish'; post]
-fn (mut app App) put_package() web.Result {
+['/:repo/publish'; post]
+fn (mut app App) put_package(repo string) web.Result {
 	if !app.is_authorized() {
 		return app.json(http.Status.unauthorized, new_response('Unauthorized.'))
 	}
@@ -53,10 +58,6 @@ fn (mut app App) put_package() web.Result {
 		// Generate a random filename for the temp file
 		pkg_path = os.join_path_single(app.conf.download_dir, rand.uuid_v4())
 
-		for os.exists(pkg_path) {
-			pkg_path = os.join_path_single(app.conf.download_dir, rand.uuid_v4())
-		}
-
 		app.ldebug("Uploading $length bytes (${util.pretty_bytes(length.int())}) to '$pkg_path'.")
 
 		// This is used to time how long it takes to upload a file
@@ -77,22 +78,23 @@ fn (mut app App) put_package() web.Result {
 		return app.status(http.Status.length_required)
 	}
 
-	res := app.repo.add_from_path(pkg_path) or {
+	res := app.repo.add_pkg_from_path(repo, pkg_path) or {
 		app.lerror('Error while adding package: $err.msg')
 
 		os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg") }
 
 		return app.json(http.Status.internal_server_error, new_response('Failed to add package.'))
 	}
+
 	if !res.added {
 		os.rm(pkg_path) or { app.lerror("Failed to remove download '$pkg_path': $err.msg") }
 
-		app.lwarn("Duplicate package '$res.pkg.full_name()'.")
+		app.lwarn("Duplicate package '$res.pkg.full_name()' in repo '$repo ($res.pkg.info.arch)'.")
 
 		return app.json(http.Status.bad_request, new_response('File already exists.'))
 	}
 
-	app.linfo("Added '$res.pkg.full_name()' to repository.")
+	app.linfo("Added '$res.pkg.full_name()' to repo '$repo ($res.pkg.info.arch)'.")
 
 	return app.json(http.Status.ok, new_response('Package added successfully.'))
 }
diff --git a/src/server/server.v b/src/server/server.v
index ab2e46bc..5bf9a87e 100644
--- a/src/server/server.v
+++ b/src/server/server.v
@@ -13,13 +13,18 @@ struct App {
 pub:
 	conf Config [required; web_global]
 pub mut:
-	repo repo.Repo [required; web_global]
+	repo repo.RepoGroupManager [required; web_global]
 	// This is used to claim the file lock on the repos file
 	git_mutex shared util.Dummy
 }
 
 // server starts the web server & starts listening for requests
 pub fn server(conf Config) ? {
+	// Prevent using 'any' as the default arch
+	if conf.default_arch == 'any' {
+		util.exit_with_message(1, "'any' is not allowed as the value for default_arch.")
+	}
+
 	// Configure logger
 	log_level := log.level_from_tag(conf.log_level) or {
 		util.exit_with_message(1, 'Invalid log level. The allowed values are FATAL, ERROR, WARN, INFO & DEBUG.')
@@ -39,7 +44,7 @@ pub fn server(conf Config) ? {
 	}
 
 	// This also creates the directories if needed
-	repo := repo.new(conf.repo_dir, conf.pkg_dir) or {
+	repo := repo.new(conf.repos_dir, conf.pkg_dir, conf.default_arch) or {
 		logger.error(err.msg)
 		exit(1)
 	}
diff --git a/test.py b/test.py
index 5721310b..9b0116ec 100644
--- a/test.py
+++ b/test.py
@@ -46,7 +46,7 @@ def create_random_pkginfo(words, name_min_len, name_max_len):
         "pkgname": name,
         "pkgbase": name,
         "pkgver": ver,
-        "arch": "x86_64"
+        "arch": "any"
     }
 
     return "\n".join(f"{key} = {value}" for key, value in data.items())
@@ -97,7 +97,7 @@ async def upload_random_package(tar_path, sem):
     async with sem:
        with open(tar_path, 'rb') as f:
            async with aiohttp.ClientSession() as s:
-                async with s.post("http://localhost:8000/publish", data=f.read(), headers={"x-api-key": "test"}) as r:
+                async with s.post("http://localhost:8000/vieter/publish", data=f.read(), headers={"x-api-key": "test"}) as r:
                    return await check_output(r)
diff --git a/vieter.toml b/vieter.toml
index 75929423..8e0447b2 100644
--- a/vieter.toml
+++ b/vieter.toml
@@ -1,9 +1,10 @@
 # This file contains settings used during development
 
 api_key = "test"
 download_dir = "data/downloads"
-repo_dir = "data/repo"
+repos_dir = "data/repos"
 pkg_dir = "data/pkgs"
 log_level = "DEBUG"
 repos_file = "data/repos.json"
+default_arch = "x86_64"
 address = "http://localhost:8000"