refactor(server): rewrite part of repo logic; remove need for default arch

repo-db
Jef Roosens 2023-08-02 18:48:17 +02:00
parent efc8114704
commit 2e5c84a48d
Signed by: Jef Roosens
GPG Key ID: B75D4F293C7052DB
4 changed files with 113 additions and 93 deletions

View File

@@ -26,8 +26,6 @@ pub struct Cli {
     pub pkg_dir: PathBuf,
     /// Directory where repository metadata is stored
     pub repo_dir: PathBuf,
-    /// Default architecture to add packages with arch "any" to
-    pub default_arch: String,
 }

 impl FromRef<Global> for Arc<RwLock<RepoGroupManager>> {
@@ -51,7 +49,7 @@ impl Cli {
             repo_dir: self.repo_dir.clone(),
             pkg_dir: self.pkg_dir.clone(),
         };

-        let repo_manager = RepoGroupManager::new(&self.repo_dir, &self.pkg_dir, &self.default_arch);
+        let repo_manager = RepoGroupManager::new(&self.repo_dir, &self.pkg_dir);

         let global = Global {
             config,
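With `default_arch` gone from the config, the manager is constructed from the two directories alone. A minimal sketch of the new call shape, with illustrative paths (not from the commit):

    use std::path::Path;

    // Hypothetical standalone call; in the server this happens in `Cli::build`.
    let manager = RepoGroupManager::new(Path::new("/data/repos"), Path::new("/data/pkgs"));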

View File

@@ -25,6 +25,8 @@ impl Error for ServerError {}
 impl IntoResponse for ServerError {
     fn into_response(self) -> Response {
+        tracing::error!("{:?}", self);
+
         match self {
             ServerError::IO(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
             ServerError::Axum(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
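The added `tracing::error!` call logs the full `Debug` representation server-side before the error is flattened into a bare status code, so clients never see internals. A minimal sketch of the same pattern, assuming a `tracing` subscriber is installed (the error type here is hypothetical, not from the commit):

    use axum::http::StatusCode;
    use axum::response::{IntoResponse, Response};

    #[derive(Debug)]
    struct AppError(std::io::Error); // illustrative error type

    impl IntoResponse for AppError {
        fn into_response(self) -> Response {
            // Detailed error goes to the logs only; the client gets an opaque 500.
            tracing::error!("{:?}", self);
            StatusCode::INTERNAL_SERVER_ERROR.into_response()
        }
    }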

View File

@@ -1,28 +1,33 @@
 use super::package::Package;
 use libarchive::write::{Builder, WriteEntry};
 use libarchive::{Entry, WriteFilter, WriteFormat};
-use std::collections::HashSet;
 use std::fs;
 use std::io;
 use std::path::{Path, PathBuf};

+const ANY_ARCH: &str = "any";
+
 /// Overarching abstraction that orchestrates updating the repositories stored on the server
 pub struct RepoGroupManager {
     repo_dir: PathBuf,
     pkg_dir: PathBuf,
-    default_arch: String,
 }

+fn parse_pkg_filename(file_name: &str) -> (String, &str, &str, &str) {
+    let name_parts = file_name.split('-').collect::<Vec<_>>();
+    let name = name_parts[..name_parts.len() - 3].join("-");
+    let version = name_parts[name_parts.len() - 3];
+    let release = name_parts[name_parts.len() - 2];
+    let (arch, _) = name_parts[name_parts.len() - 1].split_once(".").unwrap();
+
+    (name, version, release, arch)
+}
+
 impl RepoGroupManager {
-    pub fn new<P1: AsRef<Path>, P2: AsRef<Path>>(
-        repo_dir: P1,
-        pkg_dir: P2,
-        default_arch: &str,
-    ) -> Self {
+    pub fn new<P1: AsRef<Path>, P2: AsRef<Path>>(repo_dir: P1, pkg_dir: P2) -> Self {
         RepoGroupManager {
             repo_dir: repo_dir.as_ref().to_path_buf(),
             pkg_dir: pkg_dir.as_ref().to_path_buf(),
-            default_arch: String::from(default_arch),
         }
     }
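The new `parse_pkg_filename` helper splits from the right, since an Arch package name may itself contain hyphens; note that the final `unwrap` panics on a file name without a dot. A worked example (the file name is illustrative):

    // "repo-db-tools-1.2.0-3-x86_64.pkg.tar.zst" splits on '-' into
    // ["repo", "db", "tools", "1.2.0", "3", "x86_64.pkg.tar.zst"];
    // everything before the last three parts is the package name.
    let (name, version, release, arch) = parse_pkg_filename("repo-db-tools-1.2.0-3-x86_64.pkg.tar.zst");
    assert_eq!(name, "repo-db-tools");
    assert_eq!((version, release, arch), ("1.2.0", "3", "x86_64"));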
@@ -37,11 +42,27 @@ impl RepoGroupManager {
         ar_files.add_filter(WriteFilter::Gzip)?;
         ar_files.set_format(WriteFormat::PaxRestricted)?;

-        let mut ar_db = ar_db.open_file(subrepo_path.join(format!("{}.tar.gz", repo)))?;
+        let mut ar_db = ar_db.open_file(subrepo_path.join(format!("{}.db.tar.gz", repo)))?;
         let mut ar_files =
             ar_files.open_file(subrepo_path.join(format!("{}.files.tar.gz", repo)))?;

-        for entry in subrepo_path.read_dir()? {
+        // All architectures should also include the "any" architecture, except for the "any"
+        // architecture itself.
+        let any_entries_iter = if arch != ANY_ARCH {
+            let repo_any_dir = self.repo_dir.join(repo).join(ANY_ARCH);
+
+            if repo_any_dir.try_exists()? {
+                Some(repo_any_dir.read_dir()?)
+            } else {
+                None
+            }
+        } else {
+            None
+        }
+        .into_iter()
+        .flatten();
+
+        for entry in subrepo_path.read_dir()?.chain(any_entries_iter) {
             let entry = entry?;

             if entry.file_type()?.is_dir() {
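The `Option<fs::ReadDir>` is converted into an iterator so the "any" entries can be chained unconditionally: `Option::into_iter` yields the inner value zero or one times, and `flatten` then yields that iterator's items, or nothing for `None`. The trick in isolation:

    let some: Option<Vec<u32>> = Some(vec![1, 2]);
    let none: Option<Vec<u32>> = None;

    // A present value contributes its items; an absent one contributes none.
    assert_eq!(some.into_iter().flatten().collect::<Vec<_>>(), vec![1, 2]);
    assert_eq!(none.into_iter().flatten().count(), 0);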
@@ -68,80 +89,65 @@ impl RepoGroupManager {
             }
         }

-        ar_db.close().and(ar_files.close()).map_err(Into::into)
+        ar_db.close()?;
+        ar_files.close()?;
+
+        Ok(())
+    }
+
+    /// Synchronize all present architectures' db archives in the given repository.
+    pub fn sync_all(&mut self, repo: &str) -> io::Result<()> {
+        for entry in self.repo_dir.join(repo).read_dir()? {
+            let entry = entry?;
+
+            if entry.file_type()?.is_dir() {
+                self.sync(repo, &entry.file_name().to_string_lossy())?;
+            }
+        }
+
+        Ok(())
     }

     pub fn add_pkg_from_path<P: AsRef<Path>>(&mut self, repo: &str, path: P) -> io::Result<()> {
         let mut pkg = Package::open(&path)?;
         pkg.calculate_checksum()?;

-        let archs = self.add_pkg_in_repo(repo, &pkg)?;
+        self.add_pkg(repo, &pkg)?;

-        // We add the package to each architecture it was added to by hard-linking the provided
-        // package file. This prevents storing a package of type "any" multiple times on disk.
-        for arch in archs {
-            let arch_repo_pkg_path = self.pkg_dir.join(repo).join(arch);
-            let dest_pkg_path = arch_repo_pkg_path.join(pkg.file_name());
-
-            fs::create_dir_all(&arch_repo_pkg_path)?;
-            fs::hard_link(&path, dest_pkg_path)?;
-        }
-
-        fs::remove_file(path)
+        // After successfully adding the package, we move it to the packages directory
+        let dest_pkg_path = self.pkg_dir.join(repo).join(pkg.file_name());
+        fs::create_dir_all(self.pkg_dir.join(repo))?;
+        fs::rename(&path, dest_pkg_path)
     }

     /// Add a package to the given repo, returning to what architectures the package was added.
-    pub fn add_pkg_in_repo(&mut self, repo: &str, pkg: &Package) -> io::Result<HashSet<String>> {
-        let mut arch_repos: HashSet<String> = HashSet::new();
-
-        if pkg.info.arch != "any" {
-            self.add_pkg_in_arch_repo(repo, &pkg.info.arch, pkg)?;
-            arch_repos.insert(pkg.info.arch.clone());
-        }
-        // Packages of arch "any" are added to every existing arch
-        else {
-            arch_repos.insert(self.default_arch.clone());
-
-            let repo_dir = self.repo_dir.join(repo);
-
-            if repo_dir.exists() {
-                for entry in repo_dir.read_dir()? {
-                    arch_repos.insert(entry?.file_name().to_string_lossy().to_string());
-                }
-            }
-
-            for arch in arch_repos.iter() {
-                self.add_pkg_in_arch_repo(repo, arch, pkg)?;
-            }
-        }
-
-        Ok(arch_repos)
-    }
-
-    pub fn add_pkg_in_arch_repo(
-        &mut self,
-        repo: &str,
-        arch: &str,
-        pkg: &Package,
-    ) -> io::Result<()> {
-        let pkg_dir = self
+    pub fn add_pkg(&mut self, repo: &str, pkg: &Package) -> io::Result<()> {
+        // We first remove any existing version of the package
+        self.remove_pkg(repo, &pkg.info.arch, &pkg.info.name, false)?;
+
+        // Write the `desc` and `files` metadata files to disk
+        let metadata_dir = self
             .repo_dir
             .join(repo)
-            .join(arch)
+            .join(&pkg.info.arch)
             .join(format!("{}-{}", pkg.info.name, pkg.info.version));

-        // We first remove the previous version of the package, if present
-        self.remove_pkg_from_arch_repo(repo, arch, &pkg.info.name, false)?;
-
-        fs::create_dir_all(&pkg_dir)?;
+        fs::create_dir_all(&metadata_dir)?;

-        let mut desc_file = fs::File::create(pkg_dir.join("desc"))?;
+        let mut desc_file = fs::File::create(metadata_dir.join("desc"))?;
         pkg.write_desc(&mut desc_file)?;

-        let mut files_file = fs::File::create(pkg_dir.join("files"))?;
+        let mut files_file = fs::File::create(metadata_dir.join("files"))?;
         pkg.write_files(&mut files_file)?;

-        self.sync(repo, arch)
+        // If a package of type "any" is added, we need to update every existing database
+        if pkg.info.arch == ANY_ARCH {
+            self.sync_all(repo)?;
+        } else {
+            self.sync(repo, &pkg.info.arch)?;
+        }
+
+        Ok(())
     }

     pub fn remove_repo(&mut self, repo: &str) -> io::Result<bool> {
@@ -150,41 +156,57 @@ impl RepoGroupManager {
         if !repo_dir.exists() {
             Ok(false)
         } else {
-            fs::remove_dir_all(&repo_dir)
-                .and_then(|_| fs::remove_dir_all(self.pkg_dir.join(repo)))?;
+            fs::remove_dir_all(&repo_dir)?;
+            fs::remove_dir_all(self.pkg_dir.join(repo))?;

             Ok(true)
         }
     }

-    pub fn remove_arch_repo(&mut self, repo: &str, arch: &str) -> io::Result<bool> {
+    pub fn remove_repo_arch(&mut self, repo: &str, arch: &str) -> io::Result<bool> {
         let sub_path = PathBuf::from(repo).join(arch);
         let repo_dir = self.repo_dir.join(&sub_path);

         if !repo_dir.exists() {
-            Ok(false)
-        } else {
-            fs::remove_dir_all(&repo_dir)
-                .and_then(|_| fs::remove_dir_all(self.pkg_dir.join(sub_path)))?;
+            return Ok(false);
+        }

-            Ok(true)
-        }
+        fs::remove_dir_all(&repo_dir)?;
+
+        // Remove every package archive for the architecture
+        for entry in self.pkg_dir.join(repo).read_dir()? {
+            let entry = entry?;
+            let file_name = entry.file_name();
+            let file_name = file_name.to_string_lossy();
+            let (_, _, _, pkg_arch) = parse_pkg_filename(&file_name);
+
+            if arch == pkg_arch {
+                fs::remove_file(entry.path())?;
+            }
+        }
+
+        // Removing the "any" architecture updates all other repositories
+        if arch == ANY_ARCH {
+            self.sync_all(repo)?;
+        }
+
+        Ok(true)
     }

-    pub fn remove_pkg_from_arch_repo(
+    pub fn remove_pkg(
         &mut self,
         repo: &str,
         arch: &str,
         pkg_name: &str,
         sync: bool,
     ) -> io::Result<bool> {
-        let arch_repo_dir = self.repo_dir.join(repo).join(arch);
+        let repo_arch_dir = self.repo_dir.join(repo).join(arch);

-        if !arch_repo_dir.exists() {
+        if !repo_arch_dir.exists() {
             return Ok(false);
         }

-        for entry in arch_repo_dir.read_dir()? {
+        for entry in repo_arch_dir.read_dir()? {
             let entry = entry?;

             // Make sure we skip the archive files
@@ -204,18 +226,15 @@ impl RepoGroupManager {
                 fs::remove_dir_all(entry.path())?;

                 // Also remove the old package archive
-                let arch_repo_pkg_dir = self.pkg_dir.join(repo).join(arch);
+                let repo_pkg_dir = self.pkg_dir.join(repo);

-                arch_repo_pkg_dir.read_dir()?.try_for_each(|res| {
+                repo_pkg_dir.read_dir()?.try_for_each(|res| {
                     res.and_then(|entry: fs::DirEntry| {
                         let file_name = entry.file_name();
                         let file_name = file_name.to_string_lossy();
+                        let (name, _, _, pkg_arch) = parse_pkg_filename(&file_name);

-                        // Same trick, but for package files, we also need to trim the arch
-                        let name_parts = file_name.split('-').collect::<Vec<_>>();
-                        let name = name_parts[..name_parts.len() - 3].join("-");
-
-                        if name == pkg_name {
+                        if name == pkg_name && arch == pkg_arch {
                             fs::remove_file(entry.path())
                         } else {
                             Ok(())
@@ -224,8 +243,12 @@ impl RepoGroupManager {
                 })?;

         if sync {
-            self.sync(repo, arch)?;
+            if arch == ANY_ARCH {
+                self.sync_all(repo)?;
+            } else {
+                self.sync(repo, arch)?;
+            }
         }

         return Ok(true);
     }
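`remove_pkg` drives the archive cleanup through `Iterator::try_for_each`, so the first failing `fs::remove_file` aborts the directory scan with its `io::Error`. A self-contained sketch of the same pattern (directory and suffix are illustrative):

    use std::{fs, io, path::Path};

    // Remove every file in `dir` whose name ends in `suffix`, stopping at the
    // first I/O error; `read_dir` yields io::Result<DirEntry> items, and
    // try_for_each propagates the first Err it sees.
    fn remove_matching(dir: &Path, suffix: &str) -> io::Result<()> {
        dir.read_dir()?.try_for_each(|res| {
            let entry = res?;
            if entry.file_name().to_string_lossy().ends_with(suffix) {
                fs::remove_file(entry.path())?;
            }
            Ok(())
        })
    }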

View File

@@ -73,7 +73,7 @@ async fn delete_arch_repo(
     let clone = Arc::clone(&global.repo_manager);

     let repo_removed =
-        tokio::task::spawn_blocking(move || clone.write().unwrap().remove_arch_repo(&repo, &arch))
+        tokio::task::spawn_blocking(move || clone.write().unwrap().remove_repo_arch(&repo, &arch))
             .await??;

     if repo_removed {
@@ -100,10 +100,7 @@ async fn delete_package(
     let clone = Arc::clone(&global.repo_manager);

     let pkg_removed = tokio::task::spawn_blocking(move || {
-        clone
-            .write()
-            .unwrap()
-            .remove_pkg_from_arch_repo(&repo, &arch, &name, true)
+        clone.write().unwrap().remove_pkg(&repo, &arch, &name, true)
     })
     .await??;
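Both handlers wrap the synchronous `RwLock` in `tokio::task::spawn_blocking` so executor threads are never blocked on the lock; the double `?` first surfaces the task's `JoinError`, then the inner `io::Result`. A reduced sketch of the pattern, assuming `ServerError` converts from both error types as it does in this codebase (argument values are illustrative):

    use std::sync::{Arc, RwLock};

    async fn remove(manager: Arc<RwLock<RepoGroupManager>>) -> Result<bool, ServerError> {
        let clone = Arc::clone(&manager);

        // The lock is acquired on a blocking thread; the async task only awaits.
        let removed = tokio::task::spawn_blocking(move || {
            clone.write().unwrap().remove_pkg("extra", "x86_64", "some-pkg", true)
        })
        .await??;

        Ok(removed)
    }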