From 2e5c84a48d5648c415866bcf4876c76944136191 Mon Sep 17 00:00:00 2001
From: Chewing_Bever
Date: Wed, 2 Aug 2023 18:48:17 +0200
Subject: [PATCH] refactor(server): rewrite part of repo logic; remove need
 for default arch

---
 server/src/cli.rs          |   4 +-
 server/src/error.rs        |   2 +
 server/src/repo/manager.rs | 193 +++++++++++++++++++++----------------
 server/src/repo/mod.rs     |   7 +-
 4 files changed, 113 insertions(+), 93 deletions(-)

diff --git a/server/src/cli.rs b/server/src/cli.rs
index 74e5941..d8f96f7 100644
--- a/server/src/cli.rs
+++ b/server/src/cli.rs
@@ -26,8 +26,6 @@ pub struct Cli {
     pub pkg_dir: PathBuf,
     /// Directory where repository metadata is stored
    pub repo_dir: PathBuf,
-    /// Default architecture to add packages with arch "any" to
-    pub default_arch: String,
 }
 
 impl FromRef<Global> for Arc<RwLock<RepoGroupManager>> {
@@ -51,7 +49,7 @@ impl Cli {
             repo_dir: self.repo_dir.clone(),
             pkg_dir: self.pkg_dir.clone(),
         };
-        let repo_manager = RepoGroupManager::new(&self.repo_dir, &self.pkg_dir, &self.default_arch);
+        let repo_manager = RepoGroupManager::new(&self.repo_dir, &self.pkg_dir);
 
         let global = Global {
             config,
diff --git a/server/src/error.rs b/server/src/error.rs
index fa58962..be4b423 100644
--- a/server/src/error.rs
+++ b/server/src/error.rs
@@ -25,6 +25,8 @@ impl Error for ServerError {}
 
 impl IntoResponse for ServerError {
     fn into_response(self) -> Response {
+        tracing::error!("{:?}", self);
+
         match self {
             ServerError::IO(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
             ServerError::Axum(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
diff --git a/server/src/repo/manager.rs b/server/src/repo/manager.rs
index df6ee17..af20319 100644
--- a/server/src/repo/manager.rs
+++ b/server/src/repo/manager.rs
@@ -1,28 +1,33 @@
 use super::package::Package;
 use libarchive::write::{Builder, WriteEntry};
 use libarchive::{Entry, WriteFilter, WriteFormat};
-use std::collections::HashSet;
 use std::fs;
 use std::io;
 use std::path::{Path, PathBuf};
 
+const ANY_ARCH: &str = "any";
+
 /// Overarching abstraction that orchestrates updating the repositories stored on the server
 pub struct RepoGroupManager {
     repo_dir: PathBuf,
     pkg_dir: PathBuf,
-    default_arch: String,
+}
+
+fn parse_pkg_filename(file_name: &str) -> (String, &str, &str, &str) {
+    let name_parts = file_name.split('-').collect::<Vec<&str>>();
+    let name = name_parts[..name_parts.len() - 3].join("-");
+    let version = name_parts[name_parts.len() - 3];
+    let release = name_parts[name_parts.len() - 2];
+    let (arch, _) = name_parts[name_parts.len() - 1].split_once(".").unwrap();
+
+    (name, version, release, arch)
 }
 
 impl RepoGroupManager {
-    pub fn new<P1: AsRef<Path>, P2: AsRef<Path>>(
-        repo_dir: P1,
-        pkg_dir: P2,
-        default_arch: &str,
-    ) -> Self {
+    pub fn new<P1: AsRef<Path>, P2: AsRef<Path>>(repo_dir: P1, pkg_dir: P2) -> Self {
         RepoGroupManager {
             repo_dir: repo_dir.as_ref().to_path_buf(),
             pkg_dir: pkg_dir.as_ref().to_path_buf(),
-            default_arch: String::from(default_arch),
         }
     }
 
@@ -37,11 +42,27 @@
         ar_files.add_filter(WriteFilter::Gzip)?;
         ar_files.set_format(WriteFormat::PaxRestricted)?;
 
-        let mut ar_db = ar_db.open_file(subrepo_path.join(format!("{}.tar.gz", repo)))?;
+        let mut ar_db = ar_db.open_file(subrepo_path.join(format!("{}.db.tar.gz", repo)))?;
         let mut ar_files =
             ar_files.open_file(subrepo_path.join(format!("{}.files.tar.gz", repo)))?;
 
-        for entry in subrepo_path.read_dir()? {
+        // All architectures should also include the "any" architecture, except for the "any"
+        // architecture itself.
+        let any_entries_iter = if arch != ANY_ARCH {
+            let repo_any_dir = self.repo_dir.join(repo).join(ANY_ARCH);
+
+            if repo_any_dir.try_exists()? {
+                Some(repo_any_dir.read_dir()?)
+            } else {
+                None
+            }
+        } else {
+            None
+        }
+        .into_iter()
+        .flatten();
+
+        for entry in subrepo_path.read_dir()?.chain(any_entries_iter) {
             let entry = entry?;
 
             if entry.file_type()?.is_dir() {
@@ -68,80 +89,65 @@
             }
         }
 
-        ar_db.close().and(ar_files.close()).map_err(Into::into)
+        ar_db.close()?;
+        ar_files.close()?;
+
+        Ok(())
+    }
+
+    /// Synchronize all present architectures' db archives in the given repository.
+    pub fn sync_all(&mut self, repo: &str) -> io::Result<()> {
+        for entry in self.repo_dir.join(repo).read_dir()? {
+            let entry = entry?;
+
+            if entry.file_type()?.is_dir() {
+                self.sync(repo, &entry.file_name().to_string_lossy())?;
+            }
+        }
+
+        Ok(())
     }
 
     pub fn add_pkg_from_path<P: AsRef<Path>>(&mut self, repo: &str, path: P) -> io::Result<()> {
         let mut pkg = Package::open(&path)?;
         pkg.calculate_checksum()?;
 
-        let archs = self.add_pkg_in_repo(repo, &pkg)?;
+        self.add_pkg(repo, &pkg)?;
 
-        // We add the package to each architecture it was added to by hard-linking the provided
-        // package file. This prevents storing a package of type "any" multiple times on disk.
-        for arch in archs {
-            let arch_repo_pkg_path = self.pkg_dir.join(repo).join(arch);
-            let dest_pkg_path = arch_repo_pkg_path.join(pkg.file_name());
-
-            fs::create_dir_all(&arch_repo_pkg_path)?;
-            fs::hard_link(&path, dest_pkg_path)?;
-        }
-
-        fs::remove_file(path)
+        // After successfully adding the package, we move it to the packages directory
+        let dest_pkg_path = self.pkg_dir.join(repo).join(pkg.file_name());
+        fs::create_dir_all(self.pkg_dir.join(repo))?;
+        fs::rename(&path, dest_pkg_path)
     }
 
-    /// Add a package to the given repo, returning to what architectures the package was added.
-    pub fn add_pkg_in_repo(&mut self, repo: &str, pkg: &Package) -> io::Result<HashSet<String>> {
-        let mut arch_repos: HashSet<String> = HashSet::new();
+    /// Add a package to the given repo.
+    pub fn add_pkg(&mut self, repo: &str, pkg: &Package) -> io::Result<()> {
+        // We first remove any existing version of the package
+        self.remove_pkg(repo, &pkg.info.arch, &pkg.info.name, false)?;
 
-        if pkg.info.arch != "any" {
-            self.add_pkg_in_arch_repo(repo, &pkg.info.arch, pkg)?;
-            arch_repos.insert(pkg.info.arch.clone());
-        }
-        // Packages of arch "any" are added to every existing arch
-        else {
-            arch_repos.insert(self.default_arch.clone());
-
-            let repo_dir = self.repo_dir.join(repo);
-
-            if repo_dir.exists() {
-                for entry in repo_dir.read_dir()? {
-                    arch_repos.insert(entry?.file_name().to_string_lossy().to_string());
-                }
-            }
-
-            for arch in arch_repos.iter() {
-                self.add_pkg_in_arch_repo(repo, arch, pkg)?;
-            }
-        }
-
-        Ok(arch_repos)
-    }
-
-    pub fn add_pkg_in_arch_repo(
-        &mut self,
-        repo: &str,
-        arch: &str,
-        pkg: &Package,
-    ) -> io::Result<()> {
-        let pkg_dir = self
+        // Write the `desc` and `files` metadata files to disk
+        let metadata_dir = self
             .repo_dir
             .join(repo)
-            .join(arch)
+            .join(&pkg.info.arch)
             .join(format!("{}-{}", pkg.info.name, pkg.info.version));
 
-        // We first remove the previous version of the package, if present
-        self.remove_pkg_from_arch_repo(repo, arch, &pkg.info.name, false)?;
+        fs::create_dir_all(&metadata_dir)?;
 
-        fs::create_dir_all(&pkg_dir)?;
-
-        let mut desc_file = fs::File::create(pkg_dir.join("desc"))?;
+        let mut desc_file = fs::File::create(metadata_dir.join("desc"))?;
         pkg.write_desc(&mut desc_file)?;
 
-        let mut files_file = fs::File::create(pkg_dir.join("files"))?;
+        let mut files_file = fs::File::create(metadata_dir.join("files"))?;
         pkg.write_files(&mut files_file)?;
 
-        self.sync(repo, arch)
+        // If a package of type "any" is added, we need to update every existing database
+        if pkg.info.arch == ANY_ARCH {
+            self.sync_all(repo)?;
+        } else {
+            self.sync(repo, &pkg.info.arch)?;
+        }
+
+        Ok(())
     }
 
     pub fn remove_repo(&mut self, repo: &str) -> io::Result<bool> {
@@ -150,41 +156,57 @@
         if !repo_dir.exists() {
             Ok(false)
         } else {
-            fs::remove_dir_all(&repo_dir)
-                .and_then(|_| fs::remove_dir_all(self.pkg_dir.join(repo)))?;
+            fs::remove_dir_all(&repo_dir)?;
+            fs::remove_dir_all(self.pkg_dir.join(repo))?;
 
             Ok(true)
         }
     }
 
-    pub fn remove_arch_repo(&mut self, repo: &str, arch: &str) -> io::Result<bool> {
+    pub fn remove_repo_arch(&mut self, repo: &str, arch: &str) -> io::Result<bool> {
         let sub_path = PathBuf::from(repo).join(arch);
         let repo_dir = self.repo_dir.join(&sub_path);
 
         if !repo_dir.exists() {
-            Ok(false)
-        } else {
-            fs::remove_dir_all(&repo_dir)
-                .and_then(|_| fs::remove_dir_all(self.pkg_dir.join(sub_path)))?;
-
-            Ok(true)
+            return Ok(false);
         }
+
+        fs::remove_dir_all(&repo_dir)?;
+
+        // Remove every package archive for the architecture
+        for entry in self.pkg_dir.join(repo).read_dir()? {
+            let entry = entry?;
+            let file_name = entry.file_name();
+            let file_name = file_name.to_string_lossy();
+            let (_, _, _, pkg_arch) = parse_pkg_filename(&file_name);
+
+            if arch == pkg_arch {
+                fs::remove_file(entry.path())?;
+            }
+        }
+
+        // Removing the "any" architecture means all other databases must be resynced
+        if arch == ANY_ARCH {
+            self.sync_all(repo)?;
+        }
+
+        Ok(true)
    }
 
-    pub fn remove_pkg_from_arch_repo(
+    pub fn remove_pkg(
         &mut self,
         repo: &str,
         arch: &str,
         pkg_name: &str,
         sync: bool,
     ) -> io::Result<bool> {
-        let arch_repo_dir = self.repo_dir.join(repo).join(arch);
+        let repo_arch_dir = self.repo_dir.join(repo).join(arch);
 
-        if !arch_repo_dir.exists() {
+        if !repo_arch_dir.exists() {
             return Ok(false);
         }
 
-        for entry in arch_repo_dir.read_dir()? {
{ let entry = entry?; // Make sure we skip the archive files @@ -204,18 +226,15 @@ impl RepoGroupManager { fs::remove_dir_all(entry.path())?; // Also remove the old package archive - let arch_repo_pkg_dir = self.pkg_dir.join(repo).join(arch); + let repo_pkg_dir = self.pkg_dir.join(repo); - arch_repo_pkg_dir.read_dir()?.try_for_each(|res| { + repo_pkg_dir.read_dir()?.try_for_each(|res| { res.and_then(|entry: fs::DirEntry| { let file_name = entry.file_name(); let file_name = file_name.to_string_lossy(); + let (name, _, _, pkg_arch) = parse_pkg_filename(&file_name); - // Same trick, but for package files, we also need to trim the arch - let name_parts = file_name.split('-').collect::>(); - let name = name_parts[..name_parts.len() - 3].join("-"); - - if name == pkg_name { + if name == pkg_name && arch == pkg_arch { fs::remove_file(entry.path()) } else { Ok(()) @@ -224,7 +243,11 @@ impl RepoGroupManager { })?; if sync { - self.sync(repo, arch)?; + if arch == ANY_ARCH { + self.sync_all(repo)?; + } else { + self.sync(repo, arch)?; + } } return Ok(true); diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 0618b11..8d0e464 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -73,7 +73,7 @@ async fn delete_arch_repo( let clone = Arc::clone(&global.repo_manager); let repo_removed = - tokio::task::spawn_blocking(move || clone.write().unwrap().remove_arch_repo(&repo, &arch)) + tokio::task::spawn_blocking(move || clone.write().unwrap().remove_repo_arch(&repo, &arch)) .await??; if repo_removed { @@ -100,10 +100,7 @@ async fn delete_package( let clone = Arc::clone(&global.repo_manager); let pkg_removed = tokio::task::spawn_blocking(move || { - clone - .write() - .unwrap() - .remove_pkg_from_arch_repo(&repo, &arch, &name, true) + clone.write().unwrap().remove_pkg(&repo, &arch, &name, true) }) .await??;