use super::package::Package;
use libarchive::write::{Builder, WriteEntry};
use libarchive::{Entry, WriteFilter, WriteFormat};
use std::fs;
use std::io;
use std::path::{Path, PathBuf};

pub const ANY_ARCH: &str = "any";

/// Overarching abstraction that orchestrates updating the repositories stored
/// on the server.
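///
/// A construction sketch; the two directories are made-up examples. The first
/// holds the generated database metadata, the second the package files:
///
/// ```ignore
/// let mut mgr = RepoGroupManager::new("/srv/repos", "/srv/pkgs");
/// ```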
pub struct RepoGroupManager {
    repo_dir: PathBuf,
    pkg_dir: PathBuf,
}

/// Parse a package file name of the form
/// `{name}-{pkgver}-{pkgrel}-{arch}.pkg.tar.{ext}` into its components.
///
/// The package name itself may contain dashes, so the final three
/// dash-separated parts are taken as pkgver, pkgrel and arch, and everything
/// before them is rejoined as the name. Note that this panics on file names
/// that don't follow the expected format.
fn parse_pkg_filename(file_name: &str) -> (String, &str, &str, &str) {
    let name_parts = file_name.split('-').collect::<Vec<_>>();
    let name = name_parts[..name_parts.len() - 3].join("-");
    let version = name_parts[name_parts.len() - 3];
    let release = name_parts[name_parts.len() - 2];
    // The architecture is everything before the first dot of the last part;
    // the rest is the archive extension
    let (arch, _) = name_parts[name_parts.len() - 1].split_once('.').unwrap();

    (name, version, release, arch)
}

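// A quick sanity check of the parsing above. The file name is a made-up
// example in the usual pacman format, not a real package.
#[cfg(test)]
mod filename_tests {
    use super::parse_pkg_filename;

    #[test]
    fn parses_dashed_package_names() {
        let (name, version, release, arch) =
            parse_pkg_filename("gcc-libs-13.2.1-3-x86_64.pkg.tar.zst");

        assert_eq!(name, "gcc-libs");
        assert_eq!(version, "13.2.1");
        assert_eq!(release, "3");
        assert_eq!(arch, "x86_64");
    }
}
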
impl RepoGroupManager {
    pub fn new<P1: AsRef<Path>, P2: AsRef<Path>>(repo_dir: P1, pkg_dir: P2) -> Self {
        RepoGroupManager {
            repo_dir: repo_dir.as_ref().to_path_buf(),
            pkg_dir: pkg_dir.as_ref().to_path_buf(),
        }
    }

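    /// Regenerate the database archives for one architecture of a repository,
    /// i.e. the `{repo}.db.tar.gz` and `{repo}.files.tar.gz` files.
    ///
    /// A usage sketch; the directory paths and repository name are made-up
    /// examples, `ignore`d because it requires a populated repository
    /// directory:
    ///
    /// ```ignore
    /// let mut mgr = RepoGroupManager::new("/srv/repos", "/srv/pkgs");
    /// mgr.sync("extra", "x86_64")?;
    /// ```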
    pub fn sync(&mut self, repo: &str, arch: &str) -> io::Result<()> {
        let subrepo_path = self.repo_dir.join(repo).join(arch);

        let mut ar_db = Builder::new();
        ar_db.add_filter(WriteFilter::Gzip)?;
        ar_db.set_format(WriteFormat::PaxRestricted)?;

        let mut ar_files = Builder::new();
        ar_files.add_filter(WriteFilter::Gzip)?;
        ar_files.set_format(WriteFormat::PaxRestricted)?;

        let mut ar_db = ar_db.open_file(subrepo_path.join(format!("{}.db.tar.gz", repo)))?;
        let mut ar_files =
            ar_files.open_file(subrepo_path.join(format!("{}.files.tar.gz", repo)))?;

        // Every architecture's database should also include the "any"
        // architecture's packages, except for the "any" database itself.
        let repo_any_dir = self.repo_dir.join(repo).join(ANY_ARCH);

        let any_entries_iter = if arch != ANY_ARCH && repo_any_dir.try_exists()? {
            Some(repo_any_dir.read_dir()?)
        } else {
            None
        }
        .into_iter()
        .flatten();

        for entry in subrepo_path.read_dir()?.chain(any_entries_iter) {
            let entry = entry?;

            if entry.file_type()?.is_dir() {
                // The desc file needs to be added to both archives
                let path_in_tar = PathBuf::from(entry.file_name()).join("desc");
                let src_path = entry.path().join("desc");

                let mut ar_entry = WriteEntry::new();
                ar_entry.set_pathname(&path_in_tar);
                ar_entry.set_mode(0o100644);

                ar_db.append_path(&mut ar_entry, &src_path)?;
                ar_files.append_path(&mut ar_entry, src_path)?;

                // The files file is only required in the files database
                let path_in_tar = PathBuf::from(entry.file_name()).join("files");
                let src_path = entry.path().join("files");

                let mut ar_entry = WriteEntry::new();
                ar_entry.set_pathname(&path_in_tar);
                ar_entry.set_mode(0o100644);

                ar_files.append_path(&mut ar_entry, src_path)?;
            }
        }

        ar_db.close()?;
        ar_files.close()?;

        Ok(())
    }

    /// Synchronize the database archives for every architecture present in
    /// the given repository.
    pub fn sync_all(&mut self, repo: &str) -> io::Result<()> {
        for entry in self.repo_dir.join(repo).read_dir()? {
            let entry = entry?;

            if entry.file_type()?.is_dir() {
                self.sync(repo, &entry.file_name().to_string_lossy())?;
            }
        }

        Ok(())
    }

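    /// Add the package file at `path` to the given repository, then move the
    /// file itself into the package directory.
    ///
    /// A usage sketch; the repository name and file path are made-up examples,
    /// `ignore`d because it needs a real package archive on disk:
    ///
    /// ```ignore
    /// let mut mgr = RepoGroupManager::new("/srv/repos", "/srv/pkgs");
    /// let pkg = mgr.add_pkg_from_path("extra", "/tmp/foo-1.0.0-1-x86_64.pkg.tar.gz")?;
    /// ```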
    pub fn add_pkg_from_path<P: AsRef<Path>>(
        &mut self,
        repo: &str,
        path: P,
    ) -> io::Result<Package> {
        let pkg = Package::open(&path)?;

        self.add_pkg(repo, &pkg)?;

        // After successfully adding the package, we move it to the packages directory
        let dest_pkg_path = self
            .pkg_dir
            .join(repo)
            .join(&pkg.info.arch)
            .join(pkg.file_name());

        fs::create_dir_all(dest_pkg_path.parent().unwrap())?;
        fs::rename(&path, dest_pkg_path)?;

        Ok(pkg)
    }

    /// Add a package to the given repo, writing its metadata files and
    /// regenerating the affected database archives.
    pub fn add_pkg(&mut self, repo: &str, pkg: &Package) -> io::Result<()> {
        // We first remove any existing version of the package
        self.remove_pkg(repo, &pkg.info.arch, &pkg.info.name, false)?;

        // Write the `desc` and `files` metadata files to disk
        let metadata_dir = self
            .repo_dir
            .join(repo)
            .join(&pkg.info.arch)
            .join(format!("{}-{}", pkg.info.name, pkg.info.version));

        fs::create_dir_all(&metadata_dir)?;

        let mut desc_file = fs::File::create(metadata_dir.join("desc"))?;
        pkg.write_desc(&mut desc_file)?;

        let mut files_file = fs::File::create(metadata_dir.join("files"))?;
        pkg.write_files(&mut files_file)?;

        // A package with arch "any" is part of every architecture's database,
        // so all of them need to be regenerated
        if pkg.info.arch == ANY_ARCH {
            self.sync_all(repo)?;
        } else {
            self.sync(repo, &pkg.info.arch)?;
        }

        Ok(())
    }

    /// Remove an entire repository, deleting both its database metadata and
    /// its stored packages. Returns whether the repository existed.
    pub fn remove_repo(&mut self, repo: &str) -> io::Result<bool> {
        let repo_dir = self.repo_dir.join(repo);

        if !repo_dir.exists() {
            Ok(false)
        } else {
            fs::remove_dir_all(&repo_dir)?;
            fs::remove_dir_all(self.pkg_dir.join(repo))?;

            Ok(true)
        }
    }

    /// Remove a single architecture from a repository, deleting both its
    /// database metadata and its stored packages. Returns whether the
    /// architecture was present.
    pub fn remove_repo_arch(&mut self, repo: &str, arch: &str) -> io::Result<bool> {
        let sub_path = PathBuf::from(repo).join(arch);
        let repo_dir = self.repo_dir.join(&sub_path);

        if !repo_dir.exists() {
            return Ok(false);
        }

        fs::remove_dir_all(&repo_dir)?;
        fs::remove_dir_all(self.pkg_dir.join(sub_path))?;

        // Removing the "any" architecture affects every other architecture's
        // database, so they all need to be regenerated
        if arch == ANY_ARCH {
            self.sync_all(repo)?;
        }

        Ok(true)
    }

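    /// Remove a package from the given repository and architecture, deleting
    /// both its metadata directory and its stored package archive. Returns
    /// whether the package was present. When `sync` is true, the affected
    /// database archives are regenerated afterwards.
    ///
    /// A usage sketch with made-up names, `ignore`d as it needs a populated
    /// repository:
    ///
    /// ```ignore
    /// let mut mgr = RepoGroupManager::new("/srv/repos", "/srv/pkgs");
    /// let removed = mgr.remove_pkg("extra", "x86_64", "foo", true)?;
    /// ```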
    pub fn remove_pkg(
        &mut self,
        repo: &str,
        arch: &str,
        pkg_name: &str,
        sync: bool,
    ) -> io::Result<bool> {
        let repo_arch_dir = self.repo_dir.join(repo).join(arch);

        if !repo_arch_dir.exists() {
            return Ok(false);
        }

        for entry in repo_arch_dir.read_dir()? {
            let entry = entry?;

            // Make sure we skip the database archive files
            if !entry.metadata()?.is_dir() {
                continue;
            }

            let file_name = entry.file_name();
            let file_name = file_name.to_string_lossy();

            // The directory is named `{name}-{pkgver}-{pkgrel}`. The last two
            // dash-separated parts are the pkgver and pkgrel, so we trim those
            // to recover the package name.
            let name_parts = file_name.split('-').collect::<Vec<_>>();
            let name = name_parts[..name_parts.len() - 2].join("-");

            if name == pkg_name {
                fs::remove_dir_all(entry.path())?;

                // Also remove the old package archive
                let repo_arch_pkg_dir = self.pkg_dir.join(repo).join(arch);

                repo_arch_pkg_dir.read_dir()?.try_for_each(|res| {
                    res.and_then(|entry: fs::DirEntry| {
                        let file_name = entry.file_name();
                        let file_name = file_name.to_string_lossy();
                        let (name, _, _, _) = parse_pkg_filename(&file_name);

                        if name == pkg_name {
                            fs::remove_file(entry.path())
                        } else {
                            Ok(())
                        }
                    })
                })?;

                if sync {
                    if arch == ANY_ARCH {
                        self.sync_all(repo)?;
                    } else {
                        self.sync(repo, arch)?;
                    }
                }

                return Ok(true);
            }
        }

        Ok(false)
    }
}
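
// `sync` merges the optional "any" directory listing into the main one with
// `Option::into_iter().flatten()`. A minimal standalone sketch of that idiom,
// with plain vectors standing in for the `ReadDir` iterators:
#[cfg(test)]
mod any_arch_iter_tests {
    #[test]
    fn none_contributes_no_items() {
        let any_entries: Option<Vec<u32>> = None;
        let merged: Vec<u32> = vec![1, 2]
            .into_iter()
            .chain(any_entries.into_iter().flatten())
            .collect();

        assert_eq!(merged, vec![1, 2]);
    }

    #[test]
    fn some_is_flattened_into_the_chain() {
        let any_entries = Some(vec![3, 4]);
        let merged: Vec<u32> = vec![1, 2]
            .into_iter()
            .chain(any_entries.into_iter().flatten())
            .collect();

        assert_eq!(merged, vec![1, 2, 3, 4]);
    }
}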