Compare commits

..

2 Commits

Author SHA1 Message Date
Jef Roosens fc844c685f
feat: log added packages 2024-05-29 10:15:19 +02:00
Jef Roosens 60d4478d83
feat: re-implement package remove route 2024-05-29 09:58:19 +02:00
8 changed files with 57 additions and 48 deletions

1
Cargo.lock generated
View File

@@ -1660,7 +1660,6 @@ dependencies = [
"futures", "futures",
"http-body-util", "http-body-util",
"libarchive", "libarchive",
"regex",
"sea-orm", "sea-orm",
"sea-orm-migration", "sea-orm-migration",
"serde", "serde",

View File

@@ -22,6 +22,13 @@ Another usecase for this would be creating a local mirror of your
distribution's repositories, which can greatly reduce your update times distribution's repositories, which can greatly reduce your update times
depending on your internet connection. depending on your internet connection.
Most users however don't need a full copy of a distro's package repository, so
Rieter also provides a "smart mirror" mode. In this mode, a Rieter instance
only syncs packages that have been requested before, e.g. from a previous
system update. This way, your updates will still be a lot faster as the
required packages are cached, but packages you don't use don't get stored,
saving you a lot of storage space.
### Build system ### Build system
The second goal is to create an easy-to-use build system for Pacman packages. The second goal is to create an easy-to-use build system for Pacman packages.

View File

@@ -13,7 +13,6 @@ clap = { version = "4.3.12", features = ["env", "derive"] }
futures = "0.3.28" futures = "0.3.28"
http-body-util = "0.1.1" http-body-util = "0.1.1"
libarchive = { path = "../libarchive" } libarchive = { path = "../libarchive" }
regex = "1.10.4"
sea-orm-migration = "0.12.1" sea-orm-migration = "0.12.1"
serde = { version = "1.0.178", features = ["derive"] } serde = { version = "1.0.178", features = ["derive"] }
sha256 = "1.1.4" sha256 = "1.1.4"

View File

@@ -83,13 +83,10 @@ impl Cli {
}; };
let repo_manager = MetaRepoMgr::new(&self.data_dir.join("repos")); let repo_manager = MetaRepoMgr::new(&self.data_dir.join("repos"));
let pkg_filename_re = regex::Regex::new(r"^([a-z0-9@_+][a-z0-9@_+-.]*)-((?:[0-9]+:)?[a-z0-9._]+-[0-9.]+)-([a-z0-9_]+)\.pkg\.tar\.([a-z0-9]+)$").unwrap();
let global = Global { let global = Global {
config, config,
repo_manager: Arc::new(RwLock::new(repo_manager)), repo_manager: Arc::new(RwLock::new(repo_manager)),
db, db,
pkg_filename_re,
}; };
// build our application with a single route // build our application with a single route

View File

@@ -133,7 +133,7 @@ pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Pack
.chain( .chain(
info.makedepends info.makedepends
.iter() .iter()
.map(|s| (PackageRelatedEnum::Depend, s)), .map(|s| (PackageRelatedEnum::Makedepend, s)),
) )
.chain( .chain(
info.checkdepends info.checkdepends

View File

@@ -21,7 +21,6 @@ pub struct Global {
config: Config, config: Config,
repo_manager: Arc<RwLock<MetaRepoMgr>>, repo_manager: Arc<RwLock<MetaRepoMgr>>,
db: sea_orm::DbConn, db: sea_orm::DbConn,
pkg_filename_re: regex::Regex,
} }
#[tokio::main] #[tokio::main]

View File

@@ -107,8 +107,10 @@ impl MetaRepoMgr {
ar_db.close().await?; ar_db.close().await?;
ar_files.close().await?; ar_files.close().await?;
tokio::fs::remove_file(desc_tmp_file_path).await?; // If this fails there's no point in failing the function + if there were no packages in
tokio::fs::remove_file(files_tmp_file_path).await?; // the repo, this fails anyway because the temp file doesn't exist
let _ = tokio::fs::remove_file(desc_tmp_file_path).await;
let _ = tokio::fs::remove_file(files_tmp_file_path).await;
Ok(()) Ok(())
} }
@@ -188,8 +190,21 @@ impl MetaRepoMgr {
let pkg = db::query::package::by_fields(conn, repo.id, arch, name, None, None).await?; let pkg = db::query::package::by_fields(conn, repo.id, arch, name, None, None).await?;
if let Some(pkg) = pkg { if let Some(pkg) = pkg {
// Remove package from database // Remove package from database & file system
tokio::fs::remove_file(
self.repo_dir
.join(&repo.name)
.join(super::package::filename(&pkg)),
)
.await?;
pkg.delete(conn).await?; pkg.delete(conn).await?;
if arch == ANY_ARCH {
self.generate_archives_all(conn, &repo.name).await?;
} else {
self.generate_archives(conn, &repo.name, arch).await?;
}
Ok(true) Ok(true)
} else { } else {
Ok(false) Ok(false)
@@ -204,7 +219,7 @@ impl MetaRepoMgr {
conn: &DbConn, conn: &DbConn,
reader: &mut R, reader: &mut R,
repo: &str, repo: &str,
) -> crate::Result<()> { ) -> crate::Result<(String, String, String)> {
// Copy file contents to temporary path so libarchive can work with it // Copy file contents to temporary path so libarchive can work with it
let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); let uuid: uuid::fmt::Simple = Uuid::new_v4().into();
let path = self.repo_dir.join(uuid.to_string()); let path = self.repo_dir.join(uuid.to_string());
@@ -247,6 +262,8 @@ impl MetaRepoMgr {
let dest_pkg_path = self.repo_dir.join(repo).join(pkg.file_name()); let dest_pkg_path = self.repo_dir.join(repo).join(pkg.file_name());
// Insert new package into database // Insert new package into database
let name = pkg.info.name.clone();
let version = pkg.info.version.clone();
let arch = pkg.info.arch.clone(); let arch = pkg.info.arch.clone();
db::query::package::insert(conn, repo_id, pkg).await?; db::query::package::insert(conn, repo_id, pkg).await?;
@@ -256,9 +273,11 @@ impl MetaRepoMgr {
// Synchronize archive databases // Synchronize archive databases
if arch == ANY_ARCH { if arch == ANY_ARCH {
self.generate_archives_all(conn, repo).await self.generate_archives_all(conn, repo).await?;
} else { } else {
self.generate_archives(conn, repo, &arch).await self.generate_archives(conn, repo, &arch).await?;
} }
Ok((name, version, arch))
} }
} }

View File

@@ -69,19 +69,17 @@ async fn post_package_archive(
Path(repo): Path<String>, Path(repo): Path<String>,
body: Body, body: Body,
) -> crate::Result<()> { ) -> crate::Result<()> {
let body = body.into_data_stream(); let mut body = StreamReader::new(body.into_data_stream().map_err(std::io::Error::other));
let body = body.map_err(std::io::Error::other); let (name, version, arch) = global
let mut body = StreamReader::new(body);
global
.repo_manager .repo_manager
.write() .write()
.await .await
.add_pkg_from_reader(&global.db, &mut body, &repo) .add_pkg_from_reader(&global.db, &mut body, &repo)
.await?; .await?;
Ok(()) tracing::info!("Added '{}-{}' to repository '{}' ({})", name, version, repo, arch);
//tracing::info!("Added '{}' to repository '{}'", pkg.file_name(), repo); Ok(())
} }
async fn delete_repo( async fn delete_repo(
@@ -126,34 +124,25 @@ async fn delete_arch_repo(
async fn delete_package( async fn delete_package(
State(global): State<crate::Global>, State(global): State<crate::Global>,
Path((repo, arch, file_name)): Path<(String, String, String)>, Path((repo, arch, pkg_name)): Path<(String, String, String)>,
) -> crate::Result<StatusCode> { ) -> crate::Result<StatusCode> {
Ok(StatusCode::NOT_FOUND) let pkg_removed = global
//global.repo_manager.write().unwrap().remove_pkg(&global.db, &repo, &arch, name) .repo_manager
//let clone = Arc::clone(&global.repo_manager); .write()
//let path = PathBuf::from(&repo).join(arch).join(&file_name); .await
// .remove_pkg(&global.db, &repo, &arch, &pkg_name)
//let res = tokio::task::spawn_blocking(move || { .await?;
// clone.write().unwrap().remove_pkg_from_path(path, true)
//}) if pkg_removed {
//.await??; tracing::info!(
// "Removed package '{}' ({}) from repository '{}'",
//if let Some((name, version, release, arch)) = res { pkg_name,
// let res = db::query::repo::by_name(&global.db, &repo).await?; arch,
// repo
// if let Some(repo_entry) = res { );
// let res =
// db::query::package::by_fields(&global.db, repo_entry.id, &arch, &name).await?; Ok(StatusCode::OK)
// } else {
// if let Some(entry) = res { Ok(StatusCode::NOT_FOUND)
// entry.delete(&global.db).await?; }
// }
// }
//
// tracing::info!("Removed '{}' from repository '{}'", file_name, repo);
//
// Ok(StatusCode::OK)
//} else {
// Ok(StatusCode::NOT_FOUND)
//}
} }