feat: further work on new repo & package implementation
This commit is contained in:
parent
2e0c6d1fa6
commit
c95feadca1
5 changed files with 113 additions and 2 deletions
|
|
@ -1,4 +1,5 @@
|
|||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use libarchive::write::{Builder, WriteEntry};
|
||||
use libarchive::{Entry, WriteFilter, WriteFormat};
|
||||
|
|
@ -7,6 +8,7 @@ use sea_orm::{ColumnTrait, DbConn, EntityTrait, ModelTrait, QueryFilter};
|
|||
|
||||
use futures::StreamExt;
|
||||
|
||||
use super::package_new;
|
||||
use crate::db;
|
||||
use crate::error::Result;
|
||||
|
||||
|
|
@ -37,7 +39,7 @@ impl MetaRepoMgr {
|
|||
let parent_dir = self.repo_dir.join(&repo.name).join(arch);
|
||||
|
||||
let repo_name = repo.name.clone();
|
||||
let (mut ar_db, mut ar_files) = tokio::task::spawn_blocking(move || {
|
||||
let (ar_db, ar_files) = tokio::task::spawn_blocking(move || {
|
||||
let mut ar_db = Builder::new();
|
||||
ar_db.add_filter(WriteFilter::Gzip)?;
|
||||
ar_db.set_format(WriteFormat::PaxRestricted)?;
|
||||
|
|
@ -69,12 +71,30 @@ impl MetaRepoMgr {
|
|||
.stream(conn)
|
||||
.await?;
|
||||
|
||||
let ar_files = Arc::new(Mutex::new(ar_files));
|
||||
let ar_db = Arc::new(Mutex::new(ar_db));
|
||||
|
||||
while let Some(pkg) = pkgs.next().await {
|
||||
let pkg = pkg?;
|
||||
|
||||
// TODO for each package, write entry to archive files
|
||||
package_new::append_files_entry(conn, &pkg, Arc::clone(&ar_files)).await?;
|
||||
|
||||
// TODO db archive
|
||||
}
|
||||
|
||||
// Close archives explicitly for better error handling
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let r1 = ar_files.lock().unwrap().close();
|
||||
let r2 = ar_db.lock().unwrap().close();
|
||||
|
||||
match (r1, r2) {
|
||||
(Ok(_), Ok(_)) => Ok(()),
|
||||
(Err(err), _) | (_, Err(err)) => Err(err),
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
mod manager;
|
||||
mod manager_new;
|
||||
pub mod package;
|
||||
pub mod package_new;
|
||||
|
||||
pub use manager::RepoGroupManager;
|
||||
|
||||
|
|
|
|||
56
server/src/repo/package_new.rs
Normal file
56
server/src/repo/package_new.rs
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
use sea_orm::{DbConn, ModelTrait};
|
||||
|
||||
use libarchive::write::{FileWriter, WriteEntry};
|
||||
use libarchive::Entry;
|
||||
|
||||
use futures::StreamExt;
|
||||
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use crate::db;
|
||||
|
||||
/// Return the full name of the package, consisting of its package name, pkgver and pkgrel
|
||||
fn full_pkg_name(pkg: &db::entities::package::Model) -> String {
|
||||
format!("{}-{}", pkg.name, pkg.version)
|
||||
}
|
||||
|
||||
pub async fn append_files_entry(
|
||||
conn: &DbConn,
|
||||
pkg: &db::entities::package::Model,
|
||||
ar: Arc<Mutex<FileWriter>>,
|
||||
) -> crate::Result<()> {
|
||||
let full_name = full_pkg_name(pkg);
|
||||
let ar_clone = Arc::clone(&ar);
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let mut ar_entry = WriteEntry::new();
|
||||
ar_entry.set_filetype(libarchive::archive::FileType::RegularFile);
|
||||
ar_entry.set_pathname(PathBuf::from(full_name).join("files"));
|
||||
ar_entry.set_mode(0o100644);
|
||||
// TODO set entry size?
|
||||
|
||||
ar_clone.lock().unwrap().append_entry(&mut ar_entry)
|
||||
})
|
||||
.await
|
||||
.unwrap()?;
|
||||
|
||||
// Write first header line
|
||||
let ar_clone = Arc::clone(&ar);
|
||||
tokio::task::spawn_blocking(move || writeln!(ar_clone.lock().unwrap(), "%FILES%"))
|
||||
.await
|
||||
.unwrap()?;
|
||||
|
||||
let mut files = pkg.find_related(db::PackageFile).stream(conn).await?;
|
||||
|
||||
while let Some(file) = files.next().await.transpose()? {
|
||||
let ar_clone = Arc::clone(&ar);
|
||||
|
||||
tokio::task::spawn_blocking(move || writeln!(ar_clone.lock().unwrap(), "{}", file.path))
|
||||
.await
|
||||
.unwrap()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue