Compare commits
2 Commits
513a760040 ... 32e27978ec

| Author | SHA1 | Date |
| --- | --- | --- |
| Jef Roosens | 32e27978ec | |
| Jef Roosens | bf100049b1 | |
@@ -1,4 +1,4 @@
-use crate::repo::{MetaRepoMgr, RepoGroupManager};
+use crate::repo::MetaRepoMgr;
 use crate::{Config, Global};

 use axum::extract::FromRef;

@@ -82,7 +82,6 @@ impl Cli {

         let config = Config {
             data_dir: self.data_dir.clone(),
             api_key: self.api_key.clone(),
         };
+        let repo_manager = MetaRepoMgr::new(&self.data_dir.join("repos"));

@@ -9,7 +9,10 @@ pub struct Filter {

 impl IntoCondition for Filter {
     fn into_condition(self) -> Condition {
-        Condition::all().add_option(self.name.map(|name| package::Column::Name.like(name)))
+        Condition::all().add_option(
+            self.name
+                .map(|name| repo::Column::Name.like(format!("%{}%", name))),
+        )
     }
 }

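The filter above now matches against `repo::Column::Name` with a `%…%` substring pattern, and because `Condition::all().add_option(None)` adds nothing, an absent name still matches every row. A minimal, self-contained sketch of the same pattern; the `repo` entity below is a stand-in for the crate's own sea_orm entity, not its real schema:

```rust
use sea_orm::sea_query::IntoCondition;
use sea_orm::{ColumnTrait, Condition};

// Stand-in entity; the real project defines its own `repo` module.
mod repo {
    use sea_orm::entity::prelude::*;

    #[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
    #[sea_orm(table_name = "repo")]
    pub struct Model {
        #[sea_orm(primary_key)]
        pub id: i32,
        pub name: String,
    }

    #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
    pub enum Relation {}

    impl ActiveModelBehavior for ActiveModel {}
}

pub struct Filter {
    pub name: Option<String>,
}

impl IntoCondition for Filter {
    fn into_condition(self) -> Condition {
        // `add_option(None)` is a no-op, so an empty filter matches all rows;
        // `Some(name)` becomes `name LIKE '%<name>%'`.
        Condition::all().add_option(
            self.name
                .map(|name| repo::Column::Name.like(format!("%{}%", name))),
        )
    }
}
```

A query would then apply it with something like `repo::Entity::find().filter(filter.into_condition())` via `sea_orm::QueryFilter`.
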
@@ -7,7 +7,6 @@ mod repo;
 use clap::Parser;
 pub use error::{Result, ServerError};
 use repo::MetaRepoMgr;
-use repo::RepoGroupManager;
 use std::path::PathBuf;
 use std::sync::Arc;
 use tokio::sync::RwLock;

@@ -15,7 +14,6 @@ use tokio::sync::RwLock;
 #[derive(Clone)]
 pub struct Config {
     data_dir: PathBuf,
     api_key: String,
 }

 #[derive(Clone)]

@@ -1,17 +1,10 @@
-use std::io::{self, Write};
+use std::io;
 use std::path::{Path, PathBuf};
 use std::sync::{Arc, Mutex};
-use tokio::sync::{mpsc, oneshot};

 use libarchive::write::{Builder, FileWriter, WriteEntry};
 use libarchive::{Entry, WriteFilter, WriteFormat};

-enum Message {
-    AppendFilesEntry(oneshot::Sender<io::Result<()>>, String),
-    AppendLine(oneshot::Sender<io::Result<()>>, String),
-    Close(oneshot::Sender<io::Result<()>>),
-}
-
 /// Struct to abstract away the intrinsics of writing entries to an archive file
 pub struct RepoArchiveWriter {
     ar: Arc<Mutex<FileWriter>>,

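This hunk drops the oneshot-channel `Message` protocol the writer previously used, leaving the blocking libarchive `FileWriter` shared behind `Arc<Mutex<…>>`. A rough sketch of that shape, using a plain `std::io::Write` stand-in instead of the project's `libarchive` bindings (whose API isn't shown here), with `tokio::task::spawn_blocking` keeping the blocking I/O off the async executor:

```rust
use std::io::{self, Write};
use std::sync::{Arc, Mutex};

/// Stand-in for the libarchive FileWriter: any blocking writer works for the sketch.
pub struct ArchiveWriter<W: Write + Send + 'static> {
    inner: Arc<Mutex<W>>,
}

impl<W: Write + Send + 'static> ArchiveWriter<W> {
    pub fn new(writer: W) -> Self {
        Self {
            inner: Arc::new(Mutex::new(writer)),
        }
    }

    /// Run the blocking write on a dedicated blocking thread so async callers
    /// never stall the tokio executor while the lock is held.
    pub async fn append_line(&self, line: String) -> io::Result<()> {
        let inner = Arc::clone(&self.inner);

        tokio::task::spawn_blocking(move || {
            let mut writer = inner.lock().unwrap();
            writeln!(writer, "{}", line)
        })
        .await
        .expect("blocking task panicked")
    }
}
```

Only the `ar: Arc<Mutex<FileWriter>>` field is visible in the diff, so whether the crate wraps its calls in `spawn_blocking` exactly like this is an assumption.
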
@@ -133,6 +133,51 @@ impl MetaRepoMgr {
         }
     }

+    /// Remove all packages from the repository with the given arch.
+    pub async fn remove_repo_arch(&self, conn: &DbConn, repo: &str, arch: &str) -> Result<bool> {
+        let repo = db::query::repo::by_name(conn, repo).await?;
+
+        if let Some(repo) = repo {
+            let mut pkgs = repo
+                .find_related(db::Package)
+                .filter(db::package::Column::Arch.eq(arch))
+                .stream(conn)
+                .await?;
+
+            while let Some(pkg) = pkgs.next().await.transpose()? {
+                let path = self
+                    .repo_dir
+                    .join(&repo.name)
+                    .join(super::package::filename(&pkg));
+                tokio::fs::remove_file(path).await?;
+
+                pkg.delete(conn).await?;
+            }
+
+            tokio::fs::remove_file(
+                self.repo_dir
+                    .join(&repo.name)
+                    .join(format!("{}.db.tar.gz", arch)),
+            )
+            .await?;
+            tokio::fs::remove_file(
+                self.repo_dir
+                    .join(&repo.name)
+                    .join(format!("{}.files.tar.gz", arch)),
+            )
+            .await?;
+
+            // If we removed all "any" packages, we need to resync all databases
+            if arch == ANY_ARCH {
+                self.generate_archives_all(conn, &repo.name).await?;
+            }
+
+            Ok(true)
+        } else {
+            Ok(false)
+        }
+    }
+
     pub async fn remove_pkg(
         &self,
         conn: &DbConn,

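One detail in the new `remove_repo_arch` worth spelling out: sea_orm's `.stream(conn)` yields `Result` items, so the loop uses `next().await.transpose()?` to turn each `Option<Result<_>>` into a `Result<Option<_>>`, letting `?` propagate database errors while `while let` still stops cleanly at the end of the stream. A tiny self-contained illustration of the idiom, with a plain `futures` stream standing in for the database stream:

```rust
use futures::{stream, StreamExt};

#[tokio::main]
async fn main() -> Result<(), std::io::Error> {
    // Fallible stream, standing in for the one returned by `.stream(conn)`.
    let mut pkgs = stream::iter(vec![Ok::<u32, std::io::Error>(1), Ok(2), Ok(3)]);

    // `next()` yields Option<Result<T, E>>; `transpose()?` flips it into
    // Result<Option<T>, E>, so errors bubble up and `None` ends the loop.
    while let Some(pkg) = pkgs.next().await.transpose()? {
        println!("removing package {}", pkg);
    }

    Ok(())
}
```
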
@@ -3,13 +3,9 @@ mod manager;
 mod manager_new;
 pub mod package;

-pub use manager::RepoGroupManager;
 pub use manager_new::MetaRepoMgr;
 use tokio_util::io::StreamReader;

 use std::path::PathBuf;

 use crate::db;
 use axum::body::Body;
 use axum::extract::{Path, State};
 use axum::http::Request;

@@ -18,17 +14,9 @@ use axum::response::IntoResponse;
 use axum::routing::{delete, post};
 use axum::Router;
-use futures::TryStreamExt;
+use futures::{Stream, StreamExt};
 use regex::Regex;
 use sea_orm::ModelTrait;
 use std::sync::Arc;
 use tokio::{fs, io::AsyncWriteExt};
 use tower::util::ServiceExt;
-use tower_http::services::{ServeDir, ServeFile};
+use tower_http::services::ServeFile;
 use tower_http::validate_request::ValidateRequestHeaderLayer;
 use uuid::Uuid;

 const DB_FILE_EXTS: [&str; 4] = [".db", ".files", ".db.tar.gz", ".files.tar.gz"];

 pub fn router(api_key: &str) -> Router<crate::Global> {
     Router::new()

@@ -120,31 +108,20 @@ async fn delete_arch_repo(
     State(global): State<crate::Global>,
     Path((repo, arch)): Path<(String, String)>,
 ) -> crate::Result<StatusCode> {
-    Ok(StatusCode::NOT_FOUND)
-    //let clone = Arc::clone(&global.repo_manager);
-    //
-    //let arch_clone = arch.clone();
-    //let repo_clone = repo.clone();
-    //let repo_removed = tokio::task::spawn_blocking(move || {
-    //    clone
-    //        .write()
-    //        .unwrap()
-    //        .remove_repo_arch(&repo_clone, &arch_clone)
-    //})
-    //.await??;
-    //
-    //if repo_removed {
-    //    let res = db::query::repo::by_name(&global.db, &repo).await?;
-    //
-    //    if let Some(repo_entry) = res {
-    //        db::query::package::delete_with_arch(&global.db, repo_entry.id, &arch).await?;
-    //    }
-    //    tracing::info!("Removed architecture '{}' from repository '{}'", arch, repo);
-    //
-    //    Ok(StatusCode::OK)
-    //} else {
-    //    Ok(StatusCode::NOT_FOUND)
-    //}
+    let repo_removed = global
+        .repo_manager
+        .write()
+        .await
+        .remove_repo_arch(&global.db, &repo, &arch)
+        .await?;
+
+    if repo_removed {
+        tracing::info!("Removed arch '{}' from repository '{}'", arch, repo);
+
+        Ok(StatusCode::OK)
+    } else {
+        Ok(StatusCode::NOT_FOUND)
+    }
 }

 async fn delete_package(

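The replacement handler body leans on the `tokio::sync::RwLock` around the repo manager (see the imports above): unlike `std::sync::RwLock`, its write guard is acquired with `.await` and may be held across further `.await` points, which is presumably why the commented-out `Arc::clone` + `spawn_blocking` workaround could be deleted. A toy sketch of the pattern; `Manager` is a stand-in, not the crate's `MetaRepoMgr`:

```rust
use std::sync::Arc;
use tokio::sync::RwLock;

struct Manager;

impl Manager {
    // Async method shaped like `MetaRepoMgr::remove_repo_arch` in the diff above.
    async fn remove_repo_arch(&self, _repo: &str, _arch: &str) -> bool {
        true
    }
}

#[tokio::main]
async fn main() {
    let mgr = Arc::new(RwLock::new(Manager));

    // tokio's RwLock is async-aware: the guard is taken with `.await` and can be
    // held across the inner `.await`, so the handler calls the async manager
    // method directly instead of hopping onto a blocking thread.
    let removed = mgr.write().await.remove_repo_arch("somerepo", "x86_64").await;

    println!("repo arch removed: {}", removed);
}
```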