From 2f7c4c34f7849112f7b1c023e25b06e565435ad6 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 13 Aug 2023 20:40:52 +0200 Subject: [PATCH 01/73] refactor(server): split initial migration --- .../m20230813_000001_create_dist_tables.rs | 48 +++++++++++++++++++ ...m20230813_000002_create_package_tables.rs} | 23 +-------- server/src/db/migrator/mod.rs | 10 ++-- 3 files changed, 56 insertions(+), 25 deletions(-) create mode 100644 server/src/db/migrator/m20230813_000001_create_dist_tables.rs rename server/src/db/migrator/{m20230730_000001_create_repo_tables.rs => m20230813_000002_create_package_tables.rs} (94%) diff --git a/server/src/db/migrator/m20230813_000001_create_dist_tables.rs b/server/src/db/migrator/m20230813_000001_create_dist_tables.rs new file mode 100644 index 0000000..97191b8 --- /dev/null +++ b/server/src/db/migrator/m20230813_000001_create_dist_tables.rs @@ -0,0 +1,48 @@ +use sea_orm_migration::prelude::*; + +pub struct Migration; + +impl MigrationName for Migration { + fn name(&self) -> &str { + "m_20230813_000001_create_dist_tables" + } +} + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .create_table( + Table::create() + .table(Repo::Table) + .col( + ColumnDef::new(Repo::Id) + .integer() + .not_null() + .auto_increment() + .primary_key(), + ) + .col(ColumnDef::new(Repo::Name).string().not_null().unique_key()) + .col(ColumnDef::new(Repo::Description).string()) + .to_owned(), + ) + .await?; + + Ok(()) + } + + // Define how to rollback this migration: Drop the Bakery table. 
+ async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .drop_table(Table::drop().table(Repo::Table).to_owned()) + .await + } +} + +#[derive(Iden)] +pub enum Repo { + Table, + Id, + Name, + Description, +} diff --git a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs b/server/src/db/migrator/m20230813_000002_create_package_tables.rs similarity index 94% rename from server/src/db/migrator/m20230730_000001_create_repo_tables.rs rename to server/src/db/migrator/m20230813_000002_create_package_tables.rs index adefe56..1cfc208 100644 --- a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs +++ b/server/src/db/migrator/m20230813_000002_create_package_tables.rs @@ -4,29 +4,13 @@ pub struct Migration; impl MigrationName for Migration { fn name(&self) -> &str { - "m_20230730_000001_create_repo_tables" + "m_20230813_000002_create_package_tables" } } #[async_trait::async_trait] impl MigrationTrait for Migration { async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { - manager - .create_table( - Table::create() - .table(Repo::Table) - .col( - ColumnDef::new(Repo::Id) - .integer() - .not_null() - .auto_increment() - .primary_key(), - ) - .col(ColumnDef::new(Repo::Name).string().not_null().unique_key()) - .col(ColumnDef::new(Repo::Description).string()) - .to_owned(), - ) - .await?; manager .create_table( Table::create() @@ -292,9 +276,6 @@ impl MigrationTrait for Migration { .await?; manager .drop_table(Table::drop().table(Package::Table).to_owned()) - .await?; - manager - .drop_table(Table::drop().table(Repo::Table).to_owned()) .await } } @@ -303,8 +284,6 @@ impl MigrationTrait for Migration { pub enum Repo { Table, Id, - Name, - Description, } #[derive(Iden)] diff --git a/server/src/db/migrator/mod.rs b/server/src/db/migrator/mod.rs index d939276..0d8899b 100644 --- a/server/src/db/migrator/mod.rs +++ b/server/src/db/migrator/mod.rs @@ -1,12 +1,16 @@ +mod m20230813_000001_create_dist_tables; +mod 
m20230813_000002_create_package_tables; + use sea_orm_migration::prelude::*; pub struct Migrator; -mod m20230730_000001_create_repo_tables; - #[async_trait::async_trait] impl MigratorTrait for Migrator { fn migrations() -> Vec> { - vec![Box::new(m20230730_000001_create_repo_tables::Migration)] + vec![ + Box::new(m20230813_000001_create_dist_tables::Migration), + Box::new(m20230813_000002_create_package_tables::Migration), + ] } } From 0565328ea8b15137bb06d78f51e97803de198af0 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 13 Aug 2023 21:16:43 +0200 Subject: [PATCH 02/73] feat(server): start distro table --- .../m20230813_000001_create_dist_tables.rs | 50 +++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/server/src/db/migrator/m20230813_000001_create_dist_tables.rs b/server/src/db/migrator/m20230813_000001_create_dist_tables.rs index 97191b8..46a2842 100644 --- a/server/src/db/migrator/m20230813_000001_create_dist_tables.rs +++ b/server/src/db/migrator/m20230813_000001_create_dist_tables.rs @@ -11,6 +11,34 @@ impl MigrationName for Migration { #[async_trait::async_trait] impl MigrationTrait for Migration { async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .create_table( + Table::create() + .table(Distro::Table) + .col( + ColumnDef::new(Distro::Id) + .integer() + .not_null() + .auto_increment() + .primary_key(), + ) + .col( + ColumnDef::new(Distro::Slug) + .string_len(255) + .not_null() + .unique_key(), + ) + .col( + ColumnDef::new(Distro::Name) + .string() + .not_null() + .unique_key(), + ) + .col(ColumnDef::new(Distro::Description).string()) + .col(ColumnDef::new(Distro::Url).string()) + .to_owned(), + ) + .await?; manager .create_table( Table::create() @@ -22,8 +50,16 @@ impl MigrationTrait for Migration { .auto_increment() .primary_key(), ) + .col(ColumnDef::new(Repo::DistroId).integer().not_null()) .col(ColumnDef::new(Repo::Name).string().not_null().unique_key()) 
.col(ColumnDef::new(Repo::Description).string()) + .foreign_key( + ForeignKey::create() + .name("fk-repo-distro_id") + .from(Repo::Table, Repo::DistroId) + .to(Distro::Table, Distro::Id) + .on_delete(ForeignKeyAction::Cascade), + ) .to_owned(), ) .await?; @@ -35,14 +71,28 @@ impl MigrationTrait for Migration { async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { manager .drop_table(Table::drop().table(Repo::Table).to_owned()) + .await?; + manager + .drop_table(Table::drop().table(Distro::Table).to_owned()) .await } } +#[derive(Iden)] +pub enum Distro { + Table, + Id, + Slug, + Name, + Description, + Url, +} + #[derive(Iden)] pub enum Repo { Table, Id, + DistroId, Name, Description, } From 1b80bcd757b1e9c43160a5bd1ad7d6cc375b8b63 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Thu, 17 Aug 2023 09:50:17 +0200 Subject: [PATCH 03/73] feat(server): add read-only distro api routes --- server/src/api/mod.rs | 31 +++++++++++++++++++++ server/src/db/entities/distro.rs | 31 +++++++++++++++++++++ server/src/db/entities/mod.rs | 1 + server/src/db/entities/prelude.rs | 1 + server/src/db/entities/repo.rs | 15 ++++++++++ server/src/db/mod.rs | 2 ++ server/src/db/query/distro.rs | 46 +++++++++++++++++++++++++++++++ server/src/db/query/mod.rs | 2 ++ server/src/db/query/repo.rs | 2 ++ 9 files changed, 131 insertions(+) create mode 100644 server/src/db/entities/distro.rs create mode 100644 server/src/db/query/distro.rs diff --git a/server/src/api/mod.rs b/server/src/api/mod.rs index 800587f..7557956 100644 --- a/server/src/api/mod.rs +++ b/server/src/api/mod.rs @@ -11,12 +11,43 @@ use crate::db; pub fn router() -> Router { Router::new() + .route("/distros", get(get_distros)) + .route("/distros/:id", get(get_single_distro)) .route("/repos", get(get_repos)) .route("/repos/:id", get(get_single_repo)) .route("/packages", get(get_packages)) .route("/packages/:id", get(get_single_package)) } +async fn get_distros( + State(global): State, + Query(pagination): Query, +) -> 
crate::Result>> { + let (total_pages, repos) = global + .db + .distro + .page( + pagination.per_page.unwrap_or(25), + pagination.page.unwrap_or(1) - 1, + ) + .await?; + Ok(Json(pagination.res(total_pages, repos))) +} + +async fn get_single_distro( + State(global): State, + Path(id): Path, +) -> crate::Result> { + let repo = global + .db + .distro + .by_id(id) + .await? + .ok_or(axum::http::StatusCode::NOT_FOUND)?; + + Ok(Json(repo)) +} + async fn get_repos( State(global): State, Query(pagination): Query, diff --git a/server/src/db/entities/distro.rs b/server/src/db/entities/distro.rs new file mode 100644 index 0000000..1d96872 --- /dev/null +++ b/server/src/db/entities/distro.rs @@ -0,0 +1,31 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.1 + +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] +#[sea_orm(table_name = "distro")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i32, + #[sea_orm(unique)] + pub slug: String, + #[sea_orm(unique)] + pub name: String, + pub description: Option, + pub url: Option, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::repo::Entity")] + Repo, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Repo.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/server/src/db/entities/mod.rs b/server/src/db/entities/mod.rs index 1111e7a..96603bf 100644 --- a/server/src/db/entities/mod.rs +++ b/server/src/db/entities/mod.rs @@ -2,6 +2,7 @@ pub mod prelude; +pub mod distro; pub mod package; pub mod package_conflicts; pub mod package_depends; diff --git a/server/src/db/entities/prelude.rs b/server/src/db/entities/prelude.rs index bee503c..1314174 100644 --- a/server/src/db/entities/prelude.rs +++ b/server/src/db/entities/prelude.rs @@ -1,5 +1,6 @@ //! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.12.1 +pub use super::distro::Entity as Distro; pub use super::package::Entity as Package; pub use super::package_conflicts::Entity as PackageConflicts; pub use super::package_depends::Entity as PackageDepends; diff --git a/server/src/db/entities/repo.rs b/server/src/db/entities/repo.rs index b7a1af1..d68e226 100644 --- a/server/src/db/entities/repo.rs +++ b/server/src/db/entities/repo.rs @@ -8,6 +8,7 @@ use serde::{Deserialize, Serialize}; pub struct Model { #[sea_orm(primary_key)] pub id: i32, + pub distro_id: i32, #[sea_orm(unique)] pub name: String, pub description: Option, @@ -15,10 +16,24 @@ pub struct Model { #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { + #[sea_orm( + belongs_to = "super::distro::Entity", + from = "Column::DistroId", + to = "super::distro::Column::Id", + on_update = "NoAction", + on_delete = "Cascade" + )] + Distro, #[sea_orm(has_many = "super::package::Entity")] Package, } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Distro.def() + } +} + impl Related for Entity { fn to() -> RelationDef { Relation::Package.def() diff --git a/server/src/db/mod.rs b/server/src/db/mod.rs index 6ff5a8f..32c0452 100644 --- a/server/src/db/mod.rs +++ b/server/src/db/mod.rs @@ -42,6 +42,7 @@ pub struct RieterDb { conn: DatabaseConnection, pub pkg: query::PackageQuery, pub repo: query::RepoQuery, + pub distro: query::DistroQuery, } impl RieterDb { @@ -54,6 +55,7 @@ impl RieterDb { conn: db.clone(), pkg: query::PackageQuery::new(db.clone()), repo: query::RepoQuery::new(db.clone()), + distro: query::DistroQuery::new(db.clone()), }) } } diff --git a/server/src/db/query/distro.rs b/server/src/db/query/distro.rs new file mode 100644 index 0000000..2d4d1c6 --- /dev/null +++ b/server/src/db/query/distro.rs @@ -0,0 +1,46 @@ +use sea_orm::*; + +use crate::db::*; + +#[derive(Clone, Debug)] +pub struct DistroQuery { + conn: DatabaseConnection, +} + +impl DistroQuery { + pub fn new(conn: 
DatabaseConnection) -> Self { + Self { conn } + } + + pub async fn page(&self, per_page: u64, page: u64) -> Result<(u64, Vec)> { + let paginator = Distro::find() + .order_by_asc(distro::Column::Id) + .paginate(&self.conn, per_page); + let results = paginator.fetch_page(page).await?; + let total_pages = paginator.num_pages().await?; + + Ok((total_pages, results)) + } + + pub async fn by_id(&self, id: i32) -> Result> { + distro::Entity::find_by_id(id).one(&self.conn).await + } + + pub async fn insert( + &self, + slug: &str, + name: &str, + description: Option<&str>, + url: Option<&str>, + ) -> Result> { + let model = distro::ActiveModel { + id: NotSet, + slug: Set(String::from(slug)), + name: Set(String::from(name)), + description: Set(description.map(String::from)), + url: Set(url.map(String::from)), + }; + + Distro::insert(model).exec(&self.conn).await + } +} diff --git a/server/src/db/query/mod.rs b/server/src/db/query/mod.rs index a52cccf..4ffa4cb 100644 --- a/server/src/db/query/mod.rs +++ b/server/src/db/query/mod.rs @@ -1,6 +1,8 @@ +mod distro; mod package; mod repo; +pub use distro::DistroQuery; pub use package::PackageQuery; pub use repo::RepoQuery; diff --git a/server/src/db/query/repo.rs b/server/src/db/query/repo.rs index 5e54fdc..8b30db8 100644 --- a/server/src/db/query/repo.rs +++ b/server/src/db/query/repo.rs @@ -40,6 +40,8 @@ impl RepoQuery { ) -> Result> { let model = repo::ActiveModel { id: NotSet, + // TODO CHANGE THIS + distro_id: NotSet, name: Set(String::from(name)), description: Set(description.map(String::from)), }; From 80fb6d22f8953e2ad89e9794b45b78902d895335 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Thu, 17 Aug 2023 10:10:55 +0200 Subject: [PATCH 04/73] refactor(server): separate web logic into separate module --- Cargo.lock | 1 + server/Cargo.toml | 1 + server/src/cli.rs | 19 +- server/src/main.rs | 2 +- server/src/repo/mod.rs | 253 +------------------------ server/src/{ => web}/api/mod.rs | 0 server/src/{ => 
web}/api/pagination.rs | 0 server/src/web/mod.rs | 19 ++ server/src/web/repo.rs | 250 ++++++++++++++++++++++++ 9 files changed, 277 insertions(+), 268 deletions(-) rename server/src/{ => web}/api/mod.rs (100%) rename server/src/{ => web}/api/pagination.rs (100%) create mode 100644 server/src/web/mod.rs create mode 100644 server/src/web/repo.rs diff --git a/Cargo.lock b/Cargo.lock index 804a3d1..a02d9ed 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1636,6 +1636,7 @@ dependencies = [ "chrono", "clap", "futures", + "hyper", "libarchive", "sea-orm", "sea-orm-migration", diff --git a/server/Cargo.toml b/server/Cargo.toml index bf4fd7f..0bdfee3 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -8,6 +8,7 @@ authors = ["Jef Roosens"] [dependencies] axum = { version = "0.6.18", features = ["http2"] } +hyper = "*" chrono = { version = "0.4.26", features = ["serde"] } clap = { version = "4.3.12", features = ["env", "derive"] } futures = "0.3.28" diff --git a/server/src/cli.rs b/server/src/cli.rs index 540f457..ec93f8b 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -2,12 +2,10 @@ use crate::repo::RepoGroupManager; use crate::{Config, Global}; use axum::extract::FromRef; -use axum::Router; use clap::Parser; use std::io; use std::path::PathBuf; use std::sync::{Arc, RwLock}; -use tower_http::trace::TraceLayer; use tracing::debug; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; @@ -95,18 +93,9 @@ impl Cli { }; // build our application with a single route - let app = Router::new() - .nest("/api", crate::api::router()) - .merge(crate::repo::router(&self.api_key)) - .with_state(global) - .layer(TraceLayer::new_for_http()); - - // run it with hyper on localhost:3000 - Ok( - axum::Server::bind(&format!("0.0.0.0:{}", self.port).parse().unwrap()) - .serve(app.into_make_service()) - .await - .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?, - ) + let app = crate::web::app(global, &self.api_key); + Ok(crate::web::serve(app, self.port) + 
.await + .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?) } } diff --git a/server/src/main.rs b/server/src/main.rs index fc5c110..1d90a24 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -1,8 +1,8 @@ -mod api; mod cli; pub mod db; mod error; mod repo; +mod web; use clap::Parser; pub use error::{Result, ServerError}; diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 21acf81..44f0e10 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -1,255 +1,4 @@ -mod manager; +pub mod manager; pub mod package; pub use manager::RepoGroupManager; - -use std::path::PathBuf; - -use axum::body::Body; -use axum::extract::{BodyStream, Path, State}; -use axum::http::Request; -use axum::http::StatusCode; -use axum::response::IntoResponse; -use axum::routing::{delete, post}; -use axum::Router; -use futures::StreamExt; -use sea_orm::ModelTrait; -use std::sync::Arc; -use tokio::{fs, io::AsyncWriteExt}; -use tower::util::ServiceExt; -use tower_http::services::{ServeDir, ServeFile}; -use tower_http::validate_request::ValidateRequestHeaderLayer; -use uuid::Uuid; - -const DB_FILE_EXTS: [&str; 4] = [".db", ".files", ".db.tar.gz", ".files.tar.gz"]; - -pub fn router(api_key: &str) -> Router { - Router::new() - .route( - "/:repo", - post(post_package_archive) - .delete(delete_repo) - .route_layer(ValidateRequestHeaderLayer::bearer(api_key)), - ) - .route( - "/:repo/:arch", - delete(delete_arch_repo).route_layer(ValidateRequestHeaderLayer::bearer(api_key)), - ) - // Routes added after the layer do not get that layer applied, so the GET requests will not - // be authorized - .route( - "/:repo/:arch/:filename", - delete(delete_package) - .route_layer(ValidateRequestHeaderLayer::bearer(api_key)) - .get(get_file), - ) -} - -/// Serve the package archive files and database archives. If files are requested for an -/// architecture that does not have any explicit packages, a repository containing only "any" files -/// is returned. 
-async fn get_file( - State(global): State, - Path((repo, arch, mut file_name)): Path<(String, String, String)>, - req: Request, -) -> crate::Result { - let repo_dir = global.config.repo_dir.join(&repo).join(&arch); - let repo_exists = tokio::fs::try_exists(&repo_dir).await?; - - let res = if DB_FILE_EXTS.iter().any(|ext| file_name.ends_with(ext)) { - // Append tar extension to ensure we find the file - if !file_name.ends_with(".tar.gz") { - file_name.push_str(".tar.gz"); - }; - - if repo_exists { - ServeFile::new(repo_dir.join(file_name)).oneshot(req).await - } else { - let path = global - .config - .repo_dir - .join(repo) - .join(manager::ANY_ARCH) - .join(file_name); - - ServeFile::new(path).oneshot(req).await - } - } else { - let any_file = global - .config - .pkg_dir - .join(repo) - .join(manager::ANY_ARCH) - .join(file_name); - - if repo_exists { - ServeDir::new(global.config.pkg_dir) - .fallback(ServeFile::new(any_file)) - .oneshot(req) - .await - } else { - ServeFile::new(any_file).oneshot(req).await - } - }; - - Ok(res) -} - -async fn post_package_archive( - State(global): State, - Path(repo): Path, - mut body: BodyStream, -) -> crate::Result<()> { - // We first stream the uploaded file to disk - let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let path = global.config.pkg_dir.join(uuid.to_string()); - let mut f = fs::File::create(&path).await?; - - while let Some(chunk) = body.next().await { - f.write_all(&chunk?).await?; - } - - let clone = Arc::clone(&global.repo_manager); - let path_clone = path.clone(); - let repo_clone = repo.clone(); - let res = tokio::task::spawn_blocking(move || { - clone - .write() - .unwrap() - .add_pkg_from_path(&repo_clone, &path_clone) - }) - .await?; - - match res { - // Insert the newly added package into the database - Ok(pkg) => { - tracing::info!("Added '{}' to repository '{}'", pkg.file_name(), repo); - - // Query the repo for its ID, or create it if it does not already exist - let res = 
global.db.repo.by_name(&repo).await?; - - let repo_id = if let Some(repo_entity) = res { - repo_entity.id - } else { - global.db.repo.insert(&repo, None).await?.last_insert_id - }; - - // If the package already exists in the database, we remove it first - let res = global - .db - .pkg - .by_fields(repo_id, &pkg.info.name, None, &pkg.info.arch) - .await?; - - if let Some(entry) = res { - entry.delete(&global.db).await?; - } - - global.db.pkg.insert(repo_id, pkg).await?; - - Ok(()) - } - // Remove the uploaded file and return the error - Err(err) => { - tokio::fs::remove_file(path).await?; - - Err(err.into()) - } - } -} - -async fn delete_repo( - State(global): State, - Path(repo): Path, -) -> crate::Result { - let clone = Arc::clone(&global.repo_manager); - - let repo_clone = repo.clone(); - let repo_removed = - tokio::task::spawn_blocking(move || clone.write().unwrap().remove_repo(&repo_clone)) - .await??; - - if repo_removed { - let res = global.db.repo.by_name(&repo).await?; - - if let Some(repo_entry) = res { - repo_entry.delete(&global.db).await?; - } - - tracing::info!("Removed repository '{}'", repo); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } -} - -async fn delete_arch_repo( - State(global): State, - Path((repo, arch)): Path<(String, String)>, -) -> crate::Result { - let clone = Arc::clone(&global.repo_manager); - - let arch_clone = arch.clone(); - let repo_clone = repo.clone(); - let repo_removed = tokio::task::spawn_blocking(move || { - clone - .write() - .unwrap() - .remove_repo_arch(&repo_clone, &arch_clone) - }) - .await??; - - if repo_removed { - let res = global.db.repo.by_name(&repo).await?; - - if let Some(repo_entry) = res { - global.db.pkg.delete_with_arch(repo_entry.id, &arch).await?; - } - tracing::info!("Removed architecture '{}' from repository '{}'", arch, repo); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } -} - -async fn delete_package( - State(global): State, - Path((repo, arch, file_name)): 
Path<(String, String, String)>, -) -> crate::Result { - let clone = Arc::clone(&global.repo_manager); - let path = PathBuf::from(&repo).join(arch).join(&file_name); - - let res = tokio::task::spawn_blocking(move || { - clone.write().unwrap().remove_pkg_from_path(path, true) - }) - .await??; - - if let Some((name, version, release, arch)) = res { - let res = global.db.repo.by_name(&repo).await?; - - if let Some(repo_entry) = res { - let res = global - .db - .pkg - .by_fields( - repo_entry.id, - &name, - Some(&format!("{}-{}", version, release)), - &arch, - ) - .await?; - - if let Some(entry) = res { - entry.delete(&global.db).await?; - } - } - - tracing::info!("Removed '{}' from repository '{}'", file_name, repo); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } -} diff --git a/server/src/api/mod.rs b/server/src/web/api/mod.rs similarity index 100% rename from server/src/api/mod.rs rename to server/src/web/api/mod.rs diff --git a/server/src/api/pagination.rs b/server/src/web/api/pagination.rs similarity index 100% rename from server/src/api/pagination.rs rename to server/src/web/api/pagination.rs diff --git a/server/src/web/mod.rs b/server/src/web/mod.rs new file mode 100644 index 0000000..ce32a07 --- /dev/null +++ b/server/src/web/mod.rs @@ -0,0 +1,19 @@ +mod api; +mod repo; + +use axum::{Router, Server}; +use tower_http::trace::TraceLayer; + +pub fn app(global: crate::Global, api_key: &str) -> Router { + Router::new() + .nest("/api", api::router()) + .merge(repo::router(api_key)) + .with_state(global) + .layer(TraceLayer::new_for_http()) +} + +pub async fn serve(app: Router, port: u16) -> Result<(), hyper::Error> { + Server::bind(&format!("0.0.0.0:{}", port).parse().unwrap()) + .serve(app.into_make_service()) + .await +} diff --git a/server/src/web/repo.rs b/server/src/web/repo.rs new file mode 100644 index 0000000..f8c3d65 --- /dev/null +++ b/server/src/web/repo.rs @@ -0,0 +1,250 @@ +use std::path::PathBuf; + +use axum::body::Body; +use 
axum::extract::{BodyStream, Path, State}; +use axum::http::Request; +use axum::http::StatusCode; +use axum::response::IntoResponse; +use axum::routing::{delete, post}; +use axum::Router; +use futures::StreamExt; +use sea_orm::ModelTrait; +use std::sync::Arc; +use tokio::{fs, io::AsyncWriteExt}; +use tower::util::ServiceExt; +use tower_http::services::{ServeDir, ServeFile}; +use tower_http::validate_request::ValidateRequestHeaderLayer; +use uuid::Uuid; + +const DB_FILE_EXTS: [&str; 4] = [".db", ".files", ".db.tar.gz", ".files.tar.gz"]; + +pub fn router(api_key: &str) -> Router { + Router::new() + .route( + "/:repo", + post(post_package_archive) + .delete(delete_repo) + .route_layer(ValidateRequestHeaderLayer::bearer(api_key)), + ) + .route( + "/:repo/:arch", + delete(delete_arch_repo).route_layer(ValidateRequestHeaderLayer::bearer(api_key)), + ) + // Routes added after the layer do not get that layer applied, so the GET requests will not + // be authorized + .route( + "/:repo/:arch/:filename", + delete(delete_package) + .route_layer(ValidateRequestHeaderLayer::bearer(api_key)) + .get(get_file), + ) +} + +/// Serve the package archive files and database archives. If files are requested for an +/// architecture that does not have any explicit packages, a repository containing only "any" files +/// is returned. 
+async fn get_file( + State(global): State, + Path((repo, arch, mut file_name)): Path<(String, String, String)>, + req: Request, +) -> crate::Result { + let repo_dir = global.config.repo_dir.join(&repo).join(&arch); + let repo_exists = tokio::fs::try_exists(&repo_dir).await?; + + let res = if DB_FILE_EXTS.iter().any(|ext| file_name.ends_with(ext)) { + // Append tar extension to ensure we find the file + if !file_name.ends_with(".tar.gz") { + file_name.push_str(".tar.gz"); + }; + + if repo_exists { + ServeFile::new(repo_dir.join(file_name)).oneshot(req).await + } else { + let path = global + .config + .repo_dir + .join(repo) + .join(crate::repo::manager::ANY_ARCH) + .join(file_name); + + ServeFile::new(path).oneshot(req).await + } + } else { + let any_file = global + .config + .pkg_dir + .join(repo) + .join(crate::repo::manager::ANY_ARCH) + .join(file_name); + + if repo_exists { + ServeDir::new(global.config.pkg_dir) + .fallback(ServeFile::new(any_file)) + .oneshot(req) + .await + } else { + ServeFile::new(any_file).oneshot(req).await + } + }; + + Ok(res) +} + +async fn post_package_archive( + State(global): State, + Path(repo): Path, + mut body: BodyStream, +) -> crate::Result<()> { + // We first stream the uploaded file to disk + let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); + let path = global.config.pkg_dir.join(uuid.to_string()); + let mut f = fs::File::create(&path).await?; + + while let Some(chunk) = body.next().await { + f.write_all(&chunk?).await?; + } + + let clone = Arc::clone(&global.repo_manager); + let path_clone = path.clone(); + let repo_clone = repo.clone(); + let res = tokio::task::spawn_blocking(move || { + clone + .write() + .unwrap() + .add_pkg_from_path(&repo_clone, &path_clone) + }) + .await?; + + match res { + // Insert the newly added package into the database + Ok(pkg) => { + tracing::info!("Added '{}' to repository '{}'", pkg.file_name(), repo); + + // Query the repo for its ID, or create it if it does not already exist + let res = 
global.db.repo.by_name(&repo).await?; + + let repo_id = if let Some(repo_entity) = res { + repo_entity.id + } else { + global.db.repo.insert(&repo, None).await?.last_insert_id + }; + + // If the package already exists in the database, we remove it first + let res = global + .db + .pkg + .by_fields(repo_id, &pkg.info.name, None, &pkg.info.arch) + .await?; + + if let Some(entry) = res { + entry.delete(&global.db).await?; + } + + global.db.pkg.insert(repo_id, pkg).await?; + + Ok(()) + } + // Remove the uploaded file and return the error + Err(err) => { + tokio::fs::remove_file(path).await?; + + Err(err.into()) + } + } +} + +async fn delete_repo( + State(global): State, + Path(repo): Path, +) -> crate::Result { + let clone = Arc::clone(&global.repo_manager); + + let repo_clone = repo.clone(); + let repo_removed = + tokio::task::spawn_blocking(move || clone.write().unwrap().remove_repo(&repo_clone)) + .await??; + + if repo_removed { + let res = global.db.repo.by_name(&repo).await?; + + if let Some(repo_entry) = res { + repo_entry.delete(&global.db).await?; + } + + tracing::info!("Removed repository '{}'", repo); + + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } +} + +async fn delete_arch_repo( + State(global): State, + Path((repo, arch)): Path<(String, String)>, +) -> crate::Result { + let clone = Arc::clone(&global.repo_manager); + + let arch_clone = arch.clone(); + let repo_clone = repo.clone(); + let repo_removed = tokio::task::spawn_blocking(move || { + clone + .write() + .unwrap() + .remove_repo_arch(&repo_clone, &arch_clone) + }) + .await??; + + if repo_removed { + let res = global.db.repo.by_name(&repo).await?; + + if let Some(repo_entry) = res { + global.db.pkg.delete_with_arch(repo_entry.id, &arch).await?; + } + tracing::info!("Removed architecture '{}' from repository '{}'", arch, repo); + + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } +} + +async fn delete_package( + State(global): State, + Path((repo, arch, file_name)): 
Path<(String, String, String)>, +) -> crate::Result { + let clone = Arc::clone(&global.repo_manager); + let path = PathBuf::from(&repo).join(arch).join(&file_name); + + let res = tokio::task::spawn_blocking(move || { + clone.write().unwrap().remove_pkg_from_path(path, true) + }) + .await??; + + if let Some((name, version, release, arch)) = res { + let res = global.db.repo.by_name(&repo).await?; + + if let Some(repo_entry) = res { + let res = global + .db + .pkg + .by_fields( + repo_entry.id, + &name, + Some(&format!("{}-{}", version, release)), + &arch, + ) + .await?; + + if let Some(entry) = res { + entry.delete(&global.db).await?; + } + } + + tracing::info!("Removed '{}' from repository '{}'", file_name, repo); + + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } +} From b7be311485f27bef2c87e596c3d1c0b1667011be Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Thu, 17 Aug 2023 10:24:29 +0200 Subject: [PATCH 05/73] refactor(server): separate api pieces into modules --- server/src/web/api/distros.rs | 43 +++++++++++++ server/src/web/api/mod.rs | 107 ++------------------------------- server/src/web/api/packages.rs | 44 ++++++++++++++ server/src/web/api/repos.rs | 43 +++++++++++++ 4 files changed, 136 insertions(+), 101 deletions(-) create mode 100644 server/src/web/api/distros.rs create mode 100644 server/src/web/api/packages.rs create mode 100644 server/src/web/api/repos.rs diff --git a/server/src/web/api/distros.rs b/server/src/web/api/distros.rs new file mode 100644 index 0000000..feb3679 --- /dev/null +++ b/server/src/web/api/distros.rs @@ -0,0 +1,43 @@ +use axum::{ + extract::{Path, Query, State}, + routing::get, + Json, Router, +}; + +use super::pagination::{self, PaginatedResponse}; +use crate::db; + +pub fn router() -> Router { + Router::new() + .route("/", get(get_distros)) + .route("/:id", get(get_single_distro)) +} + +async fn get_distros( + State(global): State, + Query(pagination): Query, +) -> crate::Result>> { + let (total_pages, repos) 
= global + .db + .distro + .page( + pagination.per_page.unwrap_or(25), + pagination.page.unwrap_or(1) - 1, + ) + .await?; + Ok(Json(pagination.res(total_pages, repos))) +} + +async fn get_single_distro( + State(global): State, + Path(id): Path, +) -> crate::Result> { + let repo = global + .db + .distro + .by_id(id) + .await? + .ok_or(axum::http::StatusCode::NOT_FOUND)?; + + Ok(Json(repo)) +} diff --git a/server/src/web/api/mod.rs b/server/src/web/api/mod.rs index 7557956..16940b4 100644 --- a/server/src/web/api/mod.rs +++ b/server/src/web/api/mod.rs @@ -1,108 +1,13 @@ +mod distros; +mod packages; mod pagination; +mod repos; -use axum::extract::{Path, Query, State}; -use axum::routing::get; -use axum::Json; use axum::Router; -use pagination::PaginatedResponse; - -use crate::db; - pub fn router() -> Router { Router::new() - .route("/distros", get(get_distros)) - .route("/distros/:id", get(get_single_distro)) - .route("/repos", get(get_repos)) - .route("/repos/:id", get(get_single_repo)) - .route("/packages", get(get_packages)) - .route("/packages/:id", get(get_single_package)) -} - -async fn get_distros( - State(global): State, - Query(pagination): Query, -) -> crate::Result>> { - let (total_pages, repos) = global - .db - .distro - .page( - pagination.per_page.unwrap_or(25), - pagination.page.unwrap_or(1) - 1, - ) - .await?; - Ok(Json(pagination.res(total_pages, repos))) -} - -async fn get_single_distro( - State(global): State, - Path(id): Path, -) -> crate::Result> { - let repo = global - .db - .distro - .by_id(id) - .await? 
- .ok_or(axum::http::StatusCode::NOT_FOUND)?; - - Ok(Json(repo)) -} - -async fn get_repos( - State(global): State, - Query(pagination): Query, -) -> crate::Result>> { - let (total_pages, repos) = global - .db - .repo - .page( - pagination.per_page.unwrap_or(25), - pagination.page.unwrap_or(1) - 1, - ) - .await?; - Ok(Json(pagination.res(total_pages, repos))) -} - -async fn get_single_repo( - State(global): State, - Path(id): Path, -) -> crate::Result> { - let repo = global - .db - .repo - .by_id(id) - .await? - .ok_or(axum::http::StatusCode::NOT_FOUND)?; - - Ok(Json(repo)) -} - -async fn get_packages( - State(global): State, - Query(pagination): Query, -) -> crate::Result>> { - let (total_pages, pkgs) = global - .db - .pkg - .page( - pagination.per_page.unwrap_or(25), - pagination.page.unwrap_or(1) - 1, - ) - .await?; - - Ok(Json(pagination.res(total_pages, pkgs))) -} - -async fn get_single_package( - State(global): State, - Path(id): Path, -) -> crate::Result> { - let entry = global - .db - .pkg - .full(id) - .await? 
- .ok_or(axum::http::StatusCode::NOT_FOUND)?; - - Ok(Json(entry)) + .nest("/distros", distros::router()) + .nest("/repos", repos::router()) + .nest("/packages", packages::router()) } diff --git a/server/src/web/api/packages.rs b/server/src/web/api/packages.rs new file mode 100644 index 0000000..c0c9fc3 --- /dev/null +++ b/server/src/web/api/packages.rs @@ -0,0 +1,44 @@ +use axum::{ + extract::{Path, Query, State}, + routing::get, + Json, Router, +}; + +use super::pagination::{self, PaginatedResponse}; +use crate::db; + +pub fn router() -> Router { + Router::new() + .route("/", get(get_packages)) + .route("/:id", get(get_single_package)) +} + +async fn get_packages( + State(global): State, + Query(pagination): Query, +) -> crate::Result>> { + let (total_pages, pkgs) = global + .db + .pkg + .page( + pagination.per_page.unwrap_or(25), + pagination.page.unwrap_or(1) - 1, + ) + .await?; + + Ok(Json(pagination.res(total_pages, pkgs))) +} + +async fn get_single_package( + State(global): State, + Path(id): Path, +) -> crate::Result> { + let entry = global + .db + .pkg + .full(id) + .await? 
+ .ok_or(axum::http::StatusCode::NOT_FOUND)?; + + Ok(Json(entry)) +} diff --git a/server/src/web/api/repos.rs b/server/src/web/api/repos.rs new file mode 100644 index 0000000..601ccb9 --- /dev/null +++ b/server/src/web/api/repos.rs @@ -0,0 +1,43 @@ +use axum::{ + extract::{Path, Query, State}, + routing::get, + Json, Router, +}; + +use super::pagination::{self, PaginatedResponse}; +use crate::db; + +pub fn router() -> Router { + Router::new() + .route("/", get(get_repos)) + .route("/:id", get(get_single_repo)) +} + +async fn get_repos( + State(global): State, + Query(pagination): Query, +) -> crate::Result>> { + let (total_pages, repos) = global + .db + .repo + .page( + pagination.per_page.unwrap_or(25), + pagination.page.unwrap_or(1) - 1, + ) + .await?; + Ok(Json(pagination.res(total_pages, repos))) +} + +async fn get_single_repo( + State(global): State, + Path(id): Path, +) -> crate::Result> { + let repo = global + .db + .repo + .by_id(id) + .await? + .ok_or(axum::http::StatusCode::NOT_FOUND)?; + + Ok(Json(repo)) +} From 50ebffb45987e95e8b2327fb70ec924e2ca3ab4c Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Thu, 17 Aug 2023 10:56:08 +0200 Subject: [PATCH 06/73] feat(server): POST request to create distros --- server/src/db/entities/distro.rs | 1 + server/src/db/query/distro.rs | 6 ++++++ server/src/error.rs | 8 +++++--- server/src/web/api/distros.rs | 9 ++++++++- 4 files changed, 20 insertions(+), 4 deletions(-) diff --git a/server/src/db/entities/distro.rs b/server/src/db/entities/distro.rs index 1d96872..835cba5 100644 --- a/server/src/db/entities/distro.rs +++ b/server/src/db/entities/distro.rs @@ -7,6 +7,7 @@ use serde::{Deserialize, Serialize}; #[sea_orm(table_name = "distro")] pub struct Model { #[sea_orm(primary_key)] + #[serde(skip_deserializing)] pub id: i32, #[sea_orm(unique)] pub slug: String, diff --git a/server/src/db/query/distro.rs b/server/src/db/query/distro.rs index 2d4d1c6..1913f99 100644 --- a/server/src/db/query/distro.rs +++ 
b/server/src/db/query/distro.rs @@ -43,4 +43,10 @@ impl DistroQuery { Distro::insert(model).exec(&self.conn).await } + + pub async fn insert_model(&self, model: distro::Model) -> Result { + let mut model: distro::ActiveModel = model.into(); + model.id = NotSet; + model.insert(&self.conn).await + } } diff --git a/server/src/error.rs b/server/src/error.rs index 4fbb7c4..723e944 100644 --- a/server/src/error.rs +++ b/server/src/error.rs @@ -35,10 +35,12 @@ impl IntoResponse for ServerError { ServerError::IO(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(), ServerError::Axum(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(), ServerError::Status(status) => status.into_response(), - ServerError::Db(sea_orm::DbErr::RecordNotFound(_)) => { - StatusCode::NOT_FOUND.into_response() + ServerError::Db(err) => match err { + sea_orm::DbErr::RecordNotFound(_) => StatusCode::NOT_FOUND, + sea_orm::DbErr::Query(_) => StatusCode::BAD_REQUEST, + _ => StatusCode::INTERNAL_SERVER_ERROR, } - ServerError::Db(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(), + .into_response(), } } } diff --git a/server/src/web/api/distros.rs b/server/src/web/api/distros.rs index feb3679..d846398 100644 --- a/server/src/web/api/distros.rs +++ b/server/src/web/api/distros.rs @@ -9,7 +9,7 @@ use crate::db; pub fn router() -> Router { Router::new() - .route("/", get(get_distros)) + .route("/", get(get_distros).post(post_distro)) .route("/:id", get(get_single_distro)) } @@ -41,3 +41,10 @@ async fn get_single_distro( Ok(Json(repo)) } + +async fn post_distro( + State(global): State, + Json(model): Json, +) -> crate::Result> { + Ok(Json(global.db.distro.insert_model(model).await?)) +} From 9ad19eb36d1dee92c5654b618490ca920fffe09c Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Thu, 17 Aug 2023 14:25:25 +0200 Subject: [PATCH 07/73] refactor(server): move some consts around --- server/src/repo/manager.rs | 14 ++++++-------- server/src/repo/mod.rs | 3 +++ server/src/web/repo.rs | 11 ++++++----- 
3 files changed, 15 insertions(+), 13 deletions(-) diff --git a/server/src/repo/manager.rs b/server/src/repo/manager.rs index c288f30..1c5556e 100644 --- a/server/src/repo/manager.rs +++ b/server/src/repo/manager.rs @@ -5,8 +5,6 @@ use std::fs; use std::io; use std::path::{Path, PathBuf}; -pub const ANY_ARCH: &str = "any"; - /// Overarching abstraction that orchestrates updating the repositories stored on the server pub struct RepoGroupManager { repo_dir: PathBuf, @@ -48,9 +46,9 @@ impl RepoGroupManager { // All architectures should also include the "any" architecture, except for the "any" // architecture itself. - let repo_any_dir = self.repo_dir.join(repo).join(ANY_ARCH); + let repo_any_dir = self.repo_dir.join(repo).join(super::ANY_ARCH); - let any_entries_iter = if arch != ANY_ARCH && repo_any_dir.try_exists()? { + let any_entries_iter = if arch != super::ANY_ARCH && repo_any_dir.try_exists()? { Some(repo_any_dir.read_dir()?) } else { None @@ -159,7 +157,7 @@ impl RepoGroupManager { pkg.write_files(&mut files_file)?; // If a package of type "any" is added, we need to update every existing database - if pkg.info.arch == ANY_ARCH { + if pkg.info.arch == super::ANY_ARCH { self.sync_all(repo)?; } else { self.sync(repo, &pkg.info.arch)?; @@ -193,7 +191,7 @@ impl RepoGroupManager { fs::remove_dir_all(self.pkg_dir.join(sub_path))?; // Removing the "any" architecture updates all other repositories - if arch == ANY_ARCH { + if arch == super::ANY_ARCH { self.sync_all(repo)?; } @@ -250,7 +248,7 @@ impl RepoGroupManager { })?; if sync { - if arch == ANY_ARCH { + if arch == super::ANY_ARCH { self.sync_all(repo)?; } else { self.sync(repo, arch)?; @@ -288,7 +286,7 @@ impl RepoGroupManager { fs::remove_dir_all(self.repo_dir.join(repo).join(arch).join(metadata_dir_name))?; if sync { - if arch == ANY_ARCH { + if arch == super::ANY_ARCH { self.sync_all(&repo.to_string_lossy())?; } else { self.sync(&repo.to_string_lossy(), arch)?; diff --git a/server/src/repo/mod.rs 
b/server/src/repo/mod.rs index 44f0e10..958420e 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -2,3 +2,6 @@ pub mod manager; pub mod package; pub use manager::RepoGroupManager; + +pub const DB_FILE_EXTS: [&str; 4] = [".db", ".files", ".db.tar.gz", ".files.tar.gz"]; +pub const ANY_ARCH: &str = "any"; diff --git a/server/src/web/repo.rs b/server/src/web/repo.rs index f8c3d65..9466af5 100644 --- a/server/src/web/repo.rs +++ b/server/src/web/repo.rs @@ -16,8 +16,6 @@ use tower_http::services::{ServeDir, ServeFile}; use tower_http::validate_request::ValidateRequestHeaderLayer; use uuid::Uuid; -const DB_FILE_EXTS: [&str; 4] = [".db", ".files", ".db.tar.gz", ".files.tar.gz"]; - pub fn router(api_key: &str) -> Router { Router::new() .route( @@ -51,7 +49,10 @@ async fn get_file( let repo_dir = global.config.repo_dir.join(&repo).join(&arch); let repo_exists = tokio::fs::try_exists(&repo_dir).await?; - let res = if DB_FILE_EXTS.iter().any(|ext| file_name.ends_with(ext)) { + let res = if crate::repo::DB_FILE_EXTS + .iter() + .any(|ext| file_name.ends_with(ext)) + { // Append tar extension to ensure we find the file if !file_name.ends_with(".tar.gz") { file_name.push_str(".tar.gz"); @@ -64,7 +65,7 @@ async fn get_file( .config .repo_dir .join(repo) - .join(crate::repo::manager::ANY_ARCH) + .join(crate::repo::ANY_ARCH) .join(file_name); ServeFile::new(path).oneshot(req).await @@ -74,7 +75,7 @@ async fn get_file( .config .pkg_dir .join(repo) - .join(crate::repo::manager::ANY_ARCH) + .join(crate::repo::ANY_ARCH) .join(file_name); if repo_exists { From 9963cff724c39fd92ddfb82b0992ee6cde46cdc9 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sat, 18 May 2024 14:20:05 +0200 Subject: [PATCH 08/73] feat: simplify database scheme --- server/src/db/entities/mod.rs | 5 +- server/src/db/entities/package.rs | 34 +--- server/src/db/entities/package_conflicts.rs | 33 ---- server/src/db/entities/package_file.rs | 2 +- server/src/db/entities/package_group.rs | 2 +- 
server/src/db/entities/package_license.rs | 2 +- server/src/db/entities/package_provides.rs | 33 ---- ...{package_depends.rs => package_related.rs} | 6 +- server/src/db/entities/package_replaces.rs | 33 ---- server/src/db/entities/prelude.rs | 5 +- server/src/db/entities/repo.rs | 1 - .../m20230730_000001_create_repo_tables.rs | 165 +++--------------- server/src/db/mod.rs | 28 +-- server/src/db/query/package.rs | 115 +++++------- server/src/repo/package.rs | 2 +- 15 files changed, 96 insertions(+), 370 deletions(-) delete mode 100644 server/src/db/entities/package_conflicts.rs delete mode 100644 server/src/db/entities/package_provides.rs rename server/src/db/entities/{package_depends.rs => package_related.rs} (88%) delete mode 100644 server/src/db/entities/package_replaces.rs diff --git a/server/src/db/entities/mod.rs b/server/src/db/entities/mod.rs index 1111e7a..ab8f32a 100644 --- a/server/src/db/entities/mod.rs +++ b/server/src/db/entities/mod.rs @@ -3,11 +3,8 @@ pub mod prelude; pub mod package; -pub mod package_conflicts; -pub mod package_depends; pub mod package_file; pub mod package_group; pub mod package_license; -pub mod package_provides; -pub mod package_replaces; +pub mod package_related; pub mod repo; diff --git a/server/src/db/entities/package.rs b/server/src/db/entities/package.rs index b2e2b0b..8ea6797 100644 --- a/server/src/db/entities/package.rs +++ b/server/src/db/entities/package.rs @@ -17,7 +17,7 @@ pub struct Model { pub c_size: i64, pub description: Option, pub url: Option, - pub build_date: DateTime, + pub build_date: String, pub packager: Option, pub pgp_sig: Option, pub pgp_sig_size: Option, @@ -26,20 +26,14 @@ pub struct Model { #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { - #[sea_orm(has_many = "super::package_conflicts::Entity")] - PackageConflicts, - #[sea_orm(has_many = "super::package_depends::Entity")] - PackageDepends, #[sea_orm(has_many = "super::package_file::Entity")] PackageFile, 
#[sea_orm(has_many = "super::package_group::Entity")] PackageGroup, #[sea_orm(has_many = "super::package_license::Entity")] PackageLicense, - #[sea_orm(has_many = "super::package_provides::Entity")] - PackageProvides, - #[sea_orm(has_many = "super::package_replaces::Entity")] - PackageReplaces, + #[sea_orm(has_many = "super::package_related::Entity")] + PackageRelated, #[sea_orm( belongs_to = "super::repo::Entity", from = "Column::RepoId", @@ -50,18 +44,6 @@ pub enum Relation { Repo, } -impl Related for Entity { - fn to() -> RelationDef { - Relation::PackageConflicts.def() - } -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::PackageDepends.def() - } -} - impl Related for Entity { fn to() -> RelationDef { Relation::PackageFile.def() @@ -80,15 +62,9 @@ impl Related for Entity { } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::PackageProvides.def() - } -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::PackageReplaces.def() + Relation::PackageRelated.def() } } diff --git a/server/src/db/entities/package_conflicts.rs b/server/src/db/entities/package_conflicts.rs deleted file mode 100644 index e9e8ba8..0000000 --- a/server/src/db/entities/package_conflicts.rs +++ /dev/null @@ -1,33 +0,0 @@ -//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.12.1 - -use sea_orm::entity::prelude::*; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] -#[sea_orm(table_name = "package_conflicts")] -pub struct Model { - #[sea_orm(primary_key, auto_increment = false)] - pub package_id: i32, - #[sea_orm(primary_key, auto_increment = false)] - pub value: String, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::package::Entity", - from = "Column::PackageId", - to = "super::package::Column::Id", - on_update = "NoAction", - on_delete = "Cascade" - )] - Package, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Package.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/server/src/db/entities/package_file.rs b/server/src/db/entities/package_file.rs index 6e994e0..8607cf8 100644 --- a/server/src/db/entities/package_file.rs +++ b/server/src/db/entities/package_file.rs @@ -9,7 +9,7 @@ pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub package_id: i32, #[sea_orm(primary_key, auto_increment = false)] - pub value: String, + pub path: String, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/server/src/db/entities/package_group.rs b/server/src/db/entities/package_group.rs index 61e69f2..59948c3 100644 --- a/server/src/db/entities/package_group.rs +++ b/server/src/db/entities/package_group.rs @@ -9,7 +9,7 @@ pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub package_id: i32, #[sea_orm(primary_key, auto_increment = false)] - pub value: String, + pub name: String, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/server/src/db/entities/package_license.rs b/server/src/db/entities/package_license.rs index 2920d3f..c4e0d71 100644 --- a/server/src/db/entities/package_license.rs +++ b/server/src/db/entities/package_license.rs @@ -9,7 +9,7 @@ pub struct 
Model { #[sea_orm(primary_key, auto_increment = false)] pub package_id: i32, #[sea_orm(primary_key, auto_increment = false)] - pub value: String, + pub name: String, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/server/src/db/entities/package_provides.rs b/server/src/db/entities/package_provides.rs deleted file mode 100644 index 7fca6ee..0000000 --- a/server/src/db/entities/package_provides.rs +++ /dev/null @@ -1,33 +0,0 @@ -//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.1 - -use sea_orm::entity::prelude::*; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] -#[sea_orm(table_name = "package_provides")] -pub struct Model { - #[sea_orm(primary_key, auto_increment = false)] - pub package_id: i32, - #[sea_orm(primary_key, auto_increment = false)] - pub value: String, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::package::Entity", - from = "Column::PackageId", - to = "super::package::Column::Id", - on_update = "NoAction", - on_delete = "Cascade" - )] - Package, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Package.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/server/src/db/entities/package_depends.rs b/server/src/db/entities/package_related.rs similarity index 88% rename from server/src/db/entities/package_depends.rs rename to server/src/db/entities/package_related.rs index 7e94374..3e25ff3 100644 --- a/server/src/db/entities/package_depends.rs +++ b/server/src/db/entities/package_related.rs @@ -4,14 +4,14 @@ use sea_orm::entity::prelude::*; use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] -#[sea_orm(table_name = "package_depends")] +#[sea_orm(table_name = "package_related")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub package_id: i32, 
#[sea_orm(primary_key, auto_increment = false)] - pub r#type: crate::db::PackageDepend, + pub r#type: crate::db::PackageRelatedEnum, #[sea_orm(primary_key, auto_increment = false)] - pub value: String, + pub name: String, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/server/src/db/entities/package_replaces.rs b/server/src/db/entities/package_replaces.rs deleted file mode 100644 index 0946b2d..0000000 --- a/server/src/db/entities/package_replaces.rs +++ /dev/null @@ -1,33 +0,0 @@ -//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.1 - -use sea_orm::entity::prelude::*; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] -#[sea_orm(table_name = "package_replaces")] -pub struct Model { - #[sea_orm(primary_key, auto_increment = false)] - pub package_id: i32, - #[sea_orm(primary_key, auto_increment = false)] - pub value: String, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::package::Entity", - from = "Column::PackageId", - to = "super::package::Column::Id", - on_update = "NoAction", - on_delete = "Cascade" - )] - Package, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Package.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/server/src/db/entities/prelude.rs b/server/src/db/entities/prelude.rs index bee503c..1a6e503 100644 --- a/server/src/db/entities/prelude.rs +++ b/server/src/db/entities/prelude.rs @@ -1,11 +1,8 @@ //! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.12.1 pub use super::package::Entity as Package; -pub use super::package_conflicts::Entity as PackageConflicts; -pub use super::package_depends::Entity as PackageDepends; pub use super::package_file::Entity as PackageFile; pub use super::package_group::Entity as PackageGroup; pub use super::package_license::Entity as PackageLicense; -pub use super::package_provides::Entity as PackageProvides; -pub use super::package_replaces::Entity as PackageReplaces; +pub use super::package_related::Entity as PackageRelated; pub use super::repo::Entity as Repo; diff --git a/server/src/db/entities/repo.rs b/server/src/db/entities/repo.rs index b7a1af1..25291da 100644 --- a/server/src/db/entities/repo.rs +++ b/server/src/db/entities/repo.rs @@ -8,7 +8,6 @@ use serde::{Deserialize, Serialize}; pub struct Model { #[sea_orm(primary_key)] pub id: i32, - #[sea_orm(unique)] pub name: String, pub description: Option, } diff --git a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs index adefe56..866319e 100644 --- a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs +++ b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs @@ -72,14 +72,14 @@ impl MigrationTrait for Migration { .not_null(), ) .col( - ColumnDef::new(PackageLicense::Value) + ColumnDef::new(PackageLicense::Name) .string_len(255) .not_null(), ) .primary_key( Index::create() .col(PackageLicense::PackageId) - .col(PackageLicense::Value), + .col(PackageLicense::Name), ) .foreign_key( ForeignKey::create() @@ -97,14 +97,14 @@ impl MigrationTrait for Migration { .table(PackageGroup::Table) .col(ColumnDef::new(PackageGroup::PackageId).integer().not_null()) .col( - ColumnDef::new(PackageGroup::Value) + ColumnDef::new(PackageGroup::Name) .string_len(255) .not_null(), ) .primary_key( Index::create() .col(PackageGroup::PackageId) - .col(PackageGroup::Value), + .col(PackageGroup::Name), ) .foreign_key( 
ForeignKey::create() @@ -119,119 +119,28 @@ impl MigrationTrait for Migration { manager .create_table( Table::create() - .table(PackageReplaces::Table) + .table(PackageRelated::Table) .col( - ColumnDef::new(PackageReplaces::PackageId) + ColumnDef::new(PackageRelated::PackageId) .integer() .not_null(), ) + .col(ColumnDef::new(PackageRelated::Type).integer().not_null()) .col( - ColumnDef::new(PackageReplaces::Value) + ColumnDef::new(PackageRelated::Name) .string_len(255) .not_null(), ) .primary_key( Index::create() - .col(PackageReplaces::PackageId) - .col(PackageReplaces::Value), - ) - .foreign_key( - ForeignKey::create() - .name("fk-package_replaces-package_id") - .from(PackageReplaces::Table, PackageReplaces::PackageId) - .to(Package::Table, Package::Id) - .on_delete(ForeignKeyAction::Cascade), - ) - .to_owned(), - ) - .await?; - manager - .create_table( - Table::create() - .table(PackageConflicts::Table) - .col( - ColumnDef::new(PackageConflicts::PackageId) - .integer() - .not_null(), - ) - .col( - ColumnDef::new(PackageConflicts::Value) - .string_len(255) - .not_null(), - ) - .primary_key( - Index::create() - .col(PackageConflicts::PackageId) - .col(PackageConflicts::Value), - ) - .foreign_key( - ForeignKey::create() - .name("fk-package_conflicts-package_id") - .from(PackageConflicts::Table, PackageConflicts::PackageId) - .to(Package::Table, Package::Id) - .on_delete(ForeignKeyAction::Cascade), - ) - .to_owned(), - ) - .await?; - manager - .create_table( - Table::create() - .table(PackageProvides::Table) - .col( - ColumnDef::new(PackageProvides::PackageId) - .integer() - .not_null(), - ) - .col( - ColumnDef::new(PackageProvides::Value) - .string_len(255) - .not_null(), - ) - .primary_key( - Index::create() - .col(PackageProvides::PackageId) - .col(PackageProvides::Value), - ) - .foreign_key( - ForeignKey::create() - .name("fk-package_provides-package_id") - .from(PackageProvides::Table, PackageProvides::PackageId) - .to(Package::Table, Package::Id) - 
.on_delete(ForeignKeyAction::Cascade), - ) - .to_owned(), - ) - .await?; - manager - .create_table( - Table::create() - .table(PackageDepends::Table) - .col( - ColumnDef::new(PackageDepends::PackageId) - .integer() - .not_null(), - ) - .col( - ColumnDef::new(PackageDepends::Type) - .string_len(6) - .not_null(), - ) - .col( - ColumnDef::new(PackageDepends::Value) - .string_len(255) - .not_null(), - ) - .primary_key( - Index::create() - .col(PackageDepends::PackageId) - .col(PackageDepends::Type) - .col(PackageDepends::Value), + .col(PackageRelated::PackageId) + .col(PackageRelated::Type) + .col(PackageRelated::Name), ) .foreign_key( ForeignKey::create() .name("fk-package_depends-package_id") - .from(PackageDepends::Table, PackageDepends::PackageId) + .from(PackageRelated::Table, PackageRelated::PackageId) .to(Package::Table, Package::Id) .on_delete(ForeignKeyAction::Cascade), ) @@ -243,15 +152,11 @@ impl MigrationTrait for Migration { Table::create() .table(PackageFile::Table) .col(ColumnDef::new(PackageFile::PackageId).integer().not_null()) - .col( - ColumnDef::new(PackageFile::Value) - .string_len(255) - .not_null(), - ) + .col(ColumnDef::new(PackageFile::Path).string_len(255).not_null()) .primary_key( Index::create() .col(PackageFile::PackageId) - .col(PackageFile::Value), + .col(PackageFile::Path), ) .foreign_key( ForeignKey::create() @@ -276,16 +181,7 @@ impl MigrationTrait for Migration { .drop_table(Table::drop().table(PackageGroup::Table).to_owned()) .await?; manager - .drop_table(Table::drop().table(PackageReplaces::Table).to_owned()) - .await?; - manager - .drop_table(Table::drop().table(PackageConflicts::Table).to_owned()) - .await?; - manager - .drop_table(Table::drop().table(PackageProvides::Table).to_owned()) - .await?; - manager - .drop_table(Table::drop().table(PackageDepends::Table).to_owned()) + .drop_table(Table::drop().table(PackageRelated::Table).to_owned()) .await?; manager .drop_table(Table::drop().table(PackageFile::Table).to_owned()) @@ 
-331,48 +227,27 @@ pub enum Package { pub enum PackageLicense { Table, PackageId, - Value, + Name, } #[derive(Iden)] pub enum PackageGroup { Table, PackageId, - Value, + Name, } #[derive(Iden)] -pub enum PackageReplaces { - Table, - PackageId, - Value, -} - -#[derive(Iden)] -pub enum PackageConflicts { - Table, - PackageId, - Value, -} - -#[derive(Iden)] -pub enum PackageProvides { - Table, - PackageId, - Value, -} - -#[derive(Iden)] -pub enum PackageDepends { +pub enum PackageRelated { Table, PackageId, Type, - Value, + Name, } #[derive(Iden)] pub enum PackageFile { Table, PackageId, - Value, + Path, } diff --git a/server/src/db/mod.rs b/server/src/db/mod.rs index 6ff5a8f..587052d 100644 --- a/server/src/db/mod.rs +++ b/server/src/db/mod.rs @@ -13,16 +13,22 @@ use migrator::Migrator; type Result = std::result::Result; #[derive(EnumIter, DeriveActiveEnum, Serialize, Deserialize, PartialEq, Eq, Clone, Debug)] -#[sea_orm(rs_type = "String", db_type = "String(Some(6))")] -pub enum PackageDepend { - #[sea_orm(string_value = "depend")] +#[sea_orm(rs_type = "i32", db_type = "Integer")] +pub enum PackageRelatedEnum { + #[sea_orm(num_value = 0)] + Conflicts, + #[sea_orm(num_value = 1)] + Replaces, + #[sea_orm(num_value = 2)] + Provides, + #[sea_orm(num_value = 3)] Depend, - #[sea_orm(string_value = "make")] - Make, - #[sea_orm(string_value = "check")] - Check, - #[sea_orm(string_value = "opt")] - Opt, + #[sea_orm(num_value = 4)] + Makedepend, + #[sea_orm(num_value = 5)] + Checkdepend, + #[sea_orm(num_value = 6)] + Optdepend, } #[derive(Serialize)] @@ -31,9 +37,7 @@ pub struct FullPackage { entry: package::Model, licenses: Vec, groups: Vec, - replaces: Vec, - provides: Vec, - depends: Vec<(PackageDepend, String)>, + related: Vec<(PackageRelatedEnum, String)>, files: Vec, } diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index eed5ead..c977984 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -71,7 +71,7 @@ impl 
PackageQuery { c_size: Set(info.csize), description: Set(info.description), url: Set(info.url), - build_date: Set(info.build_date), + build_date: Set(info.build_date.to_string()), packager: Set(info.packager), pgp_sig: Set(info.pgpsig), pgp_sig_size: Set(info.pgpsigsize), @@ -83,7 +83,7 @@ impl PackageQuery { // Insert all the related tables PackageLicense::insert_many(info.licenses.iter().map(|s| package_license::ActiveModel { package_id: Set(pkg_entry.id), - value: Set(s.to_string()), + name: Set(s.to_string()), })) .on_empty_do_nothing() .exec(&self.conn) @@ -91,64 +91,57 @@ impl PackageQuery { PackageGroup::insert_many(info.groups.iter().map(|s| package_group::ActiveModel { package_id: Set(pkg_entry.id), - value: Set(s.to_string()), + name: Set(s.to_string()), })) .on_empty_do_nothing() .exec(&self.conn) .await?; - PackageReplaces::insert_many(info.replaces.iter().map(|s| package_replaces::ActiveModel { + let related = info + .conflicts + .iter() + .map(|s| (PackageRelatedEnum::Conflicts, s)) + .chain( + info.replaces + .iter() + .map(|s| (PackageRelatedEnum::Replaces, s)), + ) + .chain( + info.provides + .iter() + .map(|s| (PackageRelatedEnum::Provides, s)), + ) + .chain(info.depends.iter().map(|s| (PackageRelatedEnum::Depend, s))) + .chain( + info.makedepends + .iter() + .map(|s| (PackageRelatedEnum::Depend, s)), + ) + .chain( + info.checkdepends + .iter() + .map(|s| (PackageRelatedEnum::Checkdepend, s)), + ) + .chain( + info.optdepends + .iter() + .map(|s| (PackageRelatedEnum::Optdepend, s)), + ); + + PackageRelated::insert_many(related.map(|(t, s)| package_related::ActiveModel { package_id: Set(pkg_entry.id), - value: Set(s.to_string()), - })) - .on_empty_do_nothing() - .exec(&self.conn) - .await?; - - PackageConflicts::insert_many(info.conflicts.iter().map(|s| { - package_conflicts::ActiveModel { - package_id: Set(pkg_entry.id), - value: Set(s.to_string()), - } - })) - .on_empty_do_nothing() - .exec(&self.conn) - .await?; - - 
PackageProvides::insert_many(info.provides.iter().map(|s| package_provides::ActiveModel { - package_id: Set(pkg_entry.id), - value: Set(s.to_string()), - })) - .on_empty_do_nothing() - .exec(&self.conn) - .await?; + r#type: Set(t), + name: Set(s.to_string()), + })); PackageFile::insert_many(pkg.files.iter().map(|s| package_file::ActiveModel { package_id: Set(pkg_entry.id), - value: Set(s.display().to_string()), + path: Set(s.display().to_string()), })) .on_empty_do_nothing() .exec(&self.conn) .await?; - let deps = info - .depends - .iter() - .map(|d| (PackageDepend::Depend, d)) - .chain(info.makedepends.iter().map(|d| (PackageDepend::Make, d))) - .chain(info.checkdepends.iter().map(|d| (PackageDepend::Check, d))) - .chain(info.optdepends.iter().map(|d| (PackageDepend::Opt, d))) - .map(|(t, s)| package_depends::ActiveModel { - package_id: Set(pkg_entry.id), - r#type: Set(t), - value: Set(s.to_string()), - }); - - PackageDepends::insert_many(deps) - .on_empty_do_nothing() - .exec(&self.conn) - .await?; - Ok(()) } @@ -159,51 +152,35 @@ impl PackageQuery { .all(&self.conn) .await? .into_iter() - .map(|e| e.value) + .map(|e| e.name) .collect(); let groups = entry .find_related(PackageGroup) .all(&self.conn) .await? .into_iter() - .map(|e| e.value) + .map(|e| e.name) .collect(); - let replaces = entry - .find_related(PackageReplaces) + let related = entry + .find_related(PackageRelated) .all(&self.conn) .await? .into_iter() - .map(|e| e.value) - .collect(); - let provides = entry - .find_related(PackageProvides) - .all(&self.conn) - .await? - .into_iter() - .map(|e| e.value) - .collect(); - let depends = entry - .find_related(PackageDepends) - .all(&self.conn) - .await? - .into_iter() - .map(|e| (e.r#type, e.value)) + .map(|e| (e.r#type, e.name)) .collect(); let files = entry .find_related(PackageFile) .all(&self.conn) .await? 
.into_iter() - .map(|e| e.value) + .map(|e| e.path) .collect(); Ok(Some(FullPackage { entry, licenses, groups, - replaces, - provides, - depends, + related, files, })) } else { diff --git a/server/src/repo/package.rs b/server/src/repo/package.rs index 18c69c3..f519a91 100644 --- a/server/src/repo/package.rs +++ b/server/src/repo/package.rs @@ -281,7 +281,7 @@ impl From for package::ActiveModel { c_size: Set(info.csize), description: Set(info.description), url: Set(info.url), - build_date: Set(info.build_date), + build_date: Set(info.build_date.to_string()), packager: Set(info.packager), pgp_sig: Set(info.pgpsig), pgp_sig_size: Set(info.pgpsigsize), From e684cfb84ebf889e0968f45aa913b9f3efcbf99e Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 19 May 2024 09:47:39 +0200 Subject: [PATCH 09/73] chore: update dependencies --- Cargo.lock | 1362 ++++++++++++++++++++++------------------ server/Cargo.toml | 4 +- server/src/cli.rs | 11 +- server/src/repo/mod.rs | 8 +- 4 files changed, 751 insertions(+), 634 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 804a3d1..e59252c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" dependencies = [ "gimli", ] @@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.7.6" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ "getrandom", "once_cell", @@ -30,21 +30,22 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.3" 
+version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "getrandom", "once_cell", "version_check", + "zerocopy", ] [[package]] name = "aho-corasick" -version = "1.0.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] @@ -57,9 +58,9 @@ checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd" [[package]] name = "allocator-api2" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "android-tzdata" @@ -78,51 +79,51 @@ dependencies = [ [[package]] name = "anstream" -version = "0.3.2" +version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", - "is-terminal", + "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.1" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" +checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" [[package]] name = "anstyle-parse" -version = "0.2.1" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +checksum = "a64c907d4e79225ac72e2a354c9ce84d50ebb4586dee56c82b3ee73004f537f5" dependencies = [ - "windows-sys", + "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "1.0.1" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" +checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" dependencies = [ "anstyle", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -150,18 +151,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] name = "async-trait" -version = "0.1.71" +version = "0.1.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" +checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] @@ -174,25 +175,32 @@ dependencies = [ ] [[package]] -name = "autocfg" -version = "1.1.0" +name = "atomic-waker" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "axum" -version = "0.6.18" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8175979259124331c1d7bf6586ee7e0da434155e4b2d48ec2c8386281d8df39" +checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" dependencies = [ "async-trait", "axum-core", - "bitflags 1.3.2", "bytes", "futures-util", "http", "http-body", + "http-body-util", "hyper", + "hyper-util", "itoa", "matchit", "memchr", @@ -204,35 +212,40 @@ dependencies = [ "serde_json", "serde_path_to_error", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 1.0.1", "tokio", "tower", "tower-layer", "tower-service", + "tracing", ] [[package]] name = "axum-core" -version = "0.3.4" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" dependencies = [ "async-trait", "bytes", "futures-util", "http", "http-body", + "http-body-util", "mime", + "pin-project-lite", "rustversion", + "sync_wrapper 0.1.2", "tower-layer", "tower-service", + "tracing", ] [[package]] name = "backtrace" -version = "0.3.68" +version = "0.3.71" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" +checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" dependencies = [ "addr2line", "cc", @@ -245,15 +258,9 @@ dependencies = [ [[package]] name = "base64" -version = "0.20.0" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea22880d78093b0cbe17c89f64a7d457941e65759157ec6cb31a31d652b05e5" - -[[package]] -name = "base64" -version = "0.21.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64ct" @@ -280,9 +287,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.3.3" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" dependencies = [ "serde", ] @@ -310,60 +317,39 @@ dependencies = [ [[package]] name = "borsh" -version = "0.10.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b" +checksum = "dbe5b10e214954177fb1dc9fbd20a1a2608fe99e6c832033bdc7cea287a20d77" dependencies = [ "borsh-derive", - "hashbrown 0.12.3", + "cfg_aliases", ] [[package]] name = "borsh-derive" -version = "0.10.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7" +checksum = "d7a8646f94ab393e43e8b35a2558b1624bed28b97ee09c5d15456e3c9463f46d" dependencies = [ - "borsh-derive-internal", - "borsh-schema-derive-internal", + "once_cell", "proc-macro-crate", - "proc-macro2", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" -dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", + 
"syn 2.0.64", + "syn_derive", ] [[package]] name = "bumpalo" -version = "3.13.0" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "bytecheck" -version = "0.6.11" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6372023ac861f6e6dc89c8344a8f398fb42aaba2b5dbc649ca0c0e9dbcb627" +checksum = "23cdc57ce23ac53c931e88a43d06d070a6fd142f2617be5855eb75efc9beb1c2" dependencies = [ "bytecheck_derive", "ptr_meta", @@ -372,9 +358,9 @@ dependencies = [ [[package]] name = "bytecheck_derive" -version = "0.6.11" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" +checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" dependencies = [ "proc-macro2", "quote", @@ -383,21 +369,21 @@ dependencies = [ [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.4.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" [[package]] name = "cc" -version = "1.0.79" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4" [[package]] name = "cfg-if" @@ -406,37 +392,41 
@@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] -name = "chrono" -version = "0.4.26" +name = "cfg_aliases" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" +checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" + +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", - "time 0.1.45", "wasm-bindgen", - "winapi", + "windows-targets 0.52.5", ] [[package]] name = "clap" -version = "4.3.12" +version = "4.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3eab9e8ceb9afdade1ab3f0fd8dbce5b1b2f468ad653baf10e771781b2b67b73" +checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" dependencies = [ "clap_builder", "clap_derive", - "once_cell", ] [[package]] name = "clap_builder" -version = "4.3.12" +version = "4.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f2763db829349bf00cfc06251268865ed4363b93a943174f638daf3ecdba2cd" +checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" dependencies = [ "anstream", "anstyle", @@ -446,82 +436,78 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.3.12" +version = "4.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" +checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" dependencies = [ - "heck", + "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] name = "clap_lex" -version = "0.5.0" +version 
= "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" +checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "colorchoice" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" [[package]] name = "const-oid" -version = "0.9.4" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "795bc6e66a8e340f075fcf6227e417a2dc976b92b91f3cdc778bb858778b6747" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "core-foundation-sys" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "cpufeatures" -version = "0.2.9" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" dependencies = [ "libc", ] [[package]] name = "crc" -version = "3.0.1" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" +checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" dependencies = [ "crc-catalog", ] [[package]] name = "crc-catalog" -version = "2.2.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" +checksum = 
"19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crossbeam-queue" -version = "0.3.8" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" dependencies = [ - "cfg-if", "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.16" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" -dependencies = [ - "cfg-if", -] +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" [[package]] name = "crypto-common" @@ -535,15 +521,25 @@ dependencies = [ [[package]] name = "der" -version = "0.7.7" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7ed52955ce76b1554f509074bb357d3fb8ac9b51288a65a3fd480d1dfba946" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" dependencies = [ "const-oid", "pem-rfc7468", "zeroize", ] +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", + "serde", +] + [[package]] name = "derivative" version = "2.2.0" @@ -575,9 +571,9 @@ checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "either" -version = "1.9.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" dependencies = [ "serde", ] @@ -590,23 +586,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = 
"errno" -version = "0.3.1" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", "libc", + "windows-sys 0.52.0", ] [[package]] @@ -617,7 +602,7 @@ checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" dependencies = [ "cfg-if", "home", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -628,19 +613,24 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "fastrand" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" + +[[package]] +name = "finl_unicode" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" [[package]] name = "flume" -version = "0.10.14" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1657b4441c3403d9f7b3409e47575237dac27b1b5726df654a6ecbf92f0f7577" +checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" dependencies = [ "futures-core", "futures-sink", - "pin-project", "spin 0.9.8", ] @@ -652,9 +642,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -667,9 +657,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" dependencies = [ "futures-channel", "futures-core", @@ -682,9 +672,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" dependencies = [ "futures-core", "futures-sink", @@ -692,15 +682,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] name = "futures-executor" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" dependencies = [ "futures-core", "futures-task", @@ -720,38 +710,38 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" [[package]] name = "futures-macro" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] name = "futures-sink" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" [[package]] name = "futures-task" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-util" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" dependencies = [ "futures-channel", "futures-core", @@ -777,20 +767,20 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.10" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", ] [[package]] name = "gimli" -version = "0.27.3" +version = "0.28.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" [[package]] name = "glob" @@ -800,17 +790,17 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "h2" -version = "0.3.19" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d357c7ae988e7d2182f7d7871d0b963962420b0678b0997ce7de72001aeab782" +checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" dependencies = [ + "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "futures-util", "http", - "indexmap 1.9.3", + "indexmap", "slab", "tokio", "tokio-util", @@ -823,26 +813,26 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.6", + "ahash 0.7.8", ] [[package]] name = "hashbrown" -version = "0.14.0" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.11", "allocator-api2", ] [[package]] name = "hashlink" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312f66718a2d7789ffef4f4b7b213138ed9f1eb3aa1d0d82fc99f88fb3ffd26f" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" dependencies = [ - "hashbrown 0.14.0", + "hashbrown 0.14.5", ] [[package]] @@ -855,19 +845,16 @@ dependencies = [ ] [[package]] -name = "hermit-abi" -version = "0.2.6" +name = "heck" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hermit-abi" -version = "0.3.2" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hex" @@ -877,9 +864,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hkdf" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ "hmac", ] @@ -895,18 +882,18 @@ dependencies = [ [[package]] name = "home" -version = "0.5.5" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ - "windows-sys", + "windows-sys 0.52.0", ] [[package]] name = "http" -version = "0.2.9" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ "bytes", "fnv", @@ -915,20 +902,32 @@ dependencies = [ [[package]] name = "http-body" -version = "0.4.5" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" dependencies = [ "bytes", "http", +] + +[[package]] 
+name = "http-body-util" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", "pin-project-lite", ] [[package]] name = "http-range-header" -version = "0.3.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bfe8eed0a9285ef776bb792479ea3834e8b94e13d615c2f66d03dd50a435a29" +checksum = "08a397c49fec283e3d6211adbe480be95aae5f304cfb923e9970e08956d5168a" [[package]] name = "httparse" @@ -938,19 +937,18 @@ checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.27" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" dependencies = [ "bytes", "futures-channel", - "futures-core", "futures-util", "h2", "http", @@ -959,25 +957,38 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", + "smallvec", + "tokio", +] + +[[package]] +name = "hyper-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "hyper", + "pin-project-lite", "socket2", "tokio", - "tower-service", - "tracing", - "want", ] [[package]] name = "iana-time-zone" -version = "0.1.57" +version = "0.1.60" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "windows", + "windows-core", ] [[package]] @@ -991,9 +1002,9 @@ dependencies = [ [[package]] name = "idna" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -1001,66 +1012,51 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.9.3" +version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", -] - -[[package]] -name = "indexmap" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown 0.14.0", + "hashbrown 0.14.5", ] [[package]] name = "inherent" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce243b1bfa62ffc028f1cc3b6034ec63d649f3031bc8a4fbbb004e1ac17d1f68" +checksum = "0122b7114117e64a63ac49f752a5ca4624d534c7b1c7de796ac196381cd2d947" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] -name = "is-terminal" -version = "0.4.9" +name = "is_terminal_polyfill" +version = "1.70.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" -dependencies = [ - "hermit-abi 0.3.2", - "rustix", - "windows-sys", -] +checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" [[package]] name = "itertools" -version = "0.10.5" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.8" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b02a5381cc465bd3041d84623d0fa3b66738b52b8e2fc3bab8ad63ab032f4a" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "js-sys" -version = "0.3.64" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" dependencies = [ "wasm-bindgen", ] @@ -1092,21 +1088,21 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.147" +version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libm" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libsqlite3-sys" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" 
+checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" dependencies = [ "cc", "pkg-config", @@ -1115,15 +1111,15 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.3" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" -version = "0.4.10" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -1131,9 +1127,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.19" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" [[package]] name = "matchers" @@ -1146,24 +1142,25 @@ dependencies = [ [[package]] name = "matchit" -version = "0.7.0" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" [[package]] name = "md-5" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ + "cfg-if", "digest", ] [[package]] name = "memchr" -version = "2.5.0" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" [[package]] name = "mime" @@ -1189,22 +1186,22 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.1" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" dependencies = [ "adler", ] [[package]] name = "mio" -version = "0.8.8" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys", + "wasi", + "windows-sys 0.48.0", ] [[package]] @@ -1229,11 +1226,10 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.3" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7" dependencies = [ - "autocfg", "num-integer", "num-traits", ] @@ -1256,20 +1252,25 @@ dependencies = [ ] [[package]] -name = "num-integer" -version = "0.1.45" +name = "num-conv" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ - "autocfg", 
"num-traits", ] [[package]] name = "num-iter" -version = "0.1.43" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" dependencies = [ "autocfg", "num-integer", @@ -1278,9 +1279,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.16" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", "libm", @@ -1288,34 +1289,34 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.2.6", + "hermit-abi", "libc", ] [[package]] name = "object" -version = "0.31.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "ordered-float" -version = "3.7.0" +version = "3.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fc2dbde8f8a79f2102cc474ceb0ad68e3b80b85289ea62389b60e66777e4213" +checksum = 
"f1e1c390732d15f1d48471625cd92d154e66db2c56645e29a9cd26f4699f72dc" dependencies = [ "num-traits", ] @@ -1337,11 +1338,11 @@ version = "0.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec4c6225c69b4ca778c0aea097321a64c421cf4577b331c61b229267edabb6f8" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] @@ -1352,9 +1353,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "parking_lot" -version = "0.12.1" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" dependencies = [ "lock_api", "parking_lot_core", @@ -1362,22 +1363,22 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.8" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall", + "redox_syscall 0.5.1", "smallvec", - "windows-targets", + "windows-targets 0.52.5", ] [[package]] name = "paste" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "pem-rfc7468" @@ -1390,35 +1391,35 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = 
"e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pin-project" -version = "1.1.2" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030ad2bc4db10a8944cb0d837f158bdfec4d4a4873ab701a95046770d11f8842" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.2" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] name = "pin-project-lite" -version = "0.2.10" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" [[package]] name = "pin-utils" @@ -1449,9 +1450,15 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.27" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" @@ -1461,11 +1468,11 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "proc-macro-crate" -version = "0.1.5" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" dependencies = [ - "toml", + "toml_edit", ] [[package]] @@ -1494,9 +1501,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.64" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da" +checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" dependencies = [ "unicode-ident", ] @@ -1523,9 +1530,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.29" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ "proc-macro2", ] @@ -1568,23 +1575,32 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.3.5" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ "bitflags 1.3.2", ] [[package]] -name = "regex" -version = "1.9.1" +name = "redox_syscall" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" +dependencies = [ + "bitflags 2.5.0", +] + +[[package]] +name = "regex" +version = "1.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.3.3", - "regex-syntax 0.7.4", + "regex-automata 0.4.6", + "regex-syntax 
0.8.3", ] [[package]] @@ -1598,13 +1614,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.3" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.7.4", + "regex-syntax 0.8.3", ] [[package]] @@ -1615,15 +1631,15 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.7.4" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" +checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "rend" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab" +checksum = "71fe3824f5629716b1589be05dacd749f6aa084c87e00e016714a8cdfccc997c" dependencies = [ "bytecheck", ] @@ -1652,27 +1668,28 @@ dependencies = [ [[package]] name = "ring" -version = "0.16.20" +version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", + "cfg-if", + "getrandom", "libc", - "once_cell", - "spin 0.5.2", + "spin 0.9.8", "untrusted", - "web-sys", - "winapi", + "windows-sys 0.52.0", ] [[package]] name = "rkyv" -version = "0.7.42" +version = "0.7.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0200c8230b013893c0b2d6213d6ec64ed2b9be2e0e016682b7224ff82cff5c58" +checksum = "5cba464629b3394fc4dbc6f940ff8f5b4ff5c7aef40f29166fd4ad12acbc99c0" dependencies = [ "bitvec", 
"bytecheck", + "bytes", "hashbrown 0.12.3", "ptr_meta", "rend", @@ -1684,9 +1701,9 @@ dependencies = [ [[package]] name = "rkyv_derive" -version = "0.7.42" +version = "0.7.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d" +checksum = "a7dddfff8de25e6f62b9d64e6e432bf1c6736c57d20323e15ee10435fbda7c65" dependencies = [ "proc-macro2", "quote", @@ -1695,16 +1712,14 @@ dependencies = [ [[package]] name = "rsa" -version = "0.9.2" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ab43bb47d23c1a631b4b680199a45255dce26fa9ab2fa902581f624ff13e6a8" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" dependencies = [ - "byteorder", "const-oid", "digest", "num-bigint-dig", "num-integer", - "num-iter", "num-traits", "pkcs1", "pkcs8", @@ -1717,13 +1732,12 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.31.0" +version = "1.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a2ab0025103a60ecaaf3abf24db1db240a4e1c15837090d2c32f625ac98abea" +checksum = "1790d1c4c0ca81211399e0e0af16333276f375209e71a37b67698a373db5b47a" dependencies = [ "arrayvec", "borsh", - "byteorder", "bytes", "num-traits", "rand", @@ -1734,28 +1748,28 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustix" -version = "0.38.4" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ - "bitflags 2.3.3", + 
"bitflags 2.5.0", "errno", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] name = "rustls" -version = "0.21.5" +version = "0.21.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79ea77c539259495ce8ca47f53e66ae0330a8819f67e23ac96ca02f50e7b7d36" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "ring", "rustls-webpki", @@ -1764,18 +1778,18 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ - "base64 0.21.2", + "base64", ] [[package]] name = "rustls-webpki" -version = "0.101.2" +version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "513722fd73ad80a71f72b61009ea1b584bcfa1483ca93949c8f290298837fa59" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ "ring", "untrusted", @@ -1783,27 +1797,27 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.13" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc31bd9b61a32c31f9650d18add92aa83a49ba979c143eefd27fe7177b05bd5f" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" [[package]] name = "ryu" -version = "1.0.14" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe232bdf6be8c8de797b22184ee71118d63780ea42ac85b61d1baa6d3b782ae9" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" 
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ "ring", "untrusted", @@ -1815,18 +1829,18 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3bd3534a9978d0aa7edd2808dc1f8f31c4d0ecd31ddf71d997b3c98e9f3c9114" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] name = "sea-orm" -version = "0.12.1" +version = "0.12.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90b508f060b689cd91abd76973ba4f1b0f416bb09915e2177fd3c6f853be76bc" +checksum = "c8814e37dc25de54398ee62228323657520b7f29713b8e238649385dbe473ee0" dependencies = [ "async-stream", "async-trait", @@ -1844,7 +1858,7 @@ dependencies = [ "sqlx", "strum", "thiserror", - "time 0.3.23", + "time", "tracing", "url", "uuid", @@ -1852,9 +1866,9 @@ dependencies = [ [[package]] name = "sea-orm-cli" -version = "0.12.1" +version = "0.12.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d3b389690bccc6565e6a6af367599957b196288c4f556a7b4157a8c5086eb8d" +checksum = "620bc560062ae251b1366bde43b3f1508445cab5c2c8cbdb397034638ab1b357" dependencies = [ "chrono", "clap", @@ -1869,23 +1883,23 @@ dependencies = [ [[package]] name = "sea-orm-macros" -version = "0.12.1" +version = "0.12.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d49560a5a1bbd57f82fa1a622a8deefa51e44ce3a0b27f012f50bb6092a914f" +checksum = "5e115c6b078e013aa963cc2d38c196c2c40b05f03d0ac872fe06b6e0d5265603" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", "sea-bae", - "syn 2.0.25", + "syn 2.0.64", 
"unicode-ident", ] [[package]] name = "sea-orm-migration" -version = "0.12.1" +version = "0.12.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdccbb31e93d1a426a0df84f5f36d0b84d302444c3156e3a4fedb0b8da64ae87" +checksum = "ee8269bc6ff71afd6b78aa4333ac237a69eebd2cdb439036291e64fb4b8db23c" dependencies = [ "async-trait", "clap", @@ -1900,9 +1914,9 @@ dependencies = [ [[package]] name = "sea-query" -version = "0.30.0" +version = "0.30.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aeb899964df7038e7274306b742951b82a04f835bca8a4683a4c254a6bf35fa" +checksum = "4166a1e072292d46dc91f31617c2a1cdaf55a8be4b5c9f4bf2ba248e3ac4999b" dependencies = [ "bigdecimal", "chrono", @@ -1912,7 +1926,7 @@ dependencies = [ "rust_decimal", "sea-query-derive", "serde_json", - "time 0.3.23", + "time", "uuid", ] @@ -1928,28 +1942,28 @@ dependencies = [ "sea-query", "serde_json", "sqlx", - "time 0.3.23", + "time", "uuid", ] [[package]] name = "sea-query-derive" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd78f2e0ee8e537e9195d1049b752e0433e2cac125426bccb7b5c3e508096117" +checksum = "25a82fcb49253abcb45cdcb2adf92956060ec0928635eb21b4f7a6d8f25ab0bc" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.64", "thiserror", ] [[package]] name = "sea-schema" -version = "0.14.0" +version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3e09eb40c78cee8fef8dfbb648036a26b7ad1f618499203ad0e8b6f97593f7f" +checksum = "30d148608012d25222442d1ebbfafd1228dbc5221baf4ec35596494e27a2394e" dependencies = [ "futures", "sea-query", @@ -1962,7 +1976,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6f686050f76bffc4f635cda8aea6df5548666b830b52387e8bc7de11056d11e" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", "syn 1.0.109", @@ 
-1976,29 +1990,29 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" [[package]] name = "serde" -version = "1.0.178" +version = "1.0.202" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60363bdd39a7be0266a520dab25fdc9241d2f987b08a01e01f0ec6d06a981348" +checksum = "226b61a0d411b2ba5ff6d7f73a476ac4f8bb900373459cd00fab8512828ba395" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.178" +version = "1.0.202" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28482318d6641454cb273da158647922d1be6b5a2fcc6165cd89ebdd7ed576b" +checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] name = "serde_json" -version = "1.0.100" +version = "1.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c" +checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" dependencies = [ "itoa", "ryu", @@ -2007,9 +2021,9 @@ dependencies = [ [[package]] name = "serde_path_to_error" -version = "0.1.13" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc4422959dd87a76cb117c191dcbffc20467f06c9100b76721dab370f24d3a" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" dependencies = [ "itoa", "serde", @@ -2029,9 +2043,9 @@ dependencies = [ [[package]] name = "sha1" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", @@ -2040,9 +2054,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.7" +version = "0.10.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", @@ -2051,37 +2065,40 @@ dependencies = [ [[package]] name = "sha256" -version = "1.1.4" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08a975c1bc0941703000eaf232c4d8ce188d8d5408d6344b6b2c8c6262772828" +checksum = "18278f6a914fa3070aa316493f7d2ddfb9ac86ebc06fa3b83bffda487e9065b0" dependencies = [ + "async-trait", + "bytes", "hex", "sha2", + "tokio", ] [[package]] name = "sharded-slab" -version = "0.1.4" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ "lazy_static", ] [[package]] name = "signal-hook-registry" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] [[package]] name = "signature" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest", "rand_core", @@ -2095,27 +2112,27 @@ checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" [[package]] name = "slab" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +checksum = 
"8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ "autocfg", ] [[package]] name = "smallvec" -version = "1.10.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "socket2" -version = "0.4.9" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" dependencies = [ "libc", - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -2135,9 +2152,9 @@ dependencies = [ [[package]] name = "spki" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der", @@ -2145,9 +2162,9 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e" +checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c" dependencies = [ "itertools", "nom", @@ -2156,9 +2173,9 @@ dependencies = [ [[package]] name = "sqlx" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e58421b6bc416714d5115a2ca953718f6c621a51b68e4f4922aea5a4391a721" +checksum = "c9a2ccff1a000a5a59cd33da541d9f2fdcd9e6e8229cc200565942bff36d0aaa" dependencies = [ "sqlx-core", "sqlx-macros", @@ -2169,11 +2186,11 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.7.1" +version = "0.7.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd4cef4251aabbae751a3710927945901ee1d97ee96d757f6880ebb9a79bfd53" +checksum = "24ba59a9342a3d9bab6c56c118be528b27c9b60e490080e9711a04dccac83ef6" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.11", "atoi", "bigdecimal", "byteorder", @@ -2181,7 +2198,6 @@ dependencies = [ "chrono", "crc", "crossbeam-queue", - "dotenvy", "either", "event-listener", "futures-channel", @@ -2191,7 +2207,7 @@ dependencies = [ "futures-util", "hashlink", "hex", - "indexmap 2.0.0", + "indexmap", "log", "memchr", "once_cell", @@ -2206,7 +2222,7 @@ dependencies = [ "smallvec", "sqlformat", "thiserror", - "time 0.3.23", + "time", "tokio", "tokio-stream", "tracing", @@ -2217,9 +2233,9 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "208e3165167afd7f3881b16c1ef3f2af69fa75980897aac8874a0696516d12c2" +checksum = "4ea40e2345eb2faa9e1e5e326db8c34711317d2b5e08d0d5741619048a803127" dependencies = [ "proc-macro2", "quote", @@ -2230,13 +2246,13 @@ dependencies = [ [[package]] name = "sqlx-macros-core" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a4a8336d278c62231d87f24e8a7a74898156e34c1c18942857be2acb29c7dfc" +checksum = "5833ef53aaa16d860e92123292f1f6a3d53c34ba8b1969f152ef1a7bb803f3c8" dependencies = [ "dotenvy", "either", - "heck", + "heck 0.4.1", "hex", "once_cell", "proc-macro2", @@ -2256,14 +2272,14 @@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ca69bf415b93b60b80dc8fda3cb4ef52b2336614d8da2de5456cc942a110482" +checksum = "1ed31390216d20e538e447a7a9b959e06ed9fc51c37b514b46eb758016ecd418" dependencies = [ "atoi", - "base64 0.21.2", + "base64", "bigdecimal", - "bitflags 2.3.3", + "bitflags 2.5.0", "byteorder", "bytes", 
"chrono", @@ -2295,7 +2311,7 @@ dependencies = [ "sqlx-core", "stringprep", "thiserror", - "time 0.3.23", + "time", "tracing", "uuid", "whoami", @@ -2303,14 +2319,14 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0db2df1b8731c3651e204629dd55e52adbae0462fa1bdcbed56a2302c18181e" +checksum = "7c824eb80b894f926f89a0b9da0c7f435d27cdd35b8c655b114e58223918577e" dependencies = [ "atoi", - "base64 0.21.2", + "base64", "bigdecimal", - "bitflags 2.3.3", + "bitflags 2.5.0", "byteorder", "chrono", "crc", @@ -2334,13 +2350,12 @@ dependencies = [ "rust_decimal", "serde", "serde_json", - "sha1", "sha2", "smallvec", "sqlx-core", "stringprep", "thiserror", - "time 0.3.23", + "time", "tracing", "uuid", "whoami", @@ -2348,9 +2363,9 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4c21bf34c7cae5b283efb3ac1bcc7670df7561124dc2f8bdc0b59be40f79a2" +checksum = "b244ef0a8414da0bed4bb1910426e890b19e5e9bccc27ada6b797d05c55ae0aa" dependencies = [ "atoi", "chrono", @@ -2365,9 +2380,10 @@ dependencies = [ "percent-encoding", "serde", "sqlx-core", - "time 0.3.23", + "time", "tracing", "url", + "urlencoding", "uuid", ] @@ -2379,19 +2395,20 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "stringprep" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3737bde7edce97102e0e2b15365bf7a20bfdb5f60f4f9e8d7004258a51a8da" +checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" dependencies = [ + "finl_unicode", "unicode-bidi", "unicode-normalization", ] [[package]] name = "strsim" -version = "0.10.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "strum" @@ -2418,21 +2435,39 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.25" +version = "2.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15e3fc8c0c74267e2df136e5e5fb656a464158aa57624053375eb9c8c6e25ae2" +checksum = "7ad3dee41f36859875573074334c200d1add8e4a87bb37113ebd31d926b7b11f" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "syn_derive" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" +dependencies = [ + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.64", +] + [[package]] name = "sync_wrapper" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" + [[package]] name = "tap" version = "1.0.1" @@ -2441,42 +2476,41 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.7.0" +version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5486094ee78b2e5038a6382ed7645bc084dc2ec433426ca4c3cb61e2007b8998" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if", "fastrand", - "redox_syscall", "rustix", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] name = "thiserror" -version = "1.0.44" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" +checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.44" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] name = "thread_local" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ "cfg-if", "once_cell", @@ -2484,22 +2518,14 @@ dependencies = [ [[package]] name = "time" -version = "0.1.45" +version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", -] - -[[package]] -name = "time" -version = "0.3.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ + "deranged", "itoa", + "num-conv", + "powerfmt", "serde", "time-core", "time-macros", @@ -2507,16 +2533,17 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.10" +version 
= "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ + "num-conv", "time-core", ] @@ -2537,11 +2564,10 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.29.1" +version = "1.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" +checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" dependencies = [ - "autocfg", "backtrace", "bytes", "libc", @@ -2552,25 +2578,25 @@ dependencies = [ "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] name = "tokio-macros" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] name = "tokio-stream" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", "pin-project-lite", @@ -2579,25 +2605,32 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.8" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", 
"tokio", - "tracing", ] [[package]] -name = "toml" -version = "0.5.11" +name = "toml_datetime" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" + +[[package]] +name = "toml_edit" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ - "serde", + "indexmap", + "toml_datetime", + "winnow", ] [[package]] @@ -2618,17 +2651,17 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.4.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8bd22a874a2d0b70452d5597b12c537331d49060824a95f49f108994f94aa4c" +checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" dependencies = [ - "base64 0.20.0", - "bitflags 2.3.3", + "base64", + "bitflags 2.5.0", "bytes", - "futures-core", "futures-util", "http", "http-body", + "http-body-util", "http-range-header", "httpdate", "mime", @@ -2656,11 +2689,10 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "cfg-if", "log", "pin-project-lite", "tracing-attributes", @@ -2669,20 +2701,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies 
= [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", ] [[package]] name = "tracing-core" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", "valuable", @@ -2690,20 +2722,20 @@ dependencies = [ [[package]] name = "tracing-log" -version = "0.1.3" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ - "lazy_static", "log", + "once_cell", "tracing-core", ] [[package]] name = "tracing-subscriber" -version = "0.3.17" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ "matchers", "nu-ansi-term", @@ -2717,53 +2749,47 @@ dependencies = [ "tracing-log", ] -[[package]] -name = "try-lock" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" - [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "unicase" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" 
dependencies = [ "version_check", ] [[package]] name = "unicode-bidi" -version = "0.3.13" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" -version = "1.0.10" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22049a19f4a68748a168c0fc439f9516686aa045927ff767eca0a85101fb6e73" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" dependencies = [ "tinyvec", ] [[package]] name = "unicode-segmentation" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" [[package]] name = "unicode_categories" @@ -2773,21 +2799,27 @@ checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" [[package]] name = "untrusted" -version = "0.7.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" 
dependencies = [ "form_urlencoded", "idna", "percent-encoding", ] +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + [[package]] name = "utf8parse" version = "0.2.1" @@ -2796,9 +2828,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.4.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be" +checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0" dependencies = [ "getrandom", "serde", @@ -2822,21 +2854,6 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -2844,10 +2861,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] -name = "wasm-bindgen" -version = "0.2.87" +name = "wasite" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -2855,24 +2878,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.87" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.87" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2880,47 +2903,38 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.87" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.64", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.87" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" - -[[package]] -name = "web-sys" -version = "0.3.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" -dependencies = [ - "js-sys", - "wasm-bindgen", -] +checksum = 
"af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" [[package]] name = "webpki-roots" -version = "0.24.0" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888" -dependencies = [ - "rustls-webpki", -] +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" [[package]] name = "whoami" -version = "1.4.1" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" +checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" +dependencies = [ + "redox_syscall 0.4.1", + "wasite", +] [[package]] name = "winapi" @@ -2945,12 +2959,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] -name = "windows" -version = "0.48.0" +name = "windows-core" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets", + "windows-targets 0.52.5", ] [[package]] @@ -2959,65 +2973,147 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets", + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.5", ] [[package]] name = "windows-targets" -version = "0.48.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +dependencies = [ + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", ] [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" [[package]] name = "windows_i686_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" + +[[package]] +name = "winnow" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] [[package]] name = "wyz" @@ -3029,7 +3125,27 @@ dependencies = [ ] [[package]] -name = "zeroize" -version = "1.6.0" +name = "zerocopy" +version = "0.7.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" +checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.64", +] + +[[package]] +name = "zeroize" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" diff --git a/server/Cargo.toml b/server/Cargo.toml index bf4fd7f..a234377 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -7,7 +7,7 @@ authors = ["Jef Roosens"] # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -axum = { version = "0.6.18", features = ["http2"] } +axum = { version = "0.7.5", features = ["http2"] } chrono = { version = "0.4.26", features = ["serde"] } clap = { version = "4.3.12", features = ["env", "derive"] } futures = "0.3.28" @@ -18,7 +18,7 @@ sha256 = "1.1.4" tokio = { version = "1.29.1", features = ["full"] } tokio-util = { version = "0.7.8", features = ["io"] } tower = { version = "0.4.13", features = ["make"] } -tower-http = { version = "0.4.1", features = ["fs", "trace", "auth"] } +tower-http = { version = "0.5.2", features = ["fs", "trace", "auth"] } tracing = "0.1.37" tracing-subscriber = { version = "0.3.17", features = ["env-filter"] } uuid = { version = "1.4.0", features = ["v4"] } diff --git a/server/src/cli.rs b/server/src/cli.rs index 540f457..14a5808 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -101,12 +101,11 @@ impl Cli { .with_state(global) .layer(TraceLayer::new_for_http()); + let domain: String = format!("0.0.0.0:{}", self.port).parse().unwrap(); + let listener = tokio::net::TcpListener::bind(domain).await?; // run it with hyper on localhost:3000 - Ok( - axum::Server::bind(&format!("0.0.0.0:{}", self.port).parse().unwrap()) - .serve(app.into_make_service()) - .await - .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?, - ) + Ok(axum::serve(listener, app.into_make_service()) + .await + .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?) 
} } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 21acf81..4681fb6 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -5,8 +5,8 @@ pub use manager::RepoGroupManager; use std::path::PathBuf; -use axum::body::Body; -use axum::extract::{BodyStream, Path, State}; +use axum::body::{Body, BodyDataStream}; +use axum::extract::{Path, State}; use axum::http::Request; use axum::http::StatusCode; use axum::response::IntoResponse; @@ -98,13 +98,15 @@ async fn get_file( async fn post_package_archive( State(global): State, Path(repo): Path, - mut body: BodyStream, + body: Body, ) -> crate::Result<()> { // We first stream the uploaded file to disk let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); let path = global.config.pkg_dir.join(uuid.to_string()); let mut f = fs::File::create(&path).await?; + let mut body = body.into_data_stream(); + while let Some(chunk) = body.next().await { f.write_all(&chunk?).await?; } From e1642d939ba1e44b4c7f2e665d02ebc4646fc747 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Tue, 21 May 2024 08:49:46 +0200 Subject: [PATCH 10/73] feat: some experimentation with api filtering --- server/src/api/mod.rs | 33 ++++++++++++++++++++++++--------- server/src/api/pagination.rs | 2 +- server/src/db/mod.rs | 3 ++- server/src/db/query/mod.rs | 4 ++-- server/src/db/query/package.rs | 29 +++++++++++++++++++++++++++-- server/src/db/query/repo.rs | 13 ++++++++++++- 6 files changed, 68 insertions(+), 16 deletions(-) diff --git a/server/src/api/mod.rs b/server/src/api/mod.rs index 800587f..2577718 100644 --- a/server/src/api/mod.rs +++ b/server/src/api/mod.rs @@ -1,5 +1,7 @@ mod pagination; +use sea_orm::{sea_query::IntoCondition, *}; + use axum::extract::{Path, Query, State}; use axum::routing::get; use axum::Json; @@ -7,7 +9,7 @@ use axum::Router; use pagination::PaginatedResponse; -use crate::db; +use crate::db::{self, *}; pub fn router() -> Router { Router::new() @@ -20,16 +22,27 @@ pub fn router() -> Router { async fn 
get_repos( State(global): State, Query(pagination): Query, + Query(filter): Query, ) -> crate::Result>> { - let (total_pages, repos) = global - .db - .repo - .page( - pagination.per_page.unwrap_or(25), - pagination.page.unwrap_or(1) - 1, - ) + let page = pagination.page.unwrap_or(1) - 1; + let per_page = pagination.per_page.unwrap_or(25); + + let paginator = Repo::find() + .filter(filter) + .order_by_asc(package::Column::Id) + .paginate(&global.db, pagination.per_page.unwrap_or(25)); + let items = paginator + .fetch_page(pagination.page.unwrap_or(1) - 1) .await?; - Ok(Json(pagination.res(total_pages, repos))) + let total_pages = paginator.num_pages().await?; + + Ok(Json(PaginatedResponse { + page, + per_page, + total_pages, + count: items.len(), + items, + })) } async fn get_single_repo( @@ -49,6 +62,7 @@ async fn get_single_repo( async fn get_packages( State(global): State, Query(pagination): Query, + Query(filter): Query, ) -> crate::Result>> { let (total_pages, pkgs) = global .db @@ -56,6 +70,7 @@ async fn get_packages( .page( pagination.per_page.unwrap_or(25), pagination.page.unwrap_or(1) - 1, + filter, ) .await?; diff --git a/server/src/api/pagination.rs b/server/src/api/pagination.rs index aa1e5cb..db1ffa0 100644 --- a/server/src/api/pagination.rs +++ b/server/src/api/pagination.rs @@ -1,6 +1,6 @@ use serde::{Deserialize, Serialize}; -pub const DEFAULT_PAGE: u64 = 0; +pub const DEFAULT_PAGE: u64 = 1; pub const DEFAULT_PER_PAGE: u64 = 25; #[derive(Deserialize)] diff --git a/server/src/db/mod.rs b/server/src/db/mod.rs index 587052d..a7a7f66 100644 --- a/server/src/db/mod.rs +++ b/server/src/db/mod.rs @@ -1,7 +1,7 @@ mod conn; pub mod entities; mod migrator; -mod query; +pub mod query; use sea_orm::{ConnectOptions, Database, DatabaseConnection, DeriveActiveEnum, EnumIter}; use sea_orm_migration::MigratorTrait; @@ -14,6 +14,7 @@ type Result = std::result::Result; #[derive(EnumIter, DeriveActiveEnum, Serialize, Deserialize, PartialEq, Eq, Clone, Debug)] 
#[sea_orm(rs_type = "i32", db_type = "Integer")] +#[serde(rename_all = "lowercase")] pub enum PackageRelatedEnum { #[sea_orm(num_value = 0)] Conflicts, diff --git a/server/src/db/query/mod.rs b/server/src/db/query/mod.rs index a52cccf..32cc3c5 100644 --- a/server/src/db/query/mod.rs +++ b/server/src/db/query/mod.rs @@ -1,5 +1,5 @@ -mod package; -mod repo; +pub mod package; +pub mod repo; pub use package::PackageQuery; pub use repo::RepoQuery; diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index c977984..8649dea 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -1,4 +1,5 @@ -use sea_orm::*; +use sea_orm::{sea_query::IntoCondition, *}; +use serde::Deserialize; use crate::db::*; @@ -7,6 +8,25 @@ pub struct PackageQuery { conn: DatabaseConnection, } +#[derive(Deserialize)] +pub struct Filter { + repo: Option, + arch: Option, + name: Option, +} + +impl IntoCondition for Filter { + fn into_condition(self) -> Condition { + Condition::all() + .add_option(self.repo.map(|repo| package::Column::RepoId.eq(repo))) + .add_option(self.arch.map(|arch| package::Column::Arch.eq(arch))) + .add_option( + self.name + .map(|name| package::Column::Name.like(format!("%{}%", name))), + ) + } +} + impl PackageQuery { pub fn new(conn: DatabaseConnection) -> Self { Self { conn } @@ -16,8 +36,10 @@ impl PackageQuery { &self, per_page: u64, page: u64, + filter: Filter, ) -> super::Result<(u64, Vec)> { let paginator = Package::find() + .filter(filter) .order_by_asc(package::Column::Id) .paginate(&self.conn, per_page); let packages = paginator.fetch_page(page).await?; @@ -132,7 +154,10 @@ impl PackageQuery { package_id: Set(pkg_entry.id), r#type: Set(t), name: Set(s.to_string()), - })); + })) + .on_empty_do_nothing() + .exec(&self.conn) + .await?; PackageFile::insert_many(pkg.files.iter().map(|s| package_file::ActiveModel { package_id: Set(pkg_entry.id), diff --git a/server/src/db/query/repo.rs b/server/src/db/query/repo.rs index 
5e54fdc..65177df 100644 --- a/server/src/db/query/repo.rs +++ b/server/src/db/query/repo.rs @@ -1,4 +1,4 @@ -use sea_orm::*; +use sea_orm::{sea_query::IntoCondition, *}; use crate::db::*; @@ -7,6 +7,17 @@ pub struct RepoQuery { conn: DatabaseConnection, } +#[derive(Deserialize)] +pub struct Filter { + name: Option, +} + +impl IntoCondition for Filter { + fn into_condition(self) -> Condition { + Condition::all().add_option(self.name.map(|name| package::Column::Name.like(name))) + } +} + impl RepoQuery { pub fn new(conn: DatabaseConnection) -> Self { Self { conn } From 45f1abade39390332056e41c24ff1c2f2e4cf958 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Tue, 21 May 2024 09:16:45 +0200 Subject: [PATCH 11/73] refactor: restructure database query code --- server/src/api/mod.rs | 28 +-- server/src/cli.rs | 7 +- server/src/db/conn.rs | 61 ------ server/src/db/mod.rs | 29 +-- server/src/db/query/mod.rs | 3 - server/src/db/query/package.rs | 341 ++++++++++++++++----------------- server/src/db/query/repo.rs | 75 ++++---- server/src/main.rs | 2 +- server/src/repo/mod.rs | 51 ++--- 9 files changed, 243 insertions(+), 354 deletions(-) delete mode 100644 server/src/db/conn.rs diff --git a/server/src/api/mod.rs b/server/src/api/mod.rs index 2577718..09b6c95 100644 --- a/server/src/api/mod.rs +++ b/server/src/api/mod.rs @@ -1,6 +1,6 @@ mod pagination; -use sea_orm::{sea_query::IntoCondition, *}; +use sea_orm::{*}; use axum::extract::{Path, Query, State}; use axum::routing::get; @@ -49,10 +49,7 @@ async fn get_single_repo( State(global): State, Path(id): Path, ) -> crate::Result> { - let repo = global - .db - .repo - .by_id(id) + let repo = db::query::repo::by_id(&global.db, id) .await? 
.ok_or(axum::http::StatusCode::NOT_FOUND)?; @@ -64,15 +61,13 @@ async fn get_packages( Query(pagination): Query, Query(filter): Query, ) -> crate::Result>> { - let (total_pages, pkgs) = global - .db - .pkg - .page( - pagination.per_page.unwrap_or(25), - pagination.page.unwrap_or(1) - 1, - filter, - ) - .await?; + let (total_pages, pkgs) = db::query::package::page( + &global.db, + pagination.per_page.unwrap_or(25), + pagination.page.unwrap_or(1) - 1, + filter, + ) + .await?; Ok(Json(pagination.res(total_pages, pkgs))) } @@ -81,10 +76,7 @@ async fn get_single_package( State(global): State, Path(id): Path, ) -> crate::Result> { - let entry = global - .db - .pkg - .full(id) + let entry = db::query::package::full(&global.db, id) .await? .ok_or(axum::http::StatusCode::NOT_FOUND)?; diff --git a/server/src/cli.rs b/server/src/cli.rs index 14a5808..1ae6de4 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -10,6 +10,7 @@ use std::sync::{Arc, RwLock}; use tower_http::trace::TraceLayer; use tracing::debug; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; +use sea_orm_migration::MigratorTrait; #[derive(Parser)] #[command(author, version, about, long_about = None)] @@ -75,10 +76,8 @@ impl Cli { debug!("Connecting to database with URL {}", db_url); - let db = crate::db::RieterDb::connect(db_url).await?; - // let db = crate::db::init("postgres://rieter:rieter@localhost:5432/rieter") - // .await - // .unwrap(); + let db = sea_orm::Database::connect(db_url).await?; + crate::db::Migrator::up(&db, None).await?; let config = Config { data_dir: self.data_dir.clone(), diff --git a/server/src/db/conn.rs b/server/src/db/conn.rs deleted file mode 100644 index 2756236..0000000 --- a/server/src/db/conn.rs +++ /dev/null @@ -1,61 +0,0 @@ -use super::RieterDb; -use sea_orm::{DbBackend, DbErr, ExecResult, QueryResult, Statement}; -use std::{future::Future, pin::Pin}; - -// Allows RieterDb objects to be passed to ORM functions -impl sea_orm::ConnectionTrait for 
RieterDb { - fn get_database_backend(&self) -> DbBackend { - self.conn.get_database_backend() - } - fn execute<'life0, 'async_trait>( - &'life0 self, - stmt: Statement, - ) -> Pin> + Send + 'async_trait>> - where - Self: 'async_trait, - 'life0: 'async_trait, - { - self.conn.execute(stmt) - } - fn execute_unprepared<'life0, 'life1, 'async_trait>( - &'life0 self, - sql: &'life1 str, - ) -> Pin> + Send + 'async_trait>> - where - Self: 'async_trait, - 'life0: 'async_trait, - 'life1: 'async_trait, - { - self.conn.execute_unprepared(sql) - } - fn query_one<'life0, 'async_trait>( - &'life0 self, - stmt: Statement, - ) -> Pin< - Box< - dyn Future, DbErr>> - + Send - + 'async_trait, - >, - > - where - Self: 'async_trait, - 'life0: 'async_trait, - { - self.conn.query_one(stmt) - } - fn query_all<'life0, 'async_trait>( - &'life0 self, - stmt: Statement, - ) -> Pin< - Box< - dyn Future, DbErr>> + Send + 'async_trait, - >, - > - where - Self: 'async_trait, - 'life0: 'async_trait, - { - self.conn.query_all(stmt) - } -} diff --git a/server/src/db/mod.rs b/server/src/db/mod.rs index a7a7f66..b29f3d3 100644 --- a/server/src/db/mod.rs +++ b/server/src/db/mod.rs @@ -1,14 +1,14 @@ -mod conn; pub mod entities; mod migrator; pub mod query; -use sea_orm::{ConnectOptions, Database, DatabaseConnection, DeriveActiveEnum, EnumIter}; -use sea_orm_migration::MigratorTrait; +use sea_orm::{DeriveActiveEnum, EnumIter}; + use serde::{Deserialize, Serialize}; pub use entities::{prelude::*, *}; -use migrator::Migrator; +pub use migrator::Migrator; + type Result = std::result::Result; @@ -41,24 +41,3 @@ pub struct FullPackage { related: Vec<(PackageRelatedEnum, String)>, files: Vec, } - -#[derive(Clone, Debug)] -pub struct RieterDb { - conn: DatabaseConnection, - pub pkg: query::PackageQuery, - pub repo: query::RepoQuery, -} - -impl RieterDb { - pub async fn connect>(opt: C) -> Result { - let db = Database::connect(opt).await?; - - Migrator::up(&db, None).await?; - - Ok(Self { - conn: db.clone(), - 
pkg: query::PackageQuery::new(db.clone()), - repo: query::RepoQuery::new(db.clone()), - }) - } -} diff --git a/server/src/db/query/mod.rs b/server/src/db/query/mod.rs index 32cc3c5..87d61e3 100644 --- a/server/src/db/query/mod.rs +++ b/server/src/db/query/mod.rs @@ -1,7 +1,4 @@ pub mod package; pub mod repo; -pub use package::PackageQuery; -pub use repo::RepoQuery; - type Result = std::result::Result; diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index 8649dea..9d7a9f2 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -3,11 +3,6 @@ use serde::Deserialize; use crate::db::*; -#[derive(Clone, Debug)] -pub struct PackageQuery { - conn: DatabaseConnection, -} - #[derive(Deserialize)] pub struct Filter { repo: Option, @@ -27,189 +22,183 @@ impl IntoCondition for Filter { } } -impl PackageQuery { - pub fn new(conn: DatabaseConnection) -> Self { - Self { conn } +pub async fn page( + conn: &DbConn, + per_page: u64, + page: u64, + filter: Filter, +) -> super::Result<(u64, Vec)> { + let paginator = Package::find() + .filter(filter) + .order_by_asc(package::Column::Id) + .paginate(conn, per_page); + let packages = paginator.fetch_page(page).await?; + let total_pages = paginator.num_pages().await?; + + Ok((total_pages, packages)) +} + +pub async fn by_id(conn: &DbConn, id: i32) -> Result> { + package::Entity::find_by_id(id).one(conn).await +} + +pub async fn by_fields( + conn: &DbConn, + repo_id: i32, + name: &str, + version: Option<&str>, + arch: &str, +) -> Result> { + let mut query = Package::find() + .filter(package::Column::RepoId.eq(repo_id)) + .filter(package::Column::Name.eq(name)) + .filter(package::Column::Arch.eq(arch)); + + if let Some(version) = version { + query = query.filter(package::Column::Version.eq(version)); } - pub async fn page( - &self, - per_page: u64, - page: u64, - filter: Filter, - ) -> super::Result<(u64, Vec)> { - let paginator = Package::find() - .filter(filter) - 
.order_by_asc(package::Column::Id) - .paginate(&self.conn, per_page); - let packages = paginator.fetch_page(page).await?; - let total_pages = paginator.num_pages().await?; + query.one(conn).await +} - Ok((total_pages, packages)) - } +pub async fn delete_with_arch(conn: &DbConn, repo_id: i32, arch: &str) -> Result { + Package::delete_many() + .filter(package::Column::RepoId.eq(repo_id)) + .filter(package::Column::Arch.eq(arch)) + .exec(conn) + .await +} - pub async fn by_id(&self, id: i32) -> Result> { - package::Entity::find_by_id(id).one(&self.conn).await - } +pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Package) -> Result<()> { + let info = pkg.info; - pub async fn by_fields( - &self, - repo_id: i32, - name: &str, - version: Option<&str>, - arch: &str, - ) -> Result> { - let mut query = Package::find() - .filter(package::Column::RepoId.eq(repo_id)) - .filter(package::Column::Name.eq(name)) - .filter(package::Column::Arch.eq(arch)); + let model = package::ActiveModel { + id: NotSet, + repo_id: Set(repo_id), + base: Set(info.base), + name: Set(info.name), + version: Set(info.version), + arch: Set(info.arch), + size: Set(info.size), + c_size: Set(info.csize), + description: Set(info.description), + url: Set(info.url), + build_date: Set(info.build_date.to_string()), + packager: Set(info.packager), + pgp_sig: Set(info.pgpsig), + pgp_sig_size: Set(info.pgpsigsize), + sha256_sum: Set(info.sha256sum), + }; - if let Some(version) = version { - query = query.filter(package::Column::Version.eq(version)); - } + let pkg_entry = model.insert(conn).await?; - query.one(&self.conn).await - } + // Insert all the related tables + PackageLicense::insert_many(info.licenses.iter().map(|s| package_license::ActiveModel { + package_id: Set(pkg_entry.id), + name: Set(s.to_string()), + })) + .on_empty_do_nothing() + .exec(conn) + .await?; - pub async fn delete_with_arch(&self, repo_id: i32, arch: &str) -> Result { - Package::delete_many() - 
.filter(package::Column::RepoId.eq(repo_id)) - .filter(package::Column::Arch.eq(arch)) - .exec(&self.conn) - .await - } + PackageGroup::insert_many(info.groups.iter().map(|s| package_group::ActiveModel { + package_id: Set(pkg_entry.id), + name: Set(s.to_string()), + })) + .on_empty_do_nothing() + .exec(conn) + .await?; - pub async fn insert(&self, repo_id: i32, pkg: crate::repo::package::Package) -> Result<()> { - let info = pkg.info; + let related = info + .conflicts + .iter() + .map(|s| (PackageRelatedEnum::Conflicts, s)) + .chain( + info.replaces + .iter() + .map(|s| (PackageRelatedEnum::Replaces, s)), + ) + .chain( + info.provides + .iter() + .map(|s| (PackageRelatedEnum::Provides, s)), + ) + .chain(info.depends.iter().map(|s| (PackageRelatedEnum::Depend, s))) + .chain( + info.makedepends + .iter() + .map(|s| (PackageRelatedEnum::Depend, s)), + ) + .chain( + info.checkdepends + .iter() + .map(|s| (PackageRelatedEnum::Checkdepend, s)), + ) + .chain( + info.optdepends + .iter() + .map(|s| (PackageRelatedEnum::Optdepend, s)), + ); - let model = package::ActiveModel { - id: NotSet, - repo_id: Set(repo_id), - base: Set(info.base), - name: Set(info.name), - version: Set(info.version), - arch: Set(info.arch), - size: Set(info.size), - c_size: Set(info.csize), - description: Set(info.description), - url: Set(info.url), - build_date: Set(info.build_date.to_string()), - packager: Set(info.packager), - pgp_sig: Set(info.pgpsig), - pgp_sig_size: Set(info.pgpsigsize), - sha256_sum: Set(info.sha256sum), - }; + PackageRelated::insert_many(related.map(|(t, s)| package_related::ActiveModel { + package_id: Set(pkg_entry.id), + r#type: Set(t), + name: Set(s.to_string()), + })) + .on_empty_do_nothing() + .exec(conn) + .await?; - let pkg_entry = model.insert(&self.conn).await?; + PackageFile::insert_many(pkg.files.iter().map(|s| package_file::ActiveModel { + package_id: Set(pkg_entry.id), + path: Set(s.display().to_string()), + })) + .on_empty_do_nothing() + .exec(conn) + .await?; 
- // Insert all the related tables - PackageLicense::insert_many(info.licenses.iter().map(|s| package_license::ActiveModel { - package_id: Set(pkg_entry.id), - name: Set(s.to_string()), + Ok(()) +} + +pub async fn full(conn: &DbConn, id: i32) -> Result> { + if let Some(entry) = by_id(conn, id).await? { + let licenses = entry + .find_related(PackageLicense) + .all(conn) + .await? + .into_iter() + .map(|e| e.name) + .collect(); + let groups = entry + .find_related(PackageGroup) + .all(conn) + .await? + .into_iter() + .map(|e| e.name) + .collect(); + let related = entry + .find_related(PackageRelated) + .all(conn) + .await? + .into_iter() + .map(|e| (e.r#type, e.name)) + .collect(); + let files = entry + .find_related(PackageFile) + .all(conn) + .await? + .into_iter() + .map(|e| e.path) + .collect(); + + Ok(Some(FullPackage { + entry, + licenses, + groups, + related, + files, })) - .on_empty_do_nothing() - .exec(&self.conn) - .await?; - - PackageGroup::insert_many(info.groups.iter().map(|s| package_group::ActiveModel { - package_id: Set(pkg_entry.id), - name: Set(s.to_string()), - })) - .on_empty_do_nothing() - .exec(&self.conn) - .await?; - - let related = info - .conflicts - .iter() - .map(|s| (PackageRelatedEnum::Conflicts, s)) - .chain( - info.replaces - .iter() - .map(|s| (PackageRelatedEnum::Replaces, s)), - ) - .chain( - info.provides - .iter() - .map(|s| (PackageRelatedEnum::Provides, s)), - ) - .chain(info.depends.iter().map(|s| (PackageRelatedEnum::Depend, s))) - .chain( - info.makedepends - .iter() - .map(|s| (PackageRelatedEnum::Depend, s)), - ) - .chain( - info.checkdepends - .iter() - .map(|s| (PackageRelatedEnum::Checkdepend, s)), - ) - .chain( - info.optdepends - .iter() - .map(|s| (PackageRelatedEnum::Optdepend, s)), - ); - - PackageRelated::insert_many(related.map(|(t, s)| package_related::ActiveModel { - package_id: Set(pkg_entry.id), - r#type: Set(t), - name: Set(s.to_string()), - })) - .on_empty_do_nothing() - .exec(&self.conn) - .await?; - - 
PackageFile::insert_many(pkg.files.iter().map(|s| package_file::ActiveModel { - package_id: Set(pkg_entry.id), - path: Set(s.display().to_string()), - })) - .on_empty_do_nothing() - .exec(&self.conn) - .await?; - - Ok(()) - } - - pub async fn full(&self, id: i32) -> Result> { - if let Some(entry) = self.by_id(id).await? { - let licenses = entry - .find_related(PackageLicense) - .all(&self.conn) - .await? - .into_iter() - .map(|e| e.name) - .collect(); - let groups = entry - .find_related(PackageGroup) - .all(&self.conn) - .await? - .into_iter() - .map(|e| e.name) - .collect(); - let related = entry - .find_related(PackageRelated) - .all(&self.conn) - .await? - .into_iter() - .map(|e| (e.r#type, e.name)) - .collect(); - let files = entry - .find_related(PackageFile) - .all(&self.conn) - .await? - .into_iter() - .map(|e| e.path) - .collect(); - - Ok(Some(FullPackage { - entry, - licenses, - groups, - related, - files, - })) - } else { - Ok(None) - } + } else { + Ok(None) } } diff --git a/server/src/db/query/repo.rs b/server/src/db/query/repo.rs index 65177df..34fbb81 100644 --- a/server/src/db/query/repo.rs +++ b/server/src/db/query/repo.rs @@ -2,11 +2,6 @@ use sea_orm::{sea_query::IntoCondition, *}; use crate::db::*; -#[derive(Clone, Debug)] -pub struct RepoQuery { - conn: DatabaseConnection, -} - #[derive(Deserialize)] pub struct Filter { name: Option, @@ -18,43 +13,37 @@ impl IntoCondition for Filter { } } -impl RepoQuery { - pub fn new(conn: DatabaseConnection) -> Self { - Self { conn } - } +pub async fn page(conn: &DbConn, per_page: u64, page: u64) -> Result<(u64, Vec)> { + let paginator = Repo::find() + .order_by_asc(repo::Column::Id) + .paginate(conn, per_page); + let repos = paginator.fetch_page(page).await?; + let total_pages = paginator.num_pages().await?; - pub async fn page(&self, per_page: u64, page: u64) -> Result<(u64, Vec)> { - let paginator = Repo::find() - .order_by_asc(repo::Column::Id) - .paginate(&self.conn, per_page); - let repos = 
paginator.fetch_page(page).await?; - let total_pages = paginator.num_pages().await?; - - Ok((total_pages, repos)) - } - - pub async fn by_id(&self, id: i32) -> Result> { - repo::Entity::find_by_id(id).one(&self.conn).await - } - - pub async fn by_name(&self, name: &str) -> Result> { - Repo::find() - .filter(repo::Column::Name.eq(name)) - .one(&self.conn) - .await - } - - pub async fn insert( - &self, - name: &str, - description: Option<&str>, - ) -> Result> { - let model = repo::ActiveModel { - id: NotSet, - name: Set(String::from(name)), - description: Set(description.map(String::from)), - }; - - Repo::insert(model).exec(&self.conn).await - } + Ok((total_pages, repos)) +} + +pub async fn by_id(conn: &DbConn, id: i32) -> Result> { + repo::Entity::find_by_id(id).one(conn).await +} + +pub async fn by_name(conn: &DbConn, name: &str) -> Result> { + Repo::find() + .filter(repo::Column::Name.eq(name)) + .one(conn) + .await +} + +pub async fn insert( + conn: &DbConn, + name: &str, + description: Option<&str>, +) -> Result> { + let model = repo::ActiveModel { + id: NotSet, + name: Set(String::from(name)), + description: Set(description.map(String::from)), + }; + + Repo::insert(model).exec(conn).await } diff --git a/server/src/main.rs b/server/src/main.rs index fc5c110..9068bd7 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -22,7 +22,7 @@ pub struct Config { pub struct Global { config: Config, repo_manager: Arc>, - db: db::RieterDb, + db: sea_orm::DbConn, } #[tokio::main] diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 4681fb6..5bf00a6 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -5,7 +5,7 @@ pub use manager::RepoGroupManager; use std::path::PathBuf; -use axum::body::{Body, BodyDataStream}; +use axum::body::{Body}; use axum::extract::{Path, State}; use axum::http::Request; use axum::http::StatusCode; @@ -21,6 +21,8 @@ use tower_http::services::{ServeDir, ServeFile}; use 
tower_http::validate_request::ValidateRequestHeaderLayer; use uuid::Uuid; +use crate::db; + const DB_FILE_EXTS: [&str; 4] = [".db", ".files", ".db.tar.gz", ".files.tar.gz"]; pub fn router(api_key: &str) -> Router { @@ -128,26 +130,31 @@ async fn post_package_archive( tracing::info!("Added '{}' to repository '{}'", pkg.file_name(), repo); // Query the repo for its ID, or create it if it does not already exist - let res = global.db.repo.by_name(&repo).await?; + let res = db::query::repo::by_name(&global.db, &repo).await?; let repo_id = if let Some(repo_entity) = res { repo_entity.id } else { - global.db.repo.insert(&repo, None).await?.last_insert_id + db::query::repo::insert(&global.db, &repo, None) + .await? + .last_insert_id }; // If the package already exists in the database, we remove it first - let res = global - .db - .pkg - .by_fields(repo_id, &pkg.info.name, None, &pkg.info.arch) - .await?; + let res = db::query::package::by_fields( + &global.db, + repo_id, + &pkg.info.name, + None, + &pkg.info.arch, + ) + .await?; if let Some(entry) = res { entry.delete(&global.db).await?; } - global.db.pkg.insert(repo_id, pkg).await?; + db::query::package::insert(&global.db, repo_id, pkg).await?; Ok(()) } @@ -172,7 +179,7 @@ async fn delete_repo( .await??; if repo_removed { - let res = global.db.repo.by_name(&repo).await?; + let res = db::query::repo::by_name(&global.db, &repo).await?; if let Some(repo_entry) = res { repo_entry.delete(&global.db).await?; @@ -203,10 +210,10 @@ async fn delete_arch_repo( .await??; if repo_removed { - let res = global.db.repo.by_name(&repo).await?; + let res = db::query::repo::by_name(&global.db, &repo).await?; if let Some(repo_entry) = res { - global.db.pkg.delete_with_arch(repo_entry.id, &arch).await?; + db::query::package::delete_with_arch(&global.db, repo_entry.id, &arch).await?; } tracing::info!("Removed architecture '{}' from repository '{}'", arch, repo); @@ -229,19 +236,17 @@ async fn delete_package( .await??; if let Some((name, 
version, release, arch)) = res { - let res = global.db.repo.by_name(&repo).await?; + let res = db::query::repo::by_name(&global.db, &repo).await?; if let Some(repo_entry) = res { - let res = global - .db - .pkg - .by_fields( - repo_entry.id, - &name, - Some(&format!("{}-{}", version, release)), - &arch, - ) - .await?; + let res = db::query::package::by_fields( + &global.db, + repo_entry.id, + &name, + Some(&format!("{}-{}", version, release)), + &arch, + ) + .await?; if let Some(entry) = res { entry.delete(&global.db).await?; From 421f6ae69be32fed1cef9a98c0626a893f976465 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 22 May 2024 09:42:42 +0200 Subject: [PATCH 12/73] chore: some more api code cleanup --- server/src/api/mod.rs | 35 +++++++---------------------------- server/src/api/pagination.rs | 21 ++++++++++++++------- server/src/cli.rs | 2 +- server/src/db/mod.rs | 1 - server/src/db/query/repo.rs | 8 +++++++- server/src/repo/mod.rs | 2 +- 6 files changed, 30 insertions(+), 39 deletions(-) diff --git a/server/src/api/mod.rs b/server/src/api/mod.rs index 09b6c95..fd54cd8 100644 --- a/server/src/api/mod.rs +++ b/server/src/api/mod.rs @@ -1,7 +1,5 @@ mod pagination; -use sea_orm::{*}; - use axum::extract::{Path, Query, State}; use axum::routing::get; use axum::Json; @@ -9,7 +7,7 @@ use axum::Router; use pagination::PaginatedResponse; -use crate::db::{self, *}; +use crate::db; pub fn router() -> Router { Router::new() @@ -24,25 +22,10 @@ async fn get_repos( Query(pagination): Query, Query(filter): Query, ) -> crate::Result>> { - let page = pagination.page.unwrap_or(1) - 1; - let per_page = pagination.per_page.unwrap_or(25); + let (total_pages, items) = + db::query::repo::page(&global.db, pagination.per_page, pagination.page - 1, filter).await?; - let paginator = Repo::find() - .filter(filter) - .order_by_asc(package::Column::Id) - .paginate(&global.db, pagination.per_page.unwrap_or(25)); - let items = paginator - .fetch_page(pagination.page.unwrap_or(1) - 1) 
- .await?; - let total_pages = paginator.num_pages().await?; - - Ok(Json(PaginatedResponse { - page, - per_page, - total_pages, - count: items.len(), - items, - })) + Ok(Json(pagination.res(total_pages, items))) } async fn get_single_repo( @@ -61,13 +44,9 @@ async fn get_packages( Query(pagination): Query, Query(filter): Query, ) -> crate::Result>> { - let (total_pages, pkgs) = db::query::package::page( - &global.db, - pagination.per_page.unwrap_or(25), - pagination.page.unwrap_or(1) - 1, - filter, - ) - .await?; + let (total_pages, pkgs) = + db::query::package::page(&global.db, pagination.per_page, pagination.page - 1, filter) + .await?; Ok(Json(pagination.res(total_pages, pkgs))) } diff --git a/server/src/api/pagination.rs b/server/src/api/pagination.rs index db1ffa0..02e32dc 100644 --- a/server/src/api/pagination.rs +++ b/server/src/api/pagination.rs @@ -1,12 +1,19 @@ use serde::{Deserialize, Serialize}; -pub const DEFAULT_PAGE: u64 = 1; -pub const DEFAULT_PER_PAGE: u64 = 25; - #[derive(Deserialize)] +#[serde(default)] pub struct Query { - pub page: Option, - pub per_page: Option, + pub page: u64, + pub per_page: u64, +} + +impl Default for Query { + fn default() -> Self { + Query { + page: 1, + per_page: 25, + } + } } #[derive(Serialize)] @@ -28,8 +35,8 @@ impl Query { items: Vec, ) -> PaginatedResponse { PaginatedResponse { - page: self.page.unwrap_or(DEFAULT_PAGE), - per_page: self.per_page.unwrap_or(DEFAULT_PER_PAGE), + page: self.page, + per_page: self.per_page, total_pages, count: items.len(), items, diff --git a/server/src/cli.rs b/server/src/cli.rs index 1ae6de4..2976d1c 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -4,13 +4,13 @@ use crate::{Config, Global}; use axum::extract::FromRef; use axum::Router; use clap::Parser; +use sea_orm_migration::MigratorTrait; use std::io; use std::path::PathBuf; use std::sync::{Arc, RwLock}; use tower_http::trace::TraceLayer; use tracing::debug; use tracing_subscriber::{layer::SubscriberExt, 
util::SubscriberInitExt}; -use sea_orm_migration::MigratorTrait; #[derive(Parser)] #[command(author, version, about, long_about = None)] diff --git a/server/src/db/mod.rs b/server/src/db/mod.rs index b29f3d3..0194079 100644 --- a/server/src/db/mod.rs +++ b/server/src/db/mod.rs @@ -9,7 +9,6 @@ use serde::{Deserialize, Serialize}; pub use entities::{prelude::*, *}; pub use migrator::Migrator; - type Result = std::result::Result; #[derive(EnumIter, DeriveActiveEnum, Serialize, Deserialize, PartialEq, Eq, Clone, Debug)] diff --git a/server/src/db/query/repo.rs b/server/src/db/query/repo.rs index 34fbb81..399d227 100644 --- a/server/src/db/query/repo.rs +++ b/server/src/db/query/repo.rs @@ -13,8 +13,14 @@ impl IntoCondition for Filter { } } -pub async fn page(conn: &DbConn, per_page: u64, page: u64) -> Result<(u64, Vec)> { +pub async fn page( + conn: &DbConn, + per_page: u64, + page: u64, + filter: Filter, +) -> Result<(u64, Vec)> { let paginator = Repo::find() + .filter(filter) .order_by_asc(repo::Column::Id) .paginate(conn, per_page); let repos = paginator.fetch_page(page).await?; diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 5bf00a6..7d3f7e4 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -5,7 +5,7 @@ pub use manager::RepoGroupManager; use std::path::PathBuf; -use axum::body::{Body}; +use axum::body::Body; use axum::extract::{Path, State}; use axum::http::Request; use axum::http::StatusCode; From cc2dc9b28f6b100db524a846433b1a2d7d679428 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Thu, 23 May 2024 16:33:52 +0200 Subject: [PATCH 13/73] feat: start of better repo manager --- libarchive/src/archive.rs | 10 +-- server/src/db/entities/package.rs | 1 + server/src/db/entities/package_related.rs | 4 +- .../m20230730_000001_create_repo_tables.rs | 2 + server/src/db/query/package.rs | 16 ++--- server/src/repo/manager_new.rs | 62 +++++++++++++++++++ server/src/repo/mod.rs | 10 +-- server/src/repo/package.rs | 2 +- 8 files changed, 
85 insertions(+), 22 deletions(-) create mode 100644 server/src/repo/manager_new.rs diff --git a/libarchive/src/archive.rs b/libarchive/src/archive.rs index 97c9d0a..3369a44 100644 --- a/libarchive/src/archive.rs +++ b/libarchive/src/archive.rs @@ -64,11 +64,11 @@ impl ReadFilter { pub fn extension(&self) -> Option<&str> { match self { ReadFilter::None => Some(""), - ReadFilter::Gzip => Some(".gz"), - ReadFilter::Bzip2 => Some(".bz2"), - ReadFilter::Lzma => Some(".lzma"), - ReadFilter::Xz => Some(".xz"), - ReadFilter::Zstd => Some(".zst"), + ReadFilter::Gzip => Some("gz"), + ReadFilter::Bzip2 => Some("bz2"), + ReadFilter::Lzma => Some("lzma"), + ReadFilter::Xz => Some("xz"), + ReadFilter::Zstd => Some("zst"), _ => None, } } diff --git a/server/src/db/entities/package.rs b/server/src/db/entities/package.rs index 8ea6797..c09a310 100644 --- a/server/src/db/entities/package.rs +++ b/server/src/db/entities/package.rs @@ -22,6 +22,7 @@ pub struct Model { pub pgp_sig: Option, pub pgp_sig_size: Option, pub sha256_sum: String, + pub compression: String, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/server/src/db/entities/package_related.rs b/server/src/db/entities/package_related.rs index 3e25ff3..7241c1a 100644 --- a/server/src/db/entities/package_related.rs +++ b/server/src/db/entities/package_related.rs @@ -3,13 +3,15 @@ use sea_orm::entity::prelude::*; use serde::{Deserialize, Serialize}; +use crate::db::PackageRelatedEnum; + #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "package_related")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub package_id: i32, #[sea_orm(primary_key, auto_increment = false)] - pub r#type: crate::db::PackageRelatedEnum, + pub r#type: PackageRelatedEnum, #[sea_orm(primary_key, auto_increment = false)] pub name: String, } diff --git a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs 
b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs index 866319e..9d76afc 100644 --- a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs +++ b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs @@ -52,6 +52,7 @@ impl MigrationTrait for Migration { .col(ColumnDef::new(Package::PgpSig).string_len(255)) .col(ColumnDef::new(Package::PgpSigSize).big_integer()) .col(ColumnDef::new(Package::Sha256Sum).char_len(64).not_null()) + .col(ColumnDef::new(Package::Compression).char_len(16).not_null()) .foreign_key( ForeignKey::create() .name("fk-package-repo_id") @@ -221,6 +222,7 @@ pub enum Package { PgpSig, PgpSigSize, Sha256Sum, + Compression, } #[derive(Iden)] diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index 9d7a9f2..726a1f2 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -45,20 +45,15 @@ pub async fn by_id(conn: &DbConn, id: i32) -> Result> { pub async fn by_fields( conn: &DbConn, repo_id: i32, - name: &str, - version: Option<&str>, arch: &str, + name: &str, ) -> Result> { - let mut query = Package::find() + Package::find() .filter(package::Column::RepoId.eq(repo_id)) .filter(package::Column::Name.eq(name)) - .filter(package::Column::Arch.eq(arch)); - - if let Some(version) = version { - query = query.filter(package::Column::Version.eq(version)); - } - - query.one(conn).await + .filter(package::Column::Arch.eq(arch)) + .one(conn) + .await } pub async fn delete_with_arch(conn: &DbConn, repo_id: i32, arch: &str) -> Result { @@ -88,6 +83,7 @@ pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Pack pgp_sig: Set(info.pgpsig), pgp_sig_size: Set(info.pgpsigsize), sha256_sum: Set(info.sha256sum), + compression: Set(pkg.compression.extension().unwrap().to_string()) }; let pkg_entry = model.insert(conn).await?; diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs new file mode 100644 index 0000000..40678fd --- /dev/null +++ 
b/server/src/repo/manager_new.rs @@ -0,0 +1,62 @@ +use std::path::{Path, PathBuf}; + +use sea_orm::{DbConn, ModelTrait}; + +use crate::db; +use crate::error::Result; + +pub struct MetaRepoMngr { + repo_dir: PathBuf, + pkg_dir: PathBuf, +} + +impl MetaRepoMngr { + pub fn new, P2: AsRef>(repo_dir: P1, pkg_dir: P2) -> Self { + MetaRepoMngr { + repo_dir: repo_dir.as_ref().to_path_buf(), + pkg_dir: pkg_dir.as_ref().to_path_buf(), + } + } + + /// Remove the repo with the given name, if it existed + pub async fn remove_repo(&self, conn: &DbConn, repo: &str) -> Result { + let res = db::query::repo::by_name(conn, repo).await?; + + if let Some(repo_entry) = res { + // Remove repository from database + repo_entry.delete(conn).await?; + + // Remove files from file system + tokio::fs::remove_dir_all(self.repo_dir.join(repo)).await?; + tokio::fs::remove_dir_all(self.pkg_dir.join(repo)).await?; + + Ok(true) + } else { + Ok(false) + } + } + + pub async fn remove_pkg( + &self, + conn: &DbConn, + repo: &str, + arch: &str, + name: &str, + ) -> Result { + let repo = db::query::repo::by_name(conn, repo).await?; + + if let Some(repo) = repo { + let pkg = db::query::package::by_fields(conn, repo.id, arch, name).await?; + + if let Some(pkg) = pkg { + // Remove package from database + pkg.delete(conn).await?; + Ok(true) + } else { + Ok(false) + } + } else { + Ok(false) + } + } +} diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 7d3f7e4..aca8503 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -1,4 +1,5 @@ mod manager; +mod manager_new; pub mod package; pub use manager::RepoGroupManager; @@ -117,7 +118,8 @@ async fn post_package_archive( let path_clone = path.clone(); let repo_clone = repo.clone(); let res = tokio::task::spawn_blocking(move || { - clone + global + .repo_manager .write() .unwrap() .add_pkg_from_path(&repo_clone, &path_clone) @@ -144,9 +146,8 @@ async fn post_package_archive( let res = db::query::package::by_fields( &global.db, repo_id, - 
&pkg.info.name, - None, &pkg.info.arch, + &pkg.info.name, ) .await?; @@ -242,9 +243,8 @@ async fn delete_package( let res = db::query::package::by_fields( &global.db, repo_entry.id, - &name, - Some(&format!("{}-{}", version, release)), &arch, + &name, ) .await?; diff --git a/server/src/repo/package.rs b/server/src/repo/package.rs index f519a91..3c11c38 100644 --- a/server/src/repo/package.rs +++ b/server/src/repo/package.rs @@ -193,7 +193,7 @@ impl Package { // This unwrap should be safe, because we only allow passing through compressions with // known file extensions format!( - "{}.pkg.tar{}", + "{}.pkg.tar.{}", self.full_name(), self.compression.extension().unwrap() ) From 2e0c6d1fa6632428f45edc2333da0e66602b9b24 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 23 May 2024 21:09:35 +0200 Subject: [PATCH 14/73] feat: start of archive generation for new manager --- libarchive/src/write/builder.rs | 2 + libarchive/src/write/file.rs | 2 + libarchive/src/write/mod.rs | 2 + server/src/db/query/package.rs | 2 +- server/src/error.rs | 17 +++++++-- server/src/repo/manager_new.rs | 68 +++++++++++++++++++++++++++++++-- server/src/repo/mod.rs | 19 +++------ 7 files changed, 90 insertions(+), 22 deletions(-) diff --git a/libarchive/src/write/builder.rs b/libarchive/src/write/builder.rs index b075284..b1cef46 100644 --- a/libarchive/src/write/builder.rs +++ b/libarchive/src/write/builder.rs @@ -11,6 +11,8 @@ pub struct Builder { consumed: bool, } +unsafe impl Send for Builder {} + impl Builder { pub fn new() -> Self { Builder::default() diff --git a/libarchive/src/write/file.rs b/libarchive/src/write/file.rs index 5d932f1..7f81915 100644 --- a/libarchive/src/write/file.rs +++ b/libarchive/src/write/file.rs @@ -13,6 +13,8 @@ pub struct FileWriter { closed: bool, } +unsafe impl Send for FileWriter {} + impl Handle for FileWriter { unsafe fn handle(&self) -> *const ffi::Struct_archive { self.handle as *const _ diff --git a/libarchive/src/write/mod.rs 
b/libarchive/src/write/mod.rs index 446edba..642fc18 100644 --- a/libarchive/src/write/mod.rs +++ b/libarchive/src/write/mod.rs @@ -9,6 +9,8 @@ pub struct WriteEntry { entry: *mut ffi::Struct_archive_entry, } +unsafe impl Send for WriteEntry {} + impl WriteEntry { pub fn new() -> Self { let entry = unsafe { ffi::archive_entry_new() }; diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index 726a1f2..ecd306e 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -83,7 +83,7 @@ pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Pack pgp_sig: Set(info.pgpsig), pgp_sig_size: Set(info.pgpsigsize), sha256_sum: Set(info.sha256sum), - compression: Set(pkg.compression.extension().unwrap().to_string()) + compression: Set(pkg.compression.extension().unwrap().to_string()), }; let pkg_entry = model.insert(conn).await?; diff --git a/server/src/error.rs b/server/src/error.rs index 4fbb7c4..d0ba36b 100644 --- a/server/src/error.rs +++ b/server/src/error.rs @@ -1,9 +1,10 @@ -use axum::http::StatusCode; -use axum::response::{IntoResponse, Response}; use std::error::Error; use std::fmt; use std::io; +use axum::http::StatusCode; +use axum::response::{IntoResponse, Response}; + pub type Result = std::result::Result; #[derive(Debug)] @@ -12,6 +13,7 @@ pub enum ServerError { Axum(axum::Error), Db(sea_orm::DbErr), Status(StatusCode), + Archive(libarchive::error::ArchiveError), } impl fmt::Display for ServerError { @@ -21,6 +23,7 @@ impl fmt::Display for ServerError { ServerError::Axum(err) => write!(fmt, "{}", err), ServerError::Status(status) => write!(fmt, "{}", status), ServerError::Db(err) => write!(fmt, "{}", err), + ServerError::Archive(err) => write!(fmt, "{}", err), } } } @@ -38,7 +41,9 @@ impl IntoResponse for ServerError { ServerError::Db(sea_orm::DbErr::RecordNotFound(_)) => { StatusCode::NOT_FOUND.into_response() } - ServerError::Db(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(), + 
ServerError::Db(_) | ServerError::Archive(_) => { + StatusCode::INTERNAL_SERVER_ERROR.into_response() + } } } } @@ -72,3 +77,9 @@ impl From for ServerError { ServerError::Db(err) } } + +impl From for ServerError { + fn from(err: libarchive::error::ArchiveError) -> Self { + ServerError::Archive(err) + } +} diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index 40678fd..9e7fb91 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -1,23 +1,83 @@ use std::path::{Path, PathBuf}; -use sea_orm::{DbConn, ModelTrait}; +use libarchive::write::{Builder, WriteEntry}; +use libarchive::{Entry, WriteFilter, WriteFormat}; + +use sea_orm::{ColumnTrait, DbConn, EntityTrait, ModelTrait, QueryFilter}; + +use futures::StreamExt; use crate::db; use crate::error::Result; -pub struct MetaRepoMngr { +pub const ANY_ARCH: &str = "any"; + +pub struct MetaRepoMgr { repo_dir: PathBuf, pkg_dir: PathBuf, } -impl MetaRepoMngr { +impl MetaRepoMgr { pub fn new, P2: AsRef>(repo_dir: P1, pkg_dir: P2) -> Self { - MetaRepoMngr { + MetaRepoMgr { repo_dir: repo_dir.as_ref().to_path_buf(), pkg_dir: pkg_dir.as_ref().to_path_buf(), } } + /// Generate the `db` and `files` archive files for the given repo and architecture. 
+ pub async fn generate_archives(&self, conn: &DbConn, repo: &str, arch: &str) -> Result<()> { + let repo = crate::db::query::repo::by_name(conn, repo).await?; + + if repo.is_none() { + return Ok(()); + } + + let repo = repo.unwrap(); + let parent_dir = self.repo_dir.join(&repo.name).join(arch); + + let repo_name = repo.name.clone(); + let (mut ar_db, mut ar_files) = tokio::task::spawn_blocking(move || { + let mut ar_db = Builder::new(); + ar_db.add_filter(WriteFilter::Gzip)?; + ar_db.set_format(WriteFormat::PaxRestricted)?; + + let mut ar_files = Builder::new(); + ar_files.add_filter(WriteFilter::Gzip)?; + ar_files.set_format(WriteFormat::PaxRestricted)?; + + let ar_db = ar_db.open_file(parent_dir.join(format!("{}.db.tar.gz", repo_name))); + let ar_files = + ar_files.open_file(parent_dir.join(format!("{}.files.tar.gz", repo_name))); + + match (ar_db, ar_files) { + (Ok(ar_db), Ok(ar_files)) => Ok((ar_db, ar_files)), + (Err(err), _) | (_, Err(err)) => Err(err), + } + }) + .await + .unwrap()?; + //let mut ar_db = ar_db.open_file(parent_dir.join(format!("{}.db.tar.gz", &repo.name)))?; + //let mut ar_files = + // ar_files.open_file(parent_dir.join(format!("{}.files.tar.gz", &repo.name)))?; + + // Query all packages in the repo that have the given architecture or the "any" + // architecture + let mut pkgs = repo + .find_related(crate::db::Package) + .filter(db::package::Column::Arch.eq(arch).or(ANY_ARCH.into())) + .stream(conn) + .await?; + + while let Some(pkg) = pkgs.next().await { + let pkg = pkg?; + + // TODO for each package, write entry to archive files + } + + Ok(()) + } + /// Remove the repo with the given name, if it existed pub async fn remove_repo(&self, conn: &DbConn, repo: &str) -> Result { let res = db::query::repo::by_name(conn, repo).await?; diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index aca8503..419aa61 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -143,13 +143,9 @@ async fn post_package_archive( }; // If the 
package already exists in the database, we remove it first - let res = db::query::package::by_fields( - &global.db, - repo_id, - &pkg.info.arch, - &pkg.info.name, - ) - .await?; + let res = + db::query::package::by_fields(&global.db, repo_id, &pkg.info.arch, &pkg.info.name) + .await?; if let Some(entry) = res { entry.delete(&global.db).await?; @@ -240,13 +236,8 @@ async fn delete_package( let res = db::query::repo::by_name(&global.db, &repo).await?; if let Some(repo_entry) = res { - let res = db::query::package::by_fields( - &global.db, - repo_entry.id, - &arch, - &name, - ) - .await?; + let res = + db::query::package::by_fields(&global.db, repo_entry.id, &arch, &name).await?; if let Some(entry) = res { entry.delete(&global.db).await?; From c95feadca104f39bbf25045844b764f82a821828 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sat, 25 May 2024 13:31:46 +0200 Subject: [PATCH 15/73] feat: further work on new repo & package implementation --- libarchive/src/write/file.rs | 33 ++++++++++++++++++++ libarchive/src/write/mod.rs | 1 + server/src/repo/manager_new.rs | 24 +++++++++++++-- server/src/repo/mod.rs | 1 + server/src/repo/package_new.rs | 56 ++++++++++++++++++++++++++++++++++ 5 files changed, 113 insertions(+), 2 deletions(-) create mode 100644 server/src/repo/package_new.rs diff --git a/libarchive/src/write/file.rs b/libarchive/src/write/file.rs index 7f81915..fa39a13 100644 --- a/libarchive/src/write/file.rs +++ b/libarchive/src/write/file.rs @@ -2,10 +2,12 @@ use super::WriteEntry; use crate::error::ArchiveError; use crate::Entry; use crate::Handle; +use core::ffi::c_void; use libarchive3_sys::ffi; use std::fs; use std::io; use std::io::Read; +use std::io::Write; use std::path::Path; pub struct FileWriter { @@ -33,6 +35,17 @@ impl FileWriter { } } + /// Append the given entry to the archive. After successfully calling this function, writing to + /// the archive now writes to this entry. 
+ pub fn append_entry(&mut self, entry: &mut WriteEntry) -> crate::Result<()> { + unsafe { + match ffi::archive_write_header(self.handle_mut(), entry.entry_mut()) { + ffi::ARCHIVE_OK => Ok(()), + _ => Err(ArchiveError::from(self as &dyn Handle).into()), + } + } + } + pub fn append_data(&mut self, entry: &mut WriteEntry, r: &mut R) -> crate::Result<()> { unsafe { match ffi::archive_write_header(self.handle_mut(), entry.entry_mut()) { @@ -109,3 +122,23 @@ impl Drop for FileWriter { } } } + +impl Write for FileWriter { + fn write(&mut self, buf: &[u8]) -> io::Result { + let res = unsafe { + ffi::archive_write_data(self.handle_mut(), buf.as_ptr() as *const c_void, buf.len()) + } as isize; + + if res < 0 { + Err(ArchiveError::from(self as &dyn Handle).into()) + } else { + // Unwrap is safe as we check if the value is negative in the if statement + Ok(res.try_into().unwrap()) + } + } + + fn flush(&mut self) -> io::Result<()> { + // Libarchive doesn't seem to provide a flush mechanism + Ok(()) + } +} diff --git a/libarchive/src/write/mod.rs b/libarchive/src/write/mod.rs index 642fc18..5f583e0 100644 --- a/libarchive/src/write/mod.rs +++ b/libarchive/src/write/mod.rs @@ -3,6 +3,7 @@ mod file; use crate::Entry; pub use builder::Builder; +pub use file::FileWriter; use libarchive3_sys::ffi; pub struct WriteEntry { diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index 9e7fb91..5e3787e 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -1,4 +1,5 @@ use std::path::{Path, PathBuf}; +use std::sync::{Arc, Mutex}; use libarchive::write::{Builder, WriteEntry}; use libarchive::{Entry, WriteFilter, WriteFormat}; @@ -7,6 +8,7 @@ use sea_orm::{ColumnTrait, DbConn, EntityTrait, ModelTrait, QueryFilter}; use futures::StreamExt; +use super::package_new; use crate::db; use crate::error::Result; @@ -37,7 +39,7 @@ impl MetaRepoMgr { let parent_dir = self.repo_dir.join(&repo.name).join(arch); let repo_name = repo.name.clone(); - let 
(mut ar_db, mut ar_files) = tokio::task::spawn_blocking(move || { + let (ar_db, ar_files) = tokio::task::spawn_blocking(move || { let mut ar_db = Builder::new(); ar_db.add_filter(WriteFilter::Gzip)?; ar_db.set_format(WriteFormat::PaxRestricted)?; @@ -69,12 +71,30 @@ impl MetaRepoMgr { .stream(conn) .await?; + let ar_files = Arc::new(Mutex::new(ar_files)); + let ar_db = Arc::new(Mutex::new(ar_db)); + while let Some(pkg) = pkgs.next().await { let pkg = pkg?; - // TODO for each package, write entry to archive files + package_new::append_files_entry(conn, &pkg, Arc::clone(&ar_files)).await?; + + // TODO db archive } + // Close archives explicitely for better error handling + tokio::task::spawn_blocking(move || { + let r1 = ar_files.lock().unwrap().close(); + let r2 = ar_db.lock().unwrap().close(); + + match (r1, r2) { + (Ok(_), Ok(_)) => Ok(()), + (Err(err), _) | (_, Err(err)) => Err(err), + } + }) + .await + .unwrap()?; + Ok(()) } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 419aa61..c6ca77c 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -1,6 +1,7 @@ mod manager; mod manager_new; pub mod package; +pub mod package_new; pub use manager::RepoGroupManager; diff --git a/server/src/repo/package_new.rs b/server/src/repo/package_new.rs new file mode 100644 index 0000000..187d980 --- /dev/null +++ b/server/src/repo/package_new.rs @@ -0,0 +1,56 @@ +use sea_orm::{DbConn, ModelTrait}; + +use libarchive::write::{FileWriter, WriteEntry}; +use libarchive::Entry; + +use futures::StreamExt; + +use std::io::Write; +use std::path::PathBuf; +use std::sync::{Arc, Mutex}; + +use crate::db; + +/// Return the full name of the package, consisting of its package name, pkgver and pkgrel +fn full_pkg_name(pkg: &db::entities::package::Model) -> String { + format!("{}-{}", pkg.name, pkg.version) +} + +pub async fn append_files_entry( + conn: &DbConn, + pkg: &db::entities::package::Model, + ar: Arc>, +) -> crate::Result<()> { + let full_name = 
full_pkg_name(pkg); + let ar_clone = Arc::clone(&ar); + + tokio::task::spawn_blocking(move || { + let mut ar_entry = WriteEntry::new(); + ar_entry.set_filetype(libarchive::archive::FileType::RegularFile); + ar_entry.set_pathname(PathBuf::from(full_name).join("files")); + ar_entry.set_mode(0o100644); + // TODO set entry size? + + ar_clone.lock().unwrap().append_entry(&mut ar_entry) + }) + .await + .unwrap()?; + + // Write first header line + let ar_clone = Arc::clone(&ar); + tokio::task::spawn_blocking(move || writeln!(ar_clone.lock().unwrap(), "%FILES%")) + .await + .unwrap()?; + + let mut files = pkg.find_related(db::PackageFile).stream(conn).await?; + + while let Some(file) = files.next().await.transpose()? { + let ar_clone = Arc::clone(&ar); + + tokio::task::spawn_blocking(move || writeln!(ar_clone.lock().unwrap(), "{}", file.path)) + .await + .unwrap()?; + } + + Ok(()) +} From c5ef7c3c2817f74fc47ff8ef65061913ba9feec5 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sat, 25 May 2024 18:55:02 +0200 Subject: [PATCH 16/73] feat: message-passing interface for archive structs; implement pkg add --- server/src/repo/archive.rs | 106 +++++++++++++++++++++++++++++++ server/src/repo/manager_new.rs | 113 ++++++++++++++++++++------------- server/src/repo/mod.rs | 2 +- server/src/repo/package_new.rs | 56 ---------------- 4 files changed, 176 insertions(+), 101 deletions(-) create mode 100644 server/src/repo/archive.rs delete mode 100644 server/src/repo/package_new.rs diff --git a/server/src/repo/archive.rs b/server/src/repo/archive.rs new file mode 100644 index 0000000..a8471eb --- /dev/null +++ b/server/src/repo/archive.rs @@ -0,0 +1,106 @@ +use std::io::{self, Write}; +use std::path::{Path, PathBuf}; +use std::sync::Arc; +use tokio::sync::Mutex; +use tokio::sync::{mpsc, oneshot}; + +use libarchive::write::{Builder, FileWriter, WriteEntry}; +use libarchive::{Entry, WriteFilter, WriteFormat}; + +enum Message { + AppendFilesEntry(oneshot::Sender>, String), + 
AppendLine(oneshot::Sender>, String), + Close(oneshot::Sender>), +} + +/// Struct to abstract away the intrinsics of writing entries to an archive file +pub struct RepoArchiveWriter { + tx: mpsc::Sender, +} + +fn archive_manager_task(mut rx: mpsc::Receiver, mut ar: FileWriter) { + // Once the accompanying struct and the final sender get dropped, this will return None and the + // task will exit + while let Some(msg) = rx.blocking_recv() { + match msg { + Message::AppendFilesEntry(tx, full_name) => { + let mut ar_entry = WriteEntry::new(); + ar_entry.set_filetype(libarchive::archive::FileType::RegularFile); + ar_entry.set_pathname(PathBuf::from(full_name).join("files")); + ar_entry.set_mode(0o100644); + // TODO set entry size? + + let mut res = ar.append_entry(&mut ar_entry).map_err(io::Error::from); + + if res.is_ok() { + res = writeln!(ar, "%FILES%"); + } + + tx.send(res); + + // All "files" entries start with this line + } + Message::AppendLine(tx, line) => { + let res = writeln!(ar, "{}", line); + + tx.send(res); + } + Message::Close(tx) => { + let res = ar.close(); + + tx.send(res.map_err(io::Error::from)); + } + } + } +} + +impl RepoArchiveWriter { + pub async fn open>(path: P) -> io::Result { + let path = PathBuf::from(path.as_ref()); + let (tx, rx) = mpsc::channel(1); + + // Open the archive file + let ar = tokio::task::spawn_blocking(move || { + let mut builder = Builder::new(); + builder.add_filter(WriteFilter::Gzip)?; + builder.set_format(WriteFormat::PaxRestricted)?; + + builder.open_file(path) + }) + .await + .unwrap()?; + + // Spawn blocking task to perform blocking actions + tokio::task::spawn_blocking(move || archive_manager_task(rx, ar)); + + Ok(Self { tx }) + } + + /// Set the current entry to be a new "files" list + pub async fn add_files_entry(&self, full_name: &str) -> io::Result<()> { + let full_name = String::from(full_name); + let (tx, rx) = oneshot::channel(); + + self.tx.send(Message::AppendFilesEntry(tx, full_name)).await; + + 
rx.await.unwrap() + } + // + ///// Append the given line to the currently active entry + pub async fn write_line(&self, line: &str) -> io::Result<()> { + let line = String::from(line); + let (tx, rx) = oneshot::channel(); + + self.tx.send(Message::AppendLine(tx, line)).await; + + rx.await.unwrap() + } + + pub async fn close(&self) -> io::Result<()> { + let (tx, rx) = oneshot::channel(); + + self.tx.send(Message::Close(tx)).await; + + rx.await.unwrap() + } +} diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index 5e3787e..a7f2f1b 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -5,10 +5,13 @@ use libarchive::write::{Builder, WriteEntry}; use libarchive::{Entry, WriteFilter, WriteFormat}; use sea_orm::{ColumnTrait, DbConn, EntityTrait, ModelTrait, QueryFilter}; +use uuid::Uuid; use futures::StreamExt; +use tokio::io::AsyncRead; -use super::package_new; +use super::archive; +use super::package; use crate::db; use crate::error::Result; @@ -38,30 +41,9 @@ impl MetaRepoMgr { let repo = repo.unwrap(); let parent_dir = self.repo_dir.join(&repo.name).join(arch); - let repo_name = repo.name.clone(); - let (ar_db, ar_files) = tokio::task::spawn_blocking(move || { - let mut ar_db = Builder::new(); - ar_db.add_filter(WriteFilter::Gzip)?; - ar_db.set_format(WriteFormat::PaxRestricted)?; - - let mut ar_files = Builder::new(); - ar_files.add_filter(WriteFilter::Gzip)?; - ar_files.set_format(WriteFormat::PaxRestricted)?; - - let ar_db = ar_db.open_file(parent_dir.join(format!("{}.db.tar.gz", repo_name))); - let ar_files = - ar_files.open_file(parent_dir.join(format!("{}.files.tar.gz", repo_name))); - - match (ar_db, ar_files) { - (Ok(ar_db), Ok(ar_files)) => Ok((ar_db, ar_files)), - (Err(err), _) | (_, Err(err)) => Err(err), - } - }) - .await - .unwrap()?; - //let mut ar_db = ar_db.open_file(parent_dir.join(format!("{}.db.tar.gz", &repo.name)))?; - //let mut ar_files = - // 
ar_files.open_file(parent_dir.join(format!("{}.files.tar.gz", &repo.name)))?; + let ar_files = + archive::RepoArchiveWriter::open(parent_dir.join(format!("{}.db.tar.gz", repo.name))) + .await?; // Query all packages in the repo that have the given architecture or the "any" // architecture @@ -71,29 +53,19 @@ impl MetaRepoMgr { .stream(conn) .await?; - let ar_files = Arc::new(Mutex::new(ar_files)); - let ar_db = Arc::new(Mutex::new(ar_db)); + while let Some(pkg) = pkgs.next().await.transpose()? { + ar_files + .add_files_entry(&format!("{}-{}", pkg.name, pkg.version)) + .await?; - while let Some(pkg) = pkgs.next().await { - let pkg = pkg?; + let mut files = pkg.find_related(db::PackageFile).stream(conn).await?; - package_new::append_files_entry(conn, &pkg, Arc::clone(&ar_files)).await?; - - // TODO db archive + while let Some(file) = files.next().await.transpose()? { + ar_files.write_line(&file.path).await?; + } } - // Close archives explicitely for better error handling - tokio::task::spawn_blocking(move || { - let r1 = ar_files.lock().unwrap().close(); - let r2 = ar_db.lock().unwrap().close(); - - match (r1, r2) { - (Ok(_), Ok(_)) => Ok(()), - (Err(err), _) | (_, Err(err)) => Err(err), - } - }) - .await - .unwrap()?; + ar_files.close().await?; Ok(()) } @@ -139,4 +111,57 @@ impl MetaRepoMgr { Ok(false) } } + + pub async fn add_pkg_from_reader(&self, conn: &DbConn, reader: &mut R, repo: &str) -> crate::Result<()> { + // Copy file contents to temporary path so libarchive can work with it + let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); + let path = self.pkg_dir.join(uuid.to_string()); + let mut temp_file = tokio::fs::File::create(&path).await?; + + tokio::io::copy(reader, &mut temp_file).await?; + + // Parse the package + let path_clone = path.clone(); + let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)).await.unwrap()?; + + // Query the repo for its ID, or create it if it does not already exist + let res = 
db::query::repo::by_name(conn, &repo).await?; + + let repo_id = if let Some(repo_entity) = res { + repo_entity.id + } else { + db::query::repo::insert(conn, repo, None) + .await? + .last_insert_id + }; + + // If the package already exists in the database, we remove it first + let res = + db::query::package::by_fields(conn, repo_id, &pkg.info.arch, &pkg.info.name) + .await?; + + if let Some(entry) = res { + entry.delete(conn).await?; + } + + let dest_pkg_path = self + .pkg_dir + .join(repo) + .join(&pkg.info.arch) + .join(pkg.file_name()); + + // Insert new package into database + let arch = pkg.info.arch.clone(); + db::query::package::insert(conn, repo_id, pkg).await?; + + // Move the package to its final resting place + tokio::fs::create_dir_all(dest_pkg_path.parent().unwrap()).await?; + tokio::fs::rename(path, dest_pkg_path).await?; + + // Synchronize archive databases + // TODO account for "any" architecture here + self.generate_archives(conn, repo, &arch).await?; + + Ok(()) + } } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index c6ca77c..4b3ff29 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -1,7 +1,7 @@ +mod archive; mod manager; mod manager_new; pub mod package; -pub mod package_new; pub use manager::RepoGroupManager; diff --git a/server/src/repo/package_new.rs b/server/src/repo/package_new.rs deleted file mode 100644 index 187d980..0000000 --- a/server/src/repo/package_new.rs +++ /dev/null @@ -1,56 +0,0 @@ -use sea_orm::{DbConn, ModelTrait}; - -use libarchive::write::{FileWriter, WriteEntry}; -use libarchive::Entry; - -use futures::StreamExt; - -use std::io::Write; -use std::path::PathBuf; -use std::sync::{Arc, Mutex}; - -use crate::db; - -/// Return the full name of the package, consisting of its package name, pkgver and pkgrel -fn full_pkg_name(pkg: &db::entities::package::Model) -> String { - format!("{}-{}", pkg.name, pkg.version) -} - -pub async fn append_files_entry( - conn: &DbConn, - pkg: 
&db::entities::package::Model, - ar: Arc>, -) -> crate::Result<()> { - let full_name = full_pkg_name(pkg); - let ar_clone = Arc::clone(&ar); - - tokio::task::spawn_blocking(move || { - let mut ar_entry = WriteEntry::new(); - ar_entry.set_filetype(libarchive::archive::FileType::RegularFile); - ar_entry.set_pathname(PathBuf::from(full_name).join("files")); - ar_entry.set_mode(0o100644); - // TODO set entry size? - - ar_clone.lock().unwrap().append_entry(&mut ar_entry) - }) - .await - .unwrap()?; - - // Write first header line - let ar_clone = Arc::clone(&ar); - tokio::task::spawn_blocking(move || writeln!(ar_clone.lock().unwrap(), "%FILES%")) - .await - .unwrap()?; - - let mut files = pkg.find_related(db::PackageFile).stream(conn).await?; - - while let Some(file) = files.next().await.transpose()? { - let ar_clone = Arc::clone(&ar); - - tokio::task::spawn_blocking(move || writeln!(ar_clone.lock().unwrap(), "{}", file.path)) - .await - .unwrap()?; - } - - Ok(()) -} From f209c8175906c8065d707fcbd86775729543ffef Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sat, 25 May 2024 22:53:46 +0200 Subject: [PATCH 17/73] feat: started using new meta repo manager --- Cargo.lock | 107 +++++++++-------- server/Cargo.toml | 3 +- server/src/cli.rs | 9 +- server/src/main.rs | 6 +- server/src/repo/manager_new.rs | 17 ++- server/src/repo/mod.rs | 207 ++++++++++++++------------------- 6 files changed, 168 insertions(+), 181 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e59252c..333bc72 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -151,7 +151,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -162,7 +162,7 @@ checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" dependencies = [ "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -194,6 +194,7 @@ checksum = 
"3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" dependencies = [ "async-trait", "axum-core", + "axum-macros", "bytes", "futures-util", "http", @@ -241,6 +242,18 @@ dependencies = [ "tracing", ] +[[package]] +name = "axum-macros" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00c055ee2d014ae5981ce1016374e8213682aa14d9bf40e48ab48b5f3ef20eaa" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 2.0.66", +] + [[package]] name = "backtrace" version = "0.3.71" @@ -335,7 +348,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", "syn_derive", ] @@ -381,9 +394,9 @@ checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" [[package]] name = "cc" -version = "1.0.97" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4" +checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f" [[package]] name = "cfg-if" @@ -443,7 +456,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -505,9 +518,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "crypto-common" @@ -617,12 +630,6 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" -[[package]] -name = "finl_unicode" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" - [[package]] name = "flume" version 
= "0.11.0" @@ -722,7 +729,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -963,9 +970,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" +checksum = "3d8d52be92d09acc2e01dddb7fde3ad983fc6489c7db4837e605bc3fca4cb63e" dependencies = [ "bytes", "futures-util", @@ -973,7 +980,6 @@ dependencies = [ "http-body", "hyper", "pin-project-lite", - "socket2", "tokio", ] @@ -1028,7 +1034,7 @@ checksum = "0122b7114117e64a63ac49f752a5ca4624d534c7b1c7de796ac196381cd2d947" dependencies = [ "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -1342,7 +1348,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -1353,9 +1359,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "parking_lot" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", @@ -1412,7 +1418,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -1501,9 +1507,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.82" +version = "1.0.84" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" +checksum = "ec96c6a92621310b51366f1e28d05ef11489516e93be030060e5fc12024a49d6" dependencies = [ "unicode-ident", ] @@ 
-1652,6 +1658,7 @@ dependencies = [ "chrono", "clap", "futures", + "http-body-util", "libarchive", "sea-orm", "sea-orm-migration", @@ -1833,7 +1840,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -1891,7 +1898,7 @@ dependencies = [ "proc-macro2", "quote", "sea-bae", - "syn 2.0.64", + "syn 2.0.66", "unicode-ident", ] @@ -1955,7 +1962,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", "thiserror", ] @@ -1990,22 +1997,22 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" [[package]] name = "serde" -version = "1.0.202" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "226b61a0d411b2ba5ff6d7f73a476ac4f8bb900373459cd00fab8512828ba395" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.202" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838" +checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" dependencies = [ "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -2395,13 +2402,13 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "stringprep" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" dependencies = [ - "finl_unicode", "unicode-bidi", "unicode-normalization", + "unicode-properties", ] [[package]] @@ -2435,9 +2442,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.64" +version = "2.0.66" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ad3dee41f36859875573074334c200d1add8e4a87bb37113ebd31d926b7b11f" +checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" dependencies = [ "proc-macro2", "quote", @@ -2453,7 +2460,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -2503,7 +2510,7 @@ checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -2589,7 +2596,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -2707,7 +2714,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] @@ -2785,6 +2792,12 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "unicode-properties" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4259d9d4425d9f0661581b804cb85fe66a4c631cadd8f490d1c13a35d5d9291" + [[package]] name = "unicode-segmentation" version = "1.11.0" @@ -2887,7 +2900,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", "wasm-bindgen-shared", ] @@ -2909,7 +2922,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -3141,11 +3154,11 @@ checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.64", + "syn 2.0.66", ] [[package]] name = "zeroize" -version = "1.7.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/server/Cargo.toml b/server/Cargo.toml index a234377..c2c5d74 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -7,10 +7,11 @@ authors = ["Jef Roosens"] # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -axum = { version = "0.7.5", features = ["http2"] } +axum = { version = "0.7.5", features = ["http2", "macros"] } chrono = { version = "0.4.26", features = ["serde"] } clap = { version = "4.3.12", features = ["env", "derive"] } futures = "0.3.28" +http-body-util = "0.1.1" libarchive = { path = "../libarchive" } sea-orm-migration = "0.12.1" serde = { version = "1.0.178", features = ["derive"] } diff --git a/server/src/cli.rs b/server/src/cli.rs index 2976d1c..91569de 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -1,4 +1,4 @@ -use crate::repo::RepoGroupManager; +use crate::repo::{MetaRepoMgr, RepoGroupManager}; use crate::{Config, Global}; use axum::extract::FromRef; @@ -7,7 +7,8 @@ use clap::Parser; use sea_orm_migration::MigratorTrait; use std::io; use std::path::PathBuf; -use std::sync::{Arc, RwLock}; +use std::sync::Arc; +use tokio::sync::RwLock; use tower_http::trace::TraceLayer; use tracing::debug; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; @@ -48,7 +49,7 @@ pub struct Cli { pub log: String, } -impl FromRef for Arc> { +impl FromRef for Arc> { fn from_ref(global: &Global) -> Self { Arc::clone(&global.repo_manager) } @@ -85,7 +86,7 @@ impl Cli { pkg_dir: self.pkg_dir.clone(), api_key: self.api_key.clone(), }; - let repo_manager = RepoGroupManager::new(&config.repo_dir, &self.pkg_dir); + let repo_manager = MetaRepoMgr::new(&config.repo_dir, &self.pkg_dir); let global = Global { config, diff --git a/server/src/main.rs b/server/src/main.rs index 9068bd7..7038203 100644 --- a/server/src/main.rs +++ 
b/server/src/main.rs @@ -6,9 +6,11 @@ mod repo; use clap::Parser; pub use error::{Result, ServerError}; +use repo::MetaRepoMgr; use repo::RepoGroupManager; use std::path::PathBuf; -use std::sync::{Arc, RwLock}; +use std::sync::Arc; +use tokio::sync::RwLock; #[derive(Clone)] pub struct Config { @@ -21,7 +23,7 @@ pub struct Config { #[derive(Clone)] pub struct Global { config: Config, - repo_manager: Arc>, + repo_manager: Arc>, db: sea_orm::DbConn, } diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index a7f2f1b..f7c0fb1 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -4,7 +4,7 @@ use std::sync::{Arc, Mutex}; use libarchive::write::{Builder, WriteEntry}; use libarchive::{Entry, WriteFilter, WriteFormat}; -use sea_orm::{ColumnTrait, DbConn, EntityTrait, ModelTrait, QueryFilter}; +use sea_orm::{ColumnTrait, DbConn, EntityTrait, ModelTrait, QueryFilter, QuerySelect}; use uuid::Uuid; use futures::StreamExt; @@ -40,6 +40,7 @@ impl MetaRepoMgr { let repo = repo.unwrap(); let parent_dir = self.repo_dir.join(&repo.name).join(arch); + tokio::fs::create_dir_all(&parent_dir).await?; let ar_files = archive::RepoArchiveWriter::open(parent_dir.join(format!("{}.db.tar.gz", repo.name))) @@ -112,7 +113,12 @@ impl MetaRepoMgr { } } - pub async fn add_pkg_from_reader(&self, conn: &DbConn, reader: &mut R, repo: &str) -> crate::Result<()> { + pub async fn add_pkg_from_reader( + &self, + conn: &DbConn, + reader: &mut R, + repo: &str, + ) -> crate::Result<()> { // Copy file contents to temporary path so libarchive can work with it let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); let path = self.pkg_dir.join(uuid.to_string()); @@ -122,7 +128,9 @@ impl MetaRepoMgr { // Parse the package let path_clone = path.clone(); - let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)).await.unwrap()?; + let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)) + .await + .unwrap()?; // 
Query the repo for its ID, or create it if it does not already exist let res = db::query::repo::by_name(conn, &repo).await?; @@ -137,8 +145,7 @@ impl MetaRepoMgr { // If the package already exists in the database, we remove it first let res = - db::query::package::by_fields(conn, repo_id, &pkg.info.arch, &pkg.info.name) - .await?; + db::query::package::by_fields(conn, repo_id, &pkg.info.arch, &pkg.info.name).await?; if let Some(entry) = res { entry.delete(conn).await?; diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 4b3ff29..1992e22 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -4,9 +4,12 @@ mod manager_new; pub mod package; pub use manager::RepoGroupManager; +pub use manager_new::MetaRepoMgr; +use tokio_util::io::StreamReader; use std::path::PathBuf; +use crate::db; use axum::body::Body; use axum::extract::{Path, State}; use axum::http::Request; @@ -14,7 +17,8 @@ use axum::http::StatusCode; use axum::response::IntoResponse; use axum::routing::{delete, post}; use axum::Router; -use futures::StreamExt; +use futures::TryStreamExt; +use futures::{Stream, StreamExt}; use sea_orm::ModelTrait; use std::sync::Arc; use tokio::{fs, io::AsyncWriteExt}; @@ -23,8 +27,6 @@ use tower_http::services::{ServeDir, ServeFile}; use tower_http::validate_request::ValidateRequestHeaderLayer; use uuid::Uuid; -use crate::db; - const DB_FILE_EXTS: [&str; 4] = [".db", ".files", ".db.tar.gz", ".files.tar.gz"]; pub fn router(api_key: &str) -> Router { @@ -99,90 +101,40 @@ async fn get_file( Ok(res) } +#[axum::debug_handler] async fn post_package_archive( State(global): State, Path(repo): Path, body: Body, ) -> crate::Result<()> { - // We first stream the uploaded file to disk - let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let path = global.config.pkg_dir.join(uuid.to_string()); - let mut f = fs::File::create(&path).await?; + let body = body.into_data_stream(); + let body = body.map_err(std::io::Error::other); + let mut body = 
StreamReader::new(body); + global + .repo_manager + .write() + .await + .add_pkg_from_reader(&global.db, &mut body, &repo) + .await?; - let mut body = body.into_data_stream(); + Ok(()) - while let Some(chunk) = body.next().await { - f.write_all(&chunk?).await?; - } - - let clone = Arc::clone(&global.repo_manager); - let path_clone = path.clone(); - let repo_clone = repo.clone(); - let res = tokio::task::spawn_blocking(move || { - global - .repo_manager - .write() - .unwrap() - .add_pkg_from_path(&repo_clone, &path_clone) - }) - .await?; - - match res { - // Insert the newly added package into the database - Ok(pkg) => { - tracing::info!("Added '{}' to repository '{}'", pkg.file_name(), repo); - - // Query the repo for its ID, or create it if it does not already exist - let res = db::query::repo::by_name(&global.db, &repo).await?; - - let repo_id = if let Some(repo_entity) = res { - repo_entity.id - } else { - db::query::repo::insert(&global.db, &repo, None) - .await? - .last_insert_id - }; - - // If the package already exists in the database, we remove it first - let res = - db::query::package::by_fields(&global.db, repo_id, &pkg.info.arch, &pkg.info.name) - .await?; - - if let Some(entry) = res { - entry.delete(&global.db).await?; - } - - db::query::package::insert(&global.db, repo_id, pkg).await?; - - Ok(()) - } - // Remove the uploaded file and return the error - Err(err) => { - tokio::fs::remove_file(path).await?; - - Err(err.into()) - } - } + //tracing::info!("Added '{}' to repository '{}'", pkg.file_name(), repo); } +#[axum::debug_handler] async fn delete_repo( State(global): State, Path(repo): Path, ) -> crate::Result { - let clone = Arc::clone(&global.repo_manager); - - let repo_clone = repo.clone(); - let repo_removed = - tokio::task::spawn_blocking(move || clone.write().unwrap().remove_repo(&repo_clone)) - .await??; + let repo_removed = global + .repo_manager + .write() + .await + .remove_repo(&global.db, &repo) + .await?; if repo_removed { - let res = 
db::query::repo::by_name(&global.db, &repo).await?; - - if let Some(repo_entry) = res { - repo_entry.delete(&global.db).await?; - } - tracing::info!("Removed repository '{}'", repo); Ok(StatusCode::OK) @@ -195,60 +147,71 @@ async fn delete_arch_repo( State(global): State, Path((repo, arch)): Path<(String, String)>, ) -> crate::Result { - let clone = Arc::clone(&global.repo_manager); - - let arch_clone = arch.clone(); - let repo_clone = repo.clone(); - let repo_removed = tokio::task::spawn_blocking(move || { - clone - .write() - .unwrap() - .remove_repo_arch(&repo_clone, &arch_clone) - }) - .await??; - - if repo_removed { - let res = db::query::repo::by_name(&global.db, &repo).await?; - - if let Some(repo_entry) = res { - db::query::package::delete_with_arch(&global.db, repo_entry.id, &arch).await?; - } - tracing::info!("Removed architecture '{}' from repository '{}'", arch, repo); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } + Ok(StatusCode::NOT_FOUND) + //let clone = Arc::clone(&global.repo_manager); + // + //let arch_clone = arch.clone(); + //let repo_clone = repo.clone(); + //let repo_removed = tokio::task::spawn_blocking(move || { + // clone + // .write() + // .unwrap() + // .remove_repo_arch(&repo_clone, &arch_clone) + //}) + //.await??; + // + //if repo_removed { + // let res = db::query::repo::by_name(&global.db, &repo).await?; + // + // if let Some(repo_entry) = res { + // db::query::package::delete_with_arch(&global.db, repo_entry.id, &arch).await?; + // } + // tracing::info!("Removed architecture '{}' from repository '{}'", arch, repo); + // + // Ok(StatusCode::OK) + //} else { + // Ok(StatusCode::NOT_FOUND) + //} } async fn delete_package( State(global): State, Path((repo, arch, file_name)): Path<(String, String, String)>, ) -> crate::Result { - let clone = Arc::clone(&global.repo_manager); - let path = PathBuf::from(&repo).join(arch).join(&file_name); - - let res = tokio::task::spawn_blocking(move || { - 
clone.write().unwrap().remove_pkg_from_path(path, true) - }) - .await??; - - if let Some((name, version, release, arch)) = res { - let res = db::query::repo::by_name(&global.db, &repo).await?; - - if let Some(repo_entry) = res { - let res = - db::query::package::by_fields(&global.db, repo_entry.id, &arch, &name).await?; - - if let Some(entry) = res { - entry.delete(&global.db).await?; - } - } - - tracing::info!("Removed '{}' from repository '{}'", file_name, repo); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } + Ok(StatusCode::NOT_FOUND) + //global.repo_manager.write().unwrap().remove_pkg(&global.db, &repo, &arch, name) + //let clone = Arc::clone(&global.repo_manager); + //let path = PathBuf::from(&repo).join(arch).join(&file_name); + // + //let res = tokio::task::spawn_blocking(move || { + // clone.write().unwrap().remove_pkg_from_path(path, true) + //}) + //.await??; + // + //if let Some((name, version, release, arch)) = res { + // let res = db::query::repo::by_name(&global.db, &repo).await?; + // + // if let Some(repo_entry) = res { + // let res = + // db::query::package::by_fields(&global.db, repo_entry.id, &arch, &name).await?; + // + // if let Some(entry) = res { + // entry.delete(&global.db).await?; + // } + // } + // + // tracing::info!("Removed '{}' from repository '{}'", file_name, repo); + // + // Ok(StatusCode::OK) + //} else { + // Ok(StatusCode::NOT_FOUND) + //} +} + +fn help_me_figure_this_out(_: impl Send) {} + +#[allow(dead_code)] +fn assert_my_handler_is_ok() { + // This is the fun part: we need to call our handler, and *not* await it's Future. 
+ help_me_figure_this_out(delete_repo) } From 2d4cfee27a215dec18bd34a873477bb0b02d9a80 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 26 May 2024 17:51:40 +0200 Subject: [PATCH 18/73] feat: implement archive writer just mutex and spawn_blocking --- server/src/cli.rs | 5 +- server/src/repo/archive.rs | 104 +++++++++++++-------------------- server/src/repo/manager_new.rs | 22 +++++-- 3 files changed, 63 insertions(+), 68 deletions(-) diff --git a/server/src/cli.rs b/server/src/cli.rs index 91569de..0c802f2 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -77,7 +77,10 @@ impl Cli { debug!("Connecting to database with URL {}", db_url); - let db = sea_orm::Database::connect(db_url).await?; + let mut options = sea_orm::ConnectOptions::new(db_url); + options.max_connections(16); + + let db = sea_orm::Database::connect(options).await?; crate::db::Migrator::up(&db, None).await?; let config = Config { diff --git a/server/src/repo/archive.rs b/server/src/repo/archive.rs index a8471eb..2e31fb7 100644 --- a/server/src/repo/archive.rs +++ b/server/src/repo/archive.rs @@ -1,7 +1,6 @@ use std::io::{self, Write}; use std::path::{Path, PathBuf}; -use std::sync::Arc; -use tokio::sync::Mutex; +use std::sync::{Arc, Mutex}; use tokio::sync::{mpsc, oneshot}; use libarchive::write::{Builder, FileWriter, WriteEntry}; @@ -15,49 +14,12 @@ enum Message { /// Struct to abstract away the intrinsics of writing entries to an archive file pub struct RepoArchiveWriter { - tx: mpsc::Sender, -} - -fn archive_manager_task(mut rx: mpsc::Receiver, mut ar: FileWriter) { - // Once the accompanying struct and the final sender get dropped, this will return None and the - // task will exit - while let Some(msg) = rx.blocking_recv() { - match msg { - Message::AppendFilesEntry(tx, full_name) => { - let mut ar_entry = WriteEntry::new(); - ar_entry.set_filetype(libarchive::archive::FileType::RegularFile); - ar_entry.set_pathname(PathBuf::from(full_name).join("files")); - 
ar_entry.set_mode(0o100644); - // TODO set entry size? - - let mut res = ar.append_entry(&mut ar_entry).map_err(io::Error::from); - - if res.is_ok() { - res = writeln!(ar, "%FILES%"); - } - - tx.send(res); - - // All "files" entries start with this line - } - Message::AppendLine(tx, line) => { - let res = writeln!(ar, "{}", line); - - tx.send(res); - } - Message::Close(tx) => { - let res = ar.close(); - - tx.send(res.map_err(io::Error::from)); - } - } - } + ar: Arc>, } impl RepoArchiveWriter { pub async fn open>(path: P) -> io::Result { let path = PathBuf::from(path.as_ref()); - let (tx, rx) = mpsc::channel(1); // Open the archive file let ar = tokio::task::spawn_blocking(move || { @@ -70,37 +32,55 @@ impl RepoArchiveWriter { .await .unwrap()?; - // Spawn blocking task to perform blocking actions - tokio::task::spawn_blocking(move || archive_manager_task(rx, ar)); - - Ok(Self { tx }) + Ok(Self { + ar: Arc::new(Mutex::new(ar)), + }) } /// Set the current entry to be a new "files" list - pub async fn add_files_entry(&self, full_name: &str) -> io::Result<()> { + pub async fn add_files_entry>( + &self, + full_name: &str, + path: P, + ) -> io::Result<()> { + let metadata = tokio::fs::metadata(&path).await?; + let file_size = metadata.len(); + + let ar = Arc::clone(&self.ar); let full_name = String::from(full_name); - let (tx, rx) = oneshot::channel(); + let path = PathBuf::from(path.as_ref()); - self.tx.send(Message::AppendFilesEntry(tx, full_name)).await; + Ok(tokio::task::spawn_blocking(move || { + let mut ar_entry = WriteEntry::new(); + ar_entry.set_filetype(libarchive::archive::FileType::RegularFile); + ar_entry.set_pathname(PathBuf::from(full_name).join("files")); + ar_entry.set_mode(0o100644); + ar_entry.set_size(file_size.try_into().unwrap()); - rx.await.unwrap() - } - // - ///// Append the given line to the currently active entry - pub async fn write_line(&self, line: &str) -> io::Result<()> { - let line = String::from(line); - let (tx, rx) = oneshot::channel(); 
- - self.tx.send(Message::AppendLine(tx, line)).await; - - rx.await.unwrap() + ar.lock().unwrap().append_path(&mut ar_entry, path) + }) + .await + .unwrap()?) } pub async fn close(&self) -> io::Result<()> { - let (tx, rx) = oneshot::channel(); + let ar = Arc::clone(&self.ar); - self.tx.send(Message::Close(tx)).await; - - rx.await.unwrap() + Ok( + tokio::task::spawn_blocking(move || ar.lock().unwrap().close()) + .await + .unwrap()?, + ) } + + // + ///// Append the given line to the currently active entry + //pub async fn write_line(&self, line: &str) -> io::Result<()> { + // let line = String::from(line); + // let (tx, rx) = oneshot::channel(); + // + // self.tx.send(Message::AppendLine(tx, line)).await; + // + // rx.await.unwrap() + //} } diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index f7c0fb1..8b3a1bd 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -9,6 +9,7 @@ use uuid::Uuid; use futures::StreamExt; use tokio::io::AsyncRead; +use tokio::io::{AsyncSeekExt, AsyncWriteExt}; use super::archive; use super::package; @@ -54,16 +55,27 @@ impl MetaRepoMgr { .stream(conn) .await?; - while let Some(pkg) = pkgs.next().await.transpose()? { - ar_files - .add_files_entry(&format!("{}-{}", pkg.name, pkg.version)) - .await?; + let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); + let tmp_file_path = self.pkg_dir.join(uuid.to_string()); + while let Some(pkg) = pkgs.next().await.transpose()? { + let mut tmp_file = tokio::fs::File::create(&tmp_file_path).await?; + + let line = "%FILES%\n"; + tmp_file.write_all(line.as_bytes()).await?; + + // Generate the files list for the package let mut files = pkg.find_related(db::PackageFile).stream(conn).await?; while let Some(file) = files.next().await.transpose()? 
{ - ar_files.write_line(&file.path).await?; + tmp_file + .write_all(format!("{}\n", file.path).as_bytes()) + .await?; } + + ar_files + .add_files_entry(&format!("{}-{}", pkg.name, pkg.version), &tmp_file_path) + .await?; } ar_files.close().await?; From 88addc7a7ab81f288c8c385de2cc72d932fde8a3 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Sun, 26 May 2024 20:37:17 +0200 Subject: [PATCH 19/73] feat: generate all archives for "any" package --- server/src/repo/manager_new.rs | 52 +++++++++++++++++++++++++--------- 1 file changed, 39 insertions(+), 13 deletions(-) diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index 8b3a1bd..4dbea8a 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -1,22 +1,18 @@ use std::path::{Path, PathBuf}; -use std::sync::{Arc, Mutex}; -use libarchive::write::{Builder, WriteEntry}; -use libarchive::{Entry, WriteFilter, WriteFormat}; - -use sea_orm::{ColumnTrait, DbConn, EntityTrait, ModelTrait, QueryFilter, QuerySelect}; +use sea_orm::{ColumnTrait, DbConn, ModelTrait, QueryFilter, QuerySelect}; use uuid::Uuid; use futures::StreamExt; use tokio::io::AsyncRead; -use tokio::io::{AsyncSeekExt, AsyncWriteExt}; +use tokio::io::AsyncWriteExt; use super::archive; use super::package; use crate::db; use crate::error::Result; -pub const ANY_ARCH: &str = "any"; +pub const ANY_ARCH: &'static str = "any"; pub struct MetaRepoMgr { repo_dir: PathBuf, @@ -31,7 +27,34 @@ impl MetaRepoMgr { } } - /// Generate the `db` and `files` archive files for the given repo and architecture. + /// Generate archive databases for all known architectures in the repository, including the + /// "any" architecture. 
+ pub async fn generate_archives_all(&self, conn: &DbConn, repo: &str) -> Result<()> { + let repo = crate::db::query::repo::by_name(conn, repo).await?; + + if repo.is_none() { + return Ok(()); + } + + let repo = repo.unwrap(); + + let mut archs = repo + .find_related(crate::db::Package) + .select_only() + .column(crate::db::package::Column::Arch) + .distinct() + .into_tuple::() + .stream(conn) + .await?; + + while let Some(arch) = archs.next().await.transpose()? { + self.generate_archives(conn, &repo.name, &arch).await?; + } + + Ok(()) + } + + /// Generate the archive databases for the given repository and architecture. pub async fn generate_archives(&self, conn: &DbConn, repo: &str, arch: &str) -> Result<()> { let repo = crate::db::query::repo::by_name(conn, repo).await?; @@ -40,6 +63,7 @@ impl MetaRepoMgr { } let repo = repo.unwrap(); + let parent_dir = self.repo_dir.join(&repo.name).join(arch); tokio::fs::create_dir_all(&parent_dir).await?; @@ -51,7 +75,7 @@ impl MetaRepoMgr { // architecture let mut pkgs = repo .find_related(crate::db::Package) - .filter(db::package::Column::Arch.eq(arch).or(ANY_ARCH.into())) + .filter(db::package::Column::Arch.is_in([arch, ANY_ARCH])) .stream(conn) .await?; @@ -79,6 +103,7 @@ impl MetaRepoMgr { } ar_files.close().await?; + tokio::fs::remove_file(tmp_file_path).await?; Ok(()) } @@ -178,9 +203,10 @@ impl MetaRepoMgr { tokio::fs::rename(path, dest_pkg_path).await?; // Synchronize archive databases - // TODO account for "any" architecture here - self.generate_archives(conn, repo, &arch).await?; - - Ok(()) + if arch == ANY_ARCH { + self.generate_archives_all(conn, repo).await + } else { + self.generate_archives(conn, repo, &arch).await + } } } From ce7b5159e815ba6928c34583bf5aafe087e884c4 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 26 May 2024 23:06:59 +0200 Subject: [PATCH 20/73] feat: almost full desc file writer --- server/src/repo/archive.rs | 8 ++- server/src/repo/manager_new.rs | 29 ++++---- 
server/src/repo/package.rs | 124 ++++++++++++++++++++++++++++++++- 3 files changed, 142 insertions(+), 19 deletions(-) diff --git a/server/src/repo/archive.rs b/server/src/repo/archive.rs index 2e31fb7..8f7fb49 100644 --- a/server/src/repo/archive.rs +++ b/server/src/repo/archive.rs @@ -38,10 +38,11 @@ impl RepoArchiveWriter { } /// Set the current entry to be a new "files" list - pub async fn add_files_entry>( + pub async fn add_entry>( &self, full_name: &str, path: P, + desc: bool, ) -> io::Result<()> { let metadata = tokio::fs::metadata(&path).await?; let file_size = metadata.len(); @@ -53,7 +54,10 @@ impl RepoArchiveWriter { Ok(tokio::task::spawn_blocking(move || { let mut ar_entry = WriteEntry::new(); ar_entry.set_filetype(libarchive::archive::FileType::RegularFile); - ar_entry.set_pathname(PathBuf::from(full_name).join("files")); + + let name = if desc { "desc" } else { "files" }; + + ar_entry.set_pathname(PathBuf::from(full_name).join(name)); ar_entry.set_mode(0o100644); ar_entry.set_size(file_size.try_into().unwrap()); diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index 4dbea8a..9728764 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -80,30 +80,29 @@ impl MetaRepoMgr { .await?; let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let tmp_file_path = self.pkg_dir.join(uuid.to_string()); + let files_tmp_file_path = self.pkg_dir.join(uuid.to_string()); + + let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); + let desc_tmp_file_path = self.pkg_dir.join(uuid.to_string()); while let Some(pkg) = pkgs.next().await.transpose()? 
{ - let mut tmp_file = tokio::fs::File::create(&tmp_file_path).await?; + let mut files_tmp_file = tokio::fs::File::create(&files_tmp_file_path).await?; + let mut desc_tmp_file = tokio::fs::File::create(&desc_tmp_file_path).await?; - let line = "%FILES%\n"; - tmp_file.write_all(line.as_bytes()).await?; - - // Generate the files list for the package - let mut files = pkg.find_related(db::PackageFile).stream(conn).await?; - - while let Some(file) = files.next().await.transpose()? { - tmp_file - .write_all(format!("{}\n", file.path).as_bytes()) - .await?; - } + package::write_files(conn, &mut files_tmp_file, &pkg).await?; + package::write_desc(conn, &mut desc_tmp_file, &pkg).await?; + let full_name = format!("{}-{}", pkg.name, pkg.version); ar_files - .add_files_entry(&format!("{}-{}", pkg.name, pkg.version), &tmp_file_path) + .add_entry(&full_name, &desc_tmp_file_path, true) + .await?; + ar_files + .add_entry(&full_name, &files_tmp_file_path, false) .await?; } ar_files.close().await?; - tokio::fs::remove_file(tmp_file_path).await?; + tokio::fs::remove_file(files_tmp_file_path).await?; Ok(()) } diff --git a/server/src/repo/package.rs b/server/src/repo/package.rs index 3c11c38..7c9a778 100644 --- a/server/src/repo/package.rs +++ b/server/src/repo/package.rs @@ -1,13 +1,15 @@ use chrono::NaiveDateTime; +use futures::StreamExt; use libarchive::read::{Archive, Builder}; use libarchive::{Entry, ReadFilter}; -use sea_orm::ActiveValue::Set; +use sea_orm::{ActiveValue::Set, ColumnTrait, DbConn, ModelTrait, QueryFilter, QuerySelect}; use std::fmt; use std::fs; use std::io::{self, BufRead, BufReader, BufWriter, Read, Write}; use std::path::{Path, PathBuf}; +use tokio::io::{AsyncWrite, AsyncWriteExt}; -use crate::db::entities::package; +use crate::db::{self, entities::package, PackageRelatedEnum}; const IGNORED_FILES: [&str; 5] = [".BUILDINFO", ".INSTALL", ".MTREE", ".PKGINFO", ".CHANGELOG"]; @@ -290,3 +292,121 @@ impl From for package::ActiveModel { } } } + +pub fn filename(pkg: 
&package::Model) -> String { + format!( + "{}-{}-{}.pkg.tar.{}", + pkg.name, pkg.version, pkg.arch, pkg.compression + ) +} + +async fn write_attribute( + writer: &mut W, + key: &str, + value: &str, +) -> io::Result<()> { + if !value.is_empty() { + let s = format!("\n%{}%\n{}\n", key, value); + writer.write_all(s.as_bytes()).await?; + } + + Ok(()) +} + +pub async fn write_desc( + conn: &DbConn, + writer: &mut W, + pkg: &package::Model, +) -> crate::Result<()> { + writer + .write_all(format!("%FILENAME%\n{}\n", filename(pkg)).as_bytes()) + .await?; + + write_attribute(writer, "NAME", &pkg.name).await?; + write_attribute(writer, "BASE", &pkg.base).await?; + write_attribute(writer, "VERSION", &pkg.version).await?; + + if let Some(ref description) = pkg.description { + write_attribute(writer, "DESC", description).await?; + } + + let groups: Vec = pkg + .find_related(db::PackageGroup) + .select_only() + .column(db::package_group::Column::Name) + .into_tuple() + .all(conn) + .await?; + write_attribute(writer, "GROUPS", &groups.join("\n")).await?; + + write_attribute(writer, "CSIZE", &pkg.c_size.to_string()).await?; + write_attribute(writer, "ISIZE", &pkg.size.to_string()).await?; + write_attribute(writer, "SHA256SUM", &pkg.sha256_sum).await?; + + if let Some(ref url) = pkg.url { + write_attribute(writer, "URL", url).await?; + } + + let licenses: Vec = pkg + .find_related(db::PackageLicense) + .select_only() + .column(db::package_license::Column::Name) + .into_tuple() + .all(conn) + .await?; + write_attribute(writer, "LICENSE", &licenses.join("\n")).await?; + + write_attribute(writer, "ARCH", &pkg.arch).await?; + + // TODO build date + //write_attribute(writer, "BUILDDATE", &pkg.build_) + + if let Some(ref packager) = pkg.packager { + write_attribute(writer, "PACKAGER", packager).await?; + } + + let related = [ + ("REPLACES", PackageRelatedEnum::Replaces), + ("CONFLICTS", PackageRelatedEnum::Conflicts), + ("PROVIDES", PackageRelatedEnum::Provides), + ("DEPENDS", 
PackageRelatedEnum::Depend), + ("OPTDEPENDS", PackageRelatedEnum::Optdepend), + ("MAKEDEPENDS", PackageRelatedEnum::Makedepend), + ("CHECKDEPENDS", PackageRelatedEnum::Checkdepend), + ]; + + for (key, attr) in related.into_iter() { + let items: Vec = pkg + .find_related(db::PackageRelated) + .filter(db::package_related::Column::Type.eq(attr)) + .select_only() + .column(db::package_related::Column::Name) + .into_tuple() + .all(conn) + .await?; + + write_attribute(writer, key, &items.join("\n")).await?; + } + + Ok(()) +} + +pub async fn write_files( + conn: &DbConn, + writer: &mut W, + pkg: &package::Model, +) -> crate::Result<()> { + let line = "%FILES%\n"; + writer.write_all(line.as_bytes()).await?; + + // Generate the files list for the package + let mut files = pkg.find_related(db::PackageFile).stream(conn).await?; + + while let Some(file) = files.next().await.transpose()? { + writer + .write_all(format!("{}\n", file.path).as_bytes()) + .await?; + } + + Ok(()) +} From 633e67084095a672dc029e9bdbb7974ae897c226 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Mon, 27 May 2024 09:50:10 +0200 Subject: [PATCH 21/73] feat: partially implemented package GET request --- Cargo.lock | 1 + server/Cargo.toml | 1 + server/src/cli.rs | 3 ++ server/src/db/query/package.rs | 18 ++++++-- server/src/main.rs | 1 + server/src/repo/manager_new.rs | 13 ++++-- server/src/repo/mod.rs | 79 +++++++++++++--------------------- 7 files changed, 61 insertions(+), 55 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 333bc72..d0c00b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1660,6 +1660,7 @@ dependencies = [ "futures", "http-body-util", "libarchive", + "regex", "sea-orm", "sea-orm-migration", "serde", diff --git a/server/Cargo.toml b/server/Cargo.toml index c2c5d74..1d5dc1d 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -13,6 +13,7 @@ clap = { version = "4.3.12", features = ["env", "derive"] } futures = "0.3.28" http-body-util = "0.1.1" libarchive = { path = 
"../libarchive" } +regex = "1.10.4" sea-orm-migration = "0.12.1" serde = { version = "1.0.178", features = ["derive"] } sha256 = "1.1.4" diff --git a/server/src/cli.rs b/server/src/cli.rs index 0c802f2..35245c2 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -91,10 +91,13 @@ impl Cli { }; let repo_manager = MetaRepoMgr::new(&config.repo_dir, &self.pkg_dir); + let pkg_filename_re = regex::Regex::new(r"^([a-z0-9@_+][a-z0-9@_+-.]*)-((?:[0-9]+:)?[a-z0-9._]+-[0-9.]+)-([a-z0-9_]+)\.pkg\.tar\.([a-z0-9]+)$").unwrap(); + let global = Global { config, repo_manager: Arc::new(RwLock::new(repo_manager)), db, + pkg_filename_re, }; // build our application with a single route diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index ecd306e..c5ad135 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -47,13 +47,23 @@ pub async fn by_fields( repo_id: i32, arch: &str, name: &str, + version: Option<&str>, + compression: Option<&str>, ) -> Result> { - Package::find() + let mut query = Package::find() .filter(package::Column::RepoId.eq(repo_id)) .filter(package::Column::Name.eq(name)) - .filter(package::Column::Arch.eq(arch)) - .one(conn) - .await + .filter(package::Column::Arch.eq(arch)); + + if let Some(version) = version { + query = query.filter(package::Column::Version.eq(version)); + } + + if let Some(compression) = compression { + query = query.filter(package::Column::Compression.eq(compression)); + } + + query.one(conn).await } pub async fn delete_with_arch(conn: &DbConn, repo_id: i32, arch: &str) -> Result { diff --git a/server/src/main.rs b/server/src/main.rs index 7038203..6ea2a04 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -25,6 +25,7 @@ pub struct Global { config: Config, repo_manager: Arc>, db: sea_orm::DbConn, + pkg_filename_re: regex::Regex, } #[tokio::main] diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index 9728764..05cdc16 100644 --- 
a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -135,7 +135,7 @@ impl MetaRepoMgr { let repo = db::query::repo::by_name(conn, repo).await?; if let Some(repo) = repo { - let pkg = db::query::package::by_fields(conn, repo.id, arch, name).await?; + let pkg = db::query::package::by_fields(conn, repo.id, arch, name, None, None).await?; if let Some(pkg) = pkg { // Remove package from database @@ -180,8 +180,15 @@ impl MetaRepoMgr { }; // If the package already exists in the database, we remove it first - let res = - db::query::package::by_fields(conn, repo_id, &pkg.info.arch, &pkg.info.name).await?; + let res = db::query::package::by_fields( + conn, + repo_id, + &pkg.info.arch, + &pkg.info.name, + None, + None, + ) + .await?; if let Some(entry) = res { entry.delete(conn).await?; diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 1992e22..f63a489 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -19,6 +19,7 @@ use axum::routing::{delete, post}; use axum::Router; use futures::TryStreamExt; use futures::{Stream, StreamExt}; +use regex::Regex; use sea_orm::ModelTrait; use std::sync::Arc; use tokio::{fs, io::AsyncWriteExt}; @@ -56,52 +57,43 @@ pub fn router(api_key: &str) -> Router { /// is returned. async fn get_file( State(global): State, - Path((repo, arch, mut file_name)): Path<(String, String, String)>, + Path((repo, _arch, file_name)): Path<(String, String, String)>, req: Request, ) -> crate::Result { - let repo_dir = global.config.repo_dir.join(&repo).join(&arch); - let repo_exists = tokio::fs::try_exists(&repo_dir).await?; + // Query the repo to see if it exists + let repo = db::query::repo::by_name(&global.db, &repo) + .await? 
+ .ok_or(StatusCode::NOT_FOUND)?; - let res = if DB_FILE_EXTS.iter().any(|ext| file_name.ends_with(ext)) { - // Append tar extension to ensure we find the file - if !file_name.ends_with(".tar.gz") { - file_name.push_str(".tar.gz"); - }; + // Match the filename + let caps = global + .pkg_filename_re + .captures(&file_name) + .ok_or(StatusCode::NOT_FOUND)?; + let arch = caps.get(3).unwrap().as_str(); - if repo_exists { - ServeFile::new(repo_dir.join(file_name)).oneshot(req).await - } else { - let path = global - .config - .repo_dir - .join(repo) - .join(manager::ANY_ARCH) - .join(file_name); + // Query the package to see if it exists + let pkg = db::query::package::by_fields( + &global.db, + repo.id, + arch, + caps.get(1).unwrap().as_str(), + Some(caps.get(2).unwrap().as_str()), + Some(caps.get(4).unwrap().as_str()), + ) + .await? + .ok_or(StatusCode::NOT_FOUND)?; - ServeFile::new(path).oneshot(req).await - } - } else { - let any_file = global - .config - .pkg_dir - .join(repo) - .join(manager::ANY_ARCH) - .join(file_name); - - if repo_exists { - ServeDir::new(global.config.pkg_dir) - .fallback(ServeFile::new(any_file)) - .oneshot(req) - .await - } else { - ServeFile::new(any_file).oneshot(req).await - } - }; - - Ok(res) + // Serve the file if it idoes + let pkg_path = global + .config + .pkg_dir + .join(repo.name) + .join(arch) + .join(package::filename(&pkg)); + Ok(ServeFile::new(pkg_path).oneshot(req).await.unwrap()) } -#[axum::debug_handler] async fn post_package_archive( State(global): State, Path(repo): Path, @@ -122,7 +114,6 @@ async fn post_package_archive( //tracing::info!("Added '{}' to repository '{}'", pkg.file_name(), repo); } -#[axum::debug_handler] async fn delete_repo( State(global): State, Path(repo): Path, @@ -207,11 +198,3 @@ async fn delete_package( // Ok(StatusCode::NOT_FOUND) //} } - -fn help_me_figure_this_out(_: impl Send) {} - -#[allow(dead_code)] -fn assert_my_handler_is_ok() { - // This is the fun part: we need to call our handler, and 
*not* await it's Future. - help_me_figure_this_out(delete_repo) -} From 513a760040513765225ff83f2b008555d61eafa6 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Mon, 27 May 2024 10:59:32 +0200 Subject: [PATCH 22/73] feat: simplified repo structure; fully implemented repo db archives --- server/src/cli.rs | 7 +---- server/src/db/entities/package.rs | 3 ++- server/src/db/query/package.rs | 2 +- server/src/main.rs | 2 -- server/src/repo/manager_new.rs | 34 +++++++++++++----------- server/src/repo/mod.rs | 44 +++++++++---------------------- server/src/repo/package.rs | 9 +++++-- 7 files changed, 43 insertions(+), 58 deletions(-) diff --git a/server/src/cli.rs b/server/src/cli.rs index 35245c2..2b85492 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -16,9 +16,6 @@ use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; #[derive(Parser)] #[command(author, version, about, long_about = None)] pub struct Cli { - /// Directory where package archives will be stored - #[arg(env = "RIETER_PKG_DIR")] - pub pkg_dir: PathBuf, /// Directory where repository metadata & SQLite database is stored #[arg(env = "RIETER_DATA_DIR")] pub data_dir: PathBuf, @@ -85,11 +82,9 @@ impl Cli { let config = Config { data_dir: self.data_dir.clone(), - repo_dir: self.data_dir.join("repos"), - pkg_dir: self.pkg_dir.clone(), api_key: self.api_key.clone(), }; - let repo_manager = MetaRepoMgr::new(&config.repo_dir, &self.pkg_dir); + let repo_manager = MetaRepoMgr::new(&self.data_dir.join("repos")); let pkg_filename_re = regex::Regex::new(r"^([a-z0-9@_+][a-z0-9@_+-.]*)-((?:[0-9]+:)?[a-z0-9._]+-[0-9.]+)-([a-z0-9_]+)\.pkg\.tar\.([a-z0-9]+)$").unwrap(); diff --git a/server/src/db/entities/package.rs b/server/src/db/entities/package.rs index c09a310..112cde4 100644 --- a/server/src/db/entities/package.rs +++ b/server/src/db/entities/package.rs @@ -1,5 +1,6 @@ //! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.12.1 +use chrono::NaiveDateTime; use sea_orm::entity::prelude::*; use serde::{Deserialize, Serialize}; @@ -17,7 +18,7 @@ pub struct Model { pub c_size: i64, pub description: Option, pub url: Option, - pub build_date: String, + pub build_date: NaiveDateTime, pub packager: Option, pub pgp_sig: Option, pub pgp_sig_size: Option, diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index c5ad135..31f3464 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -88,7 +88,7 @@ pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Pack c_size: Set(info.csize), description: Set(info.description), url: Set(info.url), - build_date: Set(info.build_date.to_string()), + build_date: Set(info.build_date), packager: Set(info.packager), pgp_sig: Set(info.pgpsig), pgp_sig_size: Set(info.pgpsigsize), diff --git a/server/src/main.rs b/server/src/main.rs index 6ea2a04..eea6dc3 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -15,8 +15,6 @@ use tokio::sync::RwLock; #[derive(Clone)] pub struct Config { data_dir: PathBuf, - repo_dir: PathBuf, - pkg_dir: PathBuf, api_key: String, } diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index 05cdc16..b61dd57 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -5,7 +5,6 @@ use uuid::Uuid; use futures::StreamExt; use tokio::io::AsyncRead; -use tokio::io::AsyncWriteExt; use super::archive; use super::package; @@ -16,14 +15,12 @@ pub const ANY_ARCH: &'static str = "any"; pub struct MetaRepoMgr { repo_dir: PathBuf, - pkg_dir: PathBuf, } impl MetaRepoMgr { - pub fn new, P2: AsRef>(repo_dir: P1, pkg_dir: P2) -> Self { + pub fn new>(repo_dir: P) -> Self { MetaRepoMgr { repo_dir: repo_dir.as_ref().to_path_buf(), - pkg_dir: pkg_dir.as_ref().to_path_buf(), } } @@ -64,11 +61,14 @@ impl MetaRepoMgr { let repo = repo.unwrap(); - let parent_dir = self.repo_dir.join(&repo.name).join(arch); 
+ let parent_dir = self.repo_dir.join(&repo.name); tokio::fs::create_dir_all(&parent_dir).await?; + let ar_db = + archive::RepoArchiveWriter::open(parent_dir.join(format!("{}.db.tar.gz", arch))) + .await?; let ar_files = - archive::RepoArchiveWriter::open(parent_dir.join(format!("{}.db.tar.gz", repo.name))) + archive::RepoArchiveWriter::open(parent_dir.join(format!("{}.files.tar.gz", arch))) .await?; // Query all packages in the repo that have the given architecture or the "any" @@ -79,11 +79,12 @@ impl MetaRepoMgr { .stream(conn) .await?; + // Create two temp file paths to write our entries to let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let files_tmp_file_path = self.pkg_dir.join(uuid.to_string()); + let files_tmp_file_path = self.repo_dir.join(uuid.to_string()); let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let desc_tmp_file_path = self.pkg_dir.join(uuid.to_string()); + let desc_tmp_file_path = self.repo_dir.join(uuid.to_string()); while let Some(pkg) = pkgs.next().await.transpose()? 
{ let mut files_tmp_file = tokio::fs::File::create(&files_tmp_file_path).await?; @@ -93,6 +94,10 @@ impl MetaRepoMgr { package::write_desc(conn, &mut desc_tmp_file, &pkg).await?; let full_name = format!("{}-{}", pkg.name, pkg.version); + + ar_db + .add_entry(&full_name, &desc_tmp_file_path, true) + .await?; ar_files .add_entry(&full_name, &desc_tmp_file_path, true) .await?; @@ -101,7 +106,11 @@ impl MetaRepoMgr { .await?; } + // Cleanup + ar_db.close().await?; ar_files.close().await?; + + tokio::fs::remove_file(desc_tmp_file_path).await?; tokio::fs::remove_file(files_tmp_file_path).await?; Ok(()) @@ -117,7 +126,6 @@ impl MetaRepoMgr { // Remove files from file system tokio::fs::remove_dir_all(self.repo_dir.join(repo)).await?; - tokio::fs::remove_dir_all(self.pkg_dir.join(repo)).await?; Ok(true) } else { @@ -157,7 +165,7 @@ impl MetaRepoMgr { ) -> crate::Result<()> { // Copy file contents to temporary path so libarchive can work with it let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let path = self.pkg_dir.join(uuid.to_string()); + let path = self.repo_dir.join(uuid.to_string()); let mut temp_file = tokio::fs::File::create(&path).await?; tokio::io::copy(reader, &mut temp_file).await?; @@ -194,11 +202,7 @@ impl MetaRepoMgr { entry.delete(conn).await?; } - let dest_pkg_path = self - .pkg_dir - .join(repo) - .join(&pkg.info.arch) - .join(pkg.file_name()); + let dest_pkg_path = self.repo_dir.join(repo).join(pkg.file_name()); // Insert new package into database let arch = pkg.info.arch.clone(); diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index f63a489..a50f4fd 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -57,41 +57,23 @@ pub fn router(api_key: &str) -> Router { /// is returned. 
async fn get_file( State(global): State, - Path((repo, _arch, file_name)): Path<(String, String, String)>, + Path((repo, arch, file_name)): Path<(String, String, String)>, req: Request, ) -> crate::Result { - // Query the repo to see if it exists - let repo = db::query::repo::by_name(&global.db, &repo) - .await? - .ok_or(StatusCode::NOT_FOUND)?; + let repo_dir = global.config.data_dir.join("repos").join(&repo); - // Match the filename - let caps = global - .pkg_filename_re - .captures(&file_name) - .ok_or(StatusCode::NOT_FOUND)?; - let arch = caps.get(3).unwrap().as_str(); + let file_name = + if file_name == format!("{}.db", repo) || file_name == format!("{}.db.tar.gz", repo) { + format!("{}.db.tar.gz", arch) + } else if file_name == format!("{}.files", repo) + || file_name == format!("{}.files.tar.gz", repo) + { + format!("{}.files.tar.gz", arch) + } else { + file_name + }; - // Query the package to see if it exists - let pkg = db::query::package::by_fields( - &global.db, - repo.id, - arch, - caps.get(1).unwrap().as_str(), - Some(caps.get(2).unwrap().as_str()), - Some(caps.get(4).unwrap().as_str()), - ) - .await? 
- .ok_or(StatusCode::NOT_FOUND)?; - - // Serve the file if it idoes - let pkg_path = global - .config - .pkg_dir - .join(repo.name) - .join(arch) - .join(package::filename(&pkg)); - Ok(ServeFile::new(pkg_path).oneshot(req).await.unwrap()) + Ok(ServeFile::new(repo_dir.join(file_name)).oneshot(req).await) } async fn post_package_archive( diff --git a/server/src/repo/package.rs b/server/src/repo/package.rs index 7c9a778..980cbb6 100644 --- a/server/src/repo/package.rs +++ b/server/src/repo/package.rs @@ -283,7 +283,7 @@ impl From for package::ActiveModel { c_size: Set(info.csize), description: Set(info.description), url: Set(info.url), - build_date: Set(info.build_date.to_string()), + build_date: Set(info.build_date), packager: Set(info.packager), pgp_sig: Set(info.pgpsig), pgp_sig_size: Set(info.pgpsigsize), @@ -359,7 +359,12 @@ pub async fn write_desc( write_attribute(writer, "ARCH", &pkg.arch).await?; // TODO build date - //write_attribute(writer, "BUILDDATE", &pkg.build_) + write_attribute( + writer, + "BUILDDATE", + &pkg.build_date.and_utc().timestamp().to_string(), + ) + .await?; if let Some(ref packager) = pkg.packager { write_attribute(writer, "PACKAGER", packager).await?; From bf100049b1c0109b09fb39e189a731cbc469dea3 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Mon, 27 May 2024 13:33:44 +0200 Subject: [PATCH 23/73] feat: implement repo arch remove --- server/src/repo/manager_new.rs | 45 ++++++++++++++++++++++++++++++++++ server/src/repo/mod.rs | 39 +++++++++++------------------ 2 files changed, 59 insertions(+), 25 deletions(-) diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index b61dd57..e7ffe65 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -133,6 +133,51 @@ impl MetaRepoMgr { } } + /// Remove all packages from the repository with the given arch. 
+ pub async fn remove_repo_arch(&self, conn: &DbConn, repo: &str, arch: &str) -> Result { + let repo = db::query::repo::by_name(conn, repo).await?; + + if let Some(repo) = repo { + let mut pkgs = repo + .find_related(db::Package) + .filter(db::package::Column::Arch.eq(arch)) + .stream(conn) + .await?; + + while let Some(pkg) = pkgs.next().await.transpose()? { + let path = self + .repo_dir + .join(&repo.name) + .join(super::package::filename(&pkg)); + tokio::fs::remove_file(path).await?; + + pkg.delete(conn).await?; + } + + tokio::fs::remove_file( + self.repo_dir + .join(&repo.name) + .join(format!("{}.db.tar.gz", arch)), + ) + .await?; + tokio::fs::remove_file( + self.repo_dir + .join(&repo.name) + .join(format!("{}.files.tar.gz", arch)), + ) + .await?; + + // If we removed all "any" packages, we need to resync all databases + if arch == ANY_ARCH { + self.generate_archives_all(conn, &repo.name).await?; + } + + Ok(true) + } else { + Ok(false) + } + } + pub async fn remove_pkg( &self, conn: &DbConn, diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index a50f4fd..c535f0f 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -120,31 +120,20 @@ async fn delete_arch_repo( State(global): State, Path((repo, arch)): Path<(String, String)>, ) -> crate::Result { - Ok(StatusCode::NOT_FOUND) - //let clone = Arc::clone(&global.repo_manager); - // - //let arch_clone = arch.clone(); - //let repo_clone = repo.clone(); - //let repo_removed = tokio::task::spawn_blocking(move || { - // clone - // .write() - // .unwrap() - // .remove_repo_arch(&repo_clone, &arch_clone) - //}) - //.await??; - // - //if repo_removed { - // let res = db::query::repo::by_name(&global.db, &repo).await?; - // - // if let Some(repo_entry) = res { - // db::query::package::delete_with_arch(&global.db, repo_entry.id, &arch).await?; - // } - // tracing::info!("Removed architecture '{}' from repository '{}'", arch, repo); - // - // Ok(StatusCode::OK) - //} else { - // 
Ok(StatusCode::NOT_FOUND) - //} + let repo_removed = global + .repo_manager + .write() + .await + .remove_repo_arch(&global.db, &repo, &arch) + .await?; + + if repo_removed { + tracing::info!("Removed arch '{}' from repository '{}'", arch, repo); + + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } } async fn delete_package( From 32e27978ec2aa51d0f4feadedd404b22d3d4dcda Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Mon, 27 May 2024 13:47:53 +0200 Subject: [PATCH 24/73] chore: bit of cleanup --- server/src/cli.rs | 3 +-- server/src/db/query/repo.rs | 5 ++++- server/src/main.rs | 2 -- server/src/repo/archive.rs | 9 +-------- server/src/repo/mod.rs | 14 +------------- 5 files changed, 7 insertions(+), 26 deletions(-) diff --git a/server/src/cli.rs b/server/src/cli.rs index 2b85492..2d61ad4 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -1,4 +1,4 @@ -use crate::repo::{MetaRepoMgr, RepoGroupManager}; +use crate::repo::MetaRepoMgr; use crate::{Config, Global}; use axum::extract::FromRef; @@ -82,7 +82,6 @@ impl Cli { let config = Config { data_dir: self.data_dir.clone(), - api_key: self.api_key.clone(), }; let repo_manager = MetaRepoMgr::new(&self.data_dir.join("repos")); diff --git a/server/src/db/query/repo.rs b/server/src/db/query/repo.rs index 399d227..dfc92ad 100644 --- a/server/src/db/query/repo.rs +++ b/server/src/db/query/repo.rs @@ -9,7 +9,10 @@ pub struct Filter { impl IntoCondition for Filter { fn into_condition(self) -> Condition { - Condition::all().add_option(self.name.map(|name| package::Column::Name.like(name))) + Condition::all().add_option( + self.name + .map(|name| repo::Column::Name.like(format!("%{}%", name))), + ) } } diff --git a/server/src/main.rs b/server/src/main.rs index eea6dc3..32360e2 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -7,7 +7,6 @@ mod repo; use clap::Parser; pub use error::{Result, ServerError}; use repo::MetaRepoMgr; -use repo::RepoGroupManager; use std::path::PathBuf; use 
std::sync::Arc; use tokio::sync::RwLock; @@ -15,7 +14,6 @@ use tokio::sync::RwLock; #[derive(Clone)] pub struct Config { data_dir: PathBuf, - api_key: String, } #[derive(Clone)] diff --git a/server/src/repo/archive.rs b/server/src/repo/archive.rs index 8f7fb49..bce0d1a 100644 --- a/server/src/repo/archive.rs +++ b/server/src/repo/archive.rs @@ -1,17 +1,10 @@ -use std::io::{self, Write}; +use std::io; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; -use tokio::sync::{mpsc, oneshot}; use libarchive::write::{Builder, FileWriter, WriteEntry}; use libarchive::{Entry, WriteFilter, WriteFormat}; -enum Message { - AppendFilesEntry(oneshot::Sender>, String), - AppendLine(oneshot::Sender>, String), - Close(oneshot::Sender>), -} - /// Struct to abstract away the intrinsics of writing entries to an archive file pub struct RepoArchiveWriter { ar: Arc>, diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index c535f0f..87cfda1 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -3,13 +3,9 @@ mod manager; mod manager_new; pub mod package; -pub use manager::RepoGroupManager; pub use manager_new::MetaRepoMgr; use tokio_util::io::StreamReader; -use std::path::PathBuf; - -use crate::db; use axum::body::Body; use axum::extract::{Path, State}; use axum::http::Request; @@ -18,17 +14,9 @@ use axum::response::IntoResponse; use axum::routing::{delete, post}; use axum::Router; use futures::TryStreamExt; -use futures::{Stream, StreamExt}; -use regex::Regex; -use sea_orm::ModelTrait; -use std::sync::Arc; -use tokio::{fs, io::AsyncWriteExt}; use tower::util::ServiceExt; -use tower_http::services::{ServeDir, ServeFile}; +use tower_http::services::ServeFile; use tower_http::validate_request::ValidateRequestHeaderLayer; -use uuid::Uuid; - -const DB_FILE_EXTS: [&str; 4] = [".db", ".files", ".db.tar.gz", ".files.tar.gz"]; pub fn router(api_key: &str) -> Router { Router::new() From 48f2d00c943e5b9f789131c0bb91091f5c534497 Mon Sep 17 00:00:00 2001 From: 
Chewing_Bever Date: Mon, 27 May 2024 22:56:37 +0200 Subject: [PATCH 25/73] chore: clean up imports --- server/Cargo.toml | 3 ++- server/src/api/mod.rs | 12 ++++++------ server/src/cli.rs | 16 +++++++--------- server/src/db/mod.rs | 7 +++---- server/src/db/query/package.rs | 4 ++-- server/src/db/query/repo.rs | 4 ++-- server/src/error.rs | 10 +++++----- server/src/main.rs | 6 +++--- server/src/repo/archive.rs | 14 +++++++++----- server/src/repo/manager_new.rs | 13 +++++-------- server/src/repo/mod.rs | 20 ++++++++++---------- server/src/repo/package.rs | 20 ++++++++++++-------- 12 files changed, 66 insertions(+), 63 deletions(-) diff --git a/server/Cargo.toml b/server/Cargo.toml index 1d5dc1d..03ff93e 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -32,5 +32,6 @@ features = [ "sqlx-postgres", "runtime-tokio-rustls", "macros", - "with-chrono" + "with-chrono", + "debug-print" ] diff --git a/server/src/api/mod.rs b/server/src/api/mod.rs index fd54cd8..0a0a56e 100644 --- a/server/src/api/mod.rs +++ b/server/src/api/mod.rs @@ -1,13 +1,13 @@ mod pagination; -use axum::extract::{Path, Query, State}; -use axum::routing::get; -use axum::Json; -use axum::Router; - +use crate::db; use pagination::PaginatedResponse; -use crate::db; +use axum::{ + extract::{Path, Query, State}, + routing::get, + Json, Router, +}; pub fn router() -> Router { Router::new() diff --git a/server/src/cli.rs b/server/src/cli.rs index 2d61ad4..db7f705 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -1,13 +1,9 @@ -use crate::repo::MetaRepoMgr; -use crate::{Config, Global}; +use crate::{repo::MetaRepoMgr, Config, Global}; -use axum::extract::FromRef; -use axum::Router; +use axum::{extract::FromRef, Router}; use clap::Parser; use sea_orm_migration::MigratorTrait; -use std::io; -use std::path::PathBuf; -use std::sync::Arc; +use std::{io, path::PathBuf, sync::Arc}; use tokio::sync::RwLock; use tower_http::trace::TraceLayer; use tracing::debug; @@ -40,7 +36,7 @@ pub struct Cli { #[arg( 
long, value_name = "LOG_LEVEL", - default_value = "tower_http=debug,rieterd=debug", + default_value = "tower_http=debug,rieterd=debug,sea_orm=debug", env = "RIETER_LOG" )] pub log: String, @@ -67,7 +63,7 @@ impl Cli { url.clone() } else { format!( - "sqlite://{}", + "sqlite://{}?mode=rwc", self.data_dir.join("rieter.sqlite").to_string_lossy() ) }; @@ -80,6 +76,8 @@ impl Cli { let db = sea_orm::Database::connect(options).await?; crate::db::Migrator::up(&db, None).await?; + debug!("Successfully applied migrations"); + let config = Config { data_dir: self.data_dir.clone(), }; diff --git a/server/src/db/mod.rs b/server/src/db/mod.rs index 0194079..597cf20 100644 --- a/server/src/db/mod.rs +++ b/server/src/db/mod.rs @@ -2,13 +2,12 @@ pub mod entities; mod migrator; pub mod query; -use sea_orm::{DeriveActiveEnum, EnumIter}; - -use serde::{Deserialize, Serialize}; - pub use entities::{prelude::*, *}; pub use migrator::Migrator; +use sea_orm::{DeriveActiveEnum, EnumIter}; +use serde::{Deserialize, Serialize}; + type Result = std::result::Result; #[derive(EnumIter, DeriveActiveEnum, Serialize, Deserialize, PartialEq, Eq, Clone, Debug)] diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index 31f3464..8a211bf 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -1,8 +1,8 @@ +use crate::db::*; + use sea_orm::{sea_query::IntoCondition, *}; use serde::Deserialize; -use crate::db::*; - #[derive(Deserialize)] pub struct Filter { repo: Option, diff --git a/server/src/db/query/repo.rs b/server/src/db/query/repo.rs index dfc92ad..94627f7 100644 --- a/server/src/db/query/repo.rs +++ b/server/src/db/query/repo.rs @@ -1,7 +1,7 @@ -use sea_orm::{sea_query::IntoCondition, *}; - use crate::db::*; +use sea_orm::{sea_query::IntoCondition, *}; + #[derive(Deserialize)] pub struct Filter { name: Option, diff --git a/server/src/error.rs b/server/src/error.rs index d0ba36b..5c3e920 100644 --- a/server/src/error.rs +++ b/server/src/error.rs 
@@ -1,9 +1,9 @@ -use std::error::Error; -use std::fmt; -use std::io; +use std::{error::Error, fmt, io}; -use axum::http::StatusCode; -use axum::response::{IntoResponse, Response}; +use axum::{ + http::StatusCode, + response::{IntoResponse, Response}, +}; pub type Result = std::result::Result; diff --git a/server/src/main.rs b/server/src/main.rs index 32360e2..c539b9e 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -4,11 +4,11 @@ pub mod db; mod error; mod repo; -use clap::Parser; pub use error::{Result, ServerError}; use repo::MetaRepoMgr; -use std::path::PathBuf; -use std::sync::Arc; + +use clap::Parser; +use std::{path::PathBuf, sync::Arc}; use tokio::sync::RwLock; #[derive(Clone)] diff --git a/server/src/repo/archive.rs b/server/src/repo/archive.rs index bce0d1a..d103f13 100644 --- a/server/src/repo/archive.rs +++ b/server/src/repo/archive.rs @@ -1,9 +1,13 @@ -use std::io; -use std::path::{Path, PathBuf}; -use std::sync::{Arc, Mutex}; +use std::{ + io, + path::{Path, PathBuf}, + sync::{Arc, Mutex}, +}; -use libarchive::write::{Builder, FileWriter, WriteEntry}; -use libarchive::{Entry, WriteFilter, WriteFormat}; +use libarchive::{ + write::{Builder, FileWriter, WriteEntry}, + Entry, WriteFilter, WriteFormat, +}; /// Struct to abstract away the intrinsics of writing entries to an archive file pub struct RepoArchiveWriter { diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index e7ffe65..4d60bcf 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -1,15 +1,12 @@ +use super::{archive, package}; +use crate::{db, error::Result}; + use std::path::{Path, PathBuf}; -use sea_orm::{ColumnTrait, DbConn, ModelTrait, QueryFilter, QuerySelect}; -use uuid::Uuid; - use futures::StreamExt; +use sea_orm::{ColumnTrait, DbConn, ModelTrait, QueryFilter, QuerySelect}; use tokio::io::AsyncRead; - -use super::archive; -use super::package; -use crate::db; -use crate::error::Result; +use uuid::Uuid; pub const ANY_ARCH: 
&'static str = "any"; diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 87cfda1..072ceef 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -4,19 +4,19 @@ mod manager_new; pub mod package; pub use manager_new::MetaRepoMgr; -use tokio_util::io::StreamReader; -use axum::body::Body; -use axum::extract::{Path, State}; -use axum::http::Request; -use axum::http::StatusCode; -use axum::response::IntoResponse; -use axum::routing::{delete, post}; -use axum::Router; +use axum::{ + body::Body, + extract::{Path, State}, + http::{Request, StatusCode}, + response::IntoResponse, + routing::{delete, post}, + Router, +}; use futures::TryStreamExt; +use tokio_util::io::StreamReader; use tower::util::ServiceExt; -use tower_http::services::ServeFile; -use tower_http::validate_request::ValidateRequestHeaderLayer; +use tower_http::{services::ServeFile, validate_request::ValidateRequestHeaderLayer}; pub fn router(api_key: &str) -> Router { Router::new() diff --git a/server/src/repo/package.rs b/server/src/repo/package.rs index 980cbb6..24979eb 100644 --- a/server/src/repo/package.rs +++ b/server/src/repo/package.rs @@ -1,16 +1,20 @@ +use crate::db::{self, entities::package, PackageRelatedEnum}; + +use std::{ + fmt, fs, + io::{self, BufRead, BufReader, BufWriter, Read, Write}, + path::{Path, PathBuf}, +}; + use chrono::NaiveDateTime; use futures::StreamExt; -use libarchive::read::{Archive, Builder}; -use libarchive::{Entry, ReadFilter}; +use libarchive::{ + read::{Archive, Builder}, + Entry, ReadFilter, +}; use sea_orm::{ActiveValue::Set, ColumnTrait, DbConn, ModelTrait, QueryFilter, QuerySelect}; -use std::fmt; -use std::fs; -use std::io::{self, BufRead, BufReader, BufWriter, Read, Write}; -use std::path::{Path, PathBuf}; use tokio::io::{AsyncWrite, AsyncWriteExt}; -use crate::db::{self, entities::package, PackageRelatedEnum}; - const IGNORED_FILES: [&str; 5] = [".BUILDINFO", ".INSTALL", ".MTREE", ".PKGINFO", ".CHANGELOG"]; #[derive(Debug, Clone)] From 
044d3a33eb636bda630c6b431d19297f45ed750b Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Tue, 28 May 2024 16:22:54 +0200 Subject: [PATCH 26/73] doc: write goals in readme --- README.md | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 3bdb5a4..ed400e6 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,31 @@ -# rieter +# Rieter +Rieter is both a Pacman repository server, as well as a build system for Pacman +packages. + +## Goals + +### Repository server + +My first goal for this project is to create a convenient all-round repository +server implementation that could be used for everything from self-hosting a +local repository to managing an entire distribution's package repository. It +should be easy to deploy, lightweight, and work with any distribution. It +should support any number of repositories and packages, and work with any +package architecture. + +The repositories can be populated by manually uploading packages to the server +(e.g. from a CI build), or by mirroring already existing repositories. The +mirroring feature in particular makes it trivial to set up a new mirror for a +distribution, as the server would take care of keeping the mirror up-to-date. +Another usecase for this would be creating a local mirror of your +distribution's repositories, which can greatly reduce your update times +depending on your internet connection. + +### Build system + +The second goal is to create an easy-to-use build system for Pacman packages. +This could for example be used to automatically build AUR packages and publish +them to one of your repositories. This can greatly reduce update times, as you +no longer need to build AUR packages locally, as this automatically happens "in +the cloud". 
From 60d4478d836663bb7c9d29e28e1d1fc00e8d66aa Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 29 May 2024 09:58:19 +0200 Subject: [PATCH 27/73] feat: re-implement package remove route --- Cargo.lock | 1 - README.md | 7 +++++ server/Cargo.toml | 1 - server/src/cli.rs | 3 --- server/src/main.rs | 1 - server/src/repo/manager_new.rs | 21 ++++++++++++--- server/src/repo/mod.rs | 49 ++++++++++++++-------------------- 7 files changed, 45 insertions(+), 38 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d0c00b0..333bc72 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1660,7 +1660,6 @@ dependencies = [ "futures", "http-body-util", "libarchive", - "regex", "sea-orm", "sea-orm-migration", "serde", diff --git a/README.md b/README.md index ed400e6..1111131 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,13 @@ Another usecase for this would be creating a local mirror of your distribution's repositories, which can greatly reduce your update times depending on your internet connection. +Most users however don't need a full copy of a distro's package repository, so +Rieter also provides a "smart mirror" mode. In this mode, a Rieter instance +only syncs packages that have been requested before, e.g. from a previous +system update. This way, your updates will still be a lot faster as the +required packages are cached, but packages you don't use don't get stored, +saving you a lot of storage space. + ### Build system The second goal is to create an easy-to-use build system for Pacman packages. 
diff --git a/server/Cargo.toml b/server/Cargo.toml index 03ff93e..cd86713 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -13,7 +13,6 @@ clap = { version = "4.3.12", features = ["env", "derive"] } futures = "0.3.28" http-body-util = "0.1.1" libarchive = { path = "../libarchive" } -regex = "1.10.4" sea-orm-migration = "0.12.1" serde = { version = "1.0.178", features = ["derive"] } sha256 = "1.1.4" diff --git a/server/src/cli.rs b/server/src/cli.rs index db7f705..036c70b 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -83,13 +83,10 @@ impl Cli { }; let repo_manager = MetaRepoMgr::new(&self.data_dir.join("repos")); - let pkg_filename_re = regex::Regex::new(r"^([a-z0-9@_+][a-z0-9@_+-.]*)-((?:[0-9]+:)?[a-z0-9._]+-[0-9.]+)-([a-z0-9_]+)\.pkg\.tar\.([a-z0-9]+)$").unwrap(); - let global = Global { config, repo_manager: Arc::new(RwLock::new(repo_manager)), db, - pkg_filename_re, }; // build our application with a single route diff --git a/server/src/main.rs b/server/src/main.rs index c539b9e..c57420a 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -21,7 +21,6 @@ pub struct Global { config: Config, repo_manager: Arc>, db: sea_orm::DbConn, - pkg_filename_re: regex::Regex, } #[tokio::main] diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index 4d60bcf..5ac4e13 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -107,8 +107,10 @@ impl MetaRepoMgr { ar_db.close().await?; ar_files.close().await?; - tokio::fs::remove_file(desc_tmp_file_path).await?; - tokio::fs::remove_file(files_tmp_file_path).await?; + // If this fails there's no point in failing the function + if there were no packages in + // the repo, this fails anyway because the temp file doesn't exist + let _ = tokio::fs::remove_file(desc_tmp_file_path).await; + let _ = tokio::fs::remove_file(files_tmp_file_path).await; Ok(()) } @@ -188,8 +190,21 @@ impl MetaRepoMgr { let pkg = db::query::package::by_fields(conn, repo.id, arch, name, 
None, None).await?; if let Some(pkg) = pkg { - // Remove package from database + // Remove package from database & file system + tokio::fs::remove_file( + self.repo_dir + .join(&repo.name) + .join(super::package::filename(&pkg)), + ) + .await?; pkg.delete(conn).await?; + + if arch == ANY_ARCH { + self.generate_archives_all(conn, &repo.name).await?; + } else { + self.generate_archives(conn, &repo.name, arch).await?; + } + Ok(true) } else { Ok(false) diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 072ceef..fb00804 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -126,34 +126,25 @@ async fn delete_arch_repo( async fn delete_package( State(global): State, - Path((repo, arch, file_name)): Path<(String, String, String)>, + Path((repo, arch, pkg_name)): Path<(String, String, String)>, ) -> crate::Result { - Ok(StatusCode::NOT_FOUND) - //global.repo_manager.write().unwrap().remove_pkg(&global.db, &repo, &arch, name) - //let clone = Arc::clone(&global.repo_manager); - //let path = PathBuf::from(&repo).join(arch).join(&file_name); - // - //let res = tokio::task::spawn_blocking(move || { - // clone.write().unwrap().remove_pkg_from_path(path, true) - //}) - //.await??; - // - //if let Some((name, version, release, arch)) = res { - // let res = db::query::repo::by_name(&global.db, &repo).await?; - // - // if let Some(repo_entry) = res { - // let res = - // db::query::package::by_fields(&global.db, repo_entry.id, &arch, &name).await?; - // - // if let Some(entry) = res { - // entry.delete(&global.db).await?; - // } - // } - // - // tracing::info!("Removed '{}' from repository '{}'", file_name, repo); - // - // Ok(StatusCode::OK) - //} else { - // Ok(StatusCode::NOT_FOUND) - //} + let pkg_removed = global + .repo_manager + .write() + .await + .remove_pkg(&global.db, &repo, &arch, &pkg_name) + .await?; + + if pkg_removed { + tracing::info!( + "Removed package '{}' ({}) from repository '{}'", + pkg_name, + arch, + repo + ); + + 
Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } } From fc844c685f3d392981ef9864524585c97d305ebf Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 29 May 2024 10:02:38 +0200 Subject: [PATCH 28/73] feat: log added packages --- server/src/db/query/package.rs | 2 +- server/src/repo/manager_new.rs | 10 +++++++--- server/src/repo/mod.rs | 10 ++++------ 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index 8a211bf..6cd709f 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -133,7 +133,7 @@ pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Pack .chain( info.makedepends .iter() - .map(|s| (PackageRelatedEnum::Depend, s)), + .map(|s| (PackageRelatedEnum::Makedepend, s)), ) .chain( info.checkdepends diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs index 5ac4e13..adf37a9 100644 --- a/server/src/repo/manager_new.rs +++ b/server/src/repo/manager_new.rs @@ -219,7 +219,7 @@ impl MetaRepoMgr { conn: &DbConn, reader: &mut R, repo: &str, - ) -> crate::Result<()> { + ) -> crate::Result<(String, String, String)> { // Copy file contents to temporary path so libarchive can work with it let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); let path = self.repo_dir.join(uuid.to_string()); @@ -262,6 +262,8 @@ impl MetaRepoMgr { let dest_pkg_path = self.repo_dir.join(repo).join(pkg.file_name()); // Insert new package into database + let name = pkg.info.name.clone(); + let version = pkg.info.version.clone(); let arch = pkg.info.arch.clone(); db::query::package::insert(conn, repo_id, pkg).await?; @@ -271,9 +273,11 @@ impl MetaRepoMgr { // Synchronize archive databases if arch == ANY_ARCH { - self.generate_archives_all(conn, repo).await + self.generate_archives_all(conn, repo).await?; } else { - self.generate_archives(conn, repo, &arch).await + self.generate_archives(conn, repo, &arch).await?; } + + Ok((name, 
version, arch)) } } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index fb00804..f5e48d4 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -69,19 +69,17 @@ async fn post_package_archive( Path(repo): Path, body: Body, ) -> crate::Result<()> { - let body = body.into_data_stream(); - let body = body.map_err(std::io::Error::other); - let mut body = StreamReader::new(body); - global + let mut body = StreamReader::new(body.into_data_stream().map_err(std::io::Error::other)); + let (name, version, arch) = global .repo_manager .write() .await .add_pkg_from_reader(&global.db, &mut body, &repo) .await?; - Ok(()) + tracing::info!("Added '{}-{}' to repository '{}' ({})", name, version, repo, arch); - //tracing::info!("Added '{}' to repository '{}'", pkg.file_name(), repo); + Ok(()) } async fn delete_repo( From 0b1c8b640f45e633156ddb1573e0cee5e48bb0d2 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 29 May 2024 13:26:02 +0200 Subject: [PATCH 29/73] refactor: remove old manager; some refactoring --- server/src/db/query/package.rs | 62 ++--- server/src/db/query/repo.rs | 8 +- server/src/repo/manager.rs | 494 ++++++++++++++++----------------- server/src/repo/manager_new.rs | 283 ------------------- server/src/repo/mod.rs | 11 +- 5 files changed, 270 insertions(+), 588 deletions(-) delete mode 100644 server/src/repo/manager_new.rs diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index 6cd709f..c76e532 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -1,4 +1,4 @@ -use crate::db::*; +use crate::db::{self, *}; use sea_orm::{sea_query::IntoCondition, *}; use serde::Deserialize; @@ -50,20 +50,14 @@ pub async fn by_fields( version: Option<&str>, compression: Option<&str>, ) -> Result> { - let mut query = Package::find() - .filter(package::Column::RepoId.eq(repo_id)) - .filter(package::Column::Name.eq(name)) - .filter(package::Column::Arch.eq(arch)); + let cond = Condition::all() + 
.add(package::Column::RepoId.eq(repo_id)) + .add(package::Column::Name.eq(name)) + .add(package::Column::Arch.eq(arch)) + .add_option(version.map(|version| package::Column::Version.eq(version))) + .add_option(compression.map(|compression| package::Column::Compression.eq(compression))); - if let Some(version) = version { - query = query.filter(package::Column::Version.eq(version)); - } - - if let Some(compression) = compression { - query = query.filter(package::Column::Compression.eq(compression)); - } - - query.one(conn).await + Package::find().filter(cond).one(conn).await } pub async fn delete_with_arch(conn: &DbConn, repo_id: i32, arch: &str) -> Result { @@ -168,34 +162,34 @@ pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Pack pub async fn full(conn: &DbConn, id: i32) -> Result> { if let Some(entry) = by_id(conn, id).await? { - let licenses = entry + let licenses: Vec = entry .find_related(PackageLicense) + .select_only() + .column(package_license::Column::Name) + .into_tuple() .all(conn) - .await? - .into_iter() - .map(|e| e.name) - .collect(); - let groups = entry + .await?; + let groups: Vec = entry .find_related(PackageGroup) + .select_only() + .column(package_group::Column::Name) + .into_tuple() .all(conn) - .await? - .into_iter() - .map(|e| e.name) - .collect(); - let related = entry + .await?; + let related: Vec<(db::PackageRelatedEnum, String)> = entry .find_related(PackageRelated) + .select_only() + .columns([package_related::Column::Type, package_related::Column::Name]) + .into_tuple() .all(conn) - .await? - .into_iter() - .map(|e| (e.r#type, e.name)) - .collect(); - let files = entry + .await?; + let files: Vec = entry .find_related(PackageFile) + .select_only() + .column(package_file::Column::Path) + .into_tuple() .all(conn) - .await? 
- .into_iter() - .map(|e| e.path) - .collect(); + .await?; Ok(Some(FullPackage { entry, diff --git a/server/src/db/query/repo.rs b/server/src/db/query/repo.rs index 94627f7..0370c2b 100644 --- a/server/src/db/query/repo.rs +++ b/server/src/db/query/repo.rs @@ -43,16 +43,12 @@ pub async fn by_name(conn: &DbConn, name: &str) -> Result> { .await } -pub async fn insert( - conn: &DbConn, - name: &str, - description: Option<&str>, -) -> Result> { +pub async fn insert(conn: &DbConn, name: &str, description: Option<&str>) -> Result { let model = repo::ActiveModel { id: NotSet, name: Set(String::from(name)), description: Set(description.map(String::from)), }; - Repo::insert(model).exec(conn).await + model.insert(conn).await } diff --git a/server/src/repo/manager.rs b/server/src/repo/manager.rs index c288f30..ebfd01c 100644 --- a/server/src/repo/manager.rs +++ b/server/src/repo/manager.rs @@ -1,311 +1,281 @@ -use super::package::Package; -use libarchive::write::{Builder, WriteEntry}; -use libarchive::{Entry, WriteFilter, WriteFormat}; -use std::fs; -use std::io; +use super::{archive, package}; +use crate::{db, error::Result}; + use std::path::{Path, PathBuf}; -pub const ANY_ARCH: &str = "any"; +use futures::StreamExt; +use sea_orm::{ColumnTrait, DbConn, ModelTrait, QueryFilter, QuerySelect}; +use tokio::io::AsyncRead; +use uuid::Uuid; -/// Overarching abstraction that orchestrates updating the repositories stored on the server -pub struct RepoGroupManager { +pub const ANY_ARCH: &'static str = "any"; + +pub struct MetaRepoMgr { repo_dir: PathBuf, - pkg_dir: PathBuf, } -fn parse_pkg_filename(file_name: &str) -> (String, &str, &str, &str) { - let name_parts = file_name.split('-').collect::>(); - let name = name_parts[..name_parts.len() - 3].join("-"); - let version = name_parts[name_parts.len() - 3]; - let release = name_parts[name_parts.len() - 2]; - let (arch, _) = name_parts[name_parts.len() - 1].split_once('.').unwrap(); - - (name, version, release, arch) -} - -impl 
RepoGroupManager { - pub fn new, P2: AsRef>(repo_dir: P1, pkg_dir: P2) -> Self { - RepoGroupManager { +impl MetaRepoMgr { + pub fn new>(repo_dir: P) -> Self { + MetaRepoMgr { repo_dir: repo_dir.as_ref().to_path_buf(), - pkg_dir: pkg_dir.as_ref().to_path_buf(), } } - pub fn sync(&mut self, repo: &str, arch: &str) -> io::Result<()> { - let subrepo_path = self.repo_dir.join(repo).join(arch); + /// Generate archive databases for all known architectures in the repository, including the + /// "any" architecture. + pub async fn generate_archives_all(&self, conn: &DbConn, repo: &str) -> Result<()> { + let repo = crate::db::query::repo::by_name(conn, repo).await?; - let mut ar_db = Builder::new(); - ar_db.add_filter(WriteFilter::Gzip)?; - ar_db.set_format(WriteFormat::PaxRestricted)?; - - let mut ar_files = Builder::new(); - ar_files.add_filter(WriteFilter::Gzip)?; - ar_files.set_format(WriteFormat::PaxRestricted)?; - - let mut ar_db = ar_db.open_file(subrepo_path.join(format!("{}.db.tar.gz", repo)))?; - let mut ar_files = - ar_files.open_file(subrepo_path.join(format!("{}.files.tar.gz", repo)))?; - - // All architectures should also include the "any" architecture, except for the "any" - // architecture itself. - let repo_any_dir = self.repo_dir.join(repo).join(ANY_ARCH); - - let any_entries_iter = if arch != ANY_ARCH && repo_any_dir.try_exists()? { - Some(repo_any_dir.read_dir()?) 
- } else { - None - } - .into_iter() - .flatten(); - - for entry in subrepo_path.read_dir()?.chain(any_entries_iter) { - let entry = entry?; - - if entry.file_type()?.is_dir() { - // The desc file needs to be added to both archives - let path_in_tar = PathBuf::from(entry.file_name()).join("desc"); - let src_path = entry.path().join("desc"); - let metadata = src_path.metadata()?; - - let mut ar_entry = WriteEntry::new(); - ar_entry.set_pathname(&path_in_tar); - // These small text files will definitely fit inside an i64 - ar_entry.set_size(metadata.len().try_into().unwrap()); - ar_entry.set_filetype(libarchive::archive::FileType::RegularFile); - ar_entry.set_mode(0o100644); - - ar_db.append_path(&mut ar_entry, &src_path)?; - ar_files.append_path(&mut ar_entry, src_path)?; - - // The files file is only required in the files database - let path_in_tar = PathBuf::from(entry.file_name()).join("files"); - let src_path = entry.path().join("files"); - let metadata = src_path.metadata()?; - - let mut ar_entry = WriteEntry::new(); - ar_entry.set_filetype(libarchive::archive::FileType::RegularFile); - ar_entry.set_pathname(&path_in_tar); - ar_entry.set_mode(0o100644); - // These small text files will definitely fit inside an i64 - ar_entry.set_size(metadata.len().try_into().unwrap()); - - ar_files.append_path(&mut ar_entry, src_path)?; - } + if repo.is_none() { + return Ok(()); } - ar_db.close()?; - ar_files.close()?; + let repo = repo.unwrap(); - Ok(()) - } + let mut archs = repo + .find_related(crate::db::Package) + .select_only() + .column(crate::db::package::Column::Arch) + .distinct() + .into_tuple::() + .stream(conn) + .await?; - /// Synchronize all present architectures' db archives in the given repository. - pub fn sync_all(&mut self, repo: &str) -> io::Result<()> { - for entry in self.repo_dir.join(repo).read_dir()? 
{ - let entry = entry?; - - if entry.file_type()?.is_dir() { - self.sync(repo, &entry.file_name().to_string_lossy())?; - } + while let Some(arch) = archs.next().await.transpose()? { + self.generate_archives(conn, &repo.name, &arch).await?; } Ok(()) } - pub fn add_pkg_from_path>( - &mut self, - repo: &str, - path: P, - ) -> io::Result { - let pkg = Package::open(&path)?; + /// Generate the archive databases for the given repository and architecture. + pub async fn generate_archives(&self, conn: &DbConn, repo: &str, arch: &str) -> Result<()> { + let repo = crate::db::query::repo::by_name(conn, repo).await?; - self.add_pkg(repo, &pkg)?; - - // After successfully adding the package, we move it to the packages directory - let dest_pkg_path = self - .pkg_dir - .join(repo) - .join(&pkg.info.arch) - .join(pkg.file_name()); - - fs::create_dir_all(dest_pkg_path.parent().unwrap())?; - fs::rename(&path, dest_pkg_path)?; - - Ok(pkg) - } - - /// Add a package to the given repo, returning to what architectures the package was added. 
- pub fn add_pkg(&mut self, repo: &str, pkg: &Package) -> io::Result<()> { - // TODO - // * if arch is "any", check if package doesn't already exist for other architecture - // * if arch isn't "any", check if package doesn't already exist for "any" architecture - - // We first remove any existing version of the package - self.remove_pkg(repo, &pkg.info.arch, &pkg.info.name, false)?; - - // Write the `desc` and `files` metadata files to disk - let metadata_dir = self - .repo_dir - .join(repo) - .join(&pkg.info.arch) - .join(format!("{}-{}", pkg.info.name, pkg.info.version)); - - fs::create_dir_all(&metadata_dir)?; - - let mut desc_file = fs::File::create(metadata_dir.join("desc"))?; - pkg.write_desc(&mut desc_file)?; - - let mut files_file = fs::File::create(metadata_dir.join("files"))?; - pkg.write_files(&mut files_file)?; - - // If a package of type "any" is added, we need to update every existing database - if pkg.info.arch == ANY_ARCH { - self.sync_all(repo)?; - } else { - self.sync(repo, &pkg.info.arch)?; + if repo.is_none() { + return Ok(()); } + let repo = repo.unwrap(); + + let parent_dir = self.repo_dir.join(&repo.name); + tokio::fs::create_dir_all(&parent_dir).await?; + + let ar_db = + archive::RepoArchiveWriter::open(parent_dir.join(format!("{}.db.tar.gz", arch))) + .await?; + let ar_files = + archive::RepoArchiveWriter::open(parent_dir.join(format!("{}.files.tar.gz", arch))) + .await?; + + // Query all packages in the repo that have the given architecture or the "any" + // architecture + let mut pkgs = repo + .find_related(crate::db::Package) + .filter(db::package::Column::Arch.is_in([arch, ANY_ARCH])) + .stream(conn) + .await?; + + // Create two temp file paths to write our entries to + let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); + let files_tmp_file_path = self.repo_dir.join(uuid.to_string()); + + let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); + let desc_tmp_file_path = self.repo_dir.join(uuid.to_string()); + + while let Some(pkg) = 
pkgs.next().await.transpose()? { + let mut files_tmp_file = tokio::fs::File::create(&files_tmp_file_path).await?; + let mut desc_tmp_file = tokio::fs::File::create(&desc_tmp_file_path).await?; + + package::write_files(conn, &mut files_tmp_file, &pkg).await?; + package::write_desc(conn, &mut desc_tmp_file, &pkg).await?; + + let full_name = format!("{}-{}", pkg.name, pkg.version); + + ar_db + .add_entry(&full_name, &desc_tmp_file_path, true) + .await?; + ar_files + .add_entry(&full_name, &desc_tmp_file_path, true) + .await?; + ar_files + .add_entry(&full_name, &files_tmp_file_path, false) + .await?; + } + + // Cleanup + ar_db.close().await?; + ar_files.close().await?; + + // If this fails there's no point in failing the function + if there were no packages in + // the repo, this fails anyway because the temp file doesn't exist + let _ = tokio::fs::remove_file(desc_tmp_file_path).await; + let _ = tokio::fs::remove_file(files_tmp_file_path).await; + Ok(()) } - pub fn remove_repo(&mut self, repo: &str) -> io::Result { - let repo_dir = self.repo_dir.join(repo); + /// Remove the repo with the given name, if it existed + pub async fn remove_repo(&self, conn: &DbConn, repo: &str) -> Result { + let res = db::query::repo::by_name(conn, repo).await?; - if !repo_dir.exists() { - Ok(false) - } else { - fs::remove_dir_all(&repo_dir)?; - fs::remove_dir_all(self.pkg_dir.join(repo))?; + if let Some(repo_entry) = res { + // Remove repository from database + repo_entry.delete(conn).await?; + + // Remove files from file system + tokio::fs::remove_dir_all(self.repo_dir.join(repo)).await?; Ok(true) + } else { + Ok(false) } } - pub fn remove_repo_arch(&mut self, repo: &str, arch: &str) -> io::Result { - let sub_path = PathBuf::from(repo).join(arch); - let repo_dir = self.repo_dir.join(&sub_path); + /// Remove all packages from the repository with the given arch. 
+ pub async fn remove_repo_arch(&self, conn: &DbConn, repo: &str, arch: &str) -> Result { + let repo = db::query::repo::by_name(conn, repo).await?; - if !repo_dir.exists() { - return Ok(false); + if let Some(repo) = repo { + let mut pkgs = repo + .find_related(db::Package) + .filter(db::package::Column::Arch.eq(arch)) + .stream(conn) + .await?; + + while let Some(pkg) = pkgs.next().await.transpose()? { + let path = self + .repo_dir + .join(&repo.name) + .join(super::package::filename(&pkg)); + tokio::fs::remove_file(path).await?; + + pkg.delete(conn).await?; + } + + tokio::fs::remove_file( + self.repo_dir + .join(&repo.name) + .join(format!("{}.db.tar.gz", arch)), + ) + .await?; + tokio::fs::remove_file( + self.repo_dir + .join(&repo.name) + .join(format!("{}.files.tar.gz", arch)), + ) + .await?; + + // If we removed all "any" packages, we need to resync all databases + if arch == ANY_ARCH { + self.generate_archives_all(conn, &repo.name).await?; + } + + Ok(true) + } else { + Ok(false) } - - fs::remove_dir_all(&repo_dir)?; - fs::remove_dir_all(self.pkg_dir.join(sub_path))?; - - // Removing the "any" architecture updates all other repositories - if arch == ANY_ARCH { - self.sync_all(repo)?; - } - - Ok(true) } - pub fn remove_pkg( - &mut self, + pub async fn remove_pkg( + &self, + conn: &DbConn, repo: &str, arch: &str, - pkg_name: &str, - sync: bool, - ) -> io::Result { - let repo_arch_dir = self.repo_dir.join(repo).join(arch); + name: &str, + ) -> Result { + let repo = db::query::repo::by_name(conn, repo).await?; - if !repo_arch_dir.exists() { - return Ok(false); - } + if let Some(repo) = repo { + let pkg = db::query::package::by_fields(conn, repo.id, arch, name, None, None).await?; - for entry in repo_arch_dir.read_dir()? 
{ - let entry = entry?; + if let Some(pkg) = pkg { + // Remove package from database & file system + tokio::fs::remove_file( + self.repo_dir + .join(&repo.name) + .join(super::package::filename(&pkg)), + ) + .await?; + pkg.delete(conn).await?; - // Make sure we skip the archive files - if !entry.metadata()?.is_dir() { - continue; - } - - let file_name = entry.file_name(); - let file_name = file_name.to_string_lossy(); - - // The directory name should only contain the name of the package. The last two parts - // when splitting on a dash are the pkgver and pkgrel, so we trim those - let name_parts = file_name.split('-').collect::>(); - let name = name_parts[..name_parts.len() - 2].join("-"); - - if name == pkg_name { - fs::remove_dir_all(entry.path())?; - - // Also remove the old package archive - let repo_arch_pkg_dir = self.pkg_dir.join(repo).join(arch); - - repo_arch_pkg_dir.read_dir()?.try_for_each(|res| { - res.and_then(|entry: fs::DirEntry| { - let file_name = entry.file_name(); - let file_name = file_name.to_string_lossy(); - let (name, _, _, _) = parse_pkg_filename(&file_name); - - if name == pkg_name { - fs::remove_file(entry.path()) - } else { - Ok(()) - } - }) - })?; - - if sync { - if arch == ANY_ARCH { - self.sync_all(repo)?; - } else { - self.sync(repo, arch)?; - } + if arch == ANY_ARCH { + self.generate_archives_all(conn, &repo.name).await?; + } else { + self.generate_archives(conn, &repo.name, arch).await?; } - return Ok(true); - } - } - - Ok(false) - } - - /// Wrapper around `remove_pkg` that accepts a path relative to the package directory to a - /// package archive. - pub fn remove_pkg_from_path>( - &mut self, - path: P, - sync: bool, - ) -> io::Result> { - let path = path.as_ref(); - let components: Vec<_> = path.iter().collect(); - - if let [repo, _arch, file_name] = components[..] { - let full_path = self.pkg_dir.join(path); - - if full_path.try_exists()? 
{ - let file_name = file_name.to_string_lossy(); - let (name, version, release, arch) = parse_pkg_filename(&file_name); - - let metadata_dir_name = format!("{}-{}-{}", name, version, release); - - // Remove package archive and entry in database - fs::remove_file(full_path)?; - fs::remove_dir_all(self.repo_dir.join(repo).join(arch).join(metadata_dir_name))?; - - if sync { - if arch == ANY_ARCH { - self.sync_all(&repo.to_string_lossy())?; - } else { - self.sync(&repo.to_string_lossy(), arch)?; - } - } - - Ok(Some(( - name, - version.to_string(), - release.to_string(), - arch.to_string(), - ))) + Ok(true) } else { - Ok(None) + Ok(false) } } else { - Ok(None) + Ok(false) } } + + pub async fn add_pkg_from_reader( + &self, + conn: &DbConn, + reader: &mut R, + repo: &str, + ) -> crate::Result<(String, String, String)> { + // Copy file contents to temporary path so libarchive can work with it + let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); + let path = self.repo_dir.join(uuid.to_string()); + let mut temp_file = tokio::fs::File::create(&path).await?; + + tokio::io::copy(reader, &mut temp_file).await?; + + // Parse the package + let path_clone = path.clone(); + let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)) + .await + .unwrap()?; + + // Query the repo for its ID, or create it if it does not already exist + let res = db::query::repo::by_name(conn, &repo).await?; + + let repo_id = if let Some(repo_entity) = res { + repo_entity.id + } else { + db::query::repo::insert(conn, repo, None).await?.id + }; + + // If the package already exists in the database, we remove it first + let res = db::query::package::by_fields( + conn, + repo_id, + &pkg.info.arch, + &pkg.info.name, + None, + None, + ) + .await?; + + if let Some(entry) = res { + entry.delete(conn).await?; + } + + let dest_pkg_path = self.repo_dir.join(repo).join(pkg.file_name()); + + // Insert new package into database + let name = pkg.info.name.clone(); + let version = 
pkg.info.version.clone(); + let arch = pkg.info.arch.clone(); + db::query::package::insert(conn, repo_id, pkg).await?; + + // Move the package to its final resting place + tokio::fs::create_dir_all(dest_pkg_path.parent().unwrap()).await?; + tokio::fs::rename(path, dest_pkg_path).await?; + + // Synchronize archive databases + if arch == ANY_ARCH { + self.generate_archives_all(conn, repo).await?; + } else { + self.generate_archives(conn, repo, &arch).await?; + } + + Ok((name, version, arch)) + } } diff --git a/server/src/repo/manager_new.rs b/server/src/repo/manager_new.rs deleted file mode 100644 index adf37a9..0000000 --- a/server/src/repo/manager_new.rs +++ /dev/null @@ -1,283 +0,0 @@ -use super::{archive, package}; -use crate::{db, error::Result}; - -use std::path::{Path, PathBuf}; - -use futures::StreamExt; -use sea_orm::{ColumnTrait, DbConn, ModelTrait, QueryFilter, QuerySelect}; -use tokio::io::AsyncRead; -use uuid::Uuid; - -pub const ANY_ARCH: &'static str = "any"; - -pub struct MetaRepoMgr { - repo_dir: PathBuf, -} - -impl MetaRepoMgr { - pub fn new>(repo_dir: P) -> Self { - MetaRepoMgr { - repo_dir: repo_dir.as_ref().to_path_buf(), - } - } - - /// Generate archive databases for all known architectures in the repository, including the - /// "any" architecture. - pub async fn generate_archives_all(&self, conn: &DbConn, repo: &str) -> Result<()> { - let repo = crate::db::query::repo::by_name(conn, repo).await?; - - if repo.is_none() { - return Ok(()); - } - - let repo = repo.unwrap(); - - let mut archs = repo - .find_related(crate::db::Package) - .select_only() - .column(crate::db::package::Column::Arch) - .distinct() - .into_tuple::() - .stream(conn) - .await?; - - while let Some(arch) = archs.next().await.transpose()? { - self.generate_archives(conn, &repo.name, &arch).await?; - } - - Ok(()) - } - - /// Generate the archive databases for the given repository and architecture. 
- pub async fn generate_archives(&self, conn: &DbConn, repo: &str, arch: &str) -> Result<()> { - let repo = crate::db::query::repo::by_name(conn, repo).await?; - - if repo.is_none() { - return Ok(()); - } - - let repo = repo.unwrap(); - - let parent_dir = self.repo_dir.join(&repo.name); - tokio::fs::create_dir_all(&parent_dir).await?; - - let ar_db = - archive::RepoArchiveWriter::open(parent_dir.join(format!("{}.db.tar.gz", arch))) - .await?; - let ar_files = - archive::RepoArchiveWriter::open(parent_dir.join(format!("{}.files.tar.gz", arch))) - .await?; - - // Query all packages in the repo that have the given architecture or the "any" - // architecture - let mut pkgs = repo - .find_related(crate::db::Package) - .filter(db::package::Column::Arch.is_in([arch, ANY_ARCH])) - .stream(conn) - .await?; - - // Create two temp file paths to write our entries to - let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let files_tmp_file_path = self.repo_dir.join(uuid.to_string()); - - let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let desc_tmp_file_path = self.repo_dir.join(uuid.to_string()); - - while let Some(pkg) = pkgs.next().await.transpose()? 
{ - let mut files_tmp_file = tokio::fs::File::create(&files_tmp_file_path).await?; - let mut desc_tmp_file = tokio::fs::File::create(&desc_tmp_file_path).await?; - - package::write_files(conn, &mut files_tmp_file, &pkg).await?; - package::write_desc(conn, &mut desc_tmp_file, &pkg).await?; - - let full_name = format!("{}-{}", pkg.name, pkg.version); - - ar_db - .add_entry(&full_name, &desc_tmp_file_path, true) - .await?; - ar_files - .add_entry(&full_name, &desc_tmp_file_path, true) - .await?; - ar_files - .add_entry(&full_name, &files_tmp_file_path, false) - .await?; - } - - // Cleanup - ar_db.close().await?; - ar_files.close().await?; - - // If this fails there's no point in failing the function + if there were no packages in - // the repo, this fails anyway because the temp file doesn't exist - let _ = tokio::fs::remove_file(desc_tmp_file_path).await; - let _ = tokio::fs::remove_file(files_tmp_file_path).await; - - Ok(()) - } - - /// Remove the repo with the given name, if it existed - pub async fn remove_repo(&self, conn: &DbConn, repo: &str) -> Result { - let res = db::query::repo::by_name(conn, repo).await?; - - if let Some(repo_entry) = res { - // Remove repository from database - repo_entry.delete(conn).await?; - - // Remove files from file system - tokio::fs::remove_dir_all(self.repo_dir.join(repo)).await?; - - Ok(true) - } else { - Ok(false) - } - } - - /// Remove all packages from the repository with the given arch. - pub async fn remove_repo_arch(&self, conn: &DbConn, repo: &str, arch: &str) -> Result { - let repo = db::query::repo::by_name(conn, repo).await?; - - if let Some(repo) = repo { - let mut pkgs = repo - .find_related(db::Package) - .filter(db::package::Column::Arch.eq(arch)) - .stream(conn) - .await?; - - while let Some(pkg) = pkgs.next().await.transpose()? 
{ - let path = self - .repo_dir - .join(&repo.name) - .join(super::package::filename(&pkg)); - tokio::fs::remove_file(path).await?; - - pkg.delete(conn).await?; - } - - tokio::fs::remove_file( - self.repo_dir - .join(&repo.name) - .join(format!("{}.db.tar.gz", arch)), - ) - .await?; - tokio::fs::remove_file( - self.repo_dir - .join(&repo.name) - .join(format!("{}.files.tar.gz", arch)), - ) - .await?; - - // If we removed all "any" packages, we need to resync all databases - if arch == ANY_ARCH { - self.generate_archives_all(conn, &repo.name).await?; - } - - Ok(true) - } else { - Ok(false) - } - } - - pub async fn remove_pkg( - &self, - conn: &DbConn, - repo: &str, - arch: &str, - name: &str, - ) -> Result { - let repo = db::query::repo::by_name(conn, repo).await?; - - if let Some(repo) = repo { - let pkg = db::query::package::by_fields(conn, repo.id, arch, name, None, None).await?; - - if let Some(pkg) = pkg { - // Remove package from database & file system - tokio::fs::remove_file( - self.repo_dir - .join(&repo.name) - .join(super::package::filename(&pkg)), - ) - .await?; - pkg.delete(conn).await?; - - if arch == ANY_ARCH { - self.generate_archives_all(conn, &repo.name).await?; - } else { - self.generate_archives(conn, &repo.name, arch).await?; - } - - Ok(true) - } else { - Ok(false) - } - } else { - Ok(false) - } - } - - pub async fn add_pkg_from_reader( - &self, - conn: &DbConn, - reader: &mut R, - repo: &str, - ) -> crate::Result<(String, String, String)> { - // Copy file contents to temporary path so libarchive can work with it - let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let path = self.repo_dir.join(uuid.to_string()); - let mut temp_file = tokio::fs::File::create(&path).await?; - - tokio::io::copy(reader, &mut temp_file).await?; - - // Parse the package - let path_clone = path.clone(); - let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)) - .await - .unwrap()?; - - // Query the repo for its ID, or create it if it 
does not already exist
-        let res = db::query::repo::by_name(conn, &repo).await?;
-
-        let repo_id = if let Some(repo_entity) = res {
-            repo_entity.id
-        } else {
-            db::query::repo::insert(conn, repo, None)
-                .await?
-                .last_insert_id
-        };
-
-        // If the package already exists in the database, we remove it first
-        let res = db::query::package::by_fields(
-            conn,
-            repo_id,
-            &pkg.info.arch,
-            &pkg.info.name,
-            None,
-            None,
-        )
-        .await?;
-
-        if let Some(entry) = res {
-            entry.delete(conn).await?;
-        }
-
-        let dest_pkg_path = self.repo_dir.join(repo).join(pkg.file_name());
-
-        // Insert new package into database
-        let name = pkg.info.name.clone();
-        let version = pkg.info.version.clone();
-        let arch = pkg.info.arch.clone();
-        db::query::package::insert(conn, repo_id, pkg).await?;
-
-        // Move the package to its final resting place
-        tokio::fs::create_dir_all(dest_pkg_path.parent().unwrap()).await?;
-        tokio::fs::rename(path, dest_pkg_path).await?;
-
-        // Synchronize archive databases
-        if arch == ANY_ARCH {
-            self.generate_archives_all(conn, repo).await?;
-        } else {
-            self.generate_archives(conn, repo, &arch).await?;
-        }
-
-        Ok((name, version, arch))
-    }
-}
diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs
index f5e48d4..9721451 100644
--- a/server/src/repo/mod.rs
+++ b/server/src/repo/mod.rs
@@ -1,9 +1,8 @@
 mod archive;
 mod manager;
-mod manager_new;
 pub mod package;
 
-pub use manager_new::MetaRepoMgr;
+pub use manager::MetaRepoMgr;
 
 use axum::{
     body::Body,
@@ -77,7 +76,13 @@ async fn post_package_archive(
         .add_pkg_from_reader(&global.db, &mut body, &repo)
         .await?;
 
-    tracing::info!("Added '{}-{}' to repository '{}' ({})", name, version, repo, arch);
+    tracing::info!(
+        "Added '{}-{}' to repository '{}' ({})",
+        name,
+        version,
+        repo,
+        arch
+    );
 
     Ok(())
 }
From ecc33f01534a4db31e4b3e233f8e32805d65934b Mon Sep 17 00:00:00 2001
From: Chewing_Bever
Date: Wed, 29 May 2024 15:04:20 +0200
Subject: [PATCH 30/73] feat: automatically update db archives

---
server/src/cli.rs | 2 +- server/src/repo/archive.rs | 15 +---- server/src/repo/manager.rs | 112 +++++++++++++++++++------------------ server/src/repo/mod.rs | 13 ++--- 4 files changed, 67 insertions(+), 75 deletions(-) diff --git a/server/src/cli.rs b/server/src/cli.rs index 036c70b..2419606 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -81,7 +81,7 @@ impl Cli { let config = Config { data_dir: self.data_dir.clone(), }; - let repo_manager = MetaRepoMgr::new(&self.data_dir.join("repos")); + let repo_manager = MetaRepoMgr::new(&self.data_dir.join("repos"), db.clone()); let global = Global { config, diff --git a/server/src/repo/archive.rs b/server/src/repo/archive.rs index d103f13..a979c09 100644 --- a/server/src/repo/archive.rs +++ b/server/src/repo/archive.rs @@ -30,11 +30,13 @@ impl RepoArchiveWriter { .unwrap()?; Ok(Self { + // In practice, mutex is only ever used by one thread at a time. It's simply here so we + // can use spawn_blocking without issues. ar: Arc::new(Mutex::new(ar)), }) } - /// Set the current entry to be a new "files" list + /// Add either a "desc" or "files" entry to the archive pub async fn add_entry>( &self, full_name: &str, @@ -73,15 +75,4 @@ impl RepoArchiveWriter { .unwrap()?, ) } - - // - ///// Append the given line to the currently active entry - //pub async fn write_line(&self, line: &str) -> io::Result<()> { - // let line = String::from(line); - // let (tx, rx) = oneshot::channel(); - // - // self.tx.send(Message::AppendLine(tx, line)).await; - // - // rx.await.unwrap() - //} } diff --git a/server/src/repo/manager.rs b/server/src/repo/manager.rs index ebfd01c..e2d9c4d 100644 --- a/server/src/repo/manager.rs +++ b/server/src/repo/manager.rs @@ -12,19 +12,21 @@ pub const ANY_ARCH: &'static str = "any"; pub struct MetaRepoMgr { repo_dir: PathBuf, + conn: DbConn, } impl MetaRepoMgr { - pub fn new>(repo_dir: P) -> Self { + pub fn new>(repo_dir: P, conn: DbConn) -> Self { MetaRepoMgr { repo_dir: repo_dir.as_ref().to_path_buf(), + 
conn, } } /// Generate archive databases for all known architectures in the repository, including the /// "any" architecture. - pub async fn generate_archives_all(&self, conn: &DbConn, repo: &str) -> Result<()> { - let repo = crate::db::query::repo::by_name(conn, repo).await?; + pub async fn generate_archives_all(&self, repo: &str) -> Result<()> { + let repo = crate::db::query::repo::by_name(&self.conn, repo).await?; if repo.is_none() { return Ok(()); @@ -38,19 +40,19 @@ impl MetaRepoMgr { .column(crate::db::package::Column::Arch) .distinct() .into_tuple::() - .stream(conn) + .stream(&self.conn) .await?; while let Some(arch) = archs.next().await.transpose()? { - self.generate_archives(conn, &repo.name, &arch).await?; + self.generate_archives(&repo.name, &arch).await?; } Ok(()) } /// Generate the archive databases for the given repository and architecture. - pub async fn generate_archives(&self, conn: &DbConn, repo: &str, arch: &str) -> Result<()> { - let repo = crate::db::query::repo::by_name(conn, repo).await?; + pub async fn generate_archives(&self, repo: &str, arch: &str) -> Result<()> { + let repo = crate::db::query::repo::by_name(&self.conn, repo).await?; if repo.is_none() { return Ok(()); @@ -61,34 +63,25 @@ impl MetaRepoMgr { let parent_dir = self.repo_dir.join(&repo.name); tokio::fs::create_dir_all(&parent_dir).await?; - let ar_db = - archive::RepoArchiveWriter::open(parent_dir.join(format!("{}.db.tar.gz", arch))) - .await?; - let ar_files = - archive::RepoArchiveWriter::open(parent_dir.join(format!("{}.files.tar.gz", arch))) - .await?; + let [tmp_ar_db_path, tmp_ar_files_path, files_tmp_file_path, desc_tmp_file_path] = + self.random_file_paths(); + let ar_db = archive::RepoArchiveWriter::open(&tmp_ar_db_path).await?; + let ar_files = archive::RepoArchiveWriter::open(&tmp_ar_files_path).await?; // Query all packages in the repo that have the given architecture or the "any" // architecture let mut pkgs = repo .find_related(crate::db::Package) 
.filter(db::package::Column::Arch.is_in([arch, ANY_ARCH])) - .stream(conn) + .stream(&self.conn) .await?; - // Create two temp file paths to write our entries to - let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let files_tmp_file_path = self.repo_dir.join(uuid.to_string()); - - let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let desc_tmp_file_path = self.repo_dir.join(uuid.to_string()); - while let Some(pkg) = pkgs.next().await.transpose()? { let mut files_tmp_file = tokio::fs::File::create(&files_tmp_file_path).await?; let mut desc_tmp_file = tokio::fs::File::create(&desc_tmp_file_path).await?; - package::write_files(conn, &mut files_tmp_file, &pkg).await?; - package::write_desc(conn, &mut desc_tmp_file, &pkg).await?; + package::write_files(&self.conn, &mut files_tmp_file, &pkg).await?; + package::write_desc(&self.conn, &mut desc_tmp_file, &pkg).await?; let full_name = format!("{}-{}", pkg.name, pkg.version); @@ -107,6 +100,18 @@ impl MetaRepoMgr { ar_db.close().await?; ar_files.close().await?; + // Move the db archives to their respective places + tokio::fs::rename( + tmp_ar_db_path, + parent_dir.join(format!("{}.db.tar.gz", arch)), + ) + .await?; + tokio::fs::rename( + tmp_ar_files_path, + parent_dir.join(format!("{}.files.tar.gz", arch)), + ) + .await?; + // If this fails there's no point in failing the function + if there were no packages in // the repo, this fails anyway because the temp file doesn't exist let _ = tokio::fs::remove_file(desc_tmp_file_path).await; @@ -116,12 +121,12 @@ impl MetaRepoMgr { } /// Remove the repo with the given name, if it existed - pub async fn remove_repo(&self, conn: &DbConn, repo: &str) -> Result { - let res = db::query::repo::by_name(conn, repo).await?; + pub async fn remove_repo(&self, repo: &str) -> Result { + let res = db::query::repo::by_name(&self.conn, repo).await?; if let Some(repo_entry) = res { // Remove repository from database - repo_entry.delete(conn).await?; + repo_entry.delete(&self.conn).await?; 
// Remove files from file system tokio::fs::remove_dir_all(self.repo_dir.join(repo)).await?; @@ -133,14 +138,14 @@ impl MetaRepoMgr { } /// Remove all packages from the repository with the given arch. - pub async fn remove_repo_arch(&self, conn: &DbConn, repo: &str, arch: &str) -> Result { - let repo = db::query::repo::by_name(conn, repo).await?; + pub async fn remove_repo_arch(&self, repo: &str, arch: &str) -> Result { + let repo = db::query::repo::by_name(&self.conn, repo).await?; if let Some(repo) = repo { let mut pkgs = repo .find_related(db::Package) .filter(db::package::Column::Arch.eq(arch)) - .stream(conn) + .stream(&self.conn) .await?; while let Some(pkg) = pkgs.next().await.transpose()? { @@ -150,7 +155,7 @@ impl MetaRepoMgr { .join(super::package::filename(&pkg)); tokio::fs::remove_file(path).await?; - pkg.delete(conn).await?; + pkg.delete(&self.conn).await?; } tokio::fs::remove_file( @@ -168,7 +173,7 @@ impl MetaRepoMgr { // If we removed all "any" packages, we need to resync all databases if arch == ANY_ARCH { - self.generate_archives_all(conn, &repo.name).await?; + self.generate_archives_all(&repo.name).await?; } Ok(true) @@ -177,17 +182,12 @@ impl MetaRepoMgr { } } - pub async fn remove_pkg( - &self, - conn: &DbConn, - repo: &str, - arch: &str, - name: &str, - ) -> Result { - let repo = db::query::repo::by_name(conn, repo).await?; + pub async fn remove_pkg(&self, repo: &str, arch: &str, name: &str) -> Result { + let repo = db::query::repo::by_name(&self.conn, repo).await?; if let Some(repo) = repo { - let pkg = db::query::package::by_fields(conn, repo.id, arch, name, None, None).await?; + let pkg = + db::query::package::by_fields(&self.conn, repo.id, arch, name, None, None).await?; if let Some(pkg) = pkg { // Remove package from database & file system @@ -197,12 +197,12 @@ impl MetaRepoMgr { .join(super::package::filename(&pkg)), ) .await?; - pkg.delete(conn).await?; + pkg.delete(&self.conn).await?; if arch == ANY_ARCH { - 
self.generate_archives_all(conn, &repo.name).await?; + self.generate_archives_all(&repo.name).await?; } else { - self.generate_archives(conn, &repo.name, arch).await?; + self.generate_archives(&repo.name, arch).await?; } Ok(true) @@ -216,13 +216,11 @@ impl MetaRepoMgr { pub async fn add_pkg_from_reader( &self, - conn: &DbConn, reader: &mut R, repo: &str, ) -> crate::Result<(String, String, String)> { // Copy file contents to temporary path so libarchive can work with it - let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - let path = self.repo_dir.join(uuid.to_string()); + let [path] = self.random_file_paths(); let mut temp_file = tokio::fs::File::create(&path).await?; tokio::io::copy(reader, &mut temp_file).await?; @@ -234,17 +232,17 @@ impl MetaRepoMgr { .unwrap()?; // Query the repo for its ID, or create it if it does not already exist - let res = db::query::repo::by_name(conn, &repo).await?; + let res = db::query::repo::by_name(&self.conn, &repo).await?; let repo_id = if let Some(repo_entity) = res { repo_entity.id } else { - db::query::repo::insert(conn, repo, None).await?.id + db::query::repo::insert(&self.conn, repo, None).await?.id }; // If the package already exists in the database, we remove it first let res = db::query::package::by_fields( - conn, + &self.conn, repo_id, &pkg.info.arch, &pkg.info.name, @@ -254,7 +252,7 @@ impl MetaRepoMgr { .await?; if let Some(entry) = res { - entry.delete(conn).await?; + entry.delete(&self.conn).await?; } let dest_pkg_path = self.repo_dir.join(repo).join(pkg.file_name()); @@ -263,7 +261,7 @@ impl MetaRepoMgr { let name = pkg.info.name.clone(); let version = pkg.info.version.clone(); let arch = pkg.info.arch.clone(); - db::query::package::insert(conn, repo_id, pkg).await?; + db::query::package::insert(&self.conn, repo_id, pkg).await?; // Move the package to its final resting place tokio::fs::create_dir_all(dest_pkg_path.parent().unwrap()).await?; @@ -271,11 +269,19 @@ impl MetaRepoMgr { // Synchronize archive 
databases if arch == ANY_ARCH { - self.generate_archives_all(conn, repo).await?; + self.generate_archives_all(repo).await?; } else { - self.generate_archives(conn, repo, &arch).await?; + self.generate_archives(repo, &arch).await?; } Ok((name, version, arch)) } + + /// Generate a path to a unique file that can be used as a temporary file + pub fn random_file_paths(&self) -> [PathBuf; C] { + std::array::from_fn(|_| { + let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); + self.repo_dir.join(uuid.to_string()) + }) + } } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 9721451..2f2dacb 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -73,7 +73,7 @@ async fn post_package_archive( .repo_manager .write() .await - .add_pkg_from_reader(&global.db, &mut body, &repo) + .add_pkg_from_reader(&mut body, &repo) .await?; tracing::info!( @@ -91,12 +91,7 @@ async fn delete_repo( State(global): State, Path(repo): Path, ) -> crate::Result { - let repo_removed = global - .repo_manager - .write() - .await - .remove_repo(&global.db, &repo) - .await?; + let repo_removed = global.repo_manager.write().await.remove_repo(&repo).await?; if repo_removed { tracing::info!("Removed repository '{}'", repo); @@ -115,7 +110,7 @@ async fn delete_arch_repo( .repo_manager .write() .await - .remove_repo_arch(&global.db, &repo, &arch) + .remove_repo_arch(&repo, &arch) .await?; if repo_removed { @@ -135,7 +130,7 @@ async fn delete_package( .repo_manager .write() .await - .remove_pkg(&global.db, &repo, &arch, &pkg_name) + .remove_pkg(&repo, &arch, &pkg_name) .await?; if pkg_removed { From 5e1dfd22da4440b0251ad6ce00e3e59a8ce35fe9 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 29 May 2024 21:37:56 +0200 Subject: [PATCH 31/73] feat: add distro table --- server/src/db/entities/distro.rs | 18 +++++++++++ server/src/db/entities/mod.rs | 1 + server/src/db/entities/prelude.rs | 1 + .../m20230730_000001_create_repo_tables.rs | 32 +++++++++++++++++++ 4 files changed, 52 
insertions(+) create mode 100644 server/src/db/entities/distro.rs diff --git a/server/src/db/entities/distro.rs b/server/src/db/entities/distro.rs new file mode 100644 index 0000000..d819fae --- /dev/null +++ b/server/src/db/entities/distro.rs @@ -0,0 +1,18 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.1 + +use sea_orm::entity::prelude::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] +#[sea_orm(table_name = "distro")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: i32, + pub name: String, + pub description: Option, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/server/src/db/entities/mod.rs b/server/src/db/entities/mod.rs index ab8f32a..b38dbb1 100644 --- a/server/src/db/entities/mod.rs +++ b/server/src/db/entities/mod.rs @@ -2,6 +2,7 @@ pub mod prelude; +pub mod distro; pub mod package; pub mod package_file; pub mod package_group; diff --git a/server/src/db/entities/prelude.rs b/server/src/db/entities/prelude.rs index 1a6e503..8ebe873 100644 --- a/server/src/db/entities/prelude.rs +++ b/server/src/db/entities/prelude.rs @@ -1,5 +1,6 @@ //! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.12.1 +pub use super::distro::Entity as Distro; pub use super::package::Entity as Package; pub use super::package_file::Entity as PackageFile; pub use super::package_group::Entity as PackageGroup; diff --git a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs index 9d76afc..45ea97c 100644 --- a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs +++ b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs @@ -11,6 +11,27 @@ impl MigrationName for Migration { #[async_trait::async_trait] impl MigrationTrait for Migration { async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .create_table( + Table::create() + .table(Distro::Table) + .col( + ColumnDef::new(Distro::Id) + .integer() + .not_null() + .auto_increment() + .primary_key(), + ) + .col( + ColumnDef::new(Distro::Name) + .string() + .not_null() + .unique_key(), + ) + .col(ColumnDef::new(Distro::Description).string()) + .to_owned(), + ) + .await?; manager .create_table( Table::create() @@ -192,10 +213,21 @@ impl MigrationTrait for Migration { .await?; manager .drop_table(Table::drop().table(Repo::Table).to_owned()) + .await?; + manager + .drop_table(Table::drop().table(Distro::Table).to_owned()) .await } } +#[derive(Iden)] +pub enum Distro { + Table, + Id, + Name, + Description, +} + #[derive(Iden)] pub enum Repo { Table, From 58def483aae9fe87578ca038156da4bc6904b6a9 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Thu, 30 May 2024 09:42:28 +0200 Subject: [PATCH 32/73] feat: added distro routes and manager --- server/src/cli.rs | 18 ++-- server/src/db/entities/distro.rs | 11 ++- server/src/db/entities/repo.rs | 15 ++++ .../m20230730_000001_create_repo_tables.rs | 9 ++ server/src/db/query/distro.rs | 47 ++++++++++ server/src/db/query/mod.rs | 1 + server/src/db/query/repo.rs | 8 +- server/src/distro.rs | 69 +++++++++++++++ server/src/main.rs | 3 +- 
server/src/repo/manager.rs | 8 +- server/src/repo/mod.rs | 87 ++++++++++--------- 11 files changed, 216 insertions(+), 60 deletions(-) create mode 100644 server/src/db/query/distro.rs create mode 100644 server/src/distro.rs diff --git a/server/src/cli.rs b/server/src/cli.rs index 2419606..740e2ad 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -1,4 +1,4 @@ -use crate::{repo::MetaRepoMgr, Config, Global}; +use crate::{distro::MetaDistroMgr, repo::MetaRepoMgr, Config, Global}; use axum::{extract::FromRef, Router}; use clap::Parser; @@ -42,12 +42,6 @@ pub struct Cli { pub log: String, } -impl FromRef for Arc> { - fn from_ref(global: &Global) -> Self { - Arc::clone(&global.repo_manager) - } -} - impl Cli { pub fn init_tracing(&self) { tracing_subscriber::registry() @@ -81,13 +75,11 @@ impl Cli { let config = Config { data_dir: self.data_dir.clone(), }; - let repo_manager = MetaRepoMgr::new(&self.data_dir.join("repos"), db.clone()); - let global = Global { - config, - repo_manager: Arc::new(RwLock::new(repo_manager)), - db, - }; + let mgr = MetaDistroMgr::new(&self.data_dir.join("distros"), db.clone()); + mgr.bootstrap().await?; + + let global = Global { config, mgr, db }; // build our application with a single route let app = Router::new() diff --git a/server/src/db/entities/distro.rs b/server/src/db/entities/distro.rs index d819fae..f39ad53 100644 --- a/server/src/db/entities/distro.rs +++ b/server/src/db/entities/distro.rs @@ -13,6 +13,15 @@ pub struct Model { } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation {} +pub enum Relation { + #[sea_orm(has_many = "super::repo::Entity")] + Repo, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Repo.def() + } +} impl ActiveModelBehavior for ActiveModel {} diff --git a/server/src/db/entities/repo.rs b/server/src/db/entities/repo.rs index 25291da..1ddd39e 100644 --- a/server/src/db/entities/repo.rs +++ b/server/src/db/entities/repo.rs @@ -8,16 +8,31 @@ use 
serde::{Deserialize, Serialize}; pub struct Model { #[sea_orm(primary_key)] pub id: i32, + pub distro_id: i32, pub name: String, pub description: Option, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { + #[sea_orm( + belongs_to = "super::distro::Entity", + from = "Column::DistroId", + to = "super::distro::Column::Id", + on_update = "NoAction", + on_delete = "Cascade" + )] + Distro, #[sea_orm(has_many = "super::package::Entity")] Package, } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Distro.def() + } +} + impl Related for Entity { fn to() -> RelationDef { Relation::Package.def() diff --git a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs index 45ea97c..b1e1fbe 100644 --- a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs +++ b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs @@ -43,8 +43,16 @@ impl MigrationTrait for Migration { .auto_increment() .primary_key(), ) + .col(ColumnDef::new(Repo::DistroId).integer().not_null()) .col(ColumnDef::new(Repo::Name).string().not_null().unique_key()) .col(ColumnDef::new(Repo::Description).string()) + .foreign_key( + ForeignKey::create() + .name("fk-repo-distro_id") + .from(Repo::Table, Repo::DistroId) + .to(Distro::Table, Distro::Id) + .on_delete(ForeignKeyAction::Cascade), + ) .to_owned(), ) .await?; @@ -232,6 +240,7 @@ pub enum Distro { pub enum Repo { Table, Id, + DistroId, Name, Description, } diff --git a/server/src/db/query/distro.rs b/server/src/db/query/distro.rs new file mode 100644 index 0000000..c4fc70f --- /dev/null +++ b/server/src/db/query/distro.rs @@ -0,0 +1,47 @@ +use crate::db::*; + +use sea_orm::{sea_query::IntoCondition, *}; + +#[derive(Deserialize)] +pub struct Filter { + name: Option, +} + +impl IntoCondition for Filter { + fn into_condition(self) -> Condition { + Condition::all().add_option( + self.name + .map(|name| 
distro::Column::Name.like(format!("%{}%", name))), + ) + } +} + +pub async fn page( + conn: &DbConn, + per_page: u64, + page: u64, + filter: Filter, +) -> Result<(u64, Vec)> { + let paginator = Distro::find() + .filter(filter) + .order_by_asc(distro::Column::Id) + .paginate(conn, per_page); + let repos = paginator.fetch_page(page).await?; + let total_pages = paginator.num_pages().await?; + + Ok((total_pages, repos)) +} + +pub async fn by_id(conn: &DbConn, id: i32) -> Result> { + distro::Entity::find_by_id(id).one(conn).await +} + +pub async fn insert(conn: &DbConn, name: &str, description: Option<&str>) -> Result { + let model = distro::ActiveModel { + id: NotSet, + name: Set(String::from(name)), + description: Set(description.map(String::from)), + }; + + model.insert(conn).await +} diff --git a/server/src/db/query/mod.rs b/server/src/db/query/mod.rs index 87d61e3..f0a809b 100644 --- a/server/src/db/query/mod.rs +++ b/server/src/db/query/mod.rs @@ -1,3 +1,4 @@ +pub mod distro; pub mod package; pub mod repo; diff --git a/server/src/db/query/repo.rs b/server/src/db/query/repo.rs index 0370c2b..2ad54bf 100644 --- a/server/src/db/query/repo.rs +++ b/server/src/db/query/repo.rs @@ -43,9 +43,15 @@ pub async fn by_name(conn: &DbConn, name: &str) -> Result> { .await } -pub async fn insert(conn: &DbConn, name: &str, description: Option<&str>) -> Result { +pub async fn insert( + conn: &DbConn, + distro_id: i32, + name: &str, + description: Option<&str>, +) -> Result { let model = repo::ActiveModel { id: NotSet, + distro_id: Set(distro_id), name: Set(String::from(name)), description: Set(description.map(String::from)), }; diff --git a/server/src/distro.rs b/server/src/distro.rs new file mode 100644 index 0000000..209d227 --- /dev/null +++ b/server/src/distro.rs @@ -0,0 +1,69 @@ +use crate::{db, MetaRepoMgr}; + +use std::{ + collections::HashMap, + path::{Path, PathBuf}, + sync::Arc, +}; + +use sea_orm::{DbConn, EntityTrait}; +use tokio::sync::Mutex; + +#[derive(Clone)] +pub 
struct MetaDistroMgr { + distro_dir: PathBuf, + conn: DbConn, + distros: Arc>>>, +} + +impl MetaDistroMgr { + pub fn new>(distro_dir: P, conn: DbConn) -> Self { + Self { + distro_dir: distro_dir.as_ref().to_path_buf(), + conn, + distros: Arc::new(Mutex::new(HashMap::new())), + } + } + + /// Populate the manager with the currently known distros from the database. + pub async fn bootstrap(&self) -> crate::Result<()> { + let mut map = self.distros.lock().await; + let distros = db::Distro::find().all(&self.conn).await?; + + for distro in distros { + let mgr = MetaRepoMgr::new( + self.distro_dir.join(&distro.name), + distro.id, + self.conn.clone(), + ); + map.insert(distro.name, Arc::new(mgr)); + } + + Ok(()) + } + + pub async fn get_mgr(&self, distro: &str) -> Option> { + let map = self.distros.lock().await; + + map.get(distro).map(|mgr| Arc::clone(mgr)) + } + + pub async fn get_or_create_mgr(&self, distro: &str) -> crate::Result> { + let mut map = self.distros.lock().await; + + if let Some(mgr) = map.get(distro) { + Ok(Arc::clone(mgr)) + } else { + let distro = db::query::distro::insert(&self.conn, distro, None).await?; + + let mgr = Arc::new(MetaRepoMgr::new( + self.distro_dir.join(&distro.name), + distro.id, + self.conn.clone(), + )); + map.insert(distro.name, Arc::clone(&mgr)); + + Ok(mgr) + } + } +} diff --git a/server/src/main.rs b/server/src/main.rs index c57420a..a428e39 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -1,6 +1,7 @@ mod api; mod cli; pub mod db; +mod distro; mod error; mod repo; @@ -19,7 +20,7 @@ pub struct Config { #[derive(Clone)] pub struct Global { config: Config, - repo_manager: Arc>, + mgr: distro::MetaDistroMgr, db: sea_orm::DbConn, } diff --git a/server/src/repo/manager.rs b/server/src/repo/manager.rs index e2d9c4d..a877d0d 100644 --- a/server/src/repo/manager.rs +++ b/server/src/repo/manager.rs @@ -12,13 +12,15 @@ pub const ANY_ARCH: &'static str = "any"; pub struct MetaRepoMgr { repo_dir: PathBuf, + distro_id: i32, conn: 
DbConn, } impl MetaRepoMgr { - pub fn new>(repo_dir: P, conn: DbConn) -> Self { + pub fn new>(repo_dir: P, distro_id: i32, conn: DbConn) -> Self { MetaRepoMgr { repo_dir: repo_dir.as_ref().to_path_buf(), + distro_id, conn, } } @@ -237,7 +239,9 @@ impl MetaRepoMgr { let repo_id = if let Some(repo_entity) = res { repo_entity.id } else { - db::query::repo::insert(&self.conn, repo, None).await?.id + db::query::repo::insert(&self.conn, self.distro_id, repo, None) + .await? + .id }; // If the package already exists in the database, we remove it first diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 2f2dacb..7544faa 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -20,19 +20,19 @@ use tower_http::{services::ServeFile, validate_request::ValidateRequestHeaderLay pub fn router(api_key: &str) -> Router { Router::new() .route( - "/:repo", + "/:distro/:repo", post(post_package_archive) .delete(delete_repo) .route_layer(ValidateRequestHeaderLayer::bearer(api_key)), ) .route( - "/:repo/:arch", + "/:distro/:repo/:arch", delete(delete_arch_repo).route_layer(ValidateRequestHeaderLayer::bearer(api_key)), ) // Routes added after the layer do not get that layer applied, so the GET requests will not // be authorized .route( - "/:repo/:arch/:filename", + "/:distro/:repo/:arch/:filename", delete(delete_package) .route_layer(ValidateRequestHeaderLayer::bearer(api_key)) .get(get_file), @@ -44,10 +44,15 @@ pub fn router(api_key: &str) -> Router { /// is returned. 
async fn get_file( State(global): State, - Path((repo, arch, file_name)): Path<(String, String, String)>, + Path((distro, repo, arch, file_name)): Path<(String, String, String, String)>, req: Request, ) -> crate::Result { - let repo_dir = global.config.data_dir.join("repos").join(&repo); + let repo_dir = global + .config + .data_dir + .join("distros") + .join(&distro) + .join(&repo); let file_name = if file_name == format!("{}.db", repo) || file_name == format!("{}.db.tar.gz", repo) { @@ -65,16 +70,12 @@ async fn get_file( async fn post_package_archive( State(global): State, - Path(repo): Path, + Path((distro, repo)): Path<(String, String)>, body: Body, ) -> crate::Result<()> { let mut body = StreamReader::new(body.into_data_stream().map_err(std::io::Error::other)); - let (name, version, arch) = global - .repo_manager - .write() - .await - .add_pkg_from_reader(&mut body, &repo) - .await?; + let mgr = global.mgr.get_or_create_mgr(&distro).await?; + let (name, version, arch) = mgr.add_pkg_from_reader(&mut body, &repo).await?; tracing::info!( "Added '{}-{}' to repository '{}' ({})", @@ -89,14 +90,18 @@ async fn post_package_archive( async fn delete_repo( State(global): State, - Path(repo): Path, + Path((distro, repo)): Path<(String, String)>, ) -> crate::Result { - let repo_removed = global.repo_manager.write().await.remove_repo(&repo).await?; + if let Some(mgr) = global.mgr.get_mgr(&distro).await { + let repo_removed = mgr.remove_repo(&repo).await?; - if repo_removed { - tracing::info!("Removed repository '{}'", repo); + if repo_removed { + tracing::info!("Removed repository '{}'", repo); - Ok(StatusCode::OK) + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } } else { Ok(StatusCode::NOT_FOUND) } @@ -104,19 +109,18 @@ async fn delete_repo( async fn delete_arch_repo( State(global): State, - Path((repo, arch)): Path<(String, String)>, + Path((distro, repo, arch)): Path<(String, String, String)>, ) -> crate::Result { - let repo_removed = global - 
.repo_manager - .write() - .await - .remove_repo_arch(&repo, &arch) - .await?; + if let Some(mgr) = global.mgr.get_mgr(&distro).await { + let repo_removed = mgr.remove_repo_arch(&repo, &arch).await?; - if repo_removed { - tracing::info!("Removed arch '{}' from repository '{}'", arch, repo); + if repo_removed { + tracing::info!("Removed arch '{}' from repository '{}'", arch, repo); - Ok(StatusCode::OK) + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } } else { Ok(StatusCode::NOT_FOUND) } @@ -124,24 +128,23 @@ async fn delete_arch_repo( async fn delete_package( State(global): State, - Path((repo, arch, pkg_name)): Path<(String, String, String)>, + Path((distro, repo, arch, pkg_name)): Path<(String, String, String, String)>, ) -> crate::Result { - let pkg_removed = global - .repo_manager - .write() - .await - .remove_pkg(&repo, &arch, &pkg_name) - .await?; + if let Some(mgr) = global.mgr.get_mgr(&distro).await { + let pkg_removed = mgr.remove_pkg(&repo, &arch, &pkg_name).await?; - if pkg_removed { - tracing::info!( - "Removed package '{}' ({}) from repository '{}'", - pkg_name, - arch, - repo - ); + if pkg_removed { + tracing::info!( + "Removed package '{}' ({}) from repository '{}'", + pkg_name, + arch, + repo + ); - Ok(StatusCode::OK) + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } } else { Ok(StatusCode::NOT_FOUND) } From f9518d6b7de2bb78e1e69d638ea505c248c0101d Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 30 May 2024 10:49:44 +0200 Subject: [PATCH 33/73] refactor: ensure repo dirs exist; rename some things --- server/src/cli.rs | 10 ++-- .../m20230730_000001_create_repo_tables.rs | 2 +- server/src/distro.rs | 51 +++++++++-------- server/src/main.rs | 5 +- server/src/repo/manager.rs | 57 +++++++++---------- server/src/repo/mod.rs | 2 +- 6 files changed, 61 insertions(+), 66 deletions(-) diff --git a/server/src/cli.rs b/server/src/cli.rs index 740e2ad..4fc94f1 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -1,10 +1,9 
@@ -use crate::{distro::MetaDistroMgr, repo::MetaRepoMgr, Config, Global}; +use crate::{distro::MetaDistroMgr, Config, Global}; -use axum::{extract::FromRef, Router}; +use axum::Router; use clap::Parser; use sea_orm_migration::MigratorTrait; -use std::{io, path::PathBuf, sync::Arc}; -use tokio::sync::RwLock; +use std::{io, path::PathBuf}; use tower_http::trace::TraceLayer; use tracing::debug; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; @@ -76,8 +75,7 @@ impl Cli { data_dir: self.data_dir.clone(), }; - let mgr = MetaDistroMgr::new(&self.data_dir.join("distros"), db.clone()); - mgr.bootstrap().await?; + let mgr = MetaDistroMgr::new(&self.data_dir.join("distros"), db.clone()).await?; let global = Global { config, mgr, db }; diff --git a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs index b1e1fbe..2deb05f 100644 --- a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs +++ b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs @@ -81,7 +81,7 @@ impl MigrationTrait for Migration { .col(ColumnDef::new(Package::PgpSig).string_len(255)) .col(ColumnDef::new(Package::PgpSigSize).big_integer()) .col(ColumnDef::new(Package::Sha256Sum).char_len(64).not_null()) - .col(ColumnDef::new(Package::Compression).char_len(16).not_null()) + .col(ColumnDef::new(Package::Compression).string_len(16).not_null()) .foreign_key( ForeignKey::create() .name("fk-package-repo_id") diff --git a/server/src/distro.rs b/server/src/distro.rs index 209d227..22563ff 100644 --- a/server/src/distro.rs +++ b/server/src/distro.rs @@ -1,4 +1,4 @@ -use crate::{db, MetaRepoMgr}; +use crate::{db, DistroMgr}; use std::{ collections::HashMap, @@ -13,42 +13,40 @@ use tokio::sync::Mutex; pub struct MetaDistroMgr { distro_dir: PathBuf, conn: DbConn, - distros: Arc>>>, + distros: Arc>>>, } impl MetaDistroMgr { - pub fn new>(distro_dir: P, conn: DbConn) -> Self { - Self { - distro_dir: 
distro_dir.as_ref().to_path_buf(), - conn, - distros: Arc::new(Mutex::new(HashMap::new())), + pub async fn new>(distro_dir: P, conn: DbConn) -> crate::Result { + if !tokio::fs::try_exists(&distro_dir).await? { + tokio::fs::create_dir(&distro_dir).await?; } - } - /// Populate the manager with the currently known distros from the database. - pub async fn bootstrap(&self) -> crate::Result<()> { - let mut map = self.distros.lock().await; - let distros = db::Distro::find().all(&self.conn).await?; + let distro_dir = distro_dir.as_ref().to_path_buf(); + let mut map: HashMap> = HashMap::new(); + + let distros = db::Distro::find().all(&conn).await?; for distro in distros { - let mgr = MetaRepoMgr::new( - self.distro_dir.join(&distro.name), - distro.id, - self.conn.clone(), - ); + let mgr = + DistroMgr::new(distro_dir.join(&distro.name), distro.id, conn.clone()).await?; map.insert(distro.name, Arc::new(mgr)); } - Ok(()) + Ok(Self { + distro_dir, + conn, + distros: Arc::new(Mutex::new(map)), + }) } - pub async fn get_mgr(&self, distro: &str) -> Option> { + pub async fn get_mgr(&self, distro: &str) -> Option> { let map = self.distros.lock().await; map.get(distro).map(|mgr| Arc::clone(mgr)) } - pub async fn get_or_create_mgr(&self, distro: &str) -> crate::Result> { + pub async fn get_or_create_mgr(&self, distro: &str) -> crate::Result> { let mut map = self.distros.lock().await; if let Some(mgr) = map.get(distro) { @@ -56,11 +54,14 @@ impl MetaDistroMgr { } else { let distro = db::query::distro::insert(&self.conn, distro, None).await?; - let mgr = Arc::new(MetaRepoMgr::new( - self.distro_dir.join(&distro.name), - distro.id, - self.conn.clone(), - )); + let mgr = Arc::new( + DistroMgr::new( + self.distro_dir.join(&distro.name), + distro.id, + self.conn.clone(), + ) + .await?, + ); map.insert(distro.name, Arc::clone(&mgr)); Ok(mgr) diff --git a/server/src/main.rs b/server/src/main.rs index a428e39..d3cbdf0 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -6,11 +6,10 @@ 
mod error; mod repo; pub use error::{Result, ServerError}; -use repo::MetaRepoMgr; +use repo::DistroMgr; use clap::Parser; -use std::{path::PathBuf, sync::Arc}; -use tokio::sync::RwLock; +use std::path::PathBuf; #[derive(Clone)] pub struct Config { diff --git a/server/src/repo/manager.rs b/server/src/repo/manager.rs index a877d0d..23d693d 100644 --- a/server/src/repo/manager.rs +++ b/server/src/repo/manager.rs @@ -10,19 +10,23 @@ use uuid::Uuid; pub const ANY_ARCH: &'static str = "any"; -pub struct MetaRepoMgr { - repo_dir: PathBuf, +pub struct DistroMgr { + distro_dir: PathBuf, distro_id: i32, conn: DbConn, } -impl MetaRepoMgr { - pub fn new>(repo_dir: P, distro_id: i32, conn: DbConn) -> Self { - MetaRepoMgr { - repo_dir: repo_dir.as_ref().to_path_buf(), +impl DistroMgr { + pub async fn new>(distro_dir: P, distro_id: i32, conn: DbConn) -> Result { + if !tokio::fs::try_exists(&distro_dir).await? { + tokio::fs::create_dir(&distro_dir).await?; + } + + Ok(Self { + distro_dir: distro_dir.as_ref().to_path_buf(), distro_id, conn, - } + }) } /// Generate archive databases for all known architectures in the repository, including the @@ -62,9 +66,6 @@ impl MetaRepoMgr { let repo = repo.unwrap(); - let parent_dir = self.repo_dir.join(&repo.name); - tokio::fs::create_dir_all(&parent_dir).await?; - let [tmp_ar_db_path, tmp_ar_files_path, files_tmp_file_path, desc_tmp_file_path] = self.random_file_paths(); let ar_db = archive::RepoArchiveWriter::open(&tmp_ar_db_path).await?; @@ -102,15 +103,13 @@ impl MetaRepoMgr { ar_db.close().await?; ar_files.close().await?; + let repo_dir = self.distro_dir.join(&repo.name); + // Move the db archives to their respective places - tokio::fs::rename( - tmp_ar_db_path, - parent_dir.join(format!("{}.db.tar.gz", arch)), - ) - .await?; + tokio::fs::rename(tmp_ar_db_path, repo_dir.join(format!("{}.db.tar.gz", arch))).await?; tokio::fs::rename( tmp_ar_files_path, - parent_dir.join(format!("{}.files.tar.gz", arch)), + 
repo_dir.join(format!("{}.files.tar.gz", arch)), ) .await?; @@ -131,7 +130,7 @@ impl MetaRepoMgr { repo_entry.delete(&self.conn).await?; // Remove files from file system - tokio::fs::remove_dir_all(self.repo_dir.join(repo)).await?; + tokio::fs::remove_dir_all(self.distro_dir.join(repo)).await?; Ok(true) } else { @@ -152,7 +151,7 @@ impl MetaRepoMgr { while let Some(pkg) = pkgs.next().await.transpose()? { let path = self - .repo_dir + .distro_dir .join(&repo.name) .join(super::package::filename(&pkg)); tokio::fs::remove_file(path).await?; @@ -161,13 +160,13 @@ impl MetaRepoMgr { } tokio::fs::remove_file( - self.repo_dir + self.distro_dir .join(&repo.name) .join(format!("{}.db.tar.gz", arch)), ) .await?; tokio::fs::remove_file( - self.repo_dir + self.distro_dir .join(&repo.name) .join(format!("{}.files.tar.gz", arch)), ) @@ -194,7 +193,7 @@ impl MetaRepoMgr { if let Some(pkg) = pkg { // Remove package from database & file system tokio::fs::remove_file( - self.repo_dir + self.distro_dir .join(&repo.name) .join(super::package::filename(&pkg)), ) @@ -221,24 +220,23 @@ impl MetaRepoMgr { reader: &mut R, repo: &str, ) -> crate::Result<(String, String, String)> { - // Copy file contents to temporary path so libarchive can work with it let [path] = self.random_file_paths(); let mut temp_file = tokio::fs::File::create(&path).await?; tokio::io::copy(reader, &mut temp_file).await?; - // Parse the package let path_clone = path.clone(); let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)) .await .unwrap()?; - // Query the repo for its ID, or create it if it does not already exist - let res = db::query::repo::by_name(&self.conn, &repo).await?; + let repo_dir = self.distro_dir.join(repo); - let repo_id = if let Some(repo_entity) = res { - repo_entity.id + let repo_id = if let Some(repo) = db::query::repo::by_name(&self.conn, &repo).await? 
{ + repo.id } else { + tokio::fs::create_dir(&repo_dir).await?; + db::query::repo::insert(&self.conn, self.distro_id, repo, None) .await? .id @@ -259,7 +257,7 @@ impl MetaRepoMgr { entry.delete(&self.conn).await?; } - let dest_pkg_path = self.repo_dir.join(repo).join(pkg.file_name()); + let dest_pkg_path = repo_dir.join(pkg.file_name()); // Insert new package into database let name = pkg.info.name.clone(); @@ -268,7 +266,6 @@ impl MetaRepoMgr { db::query::package::insert(&self.conn, repo_id, pkg).await?; // Move the package to its final resting place - tokio::fs::create_dir_all(dest_pkg_path.parent().unwrap()).await?; tokio::fs::rename(path, dest_pkg_path).await?; // Synchronize archive databases @@ -285,7 +282,7 @@ impl MetaRepoMgr { pub fn random_file_paths(&self) -> [PathBuf; C] { std::array::from_fn(|_| { let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - self.repo_dir.join(uuid.to_string()) + self.distro_dir.join(uuid.to_string()) }) } } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 7544faa..7d7e321 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -2,7 +2,7 @@ mod archive; mod manager; pub mod package; -pub use manager::MetaRepoMgr; +pub use manager::DistroMgr; use axum::{ body::Body, From fa6de9b035db4251c6c5e0b83fe7a2fa18f01d37 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Mon, 3 Jun 2024 09:46:02 +0200 Subject: [PATCH 34/73] feat: upload new packages to queue --- server/src/cli.rs | 2 +- server/src/db/entities/package.rs | 3 + .../m20230730_000001_create_repo_tables.rs | 8 +- server/src/db/mod.rs | 11 ++ server/src/db/query/package.rs | 25 ++-- server/src/repo/manager.rs | 115 +++++++++++------- 6 files changed, 110 insertions(+), 54 deletions(-) diff --git a/server/src/cli.rs b/server/src/cli.rs index 4fc94f1..2df7f7c 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -35,7 +35,7 @@ pub struct Cli { #[arg( long, value_name = "LOG_LEVEL", - default_value = 
"tower_http=debug,rieterd=debug,sea_orm=debug", + default_value = "tower_http=debug,rieterd=debug", env = "RIETER_LOG" )] pub log: String, diff --git a/server/src/db/entities/package.rs b/server/src/db/entities/package.rs index 112cde4..08ac2ab 100644 --- a/server/src/db/entities/package.rs +++ b/server/src/db/entities/package.rs @@ -4,6 +4,8 @@ use chrono::NaiveDateTime; use sea_orm::entity::prelude::*; use serde::{Deserialize, Serialize}; +use crate::db::PackageState; + #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)] #[sea_orm(table_name = "package")] pub struct Model { @@ -24,6 +26,7 @@ pub struct Model { pub pgp_sig_size: Option, pub sha256_sum: String, pub compression: String, + pub state: PackageState, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs index 2deb05f..f76e639 100644 --- a/server/src/db/migrator/m20230730_000001_create_repo_tables.rs +++ b/server/src/db/migrator/m20230730_000001_create_repo_tables.rs @@ -81,7 +81,12 @@ impl MigrationTrait for Migration { .col(ColumnDef::new(Package::PgpSig).string_len(255)) .col(ColumnDef::new(Package::PgpSigSize).big_integer()) .col(ColumnDef::new(Package::Sha256Sum).char_len(64).not_null()) - .col(ColumnDef::new(Package::Compression).string_len(16).not_null()) + .col( + ColumnDef::new(Package::Compression) + .string_len(16) + .not_null(), + ) + .col(ColumnDef::new(Package::State).integer().not_null()) .foreign_key( ForeignKey::create() .name("fk-package-repo_id") @@ -264,6 +269,7 @@ pub enum Package { PgpSigSize, Sha256Sum, Compression, + State, } #[derive(Iden)] diff --git a/server/src/db/mod.rs b/server/src/db/mod.rs index 597cf20..98f42a4 100644 --- a/server/src/db/mod.rs +++ b/server/src/db/mod.rs @@ -30,6 +30,17 @@ pub enum PackageRelatedEnum { Optdepend, } +#[derive(EnumIter, DeriveActiveEnum, Deserialize, Serialize, 
PartialEq, Eq, Clone, Debug)] +#[sea_orm(rs_type = "i32", db_type = "Integer")] +pub enum PackageState { + #[sea_orm(num_value = 0)] + PendingCommit, + #[sea_orm(num_value = 1)] + Committed, + #[sea_orm(num_value = 2)] + PendingDeletion, +} + #[derive(Serialize)] pub struct FullPackage { #[serde(flatten)] diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index c76e532..abbfb9c 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -68,9 +68,17 @@ pub async fn delete_with_arch(conn: &DbConn, repo_id: i32, arch: &str) -> Result .await } -pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Package) -> Result<()> { +pub async fn insert( + conn: &DbConn, + repo_id: i32, + pkg: crate::repo::package::Package, +) -> Result { let info = pkg.info; + // Doing this manually is not the recommended way, but the generic error type of the + // transaction function didn't play well with my current error handling + let txn = conn.begin().await?; + let model = package::ActiveModel { id: NotSet, repo_id: Set(repo_id), @@ -88,9 +96,10 @@ pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Pack pgp_sig_size: Set(info.pgpsigsize), sha256_sum: Set(info.sha256sum), compression: Set(pkg.compression.extension().unwrap().to_string()), + state: Set(PackageState::PendingCommit), }; - let pkg_entry = model.insert(conn).await?; + let pkg_entry = model.insert(&txn).await?; // Insert all the related tables PackageLicense::insert_many(info.licenses.iter().map(|s| package_license::ActiveModel { @@ -98,7 +107,7 @@ pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Pack name: Set(s.to_string()), })) .on_empty_do_nothing() - .exec(conn) + .exec(&txn) .await?; PackageGroup::insert_many(info.groups.iter().map(|s| package_group::ActiveModel { @@ -106,7 +115,7 @@ pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Pack name: Set(s.to_string()), })) 
.on_empty_do_nothing() - .exec(conn) + .exec(&txn) .await?; let related = info @@ -146,7 +155,7 @@ pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Pack name: Set(s.to_string()), })) .on_empty_do_nothing() - .exec(conn) + .exec(&txn) .await?; PackageFile::insert_many(pkg.files.iter().map(|s| package_file::ActiveModel { @@ -154,10 +163,12 @@ pub async fn insert(conn: &DbConn, repo_id: i32, pkg: crate::repo::package::Pack path: Set(s.display().to_string()), })) .on_empty_do_nothing() - .exec(conn) + .exec(&txn) .await?; - Ok(()) + txn.commit().await?; + + Ok(pkg_entry) } pub async fn full(conn: &DbConn, id: i32) -> Result> { diff --git a/server/src/repo/manager.rs b/server/src/repo/manager.rs index 23d693d..691f6b0 100644 --- a/server/src/repo/manager.rs +++ b/server/src/repo/manager.rs @@ -1,19 +1,25 @@ use super::{archive, package}; use crate::{db, error::Result}; -use std::path::{Path, PathBuf}; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; use futures::StreamExt; use sea_orm::{ColumnTrait, DbConn, ModelTrait, QueryFilter, QuerySelect}; -use tokio::io::AsyncRead; +use tokio::{io::AsyncRead, sync::Mutex}; use uuid::Uuid; pub const ANY_ARCH: &'static str = "any"; +pub const REPOS_DIR: &'static str = "repos"; +pub const QUEUE_DIR: &'static str = "queue"; pub struct DistroMgr { distro_dir: PathBuf, distro_id: i32, conn: DbConn, + lock: Arc>, } impl DistroMgr { @@ -22,10 +28,23 @@ impl DistroMgr { tokio::fs::create_dir(&distro_dir).await?; } + let repos_dir = distro_dir.as_ref().join(REPOS_DIR); + + if !tokio::fs::try_exists(&repos_dir).await? { + tokio::fs::create_dir(repos_dir).await?; + } + + let queue_dir = distro_dir.as_ref().join(QUEUE_DIR); + + if !tokio::fs::try_exists(&queue_dir).await? 
{ + tokio::fs::create_dir(queue_dir).await?; + } + Ok(Self { distro_dir: distro_dir.as_ref().to_path_buf(), distro_id, conn, + lock: Arc::new(Mutex::new(())), }) } @@ -121,6 +140,18 @@ impl DistroMgr { Ok(()) } + async fn get_or_create_repo(&self, repo: &str) -> Result { + let _guard = self.lock.lock().await; + + if let Some(repo) = db::query::repo::by_name(&self.conn, repo).await? { + Ok(repo) + } else { + tokio::fs::create_dir(self.distro_dir.join(repo)).await?; + + Ok(db::query::repo::insert(&self.conn, self.distro_id, repo, None).await?) + } + } + /// Remove the repo with the given name, if it existed pub async fn remove_repo(&self, repo: &str) -> Result { let res = db::query::repo::by_name(&self.conn, repo).await?; @@ -220,62 +251,56 @@ impl DistroMgr { reader: &mut R, repo: &str, ) -> crate::Result<(String, String, String)> { - let [path] = self.random_file_paths(); - let mut temp_file = tokio::fs::File::create(&path).await?; + let [tmp_file_path] = self.random_file_paths(); + let mut temp_file = tokio::fs::File::create(&tmp_file_path).await?; tokio::io::copy(reader, &mut temp_file).await?; - let path_clone = path.clone(); + let path_clone = tmp_file_path.clone(); let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)) .await .unwrap()?; - let repo_dir = self.distro_dir.join(repo); + let repo = self.get_or_create_repo(repo).await?; + let pkg = db::query::package::insert(&self.conn, repo.id, pkg).await?; - let repo_id = if let Some(repo) = db::query::repo::by_name(&self.conn, &repo).await? { - repo.id - } else { - tokio::fs::create_dir(&repo_dir).await?; - - db::query::repo::insert(&self.conn, self.distro_id, repo, None) - .await? 
- .id - }; + let queue_path = self.distro_dir.join(QUEUE_DIR).join(pkg.id.to_string()); + tokio::fs::rename(tmp_file_path, queue_path).await?; // If the package already exists in the database, we remove it first - let res = db::query::package::by_fields( - &self.conn, - repo_id, - &pkg.info.arch, - &pkg.info.name, - None, - None, - ) - .await?; - - if let Some(entry) = res { - entry.delete(&self.conn).await?; - } - - let dest_pkg_path = repo_dir.join(pkg.file_name()); - - // Insert new package into database - let name = pkg.info.name.clone(); - let version = pkg.info.version.clone(); - let arch = pkg.info.arch.clone(); - db::query::package::insert(&self.conn, repo_id, pkg).await?; - - // Move the package to its final resting place - tokio::fs::rename(path, dest_pkg_path).await?; + //let res = db::query::package::by_fields( + // &self.conn, + // repo.id, + // &pkg.info.arch, + // &pkg.info.name, + // None, + // None, + //) + //.await?; + // + //if let Some(entry) = res { + // entry.delete(&self.conn).await?; + //} + //let dest_pkg_path = repo_dir.join(pkg.file_name()); + // + //// Insert new package into database + //let name = pkg.info.name.clone(); + //let version = pkg.info.version.clone(); + //let arch = pkg.info.arch.clone(); + //db::query::package::insert(&self.conn, repo.id, pkg).await?; + // + //// Move the package to its final resting place + //tokio::fs::rename(tmp_file_path, dest_pkg_path).await?; + // // Synchronize archive databases - if arch == ANY_ARCH { - self.generate_archives_all(repo).await?; - } else { - self.generate_archives(repo, &arch).await?; - } + //if arch == ANY_ARCH { + // self.generate_archives_all(&repo.name).await?; + //} else { + // self.generate_archives(&repo.name, &arch).await?; + //} - Ok((name, version, arch)) + Ok((pkg.name, pkg.version, pkg.arch)) } /// Generate a path to a unique file that can be used as a temporary file From 97612e1af61dbcfcce1c136dc7b88eba11ff1bcf Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 9 
Jun 2024 23:04:45 +0200 Subject: [PATCH 35/73] feat: better concurrent uploads with limited parallel parsing --- Cargo.lock | 1 + server/Cargo.toml | 1 + server/src/repo/manager.rs | 116 ++++++++++++++++++++++++------------- server/src/repo/mod.rs | 25 +++++--- server/src/repo/package.rs | 2 +- 5 files changed, 96 insertions(+), 49 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 333bc72..d6e9e55 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1662,6 +1662,7 @@ dependencies = [ "libarchive", "sea-orm", "sea-orm-migration", + "sea-query", "serde", "sha256", "tokio", diff --git a/server/Cargo.toml b/server/Cargo.toml index cd86713..75b5a09 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -14,6 +14,7 @@ futures = "0.3.28" http-body-util = "0.1.1" libarchive = { path = "../libarchive" } sea-orm-migration = "0.12.1" +sea-query = { version = "0.30.7", features = ["backend-postgres", "backend-sqlite"] } serde = { version = "1.0.178", features = ["derive"] } sha256 = "1.1.4" tokio = { version = "1.29.1", features = ["full"] } diff --git a/server/src/repo/manager.rs b/server/src/repo/manager.rs index 691f6b0..d3c753c 100644 --- a/server/src/repo/manager.rs +++ b/server/src/repo/manager.rs @@ -7,19 +7,27 @@ use std::{ }; use futures::StreamExt; -use sea_orm::{ColumnTrait, DbConn, ModelTrait, QueryFilter, QuerySelect}; -use tokio::{io::AsyncRead, sync::Mutex}; +use sea_orm::{ + ActiveModelTrait, ColumnTrait, DbConn, EntityTrait, ModelTrait, QueryFilter, QuerySelect, + Related, RelationTrait, Set, TransactionTrait, +}; +use sea_query::{Expr, Query}; +use tokio::{ + io::AsyncRead, + sync::{Mutex, Semaphore}, +}; use uuid::Uuid; pub const ANY_ARCH: &'static str = "any"; pub const REPOS_DIR: &'static str = "repos"; -pub const QUEUE_DIR: &'static str = "queue"; pub struct DistroMgr { distro_dir: PathBuf, distro_id: i32, conn: DbConn, - lock: Arc>, + repo_lock: Arc>, + sync_lock: Arc>, + pkg_sema: Arc, } impl DistroMgr { @@ -34,23 +42,21 @@ impl DistroMgr { 
tokio::fs::create_dir(repos_dir).await?; } - let queue_dir = distro_dir.as_ref().join(QUEUE_DIR); - - if !tokio::fs::try_exists(&queue_dir).await? { - tokio::fs::create_dir(queue_dir).await?; - } - Ok(Self { distro_dir: distro_dir.as_ref().to_path_buf(), distro_id, conn, - lock: Arc::new(Mutex::new(())), + repo_lock: Arc::new(Mutex::new(())), + sync_lock: Arc::new(Mutex::new(())), + pkg_sema: Arc::new(Semaphore::new(1)), }) } /// Generate archive databases for all known architectures in the repository, including the /// "any" architecture. - pub async fn generate_archives_all(&self, repo: &str) -> Result<()> { + pub async fn sync_repo(&self, repo: &str) -> Result<()> { + let _guard = self.sync_lock.lock().await; + let repo = crate::db::query::repo::by_name(&self.conn, repo).await?; if repo.is_none() { @@ -60,31 +66,23 @@ impl DistroMgr { let repo = repo.unwrap(); let mut archs = repo - .find_related(crate::db::Package) + .find_related(db::Package) .select_only() - .column(crate::db::package::Column::Arch) + .column(db::package::Column::Arch) .distinct() .into_tuple::() .stream(&self.conn) .await?; while let Some(arch) = archs.next().await.transpose()? { - self.generate_archives(&repo.name, &arch).await?; + self.generate_archives(&repo, &arch).await?; } Ok(()) } /// Generate the archive databases for the given repository and architecture. 
- pub async fn generate_archives(&self, repo: &str, arch: &str) -> Result<()> { - let repo = crate::db::query::repo::by_name(&self.conn, repo).await?; - - if repo.is_none() { - return Ok(()); - } - - let repo = repo.unwrap(); - + async fn generate_archives(&self, repo: &db::repo::Model, arch: &str) -> Result<()> { let [tmp_ar_db_path, tmp_ar_files_path, files_tmp_file_path, desc_tmp_file_path] = self.random_file_paths(); let ar_db = archive::RepoArchiveWriter::open(&tmp_ar_db_path).await?; @@ -95,10 +93,23 @@ impl DistroMgr { let mut pkgs = repo .find_related(crate::db::Package) .filter(db::package::Column::Arch.is_in([arch, ANY_ARCH])) + .filter( + db::package::Column::Id.in_subquery( + Query::select() + .expr(db::package::Column::Id.max()) + .from(db::package::Entity) + .group_by_columns([db::package::Column::Arch, db::package::Column::Name]) + .to_owned(), + ), + ) .stream(&self.conn) .await?; + let mut commited_ids: Vec = Vec::new(); + while let Some(pkg) = pkgs.next().await.transpose()? { + commited_ids.push(pkg.id); + let mut files_tmp_file = tokio::fs::File::create(&files_tmp_file_path).await?; let mut desc_tmp_file = tokio::fs::File::create(&desc_tmp_file_path).await?; @@ -132,16 +143,34 @@ impl DistroMgr { ) .await?; + // Only after we have successfully written everything to disk do we update the database. + // This order ensures any failure can be recovered, as the database is our single source of + // truth. 
+ db::Package::update_many() + .col_expr( + db::package::Column::State, + Expr::value(db::PackageState::Committed), + ) + .filter(db::package::Column::Id.is_in(commited_ids)) + .exec(&self.conn) + .await?; + // If this fails there's no point in failing the function + if there were no packages in // the repo, this fails anyway because the temp file doesn't exist let _ = tokio::fs::remove_file(desc_tmp_file_path).await; let _ = tokio::fs::remove_file(files_tmp_file_path).await; + tracing::info!( + "Package archives generated for '{}' ('{}')", + &repo.name, + arch + ); + Ok(()) } async fn get_or_create_repo(&self, repo: &str) -> Result { - let _guard = self.lock.lock().await; + let _guard = self.repo_lock.lock().await; if let Some(repo) = db::query::repo::by_name(&self.conn, repo).await? { Ok(repo) @@ -205,7 +234,7 @@ impl DistroMgr { // If we removed all "any" packages, we need to resync all databases if arch == ANY_ARCH { - self.generate_archives_all(&repo.name).await?; + self.sync_repo(&repo.name).await?; } Ok(true) @@ -231,11 +260,11 @@ impl DistroMgr { .await?; pkg.delete(&self.conn).await?; - if arch == ANY_ARCH { - self.generate_archives_all(&repo.name).await?; - } else { - self.generate_archives(&repo.name, arch).await?; - } + //if arch == ANY_ARCH { + // self.sync_repo(&repo.name).await?; + //} else { + // self.generate_archives(&repo.name, arch).await?; + //} Ok(true) } else { @@ -246,26 +275,33 @@ impl DistroMgr { } } - pub async fn add_pkg_from_reader( + pub async fn add_pkg_from_path>( &self, - reader: &mut R, + path: P, repo: &str, ) -> crate::Result<(String, String, String)> { - let [tmp_file_path] = self.random_file_paths(); - let mut temp_file = tokio::fs::File::create(&tmp_file_path).await?; + let _guard = self.pkg_sema.acquire().await.unwrap(); - tokio::io::copy(reader, &mut temp_file).await?; - - let path_clone = tmp_file_path.clone(); + let path_clone = path.as_ref().to_path_buf(); let pkg = tokio::task::spawn_blocking(move || 
package::Package::open(path_clone)) .await .unwrap()?; let repo = self.get_or_create_repo(repo).await?; + + // TODO prevent database from being updated but file failing to move to repo dir? let pkg = db::query::package::insert(&self.conn, repo.id, pkg).await?; - let queue_path = self.distro_dir.join(QUEUE_DIR).join(pkg.id.to_string()); - tokio::fs::rename(tmp_file_path, queue_path).await?; + let queue_path = self.distro_dir.join(&repo.name).join(pkg.id.to_string()); + tokio::fs::rename(path.as_ref(), queue_path).await?; + + tracing::info!( + "Added '{}-{}' to repository '{}' ({})", + pkg.name, + pkg.version, + repo.name, + pkg.arch + ); // If the package already exists in the database, we remove it first //let res = db::query::package::by_fields( diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 7d7e321..cdc6c09 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -75,15 +75,24 @@ async fn post_package_archive( ) -> crate::Result<()> { let mut body = StreamReader::new(body.into_data_stream().map_err(std::io::Error::other)); let mgr = global.mgr.get_or_create_mgr(&distro).await?; - let (name, version, arch) = mgr.add_pkg_from_reader(&mut body, &repo).await?; + let [tmp_path] = mgr.random_file_paths(); - tracing::info!( - "Added '{}-{}' to repository '{}' ({})", - name, - version, - repo, - arch - ); + let mut tmp_file = tokio::fs::File::create(&tmp_path).await?; + tokio::io::copy(&mut body, &mut tmp_file).await?; + + tokio::spawn(async move { mgr.add_pkg_from_path(tmp_path, &repo).await }); + + //let (name, version, arch) = mgr.add_pkg_from_path(&mut body, &repo).await?; + // + //tracing::info!( + // "Added '{}-{}' to repository '{}' ({})", + // name, + // version, + // repo, + // arch + //); + + //tokio::spawn(async move { mgr.sync_repo(&repo).await }); Ok(()) } diff --git a/server/src/repo/package.rs b/server/src/repo/package.rs index 24979eb..66c8fa1 100644 --- a/server/src/repo/package.rs +++ b/server/src/repo/package.rs @@ -323,7 
+323,7 @@ pub async fn write_desc( pkg: &package::Model, ) -> crate::Result<()> { writer - .write_all(format!("%FILENAME%\n{}\n", filename(pkg)).as_bytes()) + .write_all(format!("%FILENAME%\n{}\n", pkg.id).as_bytes()) .await?; write_attribute(writer, "NAME", &pkg.name).await?; From 5839d66213e1e5695a6014cb722171791f20b7d5 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Tue, 11 Jun 2024 12:22:44 +0200 Subject: [PATCH 36/73] wip: concurrent repo sync --- server/src/repo/manager.rs | 119 ++++++++++++++++++++++++++++++------- server/src/repo/mod.rs | 8 ++- 2 files changed, 104 insertions(+), 23 deletions(-) diff --git a/server/src/repo/manager.rs b/server/src/repo/manager.rs index d3c753c..3182817 100644 --- a/server/src/repo/manager.rs +++ b/server/src/repo/manager.rs @@ -3,7 +3,8 @@ use crate::{db, error::Result}; use std::{ path::{Path, PathBuf}, - sync::Arc, + sync::{Arc, atomic::{AtomicBool, AtomicU32, Ordering}}, + collections::HashMap, }; use futures::StreamExt; @@ -14,20 +15,27 @@ use sea_orm::{ use sea_query::{Expr, Query}; use tokio::{ io::AsyncRead, - sync::{Mutex, Semaphore}, + sync::{Mutex, Semaphore, RwLock, Notify}, }; use uuid::Uuid; pub const ANY_ARCH: &'static str = "any"; pub const REPOS_DIR: &'static str = "repos"; +#[derive(Default)] +pub struct RepoState { + queued_pkgs: AtomicU32, + sync_queued: AtomicBool, + sync_notify: Notify, +} + pub struct DistroMgr { distro_dir: PathBuf, distro_id: i32, conn: DbConn, - repo_lock: Arc>, - sync_lock: Arc>, - pkg_sema: Arc, + repos: RwLock>>, + sync_lock: Mutex<()>, + pkg_sema: Semaphore, } impl DistroMgr { @@ -46,18 +54,50 @@ impl DistroMgr { distro_dir: distro_dir.as_ref().to_path_buf(), distro_id, conn, - repo_lock: Arc::new(Mutex::new(())), - sync_lock: Arc::new(Mutex::new(())), - pkg_sema: Arc::new(Semaphore::new(1)), + sync_lock: Mutex::new(()), + pkg_sema: Semaphore::new(1), + repos: RwLock::new(HashMap::new()), }) } + pub async fn schedule_sync(&self, repo_id: i32) -> Result<()> { + let state = { + 
let repos = self.repos.read().await; + repos.get(&repo_id).map(Arc::clone) + }; + + + if state.is_none() { + tracing::debug!("is none") + return Ok(()); + } + + let state = state.unwrap(); + + let res = state.sync_queued.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst); + + // Already a sync job scheduled, so this one can simply quit + if res.is_err() { + tracing::debug!("shit"); + return Ok(()); + } + + // If the queue is not empty, we wait for a notification that it is before syncing + if state.queued_pkgs.load(Ordering::SeqCst) > 0 { + tracing::debug!("sync waiter waiting"); + state.sync_notify.notified().await; + tracing::debug!("sync waiter notified"); + } + + self.sync_repo(repo_id).await + } + /// Generate archive databases for all known architectures in the repository, including the /// "any" architecture. - pub async fn sync_repo(&self, repo: &str) -> Result<()> { + pub async fn sync_repo(&self, repo_id: i32) -> Result<()> { let _guard = self.sync_lock.lock().await; - let repo = crate::db::query::repo::by_name(&self.conn, repo).await?; + let repo = crate::db::query::repo::by_id(&self.conn, repo_id).await?; if repo.is_none() { return Ok(()); @@ -170,14 +210,17 @@ impl DistroMgr { } async fn get_or_create_repo(&self, repo: &str) -> Result { - let _guard = self.repo_lock.lock().await; + let mut repos = self.repos.write().await; if let Some(repo) = db::query::repo::by_name(&self.conn, repo).await? { Ok(repo) } else { tokio::fs::create_dir(self.distro_dir.join(repo)).await?; + let repo = db::query::repo::insert(&self.conn, self.distro_id, repo, None).await?; - Ok(db::query::repo::insert(&self.conn, self.distro_id, repo, None).await?) 
+ repos.insert(repo.id, Arc::new(RepoState::default())); + + Ok(repo) } } @@ -234,7 +277,7 @@ impl DistroMgr { // If we removed all "any" packages, we need to resync all databases if arch == ANY_ARCH { - self.sync_repo(&repo.name).await?; + //self.sync_repo(&repo.name).await?; } Ok(true) @@ -275,20 +318,16 @@ impl DistroMgr { } } - pub async fn add_pkg_from_path>( + async fn _add_pkg_from_path>( &self, path: P, - repo: &str, - ) -> crate::Result<(String, String, String)> { - let _guard = self.pkg_sema.acquire().await.unwrap(); - + repo: &db::repo::Model, + ) -> crate::Result { let path_clone = path.as_ref().to_path_buf(); let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)) .await .unwrap()?; - let repo = self.get_or_create_repo(repo).await?; - // TODO prevent database from being updated but file failing to move to repo dir? let pkg = db::query::package::insert(&self.conn, repo.id, pkg).await?; @@ -303,6 +342,44 @@ impl DistroMgr { pkg.arch ); + Ok(pkg) + } + + pub async fn add_pkg_from_path>( + &self, + path: P, + repo: &str, + ) -> crate::Result<(i32, String, String, String)> { + let repo = self.get_or_create_repo(repo).await?; + + { + let repos = self.repos.read().await; + + if let Some(state) = repos.get(&repo.id) { + state.queued_pkgs.fetch_add(1, Ordering::SeqCst); + } + } + + let _guard = self.pkg_sema.acquire().await.unwrap(); + let res = self._add_pkg_from_path(path, &repo).await; + + match res { + Ok(pkg) => { + let repos = self.repos.read().await; + + if let Some(state) = repos.get(&repo.id) { + let old = state.queued_pkgs.fetch_sub(1, Ordering::SeqCst); + + if old - 1 == 0 { + state.sync_notify.notify_one(); + } + } + + Ok((repo.id, pkg.name, pkg.version, pkg.arch)) + }, + Err(e) => Err(e), + } + // If the package already exists in the database, we remove it first //let res = db::query::package::by_fields( // &self.conn, @@ -335,8 +412,6 @@ impl DistroMgr { //} else { // self.generate_archives(&repo.name, &arch).await?; //} 
- - Ok((pkg.name, pkg.version, pkg.arch)) } /// Generate a path to a unique file that can be used as a temporary file diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index cdc6c09..ebdc655 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -80,7 +80,13 @@ async fn post_package_archive( let mut tmp_file = tokio::fs::File::create(&tmp_path).await?; tokio::io::copy(&mut body, &mut tmp_file).await?; - tokio::spawn(async move { mgr.add_pkg_from_path(tmp_path, &repo).await }); + tokio::spawn(async move { + if let Ok((repo, _, _, _)) = mgr.add_pkg_from_path(tmp_path, &repo).await { + tracing::debug!("starting schedule_sync"); + let _ = mgr.schedule_sync(repo).await; + tracing::debug!("finished schedule_sync"); + }; + }); //let (name, version, arch) = mgr.add_pkg_from_path(&mut body, &repo).await?; // From 5073855696bc096c6847632877d132436d51132f Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Wed, 12 Jun 2024 12:32:49 +0200 Subject: [PATCH 37/73] wip: possible second reimagining of manager --- server/src/repo/manager.rs | 19 +++-- server/src/repo/manager2.rs | 148 ++++++++++++++++++++++++++++++++++++ server/src/repo/mod.rs | 1 + 3 files changed, 161 insertions(+), 7 deletions(-) create mode 100644 server/src/repo/manager2.rs diff --git a/server/src/repo/manager.rs b/server/src/repo/manager.rs index 3182817..4bf2378 100644 --- a/server/src/repo/manager.rs +++ b/server/src/repo/manager.rs @@ -2,9 +2,12 @@ use super::{archive, package}; use crate::{db, error::Result}; use std::{ - path::{Path, PathBuf}, - sync::{Arc, atomic::{AtomicBool, AtomicU32, Ordering}}, collections::HashMap, + path::{Path, PathBuf}, + sync::{ + atomic::{AtomicBool, AtomicU32, Ordering}, + Arc, + }, }; use futures::StreamExt; @@ -15,7 +18,7 @@ use sea_orm::{ use sea_query::{Expr, Query}; use tokio::{ io::AsyncRead, - sync::{Mutex, Semaphore, RwLock, Notify}, + sync::{Mutex, Notify, RwLock, Semaphore}, }; use uuid::Uuid; @@ -66,15 +69,17 @@ impl DistroMgr { 
repos.get(&repo_id).map(Arc::clone) }; - if state.is_none() { - tracing::debug!("is none") + tracing::debug!("is none"); return Ok(()); } let state = state.unwrap(); - let res = state.sync_queued.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst); + let res = + state + .sync_queued + .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst); // Already a sync job scheduled, so this one can simply quit if res.is_err() { @@ -376,7 +381,7 @@ impl DistroMgr { } Ok((repo.id, pkg.name, pkg.version, pkg.arch)) - }, + } Err(e) => Err(e), } diff --git a/server/src/repo/manager2.rs b/server/src/repo/manager2.rs new file mode 100644 index 0000000..d1e7d23 --- /dev/null +++ b/server/src/repo/manager2.rs @@ -0,0 +1,148 @@ +use super::{archive, package}; +use crate::db; + +use std::path::{Path, PathBuf}; + +use sea_orm::{ + ActiveModelTrait, ColumnTrait, Condition, DbConn, EntityTrait, JoinType, ModelTrait, NotSet, + QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, +}; +use tokio::sync::{ + mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, + Mutex, +}; + +struct PkgQueueMsg { + repo: i32, + path: PathBuf, +} + +/// A single instance of this struct orchestrates everything related to managing packages files on +/// disk for all repositories in the server +pub struct RepoMgr { + repos_dir: PathBuf, + conn: DbConn, + pkg_queue: ( + UnboundedSender, + Mutex>, + ), + repos_lock: Mutex<()>, +} + +impl RepoMgr { + pub async fn new>(repos_dir: P, conn: DbConn) -> crate::Result { + if !tokio::fs::try_exists(&repos_dir).await? { + tokio::fs::create_dir(&repos_dir).await?; + } + + let (tx, rx) = unbounded_channel(); + + Ok(Self { + repos_dir: repos_dir.as_ref().to_path_buf(), + conn, + pkg_queue: (tx, Mutex::new(rx)), + repos_lock: Mutex::new(()), + }) + } + + pub async fn pkg_parse_task(&self) { + loop { + // Receive the next message and immediately drop the mutex afterwards. 
As long as the + // quue is empty, this will lock the mutex. This is okay, as the mutex will be unlocked + // as soon as a message is received, so another worker can pick up the mutex. + let mut recv = self.pkg_queue.1.lock().await; + let msg = recv.recv().await; + drop(recv); + + if let Some(msg) = msg { + // TODO better handle this error (retry if failure wasn't because the package is + // faulty) + let _ = self + .add_pkg_from_path(msg.path, msg.repo) + .await + .inspect_err(|e| tracing::error!("{:?}", e)); + } + } + } + + pub fn queue_pkg(&self, repo: i32, path: PathBuf) { + let _ = self.pkg_queue.0.send(PkgQueueMsg { path, repo }); + } + + pub async fn get_or_create_repo(&self, distro: &str, repo: &str) -> crate::Result { + let _guard = self.repos_lock.lock().await; + + let distro_id: Option = db::Distro::find() + .filter(db::distro::Column::Name.eq(distro)) + .select_only() + .column(db::distro::Column::Id) + .into_tuple() + .one(&self.conn) + .await?; + + let distro_id = if let Some(id) = distro_id { + id + } else { + let new_distro = db::distro::ActiveModel { + id: NotSet, + name: Set(distro.to_string()), + description: NotSet, + }; + + new_distro.insert(&self.conn).await?.id + }; + + let repo_id: Option = db::Repo::find() + .filter(db::repo::Column::Name.eq(repo)) + .select_only() + .column(db::repo::Column::Id) + .into_tuple() + .one(&self.conn) + .await?; + + let repo_id = if let Some(id) = repo_id { + id + } else { + let new_repo = db::repo::ActiveModel { + id: NotSet, + distro_id: Set(distro_id), + name: Set(repo.to_string()), + description: NotSet, + }; + + new_repo.insert(&self.conn).await?.id + }; + + Ok(repo_id) + } + + async fn add_pkg_from_path>( + &self, + path: P, + repo: i32, + ) -> crate::Result<()> { + let path_clone = path.as_ref().to_path_buf(); + let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)) + .await + .unwrap()?; + + // TODO prevent database from being updated but file failing to move to repo dir? 
+ let pkg = db::query::package::insert(&self.conn, repo, pkg).await?; + + let dest_path = self + .repos_dir + .join(repo.to_string()) + .join(pkg.id.to_string()); + tokio::fs::rename(path.as_ref(), dest_path).await?; + + tracing::info!( + "Added '{}-{}-{}' to repository {}", + pkg.name, + pkg.version, + pkg.arch, + repo, + ); + + Ok(()) + } +} diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index ebdc655..a0d4c15 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -1,5 +1,6 @@ mod archive; mod manager; +mod manager2; pub mod package; pub use manager::DistroMgr; From 6dff65f30d17f95448781af840922e07511bec12 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Thu, 13 Jun 2024 09:21:56 +0200 Subject: [PATCH 38/73] wip: cool config stuff --- Cargo.lock | 137 +++++++++++++++++++++++++++++++++++- server/Cargo.toml | 1 + server/rieterd.toml | 3 + server/src/cli.rs | 15 +++- server/src/config.rs | 45 ++++++++++++ server/src/error.rs | 10 ++- server/src/main.rs | 1 + server/src/repo/manager2.rs | 6 +- 8 files changed, 209 insertions(+), 9 deletions(-) create mode 100644 server/rieterd.toml create mode 100644 server/src/config.rs diff --git a/Cargo.lock b/Cargo.lock index d6e9e55..8520e63 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -174,6 +174,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "atomic" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d818003e740b63afc82337e3160717f4f63078720a810b7b903e70a5d1d2994" +dependencies = [ + "bytemuck", +] + [[package]] name = "atomic-waker" version = "1.1.2" @@ -380,6 +389,12 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "bytemuck" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78834c15cb5d5efe3452d58b1e8ba890dd62d21907f867f383358198e56ebca5" + [[package]] name = "byteorder" version = "1.5.0" @@ -630,6 +645,20 @@ version = "2.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +[[package]] +name = "figment" +version = "0.10.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cb01cd46b0cf372153850f4c6c272d9cbea2da513e07538405148f95bd789f3" +dependencies = [ + "atomic", + "pear", + "serde", + "toml", + "uncased", + "version_check", +] + [[package]] name = "flume" version = "0.11.0" @@ -1037,6 +1066,12 @@ dependencies = [ "syn 2.0.66", ] +[[package]] +name = "inlinable_string" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" + [[package]] name = "is_terminal_polyfill" version = "1.70.0" @@ -1386,6 +1421,29 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +[[package]] +name = "pear" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdeeaa00ce488657faba8ebf44ab9361f9365a97bd39ffb8a60663f57ff4b467" +dependencies = [ + "inlinable_string", + "pear_codegen", + "yansi", +] + +[[package]] +name = "pear_codegen" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bab5b985dc082b345f812b7df84e1bef27e7207b39e448439ba8bd69c93f147" +dependencies = [ + "proc-macro2", + "proc-macro2-diagnostics", + "quote", + "syn 2.0.66", +] + [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -1478,7 +1536,7 @@ version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" dependencies = [ - "toml_edit", + "toml_edit 0.21.1", ] [[package]] @@ -1514,6 +1572,19 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "proc-macro2-diagnostics" +version = "0.10.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.66", + "version_check", + "yansi", +] + [[package]] name = "ptr_meta" version = "0.1.4" @@ -1657,6 +1728,7 @@ dependencies = [ "axum", "chrono", "clap", + "figment", "futures", "http-body-util", "libarchive", @@ -2037,6 +2109,15 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_spanned" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" +dependencies = [ + "serde", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -2624,11 +2705,26 @@ dependencies = [ "tokio", ] +[[package]] +name = "toml" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit 0.22.14", +] + [[package]] name = "toml_datetime" version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" +dependencies = [ + "serde", +] [[package]] name = "toml_edit" @@ -2638,7 +2734,20 @@ checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ "indexmap", "toml_datetime", - "winnow", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.22.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow 0.6.13", ] [[package]] @@ -2763,6 +2872,15 @@ version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +[[package]] +name = "uncased" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697" +dependencies = [ + "version_check", +] + [[package]] name = "unicase" version = "2.7.0" @@ -3129,6 +3247,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "winnow" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59b5e5f6c299a3c7890b876a2a587f3115162487e704907d9b6cd29473052ba1" +dependencies = [ + "memchr", +] + [[package]] name = "wyz" version = "0.5.1" @@ -3138,6 +3265,12 @@ dependencies = [ "tap", ] +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + [[package]] name = "zerocopy" version = "0.7.34" diff --git a/server/Cargo.toml b/server/Cargo.toml index 75b5a09..b1fc688 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -10,6 +10,7 @@ authors = ["Jef Roosens"] axum = { version = "0.7.5", features = ["http2", "macros"] } chrono = { version = "0.4.26", features = ["serde"] } clap = { version = "4.3.12", features = ["env", "derive"] } +figment = { version = "0.10.19", features = ["env", "toml"] } futures = "0.3.28" http-body-util = "0.1.1" libarchive = { path = "../libarchive" } diff --git a/server/rieterd.toml b/server/rieterd.toml new file mode 100644 index 0000000..5e3b8b7 --- /dev/null +++ b/server/rieterd.toml @@ -0,0 +1,3 @@ +api_key = "test" +port = 8000 +log_level = "tower_http=debug,rieterd=debug" diff --git a/server/src/cli.rs b/server/src/cli.rs index 2df7f7c..c0867b9 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -1,9 +1,10 @@ use crate::{distro::MetaDistroMgr, Config, Global}; +use std::{io, path::PathBuf}; + use axum::Router; use clap::Parser; use 
sea_orm_migration::MigratorTrait; -use std::{io, path::PathBuf}; use tower_http::trace::TraceLayer; use tracing::debug; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; @@ -18,6 +19,14 @@ pub struct Cli { #[arg(env = "RIETER_API_KEY")] pub api_key: String, + #[arg( + short, + long, + env = "RIETER_CONFIG_FILE", + default_value = "./rieterd.toml" + )] + pub config_file: PathBuf, + /// Database connection URL; either sqlite:// or postgres://. Defaults to rieter.sqlite in the /// data directory #[arg(short, long, env = "RIETER_DATABASE_URL")] @@ -52,6 +61,10 @@ impl Cli { pub async fn run(&self) -> crate::Result<()> { self.init_tracing(); + tracing::debug!("{:?}", &self.config_file); + let new_config = crate::config::Config::figment(&self.config_file).extract()?; + tracing::debug!("{:?}", new_config); + let db_url = if let Some(url) = &self.database_url { url.clone() } else { diff --git a/server/src/config.rs b/server/src/config.rs new file mode 100644 index 0000000..99e61d3 --- /dev/null +++ b/server/src/config.rs @@ -0,0 +1,45 @@ +use std::path::{Path, PathBuf}; + +use figment::{ + providers::{Env, Format, Toml}, + Figment, +}; +use serde::Deserialize; + +#[derive(Deserialize)] +#[serde(rename_all = "lowercase")] +#[serde(tag = "type")] +pub enum FsConfig { + Local { data_dir: PathBuf }, +} + +#[derive(Deserialize)] +#[serde(rename_all = "lowercase")] +#[serde(tag = "type")] +pub enum DbConfig { + Sqlite { + path: PathBuf, + }, + Postgres { + host: String, + user: String, + password: String, + }, +} + +#[derive(Deserialize)] +pub struct Config { + api_key: String, + port: u16, + log_level: String, + fs: FsConfig, + db: DbConfig, +} + +impl Config { + pub fn figment(config_file: impl AsRef) -> Figment { + Figment::new() + .merge(Toml::file(config_file)) + .merge(Env::prefixed("RIETER_")) + } +} diff --git a/server/src/error.rs b/server/src/error.rs index 5c3e920..cc86445 100644 --- a/server/src/error.rs +++ b/server/src/error.rs @@ -14,6 +14,7 
@@ pub enum ServerError { Db(sea_orm::DbErr), Status(StatusCode), Archive(libarchive::error::ArchiveError), + Figment(figment::Error), } impl fmt::Display for ServerError { @@ -24,6 +25,7 @@ impl fmt::Display for ServerError { ServerError::Status(status) => write!(fmt, "{}", status), ServerError::Db(err) => write!(fmt, "{}", err), ServerError::Archive(err) => write!(fmt, "{}", err), + ServerError::Figment(err) => write!(fmt, "{}", err), } } } @@ -41,7 +43,7 @@ impl IntoResponse for ServerError { ServerError::Db(sea_orm::DbErr::RecordNotFound(_)) => { StatusCode::NOT_FOUND.into_response() } - ServerError::Db(_) | ServerError::Archive(_) => { + ServerError::Db(_) | ServerError::Archive(_) | ServerError::Figment(_) => { StatusCode::INTERNAL_SERVER_ERROR.into_response() } } @@ -83,3 +85,9 @@ impl From for ServerError { ServerError::Archive(err) } } + +impl From for ServerError { + fn from(err: figment::Error) -> Self { + ServerError::Figment(err) + } +} diff --git a/server/src/main.rs b/server/src/main.rs index d3cbdf0..5df9e18 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -1,5 +1,6 @@ mod api; mod cli; +mod config; pub mod db; mod distro; mod error; diff --git a/server/src/repo/manager2.rs b/server/src/repo/manager2.rs index d1e7d23..76f7ab0 100644 --- a/server/src/repo/manager2.rs +++ b/server/src/repo/manager2.rs @@ -116,11 +116,7 @@ impl RepoMgr { Ok(repo_id) } - async fn add_pkg_from_path>( - &self, - path: P, - repo: i32, - ) -> crate::Result<()> { + async fn add_pkg_from_path>(&self, path: P, repo: i32) -> crate::Result<()> { let path_clone = path.as_ref().to_path_buf(); let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)) .await From be2ce7bf4552c28e7f9380feb6c701b35e6ad586 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Thu, 13 Jun 2024 18:40:24 +0200 Subject: [PATCH 39/73] wip: mspc-based pkg queue --- server/rieterd.toml | 8 ++ server/src/cli.rs | 18 ++-- server/src/config.rs | 8 +- server/src/error.rs | 4 +- 
server/src/main.rs | 4 +- server/src/repo/manager2.rs | 160 +++++++++++++++++++++++++++++++++--- server/src/repo/mod.rs | 104 ++++++++++++----------- 7 files changed, 231 insertions(+), 75 deletions(-) diff --git a/server/rieterd.toml b/server/rieterd.toml index 5e3b8b7..781a055 100644 --- a/server/rieterd.toml +++ b/server/rieterd.toml @@ -1,3 +1,11 @@ api_key = "test" port = 8000 log_level = "tower_http=debug,rieterd=debug" + +[fs] +type = "locl" +data_dir = "./data" + +[db] +type = "sqlite" +db_dir = "./data" diff --git a/server/src/cli.rs b/server/src/cli.rs index c0867b9..550d7dc 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -1,6 +1,6 @@ use crate::{distro::MetaDistroMgr, Config, Global}; -use std::{io, path::PathBuf}; +use std::{io, path::PathBuf, sync::Arc}; use axum::Router; use clap::Parser; @@ -61,9 +61,11 @@ impl Cli { pub async fn run(&self) -> crate::Result<()> { self.init_tracing(); - tracing::debug!("{:?}", &self.config_file); - let new_config = crate::config::Config::figment(&self.config_file).extract()?; - tracing::debug!("{:?}", new_config); + //tracing::debug!("{:?}", &self.config_file); + //let new_config: crate::config::Config = crate::config::Config::figment(&self.config_file).extract().inspect_err( + // |e| tracing::error!("{}", e) + //)?; + //tracing::debug!("{:?}", new_config); let db_url = if let Some(url) = &self.database_url { url.clone() @@ -88,7 +90,13 @@ impl Cli { data_dir: self.data_dir.clone(), }; - let mgr = MetaDistroMgr::new(&self.data_dir.join("distros"), db.clone()).await?; + let mgr = Arc::new(crate::repo::RepoMgr::new(&self.data_dir.join("repos"), db.clone()).await?); + + for _ in 0..1 { + let clone = Arc::clone(&mgr); + + tokio::spawn(async move { clone.pkg_parse_task().await }); + } let global = Global { config, mgr, db }; diff --git a/server/src/config.rs b/server/src/config.rs index 99e61d3..a639362 100644 --- a/server/src/config.rs +++ b/server/src/config.rs @@ -6,19 +6,19 @@ use figment::{ }; use 
serde::Deserialize; -#[derive(Deserialize)] +#[derive(Deserialize, Debug)] #[serde(rename_all = "lowercase")] #[serde(tag = "type")] pub enum FsConfig { Local { data_dir: PathBuf }, } -#[derive(Deserialize)] +#[derive(Deserialize, Debug)] #[serde(rename_all = "lowercase")] #[serde(tag = "type")] pub enum DbConfig { Sqlite { - path: PathBuf, + db_dir: PathBuf, }, Postgres { host: String, @@ -27,7 +27,7 @@ pub enum DbConfig { }, } -#[derive(Deserialize)] +#[derive(Deserialize, Debug)] pub struct Config { api_key: String, port: u16, diff --git a/server/src/error.rs b/server/src/error.rs index cc86445..26bfc60 100644 --- a/server/src/error.rs +++ b/server/src/error.rs @@ -15,6 +15,7 @@ pub enum ServerError { Status(StatusCode), Archive(libarchive::error::ArchiveError), Figment(figment::Error), + Unit, } impl fmt::Display for ServerError { @@ -26,6 +27,7 @@ impl fmt::Display for ServerError { ServerError::Db(err) => write!(fmt, "{}", err), ServerError::Archive(err) => write!(fmt, "{}", err), ServerError::Figment(err) => write!(fmt, "{}", err), + ServerError::Unit => Ok(()), } } } @@ -43,7 +45,7 @@ impl IntoResponse for ServerError { ServerError::Db(sea_orm::DbErr::RecordNotFound(_)) => { StatusCode::NOT_FOUND.into_response() } - ServerError::Db(_) | ServerError::Archive(_) | ServerError::Figment(_) => { + ServerError::Db(_) | ServerError::Archive(_) | ServerError::Figment(_) | ServerError::Unit => { StatusCode::INTERNAL_SERVER_ERROR.into_response() } } diff --git a/server/src/main.rs b/server/src/main.rs index 5df9e18..f1e70f9 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -10,7 +10,7 @@ pub use error::{Result, ServerError}; use repo::DistroMgr; use clap::Parser; -use std::path::PathBuf; +use std::{path::PathBuf, sync::Arc}; #[derive(Clone)] pub struct Config { @@ -20,7 +20,7 @@ pub struct Config { #[derive(Clone)] pub struct Global { config: Config, - mgr: distro::MetaDistroMgr, + mgr: Arc, db: sea_orm::DbConn, } diff --git a/server/src/repo/manager2.rs 
b/server/src/repo/manager2.rs index 76f7ab0..070b822 100644 --- a/server/src/repo/manager2.rs +++ b/server/src/repo/manager2.rs @@ -1,16 +1,21 @@ use super::{archive, package}; use crate::db; -use std::path::{Path, PathBuf}; +use std::{path::{Path, PathBuf}, sync::{atomic::{Ordering, AtomicU32}, Arc}, collections::HashMap}; +use futures::StreamExt; use sea_orm::{ ActiveModelTrait, ColumnTrait, Condition, DbConn, EntityTrait, JoinType, ModelTrait, NotSet, QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, }; +use sea_query::{Expr, Query}; use tokio::sync::{ mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, - Mutex, + Mutex, RwLock, }; +use uuid::Uuid; + +pub const ANY_ARCH: &'static str = "any"; struct PkgQueueMsg { repo: i32, @@ -26,7 +31,7 @@ pub struct RepoMgr { UnboundedSender, Mutex>, ), - repos_lock: Mutex<()>, + repos: RwLock>, } impl RepoMgr { @@ -37,22 +42,137 @@ impl RepoMgr { let (tx, rx) = unbounded_channel(); + let mut repos = HashMap::new(); + let repo_ids: Vec = db::Repo::find().select_only().column(db::repo::Column::Id).into_tuple().all(&conn).await?; + + for id in repo_ids { + repos.insert(id, AtomicU32::new(0)); + } + Ok(Self { repos_dir: repos_dir.as_ref().to_path_buf(), conn, pkg_queue: (tx, Mutex::new(rx)), - repos_lock: Mutex::new(()), + repos: RwLock::new(repos) }) } + /// Generate archive databases for all known architectures in the repository, including the + /// "any" architecture. + pub async fn sync_repo(&self, repo_id: i32) -> crate::Result<()> { + let mut archs = db::Package::find() + .filter(db::package::Column::RepoId.eq(repo_id)) + .select_only() + .column(db::package::Column::Arch) + .distinct() + .into_tuple::() + .stream(&self.conn) + .await?; + + while let Some(arch) = archs.next().await.transpose()? { + self.generate_archives(repo_id, &arch).await?; + } + + Ok(()) + } + + /// Generate the archive databases for the given repository and architecture. 
+ async fn generate_archives(&self, repo: i32, arch: &str) -> crate::Result<()> { + let [tmp_ar_db_path, tmp_ar_files_path, files_tmp_file_path, desc_tmp_file_path] = + self.random_file_paths(); + let ar_db = archive::RepoArchiveWriter::open(&tmp_ar_db_path).await?; + let ar_files = archive::RepoArchiveWriter::open(&tmp_ar_files_path).await?; + + // Query all packages in the repo that have the given architecture or the "any" + // architecture + let mut pkgs = db::Package::find() + .filter(db::package::Column::RepoId.eq(repo)) + .filter(db::package::Column::Arch.is_in([arch, ANY_ARCH])) + .filter( + db::package::Column::Id.in_subquery( + Query::select() + .expr(db::package::Column::Id.max()) + .from(db::package::Entity) + .group_by_columns([db::package::Column::Arch, db::package::Column::Name]) + .to_owned(), + ), + ) + .stream(&self.conn) + .await?; + + let mut commited_ids: Vec = Vec::new(); + + while let Some(pkg) = pkgs.next().await.transpose()? { + commited_ids.push(pkg.id); + + let mut files_tmp_file = tokio::fs::File::create(&files_tmp_file_path).await?; + let mut desc_tmp_file = tokio::fs::File::create(&desc_tmp_file_path).await?; + + package::write_files(&self.conn, &mut files_tmp_file, &pkg).await?; + package::write_desc(&self.conn, &mut desc_tmp_file, &pkg).await?; + + let full_name = format!("{}-{}", pkg.name, pkg.version); + + ar_db + .add_entry(&full_name, &desc_tmp_file_path, true) + .await?; + ar_files + .add_entry(&full_name, &desc_tmp_file_path, true) + .await?; + ar_files + .add_entry(&full_name, &files_tmp_file_path, false) + .await?; + } + + // Cleanup + ar_db.close().await?; + ar_files.close().await?; + + let repo_dir = self.repos_dir.join(repo.to_string()); + + // Move the db archives to their respective places + tokio::fs::rename(tmp_ar_db_path, repo_dir.join(format!("{}.db.tar.gz", arch))).await?; + tokio::fs::rename( + tmp_ar_files_path, + repo_dir.join(format!("{}.files.tar.gz", arch)), + ) + .await?; + + // Only after we have successfully 
written everything to disk do we update the database. + // This order ensures any failure can be recovered, as the database is our single source of + // truth. + db::Package::update_many() + .col_expr( + db::package::Column::State, + Expr::value(db::PackageState::Committed), + ) + .filter(db::package::Column::Id.is_in(commited_ids)) + .exec(&self.conn) + .await?; + + // If this fails there's no point in failing the function + if there were no packages in + // the repo, this fails anyway because the temp file doesn't exist + let _ = tokio::fs::remove_file(desc_tmp_file_path).await; + let _ = tokio::fs::remove_file(files_tmp_file_path).await; + + tracing::info!( + "Package archives generated for repo {} ('{}')", + repo, + arch + ); + + Ok(()) + } + pub async fn pkg_parse_task(&self) { loop { // Receive the next message and immediately drop the mutex afterwards. As long as the // quue is empty, this will lock the mutex. This is okay, as the mutex will be unlocked // as soon as a message is received, so another worker can pick up the mutex. 
- let mut recv = self.pkg_queue.1.lock().await; - let msg = recv.recv().await; - drop(recv); + let msg = { + let mut recv = self.pkg_queue.1.lock().await; + recv.recv().await + }; if let Some(msg) = msg { // TODO better handle this error (retry if failure wasn't because the package is @@ -61,16 +181,25 @@ impl RepoMgr { .add_pkg_from_path(msg.path, msg.repo) .await .inspect_err(|e| tracing::error!("{:?}", e)); + + let old = self.repos.read().await.get(&msg.repo).map(|n| n.fetch_sub(1, Ordering::SeqCst) ); + + // Every time the queue for a repo becomes empty, we run a sync job + if old == Some(1) { + // TODO error handling + let _ = self.sync_repo(msg.repo).await; + } } } } - pub fn queue_pkg(&self, repo: i32, path: PathBuf) { + pub async fn queue_pkg(&self, repo: i32, path: PathBuf) { let _ = self.pkg_queue.0.send(PkgQueueMsg { path, repo }); + self.repos.read().await.get(&repo).inspect(|n| { n.fetch_add(1, Ordering::SeqCst); }); } pub async fn get_or_create_repo(&self, distro: &str, repo: &str) -> crate::Result { - let _guard = self.repos_lock.lock().await; + let mut repos = self.repos.write().await; let distro_id: Option = db::Distro::find() .filter(db::distro::Column::Name.eq(distro)) @@ -109,8 +238,12 @@ impl RepoMgr { name: Set(repo.to_string()), description: NotSet, }; + let id = new_repo.insert(&self.conn).await?.id; - new_repo.insert(&self.conn).await?.id + tokio::fs::create_dir(self.repos_dir.join(id.to_string())).await?; + repos.insert(id, AtomicU32::new(0)); + + id }; Ok(repo_id) @@ -141,4 +274,11 @@ impl RepoMgr { Ok(()) } + + pub fn random_file_paths(&self) -> [PathBuf; C] { + std::array::from_fn(|_| { + let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); + self.repos_dir.join(uuid.to_string()) + }) + } } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index a0d4c15..c5549ef 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -4,6 +4,7 @@ mod manager2; pub mod package; pub use manager::DistroMgr; +pub use 
manager2::RepoMgr; use axum::{ body::Body, @@ -75,19 +76,13 @@ async fn post_package_archive( body: Body, ) -> crate::Result<()> { let mut body = StreamReader::new(body.into_data_stream().map_err(std::io::Error::other)); - let mgr = global.mgr.get_or_create_mgr(&distro).await?; - let [tmp_path] = mgr.random_file_paths(); + let repo = global.mgr.get_or_create_repo(&distro, &repo).await?; + let [tmp_path] = global.mgr.random_file_paths(); let mut tmp_file = tokio::fs::File::create(&tmp_path).await?; tokio::io::copy(&mut body, &mut tmp_file).await?; - tokio::spawn(async move { - if let Ok((repo, _, _, _)) = mgr.add_pkg_from_path(tmp_path, &repo).await { - tracing::debug!("starting schedule_sync"); - let _ = mgr.schedule_sync(repo).await; - tracing::debug!("finished schedule_sync"); - }; - }); + global.mgr.queue_pkg(repo, tmp_path).await; //let (name, version, arch) = mgr.add_pkg_from_path(&mut body, &repo).await?; // @@ -108,60 +103,63 @@ async fn delete_repo( State(global): State, Path((distro, repo)): Path<(String, String)>, ) -> crate::Result { - if let Some(mgr) = global.mgr.get_mgr(&distro).await { - let repo_removed = mgr.remove_repo(&repo).await?; - - if repo_removed { - tracing::info!("Removed repository '{}'", repo); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } - } else { - Ok(StatusCode::NOT_FOUND) - } + Ok(StatusCode::NOT_FOUND) + //if let Some(mgr) = global.mgr.get_mgr(&distro).await { + // let repo_removed = mgr.remove_repo(&repo).await?; + // + // if repo_removed { + // tracing::info!("Removed repository '{}'", repo); + // + // Ok(StatusCode::OK) + // } else { + // Ok(StatusCode::NOT_FOUND) + // } + //} else { + // Ok(StatusCode::NOT_FOUND) + //} } async fn delete_arch_repo( State(global): State, Path((distro, repo, arch)): Path<(String, String, String)>, ) -> crate::Result { - if let Some(mgr) = global.mgr.get_mgr(&distro).await { - let repo_removed = mgr.remove_repo_arch(&repo, &arch).await?; - - if repo_removed { - 
tracing::info!("Removed arch '{}' from repository '{}'", arch, repo); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } - } else { - Ok(StatusCode::NOT_FOUND) - } + Ok(StatusCode::NOT_FOUND) + //if let Some(mgr) = global.mgr.get_mgr(&distro).await { + // let repo_removed = mgr.remove_repo_arch(&repo, &arch).await?; + // + // if repo_removed { + // tracing::info!("Removed arch '{}' from repository '{}'", arch, repo); + // + // Ok(StatusCode::OK) + // } else { + // Ok(StatusCode::NOT_FOUND) + // } + //} else { + // Ok(StatusCode::NOT_FOUND) + //} } async fn delete_package( State(global): State, Path((distro, repo, arch, pkg_name)): Path<(String, String, String, String)>, ) -> crate::Result { - if let Some(mgr) = global.mgr.get_mgr(&distro).await { - let pkg_removed = mgr.remove_pkg(&repo, &arch, &pkg_name).await?; - - if pkg_removed { - tracing::info!( - "Removed package '{}' ({}) from repository '{}'", - pkg_name, - arch, - repo - ); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } - } else { - Ok(StatusCode::NOT_FOUND) - } + Ok(StatusCode::NOT_FOUND) + //if let Some(mgr) = global.mgr.get_mgr(&distro).await { + // let pkg_removed = mgr.remove_pkg(&repo, &arch, &pkg_name).await?; + // + // if pkg_removed { + // tracing::info!( + // "Removed package '{}' ({}) from repository '{}'", + // pkg_name, + // arch, + // repo + // ); + // + // Ok(StatusCode::OK) + // } else { + // Ok(StatusCode::NOT_FOUND) + // } + //} else { + // Ok(StatusCode::NOT_FOUND) + //} } From a408c14ab1effdd2647e4eca75632330a006a504 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Fri, 14 Jun 2024 10:54:45 +0200 Subject: [PATCH 40/73] feat: write stale packages query --- server/src/cli.rs | 3 +- server/src/db/query/package.rs | 70 +++++++++++++++++++++++++++ server/src/error.rs | 7 +-- server/src/repo/manager2.rs | 86 ++++++++++++++++++++++++++-------- 4 files changed, 143 insertions(+), 23 deletions(-) diff --git a/server/src/cli.rs b/server/src/cli.rs index 
550d7dc..68658ae 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -90,7 +90,8 @@ impl Cli { data_dir: self.data_dir.clone(), }; - let mgr = Arc::new(crate::repo::RepoMgr::new(&self.data_dir.join("repos"), db.clone()).await?); + let mgr = + Arc::new(crate::repo::RepoMgr::new(&self.data_dir.join("repos"), db.clone()).await?); for _ in 0..1 { let clone = Arc::clone(&mgr); diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index abbfb9c..5e400ea 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -1,6 +1,8 @@ use crate::db::{self, *}; +use futures::Stream; use sea_orm::{sea_query::IntoCondition, *}; +use sea_query::{Alias, Expr, Query}; use serde::Deserialize; #[derive(Deserialize)] @@ -213,3 +215,71 @@ pub async fn full(conn: &DbConn, id: i32) -> Result> { Ok(None) } } + +#[derive(FromQueryResult)] +pub struct PkgToRemove { + repo_id: i32, + id: i32, +} + +pub fn to_be_removed_query(conn: &DbConn) -> SelectorRaw> { + let mut max_id_query = Query::select(); + max_id_query + .from(db::package::Entity) + .columns([ + db::package::Column::RepoId, + db::package::Column::Arch, + db::package::Column::Name, + ]) + .expr_as(db::package::Column::Id.max(), Alias::new("max_id")) + .group_by_columns([ + db::package::Column::RepoId, + db::package::Column::Arch, + db::package::Column::Name, + ]) + .cond_where( + Condition::all().add(db::package::Column::State.eq(db::PackageState::Committed)), + ); + + let (p1, p2) = (Alias::new("p1"), Alias::new("p2")); + let mut query = Query::select(); + query + .from_as(db::package::Entity, p1.clone()) + .columns([ + (p1.clone(), db::package::Column::RepoId), + (p1.clone(), db::package::Column::Id), + ]) + .join_subquery( + JoinType::InnerJoin, + max_id_query, + p2.clone(), + Condition::all() + .add( + Expr::col((p1.clone(), db::package::Column::RepoId)) + .eq(Expr::col((p2.clone(), db::package::Column::RepoId))), + ) + .add( + Expr::col((p1.clone(), db::package::Column::Arch)) + 
.eq(Expr::col((p2.clone(), db::package::Column::Arch))), + ) + .add( + Expr::col((p1.clone(), db::package::Column::Name)) + .eq(Expr::col((p2.clone(), db::package::Column::Name))), + ), + ) + .cond_where( + Condition::any() + .add( + Expr::col((p1.clone(), db::package::Column::Id)) + .lt(Expr::col((p2.clone(), Alias::new("max_id")))), + ) + .add( + Expr::col((p1.clone(), db::package::Column::Id)) + .eq(db::PackageState::PendingDeletion), + ), + ); + let builder = conn.get_database_backend(); + let sql = builder.build(&query); + + PkgToRemove::find_by_statement(sql) +} diff --git a/server/src/error.rs b/server/src/error.rs index 26bfc60..e0626d4 100644 --- a/server/src/error.rs +++ b/server/src/error.rs @@ -45,9 +45,10 @@ impl IntoResponse for ServerError { ServerError::Db(sea_orm::DbErr::RecordNotFound(_)) => { StatusCode::NOT_FOUND.into_response() } - ServerError::Db(_) | ServerError::Archive(_) | ServerError::Figment(_) | ServerError::Unit => { - StatusCode::INTERNAL_SERVER_ERROR.into_response() - } + ServerError::Db(_) + | ServerError::Archive(_) + | ServerError::Figment(_) + | ServerError::Unit => StatusCode::INTERNAL_SERVER_ERROR.into_response(), } } } diff --git a/server/src/repo/manager2.rs b/server/src/repo/manager2.rs index 070b822..b0df209 100644 --- a/server/src/repo/manager2.rs +++ b/server/src/repo/manager2.rs @@ -1,14 +1,21 @@ use super::{archive, package}; use crate::db; -use std::{path::{Path, PathBuf}, sync::{atomic::{Ordering, AtomicU32}, Arc}, collections::HashMap}; +use std::{ + collections::HashMap, + path::{Path, PathBuf}, + sync::{ + atomic::{AtomicU32, Ordering}, + Arc, + }, +}; use futures::StreamExt; use sea_orm::{ - ActiveModelTrait, ColumnTrait, Condition, DbConn, EntityTrait, JoinType, ModelTrait, NotSet, - QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, + ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DbConn, EntityTrait, JoinType, + ModelTrait, NotSet, QueryFilter, QuerySelect, Related, 
RelationTrait, Set, TransactionTrait, }; -use sea_query::{Expr, Query}; +use sea_query::{Alias, Expr, Query}; use tokio::sync::{ mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, Mutex, RwLock, @@ -31,7 +38,7 @@ pub struct RepoMgr { UnboundedSender, Mutex>, ), - repos: RwLock>, + repos: RwLock>)>>, } impl RepoMgr { @@ -43,25 +50,44 @@ impl RepoMgr { let (tx, rx) = unbounded_channel(); let mut repos = HashMap::new(); - let repo_ids: Vec = db::Repo::find().select_only().column(db::repo::Column::Id).into_tuple().all(&conn).await?; + let repo_ids: Vec = db::Repo::find() + .select_only() + .column(db::repo::Column::Id) + .into_tuple() + .all(&conn) + .await?; for id in repo_ids { - repos.insert(id, AtomicU32::new(0)); + repos.insert(id, Default::default()); } Ok(Self { repos_dir: repos_dir.as_ref().to_path_buf(), conn, pkg_queue: (tx, Mutex::new(rx)), - repos: RwLock::new(repos) + repos: RwLock::new(repos), }) } /// Generate archive databases for all known architectures in the repository, including the /// "any" architecture. - pub async fn sync_repo(&self, repo_id: i32) -> crate::Result<()> { + pub async fn sync_repo(&self, repo: i32) -> crate::Result<()> { + let lock = self + .repos + .read() + .await + .get(&repo) + .map(|(_, lock)| Arc::clone(lock)); + + if lock.is_none() { + return Ok(()); + } + + let lock = lock.unwrap(); + let _guard = lock.lock().await; + let mut archs = db::Package::find() - .filter(db::package::Column::RepoId.eq(repo_id)) + .filter(db::package::Column::RepoId.eq(repo)) .select_only() .column(db::package::Column::Arch) .distinct() @@ -70,12 +96,27 @@ impl RepoMgr { .await?; while let Some(arch) = archs.next().await.transpose()? 
{ - self.generate_archives(repo_id, &arch).await?; + self.generate_archives(repo, &arch).await?; } Ok(()) } + /// Clean any remaining old package files from the database and file system + pub async fn clean(&self) -> crate::Result<()> { + let mut pkgs = db::query::package::to_be_removed_query(&self.conn) + .stream(&self.conn) + .await?; + + while let Some(pkg) = pkgs.next().await.transpose()? { + // TODO remove package from file system and database + } + + // TODO log indicating how many packages were cleaned + + Ok(()) + } + /// Generate the archive databases for the given repository and architecture. async fn generate_archives(&self, repo: i32, arch: &str) -> crate::Result<()> { let [tmp_ar_db_path, tmp_ar_files_path, files_tmp_file_path, desc_tmp_file_path] = @@ -155,11 +196,7 @@ impl RepoMgr { let _ = tokio::fs::remove_file(desc_tmp_file_path).await; let _ = tokio::fs::remove_file(files_tmp_file_path).await; - tracing::info!( - "Package archives generated for repo {} ('{}')", - repo, - arch - ); + tracing::info!("Package archives generated for repo {} ('{}')", repo, arch); Ok(()) } @@ -182,12 +219,21 @@ impl RepoMgr { .await .inspect_err(|e| tracing::error!("{:?}", e)); - let old = self.repos.read().await.get(&msg.repo).map(|n| n.fetch_sub(1, Ordering::SeqCst) ); + let old = self + .repos + .read() + .await + .get(&msg.repo) + .map(|n| n.0.fetch_sub(1, Ordering::SeqCst)); // Every time the queue for a repo becomes empty, we run a sync job if old == Some(1) { // TODO error handling let _ = self.sync_repo(msg.repo).await; + + // TODO move this so that we only clean if entire queue is empty, not just + // queue for specific repo + let _ = self.clean().await; } } } @@ -195,7 +241,9 @@ impl RepoMgr { pub async fn queue_pkg(&self, repo: i32, path: PathBuf) { let _ = self.pkg_queue.0.send(PkgQueueMsg { path, repo }); - self.repos.read().await.get(&repo).inspect(|n| { n.fetch_add(1, Ordering::SeqCst); }); + self.repos.read().await.get(&repo).inspect(|n| { + 
n.0.fetch_add(1, Ordering::SeqCst); + }); } pub async fn get_or_create_repo(&self, distro: &str, repo: &str) -> crate::Result { @@ -241,7 +289,7 @@ impl RepoMgr { let id = new_repo.insert(&self.conn).await?.id; tokio::fs::create_dir(self.repos_dir.join(id.to_string())).await?; - repos.insert(id, AtomicU32::new(0)); + repos.insert(id, Default::default()); id }; From 67b4640e569e477196e7e5b2eddf18425e2e0a7a Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sat, 15 Jun 2024 18:12:14 +0200 Subject: [PATCH 41/73] feat: add package cleaning --- server/src/db/query/package.rs | 47 +++++++++++++++++++++++++++------- server/src/repo/manager2.rs | 34 +++++++++++++++++++----- 2 files changed, 66 insertions(+), 15 deletions(-) diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index 5e400ea..0115f5b 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -2,7 +2,7 @@ use crate::db::{self, *}; use futures::Stream; use sea_orm::{sea_query::IntoCondition, *}; -use sea_query::{Alias, Expr, Query}; +use sea_query::{Alias, Expr, Query, SelectStatement}; use serde::Deserialize; #[derive(Deserialize)] @@ -218,11 +218,15 @@ pub async fn full(conn: &DbConn, id: i32) -> Result> { #[derive(FromQueryResult)] pub struct PkgToRemove { - repo_id: i32, - id: i32, + pub repo_id: i32, + pub id: i32, } -pub fn to_be_removed_query(conn: &DbConn) -> SelectorRaw> { +fn stale_pkgs_query(include_repo: bool) -> SelectStatement { + // In each repository, only one version of a package can exist for any given arch. Because ids + // are monotonically increasing, we know that the row that represents the actual package + // currently in the repository is the row with the largest id whose state is "committed". This + // query finds this id for each (repo, arch, name) tuple. 
let mut max_id_query = Query::select(); max_id_query .from(db::package::Entity) @@ -243,12 +247,23 @@ pub fn to_be_removed_query(conn: &DbConn) -> SelectorRaw SelectorRaw SelectorRaw> { + let query = stale_pkgs_query(true); let builder = conn.get_database_backend(); let sql = builder.build(&query); PkgToRemove::find_by_statement(sql) } + +pub async fn delete_stale_pkgs(conn: &DbConn, max_id: i32) -> crate::Result<()> { + Ok(db::Package::delete_many() + .filter(db::package::Column::Id.lte(max_id)) + .filter(db::package::Column::Id.in_subquery(stale_pkgs_query(false))) + .exec(conn) + .await + .map(|_| ())?) +} diff --git a/server/src/repo/manager2.rs b/server/src/repo/manager2.rs index b0df209..67d36eb 100644 --- a/server/src/repo/manager2.rs +++ b/server/src/repo/manager2.rs @@ -1,5 +1,5 @@ use super::{archive, package}; -use crate::db; +use crate::db::{self, query::package::delete_stale_pkgs}; use std::{ collections::HashMap, @@ -103,16 +103,38 @@ impl RepoMgr { } /// Clean any remaining old package files from the database and file system - pub async fn clean(&self) -> crate::Result<()> { - let mut pkgs = db::query::package::to_be_removed_query(&self.conn) + pub async fn remove_stale_pkgs(&self) -> crate::Result<()> { + let mut pkgs = db::query::package::stale_pkgs(&self.conn) .stream(&self.conn) .await?; + let mut max_id = -1; + let mut removed_pkgs = 0; + + // TODO track largest ID seen, then perform similar query to above except we remove the + // matched IDs, but only if they're smaller than or equal to the largest seen ID so we + // don't remove newly added packages while let Some(pkg) = pkgs.next().await.transpose()? 
{ - // TODO remove package from file system and database + // Failing to remove the package file isn't the biggest problem + let _ = tokio::fs::remove_file( + self.repos_dir + .join(pkg.repo_id.to_string()) + .join(pkg.id.to_string()), + ) + .await; + + if pkg.id > max_id { + max_id = pkg.id; + } + + removed_pkgs += 1; } - // TODO log indicating how many packages were cleaned + if removed_pkgs > 0 { + db::query::package::delete_stale_pkgs(&self.conn, max_id).await?; + } + + tracing::info!("Removed {removed_pkgs} stale package(s)"); Ok(()) } @@ -233,7 +255,7 @@ impl RepoMgr { // TODO move this so that we only clean if entire queue is empty, not just // queue for specific repo - let _ = self.clean().await; + let _ = self.remove_stale_pkgs().await; } } } From 5d7832c43aa40ef8a6f9c239b1b6ec93beec88bf Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sat, 15 Jun 2024 20:24:58 +0200 Subject: [PATCH 42/73] fix: fixed get_file route --- server/src/cli.rs | 2 +- server/src/repo/manager2.rs | 13 ++++++++++ server/src/repo/mod.rs | 50 ++++++++++++++++--------------------- 3 files changed, 35 insertions(+), 30 deletions(-) diff --git a/server/src/cli.rs b/server/src/cli.rs index 68658ae..1ceaf27 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -44,7 +44,7 @@ pub struct Cli { #[arg( long, value_name = "LOG_LEVEL", - default_value = "tower_http=debug,rieterd=debug", + default_value = "tower_http=debug,rieterd=debug,sea_orm=debug", env = "RIETER_LOG" )] pub log: String, diff --git a/server/src/repo/manager2.rs b/server/src/repo/manager2.rs index 67d36eb..9a10e0d 100644 --- a/server/src/repo/manager2.rs +++ b/server/src/repo/manager2.rs @@ -268,6 +268,19 @@ impl RepoMgr { }); } + pub async fn get_repo(&self, distro: &str, repo: &str) -> crate::Result> { + Ok(db::Repo::find() + .find_also_related(db::Distro) + .filter( + Condition::all() + .add(db::repo::Column::Name.eq(repo)) + .add(db::distro::Column::Name.eq(distro)), + ) + .one(&self.conn) + .await + .map(|res| 
res.map(|(repo, _)| repo.id))?) + } + pub async fn get_or_create_repo(&self, distro: &str, repo: &str) -> crate::Result { let mut repos = self.repos.write().await; diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index c5549ef..bb592c9 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -49,25 +49,29 @@ async fn get_file( Path((distro, repo, arch, file_name)): Path<(String, String, String, String)>, req: Request, ) -> crate::Result { - let repo_dir = global - .config - .data_dir - .join("distros") - .join(&distro) - .join(&repo); + if let Some(repo_id) = global.mgr.get_repo(&distro, &repo).await? { + let repo_dir = global + .config + .data_dir + .join("repos") + .join(repo_id.to_string()); - let file_name = - if file_name == format!("{}.db", repo) || file_name == format!("{}.db.tar.gz", repo) { - format!("{}.db.tar.gz", arch) - } else if file_name == format!("{}.files", repo) - || file_name == format!("{}.files.tar.gz", repo) - { - format!("{}.files.tar.gz", arch) - } else { - file_name - }; + let file_name = + if file_name == format!("{}.db", repo) || file_name == format!("{}.db.tar.gz", repo) { + format!("{}.db.tar.gz", arch) + } else if file_name == format!("{}.files", repo) + || file_name == format!("{}.files.tar.gz", repo) + { + format!("{}.files.tar.gz", arch) + } else { + file_name + }; - Ok(ServeFile::new(repo_dir.join(file_name)).oneshot(req).await) + let path = repo_dir.join(file_name); + Ok(ServeFile::new(path).oneshot(req).await) + } else { + Err(StatusCode::NOT_FOUND.into()) + } } async fn post_package_archive( @@ -84,18 +88,6 @@ async fn post_package_archive( global.mgr.queue_pkg(repo, tmp_path).await; - //let (name, version, arch) = mgr.add_pkg_from_path(&mut body, &repo).await?; - // - //tracing::info!( - // "Added '{}-{}' to repository '{}' ({})", - // name, - // version, - // repo, - // arch - //); - - //tokio::spawn(async move { mgr.sync_repo(&repo).await }); - Ok(()) } From 27afb3496d6c1c1df4165426bfe2e8fe999b9a66 Mon 
Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sat, 15 Jun 2024 21:59:58 +0200 Subject: [PATCH 43/73] feat: start reimplementing package removals; some fixes --- server/src/db/query/package.rs | 72 +++++++++++++++++++++++++++++++++- server/src/main.rs | 2 + server/src/repo/manager2.rs | 51 ++++++++++++++++-------- server/src/repo/mod.rs | 23 +++++------ 4 files changed, 116 insertions(+), 32 deletions(-) diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index 0115f5b..8e9c17b 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -2,7 +2,7 @@ use crate::db::{self, *}; use futures::Stream; use sea_orm::{sea_query::IntoCondition, *}; -use sea_query::{Alias, Expr, Query, SelectStatement}; +use sea_query::{Alias, Asterisk, Expr, Query, SelectStatement}; use serde::Deserialize; #[derive(Deserialize)] @@ -222,6 +222,76 @@ pub struct PkgToRemove { pub id: i32, } +fn max_pkg_ids_query() -> SelectStatement { + Query::select() + .from(db::package::Entity) + .columns([ + db::package::Column::RepoId, + db::package::Column::Arch, + db::package::Column::Name, + ]) + .expr_as(db::package::Column::Id.max(), Alias::new("max_id")) + .group_by_columns([ + db::package::Column::RepoId, + db::package::Column::Arch, + db::package::Column::Name, + ]) + .cond_where( + Condition::all().add(db::package::Column::State.eq(db::PackageState::Committed)), + ) + .to_owned() +} + +pub fn pkgs_to_sync( + conn: &DbConn, + repo: i32, + arch: &str, +) -> SelectorRaw> { + let max_id_query = Query::select() + .columns([ + db::package::Column::RepoId, + db::package::Column::Arch, + db::package::Column::Name, + ]) + .expr_as(db::package::Column::Id.max(), Alias::new("max_id")) + .from(db::package::Entity) + .group_by_columns([ + db::package::Column::RepoId, + db::package::Column::Arch, + db::package::Column::Name, + ]) + .to_owned(); + + let (p1, p2) = (Alias::new("p1"), Alias::new("p2")); + let query = Query::select() + .column((p1.clone(), 
Asterisk)) + .from_as(db::package::Entity, p1.clone()) + .join_subquery( + JoinType::InnerJoin, + max_id_query, + p2.clone(), + Expr::col((p1.clone(), db::package::Column::Id)) + .eq(Expr::col((p2.clone(), Alias::new("max_id")))), + ) + .cond_where( + Condition::all() + .add(Expr::col((p1.clone(), db::package::Column::RepoId)).eq(repo)) + .add( + Expr::col((p1.clone(), db::package::Column::State)) + .ne(db::PackageState::PendingDeletion), + ) + .add( + Expr::col((p1.clone(), db::package::Column::Arch)) + .is_in([arch, crate::ANY_ARCH]), + ), + ) + .to_owned(); + let builder = conn.get_database_backend(); + let sql = builder.build(&query); + + db::Package::find().from_raw_sql(sql) +} + fn stale_pkgs_query(include_repo: bool) -> SelectStatement { // In each repository, only one version of a package can exist for any given arch. Because ids // are monotonically increasing, we know that the row that represents the actual package diff --git a/server/src/main.rs b/server/src/main.rs index f1e70f9..c3237cf 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -12,6 +12,8 @@ use repo::DistroMgr; use clap::Parser; use std::{path::PathBuf, sync::Arc}; +pub const ANY_ARCH: &'static str = "any"; + #[derive(Clone)] pub struct Config { data_dir: PathBuf, diff --git a/server/src/repo/manager2.rs b/server/src/repo/manager2.rs index 9a10e0d..f91ab69 100644 --- a/server/src/repo/manager2.rs +++ b/server/src/repo/manager2.rs @@ -22,8 +22,6 @@ use tokio::sync::{ }; use uuid::Uuid; -pub const ANY_ARCH: &'static str = "any"; - struct PkgQueueMsg { repo: i32, path: PathBuf, @@ -108,12 +106,11 @@ impl RepoMgr { .stream(&self.conn) .await?; + // Ids are monotonically increasing, so the max id suffices to know which packages to + // remove later let mut max_id = -1; let mut removed_pkgs = 0; - // TODO track largest ID seen, then perform similar query to above except we remove the - // matched IDs, but only if they're smaller than or equal to the largest seen ID so we - // don't remove 
newly added packages while let Some(pkg) = pkgs.next().await.transpose()? { // Failing to remove the package file isn't the biggest problem let _ = tokio::fs::remove_file( @@ -148,18 +145,7 @@ impl RepoMgr { // Query all packages in the repo that have the given architecture or the "any" // architecture - let mut pkgs = db::Package::find() - .filter(db::package::Column::RepoId.eq(repo)) - .filter(db::package::Column::Arch.is_in([arch, ANY_ARCH])) - .filter( - db::package::Column::Id.in_subquery( - Query::select() - .expr(db::package::Column::Id.max()) - .from(db::package::Entity) - .group_by_columns([db::package::Column::Arch, db::package::Column::Name]) - .to_owned(), - ), - ) + let mut pkgs = db::query::package::pkgs_to_sync(&self.conn, repo, arch) .stream(&self.conn) .await?; @@ -358,6 +344,37 @@ impl RepoMgr { Ok(()) } + pub async fn remove_repo(&self, repo: i32) -> crate::Result<()> { + self.repos.write().await.remove(&repo); + db::Repo::delete_by_id(repo).exec(&self.conn).await?; + let _ = tokio::fs::remove_dir_all(self.repos_dir.join(repo.to_string())).await; + + Ok(()) + } + + /// Remove all packages in the repository that have a given arch. This method marks all + /// packages with the given architecture as "pending deletion", before performing a manual sync + /// & removal of stale packages. 
+ pub async fn remove_repo_arch(&self, repo: i32, arch: &str) -> crate::Result<()> { + db::Package::update_many() + .col_expr( + db::package::Column::State, + Expr::value(db::PackageState::PendingDeletion), + ) + .filter( + Condition::all() + .add(db::package::Column::RepoId.eq(repo)) + .add(db::package::Column::Arch.eq(arch)), + ) + .exec(&self.conn) + .await?; + + self.sync_repo(repo).await?; + self.remove_stale_pkgs().await?; + + Ok(()) + } + pub fn random_file_paths(&self) -> [PathBuf; C] { std::array::from_fn(|_| { let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index bb592c9..290f9a7 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -95,20 +95,15 @@ async fn delete_repo( State(global): State, Path((distro, repo)): Path<(String, String)>, ) -> crate::Result { - Ok(StatusCode::NOT_FOUND) - //if let Some(mgr) = global.mgr.get_mgr(&distro).await { - // let repo_removed = mgr.remove_repo(&repo).await?; - // - // if repo_removed { - // tracing::info!("Removed repository '{}'", repo); - // - // Ok(StatusCode::OK) - // } else { - // Ok(StatusCode::NOT_FOUND) - // } - //} else { - // Ok(StatusCode::NOT_FOUND) - //} + if let Some(repo) = global.mgr.get_repo(&distro, &repo).await? 
{ + global.mgr.remove_repo(repo).await?; + + tracing::info!("Removed repository {repo}"); + + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } } async fn delete_arch_repo( From e17269ac3b0f31f70ee032be4eb862dc938bcb20 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 16 Jun 2024 13:04:04 +0200 Subject: [PATCH 44/73] feat: clean up some queries; implement repo arch remove --- server/src/db/query/package.rs | 102 +++++++++++---------------------- server/src/repo/manager2.rs | 72 +++++++++++------------ server/src/repo/mod.rs | 14 ++++- 3 files changed, 82 insertions(+), 106 deletions(-) diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index 8e9c17b..2ba1996 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -2,7 +2,7 @@ use crate::db::{self, *}; use futures::Stream; use sea_orm::{sea_query::IntoCondition, *}; -use sea_query::{Alias, Asterisk, Expr, Query, SelectStatement}; +use sea_query::{Alias, Asterisk, Expr, IntoColumnRef, Query, SelectStatement}; use serde::Deserialize; #[derive(Deserialize)] @@ -222,8 +222,8 @@ pub struct PkgToRemove { pub id: i32, } -fn max_pkg_ids_query() -> SelectStatement { - Query::select() +fn max_pkg_ids_query(committed: bool) -> SelectStatement { + let mut query = Query::select() .from(db::package::Entity) .columns([ db::package::Column::RepoId, @@ -236,39 +236,29 @@ fn max_pkg_ids_query() -> SelectStatement { db::package::Column::Arch, db::package::Column::Name, ]) - .cond_where( - Condition::all().add(db::package::Column::State.eq(db::PackageState::Committed)), - ) - .to_owned() -} - -pub fn pkgs_to_sync( - conn: &DbConn, - repo: i32, - arch: &str, -) -> SelectorRaw> { - let max_id_query = Query::select() - .columns([ - db::package::Column::RepoId, - db::package::Column::Arch, - db::package::Column::Name, - ]) - .expr_as(db::package::Column::Id.max(), Alias::new("max_id")) - .from(db::package::Entity) - .group_by_columns([ - db::package::Column::RepoId, - 
db::package::Column::Arch, - db::package::Column::Name, - ]) .to_owned(); + if committed { + query.cond_where(db::package::Column::State.eq(db::PackageState::Committed)); + } + + query +} + +/// Query that returns all packages that should be included in a sync for the given repository and +/// arch. +pub fn pkgs_to_sync( + conn: &DbConn, + repo: i32, + arch: &str, +) -> SelectorRaw> { let (p1, p2) = (Alias::new("p1"), Alias::new("p2")); let query = Query::select() - .column((p1.clone(), Asterisk)) + .columns(db::package::Column::iter().map(|c| (p1.clone(), c))) .from_as(db::package::Entity, p1.clone()) .join_subquery( JoinType::InnerJoin, - max_id_query, + max_pkg_ids_query(false), p2.clone(), Expr::col((p1.clone(), db::package::Column::Id)) .eq(Expr::col((p2.clone(), Alias::new("max_id")))), @@ -276,13 +266,13 @@ pub fn pkgs_to_sync( .cond_where( Condition::all() .add(Expr::col((p1.clone(), db::package::Column::RepoId)).eq(repo)) - .add( - Expr::col((p1.clone(), db::package::Column::State)) - .ne(db::PackageState::PendingDeletion), - ) .add( Expr::col((p1.clone(), db::package::Column::Arch)) .is_in([arch, crate::ANY_ARCH]), + ) + .add( + Expr::col((p1.clone(), db::package::Column::State)) + .ne(db::PackageState::PendingDeletion), ), ) .to_owned(); @@ -293,36 +283,10 @@ pub fn pkgs_to_sync( } fn stale_pkgs_query(include_repo: bool) -> SelectStatement { - // In each repository, only one version of a package can exist for any given arch. Because ids - // are monotonically increasing, we know that the row that represents the actual package - // currently in the repository is the row with the largest id whose state is "committed". This - // query finds this id for each (repo, arch, name) tuple. 
- let mut max_id_query = Query::select(); - max_id_query - .from(db::package::Entity) - .columns([ - db::package::Column::RepoId, - db::package::Column::Arch, - db::package::Column::Name, - ]) - .expr_as(db::package::Column::Id.max(), Alias::new("max_id")) - .group_by_columns([ - db::package::Column::RepoId, - db::package::Column::Arch, - db::package::Column::Name, - ]) - .cond_where( - Condition::all().add(db::package::Column::State.eq(db::PackageState::Committed)), - ); - let (p1, p2) = (Alias::new("p1"), Alias::new("p2")); - let mut query = Query::select(); - - // We then perform an inner join between the max id query above and the package table, where we - // filter on rows whose id is less than their respective package's max id or whose state is set - // to "pending deletion". This gives us all rows in the database that correspond to packages - // that are no longer needed, and can thus be removed. - query.from_as(db::package::Entity, p1.clone()); + let mut query = Query::select() + .from_as(db::package::Entity, p1.clone()) + .to_owned(); if include_repo { query.columns([ @@ -333,10 +297,13 @@ fn stale_pkgs_query(include_repo: bool) -> SelectStatement { query.column((p1.clone(), db::package::Column::Id)); } + // We left join on the max pkgs query because a repository that has all its packages set to + // "pending deletion" doesn't show up in the query. These are also included with a where clause + // on the joined rows. 
query .join_subquery( - JoinType::InnerJoin, - max_id_query, + JoinType::LeftJoin, + max_pkg_ids_query(true), p2.clone(), Condition::all() .add( @@ -359,11 +326,12 @@ fn stale_pkgs_query(include_repo: bool) -> SelectStatement { .lt(Expr::col((p2.clone(), Alias::new("max_id")))), ) .add( - Expr::col((p1.clone(), db::package::Column::Id)) + Expr::col((p1.clone(), db::package::Column::State)) .eq(db::PackageState::PendingDeletion), ), - ) - .to_owned() + ); + + query } pub fn stale_pkgs(conn: &DbConn) -> SelectorRaw> { diff --git a/server/src/repo/manager2.rs b/server/src/repo/manager2.rs index f91ab69..266eeee 100644 --- a/server/src/repo/manager2.rs +++ b/server/src/repo/manager2.rs @@ -100,42 +100,6 @@ impl RepoMgr { Ok(()) } - /// Clean any remaining old package files from the database and file system - pub async fn remove_stale_pkgs(&self) -> crate::Result<()> { - let mut pkgs = db::query::package::stale_pkgs(&self.conn) - .stream(&self.conn) - .await?; - - // Ids are monotonically increasing, so the max id suffices to know which packages to - // remove later - let mut max_id = -1; - let mut removed_pkgs = 0; - - while let Some(pkg) = pkgs.next().await.transpose()? { - // Failing to remove the package file isn't the biggest problem - let _ = tokio::fs::remove_file( - self.repos_dir - .join(pkg.repo_id.to_string()) - .join(pkg.id.to_string()), - ) - .await; - - if pkg.id > max_id { - max_id = pkg.id; - } - - removed_pkgs += 1; - } - - if removed_pkgs > 0 { - db::query::package::delete_stale_pkgs(&self.conn, max_id).await?; - } - - tracing::info!("Removed {removed_pkgs} stale package(s)"); - - Ok(()) - } - /// Generate the archive databases for the given repository and architecture. 
async fn generate_archives(&self, repo: i32, arch: &str) -> crate::Result<()> { let [tmp_ar_db_path, tmp_ar_files_path, files_tmp_file_path, desc_tmp_file_path] = @@ -209,6 +173,42 @@ impl RepoMgr { Ok(()) } + /// Clean any remaining old package files from the database and file system + pub async fn remove_stale_pkgs(&self) -> crate::Result<()> { + let mut pkgs = db::query::package::stale_pkgs(&self.conn) + .stream(&self.conn) + .await?; + + // Ids are monotonically increasing, so the max id suffices to know which packages to + // remove later + let mut max_id = -1; + let mut removed_pkgs = 0; + + while let Some(pkg) = pkgs.next().await.transpose()? { + // Failing to remove the package file isn't the biggest problem + let _ = tokio::fs::remove_file( + self.repos_dir + .join(pkg.repo_id.to_string()) + .join(pkg.id.to_string()), + ) + .await; + + if pkg.id > max_id { + max_id = pkg.id; + } + + removed_pkgs += 1; + } + + if removed_pkgs > 0 { + db::query::package::delete_stale_pkgs(&self.conn, max_id).await?; + } + + tracing::info!("Removed {removed_pkgs} stale package(s)"); + + Ok(()) + } + pub async fn pkg_parse_task(&self) { loop { // Receive the next message and immediately drop the mutex afterwards. 
As long as the diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 290f9a7..d088095 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -78,7 +78,7 @@ async fn post_package_archive( State(global): State, Path((distro, repo)): Path<(String, String)>, body: Body, -) -> crate::Result<()> { +) -> crate::Result { let mut body = StreamReader::new(body.into_data_stream().map_err(std::io::Error::other)); let repo = global.mgr.get_or_create_repo(&distro, &repo).await?; let [tmp_path] = global.mgr.random_file_paths(); @@ -88,7 +88,7 @@ async fn post_package_archive( global.mgr.queue_pkg(repo, tmp_path).await; - Ok(()) + Ok(StatusCode::ACCEPTED) } async fn delete_repo( @@ -110,7 +110,15 @@ async fn delete_arch_repo( State(global): State, Path((distro, repo, arch)): Path<(String, String, String)>, ) -> crate::Result { - Ok(StatusCode::NOT_FOUND) + if let Some(repo) = global.mgr.get_repo(&distro, &repo).await? { + global.mgr.remove_repo_arch(repo, &arch).await?; + + tracing::info!("Removed architecture '{arch}' from repository {repo}"); + + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } //if let Some(mgr) = global.mgr.get_mgr(&distro).await { // let repo_removed = mgr.remove_repo_arch(&repo, &arch).await?; // From 97e42588ed6f912175b8dadfcd32034ef6df6eb9 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 16 Jun 2024 18:14:56 +0200 Subject: [PATCH 45/73] feat: switch to proper config file --- server/rieterd.toml | 12 +++-- server/src/cli.rs | 90 ++++++++++--------------------------- server/src/config.rs | 59 ++++++++++++++++++++---- server/src/db/mod.rs | 51 ++++++++++++++++++++- server/src/main.rs | 8 +--- server/src/repo/manager2.rs | 3 +- server/src/repo/mod.rs | 37 ++++++++------- 7 files changed, 158 insertions(+), 102 deletions(-) diff --git a/server/rieterd.toml b/server/rieterd.toml index 781a055..9cc56bf 100644 --- a/server/rieterd.toml +++ b/server/rieterd.toml @@ -1,11 +1,17 @@ api_key = "test" -port = 8000 -log_level 
= "tower_http=debug,rieterd=debug" +pkg_workers = 2 +log_level = "rieterd=debug" [fs] -type = "locl" +type = "local" data_dir = "./data" [db] type = "sqlite" db_dir = "./data" +# [db] +# type = "postgres" +# host = "localhost" +# db = "rieter" +# user = "rieter" +# password = "rieter" diff --git a/server/src/cli.rs b/server/src/cli.rs index 1ceaf27..73dc9f2 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -1,4 +1,4 @@ -use crate::{distro::MetaDistroMgr, Config, Global}; +use crate::{distro::MetaDistroMgr, Config, FsConfig, Global}; use std::{io, path::PathBuf, sync::Arc}; @@ -12,13 +12,6 @@ use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; #[derive(Parser)] #[command(author, version, about, long_about = None)] pub struct Cli { - /// Directory where repository metadata & SQLite database is stored - #[arg(env = "RIETER_DATA_DIR")] - pub data_dir: PathBuf, - /// API key to authenticate private routes with - #[arg(env = "RIETER_API_KEY")] - pub api_key: String, - #[arg( short, long, @@ -26,89 +19,54 @@ pub struct Cli { default_value = "./rieterd.toml" )] pub config_file: PathBuf, - - /// Database connection URL; either sqlite:// or postgres://. 
Defaults to rieter.sqlite in the - /// data directory - #[arg(short, long, env = "RIETER_DATABASE_URL")] - pub database_url: Option, - /// Port the server will listen on - #[arg( - short, - long, - value_name = "PORT", - default_value_t = 8000, - env = "RIETER_PORT" - )] - pub port: u16, - /// Log levels for the tracing - #[arg( - long, - value_name = "LOG_LEVEL", - default_value = "tower_http=debug,rieterd=debug,sea_orm=debug", - env = "RIETER_LOG" - )] - pub log: String, } impl Cli { - pub fn init_tracing(&self) { + pub async fn run(&self) -> crate::Result<()> { + let config: Config = Config::figment(&self.config_file) + .extract() + .inspect_err(|e| tracing::error!("{}", e))?; + tracing_subscriber::registry() - .with(tracing_subscriber::EnvFilter::new(self.log.clone())) + .with(tracing_subscriber::EnvFilter::new(config.log_level.clone())) .with(tracing_subscriber::fmt::layer()) .init(); - } - pub async fn run(&self) -> crate::Result<()> { - self.init_tracing(); + tracing::info!("Connecting to database"); + let db = crate::db::connect(&config.db).await?; - //tracing::debug!("{:?}", &self.config_file); - //let new_config: crate::config::Config = crate::config::Config::figment(&self.config_file).extract().inspect_err( - // |e| tracing::error!("{}", e) - //)?; - //tracing::debug!("{:?}", new_config); - - let db_url = if let Some(url) = &self.database_url { - url.clone() - } else { - format!( - "sqlite://{}?mode=rwc", - self.data_dir.join("rieter.sqlite").to_string_lossy() - ) - }; - - debug!("Connecting to database with URL {}", db_url); - - let mut options = sea_orm::ConnectOptions::new(db_url); - options.max_connections(16); - - let db = sea_orm::Database::connect(options).await?; crate::db::Migrator::up(&db, None).await?; - debug!("Successfully applied migrations"); - - let config = Config { - data_dir: self.data_dir.clone(), + let mgr = match &config.fs { + FsConfig::Local { data_dir } => { + crate::repo::RepoMgr::new(data_dir.join("repos"), db.clone()).await? 
+ } }; - let mgr = - Arc::new(crate::repo::RepoMgr::new(&self.data_dir.join("repos"), db.clone()).await?); + let mgr = Arc::new(mgr); - for _ in 0..1 { + for _ in 0..config.pkg_workers { let clone = Arc::clone(&mgr); tokio::spawn(async move { clone.pkg_parse_task().await }); } - let global = Global { config, mgr, db }; + let global = Global { + config: config.clone(), + mgr, + db, + }; // build our application with a single route let app = Router::new() .nest("/api", crate::api::router()) - .merge(crate::repo::router(&self.api_key)) + .merge(crate::repo::router(&config.api_key)) .with_state(global) .layer(TraceLayer::new_for_http()); - let domain: String = format!("0.0.0.0:{}", self.port).parse().unwrap(); + let domain: String = format!("{}:{}", config.domain, config.port) + .parse() + .unwrap(); let listener = tokio::net::TcpListener::bind(domain).await?; // run it with hyper on localhost:3000 Ok(axum::serve(listener, app.into_make_service()) diff --git a/server/src/config.rs b/server/src/config.rs index a639362..e165fdc 100644 --- a/server/src/config.rs +++ b/server/src/config.rs @@ -6,34 +6,49 @@ use figment::{ }; use serde::Deserialize; -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, Clone)] #[serde(rename_all = "lowercase")] #[serde(tag = "type")] pub enum FsConfig { Local { data_dir: PathBuf }, } -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, Clone)] #[serde(rename_all = "lowercase")] #[serde(tag = "type")] pub enum DbConfig { Sqlite { db_dir: PathBuf, + #[serde(default = "default_db_sqlite_max_connections")] + max_connections: u32, }, Postgres { host: String, + #[serde(default = "default_db_postgres_port")] + port: u16, user: String, password: String, + db: String, + #[serde(default)] + schema: String, + #[serde(default = "default_db_postgres_max_connections")] + max_connections: u32, }, } -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct Config { - api_key: String, - port: u16, - log_level: String, - 
fs: FsConfig, - db: DbConfig, + pub api_key: String, + #[serde(default = "default_domain")] + pub domain: String, + #[serde(default = "default_port")] + pub port: u16, + #[serde(default = "default_log_level")] + pub log_level: String, + pub fs: FsConfig, + pub db: DbConfig, + #[serde(default = "default_pkg_workers")] + pub pkg_workers: u32, } impl Config { @@ -43,3 +58,31 @@ impl Config { .merge(Env::prefixed("RIETER_")) } } + +fn default_domain() -> String { + String::from("0.0.0.0") +} + +fn default_port() -> u16 { + 8000 +} + +fn default_log_level() -> String { + String::from("tower_http=debug,rieterd=debug,sea_orm=debug") +} + +fn default_db_sqlite_max_connections() -> u32 { + 16 +} + +fn default_db_postgres_port() -> u16 { + 5432 +} + +fn default_db_postgres_max_connections() -> u32 { + 16 +} + +fn default_pkg_workers() -> u32 { + 1 +} diff --git a/server/src/db/mod.rs b/server/src/db/mod.rs index 98f42a4..a1b7476 100644 --- a/server/src/db/mod.rs +++ b/server/src/db/mod.rs @@ -2,10 +2,12 @@ pub mod entities; mod migrator; pub mod query; +use crate::config::DbConfig; + pub use entities::{prelude::*, *}; pub use migrator::Migrator; -use sea_orm::{DeriveActiveEnum, EnumIter}; +use sea_orm::{ConnectionTrait, Database, DbConn, DeriveActiveEnum, EnumIter}; use serde::{Deserialize, Serialize}; type Result = std::result::Result; @@ -50,3 +52,50 @@ pub struct FullPackage { related: Vec<(PackageRelatedEnum, String)>, files: Vec, } + +pub async fn connect(conn: &DbConfig) -> crate::Result { + match conn { + DbConfig::Sqlite { + db_dir, + max_connections, + } => { + let url = format!( + "sqlite://{}?mode=rwc", + db_dir.join("rieter.sqlite").to_string_lossy() + ); + let options = sea_orm::ConnectOptions::new(url) + .max_connections(*max_connections) + .to_owned(); + + let conn = Database::connect(options).await?; + + // synchronous=NORMAL still ensures database consistency with WAL mode, as per the docs + // https://www.sqlite.org/pragma.html#pragma_synchronous + 
conn.execute_unprepared("PRAGMA journal_mode=WAL;").await?; + conn.execute_unprepared("PRAGMA synchronous=NORMAL;") + .await?; + + Ok(conn) + } + DbConfig::Postgres { + host, + port, + db, + user, + password, + schema, + max_connections, + } => { + let mut url = format!("postgres://{}:{}@{}:{}/{}", user, password, host, port, db); + + if schema != "" { + url = format!("{url}?currentSchema={schema}"); + } + + let options = sea_orm::ConnectOptions::new(url) + .max_connections(*max_connections) + .to_owned(); + Ok(Database::connect(options).await?) + } + } +} diff --git a/server/src/main.rs b/server/src/main.rs index c3237cf..f7e1a95 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -6,6 +6,7 @@ mod distro; mod error; mod repo; +pub use config::{Config, DbConfig, FsConfig}; pub use error::{Result, ServerError}; use repo::DistroMgr; @@ -14,14 +15,9 @@ use std::{path::PathBuf, sync::Arc}; pub const ANY_ARCH: &'static str = "any"; -#[derive(Clone)] -pub struct Config { - data_dir: PathBuf, -} - #[derive(Clone)] pub struct Global { - config: Config, + config: crate::config::Config, mgr: Arc, db: sea_orm::DbConn, } diff --git a/server/src/repo/manager2.rs b/server/src/repo/manager2.rs index 266eeee..2f66cfe 100644 --- a/server/src/repo/manager2.rs +++ b/server/src/repo/manager2.rs @@ -248,7 +248,7 @@ impl RepoMgr { } pub async fn queue_pkg(&self, repo: i32, path: PathBuf) { - let _ = self.pkg_queue.0.send(PkgQueueMsg { path, repo }); + self.pkg_queue.0.send(PkgQueueMsg { path, repo }).unwrap(); self.repos.read().await.get(&repo).inspect(|n| { n.0.fetch_add(1, Ordering::SeqCst); }); @@ -291,6 +291,7 @@ impl RepoMgr { }; let repo_id: Option = db::Repo::find() + .filter(db::repo::Column::DistroId.eq(distro_id)) .filter(db::repo::Column::Name.eq(repo)) .select_only() .column(db::repo::Column::Id) diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index d088095..16c62a5 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -6,6 +6,8 @@ pub mod 
package; pub use manager::DistroMgr; pub use manager2::RepoMgr; +use crate::FsConfig; + use axum::{ body::Body, extract::{Path, State}, @@ -50,25 +52,26 @@ async fn get_file( req: Request, ) -> crate::Result { if let Some(repo_id) = global.mgr.get_repo(&distro, &repo).await? { - let repo_dir = global - .config - .data_dir - .join("repos") - .join(repo_id.to_string()); + match global.config.fs { + FsConfig::Local { data_dir } => { + let repo_dir = data_dir.join("repos").join(repo_id.to_string()); - let file_name = - if file_name == format!("{}.db", repo) || file_name == format!("{}.db.tar.gz", repo) { - format!("{}.db.tar.gz", arch) - } else if file_name == format!("{}.files", repo) - || file_name == format!("{}.files.tar.gz", repo) - { - format!("{}.files.tar.gz", arch) - } else { - file_name - }; + let file_name = if file_name == format!("{}.db", repo) + || file_name == format!("{}.db.tar.gz", repo) + { + format!("{}.db.tar.gz", arch) + } else if file_name == format!("{}.files", repo) + || file_name == format!("{}.files.tar.gz", repo) + { + format!("{}.files.tar.gz", arch) + } else { + file_name + }; - let path = repo_dir.join(file_name); - Ok(ServeFile::new(path).oneshot(req).await) + let path = repo_dir.join(file_name); + Ok(ServeFile::new(path).oneshot(req).await) + } + } } else { Err(StatusCode::NOT_FOUND.into()) } From cc8848d3ae65818d9789d3af09bb7ab279310016 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Mon, 17 Jun 2024 22:59:54 +0200 Subject: [PATCH 46/73] fix: flush desc files explicitely --- server/src/repo/manager2.rs | 8 ++++---- server/src/repo/package.rs | 4 ++++ 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/server/src/repo/manager2.rs b/server/src/repo/manager2.rs index 2f66cfe..e4f0581 100644 --- a/server/src/repo/manager2.rs +++ b/server/src/repo/manager2.rs @@ -84,16 +84,16 @@ impl RepoMgr { let lock = lock.unwrap(); let _guard = lock.lock().await; - let mut archs = db::Package::find() + let archs: Vec = db::Package::find() 
.filter(db::package::Column::RepoId.eq(repo)) .select_only() .column(db::package::Column::Arch) .distinct() - .into_tuple::() - .stream(&self.conn) + .into_tuple() + .all(&self.conn) .await?; - while let Some(arch) = archs.next().await.transpose()? { + for arch in archs { self.generate_archives(repo, &arch).await?; } diff --git a/server/src/repo/package.rs b/server/src/repo/package.rs index 66c8fa1..df98559 100644 --- a/server/src/repo/package.rs +++ b/server/src/repo/package.rs @@ -397,6 +397,8 @@ pub async fn write_desc( write_attribute(writer, key, &items.join("\n")).await?; } + writer.flush().await?; + Ok(()) } @@ -417,5 +419,7 @@ pub async fn write_files( .await?; } + writer.flush().await?; + Ok(()) } From 730ae009b084bcd8e397f43e8e806d126ab7afdf Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Tue, 18 Jun 2024 10:47:35 +0200 Subject: [PATCH 47/73] chore: remove old manager code once again --- server/src/cli.rs | 3 +- server/src/distro.rs | 70 ----- server/src/main.rs | 5 +- server/src/repo/manager.rs | 518 +++++++++++++++++------------------- server/src/repo/manager2.rs | 385 --------------------------- server/src/repo/mod.rs | 4 +- 6 files changed, 241 insertions(+), 744 deletions(-) delete mode 100644 server/src/distro.rs delete mode 100644 server/src/repo/manager2.rs diff --git a/server/src/cli.rs b/server/src/cli.rs index 73dc9f2..c6998eb 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -1,4 +1,4 @@ -use crate::{distro::MetaDistroMgr, Config, FsConfig, Global}; +use crate::{Config, FsConfig, Global}; use std::{io, path::PathBuf, sync::Arc}; @@ -6,7 +6,6 @@ use axum::Router; use clap::Parser; use sea_orm_migration::MigratorTrait; use tower_http::trace::TraceLayer; -use tracing::debug; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; #[derive(Parser)] diff --git a/server/src/distro.rs b/server/src/distro.rs deleted file mode 100644 index 22563ff..0000000 --- a/server/src/distro.rs +++ /dev/null @@ -1,70 +0,0 @@ -use 
crate::{db, DistroMgr}; - -use std::{ - collections::HashMap, - path::{Path, PathBuf}, - sync::Arc, -}; - -use sea_orm::{DbConn, EntityTrait}; -use tokio::sync::Mutex; - -#[derive(Clone)] -pub struct MetaDistroMgr { - distro_dir: PathBuf, - conn: DbConn, - distros: Arc>>>, -} - -impl MetaDistroMgr { - pub async fn new>(distro_dir: P, conn: DbConn) -> crate::Result { - if !tokio::fs::try_exists(&distro_dir).await? { - tokio::fs::create_dir(&distro_dir).await?; - } - - let distro_dir = distro_dir.as_ref().to_path_buf(); - let mut map: HashMap> = HashMap::new(); - - let distros = db::Distro::find().all(&conn).await?; - - for distro in distros { - let mgr = - DistroMgr::new(distro_dir.join(&distro.name), distro.id, conn.clone()).await?; - map.insert(distro.name, Arc::new(mgr)); - } - - Ok(Self { - distro_dir, - conn, - distros: Arc::new(Mutex::new(map)), - }) - } - - pub async fn get_mgr(&self, distro: &str) -> Option> { - let map = self.distros.lock().await; - - map.get(distro).map(|mgr| Arc::clone(mgr)) - } - - pub async fn get_or_create_mgr(&self, distro: &str) -> crate::Result> { - let mut map = self.distros.lock().await; - - if let Some(mgr) = map.get(distro) { - Ok(Arc::clone(mgr)) - } else { - let distro = db::query::distro::insert(&self.conn, distro, None).await?; - - let mgr = Arc::new( - DistroMgr::new( - self.distro_dir.join(&distro.name), - distro.id, - self.conn.clone(), - ) - .await?, - ); - map.insert(distro.name, Arc::clone(&mgr)); - - Ok(mgr) - } - } -} diff --git a/server/src/main.rs b/server/src/main.rs index f7e1a95..eb1c3d0 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -2,16 +2,15 @@ mod api; mod cli; mod config; pub mod db; -mod distro; mod error; mod repo; pub use config::{Config, DbConfig, FsConfig}; pub use error::{Result, ServerError}; -use repo::DistroMgr; + +use std::sync::Arc; use clap::Parser; -use std::{path::PathBuf, sync::Arc}; pub const ANY_ARCH: &'static str = "any"; diff --git a/server/src/repo/manager.rs 
b/server/src/repo/manager.rs index 4bf2378..e4f0581 100644 --- a/server/src/repo/manager.rs +++ b/server/src/repo/manager.rs @@ -1,133 +1,107 @@ use super::{archive, package}; -use crate::{db, error::Result}; +use crate::db::{self, query::package::delete_stale_pkgs}; use std::{ collections::HashMap, path::{Path, PathBuf}, sync::{ - atomic::{AtomicBool, AtomicU32, Ordering}, + atomic::{AtomicU32, Ordering}, Arc, }, }; use futures::StreamExt; use sea_orm::{ - ActiveModelTrait, ColumnTrait, DbConn, EntityTrait, ModelTrait, QueryFilter, QuerySelect, - Related, RelationTrait, Set, TransactionTrait, + ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DbConn, EntityTrait, JoinType, + ModelTrait, NotSet, QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, }; -use sea_query::{Expr, Query}; -use tokio::{ - io::AsyncRead, - sync::{Mutex, Notify, RwLock, Semaphore}, +use sea_query::{Alias, Expr, Query}; +use tokio::sync::{ + mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, + Mutex, RwLock, }; use uuid::Uuid; -pub const ANY_ARCH: &'static str = "any"; -pub const REPOS_DIR: &'static str = "repos"; - -#[derive(Default)] -pub struct RepoState { - queued_pkgs: AtomicU32, - sync_queued: AtomicBool, - sync_notify: Notify, +struct PkgQueueMsg { + repo: i32, + path: PathBuf, } -pub struct DistroMgr { - distro_dir: PathBuf, - distro_id: i32, +/// A single instance of this struct orchestrates everything related to managing packages files on +/// disk for all repositories in the server +pub struct RepoMgr { + repos_dir: PathBuf, conn: DbConn, - repos: RwLock>>, - sync_lock: Mutex<()>, - pkg_sema: Semaphore, + pkg_queue: ( + UnboundedSender, + Mutex>, + ), + repos: RwLock>)>>, } -impl DistroMgr { - pub async fn new>(distro_dir: P, distro_id: i32, conn: DbConn) -> Result { - if !tokio::fs::try_exists(&distro_dir).await? 
{ - tokio::fs::create_dir(&distro_dir).await?; +impl RepoMgr { + pub async fn new>(repos_dir: P, conn: DbConn) -> crate::Result { + if !tokio::fs::try_exists(&repos_dir).await? { + tokio::fs::create_dir(&repos_dir).await?; } - let repos_dir = distro_dir.as_ref().join(REPOS_DIR); + let (tx, rx) = unbounded_channel(); - if !tokio::fs::try_exists(&repos_dir).await? { - tokio::fs::create_dir(repos_dir).await?; + let mut repos = HashMap::new(); + let repo_ids: Vec = db::Repo::find() + .select_only() + .column(db::repo::Column::Id) + .into_tuple() + .all(&conn) + .await?; + + for id in repo_ids { + repos.insert(id, Default::default()); } Ok(Self { - distro_dir: distro_dir.as_ref().to_path_buf(), - distro_id, + repos_dir: repos_dir.as_ref().to_path_buf(), conn, - sync_lock: Mutex::new(()), - pkg_sema: Semaphore::new(1), - repos: RwLock::new(HashMap::new()), + pkg_queue: (tx, Mutex::new(rx)), + repos: RwLock::new(repos), }) } - pub async fn schedule_sync(&self, repo_id: i32) -> Result<()> { - let state = { - let repos = self.repos.read().await; - repos.get(&repo_id).map(Arc::clone) - }; - - if state.is_none() { - tracing::debug!("is none"); - return Ok(()); - } - - let state = state.unwrap(); - - let res = - state - .sync_queued - .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst); - - // Already a sync job scheduled, so this one can simply quit - if res.is_err() { - tracing::debug!("shit"); - return Ok(()); - } - - // If the queue is not empty, we wait for a notification that it is before syncing - if state.queued_pkgs.load(Ordering::SeqCst) > 0 { - tracing::debug!("sync waiter waiting"); - state.sync_notify.notified().await; - tracing::debug!("sync waiter notified"); - } - - self.sync_repo(repo_id).await - } - /// Generate archive databases for all known architectures in the repository, including the /// "any" architecture. 
- pub async fn sync_repo(&self, repo_id: i32) -> Result<()> { - let _guard = self.sync_lock.lock().await; + pub async fn sync_repo(&self, repo: i32) -> crate::Result<()> { + let lock = self + .repos + .read() + .await + .get(&repo) + .map(|(_, lock)| Arc::clone(lock)); - let repo = crate::db::query::repo::by_id(&self.conn, repo_id).await?; - - if repo.is_none() { + if lock.is_none() { return Ok(()); } - let repo = repo.unwrap(); + let lock = lock.unwrap(); + let _guard = lock.lock().await; - let mut archs = repo - .find_related(db::Package) + let archs: Vec = db::Package::find() + .filter(db::package::Column::RepoId.eq(repo)) .select_only() .column(db::package::Column::Arch) .distinct() - .into_tuple::() - .stream(&self.conn) + .into_tuple() + .all(&self.conn) .await?; - while let Some(arch) = archs.next().await.transpose()? { - self.generate_archives(&repo, &arch).await?; + for arch in archs { + self.generate_archives(repo, &arch).await?; } Ok(()) } /// Generate the archive databases for the given repository and architecture. 
- async fn generate_archives(&self, repo: &db::repo::Model, arch: &str) -> Result<()> { + async fn generate_archives(&self, repo: i32, arch: &str) -> crate::Result<()> { let [tmp_ar_db_path, tmp_ar_files_path, files_tmp_file_path, desc_tmp_file_path] = self.random_file_paths(); let ar_db = archive::RepoArchiveWriter::open(&tmp_ar_db_path).await?; @@ -135,18 +109,7 @@ impl DistroMgr { // Query all packages in the repo that have the given architecture or the "any" // architecture - let mut pkgs = repo - .find_related(crate::db::Package) - .filter(db::package::Column::Arch.is_in([arch, ANY_ARCH])) - .filter( - db::package::Column::Id.in_subquery( - Query::select() - .expr(db::package::Column::Id.max()) - .from(db::package::Entity) - .group_by_columns([db::package::Column::Arch, db::package::Column::Name]) - .to_owned(), - ), - ) + let mut pkgs = db::query::package::pkgs_to_sync(&self.conn, repo, arch) .stream(&self.conn) .await?; @@ -178,7 +141,7 @@ impl DistroMgr { ar_db.close().await?; ar_files.close().await?; - let repo_dir = self.distro_dir.join(&repo.name); + let repo_dir = self.repos_dir.join(repo.to_string()); // Move the db archives to their respective places tokio::fs::rename(tmp_ar_db_path, repo_dir.join(format!("{}.db.tar.gz", arch))).await?; @@ -205,225 +168,218 @@ impl DistroMgr { let _ = tokio::fs::remove_file(desc_tmp_file_path).await; let _ = tokio::fs::remove_file(files_tmp_file_path).await; - tracing::info!( - "Package archives generated for '{}' ('{}')", - &repo.name, - arch - ); + tracing::info!("Package archives generated for repo {} ('{}')", repo, arch); Ok(()) } - async fn get_or_create_repo(&self, repo: &str) -> Result { + /// Clean any remaining old package files from the database and file system + pub async fn remove_stale_pkgs(&self) -> crate::Result<()> { + let mut pkgs = db::query::package::stale_pkgs(&self.conn) + .stream(&self.conn) + .await?; + + // Ids are monotonically increasing, so the max id suffices to know which packages to + // 
remove later + let mut max_id = -1; + let mut removed_pkgs = 0; + + while let Some(pkg) = pkgs.next().await.transpose()? { + // Failing to remove the package file isn't the biggest problem + let _ = tokio::fs::remove_file( + self.repos_dir + .join(pkg.repo_id.to_string()) + .join(pkg.id.to_string()), + ) + .await; + + if pkg.id > max_id { + max_id = pkg.id; + } + + removed_pkgs += 1; + } + + if removed_pkgs > 0 { + db::query::package::delete_stale_pkgs(&self.conn, max_id).await?; + } + + tracing::info!("Removed {removed_pkgs} stale package(s)"); + + Ok(()) + } + + pub async fn pkg_parse_task(&self) { + loop { + // Receive the next message and immediately drop the mutex afterwards. As long as the + // quue is empty, this will lock the mutex. This is okay, as the mutex will be unlocked + // as soon as a message is received, so another worker can pick up the mutex. + let msg = { + let mut recv = self.pkg_queue.1.lock().await; + recv.recv().await + }; + + if let Some(msg) = msg { + // TODO better handle this error (retry if failure wasn't because the package is + // faulty) + let _ = self + .add_pkg_from_path(msg.path, msg.repo) + .await + .inspect_err(|e| tracing::error!("{:?}", e)); + + let old = self + .repos + .read() + .await + .get(&msg.repo) + .map(|n| n.0.fetch_sub(1, Ordering::SeqCst)); + + // Every time the queue for a repo becomes empty, we run a sync job + if old == Some(1) { + // TODO error handling + let _ = self.sync_repo(msg.repo).await; + + // TODO move this so that we only clean if entire queue is empty, not just + // queue for specific repo + let _ = self.remove_stale_pkgs().await; + } + } + } + } + + pub async fn queue_pkg(&self, repo: i32, path: PathBuf) { + self.pkg_queue.0.send(PkgQueueMsg { path, repo }).unwrap(); + self.repos.read().await.get(&repo).inspect(|n| { + n.0.fetch_add(1, Ordering::SeqCst); + }); + } + + pub async fn get_repo(&self, distro: &str, repo: &str) -> crate::Result> { + Ok(db::Repo::find() + .find_also_related(db::Distro) + 
.filter( + Condition::all() + .add(db::repo::Column::Name.eq(repo)) + .add(db::distro::Column::Name.eq(distro)), + ) + .one(&self.conn) + .await + .map(|res| res.map(|(repo, _)| repo.id))?) + } + + pub async fn get_or_create_repo(&self, distro: &str, repo: &str) -> crate::Result { let mut repos = self.repos.write().await; - if let Some(repo) = db::query::repo::by_name(&self.conn, repo).await? { - Ok(repo) - } else { - tokio::fs::create_dir(self.distro_dir.join(repo)).await?; - let repo = db::query::repo::insert(&self.conn, self.distro_id, repo, None).await?; - - repos.insert(repo.id, Arc::new(RepoState::default())); - - Ok(repo) - } - } - - /// Remove the repo with the given name, if it existed - pub async fn remove_repo(&self, repo: &str) -> Result { - let res = db::query::repo::by_name(&self.conn, repo).await?; - - if let Some(repo_entry) = res { - // Remove repository from database - repo_entry.delete(&self.conn).await?; - - // Remove files from file system - tokio::fs::remove_dir_all(self.distro_dir.join(repo)).await?; - - Ok(true) - } else { - Ok(false) - } - } - - /// Remove all packages from the repository with the given arch. - pub async fn remove_repo_arch(&self, repo: &str, arch: &str) -> Result { - let repo = db::query::repo::by_name(&self.conn, repo).await?; - - if let Some(repo) = repo { - let mut pkgs = repo - .find_related(db::Package) - .filter(db::package::Column::Arch.eq(arch)) - .stream(&self.conn) - .await?; - - while let Some(pkg) = pkgs.next().await.transpose()? 
{ - let path = self - .distro_dir - .join(&repo.name) - .join(super::package::filename(&pkg)); - tokio::fs::remove_file(path).await?; - - pkg.delete(&self.conn).await?; - } - - tokio::fs::remove_file( - self.distro_dir - .join(&repo.name) - .join(format!("{}.db.tar.gz", arch)), - ) - .await?; - tokio::fs::remove_file( - self.distro_dir - .join(&repo.name) - .join(format!("{}.files.tar.gz", arch)), - ) + let distro_id: Option = db::Distro::find() + .filter(db::distro::Column::Name.eq(distro)) + .select_only() + .column(db::distro::Column::Id) + .into_tuple() + .one(&self.conn) .await?; - // If we removed all "any" packages, we need to resync all databases - if arch == ANY_ARCH { - //self.sync_repo(&repo.name).await?; - } - - Ok(true) + let distro_id = if let Some(id) = distro_id { + id } else { - Ok(false) - } + let new_distro = db::distro::ActiveModel { + id: NotSet, + name: Set(distro.to_string()), + description: NotSet, + }; + + new_distro.insert(&self.conn).await?.id + }; + + let repo_id: Option = db::Repo::find() + .filter(db::repo::Column::DistroId.eq(distro_id)) + .filter(db::repo::Column::Name.eq(repo)) + .select_only() + .column(db::repo::Column::Id) + .into_tuple() + .one(&self.conn) + .await?; + + let repo_id = if let Some(id) = repo_id { + id + } else { + let new_repo = db::repo::ActiveModel { + id: NotSet, + distro_id: Set(distro_id), + name: Set(repo.to_string()), + description: NotSet, + }; + let id = new_repo.insert(&self.conn).await?.id; + + tokio::fs::create_dir(self.repos_dir.join(id.to_string())).await?; + repos.insert(id, Default::default()); + + id + }; + + Ok(repo_id) } - pub async fn remove_pkg(&self, repo: &str, arch: &str, name: &str) -> Result { - let repo = db::query::repo::by_name(&self.conn, repo).await?; - - if let Some(repo) = repo { - let pkg = - db::query::package::by_fields(&self.conn, repo.id, arch, name, None, None).await?; - - if let Some(pkg) = pkg { - // Remove package from database & file system - tokio::fs::remove_file( - 
self.distro_dir - .join(&repo.name) - .join(super::package::filename(&pkg)), - ) - .await?; - pkg.delete(&self.conn).await?; - - //if arch == ANY_ARCH { - // self.sync_repo(&repo.name).await?; - //} else { - // self.generate_archives(&repo.name, arch).await?; - //} - - Ok(true) - } else { - Ok(false) - } - } else { - Ok(false) - } - } - - async fn _add_pkg_from_path>( - &self, - path: P, - repo: &db::repo::Model, - ) -> crate::Result { + async fn add_pkg_from_path>(&self, path: P, repo: i32) -> crate::Result<()> { let path_clone = path.as_ref().to_path_buf(); let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)) .await .unwrap()?; // TODO prevent database from being updated but file failing to move to repo dir? - let pkg = db::query::package::insert(&self.conn, repo.id, pkg).await?; + let pkg = db::query::package::insert(&self.conn, repo, pkg).await?; - let queue_path = self.distro_dir.join(&repo.name).join(pkg.id.to_string()); - tokio::fs::rename(path.as_ref(), queue_path).await?; + let dest_path = self + .repos_dir + .join(repo.to_string()) + .join(pkg.id.to_string()); + tokio::fs::rename(path.as_ref(), dest_path).await?; tracing::info!( - "Added '{}-{}' to repository '{}' ({})", + "Added '{}-{}-{}' to repository {}", pkg.name, pkg.version, - repo.name, - pkg.arch + pkg.arch, + repo, ); - Ok(pkg) + Ok(()) } - pub async fn add_pkg_from_path>( - &self, - path: P, - repo: &str, - ) -> crate::Result<(i32, String, String, String)> { - let repo = self.get_or_create_repo(repo).await?; + pub async fn remove_repo(&self, repo: i32) -> crate::Result<()> { + self.repos.write().await.remove(&repo); + db::Repo::delete_by_id(repo).exec(&self.conn).await?; + let _ = tokio::fs::remove_dir_all(self.repos_dir.join(repo.to_string())).await; - { - let repos = self.repos.read().await; - - if let Some(state) = repos.get(&repo.id) { - state.queued_pkgs.fetch_add(1, Ordering::SeqCst); - } - } - - let _guard = self.pkg_sema.acquire().await.unwrap(); - let res = 
self._add_pkg_from_path(path, &repo).await; - - match res { - Ok(pkg) => { - let repos = self.repos.read().await; - - if let Some(state) = repos.get(&repo.id) { - let old = state.queued_pkgs.fetch_sub(1, Ordering::SeqCst); - - if old - 1 == 0 { - state.sync_notify.notify_one(); - } - } - - Ok((repo.id, pkg.name, pkg.version, pkg.arch)) - } - Err(e) => Err(e), - } - - // If the package already exists in the database, we remove it first - //let res = db::query::package::by_fields( - // &self.conn, - // repo.id, - // &pkg.info.arch, - // &pkg.info.name, - // None, - // None, - //) - //.await?; - // - //if let Some(entry) = res { - // entry.delete(&self.conn).await?; - //} - - //let dest_pkg_path = repo_dir.join(pkg.file_name()); - // - //// Insert new package into database - //let name = pkg.info.name.clone(); - //let version = pkg.info.version.clone(); - //let arch = pkg.info.arch.clone(); - //db::query::package::insert(&self.conn, repo.id, pkg).await?; - // - //// Move the package to its final resting place - //tokio::fs::rename(tmp_file_path, dest_pkg_path).await?; - // - // Synchronize archive databases - //if arch == ANY_ARCH { - // self.generate_archives_all(&repo.name).await?; - //} else { - // self.generate_archives(&repo.name, &arch).await?; - //} + Ok(()) + } + + /// Remove all packages in the repository that have a given arch. This method marks all + /// packages with the given architecture as "pending deletion", before performing a manual sync + /// & removal of stale packages. 
+ pub async fn remove_repo_arch(&self, repo: i32, arch: &str) -> crate::Result<()> { + db::Package::update_many() + .col_expr( + db::package::Column::State, + Expr::value(db::PackageState::PendingDeletion), + ) + .filter( + Condition::all() + .add(db::package::Column::RepoId.eq(repo)) + .add(db::package::Column::Arch.eq(arch)), + ) + .exec(&self.conn) + .await?; + + self.sync_repo(repo).await?; + self.remove_stale_pkgs().await?; + + Ok(()) } - /// Generate a path to a unique file that can be used as a temporary file pub fn random_file_paths(&self) -> [PathBuf; C] { std::array::from_fn(|_| { let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - self.distro_dir.join(uuid.to_string()) + self.repos_dir.join(uuid.to_string()) }) } } diff --git a/server/src/repo/manager2.rs b/server/src/repo/manager2.rs deleted file mode 100644 index e4f0581..0000000 --- a/server/src/repo/manager2.rs +++ /dev/null @@ -1,385 +0,0 @@ -use super::{archive, package}; -use crate::db::{self, query::package::delete_stale_pkgs}; - -use std::{ - collections::HashMap, - path::{Path, PathBuf}, - sync::{ - atomic::{AtomicU32, Ordering}, - Arc, - }, -}; - -use futures::StreamExt; -use sea_orm::{ - ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DbConn, EntityTrait, JoinType, - ModelTrait, NotSet, QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, -}; -use sea_query::{Alias, Expr, Query}; -use tokio::sync::{ - mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, - Mutex, RwLock, -}; -use uuid::Uuid; - -struct PkgQueueMsg { - repo: i32, - path: PathBuf, -} - -/// A single instance of this struct orchestrates everything related to managing packages files on -/// disk for all repositories in the server -pub struct RepoMgr { - repos_dir: PathBuf, - conn: DbConn, - pkg_queue: ( - UnboundedSender, - Mutex>, - ), - repos: RwLock>)>>, -} - -impl RepoMgr { - pub async fn new>(repos_dir: P, conn: DbConn) -> crate::Result { - if 
!tokio::fs::try_exists(&repos_dir).await? { - tokio::fs::create_dir(&repos_dir).await?; - } - - let (tx, rx) = unbounded_channel(); - - let mut repos = HashMap::new(); - let repo_ids: Vec = db::Repo::find() - .select_only() - .column(db::repo::Column::Id) - .into_tuple() - .all(&conn) - .await?; - - for id in repo_ids { - repos.insert(id, Default::default()); - } - - Ok(Self { - repos_dir: repos_dir.as_ref().to_path_buf(), - conn, - pkg_queue: (tx, Mutex::new(rx)), - repos: RwLock::new(repos), - }) - } - - /// Generate archive databases for all known architectures in the repository, including the - /// "any" architecture. - pub async fn sync_repo(&self, repo: i32) -> crate::Result<()> { - let lock = self - .repos - .read() - .await - .get(&repo) - .map(|(_, lock)| Arc::clone(lock)); - - if lock.is_none() { - return Ok(()); - } - - let lock = lock.unwrap(); - let _guard = lock.lock().await; - - let archs: Vec = db::Package::find() - .filter(db::package::Column::RepoId.eq(repo)) - .select_only() - .column(db::package::Column::Arch) - .distinct() - .into_tuple() - .all(&self.conn) - .await?; - - for arch in archs { - self.generate_archives(repo, &arch).await?; - } - - Ok(()) - } - - /// Generate the archive databases for the given repository and architecture. - async fn generate_archives(&self, repo: i32, arch: &str) -> crate::Result<()> { - let [tmp_ar_db_path, tmp_ar_files_path, files_tmp_file_path, desc_tmp_file_path] = - self.random_file_paths(); - let ar_db = archive::RepoArchiveWriter::open(&tmp_ar_db_path).await?; - let ar_files = archive::RepoArchiveWriter::open(&tmp_ar_files_path).await?; - - // Query all packages in the repo that have the given architecture or the "any" - // architecture - let mut pkgs = db::query::package::pkgs_to_sync(&self.conn, repo, arch) - .stream(&self.conn) - .await?; - - let mut commited_ids: Vec = Vec::new(); - - while let Some(pkg) = pkgs.next().await.transpose()? 
{ - commited_ids.push(pkg.id); - - let mut files_tmp_file = tokio::fs::File::create(&files_tmp_file_path).await?; - let mut desc_tmp_file = tokio::fs::File::create(&desc_tmp_file_path).await?; - - package::write_files(&self.conn, &mut files_tmp_file, &pkg).await?; - package::write_desc(&self.conn, &mut desc_tmp_file, &pkg).await?; - - let full_name = format!("{}-{}", pkg.name, pkg.version); - - ar_db - .add_entry(&full_name, &desc_tmp_file_path, true) - .await?; - ar_files - .add_entry(&full_name, &desc_tmp_file_path, true) - .await?; - ar_files - .add_entry(&full_name, &files_tmp_file_path, false) - .await?; - } - - // Cleanup - ar_db.close().await?; - ar_files.close().await?; - - let repo_dir = self.repos_dir.join(repo.to_string()); - - // Move the db archives to their respective places - tokio::fs::rename(tmp_ar_db_path, repo_dir.join(format!("{}.db.tar.gz", arch))).await?; - tokio::fs::rename( - tmp_ar_files_path, - repo_dir.join(format!("{}.files.tar.gz", arch)), - ) - .await?; - - // Only after we have successfully written everything to disk do we update the database. - // This order ensures any failure can be recovered, as the database is our single source of - // truth. 
- db::Package::update_many() - .col_expr( - db::package::Column::State, - Expr::value(db::PackageState::Committed), - ) - .filter(db::package::Column::Id.is_in(commited_ids)) - .exec(&self.conn) - .await?; - - // If this fails there's no point in failing the function + if there were no packages in - // the repo, this fails anyway because the temp file doesn't exist - let _ = tokio::fs::remove_file(desc_tmp_file_path).await; - let _ = tokio::fs::remove_file(files_tmp_file_path).await; - - tracing::info!("Package archives generated for repo {} ('{}')", repo, arch); - - Ok(()) - } - - /// Clean any remaining old package files from the database and file system - pub async fn remove_stale_pkgs(&self) -> crate::Result<()> { - let mut pkgs = db::query::package::stale_pkgs(&self.conn) - .stream(&self.conn) - .await?; - - // Ids are monotonically increasing, so the max id suffices to know which packages to - // remove later - let mut max_id = -1; - let mut removed_pkgs = 0; - - while let Some(pkg) = pkgs.next().await.transpose()? { - // Failing to remove the package file isn't the biggest problem - let _ = tokio::fs::remove_file( - self.repos_dir - .join(pkg.repo_id.to_string()) - .join(pkg.id.to_string()), - ) - .await; - - if pkg.id > max_id { - max_id = pkg.id; - } - - removed_pkgs += 1; - } - - if removed_pkgs > 0 { - db::query::package::delete_stale_pkgs(&self.conn, max_id).await?; - } - - tracing::info!("Removed {removed_pkgs} stale package(s)"); - - Ok(()) - } - - pub async fn pkg_parse_task(&self) { - loop { - // Receive the next message and immediately drop the mutex afterwards. As long as the - // quue is empty, this will lock the mutex. This is okay, as the mutex will be unlocked - // as soon as a message is received, so another worker can pick up the mutex. 
- let msg = { - let mut recv = self.pkg_queue.1.lock().await; - recv.recv().await - }; - - if let Some(msg) = msg { - // TODO better handle this error (retry if failure wasn't because the package is - // faulty) - let _ = self - .add_pkg_from_path(msg.path, msg.repo) - .await - .inspect_err(|e| tracing::error!("{:?}", e)); - - let old = self - .repos - .read() - .await - .get(&msg.repo) - .map(|n| n.0.fetch_sub(1, Ordering::SeqCst)); - - // Every time the queue for a repo becomes empty, we run a sync job - if old == Some(1) { - // TODO error handling - let _ = self.sync_repo(msg.repo).await; - - // TODO move this so that we only clean if entire queue is empty, not just - // queue for specific repo - let _ = self.remove_stale_pkgs().await; - } - } - } - } - - pub async fn queue_pkg(&self, repo: i32, path: PathBuf) { - self.pkg_queue.0.send(PkgQueueMsg { path, repo }).unwrap(); - self.repos.read().await.get(&repo).inspect(|n| { - n.0.fetch_add(1, Ordering::SeqCst); - }); - } - - pub async fn get_repo(&self, distro: &str, repo: &str) -> crate::Result> { - Ok(db::Repo::find() - .find_also_related(db::Distro) - .filter( - Condition::all() - .add(db::repo::Column::Name.eq(repo)) - .add(db::distro::Column::Name.eq(distro)), - ) - .one(&self.conn) - .await - .map(|res| res.map(|(repo, _)| repo.id))?) 
- } - - pub async fn get_or_create_repo(&self, distro: &str, repo: &str) -> crate::Result { - let mut repos = self.repos.write().await; - - let distro_id: Option = db::Distro::find() - .filter(db::distro::Column::Name.eq(distro)) - .select_only() - .column(db::distro::Column::Id) - .into_tuple() - .one(&self.conn) - .await?; - - let distro_id = if let Some(id) = distro_id { - id - } else { - let new_distro = db::distro::ActiveModel { - id: NotSet, - name: Set(distro.to_string()), - description: NotSet, - }; - - new_distro.insert(&self.conn).await?.id - }; - - let repo_id: Option = db::Repo::find() - .filter(db::repo::Column::DistroId.eq(distro_id)) - .filter(db::repo::Column::Name.eq(repo)) - .select_only() - .column(db::repo::Column::Id) - .into_tuple() - .one(&self.conn) - .await?; - - let repo_id = if let Some(id) = repo_id { - id - } else { - let new_repo = db::repo::ActiveModel { - id: NotSet, - distro_id: Set(distro_id), - name: Set(repo.to_string()), - description: NotSet, - }; - let id = new_repo.insert(&self.conn).await?.id; - - tokio::fs::create_dir(self.repos_dir.join(id.to_string())).await?; - repos.insert(id, Default::default()); - - id - }; - - Ok(repo_id) - } - - async fn add_pkg_from_path>(&self, path: P, repo: i32) -> crate::Result<()> { - let path_clone = path.as_ref().to_path_buf(); - let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)) - .await - .unwrap()?; - - // TODO prevent database from being updated but file failing to move to repo dir? 
- let pkg = db::query::package::insert(&self.conn, repo, pkg).await?; - - let dest_path = self - .repos_dir - .join(repo.to_string()) - .join(pkg.id.to_string()); - tokio::fs::rename(path.as_ref(), dest_path).await?; - - tracing::info!( - "Added '{}-{}-{}' to repository {}", - pkg.name, - pkg.version, - pkg.arch, - repo, - ); - - Ok(()) - } - - pub async fn remove_repo(&self, repo: i32) -> crate::Result<()> { - self.repos.write().await.remove(&repo); - db::Repo::delete_by_id(repo).exec(&self.conn).await?; - let _ = tokio::fs::remove_dir_all(self.repos_dir.join(repo.to_string())).await; - - Ok(()) - } - - /// Remove all packages in the repository that have a given arch. This method marks all - /// packages with the given architecture as "pending deletion", before performing a manual sync - /// & removal of stale packages. - pub async fn remove_repo_arch(&self, repo: i32, arch: &str) -> crate::Result<()> { - db::Package::update_many() - .col_expr( - db::package::Column::State, - Expr::value(db::PackageState::PendingDeletion), - ) - .filter( - Condition::all() - .add(db::package::Column::RepoId.eq(repo)) - .add(db::package::Column::Arch.eq(arch)), - ) - .exec(&self.conn) - .await?; - - self.sync_repo(repo).await?; - self.remove_stale_pkgs().await?; - - Ok(()) - } - - pub fn random_file_paths(&self) -> [PathBuf; C] { - std::array::from_fn(|_| { - let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - self.repos_dir.join(uuid.to_string()) - }) - } -} diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 16c62a5..953b631 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -1,10 +1,8 @@ mod archive; mod manager; -mod manager2; pub mod package; -pub use manager::DistroMgr; -pub use manager2::RepoMgr; +pub use manager::RepoMgr; use crate::FsConfig; From 76395afb10b08db0557be5430c492950bd27e38e Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 19 Jun 2024 22:07:30 +0200 Subject: [PATCH 48/73] feat: only return committed packages from the 
api --- server/src/api/mod.rs | 8 +++---- server/src/api/pagination.rs | 24 ++++++++------------- server/src/db/entities/package.rs | 1 + server/src/db/query/distro.rs | 5 ++--- server/src/db/query/package.rs | 35 ++++++++++++++++++++----------- server/src/db/query/repo.rs | 5 ++--- 6 files changed, 41 insertions(+), 37 deletions(-) diff --git a/server/src/api/mod.rs b/server/src/api/mod.rs index 0a0a56e..4678257 100644 --- a/server/src/api/mod.rs +++ b/server/src/api/mod.rs @@ -22,10 +22,10 @@ async fn get_repos( Query(pagination): Query, Query(filter): Query, ) -> crate::Result>> { - let (total_pages, items) = + let items = db::query::repo::page(&global.db, pagination.per_page, pagination.page - 1, filter).await?; - Ok(Json(pagination.res(total_pages, items))) + Ok(Json(pagination.res(items))) } async fn get_single_repo( @@ -44,11 +44,11 @@ async fn get_packages( Query(pagination): Query, Query(filter): Query, ) -> crate::Result>> { - let (total_pages, pkgs) = + let items = db::query::package::page(&global.db, pagination.per_page, pagination.page - 1, filter) .await?; - Ok(Json(pagination.res(total_pages, pkgs))) + Ok(Json(pagination.res(items))) } async fn get_single_package( diff --git a/server/src/api/pagination.rs b/server/src/api/pagination.rs index 02e32dc..3ede5bf 100644 --- a/server/src/api/pagination.rs +++ b/server/src/api/pagination.rs @@ -1,19 +1,19 @@ use serde::{Deserialize, Serialize}; #[derive(Deserialize)] -#[serde(default)] pub struct Query { + #[serde(default = "default_page")] pub page: u64, + #[serde(default = "default_per_page")] pub per_page: u64, } -impl Default for Query { - fn default() -> Self { - Query { - page: 1, - per_page: 25, - } - } +fn default_page() -> u64 { + 1 +} + +fn default_per_page() -> u64 { + 25 } #[derive(Serialize)] @@ -23,21 +23,15 @@ where { pub page: u64, pub per_page: u64, - pub total_pages: u64, pub count: usize, pub items: Vec, } impl Query { - pub fn res Serialize>( - self, - total_pages: u64, - items: Vec, - ) 
-> PaginatedResponse { + pub fn res Serialize>(self, items: Vec) -> PaginatedResponse { PaginatedResponse { page: self.page, per_page: self.per_page, - total_pages, count: items.len(), items, } diff --git a/server/src/db/entities/package.rs b/server/src/db/entities/package.rs index 08ac2ab..4ef90a4 100644 --- a/server/src/db/entities/package.rs +++ b/server/src/db/entities/package.rs @@ -26,6 +26,7 @@ pub struct Model { pub pgp_sig_size: Option, pub sha256_sum: String, pub compression: String, + #[serde(skip_serializing)] pub state: PackageState, } diff --git a/server/src/db/query/distro.rs b/server/src/db/query/distro.rs index c4fc70f..8647f2a 100644 --- a/server/src/db/query/distro.rs +++ b/server/src/db/query/distro.rs @@ -21,15 +21,14 @@ pub async fn page( per_page: u64, page: u64, filter: Filter, -) -> Result<(u64, Vec)> { +) -> Result> { let paginator = Distro::find() .filter(filter) .order_by_asc(distro::Column::Id) .paginate(conn, per_page); let repos = paginator.fetch_page(page).await?; - let total_pages = paginator.num_pages().await?; - Ok((total_pages, repos)) + Ok(repos) } pub async fn by_id(conn: &DbConn, id: i32) -> Result> { diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index 2ba1996..bfdad73 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -17,10 +17,7 @@ impl IntoCondition for Filter { Condition::all() .add_option(self.repo.map(|repo| package::Column::RepoId.eq(repo))) .add_option(self.arch.map(|arch| package::Column::Arch.eq(arch))) - .add_option( - self.name - .map(|name| package::Column::Name.like(format!("%{}%", name))), - ) + .add_option(self.name.map(|name| package::Column::Name.contains(name))) } } @@ -29,15 +26,29 @@ pub async fn page( per_page: u64, page: u64, filter: Filter, -) -> super::Result<(u64, Vec)> { - let paginator = Package::find() - .filter(filter) - .order_by_asc(package::Column::Id) - .paginate(conn, per_page); - let packages = 
paginator.fetch_page(page).await?; - let total_pages = paginator.num_pages().await?; +) -> crate::Result> { + let p2 = Alias::new("p2"); + let query = Query::select() + .columns(db::package::Column::iter().map(|c| (db::package::Entity, c))) + .from(db::package::Entity) + .join_subquery( + JoinType::InnerJoin, + max_pkg_ids_query(true), + p2.clone(), + Expr::col((db::package::Entity, db::package::Column::Id)) + .eq(Expr::col((p2.clone(), Alias::new("max_id")))), + ) + .cond_where(filter) + .order_by((db::package::Entity, db::package::Column::Id), Order::Asc) + .to_owned(); + let builder = conn.get_database_backend(); + let sql = builder.build(&query); - Ok((total_pages, packages)) + Ok(db::Package::find() + .from_raw_sql(sql) + .paginate(conn, per_page) + .fetch_page(page) + .await?) } pub async fn by_id(conn: &DbConn, id: i32) -> Result> { diff --git a/server/src/db/query/repo.rs b/server/src/db/query/repo.rs index 2ad54bf..a2daa26 100644 --- a/server/src/db/query/repo.rs +++ b/server/src/db/query/repo.rs @@ -21,15 +21,14 @@ pub async fn page( per_page: u64, page: u64, filter: Filter, -) -> Result<(u64, Vec)> { +) -> Result> { let paginator = Repo::find() .filter(filter) .order_by_asc(repo::Column::Id) .paginate(conn, per_page); let repos = paginator.fetch_page(page).await?; - let total_pages = paginator.num_pages().await?; - Ok((total_pages, repos)) + Ok(repos) } pub async fn by_id(conn: &DbConn, id: i32) -> Result> { From 8864925e58188e0b56a3846a3fcfafd533313a3e Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Mon, 24 Jun 2024 13:02:26 +0200 Subject: [PATCH 49/73] feat: set up proper repo actors; refactor code; this commit is too large --- server/src/cli.rs | 62 +---------------------------- server/src/main.rs | 77 ++++++++++++++++++++++++++++++++++-- server/src/repo/actor.rs | 82 +++++++++++++++++++++++++++++++++++++++ server/src/repo/handle.rs | 72 ++++++++++++++++++++++++++++++++++ server/src/repo/mod.rs | 4 ++ 5 files changed, 232 insertions(+), 65 
deletions(-) create mode 100644 server/src/repo/actor.rs create mode 100644 server/src/repo/handle.rs diff --git a/server/src/cli.rs b/server/src/cli.rs index c6998eb..5e8469e 100644 --- a/server/src/cli.rs +++ b/server/src/cli.rs @@ -1,12 +1,6 @@ -use crate::{Config, FsConfig, Global}; +use std::path::PathBuf; -use std::{io, path::PathBuf, sync::Arc}; - -use axum::Router; use clap::Parser; -use sea_orm_migration::MigratorTrait; -use tower_http::trace::TraceLayer; -use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; #[derive(Parser)] #[command(author, version, about, long_about = None)] @@ -19,57 +13,3 @@ pub struct Cli { )] pub config_file: PathBuf, } - -impl Cli { - pub async fn run(&self) -> crate::Result<()> { - let config: Config = Config::figment(&self.config_file) - .extract() - .inspect_err(|e| tracing::error!("{}", e))?; - - tracing_subscriber::registry() - .with(tracing_subscriber::EnvFilter::new(config.log_level.clone())) - .with(tracing_subscriber::fmt::layer()) - .init(); - - tracing::info!("Connecting to database"); - let db = crate::db::connect(&config.db).await?; - - crate::db::Migrator::up(&db, None).await?; - - let mgr = match &config.fs { - FsConfig::Local { data_dir } => { - crate::repo::RepoMgr::new(data_dir.join("repos"), db.clone()).await? 
- } - }; - - let mgr = Arc::new(mgr); - - for _ in 0..config.pkg_workers { - let clone = Arc::clone(&mgr); - - tokio::spawn(async move { clone.pkg_parse_task().await }); - } - - let global = Global { - config: config.clone(), - mgr, - db, - }; - - // build our application with a single route - let app = Router::new() - .nest("/api", crate::api::router()) - .merge(crate::repo::router(&config.api_key)) - .with_state(global) - .layer(TraceLayer::new_for_http()); - - let domain: String = format!("{}:{}", config.domain, config.port) - .parse() - .unwrap(); - let listener = tokio::net::TcpListener::bind(domain).await?; - // run it with hyper on localhost:3000 - Ok(axum::serve(listener, app.into_make_service()) - .await - .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?) - } -} diff --git a/server/src/main.rs b/server/src/main.rs index eb1c3d0..5c0ecac 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -8,9 +8,15 @@ mod repo; pub use config::{Config, DbConfig, FsConfig}; pub use error::{Result, ServerError}; -use std::sync::Arc; +use std::{io, path::PathBuf, sync::Arc}; + +use axum::Router; +use tower_http::trace::TraceLayer; use clap::Parser; +use sea_orm_migration::MigratorTrait; +use tokio::runtime; +use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; pub const ANY_ARCH: &'static str = "any"; @@ -21,8 +27,71 @@ pub struct Global { db: sea_orm::DbConn, } -#[tokio::main] -async fn main() -> crate::Result<()> { +fn main() -> crate::Result<()> { + let rt = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build() + .unwrap(); + let handle = rt.handle(); + let cli = cli::Cli::parse(); - cli.run().await + let global = setup(handle, cli.config_file)?; + + handle.block_on(run(global)) +} + +fn setup(rt: &runtime::Handle, config_file: PathBuf) -> crate::Result { + let config: Config = Config::figment(config_file) + .extract() + .inspect_err(|e| tracing::error!("{}", e))?; + + tracing_subscriber::registry() + 
.with(tracing_subscriber::EnvFilter::new(config.log_level.clone())) + .with(tracing_subscriber::fmt::layer()) + .init(); + + tracing::info!("Connecting to database"); + let db = rt.block_on(crate::db::connect(&config.db))?; + rt.block_on(crate::db::Migrator::up(&db, None))?; + + let mgr = match &config.fs { + FsConfig::Local { data_dir } => { + rt.block_on(crate::repo::RepoMgr::new( + data_dir.join("repos"), + db.clone(), + ))? + //RepoHandle::start(data_dir.join("repos"), db.clone(), config.pkg_workers, rt.clone())? + } + }; + let mgr = Arc::new(mgr); + + for _ in 0..config.pkg_workers { + let clone = Arc::clone(&mgr); + + rt.spawn(async move { clone.pkg_parse_task().await }); + } + + Ok(Global { + config: config.clone(), + mgr, + db, + }) +} + +async fn run(global: Global) -> crate::Result<()> { + let domain: String = format!("{}:{}", &global.config.domain, global.config.port) + .parse() + .unwrap(); + let listener = tokio::net::TcpListener::bind(domain).await?; + + // build our application with a single route + let app = Router::new() + .nest("/api", crate::api::router()) + .merge(crate::repo::router(&global.config.api_key)) + .with_state(global) + .layer(TraceLayer::new_for_http()); + // run it with hyper on localhost:3000 + Ok(axum::serve(listener, app.into_make_service()) + .await + .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?) 
} diff --git a/server/src/repo/actor.rs b/server/src/repo/actor.rs new file mode 100644 index 0000000..1291656 --- /dev/null +++ b/server/src/repo/actor.rs @@ -0,0 +1,82 @@ +use super::{archive, package, RepoHandle}; +use crate::db; + +use std::{ + collections::HashMap, + path::{Path, PathBuf}, + sync::{atomic::AtomicU32, Arc, Mutex, RwLock}, +}; + +use sea_orm::{ + ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DbConn, EntityTrait, JoinType, + ModelTrait, NotSet, QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, +}; +use tokio::{runtime, sync::mpsc::UnboundedReceiver}; + +pub enum RepoCommand { + ParsePkg(i32, PathBuf), +} + +/// The actor is responsible for mutating the repositories. They receive their commands their +/// messages and process these commands in both a synchronous and asynchronous way. +pub struct RepoActor { + repos_dir: PathBuf, + conn: DbConn, + rt: runtime::Handle, + rx: Arc>>, + repos: Arc>)>>>, +} + +impl RepoActor { + pub fn new( + repos_dir: PathBuf, + conn: DbConn, + rt: runtime::Handle, + rx: Arc>>, + repos: Arc>)>>>, + ) -> Self { + Self { + repos_dir, + conn, + rt, + rx, + repos, + } + } + + /// Run the main actor loop + pub fn run(self) { + while let Some(msg) = { + let mut rx = self.rx.lock().unwrap(); + rx.blocking_recv() + } { + match msg { + RepoCommand::ParsePkg(repo, path) => { + let _ = self.parse_pkg(repo, path); + } + } + } + } + + fn parse_pkg(&self, repo: i32, path: PathBuf) -> crate::Result<()> { + let pkg = package::Package::open(&path)?; + let pkg = self + .rt + .block_on(db::query::package::insert(&self.conn, repo, pkg))?; + let dest_path = self + .repos_dir + .join(repo.to_string()) + .join(pkg.id.to_string()); + std::fs::rename(path, dest_path)?; + + tracing::info!( + "Added '{}-{}-{}' to repository {}", + pkg.name, + pkg.version, + pkg.arch, + repo, + ); + + Ok(()) + } +} diff --git a/server/src/repo/handle.rs b/server/src/repo/handle.rs new file mode 100644 index 0000000..b720390 
--- /dev/null +++ b/server/src/repo/handle.rs @@ -0,0 +1,72 @@ +use crate::db; + +use std::{ + collections::HashMap, + path::{Path, PathBuf}, + sync::{atomic::AtomicU32, Arc, Mutex, RwLock}, +}; + +use sea_orm::{ + ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DbConn, EntityTrait, JoinType, + ModelTrait, NotSet, QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, +}; +use tokio::{ + runtime, + sync::mpsc::{unbounded_channel, UnboundedSender}, +}; + +#[derive(Clone)] +pub struct RepoHandle { + repos_dir: PathBuf, + conn: DbConn, + tx: UnboundedSender, + repos: Arc>)>>>, +} + +impl RepoHandle { + pub fn start( + repos_dir: impl AsRef, + conn: DbConn, + actors: u32, + rt: runtime::Handle, + ) -> crate::Result { + std::fs::create_dir_all(repos_dir.as_ref())?; + + let (tx, rx) = unbounded_channel(); + + let mut repos = HashMap::new(); + let repo_ids: Vec = rt.block_on( + db::Repo::find() + .select_only() + .column(db::repo::Column::Id) + .into_tuple() + .all(&conn), + )?; + + for id in repo_ids { + repos.insert(id, Default::default()); + } + + let rx = Arc::new(Mutex::new(rx)); + let repos = Arc::new(RwLock::new(repos)); + + for _ in 0..actors { + let actor = super::RepoActor::new( + repos_dir.as_ref().to_path_buf(), + conn.clone(), + rt.clone(), + Arc::clone(&rx), + Arc::clone(&repos), + ); + + std::thread::spawn(|| actor.run()); + } + + Ok(Self { + repos_dir: repos_dir.as_ref().to_path_buf(), + conn, + tx, + repos, + }) + } +} diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 953b631..6ea74ab 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -1,7 +1,11 @@ +mod actor; mod archive; +mod handle; mod manager; pub mod package; +pub use actor::{RepoActor, RepoCommand}; +pub use handle::RepoHandle; pub use manager::RepoMgr; use crate::FsConfig; From 656df06b4e4c40827a5f09768784c5027d4102df Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Tue, 25 Jun 2024 16:53:30 +0200 Subject: [PATCH 50/73] refactor: use 
shared state struct --- server/src/repo/actor.rs | 54 ++++++++++++++++++++++++++------------- server/src/repo/handle.rs | 26 ++++--------------- server/src/repo/mod.rs | 2 +- 3 files changed, 42 insertions(+), 40 deletions(-) diff --git a/server/src/repo/actor.rs b/server/src/repo/actor.rs index 1291656..c232df2 100644 --- a/server/src/repo/actor.rs +++ b/server/src/repo/actor.rs @@ -11,43 +11,60 @@ use sea_orm::{ ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DbConn, EntityTrait, JoinType, ModelTrait, NotSet, QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, }; -use tokio::{runtime, sync::mpsc::UnboundedReceiver}; +use tokio::{ + runtime, + sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, +}; pub enum RepoCommand { ParsePkg(i32, PathBuf), } +pub struct RepoSharedState { + repos_dir: PathBuf, + conn: DbConn, + rx: Mutex>, + tx: UnboundedSender, + repos: RwLock>)>>, +} + +impl RepoSharedState { + pub fn new( + repos_dir: impl AsRef, + conn: DbConn, + repos: HashMap>)>, + ) -> Self { + let (tx, rx) = unbounded_channel(); + + Self { + repos_dir: repos_dir.as_ref().to_path_buf(), + conn, + rx: Mutex::new(rx), + tx, + repos: RwLock::new(repos), + } + } +} + /// The actor is responsible for mutating the repositories. They receive their commands their /// messages and process these commands in both a synchronous and asynchronous way. 
pub struct RepoActor { - repos_dir: PathBuf, - conn: DbConn, rt: runtime::Handle, - rx: Arc>>, - repos: Arc>)>>>, + state: Arc, } impl RepoActor { - pub fn new( - repos_dir: PathBuf, - conn: DbConn, - rt: runtime::Handle, - rx: Arc>>, - repos: Arc>)>>>, - ) -> Self { + pub fn new(rt: runtime::Handle, state: Arc) -> Self { Self { - repos_dir, - conn, rt, - rx, - repos, + state: Arc::clone(&state), } } /// Run the main actor loop pub fn run(self) { while let Some(msg) = { - let mut rx = self.rx.lock().unwrap(); + let mut rx = self.state.rx.lock().unwrap(); rx.blocking_recv() } { match msg { @@ -62,8 +79,9 @@ impl RepoActor { let pkg = package::Package::open(&path)?; let pkg = self .rt - .block_on(db::query::package::insert(&self.conn, repo, pkg))?; + .block_on(db::query::package::insert(&self.state.conn, repo, pkg))?; let dest_path = self + .state .repos_dir .join(repo.to_string()) .join(pkg.id.to_string()); diff --git a/server/src/repo/handle.rs b/server/src/repo/handle.rs index b720390..b918aaf 100644 --- a/server/src/repo/handle.rs +++ b/server/src/repo/handle.rs @@ -1,3 +1,4 @@ +use super::RepoSharedState; use crate::db; use std::{ @@ -17,10 +18,7 @@ use tokio::{ #[derive(Clone)] pub struct RepoHandle { - repos_dir: PathBuf, - conn: DbConn, - tx: UnboundedSender, - repos: Arc>)>>>, + state: Arc, } impl RepoHandle { @@ -32,8 +30,6 @@ impl RepoHandle { ) -> crate::Result { std::fs::create_dir_all(repos_dir.as_ref())?; - let (tx, rx) = unbounded_channel(); - let mut repos = HashMap::new(); let repo_ids: Vec = rt.block_on( db::Repo::find() @@ -47,26 +43,14 @@ impl RepoHandle { repos.insert(id, Default::default()); } - let rx = Arc::new(Mutex::new(rx)); - let repos = Arc::new(RwLock::new(repos)); + let state = Arc::new(RepoSharedState::new(repos_dir, conn, repos)); for _ in 0..actors { - let actor = super::RepoActor::new( - repos_dir.as_ref().to_path_buf(), - conn.clone(), - rt.clone(), - Arc::clone(&rx), - Arc::clone(&repos), - ); + let actor = 
super::RepoActor::new(rt.clone(), Arc::clone(&state)); std::thread::spawn(|| actor.run()); } - Ok(Self { - repos_dir: repos_dir.as_ref().to_path_buf(), - conn, - tx, - repos, - }) + Ok(Self { state }) } } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 6ea74ab..8e9a627 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -4,7 +4,7 @@ mod handle; mod manager; pub mod package; -pub use actor::{RepoActor, RepoCommand}; +pub use actor::{RepoActor, RepoCommand, RepoSharedState}; pub use handle::RepoHandle; pub use manager::RepoMgr; From 80d52915089d5209cdfb9c97803a372072115235 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Tue, 25 Jun 2024 17:05:14 +0200 Subject: [PATCH 51/73] refactor: switch to new repo actors --- server/src/main.rs | 30 ++++++----- server/src/repo/actor.rs | 2 +- server/src/repo/handle.rs | 6 +-- server/src/repo/mod.rs | 110 ++++++++++++++++++++------------------ 4 files changed, 79 insertions(+), 69 deletions(-) diff --git a/server/src/main.rs b/server/src/main.rs index 5c0ecac..274d419 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -23,7 +23,7 @@ pub const ANY_ARCH: &'static str = "any"; #[derive(Clone)] pub struct Global { config: crate::config::Config, - mgr: Arc, + repo: repo::Handle, db: sea_orm::DbConn, } @@ -54,26 +54,32 @@ fn setup(rt: &runtime::Handle, config_file: PathBuf) -> crate::Result { let db = rt.block_on(crate::db::connect(&config.db))?; rt.block_on(crate::db::Migrator::up(&db, None))?; - let mgr = match &config.fs { + let repo = match &config.fs { FsConfig::Local { data_dir } => { - rt.block_on(crate::repo::RepoMgr::new( + crate::repo::Handle::start( data_dir.join("repos"), db.clone(), - ))? + rt.clone(), + config.pkg_workers, + )? + //rt.block_on(crate::repo::RepoMgr::new( + // data_dir.join("repos"), + // db.clone(), + //))? //RepoHandle::start(data_dir.join("repos"), db.clone(), config.pkg_workers, rt.clone())? 
} }; - let mgr = Arc::new(mgr); - - for _ in 0..config.pkg_workers { - let clone = Arc::clone(&mgr); - - rt.spawn(async move { clone.pkg_parse_task().await }); - } + //let mgr = Arc::new(mgr); + // + //for _ in 0..config.pkg_workers { + // let clone = Arc::clone(&mgr); + // + // rt.spawn(async move { clone.pkg_parse_task().await }); + //} Ok(Global { config: config.clone(), - mgr, + repo, db, }) } diff --git a/server/src/repo/actor.rs b/server/src/repo/actor.rs index c232df2..a24922f 100644 --- a/server/src/repo/actor.rs +++ b/server/src/repo/actor.rs @@ -1,4 +1,4 @@ -use super::{archive, package, RepoHandle}; +use super::{archive, package, Handle}; use crate::db; use std::{ diff --git a/server/src/repo/handle.rs b/server/src/repo/handle.rs index b918aaf..ff12f42 100644 --- a/server/src/repo/handle.rs +++ b/server/src/repo/handle.rs @@ -17,16 +17,16 @@ use tokio::{ }; #[derive(Clone)] -pub struct RepoHandle { +pub struct Handle { state: Arc, } -impl RepoHandle { +impl Handle { pub fn start( repos_dir: impl AsRef, conn: DbConn, - actors: u32, rt: runtime::Handle, + actors: u32, ) -> crate::Result { std::fs::create_dir_all(repos_dir.as_ref())?; diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 8e9a627..3bd2a1c 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -5,7 +5,7 @@ mod manager; pub mod package; pub use actor::{RepoActor, RepoCommand, RepoSharedState}; -pub use handle::RepoHandle; +pub use handle::Handle; pub use manager::RepoMgr; use crate::FsConfig; @@ -53,30 +53,31 @@ async fn get_file( Path((distro, repo, arch, file_name)): Path<(String, String, String, String)>, req: Request, ) -> crate::Result { - if let Some(repo_id) = global.mgr.get_repo(&distro, &repo).await? 
{ - match global.config.fs { - FsConfig::Local { data_dir } => { - let repo_dir = data_dir.join("repos").join(repo_id.to_string()); - - let file_name = if file_name == format!("{}.db", repo) - || file_name == format!("{}.db.tar.gz", repo) - { - format!("{}.db.tar.gz", arch) - } else if file_name == format!("{}.files", repo) - || file_name == format!("{}.files.tar.gz", repo) - { - format!("{}.files.tar.gz", arch) - } else { - file_name - }; - - let path = repo_dir.join(file_name); - Ok(ServeFile::new(path).oneshot(req).await) - } - } - } else { - Err(StatusCode::NOT_FOUND.into()) - } + Ok(StatusCode::NOT_FOUND) + //if let Some(repo_id) = global.mgr.get_repo(&distro, &repo).await? { + // match global.config.fs { + // FsConfig::Local { data_dir } => { + // let repo_dir = data_dir.join("repos").join(repo_id.to_string()); + // + // let file_name = if file_name == format!("{}.db", repo) + // || file_name == format!("{}.db.tar.gz", repo) + // { + // format!("{}.db.tar.gz", arch) + // } else if file_name == format!("{}.files", repo) + // || file_name == format!("{}.files.tar.gz", repo) + // { + // format!("{}.files.tar.gz", arch) + // } else { + // file_name + // }; + // + // let path = repo_dir.join(file_name); + // Ok(ServeFile::new(path).oneshot(req).await) + // } + // } + //} else { + // Err(StatusCode::NOT_FOUND.into()) + //} } async fn post_package_archive( @@ -84,46 +85,49 @@ async fn post_package_archive( Path((distro, repo)): Path<(String, String)>, body: Body, ) -> crate::Result { - let mut body = StreamReader::new(body.into_data_stream().map_err(std::io::Error::other)); - let repo = global.mgr.get_or_create_repo(&distro, &repo).await?; - let [tmp_path] = global.mgr.random_file_paths(); - - let mut tmp_file = tokio::fs::File::create(&tmp_path).await?; - tokio::io::copy(&mut body, &mut tmp_file).await?; - - global.mgr.queue_pkg(repo, tmp_path).await; - - Ok(StatusCode::ACCEPTED) + Ok(StatusCode::NOT_FOUND) + //let mut body = 
StreamReader::new(body.into_data_stream().map_err(std::io::Error::other)); + //let repo = global.mgr.get_or_create_repo(&distro, &repo).await?; + //let [tmp_path] = global.mgr.random_file_paths(); + // + //let mut tmp_file = tokio::fs::File::create(&tmp_path).await?; + //tokio::io::copy(&mut body, &mut tmp_file).await?; + // + //global.mgr.queue_pkg(repo, tmp_path).await; + // + //Ok(StatusCode::ACCEPTED) } async fn delete_repo( State(global): State, Path((distro, repo)): Path<(String, String)>, ) -> crate::Result { - if let Some(repo) = global.mgr.get_repo(&distro, &repo).await? { - global.mgr.remove_repo(repo).await?; - - tracing::info!("Removed repository {repo}"); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } + Ok(StatusCode::NOT_FOUND) + //if let Some(repo) = global.mgr.get_repo(&distro, &repo).await? { + // global.mgr.remove_repo(repo).await?; + // + // tracing::info!("Removed repository {repo}"); + // + // Ok(StatusCode::OK) + //} else { + // Ok(StatusCode::NOT_FOUND) + //} } async fn delete_arch_repo( State(global): State, Path((distro, repo, arch)): Path<(String, String, String)>, ) -> crate::Result { - if let Some(repo) = global.mgr.get_repo(&distro, &repo).await? { - global.mgr.remove_repo_arch(repo, &arch).await?; - - tracing::info!("Removed architecture '{arch}' from repository {repo}"); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } + Ok(StatusCode::NOT_FOUND) + //if let Some(repo) = global.mgr.get_repo(&distro, &repo).await? 
{ + // global.mgr.remove_repo_arch(repo, &arch).await?; + // + // tracing::info!("Removed architecture '{arch}' from repository {repo}"); + // + // Ok(StatusCode::OK) + //} else { + // Ok(StatusCode::NOT_FOUND) + //} //if let Some(mgr) = global.mgr.get_mgr(&distro).await { // let repo_removed = mgr.remove_repo_arch(&repo, &arch).await?; // From a7c0d3e062b025eb3ca6f2bbdffb774c8741a4f9 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 26 Jun 2024 12:27:51 +0200 Subject: [PATCH 52/73] feat: start of sync reimplementation --- server/src/repo/actor.rs | 62 +++++++++++++++++++++++++++---- server/src/repo/handle.rs | 77 ++++++++++++++++++++++++++++++++++++++- server/src/repo/mod.rs | 21 +++++------ 3 files changed, 140 insertions(+), 20 deletions(-) diff --git a/server/src/repo/actor.rs b/server/src/repo/actor.rs index a24922f..c1a2c73 100644 --- a/server/src/repo/actor.rs +++ b/server/src/repo/actor.rs @@ -4,7 +4,10 @@ use crate::db; use std::{ collections::HashMap, path::{Path, PathBuf}, - sync::{atomic::AtomicU32, Arc, Mutex, RwLock}, + sync::{ + atomic::{AtomicU32, Ordering}, + Arc, Mutex, + }, }; use sea_orm::{ @@ -13,7 +16,10 @@ use sea_orm::{ }; use tokio::{ runtime, - sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, + sync::{ + mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, + RwLock, + }, }; pub enum RepoCommand { @@ -21,11 +27,11 @@ pub enum RepoCommand { } pub struct RepoSharedState { - repos_dir: PathBuf, - conn: DbConn, - rx: Mutex>, - tx: UnboundedSender, - repos: RwLock>)>>, + pub repos_dir: PathBuf, + pub conn: DbConn, + pub rx: Mutex>, + pub tx: UnboundedSender, + pub repos: RwLock>)>>, } impl RepoSharedState { @@ -70,11 +76,23 @@ impl RepoActor { match msg { RepoCommand::ParsePkg(repo, path) => { let _ = self.parse_pkg(repo, path); + + if self + .state + .repos + .blocking_read() + .get(&repo) + .map(|n| n.0.load(Ordering::SeqCst)) + == Some(0) + { + // TODO sync + } } } } } + /// Parse a queued package for the 
given repository. fn parse_pkg(&self, repo: i32, path: PathBuf) -> crate::Result<()> { let pkg = package::Package::open(&path)?; let pkg = self @@ -95,6 +113,36 @@ impl RepoActor { repo, ); + self.state.repos.blocking_read().get(&repo).inspect(|n| { + n.0.fetch_sub(1, Ordering::SeqCst); + }); + + Ok(()) + } + + fn sync_repo(&self, repo: i32) -> crate::Result<()> { + let repos = self.state.repos.blocking_read(); + + if let Some(_guard) = repos.get(&repo).map(|n| n.1.lock()) { + let archs: Vec = self.rt.block_on( + db::Package::find() + .filter(db::package::Column::RepoId.eq(repo)) + .select_only() + .column(db::package::Column::Arch) + .distinct() + .into_tuple() + .all(&self.state.conn), + )?; + + for arch in archs { + self.generate_archives(repo, &arch)?; + } + } + + Ok(()) + } + + fn generate_archives(&self, repo: i32, arch: &str) -> crate::Result<()> { Ok(()) } } diff --git a/server/src/repo/handle.rs b/server/src/repo/handle.rs index ff12f42..262f274 100644 --- a/server/src/repo/handle.rs +++ b/server/src/repo/handle.rs @@ -1,10 +1,13 @@ -use super::RepoSharedState; +use super::{RepoCommand, RepoSharedState}; use crate::db; use std::{ collections::HashMap, path::{Path, PathBuf}, - sync::{atomic::AtomicU32, Arc, Mutex, RwLock}, + sync::{ + atomic::{AtomicU32, Ordering}, + Arc, Mutex, RwLock, + }, }; use sea_orm::{ @@ -15,6 +18,7 @@ use tokio::{ runtime, sync::mpsc::{unbounded_channel, UnboundedSender}, }; +use uuid::Uuid; #[derive(Clone)] pub struct Handle { @@ -53,4 +57,73 @@ impl Handle { Ok(Self { state }) } + + pub fn random_file_paths(&self) -> [PathBuf; C] { + std::array::from_fn(|_| { + let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); + self.state.repos_dir.join(uuid.to_string()) + }) + } + + pub async fn get_or_create_repo(&self, distro: &str, repo: &str) -> crate::Result { + let mut repos = self.state.repos.write().await; + + let distro_id: Option = db::Distro::find() + .filter(db::distro::Column::Name.eq(distro)) + .select_only() + 
.column(db::distro::Column::Id) + .into_tuple() + .one(&self.state.conn) + .await?; + + let distro_id = if let Some(id) = distro_id { + id + } else { + let new_distro = db::distro::ActiveModel { + id: NotSet, + name: Set(distro.to_string()), + description: NotSet, + }; + + new_distro.insert(&self.state.conn).await?.id + }; + + let repo_id: Option = db::Repo::find() + .filter(db::repo::Column::DistroId.eq(distro_id)) + .filter(db::repo::Column::Name.eq(repo)) + .select_only() + .column(db::repo::Column::Id) + .into_tuple() + .one(&self.state.conn) + .await?; + + let repo_id = if let Some(id) = repo_id { + id + } else { + let new_repo = db::repo::ActiveModel { + id: NotSet, + distro_id: Set(distro_id), + name: Set(repo.to_string()), + description: NotSet, + }; + let id = new_repo.insert(&self.state.conn).await?.id; + + tokio::fs::create_dir(self.state.repos_dir.join(id.to_string())).await?; + repos.insert(id, Default::default()); + + id + }; + + Ok(repo_id) + } + + pub async fn queue_pkg(&self, repo: i32, path: PathBuf) { + self.state + .tx + .send(RepoCommand::ParsePkg(repo, path)) + .unwrap(); + self.state.repos.read().await.get(&repo).inspect(|n| { + n.0.fetch_add(1, Ordering::SeqCst); + }); + } } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 3bd2a1c..6fe6650 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -85,17 +85,16 @@ async fn post_package_archive( Path((distro, repo)): Path<(String, String)>, body: Body, ) -> crate::Result { - Ok(StatusCode::NOT_FOUND) - //let mut body = StreamReader::new(body.into_data_stream().map_err(std::io::Error::other)); - //let repo = global.mgr.get_or_create_repo(&distro, &repo).await?; - //let [tmp_path] = global.mgr.random_file_paths(); - // - //let mut tmp_file = tokio::fs::File::create(&tmp_path).await?; - //tokio::io::copy(&mut body, &mut tmp_file).await?; - // - //global.mgr.queue_pkg(repo, tmp_path).await; - // - //Ok(StatusCode::ACCEPTED) + let repo_id = 
global.repo.get_or_create_repo(&distro, &repo).await?; + + let [tmp_path] = global.repo.random_file_paths(); + let mut tmp_file = tokio::fs::File::create(&tmp_path).await?; + let mut body = StreamReader::new(body.into_data_stream().map_err(std::io::Error::other)); + tokio::io::copy(&mut body, &mut tmp_file).await?; + + global.repo.queue_pkg(repo_id, tmp_path).await; + + Ok(StatusCode::ACCEPTED) } async fn delete_repo( From 9237add86967917fbef294ee4396ebf90c0ab458 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 26 Jun 2024 14:03:00 +0200 Subject: [PATCH 53/73] feat: reimplement synchronous package sync --- server/src/repo/actor.rs | 82 +++++++++++- server/src/repo/archive.rs | 250 +++++++++++++++++++++++++++++-------- server/src/repo/mod.rs | 2 - 3 files changed, 277 insertions(+), 57 deletions(-) diff --git a/server/src/repo/actor.rs b/server/src/repo/actor.rs index c1a2c73..b90fcee 100644 --- a/server/src/repo/actor.rs +++ b/server/src/repo/actor.rs @@ -10,17 +10,20 @@ use std::{ }, }; +use futures::StreamExt; use sea_orm::{ ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DbConn, EntityTrait, JoinType, ModelTrait, NotSet, QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, }; +use sea_query::{Alias, Expr, Query}; use tokio::{ runtime, sync::{ - mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, + mpsc::{self, unbounded_channel, UnboundedReceiver, UnboundedSender}, RwLock, }, }; +use uuid::Uuid; pub enum RepoCommand { ParsePkg(i32, PathBuf), @@ -67,6 +70,13 @@ impl RepoActor { } } + pub fn random_file_paths(&self) -> [PathBuf; C] { + std::array::from_fn(|_| { + let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); + self.state.repos_dir.join(uuid.to_string()) + }) + } + /// Run the main actor loop pub fn run(self) { while let Some(msg) = { @@ -85,7 +95,7 @@ impl RepoActor { .map(|n| n.0.load(Ordering::SeqCst)) == Some(0) { - // TODO sync + let _ = self.sync_repo(repo); } } } @@ -143,6 +153,74 @@ impl RepoActor { } 
fn generate_archives(&self, repo: i32, arch: &str) -> crate::Result<()> { + let [tmp_ar_db_path, tmp_ar_files_path] = self.random_file_paths(); + + let mut ars = archive::RepoArchivesWriter::new( + &tmp_ar_db_path, + &tmp_ar_files_path, + self.random_file_paths(), + &self.rt, + &self.state.conn, + )?; + + let (tx, mut rx) = mpsc::channel(1); + + let conn = self.state.conn.clone(); + let query = db::query::package::pkgs_to_sync(&self.state.conn, repo, arch); + + // sea_orm needs its connections to be dropped inside an async context, so we spawn a task + // that streams the responses to the synchronous context via message passing + self.rt.spawn(async move { + let stream = query.stream(&conn).await; + + if let Err(err) = stream { + let _ = tx.send(Err(err)).await; + + return; + } + + let mut stream = stream.unwrap(); + + while let Some(res) = stream.next().await { + let is_err = res.is_err(); + let _ = tx.send(res).await; + + if is_err { + return; + } + } + }); + + let mut committed_ids: Vec = Vec::new(); + + while let Some(pkg) = rx.blocking_recv().transpose()? 
{ + committed_ids.push(pkg.id); + ars.append_pkg(&pkg)?; + } + + ars.close()?; + + // Move newly generated package archives to their correct place + let repo_dir = self.state.repos_dir.join(repo.to_string()); + std::fs::rename(tmp_ar_db_path, repo_dir.join(format!("{}.db.tar.gz", arch)))?; + std::fs::rename( + tmp_ar_files_path, + repo_dir.join(format!("{}.files.tar.gz", arch)), + )?; + + // Update the state for the newly committed packages + self.rt.block_on( + db::Package::update_many() + .col_expr( + db::package::Column::State, + Expr::value(db::PackageState::Committed), + ) + .filter(db::package::Column::Id.is_in(committed_ids)) + .exec(&self.state.conn), + )?; + + tracing::info!("Package archives generated for repo {} ('{}')", repo, arch); + Ok(()) } } diff --git a/server/src/repo/archive.rs b/server/src/repo/archive.rs index a979c09..973a395 100644 --- a/server/src/repo/archive.rs +++ b/server/src/repo/archive.rs @@ -1,78 +1,222 @@ +use crate::db; use std::{ - io, + io::{self, Write}, path::{Path, PathBuf}, - sync::{Arc, Mutex}, }; +use futures::StreamExt; use libarchive::{ write::{Builder, FileWriter, WriteEntry}, Entry, WriteFilter, WriteFormat, }; +use sea_orm::{ColumnTrait, DbConn, ModelTrait, QueryFilter, QuerySelect}; +use tokio::{runtime, sync::mpsc}; -/// Struct to abstract away the intrinsics of writing entries to an archive file -pub struct RepoArchiveWriter { - ar: Arc>, +pub struct RepoArchivesWriter { + ar_db: FileWriter, + ar_files: FileWriter, + rt: runtime::Handle, + conn: DbConn, + tmp_paths: [PathBuf; 2], } -impl RepoArchiveWriter { - pub async fn open>(path: P) -> io::Result { - let path = PathBuf::from(path.as_ref()); - - // Open the archive file - let ar = tokio::task::spawn_blocking(move || { - let mut builder = Builder::new(); - builder.add_filter(WriteFilter::Gzip)?; - builder.set_format(WriteFormat::PaxRestricted)?; - - builder.open_file(path) - }) - .await - .unwrap()?; +impl RepoArchivesWriter { + pub fn new( + ar_db_path: impl 
AsRef, + ar_files_path: impl AsRef, + tmp_paths: [impl AsRef; 2], + rt: &runtime::Handle, + conn: &sea_orm::DbConn, + ) -> crate::Result { + let ar_db = Self::open_ar(ar_db_path)?; + let ar_files = Self::open_ar(ar_files_path)?; Ok(Self { - // In practice, mutex is only ever used by one thread at a time. It's simply here so we - // can use spawn_blocking without issues. - ar: Arc::new(Mutex::new(ar)), + ar_db, + ar_files, + rt: rt.clone(), + conn: conn.clone(), + tmp_paths: [ + tmp_paths[0].as_ref().to_path_buf(), + tmp_paths[1].as_ref().to_path_buf(), + ], }) } - /// Add either a "desc" or "files" entry to the archive - pub async fn add_entry>( - &self, - full_name: &str, - path: P, - desc: bool, - ) -> io::Result<()> { - let metadata = tokio::fs::metadata(&path).await?; + fn open_ar(path: impl AsRef) -> crate::Result { + let mut builder = Builder::new(); + builder.add_filter(WriteFilter::Gzip)?; + builder.set_format(WriteFormat::PaxRestricted)?; + + Ok(builder.open_file(path)?) + } + + fn append_entry( + ar: &mut FileWriter, + src_path: impl AsRef, + dest_path: impl AsRef, + ) -> crate::Result<()> { + let metadata = std::fs::metadata(&src_path)?; let file_size = metadata.len(); - let ar = Arc::clone(&self.ar); - let full_name = String::from(full_name); - let path = PathBuf::from(path.as_ref()); + let mut ar_entry = WriteEntry::new(); + ar_entry.set_filetype(libarchive::archive::FileType::RegularFile); - Ok(tokio::task::spawn_blocking(move || { - let mut ar_entry = WriteEntry::new(); - ar_entry.set_filetype(libarchive::archive::FileType::RegularFile); + ar_entry.set_pathname(dest_path); + ar_entry.set_mode(0o100644); + ar_entry.set_size(file_size.try_into().unwrap()); - let name = if desc { "desc" } else { "files" }; - - ar_entry.set_pathname(PathBuf::from(full_name).join(name)); - ar_entry.set_mode(0o100644); - ar_entry.set_size(file_size.try_into().unwrap()); - - ar.lock().unwrap().append_path(&mut ar_entry, path) - }) - .await - .unwrap()?) 
+ Ok(ar.append_path(&mut ar_entry, src_path)?) } - pub async fn close(&self) -> io::Result<()> { - let ar = Arc::clone(&self.ar); + pub fn append_pkg(&mut self, pkg: &db::package::Model) -> crate::Result<()> { + self.write_desc(&self.tmp_paths[0], pkg)?; + self.write_files(&self.tmp_paths[1], pkg)?; - Ok( - tokio::task::spawn_blocking(move || ar.lock().unwrap().close()) - .await - .unwrap()?, - ) + let full_name = format!("{}-{}", pkg.name, pkg.version); + let dest_desc_path = format!("{}/desc", full_name); + let dest_files_path = format!("{}/files", full_name); + + Self::append_entry(&mut self.ar_db, &self.tmp_paths[0], &dest_desc_path)?; + Self::append_entry(&mut self.ar_files, &self.tmp_paths[0], &dest_desc_path)?; + Self::append_entry(&mut self.ar_files, &self.tmp_paths[1], &dest_files_path)?; + + Ok(()) + } + + /// Generate a "files" archive entry for the package in the given path + fn write_files(&self, path: impl AsRef, pkg: &db::package::Model) -> crate::Result<()> { + let mut f = std::io::BufWriter::new(std::fs::File::create(path)?); + + writeln!(f, "%FILES%")?; + + let (tx, mut rx) = mpsc::channel(1); + + let conn = self.conn.clone(); + let query = pkg.find_related(db::PackageFile); + self.rt.spawn(async move { + let files = query.stream(&conn).await; + + if let Err(err) = files { + let _ = tx.send(Err(err)).await; + + return; + } + + let mut files = files.unwrap(); + + while let Some(res) = files.next().await { + let is_err = res.is_err(); + let _ = tx.send(res).await; + + if is_err { + return; + } + } + }); + + while let Some(file) = rx.blocking_recv().transpose()? 
{ + writeln!(f, "{}", file.path)?; + } + + f.flush()?; + Ok(()) + } + + fn write_desc(&self, path: impl AsRef, pkg: &db::package::Model) -> crate::Result<()> { + let mut f = std::io::BufWriter::new(std::fs::File::create(path)?); + + writeln!(f, "%FILENAME%\n{}", pkg.id)?; + + let mut write_attr = |k: &str, v: &str| { + if !v.is_empty() { + writeln!(f, "\n%{}%\n{}", k, v) + } else { + Ok(()) + } + }; + + write_attr("NAME", &pkg.name)?; + write_attr("BASE", &pkg.base)?; + write_attr("VERSION", &pkg.version)?; + + if let Some(ref desc) = pkg.description { + write_attr("DESC", desc)?; + } + + let groups: Vec = self.rt.block_on( + pkg.find_related(db::PackageGroup) + .select_only() + .column(db::package_group::Column::Name) + .into_tuple() + .all(&self.conn), + )?; + + write_attr("GROUPS", &groups.join("\n"))?; + + write_attr("CSIZE", &pkg.c_size.to_string())?; + write_attr("ISIZE", &pkg.size.to_string())?; + write_attr("SHA256SUM", &pkg.sha256_sum)?; + + if let Some(ref url) = pkg.url { + write_attr("URL", url)?; + } + + let licenses: Vec = self.rt.block_on( + pkg.find_related(db::PackageLicense) + .select_only() + .column(db::package_license::Column::Name) + .into_tuple() + .all(&self.conn), + )?; + write_attr("LICENSE", &licenses.join("\n"))?; + + write_attr("ARCH", &pkg.arch)?; + + // TODO build date + write_attr( + "BUILDDATE", + &pkg.build_date.and_utc().timestamp().to_string(), + )?; + + if let Some(ref packager) = pkg.packager { + write_attr("PACKAGER", packager)?; + } + + let related = [ + ("REPLACES", db::PackageRelatedEnum::Replaces), + ("CONFLICTS", db::PackageRelatedEnum::Conflicts), + ("PROVIDES", db::PackageRelatedEnum::Provides), + ("DEPENDS", db::PackageRelatedEnum::Depend), + ("OPTDEPENDS", db::PackageRelatedEnum::Optdepend), + ("MAKEDEPENDS", db::PackageRelatedEnum::Makedepend), + ("CHECKDEPENDS", db::PackageRelatedEnum::Checkdepend), + ]; + + for (key, attr) in related.into_iter() { + let items: Vec = self.rt.block_on( + 
pkg.find_related(db::PackageRelated) + .filter(db::package_related::Column::Type.eq(attr)) + .select_only() + .column(db::package_related::Column::Name) + .into_tuple() + .all(&self.conn), + )?; + + write_attr(key, &items.join("\n"))?; + } + + f.flush()?; + Ok(()) + } + + pub fn close(&mut self) -> crate::Result<()> { + self.ar_db.close()?; + self.ar_files.close()?; + + let _ = std::fs::remove_file(&self.tmp_paths[0])?; + let _ = std::fs::remove_file(&self.tmp_paths[1])?; + + Ok(()) } } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 6fe6650..e8b65e3 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -1,12 +1,10 @@ mod actor; mod archive; mod handle; -mod manager; pub mod package; pub use actor::{RepoActor, RepoCommand, RepoSharedState}; pub use handle::Handle; -pub use manager::RepoMgr; use crate::FsConfig; From 042f1ecbd3f23ea827b48eff768262417c7a5bbb Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 26 Jun 2024 21:10:04 +0200 Subject: [PATCH 54/73] feat: re-enable most repo functionality --- server/src/repo/actor.rs | 12 +++++ server/src/repo/handle.rs | 53 ++++++++++++++++++++ server/src/repo/mod.rs | 100 ++++++++++++++++---------------------- 3 files changed, 107 insertions(+), 58 deletions(-) diff --git a/server/src/repo/actor.rs b/server/src/repo/actor.rs index b90fcee..d76efa3 100644 --- a/server/src/repo/actor.rs +++ b/server/src/repo/actor.rs @@ -27,6 +27,8 @@ use uuid::Uuid; pub enum RepoCommand { ParsePkg(i32, PathBuf), + SyncRepo(i32), + Clean, } pub struct RepoSharedState { @@ -98,6 +100,12 @@ impl RepoActor { let _ = self.sync_repo(repo); } } + RepoCommand::SyncRepo(repo) => { + let _ = self.sync_repo(repo); + } + RepoCommand::Clean => { + let _ = self.clean(); + } } } } @@ -223,4 +231,8 @@ impl RepoActor { Ok(()) } + + fn clean(&self) -> crate::Result<()> { + todo!() + } } diff --git a/server/src/repo/handle.rs b/server/src/repo/handle.rs index 262f274..9e63e81 100644 --- a/server/src/repo/handle.rs +++ 
b/server/src/repo/handle.rs @@ -14,6 +14,7 @@ use sea_orm::{ ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DbConn, EntityTrait, JoinType, ModelTrait, NotSet, QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, }; +use sea_query::{Alias, Expr, Query}; use tokio::{ runtime, sync::mpsc::{unbounded_channel, UnboundedSender}, @@ -117,6 +118,50 @@ impl Handle { Ok(repo_id) } + pub async fn get_repo(&self, distro: &str, repo: &str) -> crate::Result> { + Ok(db::Repo::find() + .find_also_related(db::Distro) + .filter( + Condition::all() + .add(db::repo::Column::Name.eq(repo)) + .add(db::distro::Column::Name.eq(distro)), + ) + .one(&self.state.conn) + .await + .map(|res| res.map(|(repo, _)| repo.id))?) + } + + pub async fn remove_repo(&self, repo: i32) -> crate::Result<()> { + self.state.repos.write().await.remove(&repo); + db::Repo::delete_by_id(repo).exec(&self.state.conn).await?; + let _ = tokio::fs::remove_dir_all(self.state.repos_dir.join(repo.to_string())).await; + + Ok(()) + } + + /// Remove all packages in the repository that have a given arch. This method marks all + /// packages with the given architecture as "pending deletion", before performing a manual sync + /// & removal of stale packages. 
+ pub async fn remove_repo_arch(&self, repo: i32, arch: &str) -> crate::Result<()> { + db::Package::update_many() + .col_expr( + db::package::Column::State, + Expr::value(db::PackageState::PendingDeletion), + ) + .filter( + Condition::all() + .add(db::package::Column::RepoId.eq(repo)) + .add(db::package::Column::Arch.eq(arch)), + ) + .exec(&self.state.conn) + .await?; + + self.queue_sync(repo).await; + self.queue_clean().await; + + Ok(()) + } + pub async fn queue_pkg(&self, repo: i32, path: PathBuf) { self.state .tx @@ -126,4 +171,12 @@ impl Handle { n.0.fetch_add(1, Ordering::SeqCst); }); } + + async fn queue_sync(&self, repo: i32) { + self.state.tx.send(RepoCommand::SyncRepo(repo)).unwrap(); + } + + async fn queue_clean(&self) { + self.state.tx.send(RepoCommand::Clean).unwrap(); + } } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index e8b65e3..f48c0d7 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -51,31 +51,30 @@ async fn get_file( Path((distro, repo, arch, file_name)): Path<(String, String, String, String)>, req: Request, ) -> crate::Result { - Ok(StatusCode::NOT_FOUND) - //if let Some(repo_id) = global.mgr.get_repo(&distro, &repo).await? { - // match global.config.fs { - // FsConfig::Local { data_dir } => { - // let repo_dir = data_dir.join("repos").join(repo_id.to_string()); - // - // let file_name = if file_name == format!("{}.db", repo) - // || file_name == format!("{}.db.tar.gz", repo) - // { - // format!("{}.db.tar.gz", arch) - // } else if file_name == format!("{}.files", repo) - // || file_name == format!("{}.files.tar.gz", repo) - // { - // format!("{}.files.tar.gz", arch) - // } else { - // file_name - // }; - // - // let path = repo_dir.join(file_name); - // Ok(ServeFile::new(path).oneshot(req).await) - // } - // } - //} else { - // Err(StatusCode::NOT_FOUND.into()) - //} + if let Some(repo_id) = global.repo.get_repo(&distro, &repo).await? 
{ + match global.config.fs { + FsConfig::Local { data_dir } => { + let repo_dir = data_dir.join("repos").join(repo_id.to_string()); + + let file_name = if file_name == format!("{}.db", repo) + || file_name == format!("{}.db.tar.gz", repo) + { + format!("{}.db.tar.gz", arch) + } else if file_name == format!("{}.files", repo) + || file_name == format!("{}.files.tar.gz", repo) + { + format!("{}.files.tar.gz", arch) + } else { + file_name + }; + + let path = repo_dir.join(file_name); + Ok(ServeFile::new(path).oneshot(req).await) + } + } + } else { + Err(StatusCode::NOT_FOUND.into()) + } } async fn post_package_archive( @@ -99,45 +98,30 @@ async fn delete_repo( State(global): State, Path((distro, repo)): Path<(String, String)>, ) -> crate::Result { - Ok(StatusCode::NOT_FOUND) - //if let Some(repo) = global.mgr.get_repo(&distro, &repo).await? { - // global.mgr.remove_repo(repo).await?; - // - // tracing::info!("Removed repository {repo}"); - // - // Ok(StatusCode::OK) - //} else { - // Ok(StatusCode::NOT_FOUND) - //} + if let Some(repo) = global.repo.get_repo(&distro, &repo).await? { + global.repo.remove_repo(repo).await?; + + tracing::info!("Removed repository {repo}"); + + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } } async fn delete_arch_repo( State(global): State, Path((distro, repo, arch)): Path<(String, String, String)>, ) -> crate::Result { - Ok(StatusCode::NOT_FOUND) - //if let Some(repo) = global.mgr.get_repo(&distro, &repo).await? 
{ - // global.mgr.remove_repo_arch(repo, &arch).await?; - // - // tracing::info!("Removed architecture '{arch}' from repository {repo}"); - // - // Ok(StatusCode::OK) - //} else { - // Ok(StatusCode::NOT_FOUND) - //} - //if let Some(mgr) = global.mgr.get_mgr(&distro).await { - // let repo_removed = mgr.remove_repo_arch(&repo, &arch).await?; - // - // if repo_removed { - // tracing::info!("Removed arch '{}' from repository '{}'", arch, repo); - // - // Ok(StatusCode::OK) - // } else { - // Ok(StatusCode::NOT_FOUND) - // } - //} else { - // Ok(StatusCode::NOT_FOUND) - //} + if let Some(repo) = global.repo.get_repo(&distro, &repo).await? { + global.repo.remove_repo_arch(repo, &arch).await?; + + tracing::info!("Removed architecture '{arch}' from repository {repo}"); + + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } } async fn delete_package( From bde3b907114d7c526cd5b8fab381cfc543c31d1b Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 26 Jun 2024 21:25:23 +0200 Subject: [PATCH 55/73] feat: reimplemented clean method in actor --- server/src/repo/actor.rs | 84 ++++++++++++++++++++++++++++++-------- server/src/repo/archive.rs | 28 ++++++------- 2 files changed, 81 insertions(+), 31 deletions(-) diff --git a/server/src/repo/actor.rs b/server/src/repo/actor.rs index d76efa3..04289d1 100644 --- a/server/src/repo/actor.rs +++ b/server/src/repo/actor.rs @@ -98,6 +98,7 @@ impl RepoActor { == Some(0) { let _ = self.sync_repo(repo); + let _ = self.clean(); } } RepoCommand::SyncRepo(repo) => { @@ -179,22 +180,19 @@ impl RepoActor { // sea_orm needs its connections to be dropped inside an async context, so we spawn a task // that streams the responses to the synchronous context via message passing self.rt.spawn(async move { - let stream = query.stream(&conn).await; + match query.stream(&conn).await { + Ok(mut stream) => { + while let Some(res) = stream.next().await { + let is_err = res.is_err(); + let _ = tx.send(res).await; - if let Err(err) = stream { - let 
_ = tx.send(Err(err)).await; - - return; - } - - let mut stream = stream.unwrap(); - - while let Some(res) = stream.next().await { - let is_err = res.is_err(); - let _ = tx.send(res).await; - - if is_err { - return; + if is_err { + return; + } + } + } + Err(err) => { + let _ = tx.send(Err(err)).await; } } }); @@ -233,6 +231,60 @@ impl RepoActor { } fn clean(&self) -> crate::Result<()> { - todo!() + let (tx, mut rx) = mpsc::channel(1); + let conn = self.state.conn.clone(); + let query = db::query::package::stale_pkgs(&self.state.conn); + + // sea_orm needs its connections to be dropped inside an async context, so we spawn a task + // that streams the responses to the synchronous context via message passing + self.rt.spawn(async move { + match query.stream(&conn).await { + Ok(mut stream) => { + while let Some(res) = stream.next().await { + let is_err = res.is_err(); + let _ = tx.send(res).await; + + if is_err { + return; + } + } + } + Err(err) => { + let _ = tx.send(Err(err)).await; + } + } + }); + + // Ids are monotonically increasing, so the max id suffices to know which packages to + // remove later + let mut max_id = -1; + let mut removed_pkgs = 0; + + while let Some(pkg) = rx.blocking_recv().transpose()? 
{ + // Failing to remove the package file isn't the biggest problem + let _ = std::fs::remove_file( + self.state + .repos_dir + .join(pkg.repo_id.to_string()) + .join(pkg.id.to_string()), + ); + + if pkg.id > max_id { + max_id = pkg.id; + } + + removed_pkgs += 1; + } + + if removed_pkgs > 0 { + self.rt.block_on(db::query::package::delete_stale_pkgs( + &self.state.conn, + max_id, + ))?; + } + + tracing::info!("Cleaned up {removed_pkgs} old package(s)"); + + Ok(()) } } diff --git a/server/src/repo/archive.rs b/server/src/repo/archive.rs index 973a395..39b3b82 100644 --- a/server/src/repo/archive.rs +++ b/server/src/repo/archive.rs @@ -94,23 +94,21 @@ impl RepoArchivesWriter { let conn = self.conn.clone(); let query = pkg.find_related(db::PackageFile); + self.rt.spawn(async move { - let files = query.stream(&conn).await; + match query.stream(&conn).await { + Ok(mut stream) => { + while let Some(res) = stream.next().await { + let is_err = res.is_err(); + let _ = tx.send(res).await; - if let Err(err) = files { - let _ = tx.send(Err(err)).await; - - return; - } - - let mut files = files.unwrap(); - - while let Some(res) = files.next().await { - let is_err = res.is_err(); - let _ = tx.send(res).await; - - if is_err { - return; + if is_err { + return; + } + } + } + Err(err) => { + let _ = tx.send(Err(err)).await; } } }); From 412d1e65f1e04f2a9dfa498298d3ab819e13f58d Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 26 Jun 2024 21:37:07 +0200 Subject: [PATCH 56/73] chore: remove some dead code --- server/src/db/query/mod.rs | 2 - server/src/db/query/package.rs | 3 +- server/src/main.rs | 2 +- server/src/repo/actor.rs | 9 +- server/src/repo/archive.rs | 2 +- server/src/repo/handle.rs | 16 +- server/src/repo/manager.rs | 385 --------------------------------- server/src/repo/package.rs | 196 +---------------- 8 files changed, 14 insertions(+), 601 deletions(-) delete mode 100644 server/src/repo/manager.rs diff --git a/server/src/db/query/mod.rs 
b/server/src/db/query/mod.rs index f0a809b..9eb7954 100644 --- a/server/src/db/query/mod.rs +++ b/server/src/db/query/mod.rs @@ -1,5 +1,3 @@ pub mod distro; pub mod package; pub mod repo; - -type Result = std::result::Result; diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index bfdad73..ad9d74a 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -1,8 +1,7 @@ use crate::db::{self, *}; -use futures::Stream; use sea_orm::{sea_query::IntoCondition, *}; -use sea_query::{Alias, Asterisk, Expr, IntoColumnRef, Query, SelectStatement}; +use sea_query::{Alias, Expr, Query, SelectStatement}; use serde::Deserialize; #[derive(Deserialize)] diff --git a/server/src/main.rs b/server/src/main.rs index 274d419..33865b5 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -8,7 +8,7 @@ mod repo; pub use config::{Config, DbConfig, FsConfig}; pub use error::{Result, ServerError}; -use std::{io, path::PathBuf, sync::Arc}; +use std::{io, path::PathBuf}; use axum::Router; use tower_http::trace::TraceLayer; diff --git a/server/src/repo/actor.rs b/server/src/repo/actor.rs index 04289d1..57f1b93 100644 --- a/server/src/repo/actor.rs +++ b/server/src/repo/actor.rs @@ -1,4 +1,4 @@ -use super::{archive, package, Handle}; +use super::{archive, package}; use crate::db; use std::{ @@ -11,11 +11,8 @@ use std::{ }; use futures::StreamExt; -use sea_orm::{ - ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DbConn, EntityTrait, JoinType, - ModelTrait, NotSet, QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, -}; -use sea_query::{Alias, Expr, Query}; +use sea_orm::{ColumnTrait, DbConn, EntityTrait, QueryFilter, QuerySelect}; +use sea_query::Expr; use tokio::{ runtime, sync::{ diff --git a/server/src/repo/archive.rs b/server/src/repo/archive.rs index 39b3b82..ad08a67 100644 --- a/server/src/repo/archive.rs +++ b/server/src/repo/archive.rs @@ -1,6 +1,6 @@ use crate::db; use std::{ - io::{self, Write}, + 
io::Write, path::{Path, PathBuf}, }; diff --git a/server/src/repo/handle.rs b/server/src/repo/handle.rs index 9e63e81..4cec237 100644 --- a/server/src/repo/handle.rs +++ b/server/src/repo/handle.rs @@ -4,21 +4,15 @@ use crate::db; use std::{ collections::HashMap, path::{Path, PathBuf}, - sync::{ - atomic::{AtomicU32, Ordering}, - Arc, Mutex, RwLock, - }, + sync::{atomic::Ordering, Arc}, }; use sea_orm::{ - ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DbConn, EntityTrait, JoinType, - ModelTrait, NotSet, QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, -}; -use sea_query::{Alias, Expr, Query}; -use tokio::{ - runtime, - sync::mpsc::{unbounded_channel, UnboundedSender}, + ActiveModelTrait, ColumnTrait, Condition, DbConn, EntityTrait, NotSet, QueryFilter, + QuerySelect, Set, }; +use sea_query::Expr; +use tokio::runtime; use uuid::Uuid; #[derive(Clone)] diff --git a/server/src/repo/manager.rs b/server/src/repo/manager.rs deleted file mode 100644 index e4f0581..0000000 --- a/server/src/repo/manager.rs +++ /dev/null @@ -1,385 +0,0 @@ -use super::{archive, package}; -use crate::db::{self, query::package::delete_stale_pkgs}; - -use std::{ - collections::HashMap, - path::{Path, PathBuf}, - sync::{ - atomic::{AtomicU32, Ordering}, - Arc, - }, -}; - -use futures::StreamExt; -use sea_orm::{ - ActiveModelTrait, ColumnTrait, Condition, ConnectionTrait, DbConn, EntityTrait, JoinType, - ModelTrait, NotSet, QueryFilter, QuerySelect, Related, RelationTrait, Set, TransactionTrait, -}; -use sea_query::{Alias, Expr, Query}; -use tokio::sync::{ - mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, - Mutex, RwLock, -}; -use uuid::Uuid; - -struct PkgQueueMsg { - repo: i32, - path: PathBuf, -} - -/// A single instance of this struct orchestrates everything related to managing packages files on -/// disk for all repositories in the server -pub struct RepoMgr { - repos_dir: PathBuf, - conn: DbConn, - pkg_queue: ( - UnboundedSender, - Mutex>, - 
), - repos: RwLock>)>>, -} - -impl RepoMgr { - pub async fn new>(repos_dir: P, conn: DbConn) -> crate::Result { - if !tokio::fs::try_exists(&repos_dir).await? { - tokio::fs::create_dir(&repos_dir).await?; - } - - let (tx, rx) = unbounded_channel(); - - let mut repos = HashMap::new(); - let repo_ids: Vec = db::Repo::find() - .select_only() - .column(db::repo::Column::Id) - .into_tuple() - .all(&conn) - .await?; - - for id in repo_ids { - repos.insert(id, Default::default()); - } - - Ok(Self { - repos_dir: repos_dir.as_ref().to_path_buf(), - conn, - pkg_queue: (tx, Mutex::new(rx)), - repos: RwLock::new(repos), - }) - } - - /// Generate archive databases for all known architectures in the repository, including the - /// "any" architecture. - pub async fn sync_repo(&self, repo: i32) -> crate::Result<()> { - let lock = self - .repos - .read() - .await - .get(&repo) - .map(|(_, lock)| Arc::clone(lock)); - - if lock.is_none() { - return Ok(()); - } - - let lock = lock.unwrap(); - let _guard = lock.lock().await; - - let archs: Vec = db::Package::find() - .filter(db::package::Column::RepoId.eq(repo)) - .select_only() - .column(db::package::Column::Arch) - .distinct() - .into_tuple() - .all(&self.conn) - .await?; - - for arch in archs { - self.generate_archives(repo, &arch).await?; - } - - Ok(()) - } - - /// Generate the archive databases for the given repository and architecture. 
- async fn generate_archives(&self, repo: i32, arch: &str) -> crate::Result<()> { - let [tmp_ar_db_path, tmp_ar_files_path, files_tmp_file_path, desc_tmp_file_path] = - self.random_file_paths(); - let ar_db = archive::RepoArchiveWriter::open(&tmp_ar_db_path).await?; - let ar_files = archive::RepoArchiveWriter::open(&tmp_ar_files_path).await?; - - // Query all packages in the repo that have the given architecture or the "any" - // architecture - let mut pkgs = db::query::package::pkgs_to_sync(&self.conn, repo, arch) - .stream(&self.conn) - .await?; - - let mut commited_ids: Vec = Vec::new(); - - while let Some(pkg) = pkgs.next().await.transpose()? { - commited_ids.push(pkg.id); - - let mut files_tmp_file = tokio::fs::File::create(&files_tmp_file_path).await?; - let mut desc_tmp_file = tokio::fs::File::create(&desc_tmp_file_path).await?; - - package::write_files(&self.conn, &mut files_tmp_file, &pkg).await?; - package::write_desc(&self.conn, &mut desc_tmp_file, &pkg).await?; - - let full_name = format!("{}-{}", pkg.name, pkg.version); - - ar_db - .add_entry(&full_name, &desc_tmp_file_path, true) - .await?; - ar_files - .add_entry(&full_name, &desc_tmp_file_path, true) - .await?; - ar_files - .add_entry(&full_name, &files_tmp_file_path, false) - .await?; - } - - // Cleanup - ar_db.close().await?; - ar_files.close().await?; - - let repo_dir = self.repos_dir.join(repo.to_string()); - - // Move the db archives to their respective places - tokio::fs::rename(tmp_ar_db_path, repo_dir.join(format!("{}.db.tar.gz", arch))).await?; - tokio::fs::rename( - tmp_ar_files_path, - repo_dir.join(format!("{}.files.tar.gz", arch)), - ) - .await?; - - // Only after we have successfully written everything to disk do we update the database. - // This order ensures any failure can be recovered, as the database is our single source of - // truth. 
- db::Package::update_many() - .col_expr( - db::package::Column::State, - Expr::value(db::PackageState::Committed), - ) - .filter(db::package::Column::Id.is_in(commited_ids)) - .exec(&self.conn) - .await?; - - // If this fails there's no point in failing the function + if there were no packages in - // the repo, this fails anyway because the temp file doesn't exist - let _ = tokio::fs::remove_file(desc_tmp_file_path).await; - let _ = tokio::fs::remove_file(files_tmp_file_path).await; - - tracing::info!("Package archives generated for repo {} ('{}')", repo, arch); - - Ok(()) - } - - /// Clean any remaining old package files from the database and file system - pub async fn remove_stale_pkgs(&self) -> crate::Result<()> { - let mut pkgs = db::query::package::stale_pkgs(&self.conn) - .stream(&self.conn) - .await?; - - // Ids are monotonically increasing, so the max id suffices to know which packages to - // remove later - let mut max_id = -1; - let mut removed_pkgs = 0; - - while let Some(pkg) = pkgs.next().await.transpose()? { - // Failing to remove the package file isn't the biggest problem - let _ = tokio::fs::remove_file( - self.repos_dir - .join(pkg.repo_id.to_string()) - .join(pkg.id.to_string()), - ) - .await; - - if pkg.id > max_id { - max_id = pkg.id; - } - - removed_pkgs += 1; - } - - if removed_pkgs > 0 { - db::query::package::delete_stale_pkgs(&self.conn, max_id).await?; - } - - tracing::info!("Removed {removed_pkgs} stale package(s)"); - - Ok(()) - } - - pub async fn pkg_parse_task(&self) { - loop { - // Receive the next message and immediately drop the mutex afterwards. As long as the - // quue is empty, this will lock the mutex. This is okay, as the mutex will be unlocked - // as soon as a message is received, so another worker can pick up the mutex. 
- let msg = { - let mut recv = self.pkg_queue.1.lock().await; - recv.recv().await - }; - - if let Some(msg) = msg { - // TODO better handle this error (retry if failure wasn't because the package is - // faulty) - let _ = self - .add_pkg_from_path(msg.path, msg.repo) - .await - .inspect_err(|e| tracing::error!("{:?}", e)); - - let old = self - .repos - .read() - .await - .get(&msg.repo) - .map(|n| n.0.fetch_sub(1, Ordering::SeqCst)); - - // Every time the queue for a repo becomes empty, we run a sync job - if old == Some(1) { - // TODO error handling - let _ = self.sync_repo(msg.repo).await; - - // TODO move this so that we only clean if entire queue is empty, not just - // queue for specific repo - let _ = self.remove_stale_pkgs().await; - } - } - } - } - - pub async fn queue_pkg(&self, repo: i32, path: PathBuf) { - self.pkg_queue.0.send(PkgQueueMsg { path, repo }).unwrap(); - self.repos.read().await.get(&repo).inspect(|n| { - n.0.fetch_add(1, Ordering::SeqCst); - }); - } - - pub async fn get_repo(&self, distro: &str, repo: &str) -> crate::Result> { - Ok(db::Repo::find() - .find_also_related(db::Distro) - .filter( - Condition::all() - .add(db::repo::Column::Name.eq(repo)) - .add(db::distro::Column::Name.eq(distro)), - ) - .one(&self.conn) - .await - .map(|res| res.map(|(repo, _)| repo.id))?) 
- } - - pub async fn get_or_create_repo(&self, distro: &str, repo: &str) -> crate::Result { - let mut repos = self.repos.write().await; - - let distro_id: Option = db::Distro::find() - .filter(db::distro::Column::Name.eq(distro)) - .select_only() - .column(db::distro::Column::Id) - .into_tuple() - .one(&self.conn) - .await?; - - let distro_id = if let Some(id) = distro_id { - id - } else { - let new_distro = db::distro::ActiveModel { - id: NotSet, - name: Set(distro.to_string()), - description: NotSet, - }; - - new_distro.insert(&self.conn).await?.id - }; - - let repo_id: Option = db::Repo::find() - .filter(db::repo::Column::DistroId.eq(distro_id)) - .filter(db::repo::Column::Name.eq(repo)) - .select_only() - .column(db::repo::Column::Id) - .into_tuple() - .one(&self.conn) - .await?; - - let repo_id = if let Some(id) = repo_id { - id - } else { - let new_repo = db::repo::ActiveModel { - id: NotSet, - distro_id: Set(distro_id), - name: Set(repo.to_string()), - description: NotSet, - }; - let id = new_repo.insert(&self.conn).await?.id; - - tokio::fs::create_dir(self.repos_dir.join(id.to_string())).await?; - repos.insert(id, Default::default()); - - id - }; - - Ok(repo_id) - } - - async fn add_pkg_from_path>(&self, path: P, repo: i32) -> crate::Result<()> { - let path_clone = path.as_ref().to_path_buf(); - let pkg = tokio::task::spawn_blocking(move || package::Package::open(path_clone)) - .await - .unwrap()?; - - // TODO prevent database from being updated but file failing to move to repo dir? 
- let pkg = db::query::package::insert(&self.conn, repo, pkg).await?; - - let dest_path = self - .repos_dir - .join(repo.to_string()) - .join(pkg.id.to_string()); - tokio::fs::rename(path.as_ref(), dest_path).await?; - - tracing::info!( - "Added '{}-{}-{}' to repository {}", - pkg.name, - pkg.version, - pkg.arch, - repo, - ); - - Ok(()) - } - - pub async fn remove_repo(&self, repo: i32) -> crate::Result<()> { - self.repos.write().await.remove(&repo); - db::Repo::delete_by_id(repo).exec(&self.conn).await?; - let _ = tokio::fs::remove_dir_all(self.repos_dir.join(repo.to_string())).await; - - Ok(()) - } - - /// Remove all packages in the repository that have a given arch. This method marks all - /// packages with the given architecture as "pending deletion", before performing a manual sync - /// & removal of stale packages. - pub async fn remove_repo_arch(&self, repo: i32, arch: &str) -> crate::Result<()> { - db::Package::update_many() - .col_expr( - db::package::Column::State, - Expr::value(db::PackageState::PendingDeletion), - ) - .filter( - Condition::all() - .add(db::package::Column::RepoId.eq(repo)) - .add(db::package::Column::Arch.eq(arch)), - ) - .exec(&self.conn) - .await?; - - self.sync_repo(repo).await?; - self.remove_stale_pkgs().await?; - - Ok(()) - } - - pub fn random_file_paths(&self) -> [PathBuf; C] { - std::array::from_fn(|_| { - let uuid: uuid::fmt::Simple = Uuid::new_v4().into(); - self.repos_dir.join(uuid.to_string()) - }) - } -} diff --git a/server/src/repo/package.rs b/server/src/repo/package.rs index df98559..70466ba 100644 --- a/server/src/repo/package.rs +++ b/server/src/repo/package.rs @@ -1,19 +1,17 @@ -use crate::db::{self, entities::package, PackageRelatedEnum}; +use crate::db::entities::package; use std::{ fmt, fs, - io::{self, BufRead, BufReader, BufWriter, Read, Write}, + io::{self, BufRead, BufReader, Read}, path::{Path, PathBuf}, }; use chrono::NaiveDateTime; -use futures::StreamExt; use libarchive::{ read::{Archive, Builder}, Entry, 
ReadFilter, }; -use sea_orm::{ActiveValue::Set, ColumnTrait, DbConn, ModelTrait, QueryFilter, QuerySelect}; -use tokio::io::{AsyncWrite, AsyncWriteExt}; +use sea_orm::ActiveValue::Set; const IGNORED_FILES: [&str; 5] = [".BUILDINFO", ".INSTALL", ".MTREE", ".PKGINFO", ".CHANGELOG"]; @@ -204,74 +202,6 @@ impl Package { self.compression.extension().unwrap() ) } - - /// Write the formatted desc file to the provided writer - pub fn write_desc(&self, w: &mut W) -> io::Result<()> { - // We write a lot of small strings to the writer, so wrapping it in a BufWriter is - // beneficial - let mut w = BufWriter::new(w); - - let info = &self.info; - - writeln!(w, "%FILENAME%\n{}", self.file_name())?; - - let mut write = |key: &str, value: &str| { - if !value.is_empty() { - writeln!(w, "\n%{}%\n{}", key, value) - } else { - Ok(()) - } - }; - - write("NAME", &info.name)?; - write("BASE", &info.base)?; - write("VERSION", &info.version)?; - - if let Some(ref description) = info.description { - write("DESC", description)?; - } - write("GROUPS", &info.groups.join("\n"))?; - write("CSIZE", &info.csize.to_string())?; - write("ISIZE", &info.size.to_string())?; - - write("SHA256SUM", &info.sha256sum)?; - - if let Some(ref url) = info.url { - write("URL", url)?; - } - - write("LICENSE", &info.licenses.join("\n"))?; - write("ARCH", &info.arch)?; - write("BUILDDATE", &info.build_date.timestamp().to_string())?; - - if let Some(ref packager) = info.packager { - write("PACKAGER", packager)?; - } - - write("REPLACES", &info.replaces.join("\n"))?; - write("CONFLICTS", &info.conflicts.join("\n"))?; - write("PROVIDES", &info.provides.join("\n"))?; - write("DEPENDS", &info.depends.join("\n"))?; - write("OPTDEPENDS", &info.optdepends.join("\n"))?; - write("MAKEDEPENDS", &info.makedepends.join("\n"))?; - write("CHECKDEPENDS", &info.checkdepends.join("\n"))?; - - Ok(()) - } - - pub fn write_files(&self, w: &mut W) -> io::Result<()> { - // We write a lot of small strings to the writer, so wrapping it in a 
BufWriter is - // beneficial - let mut w = BufWriter::new(w); - - writeln!(w, "%FILES%")?; - - for file in &self.files { - writeln!(w, "{}", file.to_string_lossy())?; - } - - Ok(()) - } } impl From for package::ActiveModel { @@ -303,123 +233,3 @@ pub fn filename(pkg: &package::Model) -> String { pkg.name, pkg.version, pkg.arch, pkg.compression ) } - -async fn write_attribute( - writer: &mut W, - key: &str, - value: &str, -) -> io::Result<()> { - if !value.is_empty() { - let s = format!("\n%{}%\n{}\n", key, value); - writer.write_all(s.as_bytes()).await?; - } - - Ok(()) -} - -pub async fn write_desc( - conn: &DbConn, - writer: &mut W, - pkg: &package::Model, -) -> crate::Result<()> { - writer - .write_all(format!("%FILENAME%\n{}\n", pkg.id).as_bytes()) - .await?; - - write_attribute(writer, "NAME", &pkg.name).await?; - write_attribute(writer, "BASE", &pkg.base).await?; - write_attribute(writer, "VERSION", &pkg.version).await?; - - if let Some(ref description) = pkg.description { - write_attribute(writer, "DESC", description).await?; - } - - let groups: Vec = pkg - .find_related(db::PackageGroup) - .select_only() - .column(db::package_group::Column::Name) - .into_tuple() - .all(conn) - .await?; - write_attribute(writer, "GROUPS", &groups.join("\n")).await?; - - write_attribute(writer, "CSIZE", &pkg.c_size.to_string()).await?; - write_attribute(writer, "ISIZE", &pkg.size.to_string()).await?; - write_attribute(writer, "SHA256SUM", &pkg.sha256_sum).await?; - - if let Some(ref url) = pkg.url { - write_attribute(writer, "URL", url).await?; - } - - let licenses: Vec = pkg - .find_related(db::PackageLicense) - .select_only() - .column(db::package_license::Column::Name) - .into_tuple() - .all(conn) - .await?; - write_attribute(writer, "LICENSE", &licenses.join("\n")).await?; - - write_attribute(writer, "ARCH", &pkg.arch).await?; - - // TODO build date - write_attribute( - writer, - "BUILDDATE", - &pkg.build_date.and_utc().timestamp().to_string(), - ) - .await?; - - if let 
Some(ref packager) = pkg.packager { - write_attribute(writer, "PACKAGER", packager).await?; - } - - let related = [ - ("REPLACES", PackageRelatedEnum::Replaces), - ("CONFLICTS", PackageRelatedEnum::Conflicts), - ("PROVIDES", PackageRelatedEnum::Provides), - ("DEPENDS", PackageRelatedEnum::Depend), - ("OPTDEPENDS", PackageRelatedEnum::Optdepend), - ("MAKEDEPENDS", PackageRelatedEnum::Makedepend), - ("CHECKDEPENDS", PackageRelatedEnum::Checkdepend), - ]; - - for (key, attr) in related.into_iter() { - let items: Vec = pkg - .find_related(db::PackageRelated) - .filter(db::package_related::Column::Type.eq(attr)) - .select_only() - .column(db::package_related::Column::Name) - .into_tuple() - .all(conn) - .await?; - - write_attribute(writer, key, &items.join("\n")).await?; - } - - writer.flush().await?; - - Ok(()) -} - -pub async fn write_files( - conn: &DbConn, - writer: &mut W, - pkg: &package::Model, -) -> crate::Result<()> { - let line = "%FILES%\n"; - writer.write_all(line.as_bytes()).await?; - - // Generate the files list for the package - let mut files = pkg.find_related(db::PackageFile).stream(conn).await?; - - while let Some(file) = files.next().await.transpose()? 
{ - writer - .write_all(format!("{}\n", file.path).as_bytes()) - .await?; - } - - writer.flush().await?; - - Ok(()) -} From a6de2c3c149227919127a699c59409a01d533d4b Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 26 Jun 2024 21:52:02 +0200 Subject: [PATCH 57/73] refactor: move web code into own module --- server/src/main.rs | 13 +-- server/src/repo/mod.rs | 143 ------------------------- server/src/{ => web}/api/mod.rs | 0 server/src/{ => web}/api/pagination.rs | 0 server/src/web/mod.rs | 13 +++ server/src/web/repo.rs | 142 ++++++++++++++++++++++++ 6 files changed, 158 insertions(+), 153 deletions(-) rename server/src/{ => web}/api/mod.rs (100%) rename server/src/{ => web}/api/pagination.rs (100%) create mode 100644 server/src/web/mod.rs create mode 100644 server/src/web/repo.rs diff --git a/server/src/main.rs b/server/src/main.rs index 33865b5..337ba2e 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -1,18 +1,15 @@ -mod api; mod cli; mod config; pub mod db; mod error; mod repo; +mod web; pub use config::{Config, DbConfig, FsConfig}; pub use error::{Result, ServerError}; use std::{io, path::PathBuf}; -use axum::Router; -use tower_http::trace::TraceLayer; - use clap::Parser; use sea_orm_migration::MigratorTrait; use tokio::runtime; @@ -90,12 +87,8 @@ async fn run(global: Global) -> crate::Result<()> { .unwrap(); let listener = tokio::net::TcpListener::bind(domain).await?; - // build our application with a single route - let app = Router::new() - .nest("/api", crate::api::router()) - .merge(crate::repo::router(&global.config.api_key)) - .with_state(global) - .layer(TraceLayer::new_for_http()); + let app = web::router(global); + // run it with hyper on localhost:3000 Ok(axum::serve(listener, app.into_make_service()) .await diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index f48c0d7..16c368e 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -5,146 +5,3 @@ pub mod package; pub use actor::{RepoActor, RepoCommand, 
RepoSharedState}; pub use handle::Handle; - -use crate::FsConfig; - -use axum::{ - body::Body, - extract::{Path, State}, - http::{Request, StatusCode}, - response::IntoResponse, - routing::{delete, post}, - Router, -}; -use futures::TryStreamExt; -use tokio_util::io::StreamReader; -use tower::util::ServiceExt; -use tower_http::{services::ServeFile, validate_request::ValidateRequestHeaderLayer}; - -pub fn router(api_key: &str) -> Router { - Router::new() - .route( - "/:distro/:repo", - post(post_package_archive) - .delete(delete_repo) - .route_layer(ValidateRequestHeaderLayer::bearer(api_key)), - ) - .route( - "/:distro/:repo/:arch", - delete(delete_arch_repo).route_layer(ValidateRequestHeaderLayer::bearer(api_key)), - ) - // Routes added after the layer do not get that layer applied, so the GET requests will not - // be authorized - .route( - "/:distro/:repo/:arch/:filename", - delete(delete_package) - .route_layer(ValidateRequestHeaderLayer::bearer(api_key)) - .get(get_file), - ) -} - -/// Serve the package archive files and database archives. If files are requested for an -/// architecture that does not have any explicit packages, a repository containing only "any" files -/// is returned. -async fn get_file( - State(global): State, - Path((distro, repo, arch, file_name)): Path<(String, String, String, String)>, - req: Request, -) -> crate::Result { - if let Some(repo_id) = global.repo.get_repo(&distro, &repo).await? 
{ - match global.config.fs { - FsConfig::Local { data_dir } => { - let repo_dir = data_dir.join("repos").join(repo_id.to_string()); - - let file_name = if file_name == format!("{}.db", repo) - || file_name == format!("{}.db.tar.gz", repo) - { - format!("{}.db.tar.gz", arch) - } else if file_name == format!("{}.files", repo) - || file_name == format!("{}.files.tar.gz", repo) - { - format!("{}.files.tar.gz", arch) - } else { - file_name - }; - - let path = repo_dir.join(file_name); - Ok(ServeFile::new(path).oneshot(req).await) - } - } - } else { - Err(StatusCode::NOT_FOUND.into()) - } -} - -async fn post_package_archive( - State(global): State, - Path((distro, repo)): Path<(String, String)>, - body: Body, -) -> crate::Result { - let repo_id = global.repo.get_or_create_repo(&distro, &repo).await?; - - let [tmp_path] = global.repo.random_file_paths(); - let mut tmp_file = tokio::fs::File::create(&tmp_path).await?; - let mut body = StreamReader::new(body.into_data_stream().map_err(std::io::Error::other)); - tokio::io::copy(&mut body, &mut tmp_file).await?; - - global.repo.queue_pkg(repo_id, tmp_path).await; - - Ok(StatusCode::ACCEPTED) -} - -async fn delete_repo( - State(global): State, - Path((distro, repo)): Path<(String, String)>, -) -> crate::Result { - if let Some(repo) = global.repo.get_repo(&distro, &repo).await? { - global.repo.remove_repo(repo).await?; - - tracing::info!("Removed repository {repo}"); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } -} - -async fn delete_arch_repo( - State(global): State, - Path((distro, repo, arch)): Path<(String, String, String)>, -) -> crate::Result { - if let Some(repo) = global.repo.get_repo(&distro, &repo).await? 
{ - global.repo.remove_repo_arch(repo, &arch).await?; - - tracing::info!("Removed architecture '{arch}' from repository {repo}"); - - Ok(StatusCode::OK) - } else { - Ok(StatusCode::NOT_FOUND) - } -} - -async fn delete_package( - State(global): State, - Path((distro, repo, arch, pkg_name)): Path<(String, String, String, String)>, -) -> crate::Result { - Ok(StatusCode::NOT_FOUND) - //if let Some(mgr) = global.mgr.get_mgr(&distro).await { - // let pkg_removed = mgr.remove_pkg(&repo, &arch, &pkg_name).await?; - // - // if pkg_removed { - // tracing::info!( - // "Removed package '{}' ({}) from repository '{}'", - // pkg_name, - // arch, - // repo - // ); - // - // Ok(StatusCode::OK) - // } else { - // Ok(StatusCode::NOT_FOUND) - // } - //} else { - // Ok(StatusCode::NOT_FOUND) - //} -} diff --git a/server/src/api/mod.rs b/server/src/web/api/mod.rs similarity index 100% rename from server/src/api/mod.rs rename to server/src/web/api/mod.rs diff --git a/server/src/api/pagination.rs b/server/src/web/api/pagination.rs similarity index 100% rename from server/src/api/pagination.rs rename to server/src/web/api/pagination.rs diff --git a/server/src/web/mod.rs b/server/src/web/mod.rs new file mode 100644 index 0000000..48e9cbb --- /dev/null +++ b/server/src/web/mod.rs @@ -0,0 +1,13 @@ +mod api; +mod repo; + +use axum::Router; +use tower_http::trace::TraceLayer; + +pub fn router(global: crate::Global) -> Router { + Router::new() + .nest("/api", api::router()) + .merge(repo::router(&global.config.api_key)) + .with_state(global) + .layer(TraceLayer::new_for_http()) +} diff --git a/server/src/web/repo.rs b/server/src/web/repo.rs new file mode 100644 index 0000000..d690895 --- /dev/null +++ b/server/src/web/repo.rs @@ -0,0 +1,142 @@ +use crate::FsConfig; + +use axum::{ + body::Body, + extract::{Path, State}, + http::{Request, StatusCode}, + response::IntoResponse, + routing::{delete, post}, + Router, +}; +use futures::TryStreamExt; +use tokio_util::io::StreamReader; +use 
tower::util::ServiceExt; +use tower_http::{services::ServeFile, validate_request::ValidateRequestHeaderLayer}; + +pub fn router(api_key: &str) -> Router { + Router::new() + .route( + "/:distro/:repo", + post(post_package_archive) + .delete(delete_repo) + .route_layer(ValidateRequestHeaderLayer::bearer(api_key)), + ) + .route( + "/:distro/:repo/:arch", + delete(delete_arch_repo).route_layer(ValidateRequestHeaderLayer::bearer(api_key)), + ) + // Routes added after the layer do not get that layer applied, so the GET requests will not + // be authorized + .route( + "/:distro/:repo/:arch/:filename", + delete(delete_package) + .route_layer(ValidateRequestHeaderLayer::bearer(api_key)) + .get(get_file), + ) +} + +/// Serve the package archive files and database archives. If files are requested for an +/// architecture that does not have any explicit packages, a repository containing only "any" files +/// is returned. +async fn get_file( + State(global): State, + Path((distro, repo, arch, file_name)): Path<(String, String, String, String)>, + req: Request, +) -> crate::Result { + if let Some(repo_id) = global.repo.get_repo(&distro, &repo).await? 
{ + match global.config.fs { + FsConfig::Local { data_dir } => { + let repo_dir = data_dir.join("repos").join(repo_id.to_string()); + + let file_name = if file_name == format!("{}.db", repo) + || file_name == format!("{}.db.tar.gz", repo) + { + format!("{}.db.tar.gz", arch) + } else if file_name == format!("{}.files", repo) + || file_name == format!("{}.files.tar.gz", repo) + { + format!("{}.files.tar.gz", arch) + } else { + file_name + }; + + let path = repo_dir.join(file_name); + Ok(ServeFile::new(path).oneshot(req).await) + } + } + } else { + Err(StatusCode::NOT_FOUND.into()) + } +} + +async fn post_package_archive( + State(global): State, + Path((distro, repo)): Path<(String, String)>, + body: Body, +) -> crate::Result { + let repo_id = global.repo.get_or_create_repo(&distro, &repo).await?; + + let [tmp_path] = global.repo.random_file_paths(); + let mut tmp_file = tokio::fs::File::create(&tmp_path).await?; + let mut body = StreamReader::new(body.into_data_stream().map_err(std::io::Error::other)); + tokio::io::copy(&mut body, &mut tmp_file).await?; + + global.repo.queue_pkg(repo_id, tmp_path).await; + + Ok(StatusCode::ACCEPTED) +} + +async fn delete_repo( + State(global): State, + Path((distro, repo)): Path<(String, String)>, +) -> crate::Result { + if let Some(repo) = global.repo.get_repo(&distro, &repo).await? { + global.repo.remove_repo(repo).await?; + + tracing::info!("Removed repository {repo}"); + + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } +} + +async fn delete_arch_repo( + State(global): State, + Path((distro, repo, arch)): Path<(String, String, String)>, +) -> crate::Result { + if let Some(repo) = global.repo.get_repo(&distro, &repo).await? 
{ + global.repo.remove_repo_arch(repo, &arch).await?; + + tracing::info!("Removed architecture '{arch}' from repository {repo}"); + + Ok(StatusCode::OK) + } else { + Ok(StatusCode::NOT_FOUND) + } +} + +async fn delete_package( + State(global): State, + Path((distro, repo, arch, pkg_name)): Path<(String, String, String, String)>, +) -> crate::Result { + Ok(StatusCode::NOT_FOUND) + //if let Some(mgr) = global.mgr.get_mgr(&distro).await { + // let pkg_removed = mgr.remove_pkg(&repo, &arch, &pkg_name).await?; + // + // if pkg_removed { + // tracing::info!( + // "Removed package '{}' ({}) from repository '{}'", + // pkg_name, + // arch, + // repo + // ); + // + // Ok(StatusCode::OK) + // } else { + // Ok(StatusCode::NOT_FOUND) + // } + //} else { + // Ok(StatusCode::NOT_FOUND) + //} +} From d375df0ff4fde9fc8095e41e712c5cef27a86867 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Wed, 26 Jun 2024 22:00:43 +0200 Subject: [PATCH 58/73] refactor(repo): put some more code in its place --- server/src/main.rs | 2 +- server/src/repo/actor.rs | 66 ++++++------------------------- server/src/repo/handle.rs | 52 +++++------------------- server/src/repo/mod.rs | 83 ++++++++++++++++++++++++++++++++++++++- 4 files changed, 105 insertions(+), 98 deletions(-) diff --git a/server/src/main.rs b/server/src/main.rs index 337ba2e..5a91fdb 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -53,7 +53,7 @@ fn setup(rt: &runtime::Handle, config_file: PathBuf) -> crate::Result { let repo = match &config.fs { FsConfig::Local { data_dir } => { - crate::repo::Handle::start( + crate::repo::start( data_dir.join("repos"), db.clone(), rt.clone(), diff --git a/server/src/repo/actor.rs b/server/src/repo/actor.rs index 57f1b93..2c2fd74 100644 --- a/server/src/repo/actor.rs +++ b/server/src/repo/actor.rs @@ -1,68 +1,26 @@ -use super::{archive, package}; +use super::{archive, package, Command, SharedState}; use crate::db; use std::{ - collections::HashMap, - path::{Path, PathBuf}, - sync::{ - 
atomic::{AtomicU32, Ordering}, - Arc, Mutex, - }, + path::PathBuf, + sync::{atomic::Ordering, Arc}, }; use futures::StreamExt; -use sea_orm::{ColumnTrait, DbConn, EntityTrait, QueryFilter, QuerySelect}; +use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, QuerySelect}; use sea_query::Expr; -use tokio::{ - runtime, - sync::{ - mpsc::{self, unbounded_channel, UnboundedReceiver, UnboundedSender}, - RwLock, - }, -}; +use tokio::{runtime, sync::mpsc}; use uuid::Uuid; -pub enum RepoCommand { - ParsePkg(i32, PathBuf), - SyncRepo(i32), - Clean, -} - -pub struct RepoSharedState { - pub repos_dir: PathBuf, - pub conn: DbConn, - pub rx: Mutex>, - pub tx: UnboundedSender, - pub repos: RwLock>)>>, -} - -impl RepoSharedState { - pub fn new( - repos_dir: impl AsRef, - conn: DbConn, - repos: HashMap>)>, - ) -> Self { - let (tx, rx) = unbounded_channel(); - - Self { - repos_dir: repos_dir.as_ref().to_path_buf(), - conn, - rx: Mutex::new(rx), - tx, - repos: RwLock::new(repos), - } - } -} - /// The actor is responsible for mutating the repositories. They receive their commands their /// messages and process these commands in both a synchronous and asynchronous way. 
-pub struct RepoActor { +pub struct Actor { rt: runtime::Handle, - state: Arc, + state: Arc, } -impl RepoActor { - pub fn new(rt: runtime::Handle, state: Arc) -> Self { +impl Actor { + pub fn new(rt: runtime::Handle, state: Arc) -> Self { Self { rt, state: Arc::clone(&state), @@ -83,7 +41,7 @@ impl RepoActor { rx.blocking_recv() } { match msg { - RepoCommand::ParsePkg(repo, path) => { + Command::ParsePkg(repo, path) => { let _ = self.parse_pkg(repo, path); if self @@ -98,10 +56,10 @@ impl RepoActor { let _ = self.clean(); } } - RepoCommand::SyncRepo(repo) => { + Command::SyncRepo(repo) => { let _ = self.sync_repo(repo); } - RepoCommand::Clean => { + Command::Clean => { let _ = self.clean(); } } diff --git a/server/src/repo/handle.rs b/server/src/repo/handle.rs index 4cec237..bbcc153 100644 --- a/server/src/repo/handle.rs +++ b/server/src/repo/handle.rs @@ -1,56 +1,27 @@ -use super::{RepoCommand, RepoSharedState}; +use super::{Command, SharedState}; use crate::db; use std::{ - collections::HashMap, - path::{Path, PathBuf}, + path::PathBuf, sync::{atomic::Ordering, Arc}, }; use sea_orm::{ - ActiveModelTrait, ColumnTrait, Condition, DbConn, EntityTrait, NotSet, QueryFilter, - QuerySelect, Set, + ActiveModelTrait, ColumnTrait, Condition, EntityTrait, NotSet, QueryFilter, QuerySelect, Set, }; use sea_query::Expr; -use tokio::runtime; use uuid::Uuid; #[derive(Clone)] pub struct Handle { - state: Arc, + state: Arc, } impl Handle { - pub fn start( - repos_dir: impl AsRef, - conn: DbConn, - rt: runtime::Handle, - actors: u32, - ) -> crate::Result { - std::fs::create_dir_all(repos_dir.as_ref())?; - - let mut repos = HashMap::new(); - let repo_ids: Vec = rt.block_on( - db::Repo::find() - .select_only() - .column(db::repo::Column::Id) - .into_tuple() - .all(&conn), - )?; - - for id in repo_ids { - repos.insert(id, Default::default()); + pub fn new(state: &Arc) -> Self { + Self { + state: Arc::clone(state), } - - let state = Arc::new(RepoSharedState::new(repos_dir, conn, 
repos)); - - for _ in 0..actors { - let actor = super::RepoActor::new(rt.clone(), Arc::clone(&state)); - - std::thread::spawn(|| actor.run()); - } - - Ok(Self { state }) } pub fn random_file_paths(&self) -> [PathBuf; C] { @@ -157,20 +128,17 @@ impl Handle { } pub async fn queue_pkg(&self, repo: i32, path: PathBuf) { - self.state - .tx - .send(RepoCommand::ParsePkg(repo, path)) - .unwrap(); + self.state.tx.send(Command::ParsePkg(repo, path)).unwrap(); self.state.repos.read().await.get(&repo).inspect(|n| { n.0.fetch_add(1, Ordering::SeqCst); }); } async fn queue_sync(&self, repo: i32) { - self.state.tx.send(RepoCommand::SyncRepo(repo)).unwrap(); + self.state.tx.send(Command::SyncRepo(repo)).unwrap(); } async fn queue_clean(&self) { - self.state.tx.send(RepoCommand::Clean).unwrap(); + self.state.tx.send(Command::Clean).unwrap(); } } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 16c368e..9920326 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -3,5 +3,86 @@ mod archive; mod handle; pub mod package; -pub use actor::{RepoActor, RepoCommand, RepoSharedState}; +pub use actor::Actor; pub use handle::Handle; + +use crate::db; + +use std::{ + collections::HashMap, + path::{Path, PathBuf}, + sync::{atomic::AtomicU32, Arc, Mutex}, +}; + +use sea_orm::{DbConn, EntityTrait, QuerySelect}; +use tokio::{ + runtime, + sync::{ + mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, + RwLock, + }, +}; + +pub enum Command { + ParsePkg(i32, PathBuf), + SyncRepo(i32), + Clean, +} + +pub struct SharedState { + pub repos_dir: PathBuf, + pub conn: DbConn, + pub rx: Mutex>, + pub tx: UnboundedSender, + pub repos: RwLock>)>>, +} + +impl SharedState { + pub fn new( + repos_dir: impl AsRef, + conn: DbConn, + repos: HashMap>)>, + ) -> Self { + let (tx, rx) = unbounded_channel(); + + Self { + repos_dir: repos_dir.as_ref().to_path_buf(), + conn, + rx: Mutex::new(rx), + tx, + repos: RwLock::new(repos), + } + } +} + +pub fn start( + repos_dir: impl 
AsRef, + conn: DbConn, + rt: runtime::Handle, + actors: u32, +) -> crate::Result { + std::fs::create_dir_all(repos_dir.as_ref())?; + + let mut repos = HashMap::new(); + let repo_ids: Vec = rt.block_on( + db::Repo::find() + .select_only() + .column(db::repo::Column::Id) + .into_tuple() + .all(&conn), + )?; + + for id in repo_ids { + repos.insert(id, Default::default()); + } + + let state = Arc::new(SharedState::new(repos_dir, conn, repos)); + + for _ in 0..actors { + let actor = Actor::new(rt.clone(), Arc::clone(&state)); + + std::thread::spawn(|| actor.run()); + } + + Ok(Handle::new(&state)) +} From e3b0f4f0a1c7cf55c8c35d3e370877b7c4920411 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Thu, 27 Jun 2024 11:39:04 +0200 Subject: [PATCH 59/73] feat: chunk large database inserts --- server/src/db/query/package.rs | 44 ++++++++++++++++++++++------------ server/src/main.rs | 1 + server/src/util.rs | 23 ++++++++++++++++++ 3 files changed, 53 insertions(+), 15 deletions(-) create mode 100644 server/src/util.rs diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index ad9d74a..9a8be5f 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -4,6 +4,9 @@ use sea_orm::{sea_query::IntoCondition, *}; use sea_query::{Alias, Expr, Query, SelectStatement}; use serde::Deserialize; +/// How many fields may be inserted at once into the database. 
+const PACKAGE_INSERT_LIMIT: usize = 1000; + #[derive(Deserialize)] pub struct Filter { repo: Option, @@ -160,23 +163,34 @@ pub async fn insert( .iter() .map(|s| (PackageRelatedEnum::Optdepend, s)), ); + let related = crate::util::Chunked::new(related, PACKAGE_INSERT_LIMIT); - PackageRelated::insert_many(related.map(|(t, s)| package_related::ActiveModel { - package_id: Set(pkg_entry.id), - r#type: Set(t), - name: Set(s.to_string()), - })) - .on_empty_do_nothing() - .exec(&txn) - .await?; + for chunk in related { + PackageRelated::insert_many( + chunk + .into_iter() + .map(|(t, s)| package_related::ActiveModel { + package_id: Set(pkg_entry.id), + r#type: Set(t), + name: Set(s.to_string()), + }), + ) + .on_empty_do_nothing() + .exec(&txn) + .await?; + } - PackageFile::insert_many(pkg.files.iter().map(|s| package_file::ActiveModel { - package_id: Set(pkg_entry.id), - path: Set(s.display().to_string()), - })) - .on_empty_do_nothing() - .exec(&txn) - .await?; + let files = crate::util::Chunked::new(pkg.files, PACKAGE_INSERT_LIMIT); + + for chunk in files { + PackageFile::insert_many(chunk.into_iter().map(|s| package_file::ActiveModel { + package_id: Set(pkg_entry.id), + path: Set(s.display().to_string()), + })) + .on_empty_do_nothing() + .exec(&txn) + .await?; + } txn.commit().await?; diff --git a/server/src/main.rs b/server/src/main.rs index 5a91fdb..cb66668 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -3,6 +3,7 @@ mod config; pub mod db; mod error; mod repo; +mod util; mod web; pub use config::{Config, DbConfig, FsConfig}; diff --git a/server/src/util.rs b/server/src/util.rs new file mode 100644 index 0000000..9aad122 --- /dev/null +++ b/server/src/util.rs @@ -0,0 +1,23 @@ +pub struct Chunked { + iter: I, + chunk_size: usize, +} + +impl Chunked { + pub fn new>(into: T, chunk_size: usize) -> Self { + Self { + iter: into.into_iter(), + chunk_size, + } + } +} + +// https://users.rust-lang.org/t/how-to-breakup-an-iterator-into-chunks/87915/5 +impl Iterator 
for Chunked { + type Item = Vec; + + fn next(&mut self) -> Option { + Some(self.iter.by_ref().take(self.chunk_size).collect()) + .filter(|chunk: &Vec<_>| !chunk.is_empty()) + } +} From 86ab143271ecb866a8cce1db2234e5d0d587dbd3 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Thu, 27 Jun 2024 13:52:07 +0200 Subject: [PATCH 60/73] fix(package): ignore all files that start with a dot --- server/src/repo/package.rs | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/server/src/repo/package.rs b/server/src/repo/package.rs index 70466ba..996f933 100644 --- a/server/src/repo/package.rs +++ b/server/src/repo/package.rs @@ -13,8 +13,6 @@ use libarchive::{ }; use sea_orm::ActiveValue::Set; -const IGNORED_FILES: [&str; 5] = [".BUILDINFO", ".INSTALL", ".MTREE", ".PKGINFO", ".CHANGELOG"]; - #[derive(Debug, Clone)] pub struct Package { pub path: PathBuf, @@ -158,11 +156,9 @@ impl Package { let entry = entry?; let path_name = entry.pathname(); - if !IGNORED_FILES.iter().any(|p| p == &path_name) { + if !path_name.starts_with('.') { files.push(PathBuf::from(path_name)); - } - - if path_name == ".PKGINFO" { + } else if path_name == ".PKGINFO" { info = Some(PkgInfo::parse(entry)?); } } From fde413d6f6c7e9549c7ea932b9e06b37d5dc19f1 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sat, 6 Jul 2024 22:06:09 +0200 Subject: [PATCH 61/73] feat: use pretty package filenames parsed using regex --- Cargo.lock | 5 ++-- server/Cargo.toml | 1 + server/src/db/query/package.rs | 10 ++++---- server/src/main.rs | 3 +++ server/src/repo/archive.rs | 6 ++++- server/src/repo/package.rs | 7 ------ server/src/web/repo.rs | 46 ++++++++++++++++++++++------------ 7 files changed, 47 insertions(+), 31 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8520e63..bd0c194 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1670,9 +1670,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.4" +version = "1.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" dependencies = [ "aho-corasick", "memchr", @@ -1732,6 +1732,7 @@ dependencies = [ "futures", "http-body-util", "libarchive", + "regex", "sea-orm", "sea-orm-migration", "sea-query", diff --git a/server/Cargo.toml b/server/Cargo.toml index b1fc688..5c26303 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -14,6 +14,7 @@ figment = { version = "0.10.19", features = ["env", "toml"] } futures = "0.3.28" http-body-util = "0.1.1" libarchive = { path = "../libarchive" } +regex = "1.10.5" sea-orm-migration = "0.12.1" sea-query = { version = "0.30.7", features = ["backend-postgres", "backend-sqlite"] } serde = { version = "1.0.178", features = ["derive"] } diff --git a/server/src/db/query/package.rs b/server/src/db/query/package.rs index 9a8be5f..8a4f054 100644 --- a/server/src/db/query/package.rs +++ b/server/src/db/query/package.rs @@ -60,17 +60,17 @@ pub async fn by_id(conn: &DbConn, id: i32) -> Result> { pub async fn by_fields( conn: &DbConn, repo_id: i32, - arch: &str, name: &str, - version: Option<&str>, - compression: Option<&str>, + version: &str, + arch: &str, + compression: &str, ) -> Result> { let cond = Condition::all() .add(package::Column::RepoId.eq(repo_id)) .add(package::Column::Name.eq(name)) .add(package::Column::Arch.eq(arch)) - .add_option(version.map(|version| package::Column::Version.eq(version))) - .add_option(compression.map(|compression| package::Column::Compression.eq(compression))); + .add(package::Column::Version.eq(version)) + .add(package::Column::Compression.eq(compression)); Package::find().filter(cond).one(conn).await } diff --git a/server/src/main.rs b/server/src/main.rs index cb66668..c641666 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -17,12 +17,14 @@ use tokio::runtime; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; pub const ANY_ARCH: 
&'static str = "any"; +pub const PKG_FILENAME_REGEX: &'static str = "^([a-z0-9@._+-]+)-((?:[0-9]+:)?[a-zA-Z0-9@._+]+-[0-9]+)-([a-zA-z0-9_]+).pkg.tar.([a-zA-Z0-9]+)$"; #[derive(Clone)] pub struct Global { config: crate::config::Config, repo: repo::Handle, db: sea_orm::DbConn, + pkg_filename_re: regex::Regex, } fn main() -> crate::Result<()> { @@ -79,6 +81,7 @@ fn setup(rt: &runtime::Handle, config_file: PathBuf) -> crate::Result { config: config.clone(), repo, db, + pkg_filename_re: regex::Regex::new(PKG_FILENAME_REGEX).unwrap(), }) } diff --git a/server/src/repo/archive.rs b/server/src/repo/archive.rs index ad08a67..2844b90 100644 --- a/server/src/repo/archive.rs +++ b/server/src/repo/archive.rs @@ -124,7 +124,11 @@ impl RepoArchivesWriter { fn write_desc(&self, path: impl AsRef, pkg: &db::package::Model) -> crate::Result<()> { let mut f = std::io::BufWriter::new(std::fs::File::create(path)?); - writeln!(f, "%FILENAME%\n{}", pkg.id)?; + let filename = format!( + "{}-{}-{}.pkg.tar.{}", + pkg.name, pkg.version, pkg.arch, pkg.compression + ); + writeln!(f, "%FILENAME%\n{}", filename)?; let mut write_attr = |k: &str, v: &str| { if !v.is_empty() { diff --git a/server/src/repo/package.rs b/server/src/repo/package.rs index 996f933..e8bb076 100644 --- a/server/src/repo/package.rs +++ b/server/src/repo/package.rs @@ -222,10 +222,3 @@ impl From for package::ActiveModel { } } } - -pub fn filename(pkg: &package::Model) -> String { - format!( - "{}-{}-{}.pkg.tar.{}", - pkg.name, pkg.version, pkg.arch, pkg.compression - ) -} diff --git a/server/src/web/repo.rs b/server/src/web/repo.rs index d690895..c3bfe6b 100644 --- a/server/src/web/repo.rs +++ b/server/src/web/repo.rs @@ -1,4 +1,4 @@ -use crate::FsConfig; +use crate::{db, FsConfig}; use axum::{ body::Body, @@ -44,23 +44,37 @@ async fn get_file( req: Request, ) -> crate::Result { if let Some(repo_id) = global.repo.get_repo(&distro, &repo).await? 
{ + let file_name = + if file_name == format!("{}.db", repo) || file_name == format!("{}.db.tar.gz", repo) { + format!("{}.db.tar.gz", arch) + } else if file_name == format!("{}.files", repo) + || file_name == format!("{}.files.tar.gz", repo) + { + format!("{}.files.tar.gz", arch) + } else if let Some(m) = global.pkg_filename_re.captures(&file_name) { + // SAFETY: these unwraps cannot fail if the RegEx matched successfully + db::query::package::by_fields( + &global.db, + repo_id, + m.get(1).unwrap().as_str(), + m.get(2).unwrap().as_str(), + m.get(3).unwrap().as_str(), + m.get(4).unwrap().as_str(), + ) + .await? + .ok_or(StatusCode::NOT_FOUND)? + .id + .to_string() + } else { + return Err(StatusCode::NOT_FOUND.into()); + }; + match global.config.fs { FsConfig::Local { data_dir } => { - let repo_dir = data_dir.join("repos").join(repo_id.to_string()); - - let file_name = if file_name == format!("{}.db", repo) - || file_name == format!("{}.db.tar.gz", repo) - { - format!("{}.db.tar.gz", arch) - } else if file_name == format!("{}.files", repo) - || file_name == format!("{}.files.tar.gz", repo) - { - format!("{}.files.tar.gz", arch) - } else { - file_name - }; - - let path = repo_dir.join(file_name); + let path = data_dir + .join("repos") + .join(repo_id.to_string()) + .join(file_name); Ok(ServeFile::new(path).oneshot(req).await) } } From 052fb75ff94f1043eefcefb84ea30cfc0f6bab40 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 7 Jul 2024 10:51:27 +0200 Subject: [PATCH 62/73] chore(ci): add static binary check --- .woodpecker/build.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index 392bab1..3529154 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -3,7 +3,7 @@ platform: 'linux/amd64' when: branch: exclude: [main] - event: push + event: [push, pull_request] steps: build: @@ -11,4 +11,6 @@ steps: commands: - apk add --no-cache build-base libarchive libarchive-dev - cargo build 
--verbose + # Binaries, even debug ones, should be statically compiled + - '[ "$(readelf -d target/debug/rieterd | grep NEEDED | wc -l)" = 0 ]' From a67c33bff2e8e188c243b73b1ba409111289d218 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 7 Jul 2024 10:53:16 +0200 Subject: [PATCH 63/73] chore(ci): bump rust version --- .woodpecker/build.yml | 4 ++-- .woodpecker/clippy.yml | 2 +- .woodpecker/lint.yml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index 3529154..54b5bac 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -3,11 +3,11 @@ platform: 'linux/amd64' when: branch: exclude: [main] - event: [push, pull_request] + event: push steps: build: - image: 'rust:1.70-alpine3.18' + image: 'rust:1.79-alpine3.19' commands: - apk add --no-cache build-base libarchive libarchive-dev - cargo build --verbose diff --git a/.woodpecker/clippy.yml b/.woodpecker/clippy.yml index b1c86a7..2d74a26 100644 --- a/.woodpecker/clippy.yml +++ b/.woodpecker/clippy.yml @@ -7,7 +7,7 @@ when: steps: clippy: - image: 'rust:1.70-alpine3.18' + image: 'rust:1.79-alpine3.19' commands: - rustup component add clippy - cargo clippy -- --no-deps -Dwarnings diff --git a/.woodpecker/lint.yml b/.woodpecker/lint.yml index 4c09bc4..ba3b7ab 100644 --- a/.woodpecker/lint.yml +++ b/.woodpecker/lint.yml @@ -7,7 +7,7 @@ when: steps: lint: - image: 'rust:1.70-alpine3.18' + image: 'rust:1.79-alpine3.19' commands: - rustup component add rustfmt - cargo fmt -- --check From 7546ec9c5fe556ff70c1f768019a5cc38afa212f Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 7 Jul 2024 11:11:31 +0200 Subject: [PATCH 64/73] fix(ci): add static libarchive flags --- .woodpecker/build.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index 54b5bac..4cb5370 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -8,8 +8,14 @@ when: steps: build: image: 
'rust:1.79-alpine3.19' + environment: + - 'LIBARCHIVE_STATIC=1' + - 'LIBARCHIVE_LIB_DIR=/usr/lib' + - 'LIBARCHIVE_INCLUDE_DIR=/usr/include' + - 'LIBARCHIVE_LDFLAGS=-lssl -lcrypto -L/lib -lz -lbz2 -llzma -lexpat -lzstd -llz4' + - 'LIBARCHIVE_LDFLAGS=-L/usr/lib -lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3' commands: - - apk add --no-cache build-base libarchive libarchive-dev + - apk add --no-cache build-base libarchive-static libarchive-dev - cargo build --verbose # Binaries, even debug ones, should be statically compiled - '[ "$(readelf -d target/debug/rieterd | grep NEEDED | wc -l)" = 0 ]' From c13b823682db9790947d121da0c27c838ed4affd Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 7 Jul 2024 12:47:28 +0200 Subject: [PATCH 65/73] fix(ci): static compilation --- .woodpecker/build.yml | 17 ++++++++++++----- libarchive3-sys/build.rs | 34 +--------------------------------- 2 files changed, 13 insertions(+), 38 deletions(-) diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index 4cb5370..4d5367b 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -10,12 +10,19 @@ steps: image: 'rust:1.79-alpine3.19' environment: - 'LIBARCHIVE_STATIC=1' - - 'LIBARCHIVE_LIB_DIR=/usr/lib' - - 'LIBARCHIVE_INCLUDE_DIR=/usr/include' - - 'LIBARCHIVE_LDFLAGS=-lssl -lcrypto -L/lib -lz -lbz2 -llzma -lexpat -lzstd -llz4' - - 'LIBARCHIVE_LDFLAGS=-L/usr/lib -lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3' commands: - - apk add --no-cache build-base libarchive-static libarchive-dev + # Dependencies required to statically compile libarchive and libsqlite3 + - > + apk add --no-cache build-base + libarchive-static libarchive-dev + zlib-static + openssl-libs-static + bzip2-static + xz-static + expat-static + zstd-static + lz4-static + acl-static - cargo build --verbose # Binaries, even debug ones, should be statically compiled - '[ "$(readelf -d target/debug/rieterd | grep NEEDED | wc -l)" = 0 ]' diff --git a/libarchive3-sys/build.rs b/libarchive3-sys/build.rs index 
43d83e7..b82eb31 100644 --- a/libarchive3-sys/build.rs +++ b/libarchive3-sys/build.rs @@ -1,35 +1,3 @@ -extern crate pkg_config; - -use std::env; - fn main() { - let lib_dir = env::var("LIBARCHIVE_LIB_DIR").ok(); - let include_dir = env::var("LIBARCHIVE_INCLUDE_DIR").ok(); - - if lib_dir.is_some() && include_dir.is_some() { - println!("cargo:rustc-flags=-L native={}", lib_dir.unwrap()); - println!("cargo:include={}", include_dir.unwrap()); - let mode = match env::var_os("LIBARCHIVE_STATIC") { - Some(_) => "static", - None => "dylib", - }; - println!("cargo:rustc-flags=-l {0}=archive", mode); - - if mode == "static" { - if let Ok(ldflags) = env::var("LIBARCHIVE_LDFLAGS") { - for token in ldflags.split_whitespace() { - if token.starts_with("-L") { - println!("cargo:rustc-flags=-L native={}", token.replace("-L", "")); - } else if token.starts_with("-l") { - println!("cargo:rustc-flags=-l static={}", token.replace("-l", "")); - } - } - } - } - } else { - match pkg_config::find_library("libarchive") { - Ok(_) => (), - Err(msg) => panic!("Unable to locate libarchive, err={:?}", msg), - } - } + pkg_config::Config::new().atleast_version("3").probe("libarchive").unwrap(); } From 68ce684c77d7955a74632ddac3a4bfe996b3264f Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 7 Jul 2024 13:04:13 +0200 Subject: [PATCH 66/73] chore(ci): move clippy to build step --- .woodpecker/build.yml | 5 ++++- .woodpecker/clippy.yml | 13 ------------- 2 files changed, 4 insertions(+), 14 deletions(-) delete mode 100644 .woodpecker/clippy.yml diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index 4d5367b..8ddf4c9 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -26,4 +26,7 @@ steps: - cargo build --verbose # Binaries, even debug ones, should be statically compiled - '[ "$(readelf -d target/debug/rieterd | grep NEEDED | wc -l)" = 0 ]' - + # Clippy also performs a full build, so putting it here saves the CI a + # lot of work + - rustup component add clippy + - cargo 
clippy -- --no-deps -Dwarnings diff --git a/.woodpecker/clippy.yml b/.woodpecker/clippy.yml deleted file mode 100644 index 2d74a26..0000000 --- a/.woodpecker/clippy.yml +++ /dev/null @@ -1,13 +0,0 @@ -platform: 'linux/amd64' - -when: - branch: - exclude: [main] - event: push - -steps: - clippy: - image: 'rust:1.79-alpine3.19' - commands: - - rustup component add clippy - - cargo clippy -- --no-deps -Dwarnings From 9cec2e0dc2d91e5b95aaf4849f0ffe4132f6ca91 Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Sun, 7 Jul 2024 13:30:12 +0200 Subject: [PATCH 67/73] feat(ci): use custom builder image --- .woodpecker/build.yml | 25 +++++++------------------ .woodpecker/lint.yml | 3 +-- build.Dockerfile | 20 ++++++++++++++++++++ 3 files changed, 28 insertions(+), 20 deletions(-) create mode 100644 build.Dockerfile diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index 8ddf4c9..e302f64 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -7,26 +7,15 @@ when: steps: build: - image: 'rust:1.79-alpine3.19' - environment: - - 'LIBARCHIVE_STATIC=1' + image: 'git.rustybever.be/chewing_bever/rieter-builder:1.79-alpine3.19' commands: - # Dependencies required to statically compile libarchive and libsqlite3 - - > - apk add --no-cache build-base - libarchive-static libarchive-dev - zlib-static - openssl-libs-static - bzip2-static - xz-static - expat-static - zstd-static - lz4-static - acl-static - cargo build --verbose # Binaries, even debug ones, should be statically compiled - '[ "$(readelf -d target/debug/rieterd | grep NEEDED | wc -l)" = 0 ]' - # Clippy also performs a full build, so putting it here saves the CI a - # lot of work - - rustup component add clippy + + # Clippy also performs a full build, so putting it here saves the CI a + # lot of work + clippy: + image: 'git.rustybever.be/chewing_bever/rieter-builder:1.79-alpine3.19' + commands: - cargo clippy -- --no-deps -Dwarnings diff --git a/.woodpecker/lint.yml b/.woodpecker/lint.yml index 
ba3b7ab..2bd567e 100644 --- a/.woodpecker/lint.yml +++ b/.woodpecker/lint.yml @@ -7,7 +7,6 @@ when: steps: lint: - image: 'rust:1.79-alpine3.19' + image: 'git.rustybever.be/chewing_bever/rieter-builder:1.79-alpine3.19' commands: - - rustup component add rustfmt - cargo fmt -- --check diff --git a/build.Dockerfile b/build.Dockerfile new file mode 100644 index 0000000..177b462 --- /dev/null +++ b/build.Dockerfile @@ -0,0 +1,20 @@ +# Command to build and push builder image (change tags as necessary): +# docker buildx build -f build.Dockerfile -t git.rustybever.be/chewing_bever/rieter-builder:1.79-alpine3.19 --platform linux/amd64,linux/arm64 --push . +FROM rust:1.79-alpine3.19 + +# Dependencies required to statically compile libarchive and libsqlite3 +RUN apk add --no-cache \ + build-base \ + libarchive-static libarchive-dev \ + zlib-static \ + openssl-libs-static \ + bzip2-static \ + xz-static \ + expat-static \ + zstd-static \ + lz4-static \ + acl-static && \ + rustup component add clippy rustfmt + +# Tell the libarchive3-sys package to statically link libarchive +ENV LIBARCHIVE_STATIC=1 From fde56af414c605a36211bb1b08ec30da72de39f3 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Mon, 8 Jul 2024 21:54:12 +0200 Subject: [PATCH 68/73] chore: fix all clippy warnings --- libarchive/src/archive.rs | 7 +- libarchive/src/read/builder.rs | 2 +- libarchive/src/write/file.rs | 6 +- libarchive/src/write/mod.rs | 6 ++ libarchive3-sys/README.md | 4 + libarchive3-sys/build.rs | 5 +- libarchive3-sys/src/ffi.rs | 190 +++++++++++++-------------------- server/src/db/mod.rs | 2 +- server/src/main.rs | 4 +- server/src/repo/archive.rs | 4 +- server/src/repo/mod.rs | 4 +- server/src/repo/package.rs | 39 +++---- server/src/web/repo.rs | 4 +- 13 files changed, 119 insertions(+), 158 deletions(-) diff --git a/libarchive/src/archive.rs b/libarchive/src/archive.rs index 3369a44..932013b 100644 --- a/libarchive/src/archive.rs +++ b/libarchive/src/archive.rs @@ -386,6 +386,7 @@ pub enum 
ExtractOption { ClearNoChangeFFlags, } +#[derive(Default)] pub struct ExtractOptions { pub flags: i32, } @@ -420,9 +421,3 @@ impl ExtractOptions { self } } - -impl Default for ExtractOptions { - fn default() -> ExtractOptions { - ExtractOptions { flags: 0 } - } -} diff --git a/libarchive/src/read/builder.rs b/libarchive/src/read/builder.rs index e827130..4af0401 100644 --- a/libarchive/src/read/builder.rs +++ b/libarchive/src/read/builder.rs @@ -78,7 +78,7 @@ impl Builder { ffi::archive_read_support_filter_program_signature( self.handle_mut(), c_prog.as_ptr(), - mem::transmute(cb), + mem::transmute::, *const std::ffi::c_void>(cb), size, ) } diff --git a/libarchive/src/write/file.rs b/libarchive/src/write/file.rs index fa39a13..ef4877d 100644 --- a/libarchive/src/write/file.rs +++ b/libarchive/src/write/file.rs @@ -41,7 +41,7 @@ impl FileWriter { unsafe { match ffi::archive_write_header(self.handle_mut(), entry.entry_mut()) { ffi::ARCHIVE_OK => Ok(()), - _ => Err(ArchiveError::from(self as &dyn Handle).into()), + _ => Err(ArchiveError::from(self as &dyn Handle)), } } } @@ -50,7 +50,7 @@ impl FileWriter { unsafe { match ffi::archive_write_header(self.handle_mut(), entry.entry_mut()) { ffi::ARCHIVE_OK => (), - _ => return Err(ArchiveError::from(self as &dyn Handle).into()), + _ => return Err(ArchiveError::from(self as &dyn Handle)), } } @@ -74,7 +74,7 @@ impl FileWriter { // Negative values signal errors if res < 0 { - return Err(ArchiveError::from(self as &dyn Handle).into()); + return Err(ArchiveError::from(self as &dyn Handle)); } written += usize::try_from(res).unwrap(); diff --git a/libarchive/src/write/mod.rs b/libarchive/src/write/mod.rs index 5f583e0..b64aadf 100644 --- a/libarchive/src/write/mod.rs +++ b/libarchive/src/write/mod.rs @@ -30,6 +30,12 @@ impl Entry for WriteEntry { } } +impl Default for WriteEntry { + fn default() -> Self { + Self::new() + } +} + impl Drop for WriteEntry { fn drop(&mut self) { unsafe { ffi::archive_entry_free(self.entry_mut()) } 
diff --git a/libarchive3-sys/README.md b/libarchive3-sys/README.md index bd605ef..a1467c0 100644 --- a/libarchive3-sys/README.md +++ b/libarchive3-sys/README.md @@ -4,3 +4,7 @@ DYLD_LIBRARY_PATH=/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib xcode-select --install + +# 64-bit timestamps + +`time_t` has been replaced with `i64` as Musl no longer supports 32-bit `time_t` values. diff --git a/libarchive3-sys/build.rs b/libarchive3-sys/build.rs index b82eb31..afe10d9 100644 --- a/libarchive3-sys/build.rs +++ b/libarchive3-sys/build.rs @@ -1,3 +1,6 @@ fn main() { - pkg_config::Config::new().atleast_version("3").probe("libarchive").unwrap(); + pkg_config::Config::new() + .atleast_version("3") + .probe("libarchive") + .unwrap(); } diff --git a/libarchive3-sys/src/ffi.rs b/libarchive3-sys/src/ffi.rs index 92cd267..f5ad4cf 100644 --- a/libarchive3-sys/src/ffi.rs +++ b/libarchive3-sys/src/ffi.rs @@ -294,14 +294,10 @@ extern "C" { ) -> c_int; pub fn archive_read_extract_set_progress_callback( arg1: *mut Struct_archive, - _progress_func: ::std::option::Option ()>, + _progress_func: ::std::option::Option, _user_data: *mut c_void, - ) -> (); - pub fn archive_read_extract_set_skip_file( - arg1: *mut Struct_archive, - arg2: i64, - arg3: i64, - ) -> (); + ); + pub fn archive_read_extract_set_skip_file(arg1: *mut Struct_archive, arg2: i64, arg3: i64); pub fn archive_read_close(arg1: *mut Struct_archive) -> c_int; pub fn archive_read_free(arg1: *mut Struct_archive) -> c_int; pub fn archive_read_finish(arg1: *mut Struct_archive) -> c_int; @@ -443,7 +439,7 @@ extern "C" { arg3: ::std::option::Option< unsafe extern "C" fn(arg1: *mut c_void, arg2: *const c_char, arg3: i64) -> i64, >, - arg4: ::std::option::Option ()>, + arg4: ::std::option::Option, ) -> c_int; pub fn archive_write_disk_set_user_lookup( arg1: *mut Struct_archive, @@ -451,7 +447,7 @@ extern "C" { arg3: ::std::option::Option< unsafe extern "C" fn(arg1: *mut c_void, arg2: *const 
c_char, arg3: i64) -> i64, >, - arg4: ::std::option::Option ()>, + arg4: ::std::option::Option, ) -> c_int; pub fn archive_write_disk_gid(arg1: *mut Struct_archive, arg2: *const c_char, arg3: i64) -> i64; @@ -475,7 +471,7 @@ extern "C" { arg3: ::std::option::Option< unsafe extern "C" fn(arg1: *mut c_void, arg2: i64) -> *const c_char, >, - arg4: ::std::option::Option ()>, + arg4: ::std::option::Option, ) -> c_int; pub fn archive_read_disk_set_uname_lookup( arg1: *mut Struct_archive, @@ -483,7 +479,7 @@ extern "C" { arg3: ::std::option::Option< unsafe extern "C" fn(arg1: *mut c_void, arg2: i64) -> *const c_char, >, - arg4: ::std::option::Option ()>, + arg4: ::std::option::Option, ) -> c_int; pub fn archive_read_disk_open(arg1: *mut Struct_archive, arg2: *const c_char) -> c_int; pub fn archive_read_disk_open_w(arg1: *mut Struct_archive, arg2: *const wchar_t) -> c_int; @@ -502,7 +498,7 @@ extern "C" { arg1: *mut Struct_archive, arg2: *mut c_void, arg3: *mut Struct_archive_entry, - ) -> (), + ), >, _client_data: *mut c_void, ) -> c_int; @@ -529,10 +525,9 @@ extern "C" { pub fn archive_error_string(arg1: *mut Struct_archive) -> *const c_char; pub fn archive_format_name(arg1: *mut Struct_archive) -> *const c_char; pub fn archive_format(arg1: *mut Struct_archive) -> c_int; - pub fn archive_clear_error(arg1: *mut Struct_archive) -> (); - pub fn archive_set_error(arg1: *mut Struct_archive, _err: c_int, fmt: *const c_char, ...) 
- -> (); - pub fn archive_copy_error(dest: *mut Struct_archive, src: *mut Struct_archive) -> (); + pub fn archive_clear_error(arg1: *mut Struct_archive); + pub fn archive_set_error(arg1: *mut Struct_archive, _err: c_int, fmt: *const c_char, ...); + pub fn archive_copy_error(dest: *mut Struct_archive, src: *mut Struct_archive); pub fn archive_file_count(arg1: *mut Struct_archive) -> c_int; pub fn archive_match_new() -> *mut Struct_archive; pub fn archive_match_free(arg1: *mut Struct_archive) -> c_int; @@ -590,7 +585,7 @@ extern "C" { pub fn archive_match_include_time( arg1: *mut Struct_archive, _flag: c_int, - _sec: time_t, + _sec: i64, _nsec: c_long, ) -> c_int; pub fn archive_match_include_date( @@ -630,16 +625,16 @@ extern "C" { pub fn archive_match_include_gname_w(arg1: *mut Struct_archive, arg2: *const wchar_t) -> c_int; pub fn archive_entry_clear(arg1: *mut Struct_archive_entry) -> *mut Struct_archive_entry; pub fn archive_entry_clone(arg1: *mut Struct_archive_entry) -> *mut Struct_archive_entry; - pub fn archive_entry_free(arg1: *mut Struct_archive_entry) -> (); + pub fn archive_entry_free(arg1: *mut Struct_archive_entry); pub fn archive_entry_new() -> *mut Struct_archive_entry; pub fn archive_entry_new2(arg1: *mut Struct_archive) -> *mut Struct_archive_entry; - pub fn archive_entry_atime(arg1: *mut Struct_archive_entry) -> time_t; + pub fn archive_entry_atime(arg1: *mut Struct_archive_entry) -> i64; pub fn archive_entry_atime_nsec(arg1: *mut Struct_archive_entry) -> c_long; pub fn archive_entry_atime_is_set(arg1: *mut Struct_archive_entry) -> c_int; - pub fn archive_entry_birthtime(arg1: *mut Struct_archive_entry) -> time_t; + pub fn archive_entry_birthtime(arg1: *mut Struct_archive_entry) -> i64; pub fn archive_entry_birthtime_nsec(arg1: *mut Struct_archive_entry) -> c_long; pub fn archive_entry_birthtime_is_set(arg1: *mut Struct_archive_entry) -> c_int; - pub fn archive_entry_ctime(arg1: *mut Struct_archive_entry) -> time_t; + pub fn 
archive_entry_ctime(arg1: *mut Struct_archive_entry) -> i64; pub fn archive_entry_ctime_nsec(arg1: *mut Struct_archive_entry) -> c_long; pub fn archive_entry_ctime_is_set(arg1: *mut Struct_archive_entry) -> c_int; pub fn archive_entry_dev(arg1: *mut Struct_archive_entry) -> dev_t; @@ -651,7 +646,7 @@ extern "C" { arg1: *mut Struct_archive_entry, arg2: *mut c_ulong, arg3: *mut c_ulong, - ) -> (); + ); pub fn archive_entry_fflags_text(arg1: *mut Struct_archive_entry) -> *const c_char; pub fn archive_entry_gid(arg1: *mut Struct_archive_entry) -> i64; pub fn archive_entry_gname(arg1: *mut Struct_archive_entry) -> *const c_char; @@ -662,7 +657,7 @@ extern "C" { pub fn archive_entry_ino64(arg1: *mut Struct_archive_entry) -> i64; pub fn archive_entry_ino_is_set(arg1: *mut Struct_archive_entry) -> c_int; pub fn archive_entry_mode(arg1: *mut Struct_archive_entry) -> mode_t; - pub fn archive_entry_mtime(arg1: *mut Struct_archive_entry) -> time_t; + pub fn archive_entry_mtime(arg1: *mut Struct_archive_entry) -> i64; pub fn archive_entry_mtime_nsec(arg1: *mut Struct_archive_entry) -> c_long; pub fn archive_entry_mtime_is_set(arg1: *mut Struct_archive_entry) -> c_int; pub fn archive_entry_nlink(arg1: *mut Struct_archive_entry) -> c_uint; @@ -682,33 +677,17 @@ extern "C" { pub fn archive_entry_uid(arg1: *mut Struct_archive_entry) -> i64; pub fn archive_entry_uname(arg1: *mut Struct_archive_entry) -> *const c_char; pub fn archive_entry_uname_w(arg1: *mut Struct_archive_entry) -> *const wchar_t; - pub fn archive_entry_set_atime( - arg1: *mut Struct_archive_entry, - arg2: time_t, - arg3: c_long, - ) -> (); - pub fn archive_entry_unset_atime(arg1: *mut Struct_archive_entry) -> (); - pub fn archive_entry_set_birthtime( - arg1: *mut Struct_archive_entry, - arg2: time_t, - arg3: c_long, - ) -> (); - pub fn archive_entry_unset_birthtime(arg1: *mut Struct_archive_entry) -> (); - pub fn archive_entry_set_ctime( - arg1: *mut Struct_archive_entry, - arg2: time_t, - arg3: c_long, - ) -> (); 
- pub fn archive_entry_unset_ctime(arg1: *mut Struct_archive_entry) -> (); - pub fn archive_entry_set_dev(arg1: *mut Struct_archive_entry, arg2: dev_t) -> (); - pub fn archive_entry_set_devmajor(arg1: *mut Struct_archive_entry, arg2: dev_t) -> (); - pub fn archive_entry_set_devminor(arg1: *mut Struct_archive_entry, arg2: dev_t) -> (); - pub fn archive_entry_set_filetype(arg1: *mut Struct_archive_entry, arg2: c_uint) -> (); - pub fn archive_entry_set_fflags( - arg1: *mut Struct_archive_entry, - arg2: c_ulong, - arg3: c_ulong, - ) -> (); + pub fn archive_entry_set_atime(arg1: *mut Struct_archive_entry, arg2: i64, arg3: c_long); + pub fn archive_entry_unset_atime(arg1: *mut Struct_archive_entry); + pub fn archive_entry_set_birthtime(arg1: *mut Struct_archive_entry, arg2: i64, arg3: c_long); + pub fn archive_entry_unset_birthtime(arg1: *mut Struct_archive_entry); + pub fn archive_entry_set_ctime(arg1: *mut Struct_archive_entry, arg2: i64, arg3: c_long); + pub fn archive_entry_unset_ctime(arg1: *mut Struct_archive_entry); + pub fn archive_entry_set_dev(arg1: *mut Struct_archive_entry, arg2: dev_t); + pub fn archive_entry_set_devmajor(arg1: *mut Struct_archive_entry, arg2: dev_t); + pub fn archive_entry_set_devminor(arg1: *mut Struct_archive_entry, arg2: dev_t); + pub fn archive_entry_set_filetype(arg1: *mut Struct_archive_entry, arg2: c_uint); + pub fn archive_entry_set_fflags(arg1: *mut Struct_archive_entry, arg2: c_ulong, arg3: c_ulong); pub fn archive_entry_copy_fflags_text( arg1: *mut Struct_archive_entry, arg2: *const c_char, @@ -717,79 +696,60 @@ extern "C" { arg1: *mut Struct_archive_entry, arg2: *const wchar_t, ) -> *const wchar_t; - pub fn archive_entry_set_gid(arg1: *mut Struct_archive_entry, arg2: i64) -> (); - pub fn archive_entry_set_gname(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_gname(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_gname_w(arg1: *mut 
Struct_archive_entry, arg2: *const wchar_t) -> (); + pub fn archive_entry_set_gid(arg1: *mut Struct_archive_entry, arg2: i64); + pub fn archive_entry_set_gname(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_gname(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_gname_w(arg1: *mut Struct_archive_entry, arg2: *const wchar_t); pub fn archive_entry_update_gname_utf8( arg1: *mut Struct_archive_entry, arg2: *const c_char, ) -> c_int; - pub fn archive_entry_set_hardlink(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_hardlink(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_hardlink_w( - arg1: *mut Struct_archive_entry, - arg2: *const wchar_t, - ) -> (); + pub fn archive_entry_set_hardlink(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_hardlink(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_hardlink_w(arg1: *mut Struct_archive_entry, arg2: *const wchar_t); pub fn archive_entry_update_hardlink_utf8( arg1: *mut Struct_archive_entry, arg2: *const c_char, ) -> c_int; - pub fn archive_entry_set_ino(arg1: *mut Struct_archive_entry, arg2: i64) -> (); - pub fn archive_entry_set_ino64(arg1: *mut Struct_archive_entry, arg2: i64) -> (); - pub fn archive_entry_set_link(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_link(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_link_w(arg1: *mut Struct_archive_entry, arg2: *const wchar_t) -> (); + pub fn archive_entry_set_ino(arg1: *mut Struct_archive_entry, arg2: i64); + pub fn archive_entry_set_ino64(arg1: *mut Struct_archive_entry, arg2: i64); + pub fn archive_entry_set_link(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_link(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn 
archive_entry_copy_link_w(arg1: *mut Struct_archive_entry, arg2: *const wchar_t); pub fn archive_entry_update_link_utf8( arg1: *mut Struct_archive_entry, arg2: *const c_char, ) -> c_int; - pub fn archive_entry_set_mode(arg1: *mut Struct_archive_entry, arg2: mode_t) -> (); - pub fn archive_entry_set_mtime( - arg1: *mut Struct_archive_entry, - arg2: time_t, - arg3: c_long, - ) -> (); - pub fn archive_entry_unset_mtime(arg1: *mut Struct_archive_entry) -> (); - pub fn archive_entry_set_nlink(arg1: *mut Struct_archive_entry, arg2: c_uint) -> (); - pub fn archive_entry_set_pathname(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_pathname(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_pathname_w( - arg1: *mut Struct_archive_entry, - arg2: *const wchar_t, - ) -> (); + pub fn archive_entry_set_mode(arg1: *mut Struct_archive_entry, arg2: mode_t); + pub fn archive_entry_set_mtime(arg1: *mut Struct_archive_entry, arg2: i64, arg3: c_long); + pub fn archive_entry_unset_mtime(arg1: *mut Struct_archive_entry); + pub fn archive_entry_set_nlink(arg1: *mut Struct_archive_entry, arg2: c_uint); + pub fn archive_entry_set_pathname(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_pathname(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_pathname_w(arg1: *mut Struct_archive_entry, arg2: *const wchar_t); pub fn archive_entry_update_pathname_utf8( arg1: *mut Struct_archive_entry, arg2: *const c_char, ) -> c_int; - pub fn archive_entry_set_perm(arg1: *mut Struct_archive_entry, arg2: mode_t) -> (); - pub fn archive_entry_set_rdev(arg1: *mut Struct_archive_entry, arg2: dev_t) -> (); - pub fn archive_entry_set_rdevmajor(arg1: *mut Struct_archive_entry, arg2: dev_t) -> (); - pub fn archive_entry_set_rdevminor(arg1: *mut Struct_archive_entry, arg2: dev_t) -> (); - pub fn archive_entry_set_size(arg1: *mut Struct_archive_entry, arg2: i64) -> (); - 
pub fn archive_entry_unset_size(arg1: *mut Struct_archive_entry) -> (); - pub fn archive_entry_copy_sourcepath( - arg1: *mut Struct_archive_entry, - arg2: *const c_char, - ) -> (); - pub fn archive_entry_copy_sourcepath_w( - arg1: *mut Struct_archive_entry, - arg2: *const wchar_t, - ) -> (); - pub fn archive_entry_set_symlink(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_symlink(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_symlink_w( - arg1: *mut Struct_archive_entry, - arg2: *const wchar_t, - ) -> (); + pub fn archive_entry_set_perm(arg1: *mut Struct_archive_entry, arg2: mode_t); + pub fn archive_entry_set_rdev(arg1: *mut Struct_archive_entry, arg2: dev_t); + pub fn archive_entry_set_rdevmajor(arg1: *mut Struct_archive_entry, arg2: dev_t); + pub fn archive_entry_set_rdevminor(arg1: *mut Struct_archive_entry, arg2: dev_t); + pub fn archive_entry_set_size(arg1: *mut Struct_archive_entry, arg2: i64); + pub fn archive_entry_unset_size(arg1: *mut Struct_archive_entry); + pub fn archive_entry_copy_sourcepath(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_sourcepath_w(arg1: *mut Struct_archive_entry, arg2: *const wchar_t); + pub fn archive_entry_set_symlink(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_symlink(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_symlink_w(arg1: *mut Struct_archive_entry, arg2: *const wchar_t); pub fn archive_entry_update_symlink_utf8( arg1: *mut Struct_archive_entry, arg2: *const c_char, ) -> c_int; - pub fn archive_entry_set_uid(arg1: *mut Struct_archive_entry, arg2: i64) -> (); - pub fn archive_entry_set_uname(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_uname(arg1: *mut Struct_archive_entry, arg2: *const c_char) -> (); - pub fn archive_entry_copy_uname_w(arg1: *mut Struct_archive_entry, arg2: *const 
wchar_t) -> (); + pub fn archive_entry_set_uid(arg1: *mut Struct_archive_entry, arg2: i64); + pub fn archive_entry_set_uname(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_uname(arg1: *mut Struct_archive_entry, arg2: *const c_char); + pub fn archive_entry_copy_uname_w(arg1: *mut Struct_archive_entry, arg2: *const wchar_t); pub fn archive_entry_update_uname_utf8( arg1: *mut Struct_archive_entry, arg2: *const c_char, @@ -797,7 +757,7 @@ extern "C" { // pub fn archive_entry_stat(arg1: *mut Struct_archive_entry) -> *const Struct_stat; // pub fn archive_entry_copy_stat(arg1: *mut Struct_archive_entry, // arg2: *const Struct_stat) - // -> (); + // ; pub fn archive_entry_mac_metadata( arg1: *mut Struct_archive_entry, arg2: *mut size_t, @@ -806,8 +766,8 @@ extern "C" { arg1: *mut Struct_archive_entry, arg2: *const c_void, arg3: size_t, - ) -> (); - pub fn archive_entry_acl_clear(arg1: *mut Struct_archive_entry) -> (); + ); + pub fn archive_entry_acl_clear(arg1: *mut Struct_archive_entry); pub fn archive_entry_acl_add_entry( arg1: *mut Struct_archive_entry, arg2: c_int, @@ -848,13 +808,13 @@ extern "C" { pub fn archive_entry_acl_text(arg1: *mut Struct_archive_entry, arg2: c_int) -> *const c_char; pub fn archive_entry_acl_count(arg1: *mut Struct_archive_entry, arg2: c_int) -> c_int; pub fn archive_entry_acl(arg1: *mut Struct_archive_entry) -> *mut Struct_archive_acl; - pub fn archive_entry_xattr_clear(arg1: *mut Struct_archive_entry) -> (); + pub fn archive_entry_xattr_clear(arg1: *mut Struct_archive_entry); pub fn archive_entry_xattr_add_entry( arg1: *mut Struct_archive_entry, arg2: *const c_char, arg3: *const c_void, arg4: size_t, - ) -> (); + ); pub fn archive_entry_xattr_count(arg1: *mut Struct_archive_entry) -> c_int; pub fn archive_entry_xattr_reset(arg1: *mut Struct_archive_entry) -> c_int; pub fn archive_entry_xattr_next( @@ -863,12 +823,8 @@ extern "C" { arg3: *mut *const c_void, arg4: *mut size_t, ) -> c_int; - pub fn 
archive_entry_sparse_clear(arg1: *mut Struct_archive_entry) -> (); - pub fn archive_entry_sparse_add_entry( - arg1: *mut Struct_archive_entry, - arg2: i64, - arg3: i64, - ) -> (); + pub fn archive_entry_sparse_clear(arg1: *mut Struct_archive_entry); + pub fn archive_entry_sparse_add_entry(arg1: *mut Struct_archive_entry, arg2: i64, arg3: i64); pub fn archive_entry_sparse_count(arg1: *mut Struct_archive_entry) -> c_int; pub fn archive_entry_sparse_reset(arg1: *mut Struct_archive_entry) -> c_int; pub fn archive_entry_sparse_next( @@ -880,13 +836,13 @@ extern "C" { pub fn archive_entry_linkresolver_set_strategy( arg1: *mut Struct_archive_entry_linkresolver, arg2: c_int, - ) -> (); - pub fn archive_entry_linkresolver_free(arg1: *mut Struct_archive_entry_linkresolver) -> (); + ); + pub fn archive_entry_linkresolver_free(arg1: *mut Struct_archive_entry_linkresolver); pub fn archive_entry_linkify( arg1: *mut Struct_archive_entry_linkresolver, arg2: *mut *mut Struct_archive_entry, arg3: *mut *mut Struct_archive_entry, - ) -> (); + ); pub fn archive_entry_partial_links( res: *mut Struct_archive_entry_linkresolver, links: *mut c_uint, diff --git a/server/src/db/mod.rs b/server/src/db/mod.rs index a1b7476..2b03fb1 100644 --- a/server/src/db/mod.rs +++ b/server/src/db/mod.rs @@ -88,7 +88,7 @@ pub async fn connect(conn: &DbConfig) -> crate::Result { } => { let mut url = format!("postgres://{}:{}@{}:{}/{}", user, password, host, port, db); - if schema != "" { + if !schema.is_empty() { url = format!("{url}?currentSchema={schema}"); } diff --git a/server/src/main.rs b/server/src/main.rs index c641666..7118da5 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -16,8 +16,8 @@ use sea_orm_migration::MigratorTrait; use tokio::runtime; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; -pub const ANY_ARCH: &'static str = "any"; -pub const PKG_FILENAME_REGEX: &'static str = 
"^([a-z0-9@._+-]+)-((?:[0-9]+:)?[a-zA-Z0-9@._+]+-[0-9]+)-([a-zA-z0-9_]+).pkg.tar.([a-zA-Z0-9]+)$"; +pub const ANY_ARCH: &str = "any"; +pub const PKG_FILENAME_REGEX: &str = "^([a-z0-9@._+-]+)-((?:[0-9]+:)?[a-zA-Z0-9@._+]+-[0-9]+)-([a-zA-z0-9_]+).pkg.tar.([a-zA-Z0-9]+)$"; #[derive(Clone)] pub struct Global { diff --git a/server/src/repo/archive.rs b/server/src/repo/archive.rs index 2844b90..18e0801 100644 --- a/server/src/repo/archive.rs +++ b/server/src/repo/archive.rs @@ -216,8 +216,8 @@ impl RepoArchivesWriter { self.ar_db.close()?; self.ar_files.close()?; - let _ = std::fs::remove_file(&self.tmp_paths[0])?; - let _ = std::fs::remove_file(&self.tmp_paths[1])?; + let _ = std::fs::remove_file(&self.tmp_paths[0]); + let _ = std::fs::remove_file(&self.tmp_paths[1]); Ok(()) } diff --git a/server/src/repo/mod.rs b/server/src/repo/mod.rs index 9920326..25325c6 100644 --- a/server/src/repo/mod.rs +++ b/server/src/repo/mod.rs @@ -29,12 +29,14 @@ pub enum Command { Clean, } +type RepoState = (AtomicU32, Arc>); + pub struct SharedState { pub repos_dir: PathBuf, pub conn: DbConn, pub rx: Mutex>, pub tx: UnboundedSender, - pub repos: RwLock>)>>, + pub repos: RwLock>, } impl SharedState { diff --git a/server/src/repo/package.rs b/server/src/repo/package.rs index e8bb076..103a521 100644 --- a/server/src/repo/package.rs +++ b/server/src/repo/package.rs @@ -48,18 +48,18 @@ pub struct PkgInfo { } #[derive(Debug, PartialEq, Eq)] -pub enum ParsePkgInfoError { - InvalidSize, - InvalidBuildDate, - InvalidPgpSigSize, +pub enum InvalidPkgInfoError { + Size, + BuildDate, + PgpSigSize, } -impl fmt::Display for ParsePkgInfoError { +impl fmt::Display for InvalidPkgInfoError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let s = match self { - Self::InvalidSize => "invalid size", - Self::InvalidBuildDate => "invalid build date", - Self::InvalidPgpSigSize => "invalid pgp sig size", + Self::Size => "invalid size", + Self::BuildDate => "invalid build date", + Self::PgpSigSize => 
"invalid pgp sig size", }; write!(f, "{}", s) @@ -67,7 +67,7 @@ impl fmt::Display for ParsePkgInfoError { } impl PkgInfo { - pub fn extend>(&mut self, line: S) -> Result<(), ParsePkgInfoError> { + pub fn extend>(&mut self, line: S) -> Result<(), InvalidPkgInfoError> { let line = line.as_ref(); if !line.starts_with('#') { @@ -77,26 +77,21 @@ impl PkgInfo { "pkgbase" => self.base = value.to_string(), "pkgver" => self.version = value.to_string(), "pkgdesc" => self.description = Some(value.to_string()), - "size" => { - self.size = value.parse().map_err(|_| ParsePkgInfoError::InvalidSize)? - } + "size" => self.size = value.parse().map_err(|_| InvalidPkgInfoError::Size)?, "url" => self.url = Some(value.to_string()), "arch" => self.arch = value.to_string(), "builddate" => { - let seconds: i64 = value - .parse() - .map_err(|_| ParsePkgInfoError::InvalidBuildDate)?; - self.build_date = NaiveDateTime::from_timestamp_millis(seconds * 1000) - .ok_or(ParsePkgInfoError::InvalidBuildDate)? + let seconds: i64 = + value.parse().map_err(|_| InvalidPkgInfoError::BuildDate)?; + self.build_date = chrono::DateTime::from_timestamp_millis(seconds * 1000) + .ok_or(InvalidPkgInfoError::BuildDate)? + .naive_utc(); } "packager" => self.packager = Some(value.to_string()), "pgpsig" => self.pgpsig = Some(value.to_string()), "pgpsigsize" => { - self.pgpsigsize = Some( - value - .parse() - .map_err(|_| ParsePkgInfoError::InvalidPgpSigSize)?, - ) + self.pgpsigsize = + Some(value.parse().map_err(|_| InvalidPkgInfoError::PgpSigSize)?) 
} "group" => self.groups.push(value.to_string()), "license" => self.licenses.push(value.to_string()), diff --git a/server/src/web/repo.rs b/server/src/web/repo.rs index c3bfe6b..e1bc61a 100644 --- a/server/src/web/repo.rs +++ b/server/src/web/repo.rs @@ -131,8 +131,8 @@ async fn delete_arch_repo( } async fn delete_package( - State(global): State, - Path((distro, repo, arch, pkg_name)): Path<(String, String, String, String)>, + State(_global): State, + Path((_distro, _repo, _arch, _pkg_name)): Path<(String, String, String, String)>, ) -> crate::Result { Ok(StatusCode::NOT_FOUND) //if let Some(mgr) = global.mgr.get_mgr(&distro).await { From 6246108f33452680c36ba4b35d1661eb54221ca7 Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Mon, 8 Jul 2024 22:54:47 +0200 Subject: [PATCH 69/73] feat(ci): add static binary builds --- .woodpecker/build-rel.yml | 42 +++++++++++++++++++++++++++++++++++++++ .woodpecker/build.yml | 2 +- .woodpecker/docker.yml | 11 +++++++--- Dockerfile | 37 ++++++++++------------------------ 4 files changed, 61 insertions(+), 31 deletions(-) create mode 100644 .woodpecker/build-rel.yml diff --git a/.woodpecker/build-rel.yml b/.woodpecker/build-rel.yml new file mode 100644 index 0000000..b4b7067 --- /dev/null +++ b/.woodpecker/build-rel.yml @@ -0,0 +1,42 @@ +matrix: + PLATFORM: + - 'linux/amd64' + +platform: ${PLATFORM} + +when: + branch: [main, dev] + event: [push, tag] + +steps: + build: + image: 'git.rustybever.be/chewing_bever/rieter-builder:1.79-alpine3.19' + commands: + - cargo build --verbose --release + - '[ "$(readelf -d target/release/rieterd | grep NEEDED | wc -l)" = 0 ]' + + publish-dev: + image: 'git.rustybever.be/chewing_bever/rieter-builder:1.79-alpine3.19' + commands: + - apk add --no-cache minio-client + - mcli alias set rb 'https://s3.rustybever.be' "$MINIO_ACCESS_KEY" "$MINIO_SECRET_KEY" + - mcli cp target/release/rieterd "rb/rieter/commits/$CI_COMMIT_SHA/rieterd-$(echo '${PLATFORM}' | sed 's:/:-:g')" + secrets: + - minio_access_key + 
- minio_secret_key + when: + branch: dev + event: push + + publish-rel: + image: 'curlimages/curl' + commands: + - > + curl -s --fail + --user "Chewing_Bever:$GITEA_PASSWORD" + --upload-file target/release/rieterd + https://git.rustybever.be/api/packages/Chewing_Bever/generic/rieter/"${CI_COMMIT_TAG}"/rieterd-"$(echo '${PLATFORM}' | sed 's:/:-:g')" + secrets: + - gitea_password + when: + event: tag diff --git a/.woodpecker/build.yml b/.woodpecker/build.yml index e302f64..f179f00 100644 --- a/.woodpecker/build.yml +++ b/.woodpecker/build.yml @@ -2,7 +2,7 @@ platform: 'linux/amd64' when: branch: - exclude: [main] + exclude: [dev, main] event: push steps: diff --git a/.woodpecker/docker.yml b/.woodpecker/docker.yml index 0bcdf4a..214df4b 100644 --- a/.woodpecker/docker.yml +++ b/.woodpecker/docker.yml @@ -1,11 +1,11 @@ platform: 'linux/amd64' when: - branch: dev - event: push + branch: [main, dev] + event: [push, tag] depends_on: - - build + - build-rel steps: dev: @@ -19,4 +19,9 @@ steps: tags: - 'dev' platforms: [ 'linux/amd64' ] + build_args_from_env: + - 'CI_COMMIT_SHA' mtu: 1300 + when: + branch: dev + event: push diff --git a/Dockerfile b/Dockerfile index 88b51a8..24031b3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,16 @@ -FROM rust:1.70-alpine3.18 AS builder +FROM git.rustybever.be/chewing_bever/rieter-builder:1.79-alpine3.19 AS builder +ARG TARGETPLATFORM +ARG CI_COMMIT_SHA ARG DI_VER=1.2.5 WORKDIR /app RUN apk add --no-cache \ - build-base \ curl \ make \ unzip \ - pkgconf \ - libarchive libarchive-dev + pkgconf # Build dumb-init RUN curl -Lo - "https://github.com/Yelp/dumb-init/archive/refs/tags/v${DI_VER}.tar.gz" | tar -xzf - && \ @@ -21,33 +21,16 @@ RUN curl -Lo - "https://github.com/Yelp/dumb-init/archive/refs/tags/v${DI_VER}.t COPY . . 
-# ENV LIBARCHIVE_STATIC=1 \ -# LIBARCHIVE_LIB_DIR=/usr/lib \ -# LIBARCHIVE_INCLUDE_DIR=/usr/include \ -# LIBARCHIVE_LDFLAGS='-lssl -lcrypto -L/lib -lz -lbz2 -llzma -lexpat -lzstd -llz4' - # LIBARCHIVE_LDFLAGS='-L/usr/lib -lz -lbz2 -llzma -lexpat -lzstd -llz4 -lsqlite3' - -# https://users.rust-lang.org/t/sigsegv-with-program-linked-against-openssl-in-an-alpine-container/52172 -ENV RUSTFLAGS='-C target-feature=-crt-static' - -RUN cargo build --release && \ - du -h target/release/rieterd && \ - readelf -d target/release/rieterd && \ - chmod +x target/release/rieterd +RUN curl \ + --fail \ + -o rieterd \ + "https://s3.rustybever.be/rieter/commits/${CI_COMMIT_SHA}/rieterd-$(echo "${TARGETPLATFORM}" | sed 's:/:-:g')" -FROM alpine:3.18 - -RUN apk add --no-cache \ - libgcc \ - libarchive \ - openssl +FROM alpine:3.19 COPY --from=builder /app/dumb-init /bin/dumb-init -COPY --from=builder /app/target/release/rieterd /bin/rieterd - -ENV RIETER_PKG_DIR=/data/pkgs \ - RIETER_DATA_DIR=/data +COPY --from=builder /app/rieterd /bin/rieterd WORKDIR /data From 04715b00364065d9658fc0d04c1eb56a961599bd Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Tue, 9 Jul 2024 17:46:38 +0200 Subject: [PATCH 70/73] chore: chmod binary in dockerfile --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 24031b3..e1e5a96 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,7 +24,8 @@ COPY . . 
RUN curl \ --fail \ -o rieterd \ - "https://s3.rustybever.be/rieter/commits/${CI_COMMIT_SHA}/rieterd-$(echo "${TARGETPLATFORM}" | sed 's:/:-:g')" + "https://s3.rustybever.be/rieter/commits/${CI_COMMIT_SHA}/rieterd-$(echo "${TARGETPLATFORM}" | sed 's:/:-:g')" && \ + chmod +x rieterd FROM alpine:3.19 From 777d57512e2da821754c3203e7140df9aa8f956c Mon Sep 17 00:00:00 2001 From: Chewing_Bever Date: Tue, 9 Jul 2024 20:44:31 +0200 Subject: [PATCH 71/73] chore(repo): remove package removal route for now --- CHANGELOG.md | 18 ++++++++---------- server/src/web/repo.rs | 34 ++-------------------------------- 2 files changed, 10 insertions(+), 42 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d9f4871..63ec9e4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,13 +9,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added -* Server - * Functional repository server - * Serve packages from any number of repositories & architectures - * Publish packages to and delete packages from repositories using HTTP - requests - * Packages of architecture "any" are part of every architecture's - database - * Bearer authentication for private routes - * REST API - * Repository & package information available using JSON REST API +* Functional repository server + * Supports any number of repositories, grouped into distros, each + supporting any number of architectures + * Repository & package information available using JSON REST API + * Queueing system with configurable number of workers for resilient + concurrency +* TOML configuration file +* SQLite & Postgres support diff --git a/server/src/web/repo.rs b/server/src/web/repo.rs index e1bc61a..84d80ca 100644 --- a/server/src/web/repo.rs +++ b/server/src/web/repo.rs @@ -5,7 +5,7 @@ use axum::{ extract::{Path, State}, http::{Request, StatusCode}, response::IntoResponse, - routing::{delete, post}, + routing::{delete, get, post}, Router, }; use futures::TryStreamExt; @@ -27,12 +27,7 @@ pub fn 
router(api_key: &str) -> Router { ) // Routes added after the layer do not get that layer applied, so the GET requests will not // be authorized - .route( - "/:distro/:repo/:arch/:filename", - delete(delete_package) - .route_layer(ValidateRequestHeaderLayer::bearer(api_key)) - .get(get_file), - ) + .route("/:distro/:repo/:arch/:filename", get(get_file)) } /// Serve the package archive files and database archives. If files are requested for an @@ -129,28 +124,3 @@ async fn delete_arch_repo( Ok(StatusCode::NOT_FOUND) } } - -async fn delete_package( - State(_global): State, - Path((_distro, _repo, _arch, _pkg_name)): Path<(String, String, String, String)>, -) -> crate::Result { - Ok(StatusCode::NOT_FOUND) - //if let Some(mgr) = global.mgr.get_mgr(&distro).await { - // let pkg_removed = mgr.remove_pkg(&repo, &arch, &pkg_name).await?; - // - // if pkg_removed { - // tracing::info!( - // "Removed package '{}' ({}) from repository '{}'", - // pkg_name, - // arch, - // repo - // ); - // - // Ok(StatusCode::OK) - // } else { - // Ok(StatusCode::NOT_FOUND) - // } - //} else { - // Ok(StatusCode::NOT_FOUND) - //} -} From 2c4b9e545292f9665436c3869b82142e0f7efa9e Mon Sep 17 00:00:00 2001 From: Jef Roosens Date: Tue, 9 Jul 2024 20:58:25 +0200 Subject: [PATCH 72/73] feat(ci): add release docker build --- .woodpecker/docker.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/.woodpecker/docker.yml b/.woodpecker/docker.yml index 214df4b..edba80c 100644 --- a/.woodpecker/docker.yml +++ b/.woodpecker/docker.yml @@ -25,3 +25,19 @@ steps: when: branch: dev event: push + + release: + image: 'woodpeckerci/plugin-docker-buildx' + secrets: + - 'docker_username' + - 'docker_password' + settings: + registry: 'git.rustybever.be' + repo: 'git.rustybever.be/chewing_bever/rieter' + auto_tag: true + platforms: [ 'linux/amd64' ] + build_args_from_env: + - 'CI_COMMIT_SHA' + mtu: 1300 + when: + event: tag From fbdb182f50410984c2199e00a0fd485467672373 Mon Sep 17 00:00:00 2001 
From: Jef Roosens Date: Tue, 9 Jul 2024 21:02:07 +0200 Subject: [PATCH 73/73] chore: update changelog for 0.1.0 --- .woodpecker/build-rel.yml | 3 --- CHANGELOG.md | 2 ++ 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.woodpecker/build-rel.yml b/.woodpecker/build-rel.yml index b4b7067..faadd85 100644 --- a/.woodpecker/build-rel.yml +++ b/.woodpecker/build-rel.yml @@ -24,9 +24,6 @@ steps: secrets: - minio_access_key - minio_secret_key - when: - branch: dev - event: push publish-rel: image: 'curlimages/curl' diff --git a/CHANGELOG.md b/CHANGELOG.md index 63ec9e4..79a40d9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased](https://git.rustybever.be/Chewing_Bever/rieter/src/branch/dev) +## [0.1.0](https://git.rustybever.be/Chewing_Bever/rieter/src/tag/0.1.0) + ### Added * Functional repository server