feat(server): update database when publishing packages
parent f706b72b7c
commit 7c6f485ea6

@@ -45,10 +45,10 @@ impl Cli {
     pub async fn run(&self) {
         self.init_tracing();
 
-        // let db = crate::db::init("sqlite://test.db").await.unwrap();
-        let db = crate::db::init("postgres://rieter:rieter@localhost:5432/rieter")
-            .await
-            .unwrap();
+        let db = crate::db::init("sqlite://test.db").await.unwrap();
+        // let db = crate::db::init("postgres://rieter:rieter@localhost:5432/rieter")
+        //     .await
+        //     .unwrap();
 
         let config = Config {
             repo_dir: self.repo_dir.clone(),

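Note: the body of crate::db::init is not part of this diff. As a rough sketch of what a sea_orm-based initializer for these connection strings could look like (the function body below is an assumption, not the project's actual code):

    use sea_orm::{Database, DatabaseConnection, DbErr};

    // Hypothetical sketch: open a connection for a URL such as
    // "sqlite://test.db" or "postgres://user:pass@host:5432/db".
    pub async fn init(url: &str) -> Result<DatabaseConnection, DbErr> {
        let conn = Database::connect(url).await?;
        // A real implementation would likely also run migrations here.
        Ok(conn)
    }
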
@@ -17,7 +17,7 @@ pub struct Model {
     pub c_size: i64,
     pub description: Option<String>,
     pub url: Option<String>,
-    pub build_date: Option<DateTime>,
+    pub build_date: DateTime,
     pub packager: Option<String>,
     pub pgp_sig: Option<String>,
     pub pgp_sig_size: Option<i64>,

@@ -10,7 +10,6 @@ pub type Result<T> = std::result::Result<T, ServerError>;
 pub enum ServerError {
     IO(io::Error),
     Axum(axum::Error),
-    Status(StatusCode),
     Db(sea_orm::DbErr),
     Status(StatusCode),
 }

@@ -75,9 +74,3 @@ impl From<sea_orm::DbErr> for ServerError {
         ServerError::Db(err)
     }
 }
-
-impl From<StatusCode> for ServerError {
-    fn from(status: StatusCode) -> Self {
-        ServerError::Status(status)
-    }
-}

@@ -1,6 +1,6 @@
 mod api;
 mod cli;
-mod db;
+pub mod db;
 mod error;
 mod repo;
 

@@ -104,9 +104,12 @@ impl RepoGroupManager {
         Ok(())
     }
 
-    pub fn add_pkg_from_path<P: AsRef<Path>>(&mut self, repo: &str, path: P) -> io::Result<()> {
-        let mut pkg = Package::open(&path)?;
-        pkg.calculate_checksum()?;
+    pub fn add_pkg_from_path<P: AsRef<Path>>(
+        &mut self,
+        repo: &str,
+        path: P,
+    ) -> io::Result<Package> {
+        let pkg = Package::open(&path)?;
 
         self.add_pkg(repo, &pkg)?;
 

@@ -118,7 +121,9 @@ impl RepoGroupManager {
             .join(pkg.file_name());
 
         fs::create_dir_all(dest_pkg_path.parent().unwrap())?;
-        fs::rename(&path, dest_pkg_path)
+        fs::rename(&path, dest_pkg_path)?;
+
+        Ok(pkg)
     }
 
     /// Add a package to the given repo, returning to what architectures the package was added.

@@ -4,6 +4,7 @@ mod package;
 pub use manager::RepoGroupManager;
 
 use axum::body::Body;
+use crate::db::entities::{package as db_package, repo as db_repo};
 use axum::extract::{BodyStream, Path, State};
 use axum::http::Request;
 use axum::http::StatusCode;

@@ -11,6 +12,7 @@ use axum::response::IntoResponse;
 use axum::routing::{delete, post};
 use axum::Router;
 use futures::StreamExt;
+use sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, QueryFilter};
 use std::sync::Arc;
 use tokio::{fs, io::AsyncWriteExt};
 use tower::util::ServiceExt;

@@ -51,9 +53,39 @@ async fn post_package_archive(
     // Remove the downloaded file if the adding failed
     if res.is_err() {
         let _ = tokio::fs::remove_file(path).await;
+
+        return res;
     }
 
-    Ok(res?)
+    let pkg = res.unwrap();
+
+    // Query the repo for its ID, or create it if it does not already exist
+    let repo_entity = db_repo::Entity::find()
+        .filter(db_repo::Column::Name.eq(&repo))
+        .one(&global.db)
+        .await?;
+
+    let repo_id = if let Some(repo_entity) = repo_entity {
+        repo_entity.id
+    } else {
+        let model = db_repo::ActiveModel {
+            name: sea_orm::Set(repo.clone()),
+            ..Default::default()
+        };
+
+        db_repo::Entity::insert(model)
+            .exec(&global.db)
+            .await?
+            .last_insert_id
+    };
+
+    // Insert the package's data into the database
+    let mut model: db_package::ActiveModel = pkg.into();
+    model.repo_id = sea_orm::Set(repo_id);
+
+    model.insert(&global.db).await?;
+
+    Ok(())
 }
 
 /// Serve the package archive files and database archives. If files are requested for an

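The database work added above is a "find or insert" lookup on the repo row followed by an insert of the package row. The same lookup can be read in isolation as the sketch below; the helper name, its i32 key type, and the DbErr return type are illustrative assumptions, while the entity and column names come from the diff:

    use sea_orm::{ColumnTrait, DatabaseConnection, DbErr, EntityTrait, QueryFilter, Set};

    use crate::db::entities::repo as db_repo;

    // Hypothetical helper: return the id of the repo named `name`,
    // inserting a new row first if none exists yet.
    async fn get_or_create_repo_id(db: &DatabaseConnection, name: &str) -> Result<i32, DbErr> {
        if let Some(repo) = db_repo::Entity::find()
            .filter(db_repo::Column::Name.eq(name))
            .one(db)
            .await?
        {
            return Ok(repo.id);
        }

        let model = db_repo::ActiveModel {
            name: Set(name.to_string()),
            ..Default::default()
        };

        Ok(db_repo::Entity::insert(model).exec(db).await?.last_insert_id)
    }
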
@@ -1,11 +1,14 @@
 use chrono::NaiveDateTime;
 use libarchive::read::{Archive, Builder};
 use libarchive::{Entry, ReadFilter};
+use sea_orm::ActiveValue::Set;
 use std::fmt;
 use std::fs;
 use std::io::{self, BufRead, BufReader, BufWriter, Read, Write};
 use std::path::{Path, PathBuf};
 
+use crate::db::entities::package;
+
 const IGNORED_FILES: [&str; 5] = [".BUILDINFO", ".INSTALL", ".MTREE", ".PKGINFO", ".CHANGELOG"];
 
 #[derive(Debug)]

@@ -39,7 +42,7 @@ pub struct PkgInfo {
     pub optdepends: Vec<String>,
     pub makedepends: Vec<String>,
     pub checkdepends: Vec<String>,
-    pub sha256sum: Option<String>,
+    pub sha256sum: String,
 }
 
 #[derive(Debug, PartialEq, Eq)]

@@ -163,6 +166,7 @@ impl Package {
         if let Some(mut info) = info {
             // I'll take my chances on a file size fitting in an i64
             info.csize = fs::metadata(path.as_ref())?.len().try_into().unwrap();
+            info.sha256sum = sha256::try_digest(path.as_ref())?;
 
             Ok(Package {
                 path: path.as_ref().to_path_buf(),

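For reference, sha256::try_digest hashes the file at the given path and yields the hex digest as a String; its error converts into io::Error, which is what lets the plain ? work inside Package::open. A minimal standalone illustration (the file name is invented):

    use std::io;
    use std::path::Path;

    fn main() -> io::Result<()> {
        // Hypothetical archive path, for illustration only.
        let digest = sha256::try_digest(Path::new("archive.pkg.tar.zst"))?;
        println!("sha256: {digest}");
        Ok(())
    }
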
@@ -178,12 +182,6 @@ impl Package {
         }
     }
 
-    pub fn calculate_checksum(&mut self) -> io::Result<()> {
-        self.info.sha256sum = Some(sha256::try_digest(self.path.as_ref())?);
-
-        Ok(())
-    }
-
     pub fn full_name(&self) -> String {
         format!(
             "{}-{}-{}",

@@ -230,9 +228,7 @@ impl Package {
         write("CSIZE", &info.csize.to_string())?;
         write("ISIZE", &info.size.to_string())?;
 
-        if let Some(checksum) = &info.sha256sum {
-            write("SHA256SUM", checksum)?;
-        }
+        write("SHA256SUM", &info.sha256sum)?;
 
         if let Some(ref url) = info.url {
             write("URL", url)?;

@@ -240,7 +236,7 @@
 
         write("LICENSE", &info.licenses.join("\n"))?;
         write("ARCH", &info.arch)?;
-        write("BUILDDATE", &info.build_date.to_string())?;
+        write("BUILDDATE", &info.build_date.timestamp().to_string())?;
 
         if let Some(ref packager) = info.packager {
             write("PACKAGER", packager)?;

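The BUILDDATE field in a pacman desc entry holds Unix epoch seconds, which is what the switch to timestamp() produces instead of chrono's default Display formatting. A small illustration of the difference (the date value is invented):

    use chrono::NaiveDateTime;

    fn main() {
        // Hypothetical build date, for illustration only.
        let build_date =
            NaiveDateTime::parse_from_str("2023-07-31 12:00:00", "%Y-%m-%d %H:%M:%S").unwrap();

        assert_eq!(build_date.to_string(), "2023-07-31 12:00:00"); // previous output
        assert_eq!(build_date.timestamp(), 1690804800); // epoch seconds, as desc expects
    }
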
@@ -271,3 +267,26 @@ impl Package {
         Ok(())
     }
 }
+
+impl From<Package> for package::ActiveModel {
+    fn from(pkg: Package) -> Self {
+        let info = pkg.info;
+
+        package::ActiveModel {
+            base: Set(info.base),
+            name: Set(info.name),
+            version: Set(info.version),
+            arch: Set(info.arch),
+            size: Set(info.size),
+            c_size: Set(info.csize),
+            description: Set(info.description),
+            url: Set(info.url),
+            build_date: Set(info.build_date),
+            packager: Set(info.packager),
+            pgp_sig: Set(info.pgpsig),
+            pgp_sig_size: Set(info.pgpsigsize),
+            sha256_sum: Set(info.sha256sum),
+            ..Default::default()
+        }
+    }
+}