Compare commits

...

2 Commits

Author        SHA1        Message                                          Date
Renovate Bot  83a1b25b88  chore(deps): update rust crate hyper to 0.14.27  2023-08-22 21:05:43 +00:00
    renovate/artifacts: Artifact file update failure
    ci/woodpecker/push/lint: Pipeline was successful
    ci/woodpecker/push/build: Pipeline was successful
Jef Roosens   3b33cba0d4  feat: add some proper error handling             2023-07-25 12:45:29 +02:00
    ci/woodpecker/push/lint: Pipeline was successful
    ci/woodpecker/push/build: Pipeline was successful
5 changed files with 77 additions and 28 deletions


@@ -12,7 +12,7 @@ name = "site"
 
 [dependencies]
 axum = { version = "0.6.18" }
-hyper = { version = "0.14.26" }
+hyper = { version = "0.14.27" }
 tokio = { version = "1.28.0", features = ["full"] }
 tracing = "0.1.37"
 tracing-subscriber = {version = "0.3.17", features = ["env-filter"] }


@@ -1,2 +0,0 @@
-[toolchain]
-channel = "1.69"


@@ -8,6 +8,7 @@ use axum::{
 use flate2::read::GzDecoder;
 use futures_util::TryStreamExt;
 use serde::Deserialize;
+use std::io;
 use tar::Archive;
 use tokio_util::io::StreamReader;
@@ -22,15 +23,15 @@ pub async fn post_deploy(
     Extension(data_dir): Extension<String>,
     Query(params): Query<StaticDirParams>,
     res: BodyStream,
-) -> impl IntoResponse {
+) -> crate::Result<()> {
     // This converts a stream into something that implements AsyncRead, which we can then use to
     // asynchronously write the file to disk
     let mut read =
         StreamReader::new(res.map_err(|axum_err| std::io::Error::new(ErrorKind::Other, axum_err)));
     let uuid = uuid::Uuid::new_v4();
     let file_path = Path::new(&data_dir).join(uuid.as_hyphenated().to_string());
-    let mut file = tokio::fs::File::create(&file_path).await.unwrap();
-    tokio::io::copy(&mut read, &mut file).await;
+    let mut file = tokio::fs::File::create(&file_path).await?;
+    tokio::io::copy(&mut read, &mut file).await?;
 
     // If no dir is provided, we use the default one. Otherwise, use the provided one.
     let static_path = Path::new(&data_dir)
@@ -38,34 +39,30 @@ pub async fn post_deploy(
         .join(params.dir.unwrap_or(DEFAULT_STATIC_SITE.to_string()));
 
     // Make sure the static directory exists
-    tokio::fs::create_dir_all(&static_path).await;
+    tokio::fs::create_dir_all(&static_path).await?;
 
     let fp_clone = file_path.clone();
     // Extract the contents of the tarball synchronously
-    let res =
-        match tokio::task::spawn_blocking(move || process_archive(&fp_clone, &static_path)).await {
-            Ok(_) => StatusCode::OK,
-            Err(_) => StatusCode::INTERNAL_SERVER_ERROR,
-        };
+    tokio::task::spawn_blocking(move || process_archive(&fp_clone, &static_path)).await??;
 
     // Remove archive file after use
-    tokio::fs::remove_file(&file_path).await;
+    tokio::fs::remove_file(&file_path).await?;
 
-    res
+    Ok(())
 }
 
-fn process_archive(archive_path: &Path, static_dir: &Path) -> Result<(), ()> {
-    let file = std::fs::File::open(archive_path).map_err(|_| ())?;
+fn process_archive(archive_path: &Path, static_dir: &Path) -> io::Result<()> {
+    let file = std::fs::File::open(archive_path)?;
     let tar = GzDecoder::new(file);
     let mut archive = Archive::new(tar);
 
     let mut paths = HashSet::new();
-    let entries = archive.entries().map_err(|_| ())?;
+    let entries = archive.entries()?;
 
     // Extract each entry into the output directory
-    for entry_res in entries {
-        let mut entry = entry_res.map_err(|_| ())?;
-        entry.unpack_in(static_dir).map_err(|_| ())?;
+    for entry in entries {
+        let mut entry = entry?;
+        entry.unpack_in(static_dir)?;
 
         if let Ok(path) = entry.path() {
             paths.insert(path.into_owned());
@@ -76,20 +73,20 @@ fn process_archive(archive_path: &Path, static_dir: &Path) -> Result<(), ()> {
     let mut items = vec![];
 
     // Start by populating the vec with the initial files
-    let iter = static_dir.read_dir().map_err(|_| ())?;
+    let iter = static_dir.read_dir()?;
     iter.filter_map(|r| r.ok())
         .for_each(|e| items.push(e.path()));
 
     // As long as there are still items in the vec, we keep going
-    while items.len() > 0 {
+    while !items.is_empty() {
         let item = items.pop().unwrap();
         tracing::debug!("{:?}", item);
 
         if !paths.contains(item.strip_prefix(&static_dir).unwrap()) {
             if item.is_dir() {
-                std::fs::remove_dir_all(item);
+                std::fs::remove_dir_all(item)?;
             } else {
-                std::fs::remove_file(item);
+                std::fs::remove_file(item)?;
             }
         } else if let Ok(iter) = item.read_dir() {
             iter.filter_map(|r| r.ok())
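
Aside, not part of the diff: the double `?` in `spawn_blocking(...).await??` above unwraps two nested Results: the outer one from spawn_blocking (a tokio::task::JoinError if the blocking task panicked or was cancelled), and the inner io::Result returned by process_archive itself. Both error types convert into ServerError through the From impls added in src/error.rs below. A minimal standalone sketch of the same pattern, with an illustrative function name and assuming the crate::Result alias from this changeset:

    use std::io;

    // Illustrative sketch only; mirrors the `.await??` pattern from post_deploy.
    async fn extract_blocking(dir: std::path::PathBuf) -> crate::Result<()> {
        // First `?`: tokio::task::JoinError -> ServerError (via From<JoinError>).
        // Second `?`: io::Error from the closure -> ServerError (via From<io::Error>).
        tokio::task::spawn_blocking(move || -> io::Result<()> {
            std::fs::create_dir_all(&dir)?; // placeholder for the blocking tarball work
            Ok(())
        })
        .await??;

        Ok(())
    }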

src/error.rs (new file, mode 100644, 51 additions)

@@ -0,0 +1,51 @@
+use axum::http::StatusCode;
+use axum::response::{IntoResponse, Response};
+use std::error::Error;
+use std::fmt;
+use std::io;
+
+pub type Result<T> = std::result::Result<T, ServerError>;
+
+#[derive(Debug)]
+pub enum ServerError {
+    IO(io::Error),
+    Axum(axum::Error),
+}
+
+impl fmt::Display for ServerError {
+    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            ServerError::IO(err) => write!(fmt, "{}", err),
+            ServerError::Axum(err) => write!(fmt, "{}", err),
+        }
+    }
+}
+
+impl Error for ServerError {}
+
+impl IntoResponse for ServerError {
+    fn into_response(self) -> Response {
+        match self {
+            ServerError::IO(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
+            ServerError::Axum(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
+        }
+    }
+}
+
+impl From<io::Error> for ServerError {
+    fn from(err: io::Error) -> Self {
+        ServerError::IO(err)
+    }
+}
+
+impl From<axum::Error> for ServerError {
+    fn from(err: axum::Error) -> Self {
+        ServerError::Axum(err)
+    }
+}
+
+impl From<tokio::task::JoinError> for ServerError {
+    fn from(err: tokio::task::JoinError) -> Self {
+        ServerError::IO(err.into())
+    }
+}
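
Aside, not part of the diff: because ServerError implements IntoResponse, and axum implements IntoResponse for Result<T, E> when both T and E do, a handler can now return crate::Result<T> and use `?` instead of `.unwrap()`; any Err is rendered as a bare 500 Internal Server Error. A hypothetical handler showing the pattern (name and path are illustrative, not from this repository):

    use axum::http::StatusCode;

    // Illustrative sketch only: the io::Error from tokio::fs::write is converted
    // into ServerError via From<io::Error>, and axum turns the Err into a 500.
    async fn touch_marker() -> crate::Result<StatusCode> {
        tokio::fs::write("/tmp/marker", b"ok").await?;
        Ok(StatusCode::OK)
    }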


@@ -1,3 +1,10 @@
+mod api;
+mod error;
+mod matrix;
+mod metrics;
+
+pub use error::Result;
+
 use std::{future::ready, net::SocketAddr};
 
 use axum::{
@@ -12,10 +19,6 @@ use tower_http::{
 };
 use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
 
-mod api;
-mod matrix;
-mod metrics;
-
 /// Name of the directory where static sites are stored inside the data directory
 const STATIC_DIR_NAME: &str = "static";
 /// Name of the subdir of STATIC_DIR_NAME where the default (fallback) site is located