feat: update all deps; implement arbitrary path static file hosting

main
Jef Roosens 2025-01-01 19:25:28 +01:00
parent 03bc88e7c3
commit 391e45c09d
No known key found for this signature in database
GPG Key ID: 21FD3D77D56BAF49
6 changed files with 1004 additions and 599 deletions

1312
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -11,18 +11,18 @@ name = "site"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
axum = { version = "0.6.18" }
hyper = { version = "0.14.26" }
tokio = { version = "1.28.0", features = ["full"] }
tracing = "0.1.37"
tracing-subscriber = {version = "0.3.17", features = ["env-filter"] }
tower-http = { version = "0.4.0", features = ["fs", "trace", "auth"] }
tar = "0.4.38"
flate2 = "1.0.26"
tokio-util = { version = "0.7.8", features = ["io"] }
futures-util = "0.3.28"
uuid = { version = "1.3.2", features = ["v4"] }
serde_json = "1.0.96"
metrics = "0.21.0"
metrics-exporter-prometheus = "0.12.0"
axum = "0.8.1"
hyper = { version = "1.5.2" }
tokio = { version = "1.42.0", features = ["full"] }
tracing = "0.1.41"
tracing-subscriber = {version = "0.3.19", features = ["env-filter"] }
tower-http = { version = "0.6.2", features = ["fs", "trace", "auth"] }
tar = "0.4.43"
flate2 = "1.0.35"
tokio-util = { version = "0.7.13", features = ["io"] }
futures-util = "0.3.31"
uuid = { version = "1.11.0", features = ["v4"] }
serde_json = "1.0.134"
metrics = "0.24.1"
metrics-exporter-prometheus = "0.16.0"
serde = { version = "1.0", features = ["derive"] }

View File

@ -1,98 +0,0 @@
use std::{collections::HashSet, io::ErrorKind, path::Path};
use axum::{
extract::{BodyStream, Extension, Query},
http::StatusCode,
response::IntoResponse,
};
use flate2::read::GzDecoder;
use futures_util::TryStreamExt;
use serde::Deserialize;
use std::io;
use tar::Archive;
use tokio_util::io::StreamReader;
use crate::{DEFAULT_STATIC_SITE, STATIC_DIR_NAME};
#[derive(Deserialize)]
pub struct StaticDirParams {
dir: Option<String>,
}
/// Handle `POST /deploy`: stream the request body (a gzipped tarball) to a
/// temporary file inside the data directory, extract it into the target static
/// site directory, and delete the temporary archive afterwards.
///
/// * `data_dir` - root data directory, injected as an axum `Extension`
/// * `params` - optional `dir` query parameter selecting which static site to
///   replace; defaults to `DEFAULT_STATIC_SITE`
/// * `res` - the raw request body stream containing the tar.gz archive
pub async fn post_deploy(
    Extension(data_dir): Extension<String>,
    Query(params): Query<StaticDirParams>,
    res: BodyStream,
) -> crate::Result<()> {
    // This converts a stream into something that implements AsyncRead, which we can then use to
    // asynchronously write the file to disk
    let mut read =
        StreamReader::new(res.map_err(|axum_err| std::io::Error::new(ErrorKind::Other, axum_err)));
    // Random file name so concurrent uploads cannot clobber each other
    let uuid = uuid::Uuid::new_v4();
    let file_path = Path::new(&data_dir).join(uuid.as_hyphenated().to_string());
    let mut file = tokio::fs::File::create(&file_path).await?;
    tokio::io::copy(&mut read, &mut file).await?;
    // If no dir is provided, we use the default one. Otherwise, use the provided one.
    let static_path = Path::new(&data_dir)
        .join(STATIC_DIR_NAME)
        .join(params.dir.unwrap_or(DEFAULT_STATIC_SITE.to_string()));
    // Make sure the static directory exists
    tokio::fs::create_dir_all(&static_path).await?;
    let fp_clone = file_path.clone();
    // Extract the contents of the tarball synchronously; blocking work is moved
    // off the async executor via spawn_blocking, and `??` propagates both the
    // JoinError and the inner io::Error
    tokio::task::spawn_blocking(move || process_archive(&fp_clone, &static_path)).await??;
    // Remove archive file after use
    tokio::fs::remove_file(&file_path).await?;
    Ok(())
}
/// Extract the gzipped tarball at `archive_path` into `static_dir`, then prune
/// any pre-existing files or directories that were not part of the archive.
fn process_archive(archive_path: &Path, static_dir: &Path) -> io::Result<()> {
    let file = std::fs::File::open(archive_path)?;
    let tar = GzDecoder::new(file);
    let mut archive = Archive::new(tar);

    // Unpack every entry, remembering each relative path the archive contained
    // so stale files can be detected afterwards.
    let mut paths = HashSet::new();
    for entry in archive.entries()? {
        let mut entry = entry?;
        entry.unpack_in(static_dir)?;
        if let Ok(path) = entry.path() {
            paths.insert(path.into_owned());
        }
    }

    // Depth-first walk of the output directory: entries whose relative path was
    // in the archive are descended into (so stale children still get cleaned
    // up); everything else is deleted.
    let mut pending: Vec<_> = static_dir
        .read_dir()?
        .filter_map(Result::ok)
        .map(|e| e.path())
        .collect();
    while let Some(current) = pending.pop() {
        tracing::debug!("{:?}", current);
        if paths.contains(current.strip_prefix(&static_dir).unwrap()) {
            if let Ok(children) = current.read_dir() {
                pending.extend(children.filter_map(Result::ok).map(|e| e.path()));
            }
        } else if current.is_dir() {
            std::fs::remove_dir_all(current)?;
        } else {
            std::fs::remove_file(current)?;
        }
    }

    Ok(())
}

View File

@ -1,7 +0,0 @@
use axum::{routing::post, Router};
mod deploy;
/// Build the API sub-router, exposing the deploy endpoint at `POST /deploy`.
pub fn router() -> Router {
    let routes = Router::new();
    routes.route("/deploy", post(deploy::post_deploy))
}

View File

@ -1,9 +1,9 @@
mod api;
mod error;
mod metrics;
// mod metrics;
mod server;
pub use error::Result;
use tokio::net::TcpListener;
use std::{net::SocketAddr, path::PathBuf};
@ -11,6 +11,7 @@ use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
/// Name of the directory where static sites are stored inside the data directory
const STATIC_DIR_NAME: &str = "static";
const STATIC_ROOT_NAME: &str = "_root";
const REDIRECTS: [(&str, &str); 6] = [
("/github", "https://github.com/ChewingBever"),
@ -21,6 +22,12 @@ const REDIRECTS: [(&str, &str); 6] = [
("/aur", "https://aur.archlinux.org/account/Chewing_Bever"),
];
/// Shared application state handed to request handlers via axum's `State`.
#[derive(Clone)]
pub struct Context {
    /// Directory that static sites are served from and extracted into
    static_dir: PathBuf,
    /// Directory where uploaded archives are buffered before extraction
    tmp_dir: PathBuf,
}
#[tokio::main]
async fn main() {
// Enable tracing
@ -33,14 +40,21 @@ async fn main() {
// Get required variables from env vars
let api_key = std::env::var("API_KEY").expect("No API_KEY was provided.");
let data_dir = std::env::var("DATA_DIR").expect("No DATA_DIR was provided.");
let static_dir = format!("{}/{}", data_dir, STATIC_DIR_NAME);
let data_dir = PathBuf::from(std::env::var("DATA_DIR").expect("No DATA_DIR was provided."));
let static_dir = data_dir.join(STATIC_DIR_NAME);
std::fs::create_dir_all(&static_dir);
let state = Context {
static_dir,
tmp_dir: std::env::temp_dir(),
};
tracing::info!("tmpdir = {}", state.tmp_dir.display());
// Initialize metrics
let recorder_handle = metrics::setup_metrics_recorder();
let app = server::app(PathBuf::from(static_dir), &REDIRECTS);
// let recorder_handle = metrics::setup_metrics_recorder();
let app = server::app(state, &api_key, &REDIRECTS);
// Each static site gets mounted explicitly so that the default site can be used as fallback
// Each entry is of the form (route, static dir name)
@ -51,9 +65,7 @@ async fn main() {
];
let addr = SocketAddr::from(([0, 0, 0, 0], 3000));
let listener = TcpListener::bind(addr).await.unwrap();
tracing::debug!("listening on {}", addr);
axum::Server::bind(&addr)
.serve(app.into_make_service())
.await
.unwrap();
axum::serve(listener, app).await.unwrap();
}

View File

@ -1,18 +1,128 @@
mod matrix;
use axum::{response::Redirect, routing::any, Extension, Router};
use tower_http::{services::ServeDir, trace::TraceLayer};
use axum::{
body::Body,
extract::{self, State},
response::Redirect,
routing::{any, post},
Router,
};
use flate2::read::GzDecoder;
use futures_util::TryStreamExt;
use tar::Archive;
use tokio::fs::File;
use tokio_util::io::StreamReader;
use tower_http::{
services::ServeDir, trace::TraceLayer, validate_request::ValidateRequestHeaderLayer,
};
use std::path::PathBuf;
use std::{
io,
path::{Path, PathBuf},
};
pub fn app(static_dir: PathBuf, redirects: &[(&'static str, &'static str)]) -> Router {
let mut app = Router::new().nest("/", matrix::router());
use crate::STATIC_ROOT_NAME;
pub fn app(
ctx: crate::Context,
api_key: &str,
redirects: &[(&'static str, &'static str)],
) -> Router {
// We first try to route the request according to the contents of the root directory. If the
// file doesn't exist, then we look for it in the other directories.
let serve_dir = ServeDir::new(ctx.static_dir.join(STATIC_ROOT_NAME))
.append_index_html_on_directories(true)
.not_found_service(
ServeDir::new(ctx.static_dir.clone()).append_index_html_on_directories(true),
);
let mut app = Router::new()
.route_service("/", serve_dir.clone())
.route(
"/{*path}",
post(post_static_archive)
.route_layer(ValidateRequestHeaderLayer::bearer(api_key))
.get_service(serve_dir),
)
.with_state(ctx.clone())
.merge(matrix::router());
for (path, url) in redirects.iter() {
app = app.route(path, any(|| async { Redirect::permanent(url) }))
}
app.fallback_service(ServeDir::new(static_dir.clone()))
.layer(Extension(static_dir))
.layer(TraceLayer::new_for_http())
app.layer(TraceLayer::new_for_http())
}
pub async fn post_static_archive(
State(ctx): State<crate::Context>,
extract::Path(path): extract::Path<String>,
body: Body,
) {
// Copy tarball data to file for parsing
let stream = body.into_data_stream();
let mut reader = StreamReader::new(stream.map_err(io::Error::other));
let uuid = uuid::Uuid::new_v4();
let ar_path = ctx.tmp_dir.join(uuid.to_string());
let mut f = File::create(&ar_path).await;
tokio::io::copy(&mut reader, &mut f.unwrap()).await;
// Root is stored in its own specifc directory, as otherwise it would wipe all other uploaded
// directories every time it's updated
let dest_dir = if path.is_empty() {
String::from(crate::STATIC_ROOT_NAME)
} else {
path
};
let dest_dir = ctx.static_dir.join(dest_dir);
tokio::task::spawn_blocking(move || process_archive(&ar_path, &dest_dir))
.await
.unwrap();
}
/// Extract the gzipped tarball at `ar_path` into a fresh `<dest_dir>.new`
/// directory, swap it into place of `dest_dir`, and remove the archive file.
///
/// Unpacking into a separate directory first means a half-written upload never
/// replaces a working site.
fn process_archive(ar_path: &Path, dest_dir: &Path) -> io::Result<()> {
    let f = std::fs::File::open(ar_path)?;
    let tar = GzDecoder::new(f);
    let mut ar = Archive::new(tar);

    // trim possible trailing slash from path
    let dest_dir = PathBuf::from(dest_dir.to_string_lossy().trim_end_matches('/'));

    // extract extension and append '.new' to form new extension
    let ext = dest_dir
        .extension()
        .map(|ext| ext.to_string_lossy().to_string())
        .unwrap_or(String::from(""));
    let new_dir = dest_dir.with_extension(format!("{ext}.new"));

    // A stale `.new` directory left behind by a previously failed run would
    // make `create_dir` fail forever; clear it out first.
    if new_dir.try_exists()? {
        std::fs::remove_dir_all(&new_dir)?;
    }

    // Unpack archive into new directory
    std::fs::create_dir(&new_dir)?;
    if let Err(err) = ar.unpack(&new_dir) {
        // Don't leave a partially extracted directory behind on failure
        let _ = std::fs::remove_dir_all(&new_dir);
        return Err(err);
    }

    // Replace original directory with new one
    if dest_dir.try_exists()? {
        std::fs::remove_dir_all(&dest_dir)?;
    }
    std::fs::rename(new_dir, dest_dir)?;
    std::fs::remove_file(ar_path)?;

    Ok(())
}
pub async fn delete_dir(
State(ctx): State<crate::Context>,
extract::Path(path): extract::Path<String>,
) {
let dest_dir = if path.is_empty() {
String::from(crate::STATIC_ROOT_NAME)
} else {
path
};
let dest_dir = ctx.static_dir.join(dest_dir);
tokio::fs::remove_dir_all(dest_dir).await;
}