Compare commits

No commits in common. "1cca0d46de3e65f4a5fd2758746a2513bb713328" and "e5147a92b980d264e78be9ef49a803130073c0c5" have entirely different histories.

1cca0d46de ... e5147a92b9
(file name not shown; CI pipeline config)
@@ -4,7 +4,7 @@ branches:
 pipeline:
   build:
-    image: 'rust:1.83-alpine3.21'
+    image: 'rust:1.69-alpine3.16'
     commands:
       - apk add build-base
      - cargo build

(file name not shown)
File diff suppressed because it is too large

Cargo.toml (28 lines changed)
@@ -11,18 +11,18 @@ name = "site"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-axum = "0.8.1"
-hyper = { version = "1.5.2" }
-tokio = { version = "1.42.0", features = ["full"] }
-tracing = "0.1.41"
-tracing-subscriber = {version = "0.3.19", features = ["env-filter"] }
-tower-http = { version = "0.6.2", features = ["fs", "trace", "auth", "compression-br", "compression-gzip"] }
-tar = "0.4.43"
-flate2 = "1.0.35"
-tokio-util = { version = "0.7.13", features = ["io"] }
-futures-util = "0.3.31"
-uuid = { version = "1.11.0", features = ["v4"] }
-serde_json = "1.0.134"
-metrics = "0.24.1"
-metrics-exporter-prometheus = "0.16.0"
+axum = { version = "0.6.18" }
+hyper = { version = "0.14.26" }
+tokio = { version = "1.28.0", features = ["full"] }
+tracing = "0.1.37"
+tracing-subscriber = {version = "0.3.17", features = ["env-filter"] }
+tower-http = { version = "0.4.0", features = ["fs", "trace", "auth"] }
+tar = "0.4.38"
+flate2 = "1.0.26"
+tokio-util = { version = "0.7.8", features = ["io"] }
+futures-util = "0.3.28"
+uuid = { version = "1.3.2", features = ["v4"] }
+serde_json = "1.0.96"
+metrics = "0.21.0"
+metrics-exporter-prometheus = "0.12.0"
 serde = { version = "1.0", features = ["derive"] }

Dockerfile (10 lines changed)
@@ -1,4 +1,4 @@
-FROM rust:1.83-alpine3.21 AS builder
+FROM rust:1.69-alpine3.16 AS builder
 
 ARG DI_VER=1.2.5
 
@@ -13,16 +13,12 @@ RUN wget -O - "https://github.com/Yelp/dumb-init/archive/refs/tags/v${DI_VER}.ta
     mv dumb-init .. && \
     cd ..
 
-COPY Cargo.toml Cargo.lock ./
-
-RUN cargo fetch --locked
-
 COPY . ./
 
-RUN cargo build --release --frozen
+RUN cargo build --release
 
 
-FROM alpine:3.21
+FROM alpine:3.16
 
 COPY --from=builder /app/target/release/site /bin/site
 COPY --from=builder /app/dumb-init /bin/dumb-init
(file name not shown)
@@ -0,0 +1,2 @@
+[toolchain]
+channel = "1.69"
(file name not shown)
@@ -0,0 +1,101 @@
+use std::{collections::HashSet, io::ErrorKind, path::Path};
+
+use axum::{
+    extract::{BodyStream, Extension, Query},
+    http::StatusCode,
+    response::IntoResponse,
+};
+use flate2::read::GzDecoder;
+use futures_util::TryStreamExt;
+use serde::Deserialize;
+use tar::Archive;
+use tokio_util::io::StreamReader;
+
+use crate::{DEFAULT_STATIC_SITE, STATIC_DIR_NAME};
+
+#[derive(Deserialize)]
+pub struct StaticDirParams {
+    dir: Option<String>,
+}
+
+pub async fn post_deploy(
+    Extension(data_dir): Extension<String>,
+    Query(params): Query<StaticDirParams>,
+    res: BodyStream,
+) -> impl IntoResponse {
+    // This converts a stream into something that implements AsyncRead, which we can then use to
+    // asynchronously write the file to disk
+    let mut read =
+        StreamReader::new(res.map_err(|axum_err| std::io::Error::new(ErrorKind::Other, axum_err)));
+    let uuid = uuid::Uuid::new_v4();
+    let file_path = Path::new(&data_dir).join(uuid.as_hyphenated().to_string());
+    let mut file = tokio::fs::File::create(&file_path).await.unwrap();
+    tokio::io::copy(&mut read, &mut file).await;
+
+    // If no dir is provided, we use the default one. Otherwise, use the provided one.
+    let static_path = Path::new(&data_dir)
+        .join(STATIC_DIR_NAME)
+        .join(params.dir.unwrap_or(DEFAULT_STATIC_SITE.to_string()));
+
+    // Make sure the static directory exists
+    tokio::fs::create_dir_all(&static_path).await;
+
+    let fp_clone = file_path.clone();
+    // Extract the contents of the tarball synchronously
+    let res =
+        match tokio::task::spawn_blocking(move || process_archive(&fp_clone, &static_path)).await {
+            Ok(_) => StatusCode::OK,
+            Err(_) => StatusCode::INTERNAL_SERVER_ERROR,
+        };
+
+    // Remove archive file after use
+    tokio::fs::remove_file(&file_path).await;
+
+    res
+}
+
+fn process_archive(archive_path: &Path, static_dir: &Path) -> Result<(), ()> {
+    let file = std::fs::File::open(archive_path).map_err(|_| ())?;
+    let tar = GzDecoder::new(file);
+    let mut archive = Archive::new(tar);
+
+    let mut paths = HashSet::new();
+
+    let entries = archive.entries().map_err(|_| ())?;
+    // Extract each entry into the output directory
+    for entry_res in entries {
+        let mut entry = entry_res.map_err(|_| ())?;
+        entry.unpack_in(static_dir).map_err(|_| ())?;
+
+        if let Ok(path) = entry.path() {
+            paths.insert(path.into_owned());
+        }
+    }
+
+    // Remove any old files that weren't present in new archive
+    let mut items = vec![];
+
+    // Start by populating the vec with the initial files
+    let iter = static_dir.read_dir().map_err(|_| ())?;
+    iter.filter_map(|r| r.ok())
+        .for_each(|e| items.push(e.path()));
+
+    // As long as there are still items in the vec, we keep going
+    while items.len() > 0 {
+        let item = items.pop().unwrap();
+        tracing::debug!("{:?}", item);
+
+        if !paths.contains(item.strip_prefix(&static_dir).unwrap()) {
+            if item.is_dir() {
+                std::fs::remove_dir_all(item);
+            } else {
+                std::fs::remove_file(item);
+            }
+        } else if let Ok(iter) = item.read_dir() {
+            iter.filter_map(|r| r.ok())
+                .for_each(|e| items.push(e.path()));
+        }
+    }
+
+    Ok(())
+}
(file name not shown)
@@ -0,0 +1,7 @@
+use axum::{routing::post, Router};
+
+mod deploy;
+
+pub fn router() -> Router {
+    Router::new().route("/deploy", post(deploy::post_deploy))
+}
src/error.rs (53 lines changed)
@@ -1,53 +0,0 @@
-use axum::http::StatusCode;
-use axum::response::{IntoResponse, Response};
-use std::error::Error;
-use std::fmt;
-use std::io;
-
-pub type Result<T> = std::result::Result<T, ServerError>;
-
-#[derive(Debug)]
-pub enum ServerError {
-    IO(io::Error),
-    Axum(axum::Error),
-}
-
-impl fmt::Display for ServerError {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            ServerError::IO(err) => write!(fmt, "{}", err),
-            ServerError::Axum(err) => write!(fmt, "{}", err),
-        }
-    }
-}
-
-impl Error for ServerError {}
-
-impl IntoResponse for ServerError {
-    fn into_response(self) -> Response {
-        tracing::error!("{}", self);
-
-        match self {
-            ServerError::IO(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
-            ServerError::Axum(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(),
-        }
-    }
-}
-
-impl From<io::Error> for ServerError {
-    fn from(err: io::Error) -> Self {
-        ServerError::IO(err)
-    }
-}
-
-impl From<axum::Error> for ServerError {
-    fn from(err: axum::Error) -> Self {
-        ServerError::Axum(err)
-    }
-}
-
-impl From<tokio::task::JoinError> for ServerError {
-    fn from(err: tokio::task::JoinError) -> Self {
-        ServerError::IO(err.into())
-    }
-}

src/main.rs (109 lines changed)
@@ -1,32 +1,25 @@
-mod error;
-// mod metrics;
-mod server;
-
-pub use error::Result;
-use tokio::net::TcpListener;
-
-use std::{net::SocketAddr, path::PathBuf};
+use std::{future::ready, net::SocketAddr};
 
+use axum::{
+    extract::Extension,
+    middleware,
+    response::Redirect,
+    routing::{any, get},
+    Router,
+};
+use tower_http::{
+    services::ServeDir, trace::TraceLayer, validate_request::ValidateRequestHeaderLayer,
+};
 use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
 
+mod api;
+mod matrix;
+mod metrics;
+
 /// Name of the directory where static sites are stored inside the data directory
 const STATIC_DIR_NAME: &str = "static";
-const STATIC_ROOT_NAME: &str = "_root";
-
-const REDIRECTS: [(&str, &str); 6] = [
-    ("/github", "https://github.com/ChewingBever"),
-    ("/gitea", "https://git.rustybever.be/Chewing_Bever"),
-    ("/gitlab", "https://gitlab.com/Chewing_Bever"),
-    ("/codeberg", "https://codeberg.org/Chewing_Bever"),
-    ("/matrix", "https://matrix.to/#/@jef:rustybever.be"),
-    ("/aur", "https://aur.archlinux.org/account/Chewing_Bever"),
-];
-
-#[derive(Clone)]
-pub struct Context {
-    static_dir: PathBuf,
-    tmp_dir: PathBuf,
-}
+/// Name of the subdir of STATIC_DIR_NAME where the default (fallback) site is located
+const DEFAULT_STATIC_SITE: &str = "default";
 
 #[tokio::main]
 async fn main() {
@@ -40,22 +33,66 @@ async fn main() {
 
     // Get required variables from env vars
     let api_key = std::env::var("API_KEY").expect("No API_KEY was provided.");
-    let data_dir = PathBuf::from(std::env::var("DATA_DIR").expect("No DATA_DIR was provided."));
-    let static_dir = data_dir.join(STATIC_DIR_NAME);
+    let data_dir = std::env::var("DATA_DIR").expect("No DATA_DIR was provided.");
+    let static_dir = format!("{}/{}", data_dir, STATIC_DIR_NAME);
 
-    std::fs::create_dir_all(&static_dir).unwrap();
-
-    let state = Context {
-        static_dir,
-        tmp_dir: std::env::temp_dir(),
-    };
+    std::fs::create_dir_all(&static_dir);
 
     // Initialize metrics
-    // let recorder_handle = metrics::setup_metrics_recorder();
-    let app = server::app(state, &api_key, &REDIRECTS);
+    let recorder_handle = metrics::setup_metrics_recorder();
+
+    let mut app = Router::new()
+        // Handle Matrix .well-known files
+        .nest("/", matrix::router())
+        // Routes under /api path
+        .nest(
+            "/api",
+            api::router().layer(ValidateRequestHeaderLayer::bearer(&api_key)),
+        )
+        .route("/metrics", get(move || ready(recorder_handle.render())));
+
+    // Each static site gets mounted explicitely so that the default site can be used as fallback
+    // Each entry is of the form (route, static dir name)
+    let sites = [
+        ("/docs/vieter", "docs-vieter"),
+        ("/api-docs/vieter", "api-docs-vieter"),
+        ("/man/vieter", "man-vieter"),
+    ];
+
+    for (path, dir) in sites {
+        let full_path = format!("{}/{}", static_dir, dir);
+
+        app = app.nest_service(path, ServeDir::new(full_path));
+    }
+
+    // Define some redirects
+    let redirects = [
+        ("/github", "https://github.com/ChewingBever"),
+        ("/gitea", "https://git.rustybever.be/Chewing_Bever"),
+        ("/gitlab", "https://gitlab.com/Chewing_Bever"),
+        ("/codeberg", "https://codeberg.org/Chewing_Bever"),
+        ("/matrix", "https://matrix.to/#/@jef:rustybever.be"),
+        ("/aur", "https://aur.archlinux.org/account/Chewing_Bever"),
+    ];
+
+    for (path, url) in redirects {
+        app = app.route(path, any(|| async { Redirect::permanent(url) }))
+    }
+
+    app = app
+        // The fallback option is to serve the actual static files
+        .fallback_service(ServeDir::new(format!(
+            "{}/{}",
+            static_dir, DEFAULT_STATIC_SITE
+        )))
+        .layer(middleware::from_fn(metrics::track_metrics))
+        .layer(Extension(data_dir))
+        .layer(TraceLayer::new_for_http());
 
     let addr = SocketAddr::from(([0, 0, 0, 0], 3000));
-    let listener = TcpListener::bind(addr).await.unwrap();
     tracing::debug!("listening on {}", addr);
-    axum::serve(listener, app).await.unwrap();
+    axum::Server::bind(&addr)
+        .serve(app.into_make_service())
+        .await
+        .unwrap();
 }
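The tail of this hunk is the server-startup change that runs through the whole compare: the removed (-) side is the newer axum 0.7+/0.8 style, where the caller binds a tokio TcpListener and passes it to axum::serve, while the added (+) side is the axum 0.6 / hyper 0.14 builder, axum::Server::bind(&addr).serve(app.into_make_service()). For reference, a minimal, self-contained sketch of the newer style (a standalone example, not code from this repository):

use std::net::SocketAddr;

use axum::{routing::get, Router};
use tokio::net::TcpListener;

#[tokio::main]
async fn main() {
    // A trivial router; the real app is built as in main.rs above
    let app = Router::new().route("/", get(|| async { "ok" }));

    // axum 0.7+ dropped axum::Server (a re-export of hyper 0.14's Server);
    // instead the caller binds a listener and hands it to axum::serve
    let addr = SocketAddr::from(([0, 0, 0, 0], 3000));
    let listener = TcpListener::bind(addr).await.unwrap();
    axum::serve(listener, app).await.unwrap();
}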
(file name not shown)
@@ -1,138 +0,0 @@
-mod matrix;
-
-use axum::{
-    body::Body,
-    extract::{self, State},
-    response::Redirect,
-    routing::{any, post},
-    Router,
-};
-use flate2::read::GzDecoder;
-use futures_util::TryStreamExt;
-use tar::Archive;
-use tokio::fs::File;
-use tokio_util::io::StreamReader;
-use tower_http::{
-    compression::CompressionLayer, services::ServeDir, trace::TraceLayer,
-    validate_request::ValidateRequestHeaderLayer,
-};
-
-use std::{
-    io,
-    path::{Path, PathBuf},
-};
-
-use crate::{error::Result, STATIC_ROOT_NAME};
-
-pub fn app(
-    ctx: crate::Context,
-    api_key: &str,
-    redirects: &[(&'static str, &'static str)],
-) -> Router {
-    // We first try to route the request according to the contents of the root directory. If the
-    // file doesn't exist, then we look for it in the other directories.
-    let serve_dir = ServeDir::new(ctx.static_dir.join(STATIC_ROOT_NAME))
-        .append_index_html_on_directories(true)
-        .fallback(ServeDir::new(ctx.static_dir.clone()).append_index_html_on_directories(true));
-
-    let mut app = Router::new()
-        .route_service("/", serve_dir.clone())
-        .route(
-            "/{*path}",
-            post(post_static_archive)
-                .delete(delete_dir)
-                .route_layer(ValidateRequestHeaderLayer::bearer(api_key))
-                .get_service(serve_dir),
-        )
-        .with_state(ctx.clone())
-        .merge(matrix::router());
-
-    for (path, url) in redirects.iter() {
-        app = app.route(path, any(|| async { Redirect::permanent(url) }))
-    }
-
-    app.layer(CompressionLayer::new().gzip(true).br(true))
-        .layer(TraceLayer::new_for_http())
-}
-
-pub async fn post_static_archive(
-    State(ctx): State<crate::Context>,
-    extract::Path(path): extract::Path<String>,
-    body: Body,
-) -> Result<()> {
-    // Copy tarball data to file for parsing
-    let stream = body.into_data_stream();
-    let mut reader = StreamReader::new(stream.map_err(io::Error::other));
-
-    let uuid = uuid::Uuid::new_v4();
-    let ar_path = ctx.tmp_dir.join(uuid.to_string());
-    let mut f = File::create(&ar_path).await?;
-
-    tokio::io::copy(&mut reader, &mut f).await?;
-
-    // Root is stored in its own specifc directory, as otherwise it would wipe all other uploaded
-    // directories every time it's updated
-    let dest_dir = if path.is_empty() {
-        String::from(crate::STATIC_ROOT_NAME)
-    } else {
-        path
-    };
-    let dest_dir = ctx.static_dir.join(dest_dir);
-
-    tokio::task::spawn_blocking(move || process_archive(&ar_path, &dest_dir))
-        .await
-        .unwrap()?;
-
-    Ok(())
-}
-
-fn process_archive(ar_path: &Path, dest_dir: &Path) -> io::Result<()> {
-    let f = std::fs::File::open(ar_path)?;
-    let tar = GzDecoder::new(f);
-    let mut ar = Archive::new(tar);
-
-    // trim possible trailing slash from path
-    let dest_dir = PathBuf::from(dest_dir.to_string_lossy().trim_end_matches('/'));
-
-    // extract extension and append '.new' to form new extension
-    let ext = dest_dir
-        .extension()
-        .map(|ext| ext.to_string_lossy().to_string())
-        .unwrap_or(String::from(""));
-    let new_dir = dest_dir.with_extension(format!("{ext}.new"));
-
-    // Directory might be left behind by previous failed upload
-    if new_dir.try_exists()? {
-        std::fs::remove_dir_all(&new_dir)?;
-    }
-
-    // Unpack archive into new directory
-    std::fs::create_dir_all(&new_dir)?;
-    ar.unpack(&new_dir)?;
-
-    // Replace original directory with new one
-    if dest_dir.try_exists()? {
-        std::fs::remove_dir_all(&dest_dir)?;
-    }
-
-    std::fs::rename(new_dir, dest_dir)?;
-    std::fs::remove_file(ar_path)?;
-
-    Ok(())
-}
-
-pub async fn delete_dir(
-    State(ctx): State<crate::Context>,
-    extract::Path(path): extract::Path<String>,
-) -> Result<()> {
-    let dest_dir = if path.is_empty() {
-        String::from(crate::STATIC_ROOT_NAME)
-    } else {
-        path
-    };
-    let dest_dir = ctx.static_dir.join(dest_dir);
-
-    tokio::fs::remove_dir_all(dest_dir).await?;
-
-    Ok(())
-}
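For contrast with the /api/deploy sketch earlier, the file removed above exposes a path-based API instead: uploading a gzipped tar to a site's own path replaces that site (the archive is unpacked into a temporary ".new" directory and then renamed into place), and DELETE on the same path removes it, with the empty path mapping to the _root site. A purely illustrative client sketch under the same assumptions as before (hypothetical tool, reqwest with the blocking feature, not a dependency of this repository):

// Hypothetical client-side sketch, not part of this repo, for the wildcard-route API above.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let api_key = std::env::var("API_KEY")?;
    let client = reqwest::blocking::Client::new();

    // Replace whatever is currently served under /docs/vieter with the archive's contents
    client
        .post("http://localhost:3000/docs/vieter")
        .bearer_auth(&api_key)
        .body(std::fs::File::open("docs.tar.gz")?)
        .send()?
        .error_for_status()?;

    // Remove that site again
    client
        .delete("http://localhost:3000/docs/vieter")
        .bearer_auth(&api_key)
        .send()?
        .error_for_status()?;

    Ok(())
}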