Added support for optionally deploying to subdir (for docs later)

pull/5/head
Jef Roosens 2022-04-05 09:58:02 +02:00
parent a3cf021fc6
commit b4c8216ebc
Signed by: Jef Roosens
GPG Key ID: B75D4F293C7052DB
6 changed files with 103 additions and 69 deletions

.gitignore vendored
@@ -1,2 +1,3 @@
 /target
 /data/
+*.tar.gz

Cargo.lock generated
@@ -791,6 +791,20 @@ name = "serde"
 version = "1.0.136"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.136"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
 
 [[package]]
 name = "serde_json"
@@ -843,6 +857,7 @@ dependencies = [
  "hyper",
  "metrics",
  "metrics-exporter-prometheus",
+ "serde",
  "serde_json",
  "tar",
  "tokio",

Cargo.toml
@@ -25,3 +25,4 @@ uuid = { version = "1.0.0-alpha.1", features = ["v4"] }
 serde_json = "1.0.79"
 metrics = "0.18.1"
 metrics-exporter-prometheus = "0.9.0"
+serde = { version = "1.0", features = ["derive"] }
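
The new `serde` dependency, with the `derive` feature that pulls in `serde_derive` (hence the Cargo.lock additions above), is what lets the handler below derive `Deserialize` for its query-parameter struct. A minimal sketch of that mechanism, assuming `serde_urlencoded` as a scratch dependency (the crate axum's `Query` extractor uses for query strings):

use serde::Deserialize;

// Same shape as the handler's query-parameter struct below.
#[derive(Deserialize, Debug)]
struct StaticDirParams {
    dir: Option<String>,
}

fn main() {
    // "dir=docs" deserializes to Some("docs")...
    let p: StaticDirParams = serde_urlencoded::from_str("dir=docs").unwrap();
    assert_eq!(p.dir.as_deref(), Some("docs"));

    // ...and an empty query string leaves the optional field as None.
    let p: StaticDirParams = serde_urlencoded::from_str("").unwrap();
    assert!(p.dir.is_none());
}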

curl 100755
@@ -0,0 +1,8 @@
+#!/usr/bin/env sh
+
+curl \
+    -XPOST \
+    -T test.tar.gz \
+    -H 'Authorization: Bearer test' \
+    -v \
+    http://localhost:3000/api/deploy?dir=docs
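
The URL is left unquoted here; since `?` is a glob character in sh, quoting it ('http://localhost:3000/api/deploy?dir=docs') is the safer habit. The bearer token presumably has to match the API_KEY environment variable that main.rs reads at startup.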

@@ -1,19 +1,27 @@
 use std::{collections::HashSet, io::ErrorKind, path::Path};
 
 use axum::{
-    extract::{BodyStream, Extension},
+    extract::{BodyStream, Extension, Query},
     http::StatusCode,
     response::IntoResponse,
 };
 use flate2::read::GzDecoder;
 use futures_util::TryStreamExt;
+use serde::Deserialize;
 use tar::Archive;
 use tokio_util::io::StreamReader;
 
-use crate::STATIC_DIR_NAME;
+use crate::DEFAULT_STATIC_DIR_NAME;
+
+#[derive(Deserialize)]
+pub struct StaticDirParams
+{
+    dir: Option<String>,
+}
 
 pub async fn post_deploy(
     Extension(data_dir): Extension<String>,
+    Query(params): Query<StaticDirParams>,
     res: BodyStream,
 ) -> impl IntoResponse
 {
@@ -26,71 +34,72 @@ pub async fn post_deploy(
     let mut file = tokio::fs::File::create(&file_path).await.unwrap();
     tokio::io::copy(&mut read, &mut file).await;
 
-    // Extract the contents of the tarball synchronously
-    match tokio::task::spawn_blocking(move || {
-        let file = match std::fs::File::open(file_path) {
-            Ok(v) => v,
-            Err(_) => return StatusCode::INTERNAL_SERVER_ERROR,
-        };
-
-        let tar = GzDecoder::new(file);
-        let mut archive = Archive::new(tar);
-        let mut paths = HashSet::new();
-
-        let entries = match archive.entries() {
-            Ok(e) => e,
-            Err(_) => return StatusCode::INTERNAL_SERVER_ERROR,
-        };
-        // Extract each entry into the output directory
-        let static_dir = Path::new(&data_dir).join(STATIC_DIR_NAME);
-        for entry_res in entries {
-            if let Ok(mut entry) = entry_res {
-                if let Err(_) = entry.unpack_in(&static_dir) {
-                    return StatusCode::INTERNAL_SERVER_ERROR;
-                }
-
-                if let Ok(path) = entry.path() {
-                    paths.insert(path.into_owned());
-                }
-            } else {
-                return StatusCode::INTERNAL_SERVER_ERROR;
-            }
-        }
-
-        // Remove any old files that weren't present in new archive
-        let mut items = vec![];
-
-        // Start by populating the vec with the initial files
-        let iter = match static_dir.read_dir() {
-            Ok(v) => v,
-            Err(_) => return StatusCode::INTERNAL_SERVER_ERROR,
-        };
-
-        iter.filter_map(|r| r.ok())
-            .for_each(|e| items.push(e.path()));
-
-        // As long as there are still items in the vec, we keep going
-        while items.len() > 0 {
-            let item = items.pop().unwrap();
-            tracing::debug!("{:?}", item);
-
-            if !paths.contains(item.strip_prefix(&static_dir).unwrap()) {
-                if item.is_dir() {
-                    std::fs::remove_dir_all(item);
-                } else {
-                    std::fs::remove_file(item);
-                }
-            } else if let Ok(iter) = item.read_dir() {
-                iter.filter_map(|r| r.ok())
-                    .for_each(|e| items.push(e.path()));
-            }
-        }
-
-        StatusCode::OK
-    })
-    .await
-    {
-        Ok(s) => s,
-        Err(_) => StatusCode::INTERNAL_SERVER_ERROR,
-    }
+    let mut static_path = Path::new(&data_dir).join(DEFAULT_STATIC_DIR_NAME);
+
+    if params.dir.is_some() {
+        static_path = static_path.join(params.dir.unwrap());
+    }
+
+    // Make sure the static directory exists
+    tokio::fs::create_dir_all(&static_path).await;
+
+    let fp_clone = file_path.clone();
+    // Extract the contents of the tarball synchronously
+    let res =
+        match tokio::task::spawn_blocking(move || process_archive(&fp_clone, &static_path)).await {
+            Ok(_) => StatusCode::OK,
+            Err(_) => StatusCode::INTERNAL_SERVER_ERROR,
+        };
+
+    // Remove archive file after use
+    tokio::fs::remove_file(&file_path).await;
+
+    res
+}
+
+fn process_archive(archive_path: &Path, static_dir: &Path) -> Result<(), ()>
+{
+    let file = std::fs::File::open(archive_path).map_err(|_| ())?;
+    let tar = GzDecoder::new(file);
+    let mut archive = Archive::new(tar);
+
+    let mut paths = HashSet::new();
+
+    let entries = archive.entries().map_err(|_| ())?;
+    // Extract each entry into the output directory
+    for entry_res in entries {
+        let mut entry = entry_res.map_err(|_| ())?;
+        entry.unpack_in(static_dir).map_err(|_| ())?;
+
+        if let Ok(path) = entry.path() {
+            paths.insert(path.into_owned());
+        }
+    }
+
+    // Remove any old files that weren't present in new archive
+    let mut items = vec![];
+
+    // Start by populating the vec with the initial files
+    let iter = static_dir.read_dir().map_err(|_| ())?;
+    iter.filter_map(|r| r.ok())
+        .for_each(|e| items.push(e.path()));
+
+    // As long as there are still items in the vec, we keep going
+    while items.len() > 0 {
+        let item = items.pop().unwrap();
+        tracing::debug!("{:?}", item);
+
+        if !paths.contains(item.strip_prefix(&static_dir).unwrap()) {
+            if item.is_dir() {
+                std::fs::remove_dir_all(item);
+            } else {
+                std::fs::remove_file(item);
+            }
+        } else if let Ok(iter) = item.read_dir() {
+            iter.filter_map(|r| r.ok())
+                .for_each(|e| items.push(e.path()));
+        }
+    }
+
+    Ok(())
 }
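
The subdirectory support amounts to one optional extra `join` onto the static root before extraction. A standalone sketch of that path logic (the `resolve_static_path` helper is hypothetical, not part of this codebase):

use std::path::{Path, PathBuf};

const DEFAULT_STATIC_DIR_NAME: &str = "static";

// Hypothetical helper mirroring the handler's path logic: an optional
// `dir` query parameter picks a subdirectory under the static root.
fn resolve_static_path(data_dir: &str, dir: Option<&str>) -> PathBuf {
    let mut path = Path::new(data_dir).join(DEFAULT_STATIC_DIR_NAME);
    if let Some(subdir) = dir {
        path = path.join(subdir);
    }
    path
}

fn main() {
    // `?dir=docs` deploys under <data_dir>/static/docs...
    assert_eq!(
        resolve_static_path("/data", Some("docs")),
        PathBuf::from("/data/static/docs")
    );

    // ...while omitting `dir` keeps the pre-existing root behaviour.
    assert_eq!(
        resolve_static_path("/data", None),
        PathBuf::from("/data/static")
    );
}

One caveat: `unpack_in` refuses tar entries that would escape the destination directory, but the `dir` value itself is joined verbatim, so a value such as `../x` would resolve outside the static root.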

src/main.rs
@@ -14,7 +14,7 @@ mod api;
 mod matrix;
 mod metrics;
 
-const STATIC_DIR_NAME: &str = "static";
+const DEFAULT_STATIC_DIR_NAME: &str = "static";
 
 #[tokio::main]
 async fn main()
@@ -30,7 +30,7 @@ async fn main()
     // Get required variables from env vars
     let api_key = std::env::var("API_KEY").expect("No API_KEY was provided.");
     let data_dir = std::env::var("DATA_DIR").expect("No DATA_DIR was provided.");
-    let static_dir = format!("{}/{}", data_dir, STATIC_DIR_NAME);
+    let static_dir = format!("{}/{}", data_dir, DEFAULT_STATIC_DIR_NAME);
 
     std::fs::create_dir_all(&static_dir);