feat: initially working export command
parent 80b814bcff
commit 5567323473
@@ -1,5 +1,6 @@
 use super::State;
 use serde::{Deserialize, Serialize};
+use std::borrow::Borrow;
 use std::fmt;
 
 /// Represents the changes relative to the previous backup

@@ -26,7 +27,6 @@ impl Delta {
     ///
     /// The union of two deltas is a delta that produces the same state as if you were to apply
     /// both deltas in-order. Note that this operation is not commutative.
-    #[allow(dead_code)]
     pub fn union(&self, delta: &Self) -> Self {
         let mut out = self.clone();
 
@@ -110,20 +110,35 @@ impl Delta {
         out
     }
 
-    pub fn contributions(deltas: &[Self]) -> Vec<State> {
-        let mut contributions: Vec<State> = Vec::with_capacity(deltas.len());
-        // contributions[deltas.len() - 1] = deltas[deltas.len() - 1].added.clone();
-
-        // for (i, delta) in deltas.iter().enumerate() {
-        //     // The base case for the contributions is every added file
-        //     let mut contribution = delta.clone();
-
-        //     for other_delta in &deltas[i + 1..] {
-        //         contribution = contribution.difference(other_delta);
-        //     }
-
-        //     contributions.push(contribution);
-        // }
+    /// Given a chain of deltas, ordered from last to first, calculate the "contribution" for each
+    /// state.
+    ///
+    /// The contribution of a delta in a given chain is defined as the parts of the state produced
+    /// by this chain that are actually provided by this delta. This comes down to calculating the
+    /// strict difference of this delta and all of its successive deltas.
+    pub fn contributions<I>(deltas: I) -> Vec<State>
+    where
+        I: IntoIterator,
+        I::Item: Borrow<Delta>,
+    {
+        let mut contributions: Vec<State> = Vec::new();
+
+        let mut deltas = deltas.into_iter();
+
+        if let Some(first_delta) = deltas.next() {
+            // From last to first, we calculate the strict difference of the delta with the union of all its
+            // following deltas. The list of added files of this difference is the contribution for
+            // that delta.
+            contributions.push(first_delta.borrow().added.clone());
+            let mut union_future = first_delta.borrow().clone();
+
+            for delta in deltas {
+                contributions.push(delta.borrow().strict_difference(&union_future).added);
+                union_future = union_future.union(delta.borrow());
+            }
+        }
 
         // contributions.reverse();
 
         contributions
     }

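To make the contribution semantics concrete, here is a minimal standalone sketch. It models each delta as nothing but its set of added paths; the real `Delta` and `State` also track removals and group files per directory, so this only mirrors the happy path:

use std::collections::HashSet;

// Each "delta" is reduced to its set of added paths.
fn contributions(deltas_last_to_first: &[HashSet<&str>]) -> Vec<HashSet<&str>> {
    let mut contributions = Vec::new();
    let mut iter = deltas_last_to_first.iter();

    if let Some(first) = iter.next() {
        // The last delta contributes everything it adds.
        contributions.push(first.clone());
        let mut union_future = first.clone();

        for delta in iter {
            // Only paths not already provided by a later delta count.
            contributions.push(delta.difference(&union_future).cloned().collect());
            union_future.extend(delta.iter().cloned());
        }
    }

    contributions
}

fn main() {
    // Chain ordered oldest -> newest: b1 adds a and b, b2 overwrites b and adds c.
    let b1: HashSet<&str> = ["a.txt", "b.txt"].into();
    let b2: HashSet<&str> = ["b.txt", "c.txt"].into();

    // Pass last to first, as the new API expects.
    let contrib = contributions(&[b2, b1]);
    // b2 contributes {b.txt, c.txt}; b1 only contributes {a.txt},
    // because its b.txt is shadowed by b2's version.
    println!("{:?}", contrib);
}

Note that the result comes back last-to-first, which is why `Manager::export_backup` below pairs it with `contributions.iter().rev()` before zipping against the chain.
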
@@ -4,7 +4,7 @@ use serde::Deserialize;
 use serde::Serialize;
 use std::collections::HashMap;
 use std::io;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 
 /// Manages a collection of backup layers, allowing them to be utilized as a single object.
 pub struct MetaManager<T>

@@ -129,6 +129,17 @@ where
             .map(|manager| manager.restore_backup(start_time, dirs))
     }
 
+    pub fn export_backup<P: AsRef<Path>>(
+        &self,
+        layer: &str,
+        start_time: chrono::DateTime<Utc>,
+        output_path: P,
+    ) -> Option<io::Result<()>> {
+        self.managers
+            .get(layer)
+            .map(|manager| manager.export_backup(start_time, output_path))
+    }
+
     pub fn managers(&self) -> &HashMap<String, Manager<T>> {
         &self.managers
     }

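The nested `Option<io::Result<()>>` return deserves a note: `None` signals an unknown layer, while `Some(Err(_))` signals that the layer exists but the export itself failed. A toy sketch of that lookup shape, with the unit type standing in for a real `Manager`:

use std::collections::HashMap;
use std::io;

fn export(managers: &HashMap<String, ()>, layer: &str) -> Option<io::Result<()>> {
    // `get` turns an unknown key into None; the mapped closure can still
    // fail independently with an io::Error.
    managers.get(layer).map(|_manager| Ok(()))
}

fn main() {
    let managers = HashMap::from([("daily".to_string(), ())]);

    match export(&managers, "hourly") {
        None => eprintln!("Unknown layer"),
        Some(Ok(())) => println!("exported"),
        Some(Err(e)) => eprintln!("export failed: {}", e),
    }
}

This is exactly the shape `BackupExportArgs::run` consumes at the end of this commit.
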
@@ -4,15 +4,17 @@ mod meta;
 pub use config::ManagerConfig;
 pub use meta::MetaManager;
 
-use super::{Backup, State};
+use super::{Backup, Delta, State};
 use crate::other;
 use chrono::SubsecRound;
 use chrono::Utc;
+use flate2::write::GzEncoder;
+use flate2::Compression;
 use serde::Deserialize;
 use serde::Serialize;
-use std::fs::File;
+use std::fs::{File, OpenOptions};
 use std::io;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 
 /// Manages a single backup layer consisting of one or more chains of backups.
 pub struct Manager<T>

@@ -187,6 +189,47 @@ where
         Err(other("Unknown backup."))
     }
 
+    pub fn export_backup<P: AsRef<Path>>(
+        &self,
+        start_time: chrono::DateTime<Utc>,
+        output_path: P,
+    ) -> io::Result<()> {
+        // Iterate over each chain, skipping elements until the element with the given start time
+        // is possibly found.
+        for chain in &self.chains {
+            // If we find the element in the chain, restore the entire chain up to and including
+            // the element
+            if let Some(index) = chain
+                .iter()
+                .position(|b| b.start_time.trunc_subsecs(0) == start_time)
+            {
+                let contributions =
+                    Delta::contributions(chain.iter().take(index + 1).map(|b| &b.delta).rev());
+
+                let tar_gz = OpenOptions::new()
+                    .write(true)
+                    .create(true)
+                    .open(output_path.as_ref())?;
+                let enc = GzEncoder::new(tar_gz, Compression::default());
+                let mut ar = tar::Builder::new(enc);
+
+                for (contribution, backup) in
+                    contributions.iter().rev().zip(chain.iter().take(index + 1))
+                {
+                    backup.append(&self.backup_dir, contribution, &mut ar)?;
+                }
+
+                let mut enc = ar.into_inner()?;
+                enc.try_finish()?;
+                enc.finish()?;
+
+                return Ok(());
+            }
+        }
+
+        Err(other("Unknown backup."))
+    }
+
     /// Get a reference to the underlying chains
     pub fn chains(&self) -> &Vec<Vec<Backup<T>>> {
         &self.chains

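For reference, the encoder plumbing in isolation: a runnable sketch that writes a `.tar.gz` the same way. File names here are hypothetical, and `append_path` stands in for `Backup::append`:

use flate2::{write::GzEncoder, Compression};
use std::fs::OpenOptions;
use std::io;

fn export(paths: &[&str], output: &str) -> io::Result<()> {
    let tar_gz = OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true) // not in the diff; without it, an existing longer file keeps stale trailing bytes
        .open(output)?;
    let enc = GzEncoder::new(tar_gz, Compression::default());
    let mut ar = tar::Builder::new(enc);

    for path in paths {
        // Store each file under the same relative path it has on disk.
        ar.append_path(path)?;
    }

    // into_inner writes the tar footer and returns the gzip encoder,
    // which still has to be finished to flush the gzip trailer.
    let enc = ar.into_inner()?;
    enc.finish()?;
    Ok(())
}

fn main() -> io::Result<()> {
    export(&["Cargo.toml"], "backup.tar.gz")
}

Calling both `try_finish` and `finish` as the diff does should be harmless, since `finish` is a no-op once `try_finish` has succeeded; either one alone suffices.
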
@@ -244,6 +244,31 @@ impl<T: Clone> Backup<T> {
         let enc = GzDecoder::new(tar_gz);
         Ok(tar::Archive::new(enc))
     }
+
+    /// Open this backup's archive and append all its files that are part of the provided state to
+    /// the archive file.
+    pub fn append<P: AsRef<Path>>(
+        &self,
+        backup_dir: P,
+        state: &State,
+        ar: &mut tar::Builder<GzEncoder<File>>,
+    ) -> io::Result<()> {
+        let mut own_ar = self.open(backup_dir)?;
+        // println!("{:?}", &state);
+
+        for entry in own_ar.entries()? {
+            let entry = entry?;
+            let entry_path_in_tar = entry.path()?.to_path_buf();
+
+            if state.contains(&entry_path_in_tar) {
+                println!("{:?}", &entry_path_in_tar);
+                let header = entry.header().clone();
+                ar.append(&header, entry)?;
+            }
+        }
+
+        Ok(())
+    }
 }
 
 impl<T: Clone> fmt::Display for Backup<T> {

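The entry-copying idiom in `append` works on any pair of archives. A self-contained sketch with hypothetical file names, filtering a source `tar.gz` into a new one by path prefix instead of by `State`:

use flate2::{read::GzDecoder, write::GzEncoder, Compression};
use std::fs::File;
use std::io;

fn main() -> io::Result<()> {
    let mut src = tar::Archive::new(GzDecoder::new(File::open("in.tar.gz")?));
    let dst = GzEncoder::new(File::create("out.tar.gz")?, Compression::default());
    let mut ar = tar::Builder::new(dst);

    for entry in src.entries()? {
        let entry = entry?;
        let path = entry.path()?.to_path_buf();

        if path.starts_with("worlds") {
            // Clone the header first; `entry` itself is then consumed as the
            // Read impl that streams the file body into the new archive.
            let header = entry.header().clone();
            ar.append(&header, entry)?;
        }
    }

    ar.into_inner()?.finish()?;
    Ok(())
}
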
@@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize};
 use std::borrow::Borrow;
 use std::collections::{HashMap, HashSet};
 use std::ops::{Deref, DerefMut};
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 
 /// Struct that represents a current state for a backup. This struct acts as a smart pointer around
 /// a HashMap.

@@ -32,6 +32,15 @@ impl State {
             }
         }
     }
+
+    /// Returns whether the provided relative path is part of the given state.
+    pub fn contains<P: AsRef<Path>>(&self, path: P) -> bool {
+        let path = path.as_ref();
+
+        self.0.iter().any(|(dir, files)| {
+            path.starts_with(dir) && files.contains(path.strip_prefix(dir).unwrap())
+        })
+    }
 }
 
 impl<T> From<T> for State

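The `contains` check walks the wrapped map of directory to file set. Here is a standalone sketch of the same logic against a plain map; the inner type is inferred from the `(dir, files)` destructuring and the imports, so treat the exact shape as an assumption:

use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};

// Assumed mirror of State's inner type: directory -> file paths relative to it.
type StateMap = HashMap<PathBuf, HashSet<PathBuf>>;

fn contains<P: AsRef<Path>>(state: &StateMap, path: P) -> bool {
    let path = path.as_ref();
    state.iter().any(|(dir, files)| {
        // strip_prefix only fails when path doesn't start with dir, which
        // starts_with has already ruled out, so the unwrap cannot panic.
        path.starts_with(dir) && files.contains(path.strip_prefix(dir).unwrap())
    })
}

fn main() {
    let mut state: StateMap = HashMap::new();
    state.insert(
        PathBuf::from("worlds"),
        HashSet::from([PathBuf::from("region/r.0.0.mca")]),
    );

    assert!(contains(&state, "worlds/region/r.0.0.mca"));
    assert!(!contains(&state, "config/server.properties"));
    println!("ok");
}
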
@@ -14,6 +14,8 @@ pub enum BackupCommands {
     Create(BackupCreateArgs),
     /// Restore a backup
     Restore(BackupRestoreArgs),
+    /// Export a backup into a full archive
+    Export(BackupExportArgs),
 }
 
 #[derive(Args)]

@@ -36,13 +38,13 @@ pub struct BackupListArgs {
 
 #[derive(Args)]
 pub struct BackupRestoreArgs {
-    /// Path to the backup inside the backup directory
+    /// Path to the backup inside the backup directory to restore
     path: PathBuf,
     /// Directory to store config in
     output_config: PathBuf,
     /// Directory to store worlds in
     output_worlds: PathBuf,
-    /// Whether to overwrite the contents of the existing directories
+    /// Whether to overwrite the contents of the output directories
     #[arg(short, long, default_value_t = false)]
     force: bool,
     /// Create output directories if they don't exist

@@ -50,12 +52,24 @@ pub struct BackupRestoreArgs {
     make: bool,
 }
 
+#[derive(Args)]
+pub struct BackupExportArgs {
+    /// Path to the backup inside the backup directory to export
+    path: PathBuf,
+    /// Path to store the exported archive
+    output: PathBuf,
+    /// Create output directories if they don't exist
+    #[arg(short, long, default_value_t = false)]
+    make: bool,
+}
+
 impl BackupArgs {
     pub fn run(&self, cli: &Cli) -> io::Result<()> {
         match &self.command {
             BackupCommands::Create(args) => args.run(cli),
             BackupCommands::List(args) => args.run(cli),
             BackupCommands::Restore(args) => args.run(cli),
+            BackupCommands::Export(args) => args.run(cli),
         }
     }
 }

@@ -171,3 +185,48 @@ impl BackupListArgs {
         Ok(())
     }
 }
+
+impl BackupExportArgs {
+    pub fn run(&self, cli: &Cli) -> io::Result<()> {
+        let backup_dir = cli.backup.canonicalize()?;
+
+        if self.make {
+            if let Some(parent) = &self.output.parent() {
+                std::fs::create_dir_all(parent)?;
+            }
+        }
+
+        // Parse input path
+        let path = self.path.canonicalize()?;
+
+        if !path.starts_with(&backup_dir) {
+            return Err(other("Provided file is not inside the backup directory."));
+        }
+
+        let layer = if let Some(parent) = path.parent() {
+            // Backup files should be stored nested inside a layer's folder
+            if parent != backup_dir {
+                parent.file_name().unwrap().to_string_lossy()
+            } else {
+                return Err(other("Invalid path."));
+            }
+        } else {
+            return Err(other("Invalid path."));
+        };
+
+        let timestamp = if let Some(filename) = path.file_name() {
+            Utc.datetime_from_str(&filename.to_string_lossy(), Backup::FILENAME_FORMAT)
+                .map_err(|_| other("Invalid filename."))?
+        } else {
+            return Err(other("Invalid filename."));
+        };
+
+        let meta = cli.meta()?;
+
+        if let Some(res) = meta.export_backup(&layer, timestamp, &self.output) {
+            res
+        } else {
+            Err(other("Unknown layer"))
+        }
+    }
+}

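The path-to-(layer, timestamp) parsing at the heart of `run` can be exercised on its own. A sketch with a hypothetical filename format, since `Backup::FILENAME_FORMAT` is defined elsewhere in the crate:

use chrono::{TimeZone, Utc};
use std::path::Path;

// Placeholder only; the real constant is Backup::FILENAME_FORMAT.
const FILENAME_FORMAT: &str = "%Y-%m-%d_%H-%M-%S";

fn main() {
    // Shaped like <backup_dir>/<layer>/<timestamp>:
    let path = Path::new("/backups/daily/2023-05-01_03-00-00");

    // The layer is the name of the directory directly containing the backup.
    let layer = path.parent().and_then(|p| p.file_name()).unwrap();

    // The timestamp is parsed straight out of the file name.
    let timestamp = Utc
        .datetime_from_str(&path.file_name().unwrap().to_string_lossy(), FILENAME_FORMAT)
        .expect("invalid filename");

    println!("layer = {:?}, timestamp = {}", layer, timestamp);
}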