refactor: have fun with rust's functional stuff

parent fc8e8d37d3
commit 1acfc9c422
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

 ## [Unreleased](https://git.rustybever.be/Chewing_Bever/alex/src/branch/dev)

+### Added
+
+* Export command to export any backup as a new full backup
+
 ## [0.3.0](https://git.rustybever.be/Chewing_Bever/alex/src/tag/0.3.0)

 ### Added
@@ -4,7 +4,7 @@ mod meta;
 pub use config::ManagerConfig;
 pub use meta::MetaManager;

-use super::{Backup, Delta, State};
+use super::{Backup, BackupType, Delta, State};
 use crate::other;
 use chrono::SubsecRound;
 use chrono::Utc;
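The only change to the imports is pulling `BackupType` into scope; the new `export_backup` further down matches on it. Its definition is not part of this diff, but going by the two variants used there, a minimal sketch of its shape could look like the following (hypothetical; the real enum lives elsewhere in the crate and may carry more information):

    // Hypothetical sketch, not taken from the diff: only the two variants
    // matched in export_backup (Full and Incremental) are assumed to exist.
    pub enum BackupType {
        /// A self-contained archive of the full state
        Full,
        /// An archive containing only changes relative to earlier backups
        Incremental,
    }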
@@ -162,6 +162,25 @@ where
         chrono::offset::Utc::now()
     }

+    /// Search for a chain containing a backup with the specified start time.
+    ///
+    /// # Returns
+    ///
+    /// A tuple (chain, index) with index being the index of the found backup in the returned
+    /// chain.
+    fn find(&self, start_time: chrono::DateTime<Utc>) -> Option<(&Vec<Backup<T>>, usize)> {
+        for chain in &self.chains {
+            if let Some(index) = chain
+                .iter()
+                .position(|b| b.start_time.trunc_subsecs(0) == start_time)
+            {
+                return Some((chain, index));
+            }
+        }
+
+        None
+    }
+
     /// Restore the backup with the given start time by restoring its chain up to and including the
     /// backup, in order.
     pub fn restore_backup(
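The comparison in `find` truncates the stored `start_time` to whole seconds before matching. As the call to `trunc_subsecs(0)` suggests, a start time supplied from outside typically carries no sub-second part, while a stored timestamp does. A tiny self-contained sketch of that behaviour (not part of the diff, assumes a recent chrono 0.4):

    use chrono::{SubsecRound, Utc};

    fn main() {
        // A freshly taken timestamp usually has nanosecond precision...
        let stored = Utc::now();
        // ...while a timestamp parsed from user input typically has whole seconds only.
        let requested = stored.trunc_subsecs(0);

        // A direct comparison fails whenever the fractional part is non-zero,
        // but truncating the stored value first makes the two equal, which is what
        // position(|b| b.start_time.trunc_subsecs(0) == start_time) relies on.
        assert_eq!(stored.trunc_subsecs(0), requested);
    }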
@@ -169,62 +188,57 @@ where
         start_time: chrono::DateTime<Utc>,
         dirs: &Vec<(PathBuf, PathBuf)>,
     ) -> io::Result<()> {
-        // Iterate over each chain, skipping elements until the element with the given start time
-        // is possibly found.
-        for chain in &self.chains {
-            // If we find the element in the chain, restore the entire chain up to and including
-            // the element
-            if let Some(index) = chain
-                .iter()
-                .position(|b| b.start_time.trunc_subsecs(0) == start_time)
-            {
+        self.find(start_time)
+            .ok_or_else(|| other("Unknown layer."))
+            .and_then(|(chain, index)| {
                 for backup in chain.iter().take(index + 1) {
                     backup.restore(&self.backup_dir, dirs)?;
                 }

-                return Ok(());
-            }
-        }
-
-        Err(other("Unknown backup."))
+                Ok(())
+            })
     }

+    /// Export the backup with the given start time as a new full archive.
     pub fn export_backup<P: AsRef<Path>>(
         &self,
         start_time: chrono::DateTime<Utc>,
         output_path: P,
     ) -> io::Result<()> {
-        // Iterate over each chain, skipping elements until the element with the given start time
-        // is possibly found.
-        for chain in &self.chains {
-            // If we find the element in the chain, restore the entire chain up to and including
-            // the element
-            if let Some(index) = chain
-                .iter()
-                .position(|b| b.start_time.trunc_subsecs(0) == start_time)
-            {
-                let contributions =
-                    Delta::contributions(chain.iter().take(index + 1).map(|b| &b.delta).rev());
+        self.find(start_time)
+            .ok_or_else(|| other("Unknown layer."))
+            .and_then(|(chain, index)| {
+                match chain[index].type_ {
+                    // A full backup is simply copied to the output path
+                    BackupType::Full => std::fs::copy(
+                        Backup::path(&self.backup_dir, chain[index].start_time),
+                        output_path,
+                    )
+                    .map(|_| ()),
+                    // Incremental backups are exported one by one according to their contribution
+                    BackupType::Incremental => {
+                        let contributions = Delta::contributions(
+                            chain.iter().take(index + 1).map(|b| &b.delta).rev(),
+                        );

                         let tar_gz = OpenOptions::new()
                             .write(true)
                             .create(true)
                             .open(output_path.as_ref())?;
                         let enc = GzEncoder::new(tar_gz, Compression::default());
                         let mut ar = tar::Builder::new(enc);

                         for (contribution, backup) in
                             contributions.iter().rev().zip(chain.iter().take(index + 1))
                         {
                             backup.append(&self.backup_dir, contribution, &mut ar)?;
                         }

-                let mut enc = ar.into_inner()?;
-
-                return enc.try_finish();
-            }
-        }
-
-        Err(other("Unknown backup."))
+                        let mut enc = ar.into_inner()?;
+                        enc.try_finish()
+                    }
+                }
+            })
     }

     /// Get a reference to the underlying chains
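The core move of this refactor is the same in both methods: replace the imperative "loop, early-return on a hit, fall through to Err" shape with `find(...)` returning an `Option`, `ok_or_else` to turn a miss into an `io::Error`, and `and_then` so the happy path can keep using `?`. A standalone sketch of that shape, with illustrative names (`lookup`, `process`) and assuming `crate::other` builds an `io::Error`, as its use with `ok_or_else` suggests:

    use std::io;

    // Stand-in for crate::other(): wrap a message in an io::Error of kind Other (assumed).
    fn other(msg: &str) -> io::Error {
        io::Error::new(io::ErrorKind::Other, msg)
    }

    // Hypothetical lookup returning Option, like MetaManager::find.
    fn lookup(values: &[u32], wanted: u32) -> Option<usize> {
        values.iter().position(|&v| v == wanted)
    }

    // Same shape as the refactored restore_backup/export_backup:
    // Option -> Result via ok_or_else, then and_then for the fallible happy path.
    fn process(values: &[u32], wanted: u32) -> io::Result<()> {
        lookup(values, wanted)
            .ok_or_else(|| other("Unknown value."))
            .and_then(|index| {
                println!("found {} at index {}", wanted, index);
                Ok(())
            })
    }

    fn main() -> io::Result<()> {
        let values = [3, 5, 8];
        process(&values, 5)?;
        assert!(process(&values, 42).is_err());
        Ok(())
    }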
@@ -129,16 +129,15 @@ pub trait PathExt {

 impl PathExt for Path {
     fn not_modified_since(&self, timestamp: chrono::DateTime<Utc>) -> bool {
-        if let Ok(metadata) = self.metadata() {
-            if let Ok(last_modified) = metadata.modified() {
+        self.metadata()
+            .and_then(|m| m.modified())
+            .map(|last_modified| {
                 let t: chrono::DateTime<Utc> = last_modified.into();
                 let t = t.with_timezone(&Local);

-                return t < timestamp;
-            }
-        }
-
-        false
+                t < timestamp
+            })
+            .unwrap_or(false)
     }

     fn read_dir_recursive(&self) -> io::Result<ReadDirRecursive> {
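The same chaining idea applies here: `metadata()` and `modified()` both return `io::Result`, so `and_then` threads the two fallible calls, `map` performs the comparison, and `unwrap_or(false)` treats any I/O error as "not modified". A standalone sketch of the pattern, not taken from the crate (the file paths and the one-hour cutoff are only illustrative, and the timestamp is kept as `DateTime<Local>` for simplicity):

    use chrono::{Duration, Local, Utc};
    use std::path::Path;

    // Mirrors the refactored not_modified_since: chain the two fallible calls,
    // map to a bool, and fall back to false when metadata cannot be read.
    fn not_modified_since(path: &Path, timestamp: chrono::DateTime<Local>) -> bool {
        path.metadata()
            .and_then(|m| m.modified())
            .map(|last_modified| {
                let t: chrono::DateTime<Utc> = last_modified.into();
                t.with_timezone(&Local) < timestamp
            })
            .unwrap_or(false)
    }

    fn main() {
        // Illustrative only: any existing file path will do.
        let cutoff = Local::now() + Duration::hours(1);
        println!("{}", not_modified_since(Path::new("Cargo.toml"), cutoff));
        // A missing path short-circuits to false via unwrap_or.
        assert!(!not_modified_since(Path::new("/no/such/file"), cutoff));
    }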