From 1acfc9c4225231bf326f443d7c5d3744c495e0c7 Mon Sep 17 00:00:00 2001
From: Chewing_Bever
Date: Sat, 8 Jul 2023 13:30:11 +0200
Subject: [PATCH] refactor: have fun with rust's functional stuff

---
 CHANGELOG.md              |   4 ++
 src/backup/manager/mod.rs | 100 ++++++++++++++++++++++----------------
 src/backup/path.rs        |  13 +++--
 3 files changed, 67 insertions(+), 50 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2c9711d..d4de051 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased](https://git.rustybever.be/Chewing_Bever/alex/src/branch/dev)
 
+### Added
+
+* Export command to export any backup as a new full backup
+
 ## [0.3.0](https://git.rustybever.be/Chewing_Bever/alex/src/tag/0.3.0)
 
 ### Added
diff --git a/src/backup/manager/mod.rs b/src/backup/manager/mod.rs
index 4cf01f0..db53ad8 100644
--- a/src/backup/manager/mod.rs
+++ b/src/backup/manager/mod.rs
@@ -4,7 +4,7 @@ mod meta;
 pub use config::ManagerConfig;
 pub use meta::MetaManager;
 
-use super::{Backup, Delta, State};
+use super::{Backup, BackupType, Delta, State};
 use crate::other;
 use chrono::SubsecRound;
 use chrono::Utc;
@@ -162,6 +162,25 @@ where
         chrono::offset::Utc::now()
     }
 
+    /// Search for a chain containing a backup with the specified start time.
+    ///
+    /// # Returns
+    ///
+    /// A tuple (chain, index) with index being the index of the found backup in the returned
+    /// chain.
+    fn find(&self, start_time: chrono::DateTime<Utc>) -> Option<(&Vec<Backup<T>>, usize)> {
+        for chain in &self.chains {
+            if let Some(index) = chain
+                .iter()
+                .position(|b| b.start_time.trunc_subsecs(0) == start_time)
+            {
+                return Some((chain, index));
+            }
+        }
+
+        None
+    }
+
     /// Restore the backup with the given start time by restoring its chain up to and including the
     /// backup, in order.
     pub fn restore_backup(
@@ -169,62 +188,57 @@ where
         start_time: chrono::DateTime<Utc>,
         dirs: &Vec<(PathBuf, PathBuf)>,
     ) -> io::Result<()> {
-        // Iterate over each chain, skipping elements until the element with the given start time
-        // is possibly found.
-        for chain in &self.chains {
-            // If we find the element in the chain, restore the entire chain up to and including
-            // the element
-            if let Some(index) = chain
-                .iter()
-                .position(|b| b.start_time.trunc_subsecs(0) == start_time)
-            {
+        self.find(start_time)
+            .ok_or_else(|| other("Unknown layer."))
+            .and_then(|(chain, index)| {
                 for backup in chain.iter().take(index + 1) {
                     backup.restore(&self.backup_dir, dirs)?;
                 }
 
-                return Ok(());
-            }
-        }
-
-        Err(other("Unknown backup."))
+                Ok(())
+            })
     }
 
+    /// Export the backup with the given start time as a new full archive.
     pub fn export_backup<P: AsRef<Path>>(
         &self,
         start_time: chrono::DateTime<Utc>,
         output_path: P,
     ) -> io::Result<()> {
-        // Iterate over each chain, skipping elements until the element with the given start time
-        // is possibly found.
-        for chain in &self.chains {
-            // If we find the element in the chain, restore the entire chain up to and including
-            // the element
-            if let Some(index) = chain
-                .iter()
-                .position(|b| b.start_time.trunc_subsecs(0) == start_time)
-            {
-                let contributions =
-                    Delta::contributions(chain.iter().take(index + 1).map(|b| &b.delta).rev());
+        self.find(start_time)
+            .ok_or_else(|| other("Unknown layer."))
+            .and_then(|(chain, index)| {
+                match chain[index].type_ {
+                    // A full backup is simply copied to the output path
+                    BackupType::Full => std::fs::copy(
+                        Backup::path(&self.backup_dir, chain[index].start_time),
+                        output_path,
+                    )
+                    .map(|_| ()),
+                    // Incremental backups are exported one by one according to their contribution
+                    BackupType::Incremental => {
+                        let contributions = Delta::contributions(
+                            chain.iter().take(index + 1).map(|b| &b.delta).rev(),
+                        );
 
-                let tar_gz = OpenOptions::new()
-                    .write(true)
-                    .create(true)
-                    .open(output_path.as_ref())?;
-                let enc = GzEncoder::new(tar_gz, Compression::default());
-                let mut ar = tar::Builder::new(enc);
+                        let tar_gz = OpenOptions::new()
+                            .write(true)
+                            .create(true)
+                            .open(output_path.as_ref())?;
+                        let enc = GzEncoder::new(tar_gz, Compression::default());
+                        let mut ar = tar::Builder::new(enc);
 
-                for (contribution, backup) in
-                    contributions.iter().rev().zip(chain.iter().take(index + 1))
-                {
-                    backup.append(&self.backup_dir, contribution, &mut ar)?;
+                        for (contribution, backup) in
+                            contributions.iter().rev().zip(chain.iter().take(index + 1))
+                        {
+                            backup.append(&self.backup_dir, contribution, &mut ar)?;
+                        }
+
+                        let mut enc = ar.into_inner()?;
+                        enc.try_finish()
+                    }
                 }
-
-                let mut enc = ar.into_inner()?;
-                return enc.try_finish();
-            }
-        }
-
-        Err(other("Unknown backup."))
+            })
     }
 
     /// Get a reference to the underlying chains
diff --git a/src/backup/path.rs b/src/backup/path.rs
index b8b5ae9..77c6883 100644
--- a/src/backup/path.rs
+++ b/src/backup/path.rs
@@ -129,16 +129,15 @@ pub trait PathExt {
 
 impl PathExt for Path {
     fn not_modified_since(&self, timestamp: chrono::DateTime<Local>) -> bool {
-        if let Ok(metadata) = self.metadata() {
-            if let Ok(last_modified) = metadata.modified() {
+        self.metadata()
+            .and_then(|m| m.modified())
+            .map(|last_modified| {
                 let t: chrono::DateTime<Utc> = last_modified.into();
                 let t = t.with_timezone(&Local);
 
-                return t < timestamp;
-            }
-        }
-
-        false
+                t < timestamp
+            })
+            .unwrap_or(false)
     }
 
     fn read_dir_recursive(&self) -> io::Result<ReadDirRecursive> {
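
P.S. Below is a minimal, self-contained sketch of the loop-to-combinators pattern the two
rewritten methods use: `Iterator::position` to locate the backup, then `ok_or_else` +
`and_then` in place of an early-return loop. `Backup`, `Manager`, and `other` here are
simplified stand-ins for illustration only, not the crate's actual definitions.

    use std::io;

    // Stand-in for the crate's Backup struct; only the field the search needs.
    struct Backup {
        start_time: i64,
    }

    // Stand-in for the crate's Manager; chains of incremental backups.
    struct Manager {
        chains: Vec<Vec<Backup>>,
    }

    // Helper mirroring crate::other: wrap a message in an io::Error.
    fn other(msg: &str) -> io::Error {
        io::Error::new(io::ErrorKind::Other, msg)
    }

    impl Manager {
        // Locate the chain containing the backup with the given start time,
        // returning the chain and the backup's index within it.
        fn find(&self, start_time: i64) -> Option<(&Vec<Backup>, usize)> {
            for chain in &self.chains {
                if let Some(index) = chain.iter().position(|b| b.start_time == start_time) {
                    return Some((chain, index));
                }
            }

            None
        }

        // The combinator half of the refactor: turn the Option into an io::Result
        // and run the fallible restore logic only when a backup was found.
        fn restore_backup(&self, start_time: i64) -> io::Result<()> {
            self.find(start_time)
                .ok_or_else(|| other("Unknown backup."))
                .and_then(|(chain, index)| {
                    for backup in chain.iter().take(index + 1) {
                        // A real implementation would restore the backup here.
                        println!("restoring backup started at {}", backup.start_time);
                    }

                    Ok(())
                })
        }
    }

    fn main() -> io::Result<()> {
        let manager = Manager {
            chains: vec![vec![Backup { start_time: 1 }, Backup { start_time: 2 }]],
        };

        manager.restore_backup(2)
    }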