fix: actually working incremental backup
ci/woodpecker/push/lint Pipeline was successful Details
ci/woodpecker/push/build Pipeline was successful Details
ci/woodpecker/push/clippy Pipeline failed Details

incremental-backups
Jef Roosens 2023-06-15 09:56:40 +02:00
parent a9e7b215d1
commit d204c68400
Signed by: Jef Roosens
GPG Key ID: B75D4F293C7052DB
1 changed file with 22 additions and 17 deletions

View File

@ -38,9 +38,12 @@ fn files(src_dir: PathBuf) -> io::Result<HashSet<PathBuf>> {
Ok(files) Ok(files)
} }
/// Return false only if we can say with certainty that the file wasn't modified since the given /// Check whether a file has been modified since the given timestamp.
/// timestamp, true otherwise. ///
fn modified_since<T: AsRef<Path>>(time: chrono::DateTime<Utc>, path: T) -> bool { /// Note that this function will *only* return true if it can determine with certainty that the
/// file has not been modified. If any errors occur while obtaining the required metadata (e.g. if
/// the file system does not support this metadata), this function will return false.
fn not_modified_since<T: AsRef<Path>>(time: chrono::DateTime<Utc>, path: T) -> bool {
let path = path.as_ref(); let path = path.as_ref();
if let Ok(metadata) = path.metadata() { if let Ok(metadata) = path.metadata() {
@ -50,14 +53,14 @@ fn modified_since<T: AsRef<Path>>(time: chrono::DateTime<Utc>, path: T) -> bool
let t: chrono::DateTime<Utc> = last_modified.into(); let t: chrono::DateTime<Utc> = last_modified.into();
let t = t.with_timezone(&Local); let t = t.with_timezone(&Local);
return t >= time; return t < time;
} }
} }
false false
} }
#[derive(PartialEq)] #[derive(Debug, PartialEq)]
pub enum BackupType { pub enum BackupType {
Full, Full,
Incremental, Incremental,
@ -71,6 +74,7 @@ pub enum BackupError {
type BackupResult<T> = Result<T, BackupError>; type BackupResult<T> = Result<T, BackupError>;
/// Represents the changes relative to the previous backup /// Represents the changes relative to the previous backup
#[derive(Debug)]
pub struct BackupDelta { pub struct BackupDelta {
/// What files were added/modified in each part of the tarball. /// What files were added/modified in each part of the tarball.
pub added: HashMap<PathBuf, HashSet<PathBuf>>, pub added: HashMap<PathBuf, HashSet<PathBuf>>,
@ -142,6 +146,7 @@ impl BackupDelta {
} }
/// Represents a successful backup /// Represents a successful backup
#[derive(Debug)]
pub struct Backup { pub struct Backup {
previous: Option<Arc<Backup>>, previous: Option<Arc<Backup>>,
/// When the backup was started (also corresponds to the name) /// When the backup was started (also corresponds to the name)
@ -199,7 +204,7 @@ impl Backup {
let files = files(src_dir.clone())?; let files = files(src_dir.clone())?;
for path in &files { for path in &files {
ar.append_path_with_name(dir_in_tar.join(path), src_dir.join(path))?; ar.append_path_with_name(src_dir.join(path), dir_in_tar.join(path))?;
} }
added.insert(dir_in_tar, files); added.insert(dir_in_tar, files);
@ -231,6 +236,7 @@ impl Backup {
let enc = GzEncoder::new(tar_gz, Compression::default()); let enc = GzEncoder::new(tar_gz, Compression::default());
let mut ar = tar::Builder::new(enc); let mut ar = tar::Builder::new(enc);
// TODO remove unwrap
let previous_state = previous.state().unwrap(); let previous_state = previous.state().unwrap();
let mut delta = BackupDelta::new(); let mut delta = BackupDelta::new();
@ -238,12 +244,14 @@ impl Backup {
let files = files(src_dir.clone())?; let files = files(src_dir.clone())?;
let added_files = files let added_files = files
.iter() .iter()
.filter(|p| modified_since(previous.start_time, p)) // This explicit negation is because we wish to also include files for which we
// couldn't determine the last modified time
.filter(|p| !not_modified_since(previous.start_time, src_dir.join(p)))
.cloned() .cloned()
.collect::<HashSet<PathBuf>>(); .collect::<HashSet<PathBuf>>();
for path in added_files.iter() { for path in added_files.iter() {
ar.append_path_with_name(dir_in_tar.join(path), src_dir.join(path))?; ar.append_path_with_name(src_dir.join(path), dir_in_tar.join(path))?;
} }
delta.added.insert(dir_in_tar.clone(), added_files); delta.added.insert(dir_in_tar.clone(), added_files);
@ -295,16 +303,13 @@ impl BackupManager {
(PathBuf::from("worlds"), self.world_dir.clone()), (PathBuf::from("worlds"), self.world_dir.clone()),
]; ];
if let Some(last_backup) = &self.last_backup { let backup = if let Some(last_backup) = &self.last_backup {
let clone = last_backup.clone(); Backup::create_from(Arc::clone(last_backup), &self.backup_dir, dirs)?
self.last_backup = Some(Arc::new(Backup::create_from(
clone,
&self.backup_dir,
dirs,
)?));
} else { } else {
self.last_backup = Some(Arc::new(Backup::create(&self.backup_dir, dirs)?)); Backup::create(&self.backup_dir, dirs)?
} };
self.last_backup = Some(Arc::new(backup));
Ok(()) Ok(())
} }