mod delta;
mod manager;
mod path;

use delta::Delta;
pub use manager::Manager;

use chrono::Utc;
use flate2::write::GzEncoder;
use flate2::Compression;
use path::PathExt;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io;
use std::path::{Path, PathBuf};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub enum BackupType {
    Full,
    Incremental,
}

/// Represents a successful backup
#[derive(Serialize, Deserialize)]
pub struct Backup<T> {
    /// When the backup was started (also corresponds to the name)
    pub start_time: chrono::DateTime<Utc>,
    /// Type of the backup
    pub type_: BackupType,
    pub delta: Delta,
    /// Additional metadata that can be associated with a given backup
    pub metadata: Option<T>,
}

impl Backup<()> {
    /// Return the path to a backup file by properly formatting the data.
    pub fn path<P: AsRef<Path>>(backup_dir: P, start_time: chrono::DateTime<Utc>) -> PathBuf {
        let backup_dir = backup_dir.as_ref();

        let filename = format!("{}", start_time.format(Self::FILENAME_FORMAT));
        backup_dir.join(filename)
    }
}

impl<T> Backup<T> {
    const FILENAME_FORMAT: &str = "%Y-%m-%d_%H-%M-%S.tar.gz";

    pub fn set_metadata(&mut self, metadata: T) {
        self.metadata = Some(metadata);
    }

    /// Resolve the state of the list of backups by applying their deltas in-order to an
    /// initially empty state.
    pub fn state(backups: &Vec<Self>) -> HashMap<PathBuf, HashSet<PathBuf>> {
        let mut state: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();

        for backup in backups {
            backup.delta.apply(&mut state);
        }

        state
    }
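    // A minimal usage sketch for `state` (hypothetical setup; assumes
    // `Delta::apply` adds each backup's `added` files to the map and drops
    // its `removed` files from it):
    //
    //     let backups: Vec<Backup<()>> = /* deserialized backup list */;
    //     let state = Backup::state(&backups);
    //     // `state` now maps each directory in the archive to the set of
    //     // files that exist after replaying every delta in order.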
    /// Create a new Full backup, populated with the given directories.
    ///
    /// # Arguments
    ///
    /// * `backup_dir` - Directory to store archive in
    /// * `dirs` - list of tuples `(path_in_tar, src_dir)` with `path_in_tar` the directory name
    ///   under which `src_dir`'s contents should be stored in the archive
    ///
    /// # Returns
    ///
    /// The `Backup` instance describing this new backup.
    pub fn create<P: AsRef<Path>>(
        backup_dir: P,
        dirs: Vec<(PathBuf, PathBuf)>,
    ) -> io::Result<Self> {
        let start_time = chrono::offset::Utc::now();

        let path = Backup::path(backup_dir, start_time);
        let tar_gz = File::create(path)?;
        let enc = GzEncoder::new(tar_gz, Compression::default());
        let mut ar = tar::Builder::new(enc);

        let mut delta = Delta::new();

        for (dir_in_tar, src_dir) in dirs {
            let mut added_files: HashSet<PathBuf> = HashSet::new();

            for entry in src_dir.read_dir_recursive()?.ignored("cache").files() {
                let path = entry?.path();
                let stripped = path.strip_prefix(&src_dir).unwrap();

                ar.append_path_with_name(&path, dir_in_tar.join(stripped))?;
                added_files.insert(stripped.to_path_buf());
            }

            delta.added.insert(dir_in_tar, added_files);
        }

        Ok(Backup {
            type_: BackupType::Full,
            start_time,
            delta,
            metadata: None,
        })
    }

    /// Create a new incremental backup from a given previous backup
    pub fn create_from<P: AsRef<Path>>(
        previous_state: HashMap<PathBuf, HashSet<PathBuf>>,
        previous_start_time: chrono::DateTime<Utc>,
        backup_dir: P,
        dirs: Vec<(PathBuf, PathBuf)>,
    ) -> io::Result<Self> {
        let start_time = chrono::offset::Utc::now();

        let path = Backup::path(backup_dir, start_time);
        let tar_gz = File::create(path)?;
        let enc = GzEncoder::new(tar_gz, Compression::default());
        let mut ar = tar::Builder::new(enc);

        let mut delta = Delta::new();

        for (dir_in_tar, src_dir) in dirs {
            let mut all_files: HashSet<PathBuf> = HashSet::new();
            let mut added_files: HashSet<PathBuf> = HashSet::new();

            for entry in src_dir.read_dir_recursive()?.ignored("cache").files() {
                let path = entry?.path();
                let stripped = path.strip_prefix(&src_dir).unwrap();

                // Only archive files that were modified since the previous backup
                if !path.not_modified_since(previous_start_time) {
                    ar.append_path_with_name(&path, dir_in_tar.join(stripped))?;
                    added_files.insert(stripped.to_path_buf());
                }

                all_files.insert(stripped.to_path_buf());
            }

            delta.added.insert(dir_in_tar.clone(), added_files);

            // Any file that was present in the previous state but is no longer on
            // disk was removed since the previous backup
            if let Some(previous_files) = previous_state.get(&dir_in_tar) {
                delta.removed.insert(
                    dir_in_tar,
                    previous_files.difference(&all_files).cloned().collect(),
                );
            }
        }

        Ok(Backup {
            type_: BackupType::Incremental,
            start_time,
            delta,
            metadata: None,
        })
    }
}
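// A minimal sketch (not part of the original module) showing how `Backup::path`
// composes an archive path from `FILENAME_FORMAT`; the timestamp and backup
// directory below are arbitrary, hypothetical values.
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::TimeZone;

    #[test]
    fn path_joins_formatted_start_time() {
        // Arbitrary fixed timestamp so the expected filename is deterministic
        let start_time = Utc.with_ymd_and_hms(2023, 8, 16, 12, 34, 56).unwrap();
        let path = Backup::path("/tmp/backups", start_time);

        assert_eq!(
            path,
            PathBuf::from("/tmp/backups/2023-08-16_12-34-56.tar.gz")
        );
    }
}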