use flate2::write::GzEncoder;
use flate2::Compression;
use std::fs::File;
use std::io;
use std::path::{Path, PathBuf};

#[link(name = "c")]
extern "C" {
    fn geteuid() -> u32;
    fn getegid() -> u32;
}

static FILENAME_FORMAT: &str = "%Y-%m-%d_%H-%M-%S.tar.gz";

pub struct BackupManager {
    backup_dir: PathBuf,
    config_dir: PathBuf,
    world_dir: PathBuf,
    max_backups: u64,
    start_time: Option<chrono::DateTime<chrono::Local>>,
}

impl BackupManager {
    pub fn open(
        backup_dir: PathBuf,
        config_dir: PathBuf,
        world_dir: PathBuf,
        max_backups: u64,
    ) -> Self {
        BackupManager {
            backup_dir,
            config_dir,
            world_dir,
            max_backups,
            start_time: None,
        }
    }

    pub fn create_archive(&mut self) -> io::Result<()> {
        let start_time = chrono::offset::Local::now();
        self.start_time = Some(start_time);

        // The timestamped filename doubles as the sort key used by `remove_old_backups`
        let filename = format!("{}", start_time.format(FILENAME_FORMAT));
        let path = self.backup_dir.join(filename);
        let tar_gz = File::create(path)?;
        let enc = GzEncoder::new(tar_gz, Compression::default());
        let mut tar = tar::Builder::new(enc);
        tar.append_dir_all("worlds", &self.world_dir)?;

        // Add all files from the config directory that aren't the cache
        for entry in self
            .config_dir
            .read_dir()?
            .filter_map(|e| e.ok())
            .filter(|e| e.file_name() != "cache")
        {
            let tar_path = Path::new("config").join(entry.file_name());
            if entry.file_type()?.is_dir() {
                tar.append_dir_all(tar_path, entry.path())?;
            } else {
                tar.append_path_with_name(entry.path(), tar_path)?;
            }
        }

        // TODO re-add this info file in some way
        // We add a file to the backup describing for what version it was made
        // let info = format!("{} {}", self.type_, self.version);
        // let info_bytes = info.as_bytes();
        // let mut header = tar::Header::new_gnu();
        // header.set_size(info_bytes.len().try_into().unwrap());
        // header.set_mode(0o100644);
        // unsafe {
        //     header.set_gid(getegid().into());
        //     header.set_uid(geteuid().into());
        // }
        // tar.append_data(&mut header, "info.txt", info_bytes)?;

        // Finish the archive explicitly so that any I/O error surfaces here
        // instead of being silently discarded when the writers are dropped
        tar.into_inner()?.finish()?;

        Ok(())
    }

    /// Remove the oldest backups
    pub fn remove_old_backups(&mut self) -> std::io::Result<()> {
        // The naming format used allows us to sort the backups by name and still get a sorting by
        // creation time
        let mut backups = self
            .backup_dir
            .read_dir()?
            .filter_map(|res| res.map(|e| e.path()).ok())
            .collect::<Vec<_>>();
        backups.sort();

        let max_backups: usize = self.max_backups.try_into().unwrap();
        if backups.len() > max_backups {
            let excess_backups = backups.len() - max_backups;
            for backup in &backups[0..excess_backups] {
                std::fs::remove_file(backup)?;
            }
        }

        Ok(())
    }
}
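
// A minimal usage sketch, kept behind #[cfg(test)] and #[ignore] because it
// touches the filesystem. The "backups", "config", and "worlds" paths and the
// limit of five archives are illustrative assumptions, not values taken from
// the original project.
#[cfg(test)]
mod usage_sketch {
    use super::BackupManager;
    use std::path::PathBuf;

    #[test]
    #[ignore] // writes to the local filesystem; run manually if the directories exist
    fn create_and_prune_backups() -> std::io::Result<()> {
        let mut manager = BackupManager::open(
            PathBuf::from("backups"), // directory that receives the .tar.gz archives
            PathBuf::from("config"),  // archived under "config/", minus the "cache" entry
            PathBuf::from("worlds"),  // archived under "worlds/"
            5,                        // keep at most five archives on disk
        );
        manager.create_archive()?;
        manager.remove_old_backups()?;
        Ok(())
    }
}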