wip
ci/woodpecker/push/lint Pipeline failed Details
ci/woodpecker/push/clippy Pipeline failed Details
ci/woodpecker/push/build Pipeline was successful Details

Jef Roosens 2023-06-13 15:09:07 +02:00
parent b1c0bbb3af
commit 42c7a7cc5b
2 changed files with 44 additions and 15 deletions

View File

@@ -1,3 +1,3 @@
[alias]
runs = "run -- paper 1.19.4-545 --config data/config --backup data/backups --world data/worlds --jar data/paper.jar"
runrs = "run --release -- paper 1.19.4-545 --config data/config --backup data/backups --world data/worlds --jar data/paper.jar"
runs = "run -- paper 1.19.4-545 --config data/config --backup data/backups --world data/worlds --jar data/paper-1.19.4-525.jar"
runrs = "run --release -- paper 1.19.4-545 --config data/config --backup data/backups --world data/worlds --jar data/paper-1.19.4-525.jar"

View File

@@ -3,6 +3,8 @@ use flate2::Compression;
use std::fs::File;
use std::io;
use std::path::{Path, PathBuf};
use chrono::{Utc, Local};
use std::collections::HashSet;
#[link(name = "c")]
extern "C" {
@@ -17,8 +19,10 @@ pub struct BackupManager {
config_dir: PathBuf,
world_dir: PathBuf,
max_backups: u64,
start_time: Option<chrono::DateTime<chrono::Utc>>,
files: Vec<(PathBuf, PathBuf)>
/// Start time of the last successful backup
last_start_time: Option<chrono::DateTime<chrono::Utc>>,
/// Files contained in the last successful backup
last_files: HashSet<(PathBuf, PathBuf)>
}
impl BackupManager {
@@ -33,17 +37,17 @@ impl BackupManager {
config_dir,
world_dir,
max_backups,
start_time: None,
files: Vec::new()
last_start_time: None,
last_files: HashSet::new()
}
}
fn set_files_to_backup(&mut self) -> io::Result<()> {
fn files_to_backup(&mut self) -> io::Result<HashSet<(PathBuf, PathBuf)>> {
let mut dirs = vec![
(PathBuf::from("worlds"), self.world_dir.clone()),
(PathBuf::from("config"), self.config_dir.clone()),
];
self.files.clear();
let mut files: HashSet<(PathBuf, PathBuf)> = HashSet::new();
while let Some((path_in_tar, path)) = dirs.pop() {
for res in path.read_dir()? {
@@ -60,28 +64,49 @@ impl BackupManager {
if entry.file_type()?.is_dir() {
dirs.push((new_path_in_tar, entry.path()));
} else {
self.files.push((new_path_in_tar, entry.path()));
// Only add files that have been updated since the last backup (incremental backup)
if let Some(last_start_time) = self.last_start_time {
let last_modified = entry.path().metadata()?.modified();
if let Ok(last_modified) = last_modified {
let t: chrono::DateTime<Utc> = last_modified.into();
let t = t.with_timezone(&Local);
if t < last_start_time {
continue
}
}
}
files.insert((new_path_in_tar, entry.path()));
}
}
}
Ok(())
Ok(files)
}
pub fn create_archive(&mut self) -> io::Result<()> {
let start_time = chrono::offset::Utc::now();
self.start_time = Some(start_time);
let filename = format!("{}", start_time.format(FILENAME_FORMAT));
let path = self.backup_dir.join(filename);
let tar_gz = File::create(path)?;
let enc = GzEncoder::new(tar_gz, Compression::default());
let mut tar = tar::Builder::new(enc);
let mut ar = tar::Builder::new(enc);
self.set_files_to_backup()?;
let files = self.files_to_backup()?;
for (path_in_tar, path) in &self.files {
tar.append_path_with_name(path, path_in_tar)?;
for (path_in_tar, path) in &files {
ar.append_path_with_name(path, path_in_tar)?;
}
let deleted_files = self.last_files.difference(&files);
println!("{} {}", files.len(), self.last_files.len());
for (path_in_tar, path) in deleted_files {
println!("{path_in_tar:?}: {path:?}");
}
// TODO re-add this info file in some way
@@ -98,6 +123,10 @@ impl BackupManager {
// }
// tar.append_data(&mut header, "info.txt", info_bytes)?;
// After a successful backup, we store the original metadata
self.last_start_time = Some(start_time);
self.last_files = files;
Ok(())
}