feat: properly backup config directory
ci/woodpecker/push/release unknown status
ci/woodpecker/push/lint Pipeline was successful
ci/woodpecker/push/clippy Pipeline was successful
ci/woodpecker/push/build Pipeline was successful

incremental-backups
Jef Roosens 2023-06-06 20:12:42 +02:00
parent 640364405f
commit f5fc8b588f
Signed by: Jef Roosens
GPG Key ID: B75D4F293C7052DB
2 changed files with 20 additions and 9 deletions


@ -1,2 +1,3 @@
[alias]
runs = "run -- paper 1.19.4-545 --config data/config --backup data/backups --world data/worlds --jar data/paper.jar"
runrs = "run --release -- paper 1.19.4-545 --config data/config --backup data/backups --world data/worlds --jar data/paper.jar"


@ -2,7 +2,7 @@ use crate::server::ServerType;
use flate2::write::GzEncoder;
use flate2::Compression;
use std::io::Write;
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::process::Child;
#[link(name = "c")]
@ -108,11 +108,21 @@ impl ServerProcess {
tar.append_dir_all("worlds", &self.world_dir)?;
// We don't store all files in the config, as this would include caches
tar.append_path_with_name(
self.config_dir.join("server.properties"),
"config/server.properties",
)?;
// Add all files from the config directory that aren't the cache
for entry in self
.config_dir
.read_dir()?
.filter_map(|e| e.ok())
.filter(|e| e.file_name() != "cache")
{
let tar_path = Path::new("config").join(entry.file_name());
if entry.file_type()?.is_dir() {
tar.append_dir_all(tar_path, entry.path())?;
} else {
tar.append_path_with_name(entry.path(), tar_path)?;
}
}
// We add a file to the backup describing for what version it was made
let info = format!("{} {}", self.type_, self.version);
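For orientation, the new loop replaces the single server.properties copy with a selective walk of the whole config directory. Below is a minimal standalone sketch of the same idea, assuming the tar builder here is the tar crate's Builder wrapped around the flate2 GzEncoder imported above; the function name and paths are placeholders, not the project's actual API.

use std::fs::File;
use std::path::Path;

use flate2::write::GzEncoder;
use flate2::Compression;

fn backup_config(config_dir: &Path, out: &Path) -> std::io::Result<()> {
    // Wrap the archive in a gzip stream, as the real backup code does.
    let mut tar = tar::Builder::new(GzEncoder::new(File::create(out)?, Compression::default()));

    // Walk the top level of the config directory, skipping the cache entry.
    for entry in config_dir
        .read_dir()?
        .filter_map(|e| e.ok())
        .filter(|e| e.file_name() != "cache")
    {
        let tar_path = Path::new("config").join(entry.file_name());

        if entry.file_type()?.is_dir() {
            // Directories are archived recursively under the config/ prefix...
            tar.append_dir_all(tar_path, entry.path())?;
        } else {
            // ...while plain files are added one by one.
            tar.append_path_with_name(entry.path(), tar_path)?;
        }
    }

    // Finish the archive explicitly in this sketch; the real code relies on Drop instead.
    tar.into_inner()?.finish()?;
    Ok(())
}

Note the explicit finish at the end of the sketch; in the actual file the writers are finalized when they are dropped, as the comment in the next hunk points out.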
@ -128,8 +138,6 @@ impl ServerProcess {
        tar.append_data(&mut header, "info.txt", info_bytes)?;
        // tar.append_dir_all("config", &self.config_dir)?;
        // Backup file gets finalized in the drop
        Ok(())
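The `header` and `info_bytes` used by `append_data` are set up above the visible part of this hunk. A hedged sketch of how such an in-memory entry is typically appended with the tar crate, assuming a GNU header (the real setup may differ):

use std::io::Write;

// Hypothetical helper; the real method builds the header inline.
fn append_info<W: Write>(
    tar: &mut tar::Builder<W>,
    server_type: &str,
    version: &str,
) -> std::io::Result<()> {
    let info = format!("{} {}", server_type, version);
    let info_bytes = info.as_bytes();

    // Only size, mode and checksum need to be filled in before the entry
    // can be written.
    let mut header = tar::Header::new_gnu();
    header.set_size(info_bytes.len() as u64);
    header.set_mode(0o644);
    header.set_cksum();

    // append_data accepts any Read impl, so the byte slice works directly.
    tar.append_data(&mut header, "info.txt", info_bytes)
}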
@ -139,7 +147,9 @@ impl ServerProcess {
    fn remove_old_backups(&mut self) -> std::io::Result<()> {
        // The naming format used allows us to sort the backups by name and still get a sorting by
        // creation time
        let mut backups = std::fs::read_dir(&self.backup_dir)?
        let mut backups = self
            .backup_dir
            .read_dir()?
            .filter_map(|res| res.map(|e| e.path()).ok())
            .collect::<Vec<PathBuf>>();
        backups.sort();
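As the comment explains, read_dir gives no ordering guarantee, so the paths are collected and sorted; because the backup file names embed a sortable timestamp, the lexical sort doubles as a chronological one. A small illustration with made-up file names (the real naming format is defined elsewhere):

use std::path::PathBuf;

fn main() {
    // Hypothetical backup names; ISO-style timestamps sort the same way
    // lexically as they do chronologically.
    let mut backups = vec![
        PathBuf::from("backups/2023-06-06_18-00-00.tar.gz"),
        PathBuf::from("backups/2023-06-05_18-00-00.tar.gz"),
        PathBuf::from("backups/2023-06-06_20-12-42.tar.gz"),
    ];

    backups.sort();

    // Oldest entry first, newest last.
    assert_eq!(backups[0], PathBuf::from("backups/2023-06-05_18-00-00.tar.gz"));
    assert_eq!(backups[2], PathBuf::from("backups/2023-06-06_20-12-42.tar.gz"));
}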