Compare commits

...

4 Commits

Author        SHA1        Message                                                  Date
Jef Roosens   bfb264e823  docs: add some more help strings                         2023-07-08 15:34:24 +02:00
Jef Roosens   241bb4d68e  feat: add extract command                                2023-07-08 15:31:01 +02:00
Jef Roosens   6cdc18742e  feat: don't read non-contributing archives for export    2023-07-08 14:50:18 +02:00
Jef Roosens   b924a054a6  chore: bump version to 0.3.1                             2023-07-08 14:12:18 +02:00
8 changed files with 161 additions and 31 deletions

CHANGELOG.md

@@ -9,6 +9,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
+### Added
+
+* Extract command for working with the output of export
+
+### Changed
+
+* Export command no longer reads backups that do not contribute to the final
+  state
+
+## [0.3.1](https://git.rustybever.be/Chewing_Bever/alex/src/tag/0.3.1)
+
 ### Added
 
 * Export command to export any backup as a new full backup
 
 ## [0.3.0](https://git.rustybever.be/Chewing_Bever/alex/src/tag/0.3.0)

Cargo.lock generated

@@ -10,7 +10,7 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
 
 [[package]]
 name = "alex"
-version = "0.3.0"
+version = "0.3.1"
 dependencies = [
  "chrono",
  "clap",

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "alex"
-version = "0.3.0"
+version = "0.3.1"
 description = "Wrapper around Minecraft server processes, designed to complement Docker image installations."
 authors = ["Jef Roosens"]
 edition = "2021"


@@ -18,11 +18,18 @@ pub struct Delta {
 impl Delta {
     pub fn new() -> Self {
         Self {
-            added: Default::default(),
-            removed: Default::default(),
+            added: State::new(),
+            removed: State::new(),
         }
     }
 
+    /// Returns whether the delta is empty by checking whether both its added and removed state
+    /// return true for their `is_empty`.
+    #[allow(dead_code)]
+    pub fn is_empty(&self) -> bool {
+        self.added.is_empty() && self.removed.is_empty()
+    }
+
     /// Calculate the union of this delta with another delta.
     ///
     /// The union of two deltas is a delta that produces the same state as if you were to apply
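A minimal standalone sketch of the new emptiness rule, using simplified stand-in types rather than the crate's real `State` and `Delta` definitions:

```rust
use std::collections::HashSet;

// Stand-ins for illustration only; the real State and Delta live in the crate.
#[derive(Default)]
struct State(HashSet<String>);

impl State {
    fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}

#[derive(Default)]
struct Delta {
    added: State,
    removed: State,
}

impl Delta {
    // Same rule as the method added above: a delta is a no-op only if it
    // neither adds nor removes anything.
    fn is_empty(&self) -> bool {
        self.added.is_empty() && self.removed.is_empty()
    }
}

fn main() {
    let mut delta = Delta::default();
    assert!(delta.is_empty());

    // A delta that only removes files is still a real change.
    delta.removed.0.insert("worlds/region/r.0.0.mca".to_string());
    assert!(!delta.is_empty());
}
```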


@@ -226,9 +226,16 @@ where
         let enc = GzEncoder::new(tar_gz, Compression::default());
         let mut ar = tar::Builder::new(enc);
 
-        for (contribution, backup) in
-            contributions.iter().rev().zip(chain.iter().take(index + 1))
+        // We only need to consider backups that have a non-empty contribution.
+        // This allows us to skip reading backups that have been completely
+        // overwritten by their successors anyways.
+        for (contribution, backup) in contributions
+            .iter()
+            .rev()
+            .zip(chain.iter().take(index + 1))
+            .filter(|(contribution, _)| !contribution.is_empty())
         {
             println!("{}", &backup);
             backup.append(&self.backup_dir, contribution, &mut ar)?;
         }
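The effect of the added filter is easiest to see on toy data. Below is a sketch of the same iteration pattern; the ordering assumption (contributions stored newest-first, chain oldest-first, so the reversal lines them up) is inferred from the `rev()` call, not confirmed by the source:

```rust
fn main() {
    // Newest-first contributions for a chain of three backups; the oldest
    // backup's contribution has been fully overwritten by its successors.
    let contributions: Vec<Vec<&str>> = vec![
        vec!["config/server.properties"], // newest backup
        vec!["worlds/level.dat"],
        vec![],                           // oldest backup contributes nothing
    ];
    let chain = ["backup-0", "backup-1", "backup-2"];
    let index = 2; // export up to and including the third backup

    for (contribution, backup) in contributions
        .iter()
        .rev()
        .zip(chain.iter().take(index + 1))
        .filter(|(contribution, _)| !contribution.is_empty())
    {
        // backup-0 is never read: its contribution is empty.
        println!("{}: {:?}", backup, contribution);
    }
}
```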


@@ -55,6 +55,45 @@ impl Backup<()> {
         let filename = format!("{}", start_time.format(Self::FILENAME_FORMAT));
 
         backup_dir.join(filename)
     }
+
+    /// Extract an archive.
+    ///
+    /// # Arguments
+    ///
+    /// * `archive_path` - Path to the archive to extract
+    /// * `dirs` - list of tuples `(path_in_tar, dst_dir)` with `dst_dir` the directory on-disk
+    ///   where the files stored under `path_in_tar` inside the tarball should be extracted to.
+    pub fn extract_archive<P: AsRef<Path>>(
+        archive_path: P,
+        dirs: &Vec<(PathBuf, PathBuf)>,
+    ) -> io::Result<()> {
+        let tar_gz = File::open(archive_path)?;
+        let enc = GzDecoder::new(tar_gz);
+        let mut ar = tar::Archive::new(enc);
+
+        // Unpack each file by matching it with one of the destination directories and extracting
+        // it to the right path
+        for entry in ar.entries()? {
+            let mut entry = entry?;
+            let entry_path_in_tar = entry.path()?.to_path_buf();
+
+            for (path_in_tar, dst_dir) in dirs {
+                if entry_path_in_tar.starts_with(path_in_tar) {
+                    let dst_path =
+                        dst_dir.join(entry_path_in_tar.strip_prefix(path_in_tar).unwrap());
+
+                    // Ensure all parent directories are present
+                    std::fs::create_dir_all(dst_path.parent().unwrap())?;
+
+                    entry.unpack(dst_path)?;
+                    break;
+                }
+            }
+        }
+
+        Ok(())
+    }
 }
 
 impl<T: Clone> Backup<T> {
@@ -199,31 +238,8 @@ impl<T: Clone> Backup<T> {
         backup_dir: P,
         dirs: &Vec<(PathBuf, PathBuf)>,
     ) -> io::Result<()> {
-        let path = Backup::path(backup_dir, self.start_time);
-        let tar_gz = File::open(path)?;
-        let enc = GzDecoder::new(tar_gz);
-        let mut ar = tar::Archive::new(enc);
-
-        // Unpack each file by matching it with one of the destination directories and extracting
-        // it to the right path
-        for entry in ar.entries()? {
-            let mut entry = entry?;
-            let entry_path_in_tar = entry.path()?.to_path_buf();
-
-            for (path_in_tar, dst_dir) in dirs {
-                if entry_path_in_tar.starts_with(path_in_tar) {
-                    let dst_path =
-                        dst_dir.join(entry_path_in_tar.strip_prefix(path_in_tar).unwrap());
-
-                    // Ensure all parent directories are present
-                    std::fs::create_dir_all(dst_path.parent().unwrap())?;
-
-                    entry.unpack(dst_path)?;
-                    break;
-                }
-            }
-        }
+        let backup_path = Backup::path(backup_dir, self.start_time);
+        Backup::extract_archive(backup_path, dirs)?;
 
         // Remove any files
         for (path_in_tar, dst_dir) in dirs {

@@ -41,6 +41,14 @@ impl State {
             path.starts_with(dir) && files.contains(path.strip_prefix(dir).unwrap())
         })
     }
+
+    /// Returns whether the state is empty.
+    ///
+    /// Note that this does not necessarily mean that the state does not contain any sets, but
+    /// rather that any sets that it does contain are also empty.
+    pub fn is_empty(&self) -> bool {
+        self.0.values().all(|s| s.is_empty())
+    }
 }
 
 impl<T> From<T> for State
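The caveat in the doc comment is worth a concrete illustration: the inner map may still hold keys, as long as every set behind them is empty. A toy sketch, assuming `State` wraps a map from layer directories to path sets (stand-in types, not the crate's):

```rust
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;

fn main() {
    // Stand-in for State's inner value: one set of paths per directory key.
    let mut sets: HashMap<String, HashSet<PathBuf>> = HashMap::new();
    sets.insert("worlds".to_string(), HashSet::new());

    // The map itself is not empty...
    assert!(!sets.is_empty());
    // ...but by the rule above the state still counts as empty, because
    // every set it contains is empty.
    assert!(sets.values().all(|s| s.is_empty()));
}
```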


@@ -9,13 +9,30 @@ use std::path::{Path, PathBuf};
 #[derive(Subcommand)]
 pub enum BackupCommands {
     /// List all tracked backups
+    ///
+    /// Note that this will only list backups for the layers currently configured, and will ignore
+    /// any other layers also present in the backup directory.
     List(BackupListArgs),
     /// Manually create a new backup
+    ///
+    /// Note that backups created using this command will count towards the length of a chain, and
+    /// can therefore shorten how far back in time your backups will be stored.
     Create(BackupCreateArgs),
     /// Restore a backup
+    ///
+    /// This command will restore the selected backup by extracting its entire chain up to and
+    /// including the requested backup in-order.
     Restore(BackupRestoreArgs),
     /// Export a backup into a full archive
+    ///
+    /// Just like the restore command, this will extract each backup from the chain up to and
+    /// including the requested backup, but instead of writing the files to disk, they will be
+    /// recompressed into a new tarball, resulting in a new tarball containing a full backup.
     Export(BackupExportArgs),
+    /// Extract an archive file, which is assumed to be a full backup.
+    ///
+    /// This command mostly exists as a convenience method for working with the output of `export`.
+    Extract(BackupExtractArgs),
 }
 
 #[derive(Args)]
@@ -45,6 +62,9 @@ pub struct BackupRestoreArgs {
     /// Directory to store worlds in
     output_worlds: PathBuf,
     /// Whether to overwrite the contents of the output directories
+    ///
+    /// If set, the output directories will be completely cleared before trying to restore the
+    /// backup.
     #[arg(short, long, default_value_t = false)]
     force: bool,
     /// Create output directories if they don't exist
@@ -63,6 +83,25 @@ pub struct BackupExportArgs {
     make: bool,
 }
 
+#[derive(Args)]
+pub struct BackupExtractArgs {
+    /// Path to the backup to extract
+    path: PathBuf,
+    /// Directory to store config in
+    output_config: PathBuf,
+    /// Directory to store worlds in
+    output_worlds: PathBuf,
+    /// Whether to overwrite the contents of the output directories
+    ///
+    /// If set, the output directories will be completely cleared before trying to restore the
+    /// backup.
+    #[arg(short, long, default_value_t = false)]
+    force: bool,
+    /// Create output directories if they don't exist
+    #[arg(short, long, default_value_t = false)]
+    make: bool,
+}
+
 impl BackupArgs {
     pub fn run(&self, cli: &Cli) -> io::Result<()> {
         match &self.command {
@@ -70,6 +109,7 @@ impl BackupArgs {
             BackupCommands::List(args) => args.run(cli),
             BackupCommands::Restore(args) => args.run(cli),
             BackupCommands::Export(args) => args.run(cli),
+            BackupCommands::Extract(args) => args.run(cli),
         }
     }
 }
@@ -219,3 +259,44 @@ impl BackupExportArgs {
         }
     }
 }
+
+impl BackupExtractArgs {
+    pub fn run(&self, _cli: &Cli) -> io::Result<()> {
+        // Create directories if needed
+        if self.make {
+            std::fs::create_dir_all(&self.output_config)?;
+            std::fs::create_dir_all(&self.output_worlds)?;
+        }
+
+        let output_config = self.output_config.canonicalize()?;
+        let output_worlds = self.output_worlds.canonicalize()?;
+        let backup_path = self.path.canonicalize()?;
+
+        // Clear previous contents of directories
+        let mut entries = output_config
+            .read_dir()?
+            .chain(output_worlds.read_dir()?)
+            .peekable();
+
+        if entries.peek().is_some() && !self.force {
+            return Err(other("Output directories are not empty. If you wish to overwrite these contents, use the force flag."));
+        }
+
+        for entry in entries {
+            let path = entry?.path();
+
+            if path.is_dir() {
+                std::fs::remove_dir_all(path)?;
+            } else {
+                std::fs::remove_file(path)?;
+            }
+        }
+
+        let dirs = vec![
+            (PathBuf::from("config"), output_config),
+            (PathBuf::from("worlds"), output_worlds),
+        ];
+
+        Backup::extract_archive(backup_path, &dirs)
+    }
+}
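Put together, the new subcommand should surface on the command line roughly as follows. This is a sketch assuming the `BackupCommands` enum is mounted under a `backup` subcommand and that clap's derive defaults are in effect; the flag names follow from the `#[arg(short, long)]` attributes above:

```
alex backup extract [OPTIONS] <PATH> <OUTPUT_CONFIG> <OUTPUT_WORLDS>

Options:
  -f, --force  Whether to overwrite the contents of the output directories
  -m, --make   Create output directories if they don't exist
```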