feat: add extract command

parent 6cdc18742e
commit 241bb4d68e
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
 ## [Unreleased](https://git.rustybever.be/Chewing_Bever/alex/src/branch/dev)
 
+### Added
+
+* Extract command for working with the output of export
+
 ### Changed
 
 * Export command no longer reads backups that do not contribute to the final
@@ -55,6 +55,45 @@ impl Backup<()> {
         let filename = format!("{}", start_time.format(Self::FILENAME_FORMAT));
         backup_dir.join(filename)
     }
+
+    /// Extract an archive.
+    ///
+    /// # Arguments
+    ///
+    /// * `archive_path` - Path to the archive to extract
+    /// * `dirs` - list of tuples `(path_in_tar, dst_dir)` with `dst_dir` the directory on-disk
+    ///   where the files stored under `path_in_tar` inside the tarball should be extracted to.
+    pub fn extract_archive<P: AsRef<Path>>(
+        archive_path: P,
+        dirs: &Vec<(PathBuf, PathBuf)>,
+    ) -> io::Result<()> {
+        let tar_gz = File::open(archive_path)?;
+        let enc = GzDecoder::new(tar_gz);
+        let mut ar = tar::Archive::new(enc);
+
+        // Unpack each file by matching it with one of the destination directories and extracting
+        // it to the right path
+        for entry in ar.entries()? {
+            let mut entry = entry?;
+            let entry_path_in_tar = entry.path()?.to_path_buf();
+
+            for (path_in_tar, dst_dir) in dirs {
+                if entry_path_in_tar.starts_with(path_in_tar) {
+                    let dst_path =
+                        dst_dir.join(entry_path_in_tar.strip_prefix(path_in_tar).unwrap());
+
+                    // Ensure all parent directories are present
+                    std::fs::create_dir_all(dst_path.parent().unwrap())?;
+
+                    entry.unpack(dst_path)?;
+
+                    break;
+                }
+            }
+        }
+
+        Ok(())
+    }
 }
 
 impl<T: Clone> Backup<T> {
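
For reference, a minimal usage sketch of the new helper (not part of the commit; the archive name and destination directories are made up, and the call assumes an enclosing function returning io::Result):

    use std::path::PathBuf;

    // Send tarball entries under `config/` and `worlds/` to two
    // illustrative on-disk directories.
    let dirs = vec![
        (PathBuf::from("config"), PathBuf::from("/tmp/restore/config")),
        (PathBuf::from("worlds"), PathBuf::from("/tmp/restore/worlds")),
    ];
    Backup::extract_archive("backup.tar.gz", &dirs)?;
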
@@ -199,31 +238,8 @@ impl<T: Clone> Backup<T> {
         backup_dir: P,
         dirs: &Vec<(PathBuf, PathBuf)>,
     ) -> io::Result<()> {
-        let path = Backup::path(backup_dir, self.start_time);
-        let tar_gz = File::open(path)?;
-        let enc = GzDecoder::new(tar_gz);
-        let mut ar = tar::Archive::new(enc);
-
-        // Unpack each file by matching it with one of the destination directories and extracting
-        // it to the right path
-        for entry in ar.entries()? {
-            let mut entry = entry?;
-            let entry_path_in_tar = entry.path()?.to_path_buf();
-
-            for (path_in_tar, dst_dir) in dirs {
-                if entry_path_in_tar.starts_with(path_in_tar) {
-                    let dst_path =
-                        dst_dir.join(entry_path_in_tar.strip_prefix(path_in_tar).unwrap());
-
-                    // Ensure all parent directories are present
-                    std::fs::create_dir_all(dst_path.parent().unwrap())?;
-
-                    entry.unpack(dst_path)?;
-
-                    break;
-                }
-            }
-        }
+        let backup_path = Backup::path(backup_dir, self.start_time);
+        Backup::extract_archive(backup_path, dirs)?;
 
         // Remove any files
         for (path_in_tar, dst_dir) in dirs {
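
The hunk above is pure deduplication: the inline unpack loop (the removed lines) is replaced by a call to the new Backup::extract_archive helper, so the behavior of the method is unchanged.
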
@@ -12,10 +12,13 @@ pub enum BackupCommands {
     List(BackupListArgs),
     /// Manually create a new backup
     Create(BackupCreateArgs),
-    /// Restore a backup
+    /// Restore a backup including all of its required predecessors
     Restore(BackupRestoreArgs),
     /// Export a backup into a full archive
     Export(BackupExportArgs),
+    /// Extract a single backup; meant as a convenience method for working with the output of
+    /// `export`
+    Extract(BackupExtractArgs),
 }
 
 #[derive(Args)]
@@ -63,6 +66,22 @@ pub struct BackupExportArgs {
     make: bool,
 }
 
+#[derive(Args)]
+pub struct BackupExtractArgs {
+    /// Path to the backup to extract
+    path: PathBuf,
+    /// Directory to store config in
+    output_config: PathBuf,
+    /// Directory to store worlds in
+    output_worlds: PathBuf,
+    /// Whether to overwrite the contents of the output directories
+    #[arg(short, long, default_value_t = false)]
+    force: bool,
+    /// Create output directories if they don't exist
+    #[arg(short, long, default_value_t = false)]
+    make: bool,
+}
+
 impl BackupArgs {
     pub fn run(&self, cli: &Cli) -> io::Result<()> {
         match &self.command {
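
Given clap's derive rules, the three bare PathBuf fields become required positional arguments and the two annotated bool fields become flags, so the new subcommand should parse roughly as follows (the binary name alex is assumed, not shown in this diff):

    alex backup extract <PATH> <OUTPUT_CONFIG> <OUTPUT_WORLDS> [-f|--force] [-m|--make]
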
@@ -70,6 +89,7 @@ impl BackupArgs {
             BackupCommands::List(args) => args.run(cli),
             BackupCommands::Restore(args) => args.run(cli),
             BackupCommands::Export(args) => args.run(cli),
+            BackupCommands::Extract(args) => args.run(cli),
         }
     }
 }
@@ -219,3 +239,44 @@ impl BackupExportArgs {
         }
     }
 }
+
+impl BackupExtractArgs {
+    pub fn run(&self, _cli: &Cli) -> io::Result<()> {
+        // Create directories if needed
+        if self.make {
+            std::fs::create_dir_all(&self.output_config)?;
+            std::fs::create_dir_all(&self.output_worlds)?;
+        }
+
+        let output_config = self.output_config.canonicalize()?;
+        let output_worlds = self.output_worlds.canonicalize()?;
+        let backup_path = self.path.canonicalize()?;
+
+        // Clear previous contents of directories
+        let mut entries = output_config
+            .read_dir()?
+            .chain(output_worlds.read_dir()?)
+            .peekable();
+
+        if entries.peek().is_some() && !self.force {
+            return Err(other("Output directories are not empty. If you wish to overwrite these contents, use the force flag."));
+        }
+
+        for entry in entries {
+            let path = entry?.path();
+
+            if path.is_dir() {
+                std::fs::remove_dir_all(path)?;
+            } else {
+                std::fs::remove_file(path)?;
+            }
+        }
+
+        let dirs = vec![
+            (PathBuf::from("config"), output_config),
+            (PathBuf::from("worlds"), output_worlds),
+        ];
+
+        Backup::extract_archive(backup_path, &dirs)
+    }
+}
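
The other() used for the error above is not part of this diff; presumably it is a small crate-local helper that wraps a message in an io::Error, along the lines of this sketch (hypothetical reconstruction):

    use std::io;

    // Hypothetical: build an io::Error with kind Other from a message string.
    fn other(msg: &str) -> io::Error {
        io::Error::new(io::ErrorKind::Other, msg)
    }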