Compare commits

...

3 Commits

Author       SHA1        Message                       Date
Jef Roosens  ecfa6fe7b7  Added documentation           2021-04-25 19:26:12 +02:00
             continuous-integration/drone: the build failed
Jef Roosens  d513a03c4a  Added parser.py docstrings    2021-04-25 18:27:57 +02:00
Jef Roosens  3277af2ac5  Documented skeleton.py        2021-04-25 18:10:37 +02:00
7 changed files with 149 additions and 52 deletions

View File

@@ -1,3 +1,4 @@
+"""The main entrypoint of the program."""
 import argparse
 import sys
 from parser import read_specs_file
@@ -5,53 +6,60 @@ from parser import read_specs_file
 # This just displays the error type and message, not the stack trace
 def except_hook(ext_type, value, traceback):
+    """
+    Make errors not show the stacktrace to stdout.
+
+    Todo:
+        * Replace this with proper error handling
+    """
     sys.stderr.write("{}: {}\n".format(ext_type.__name__, value))


 # sys.excepthook = except_hook

-# Define parser
-parser = argparse.ArgumentParser(
+if __name__ == "__main__":
+    # Define parser
+    parser = argparse.ArgumentParser(
         description="Backup directories and Docker volumes."
-)
-parser.add_argument(
+    )
+    parser.add_argument(
         "-f",
         "--file",
         action="append",
         dest="file",
         required=True,
         help="File containing spec definitions.",
-)
-parser.add_argument(
+    )
+    parser.add_argument(
         "-j",
         "--json",
         action="store_const",
         const=True,
         default=False,
         help="Print out the parsed specs as JSON and exit",
-)
-parser.add_argument(
+    )
+    parser.add_argument(
         "spec", nargs="*", help="The specs to process. Defaults to all."
-)
+    )

-# Parse arguments
-args = parser.parse_args()
-specs = sum([read_specs_file(path) for path in args.file], [])
+    # Parse arguments
+    args = parser.parse_args()
+    specs = sum((read_specs_file(path) for path in args.file), [])

-# Filter specs if needed
-if args.spec:
+    # Filter specs if needed
+    if args.spec:
         specs = list(filter(lambda s: s.name in args.spec, specs))

-# Dump parsed data as json
-if args.json:
+    # Dump parsed data as json
+    if args.json:
         import json

+        # TODO replace this with error handling system
         print(json.dumps([spec.to_dict() for spec in specs], indent=4))
-else:
-    # Run the backups
-    if not specs:
+    elif not specs:
+        # TODO replace this with error handling system
         print("No specs, exiting.")
         sys.exit(0)
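
Note: below is a minimal, self-contained sketch (not code from this repository) of the
command-line surface the parser above defines; the sample argument values are invented.

import argparse

parser = argparse.ArgumentParser(description="Backup directories and Docker volumes.")
parser.add_argument("-f", "--file", action="append", dest="file", required=True)
parser.add_argument("-j", "--json", action="store_const", const=True, default=False)
parser.add_argument("spec", nargs="*")

# Hypothetical invocation: two config files, JSON output, one selected spec.
args = parser.parse_args(["-f", "a.yml", "-f", "b.yml", "--json", "some-spec"])
print(args.file)  # ['a.yml', 'b.yml']  (action="append" collects repeated -f flags)
print(args.json)  # True
print(args.spec)  # ['some-spec']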

View File

@@ -1,3 +1,4 @@
+"""Handles parsing a config file from disk."""
 import yaml
 from pathlib import Path
 from typing import List, Union
@@ -6,9 +7,19 @@ import skeleton


 def read_specs_file(path: Union[str, Path]) -> List[Spec]:
+    """
+    Read a config file and merge it with the skeleton.
+
+    Args:
+        path: path to the yaml config file
+
+    Returns:
+        A list of specs, parsed from the config.
+    """
     with open(path, "r") as yaml_file:
         data = yaml.safe_load(yaml_file)

+    # NOTE shouldn't this be defined as a top-level variable?
     categories = [
         ("directories", DirectorySpec),
         ("volumes", VolumeSpec),
@@ -23,6 +34,7 @@ def read_specs_file(path: Union[str, Path]) -> List[Spec]:
     # Check what defaults are present
     defaults = {}

     if data.get("defaults"):
         if data["defaults"].get("all"):
             defaults = skeleton.merge(defaults, data["defaults"]["all"])
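
Note: a short usage sketch (not from this repository) based on read_specs_file's signature
and docstring; the file name is hypothetical.

from parser import read_specs_file

# Accepts a str or pathlib.Path and returns a list of Spec objects.
specs = read_specs_file("specs.yml")
for spec in specs:
    print(spec.name)  # each spec's name is used as its identifier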

View File

@@ -1,21 +1,55 @@
+"""Handles merging with the skeleton config."""
 from typing import Dict


 class InvalidKeyError(Exception):
-    def __init__(self, key):
+    """Thrown when a config file contains an invalid key."""
+
+    def __init__(self, key: str):
+        """
+        Create a new InvalidKeyError object with the given key.
+
+        Args:
+            key: the invalid key
+        """
         self.message = "Invalid key: {}".format(key)

         super().__init__(key)


 class MissingKeyError(Exception):
-    def __init__(self, key):
+    """Thrown when a required key is missing from a config."""
+
+    def __init__(self, key: str):
+        """
+        Create a new MissingKeyError object with the given key.
+
+        Args:
+            key: the invalid key
+        """
         self.message = "Missing key: {}".format(key)

         super().__init__(key)


 def merge(*dicts: [Dict]) -> Dict:
+    """
+    Merge multiple dicts into one.
+
+    It reads the dicts from left to right, always preferring the "right"
+    dictionary's values. Therefore, the dictionaries should be sorted from
+    least important to most important (e.g. a default values skeleton should be
+    to the left of a dict of selected values).
+
+    Args:
+        dicts: the dictionaries to merge
+
+    Returns:
+        a new dictionary representing the merged dictionaries
+
+    Todo:
+        * Make sure an infinite loop is not possible
+    """
     # Base cases
     if len(dicts) == 0:
         return {}
@@ -45,10 +79,23 @@ def merge(*dicts: [Dict]) -> Dict:
 def merge_with_skeleton(data: Dict, skel: Dict) -> Dict:
     """
-    Compare a dict with a given skeleton dict, and fill in default values where
-    needed.
-    """
+    Merge a dictionary with a skeleton containing default values.
+
+    The skeleton not only defines what the default values are, but also
+    enforces a certain shape. This allows us to define a config file using a
+    dictionary and parse it.
+
+    Args:
+        data: dictionary containing the selected config values
+        skel: dictionary containing the skeleton (aka the def)
+
+    Returns:
+        a new dictionary representing the two merged dictionaries
+
+    Todo:
+        * Check if an infinite loop is possible
+        * Split into less complex functions
+    """
     # First, check for illegal keys
     for key in data:
         if key not in skel:
@@ -66,6 +113,7 @@ def merge_with_skeleton(data: Dict, skel: Dict) -> Dict:
         # Error if value is not same type as default value
         elif type(data[key]) != type(value) and value is not None:
+            # TODO make this error message more verbose
             raise TypeError("Invalid value type")

         # Recurse into dicts
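
Note: a small usage sketch (not from this repository) of the precedence that merge()'s new
docstring describes; the keys and values are invented, and merge is assumed to be importable
from skeleton.py.

from skeleton import merge

defaults = {"limit": 7, "extension": "tar.gz"}
chosen = {"limit": 14}

# The right-most dict wins, so explicitly chosen values override the defaults.
print(merge(defaults, chosen))
# Expected output, going by the docstring: {'limit': 14, 'extension': 'tar.gz'}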

View File

@@ -1,4 +1,7 @@
+"""Parent module for the various spec types."""
 from .spec import Spec
 from .directory import DirectorySpec
 from .volume import VolumeSpec
 from .container import ContainerSpec
+
+__all__ = ["Spec", "DirectorySpec", "VolumeSpec", "ContainerSpec"]

View File

@@ -1,3 +1,4 @@
+"""Module defining a Container-based spec."""
 from .spec import Spec
 from typing import Union
 from pathlib import Path
@@ -6,11 +7,10 @@ import subprocess

 class ContainerSpec(Spec):
-    """
-    A spec for backing up via a container.
-    """
+    """Spec for backing up via a container."""

     _SKEL = {"container": None, "command": None, "mountpoint": "/from"}
+    """The skeleton for the ContainerSpec config."""

     def __init__(
         self,
@@ -23,6 +23,22 @@ class ContainerSpec(Spec):
         mountpoint: str,
         notify=None,
     ):
+        """
+        Create a new ContainerSpec object.
+
+        Args:
+            name: name of the spec (used as an identifier)
+            container: the Docker container to back up
+            destination: where to store the backups (gets created if
+                non-existent)
+            limit: max amount of backups to keep
+            command: command to run inside the container. This command should
+                perform a specified backup and output this data to stdout. This
+                output then gets piped to a backup file.
+            extension: the extension of the backup files.
+            mountpoint:
+            notify: notifier object (may be None)
+        """
         super().__init__(name, destination, limit, extension, notify)

         self.container = container
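
Note: a rough sketch (not code from this repository) of the piping behaviour the command and
extension entries above describe: run the configured command inside the container and write
its stdout to a backup file. The docker invocation and file naming are assumptions.

import subprocess
from pathlib import Path


def run_container_backup(container: str, command: str, destination: Path, extension: str) -> Path:
    """Illustrative helper, not part of the repository."""
    destination.mkdir(parents=True, exist_ok=True)
    target = destination / "backup.{}".format(extension)

    with open(target, "wb") as backup_file:
        # The command runs inside the container; its stdout becomes the backup file.
        subprocess.run(
            ["docker", "exec", container, "sh", "-c", command],
            stdout=backup_file,
            check=True,
        )

    return target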

View File

@@ -6,9 +6,7 @@ from datetime import datetime

 class DirectorySpec(Spec):
-    """
-    A spec for backing up a local directory.
-    """
+    """A spec for backing up a local directory."""

     _SKEL = {
         "source": None,

View File

@@ -60,7 +60,19 @@ class Spec:
         self.extension = extension

     @classmethod
-    def skeleton(cls):
+    def skeleton(cls: "Spec") -> Dict:
+        """
+        Return the skeleton for the given class.
+
+        It works by inspecting the inheritance tree and merging the skeleton
+        for each of the parents.
+
+        Args:
+            cls: the class to get the skeleton for
+
+        Returns:
+            a dictionary containing the skeleton
+        """
         return skeleton.merge(
             *[val._SKEL for val in reversed(inspect.getmro(cls)[:-1])]
         )
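
Note: a self-contained sketch (not code from this repository) of the MRO walk that skeleton()
performs. The parent _SKEL contents are illustrative (Spec's real skeleton is not shown in this
diff), and skeleton.merge() is replaced by a flat dict update to keep the example short.

import inspect


class Spec:
    # Illustrative defaults; the real Spec._SKEL is not part of this diff.
    _SKEL = {"destination": None, "limit": None, "extension": None}


class ContainerSpec(Spec):
    # Copied from container.py above.
    _SKEL = {"container": None, "command": None, "mountpoint": "/from"}


# getmro() returns (ContainerSpec, Spec, object); [:-1] drops object, and reversed()
# walks parents first, so a subclass skeleton can override inherited defaults.
skeletons = [cls._SKEL for cls in reversed(inspect.getmro(ContainerSpec)[:-1])]

combined = {}
for skel in skeletons:  # stand-in for skeleton.merge()
    combined.update(skel)

print(combined)
# {'destination': None, 'limit': None, 'extension': None,
#  'container': None, 'command': None, 'mountpoint': '/from'}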