Compare commits

...

3 Commits

Author SHA1 Message Date
Jef Roosens ecfa6fe7b7
Added documentation
continuous-integration/drone the build failed Details
2021-04-25 19:26:12 +02:00
Jef Roosens d513a03c4a
Added parser.py docstrings 2021-04-25 18:27:57 +02:00
Jef Roosens 3277af2ac5
Documented skeleton.py 2021-04-25 18:10:37 +02:00
7 changed files with 149 additions and 52 deletions

View File

@ -1,3 +1,4 @@
"""The main entrypoint of the program."""
import argparse import argparse
import sys import sys
from parser import read_specs_file from parser import read_specs_file
@ -5,55 +6,62 @@ from parser import read_specs_file
# This just displays the error type and message, not the stack trace # This just displays the error type and message, not the stack trace
def except_hook(ext_type, value, traceback): def except_hook(ext_type, value, traceback):
"""
Make errors not show the stacktrace to stdout.
Todo:
* Replace this with proper error handling
"""
sys.stderr.write("{}: {}\n".format(ext_type.__name__, value)) sys.stderr.write("{}: {}\n".format(ext_type.__name__, value))
# sys.excepthook = except_hook # sys.excepthook = except_hook
# Define parser if __name__ == "__main__":
parser = argparse.ArgumentParser( # Define parser
description="Backup directories and Docker volumes." parser = argparse.ArgumentParser(
) description="Backup directories and Docker volumes."
parser.add_argument( )
"-f", parser.add_argument(
"--file", "-f",
action="append", "--file",
dest="file", action="append",
required=True, dest="file",
help="File containing spec definitions.", required=True,
) help="File containing spec definitions.",
parser.add_argument( )
"-j", parser.add_argument(
"--json", "-j",
action="store_const", "--json",
const=True, action="store_const",
default=False, const=True,
help="Print out the parsed specs as JSON and exit", default=False,
) help="Print out the parsed specs as JSON and exit",
parser.add_argument( )
"spec", nargs="*", help="The specs to process. Defaults to all." parser.add_argument(
) "spec", nargs="*", help="The specs to process. Defaults to all."
)
# Parse arguments # Parse arguments
args = parser.parse_args() args = parser.parse_args()
specs = sum([read_specs_file(path) for path in args.file], []) specs = sum((read_specs_file(path) for path in args.file), [])
# Filter specs if needed # Filter specs if needed
if args.spec: if args.spec:
specs = list(filter(lambda s: s.name in args.spec, specs)) specs = list(filter(lambda s: s.name in args.spec, specs))
# Dump parsed data as json # Dump parsed data as json
if args.json: if args.json:
import json import json
print(json.dumps([spec.to_dict() for spec in specs], indent=4)) # TODO replace this with error handling system
print(json.dumps([spec.to_dict() for spec in specs], indent=4))
else: elif not specs:
# Run the backups # TODO replace this with error handling system
if not specs: print("No specs, exiting.")
print("No specs, exiting.") sys.exit(0)
sys.exit(0)
for spec in specs: for spec in specs:
spec.backup() spec.backup()

View File

@ -1,3 +1,4 @@
"""Handles parsing a config file from disk."""
import yaml import yaml
from pathlib import Path from pathlib import Path
from typing import List, Union from typing import List, Union
@ -6,9 +7,19 @@ import skeleton
def read_specs_file(path: Union[str, Path]) -> List[Spec]: def read_specs_file(path: Union[str, Path]) -> List[Spec]:
"""
Read a config file and merge it with the skeleton.
Args:
path: path to the yaml config file
Returns:
A list of specs, parsed from the config.
"""
with open(path, "r") as yaml_file: with open(path, "r") as yaml_file:
data = yaml.safe_load(yaml_file) data = yaml.safe_load(yaml_file)
# NOTE shouldn't this be defined as a top-level variable?
categories = [ categories = [
("directories", DirectorySpec), ("directories", DirectorySpec),
("volumes", VolumeSpec), ("volumes", VolumeSpec),
@ -23,6 +34,7 @@ def read_specs_file(path: Union[str, Path]) -> List[Spec]:
# Check what defaults are present # Check what defaults are present
defaults = {} defaults = {}
if data.get("defaults"): if data.get("defaults"):
if data["defaults"].get("all"): if data["defaults"].get("all"):
defaults = skeleton.merge(defaults, data["defaults"]["all"]) defaults = skeleton.merge(defaults, data["defaults"]["all"])

View File

@ -1,21 +1,55 @@
"""Handles merging with the skeleton config."""
from typing import Dict from typing import Dict
class InvalidKeyError(Exception): class InvalidKeyError(Exception):
def __init__(self, key): """Thrown when a config file contains an invalid key."""
def __init__(self, key: str):
"""
Create a new InvalidKeyError object with the given key.
Args:
key: the invalid key
"""
self.message = "Invalid key: {}".format(key) self.message = "Invalid key: {}".format(key)
super().__init__(key) super().__init__(key)
class MissingKeyError(Exception): class MissingKeyError(Exception):
def __init__(self, key): """Thrown when a required key is missing from a config."""
def __init__(self, key: str):
"""
Create a new MissingKeyError object with the given key.
Args:
key: the invalid key
"""
self.message = "Missing key: {}".format(key) self.message = "Missing key: {}".format(key)
super().__init__(key) super().__init__(key)
def merge(*dicts: [Dict]) -> Dict: def merge(*dicts: [Dict]) -> Dict:
"""
Merge multiple dicts into one.
It reads the dicts from left to right, always preferring the "right"
dictionary's values. Therefore, the dictionaries should be sorted from
least important to most important (e.g. a default values skeleton should be
to the left of a dict of selected values).
Args:
dicts: the dictionaries to merge
Returns:
a new dictionary representing the merged dictionaries
Todo:
* Make sure an infinite loop is not possible
"""
# Base cases # Base cases
if len(dicts) == 0: if len(dicts) == 0:
return {} return {}
@ -45,10 +79,23 @@ def merge(*dicts: [Dict]) -> Dict:
def merge_with_skeleton(data: Dict, skel: Dict) -> Dict: def merge_with_skeleton(data: Dict, skel: Dict) -> Dict:
""" """
Compare a dict with a given skeleton dict, and fill in default values where Merge a dictionary with a skeleton containing default values.
needed.
"""
The skeleton not only defines what the default values are, but also
enforces a certain shape. This allows us to define a config file using a
dictionary and parse it.
Args:
data: dictionary containing the selected config values
skel: dictionary containing the skeleton (aka the defaults)        skel: dictionary containing the skeleton (aka the defaults)
Returns:
a new dictionary representing the two merged dictionaries
Todo:
* Check if an infinite loop is possible
* Split into less complex functions
"""
# First, check for illegal keys # First, check for illegal keys
for key in data: for key in data:
if key not in skel: if key not in skel:
@ -66,6 +113,7 @@ def merge_with_skeleton(data: Dict, skel: Dict) -> Dict:
# Error if value is not same type as default value # Error if value is not same type as default value
elif type(data[key]) != type(value) and value is not None: elif type(data[key]) != type(value) and value is not None:
# TODO make this error message more verbose
raise TypeError("Invalid value type") raise TypeError("Invalid value type")
# Recurse into dicts # Recurse into dicts

View File

@ -1,4 +1,7 @@
"""Parent module for the various spec types."""
from .spec import Spec from .spec import Spec
from .directory import DirectorySpec from .directory import DirectorySpec
from .volume import VolumeSpec from .volume import VolumeSpec
from .container import ContainerSpec from .container import ContainerSpec
__all__ = ["Spec", "DirectorySpec", "VolumeSpec", "ContainerSpec"]

View File

@ -1,3 +1,4 @@
"""Module defining a Container-based spec."""
from .spec import Spec from .spec import Spec
from typing import Union from typing import Union
from pathlib import Path from pathlib import Path
@ -6,11 +7,10 @@ import subprocess
class ContainerSpec(Spec): class ContainerSpec(Spec):
""" """Spec for backing up via a container."""
A spec for backing up via a container.
"""
_SKEL = {"container": None, "command": None, "mountpoint": "/from"} _SKEL = {"container": None, "command": None, "mountpoint": "/from"}
"""The skeleton for the ContainerSpec config."""
def __init__( def __init__(
self, self,
@ -23,6 +23,22 @@ class ContainerSpec(Spec):
mountpoint: str, mountpoint: str,
notify=None, notify=None,
): ):
"""
Create a new ContainerSpec object.
Args:
name: name of the spec (used as an identifier)
container: the Docker container to back up
destination: where to store the backups (gets created if
non-existent)
limit: max amount of backups to keep
command: command to run inside the container. This command should
perform a specified backup and output this data to stdout. This
output then gets piped to a backup file.
extension: the extension of the backup files.
mountpoint:
notify: notifier object (may be None)
"""
super().__init__(name, destination, limit, extension, notify) super().__init__(name, destination, limit, extension, notify)
self.container = container self.container = container

View File

@ -6,9 +6,7 @@ from datetime import datetime
class DirectorySpec(Spec): class DirectorySpec(Spec):
""" """A spec for backing up a local directory."""
A spec for backing up a local directory.
"""
_SKEL = { _SKEL = {
"source": None, "source": None,

View File

@ -60,7 +60,19 @@ class Spec:
self.extension = extension self.extension = extension
@classmethod @classmethod
def skeleton(cls): def skeleton(cls: "Spec") -> Dict:
"""
Return the skeleton for the given class.
It works by inspecting the inheritance tree and merging the skeleton
for each of the parents.
Args:
cls: the class to get the skeleton for
Returns:
a dictionary containing the skeleton
"""
return skeleton.merge( return skeleton.merge(
*[val._SKEL for val in reversed(inspect.getmro(cls)[:-1])] *[val._SKEL for val in reversed(inspect.getmro(cls)[:-1])]
) )