diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..1d1c7f5
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,8 @@
+# vim: ft=cfg
+[flake8]
+max-complexity = 7
+docstring-convention=google
+
+# Required to be compatible with black
+extend-ignore = E203,W503
+inline-quotes = double
diff --git a/.gitignore b/.gitignore
index 4fc828a..0156c35 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,6 @@
 __pycache__/
 .venv/
 backup_tool
+*.egg-info/
+.tox/
+.coverage
diff --git a/.woodpecker.yml b/.woodpecker.yml
new file mode 100644
index 0000000..9673933
--- /dev/null
+++ b/.woodpecker.yml
@@ -0,0 +1,31 @@
+matrix:
+  PYTHON_VERSION:
+    - 3.6
+    - 3.7
+    - 3.8
+    - 3.9
+
+pipeline:
+  test:
+    group: test
+    image: python:${PYTHON_VERSION}-alpine
+    pull: true
+    commands:
+      - pip install -e .[test]
+      - pytest --cov=app --cov-fail-under=90 tests/
+
+  test-pypy:
+    group: test
+    image: pypy:3-7-slim
+    pull: true
+    commands:
+      - pip install -e .[test]
+      - pytest --cov=app --cov-fail-under=90 tests/
+
+  lint:
+    image: python:3.6-alpine
+    commands:
+      - apk update && apk add --no-cache build-base
+      - pip install -e .[lint]
+      - black --check setup.py app
+      - flake8 setup.py app
diff --git a/Makefile b/Makefile
index 5f6e639..ab0f16e 100644
--- a/Makefile
+++ b/Makefile
@@ -1,25 +1,28 @@
 # =====CONFIG=====
-# Devop environment runs in 3.8
-PYTHON=python3.8
+PYTHON := python3.6
+VENV := .venv
 
 
 # =====RECIPES=====
-.venv/bin/activate: requirements.txt requirements-dev.txt
-	'$(PYTHON)' -m venv .venv
-	.venv/bin/pip install -r requirements.txt -r requirements-dev.txt
+# Create the virtual environment
+$(VENV)/bin/activate: setup.py
+	'$(PYTHON)' -m venv '$(VENV)'
+	'$(VENV)/bin/pip' install -e .[develop]
 
-venv: .venv/bin/activate
+venv: $(VENV)/bin/activate
 .PHONY: venv
 
+# Format the codebase using Black
 format: venv
-	@ .venv/bin/black app/*.py app/**/*.py
+	@ '$(VENV)/bin/black' setup.py app
 .PHONY: format
 
+# Remove any temporary files
 clean:
-	rm -rf .venv
-	rm backup_tool
+	@ rm -rf '$(VENV)' .tox backup_tool
 .PHONY: clean
 
+# Pack the package into a zipfile
 backup_tool:
 	@ cd app && \
 		zip -r ../app.zip * \
@@ -31,5 +34,17 @@ backup_tool:
 app: backup_tool
 .PHONY: app
 
+# Install the app
 install: app
 	cp backup_tool /usr/local/bin
+.PHONY: install
+
+# We can't force the develop to have all the versions locally, so
+# the local tests only include python3.6
+test: venv tox.ini
+	@ '$(VENV)/bin/tox' -e py36
+.PHONY: test
+
+lint: venv
+	@ '$(VENV)/bin/tox' -e lint
+.PHONY: lint
diff --git a/app/__init__.py b/app/__init__.py
new file mode 100644
index 0000000..5927fab
--- /dev/null
+++ b/app/__init__.py
@@ -0,0 +1 @@
+"""Module containing all app code."""
diff --git a/app/__main__.py b/app/__main__.py
index b4cd3a0..1a674ad 100644
--- a/app/__main__.py
+++ b/app/__main__.py
@@ -1,59 +1,65 @@
+"""The main entrypoint of the program."""
 import argparse
 import sys
 
 from parser import read_specs_file
 
 
-# This just displays the error type and message, not the stack trace
 def except_hook(ext_type, value, traceback):
+    """Make errors not show the stack trace to stdout.
+
+    Todo:
+        * Replace this with proper error handling
+    """
     sys.stderr.write("{}: {}\n".format(ext_type.__name__, value))
 
 
 # sys.excepthook = except_hook
 
-# Define parser
-parser = argparse.ArgumentParser(
-    description="Backup directories and Docker volumes."
-)
-parser.add_argument(
-    "-f",
-    "--file",
-    action="append",
-    dest="file",
-    required=True,
-    help="File containing spec definitions.",
-)
-parser.add_argument(
-    "-j",
-    "--json",
-    action="store_const",
-    const=True,
-    default=False,
-    help="Print out the parsed specs as JSON and exit",
-)
-parser.add_argument(
-    "spec", nargs="*", help="The specs to process. Defaults to all."
-)
+if __name__ == "__main__":
+    # Define parser
+    parser = argparse.ArgumentParser(
+        description="Backup directories and Docker volumes."
+    )
+    parser.add_argument(
+        "-f",
+        "--file",
+        action="append",
+        dest="file",
+        required=True,
+        help="File containing spec definitions.",
+    )
+    parser.add_argument(
+        "-j",
+        "--json",
+        action="store_const",
+        const=True,
+        default=False,
+        help="Print out the parsed specs as JSON and exit",
+    )
+    parser.add_argument(
+        "spec", nargs="*", help="The specs to process. Defaults to all."
+    )
 
-# Parse arguments
-args = parser.parse_args()
-specs = sum([read_specs_file(path) for path in args.file], [])
+    # Parse arguments
+    args = parser.parse_args()
+    specs = sum((read_specs_file(path) for path in args.file), [])
 
-# Filter specs if needed
-if args.spec:
-    specs = list(filter(lambda s: s.name in args.spec, specs))
+    # Filter specs if needed
+    if args.spec:
+        specs = list(filter(lambda s: s.name in args.spec, specs))
 
-# Dump parsed data as json
-if args.json:
-    import json
+    # Dump parsed data as json
+    if args.json:
+        import json
 
-    print(json.dumps([spec.to_dict() for spec in specs], indent=4))
+        # TODO replace this with error handling system
+        print(json.dumps([spec.to_dict() for spec in specs], indent=4))
 
-else:
-    # Run the backups
-    if not specs:
+    elif not specs:
+        # TODO replace this with error handling system
         print("No specs, exiting.")
         sys.exit(0)
 
-    for spec in specs:
-        spec.backup()
+    for spec in specs:
+        spec.backup()
diff --git a/app/exceptions.py b/app/exceptions.py
new file mode 100644
index 0000000..b82bad8
--- /dev/null
+++ b/app/exceptions.py
@@ -0,0 +1,55 @@
+"""Common exceptions raised by the program."""
+from typing import Union, List
+
+
+class InvalidKeyError(Exception):
+    """Thrown when a config file contains an invalid key."""
+
+    def __init__(self, keys: Union[str, List[str]]):
+        """Create a new InvalidKeyError object with the given key.
+
+        Args:
+            keys: the invalid key(s)
+        """
+        if type(keys) == str:
+            keys = [keys]
+
+        self.message = "Invalid key(s): {}".format(", ".join(keys))
+
+        super().__init__()
+
+
+class MissingKeyError(Exception):
+    """Thrown when a required key is missing from a config."""
+
+    def __init__(self, keys: Union[str, List[str]]):
+        """Create a new MissingKeyError object with the given key.
+
+        Args:
+            keys: the invalid key(s)
+        """
+        if type(keys) == str:
+            keys = [keys]
+
+        self.message = "Missing key(s): {}".format(", ".join(keys))
+
+        super().__init__()
+
+
+class InvalidValueError(Exception):
+    """Thrown when a key contains an invalid value."""
+
+    def __init__(self, key: str, expected: str, actual: str):
+        """Create a new InvalidValueError given the arguments.
+
+        Args:
+            key: the key containing the invalid value
+            expected: name of the expected type
+            actual: name of the actual type
+        """
+        self.message = (
+            f"Invalid value for key {key}: expected {expected}, "
+            f"got {actual}"
+        )
+
+        super().__init__()
diff --git a/app/logger.py b/app/logger.py
new file mode 100644
index 0000000..245eeed
--- /dev/null
+++ b/app/logger.py
@@ -0,0 +1,116 @@
+"""This module contains the logging module."""
+from typing import Union
+from pathlib import Path
+from datetime import datetime
+import sys
+
+
+class Logger:
+    """A logger class that logs, ya get the point."""
+
+    LOG_LEVELS = [
+        "debug",
+        "info",
+        "warning",
+        "error",
+        "critical",
+    ]
+    """The log levels' names.
+
+    When used as arguments, the counting starts at 1
+    instead of 0.
+    """
+
+    def __init__(
+        self,
+        log_file: Union[Path, str] = None,
+        append: bool = True,
+        stdout: bool = True,
+        log_level: int = 3,
+    ):
+        """Initialize a new Logger object.
+
+        Args:
+            log_file: path to a log file. If any of the folders within the log
+                file's path don't exist, they will get created. If no value is
+                specified, no log file is created.
+            append: whether or not to append to the existing file or overwrite
+                it. If False, the original file gets deleted during init.
+            stdout: whether or not to log to stdout as well
+            log_level: the minimum level to log
+        """
+        self.log_file = Path(log_file) if log_file else None
+        self.stdout = stdout
+        self.log_level = log_level
+
+        if self.log_file:
+            # Create any missing parent folders of the log file
+            self.log_file.parent.mkdir(parents=True, exist_ok=True)
+
+            # Remove the original log file
+            if not append and self.log_file.exists():
+                self.log_file.unlink()
+
+    def custom(self, message: str, header: str = None):
+        """Log a message given a header and a message.
+
+        If a header is provided (aka truthy), the final form of the message
+        will be:
+
+        `[YYYY-MM-DD HH:MM:SS][header] message`
+
+        Otherwise, it's just:
+
+        `[YYYY-MM-DD HH:MM:SS] message`
+
+        Args:
+            message: the message to display
+            header: the header to add to the message
+        """
+        timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        log_message = f"[{timestamp}] {message}\n"
+
+        if header:
+            log_message = f"[{timestamp}][{header}] {message}\n"
+
+        if self.log_file:
+            # Append so earlier log messages are preserved
+            with self.log_file.open("a") as log_handle:
+                log_handle.write(log_message)
+
+        if self.stdout:
+            sys.stdout.write(log_message)
+
+    def log(self, level: int, message: str):
+        """Log a message with a specific level.
+
+        Args:
+            level: log level (index in the LOG_LEVELS variable)
+            message: the message to log
+        """
+        if level < self.log_level:
+            return
+
+        level_name = self.LOG_LEVELS[level - 1].upper()
+        # The level name is the header of the message, not the content
+        self.custom(message, level_name)
+
+    def debug(self, message: str):
+        """Log a debug message."""
+        self.log(1, message)
+
+    def info(self, message: str):
+        """Log an info message."""
+        self.log(2, message)
+
+    def warning(self, message: str):
+        """Log a warning message."""
+        self.log(3, message)
+
+    def error(self, message: str):
+        """Log an error message."""
+        self.log(4, message)
+
+    def critical(self, message: str):
+        """Log a critical message."""
+        self.log(5, message)
diff --git a/app/notifier.py b/app/notifier.py
index 1848775..903e791 100644
--- a/app/notifier.py
+++ b/app/notifier.py
@@ -1,9 +1,12 @@
+"""Module handling IFTTT notifications."""
 from typing import List
 import os
 
 import requests
 
 
 class Notifier:
+    """A notifier object that can send IFTTT notifications."""
+
     # (positive, negative)
     _EVENTS = {
         "backup": (
@@ -15,24 +18,40 @@ class Notifier:
             "Couldn't restore {name}.",
         ),
     }
+    """The message content for a given event."""
 
     # Placeholder
     def __init__(
         self, title: str, events: List[str], endpoint: str, api_key: str = None
     ):
+        """Initialize a new Notifier object.
+
+        Args:
+            title: the notification title to use
+            events: the event types that should trigger a notification (should
+                be one of the keys in _EVENTS).
+            endpoint: IFTTT endpoint name
+            api_key: your IFTTT API key. If not provided, it will be read from
+                the IFTTT_API_KEY environment variable.
+
+        Todo:
+            * Read the API key on init
+        """
         self.title = title
         self.events = events
        self.endpoint = endpoint
        self.api_key = api_key
 
     def notify(self, category: str, name: str, status_code: int):
-        """
+        """Send an IFTTT notification.
+
         Args:
-            category: type of notify (e.g. backup or restore)
+            category: type of notify (should be one of the keys in _EVENTS).
+                Only if the category was passed during initialization will the
+                notification be sent.
             name: name of the spec
             status_code: exit code of the command
         """
-
         event = "{}_{}".format(
             category, "success" if status_code == 0 else "failure"
         )
diff --git a/app/parser.py b/app/parser.py
index bbac40c..b02e5a0 100644
--- a/app/parser.py
+++ b/app/parser.py
@@ -1,3 +1,4 @@
+"""Handles parsing a config file from disk."""
 import yaml
 from pathlib import Path
 from typing import List, Union
@@ -6,9 +7,18 @@ import skeleton
 
 
 def read_specs_file(path: Union[str, Path]) -> List[Spec]:
+    """Read a config file and merge it with the skeleton.
+
+    Args:
+        path: path to the yaml config file
+
+    Returns:
+        A list of specs, parsed from the config.
+    """
     with open(path, "r") as yaml_file:
         data = yaml.safe_load(yaml_file)
 
+    # NOTE shouldn't this be defined as a top-level variable?
     categories = [
         ("directories", DirectorySpec),
         ("volumes", VolumeSpec),
@@ -23,6 +33,7 @@ def read_specs_file(path: Union[str, Path]) -> List[Spec]:
 
     # Check what defaults are present
     defaults = {}
+
    if data.get("defaults"):
        if data["defaults"].get("all"):
            defaults = skeleton.merge(defaults, data["defaults"]["all"])
diff --git a/app/skeleton.py b/app/skeleton.py
index 07c6afb..c12bd45 100644
--- a/app/skeleton.py
+++ b/app/skeleton.py
@@ -1,21 +1,25 @@
+"""Handles merging with the skeleton config."""
 from typing import Dict
-
-
-class InvalidKeyError(Exception):
-    def __init__(self, key):
-        self.message = "Invalid key: {}".format(key)
-
-        super().__init__(key)
-
-
-class MissingKeyError(Exception):
-    def __init__(self, key):
-        self.message = "Missing key: {}".format(key)
-
-        super().__init__(key)
+from .exceptions import InvalidKeyError, MissingKeyError
 
 
 def merge(*dicts: [Dict]) -> Dict:
+    """Merge multiple dicts into one.
+
+    It reads the dicts from left to right, always preferring the "right"
+    dictionary's values. Therefore, the dictionaries should be sorted from
+    least important to most important (e.g. a default values skeleton should be
+    to the left of a dict of selected values).
+
+    Args:
+        dicts: the dictionaries to merge
+
+    Returns:
+        a new dictionary representing the merged dictionaries
+
+    Todo:
+        * Make sure an infinite loop is not possible
+    """
     # Base cases
     if len(dicts) == 0:
         return {}
@@ -44,15 +48,29 @@ def merge(*dicts: [Dict]) -> Dict:
 
 
 def merge_with_skeleton(data: Dict, skel: Dict) -> Dict:
-    """
-    Compare a dict with a given skeleton dict, and fill in default values where
-    needed.
-    """
+    """Merge a dictionary with a skeleton containing default values.
+
+    The skeleton not only defines what the default values are, but also
+    enforces a certain shape. This allows us to define a config file using a
+    dictionary and parse it.
+
+    Args:
+        data: dictionary containing the selected config values
+        skel: dictionary containing the skeleton (aka the def)
+
+    Returns:
+        a new dictionary representing the two merged dictionaries
+
+    Todo:
+        * Check if an infinite loop is possible
+        * Split into less complex functions
+    """
 
     # First, check for illegal keys
-    for key in data:
-        if key not in skel:
-            raise InvalidKeyError(key)
+    invalid_keys = list(filter(lambda k: k not in skel, data))
+
+    if invalid_keys:
+        raise InvalidKeyError(invalid_keys)
 
     # Then, check the default values
     for key, value in skel.items():
@@ -66,6 +84,7 @@ def merge_with_skeleton(data: Dict, skel: Dict) -> Dict:
 
         # Error if value is not same type as default value
         elif type(data[key]) != type(value) and value is not None:
+            # TODO make this error message more verbose
             raise TypeError("Invalid value type")
 
         # Recurse into dicts
diff --git a/app/specs/__init__.py b/app/specs/__init__.py
index 0192b9e..dd08c1c 100644
--- a/app/specs/__init__.py
+++ b/app/specs/__init__.py
@@ -1,4 +1,7 @@
+"""Parent module for the various spec types."""
 from .spec import Spec
 from .directory import DirectorySpec
 from .volume import VolumeSpec
 from .container import ContainerSpec
+
+__all__ = ["Spec", "DirectorySpec", "VolumeSpec", "ContainerSpec"]
diff --git a/app/specs/container.py b/app/specs/container.py
index bef6f8c..79d4d3a 100644
--- a/app/specs/container.py
+++ b/app/specs/container.py
@@ -1,3 +1,4 @@
+"""Module defining a container-based spec."""
 from .spec import Spec
 from typing import Union
 from pathlib import Path
@@ -6,11 +7,10 @@ import subprocess
 
 
 class ContainerSpec(Spec):
-    """
-    A spec for backing up via a container.
-    """
+    """Spec for backing up via a container."""
 
     _SKEL = {"container": None, "command": None, "mountpoint": "/from"}
+    """The skeleton for the ContainerSpec config."""
 
     def __init__(
         self,
@@ -23,6 +23,21 @@
         mountpoint: str,
         notify=None,
     ):
+        """Create a new ContainerSpec object.
+
+        Args:
+            name: name of the spec (used as an identifier)
+            container: the Docker container to back up
+            destination: where to store the backups (gets created if
+                non-existent)
+            limit: max amount of backups to keep
+            command: command to run inside the container. This command should
+                perform a specified backup and output this data to stdout. This
+                output then gets piped to a backup file.
+            extension: the extension of the backup files.
+            mountpoint: I don't actually know, this never gets used
+            notify: notifier object (may be None)
+        """
         super().__init__(name, destination, limit, extension, notify)
 
         self.container = container
@@ -30,6 +45,7 @@
         self.command = command
 
     def backup(self):
+        """Create a new backup."""
         # Remove excess backups
         self.remove_backups()
 
diff --git a/app/specs/directory.py b/app/specs/directory.py
index 9747b5b..b8fbc32 100644
--- a/app/specs/directory.py
+++ b/app/specs/directory.py
@@ -1,3 +1,4 @@
+"""Module defining a directory-based spec."""
 from .spec import Spec
 from pathlib import Path
 from typing import Union
@@ -6,9 +7,7 @@ from datetime import datetime
 
 
 class DirectorySpec(Spec):
-    """
-    A spec for backing up a local directory.
-    """
+    """A spec for backing up a local directory."""
 
     _SKEL = {
         "source": None,
@@ -25,6 +24,17 @@
         extension: str,
         notify=None,
     ):
+        """Initialize a new DirectorySpec object.
+
+        Args:
+            name: name of the spec
+            source: what directory to back up
+            destination: where to store the backup
+            limit: how many backups to keep
+            command: what command to use to create the backup
+            extension: extension of the backup files
+            notify: a Notifier object that handles sending notifications
+        """
         super().__init__(name, destination, limit, extension, notify)
 
         self.source = source if type(source) == Path else Path(source)
@@ -38,6 +48,7 @@
         self.command = command
 
     def backup(self):
+        """Create a new backup."""
         # Remove excess backups
         self.remove_backups()
 
diff --git a/app/specs/spec.py b/app/specs/spec.py
index 9b372ef..f26f54b 100644
--- a/app/specs/spec.py
+++ b/app/specs/spec.py
@@ -1,4 +1,5 @@
+"""This module contains the base Spec class."""
 from pathlib import Path
 from typing import Union, Dict
 import skeleton
 import os
@@ -7,9 +8,7 @@ import inspect
 
 
 class Spec:
-    """
-    Base class for all other spec types.
-    """
+    """Base class for all other spec types."""
 
     _SKEL = {
         "destination": None,
@@ -31,23 +30,27 @@
         extension: str,
         notify=None,
     ):
-        """
+        """Initialize a new Spec object.
+
+        This initializer usually gets called by a subclass's init instead of
+        directly.
+
         Args:
             name: name of the spec
             destination: directory where the backups shall reside
             limit: max amount of backups
-            notifier: notifier object
+            extension: file extension of the backup files
+            notify: notifier object to send IFTTT notifications
         """
 
         self.name = name
-        self.destination = (
-            destination if type(destination) == Path else Path(destination)
-        )
+        self.destination = Path(destination)
 
         # Create destination if non-existent
         try:
             self.destination.mkdir(parents=True, exist_ok=True)
 
+        # TODO just make this some checks in advance
         except FileExistsError:
             raise NotADirectoryError(
                 "{} already exists, but isn't a directory.".format(
@@ -60,16 +63,24 @@
         self.extension = extension
 
     @classmethod
-    def skeleton(cls):
+    def skeleton(cls: "Spec") -> Dict:
+        """Return the skeleton for the given class.
+
+        It works by inspecting the inheritance tree and merging the skeleton
+        for each of the parents.
+
+        Args:
+            cls: the class to get the skeleton for
+
+        Returns:
+            a dictionary containing the skeleton
+        """
         return skeleton.merge(
             *[val._SKEL for val in reversed(inspect.getmro(cls)[:-1])]
         )
 
     def remove_backups(self):
-        """
-        Remove all backups exceeding the limit
-        """
-
+        """Remove all backups exceeding the limit."""
         files = sorted(
             self.destination.glob("*." + self.extension),
             key=os.path.getmtime,
@@ -81,13 +92,22 @@
             path.unlink()
 
     def backup(self):
+        """Create a new backup.
+
+        This function should be implemented by the subclasses.
+        """
         raise NotImplementedError()
 
     def restore(self):
+        """Restore a given backup (NOT IMPLEMENTED).
+
+        This function should be implemented by the subclasses.
+        """
         raise NotImplementedError()
 
     @classmethod
     def from_dict(cls, name, obj: Dict, defaults: Dict) -> "Spec":
+        """Create the class given a dictionary (e.g. from a config)."""
         # Combine defaults with skeleton, creating new skeleton
         skel = skeleton.merge(cls.skeleton(), defaults)
 
@@ -97,4 +117,8 @@
         return cls(name, **obj)
 
     def to_dict(self):
+        """Export the class as a dictionary.
+
+        This function should be implemented by the subclasses.
+        """
         raise NotImplementedError()
diff --git a/app/specs/volume.py b/app/specs/volume.py
index cd9d937..403c51b 100644
--- a/app/specs/volume.py
+++ b/app/specs/volume.py
@@ -1,3 +1,4 @@
+"""Module defining a Docker volume-based spec."""
 from .spec import Spec
 from typing import Union
 from pathlib import Path
@@ -6,9 +7,7 @@ import subprocess
 
 
 class VolumeSpec(Spec):
-    """
-    A spec for backing up a Docker volume.
-    """
+    """A spec for backing up a Docker volume."""
 
     _SKEL = {
         "volume": None,
@@ -27,6 +26,18 @@
         extension: str,
         notify=None,
     ):
+        """Initialize a new VolumeSpec object.
+
+        Args:
+            name: name of the spec
+            volume: Docker volume to back up
+            image: base image to use to run backup command
+            destination: where to store the backup files
+            limit: max backup files to keep
+            command: backup command to run within the base image
+            extension: file extension of the backup files
+            notify: Notifier object
+        """
         super().__init__(name, destination, limit, extension, notify)
 
         self.volume = volume
@@ -34,6 +45,7 @@
         self.command = command
 
     def backup(self):
+        """Create a new backup."""
         # Remove excess backups
         self.remove_backups()
 
@@ -42,8 +54,10 @@
             datetime.now().strftime("%Y-%m-%d_%H-%M-%S"), self.extension
         )
 
+        base_cmd = "docker run --rm -v '{}:/from' -v '{}:/to' -w /from '{}' {}"
+
         process = subprocess.run(
-            "docker run --rm -v '{}:/from' -v '{}:/to' -w /from '{}' {}".format(
+            base_cmd.format(
                 self.volume,
                 self.destination,
                 self.image,
diff --git a/renovate.json b/renovate.json
new file mode 100644
index 0000000..9775346
--- /dev/null
+++ b/renovate.json
@@ -0,0 +1,3 @@
+{
+  "$schema": "https://docs.renovatebot.com/renovate-schema.json"
+}
diff --git a/requirements-dev.txt b/requirements-dev.txt
deleted file mode 100644
index 3eed54f..0000000
--- a/requirements-dev.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-# Language server
-jedi==0.18.0
-
-# Linting & Formatting
-black==20.8b1
-flake8==3.8.4
-
-# Testing
-tox==3.21.1
-pytest==6.2.1
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..568b586
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,25 @@
+[options.extras_require]
+# Used to run the tests inside the CICD pipeline
+ci =
+    tox==3.23.1
+
+# Used inside Tox for running tests
+test =
+    pytest==6.2.4
+    pytest-cov==2.12.1
+
+# Used inside tox for linting
+lint =
+    black==20.8b1
+    flake8==3.9.2
+    flake8-bugbear==21.4.3
+    flake8-comprehensions==3.5.0
+    flake8-docstrings==1.6.0
+    flake8-print==4.0.0
+    flake8-quotes==3.2.0
+
+# Required for the developer
+develop =
+    %(ci)s
+    %(lint)s
+    jedi==0.18.0
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..0b00eae
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,10 @@
+from setuptools import setup
+
+setup(
+    name="backup-tool",
+    version="0.1.0",
+    author="Jef Roosens",
+    description="A utility to simplify backing up services.",
+    # TODO add license
+    packages=["app", "tests"],
+)
diff --git a/requirements.txt b/tests/__init__.py
similarity index 100%
rename from requirements.txt
rename to tests/__init__.py
diff --git a/tests/test_dict_merge.py b/tests/test_dict_merge.py
new file mode 100644
index 0000000..2da81bf
--- /dev/null
+++ b/tests/test_dict_merge.py
@@ -0,0 +1,69 @@
+"""Tests for the skeleton module."""
+from app.skeleton import merge
+
+
+def test_merge_empty():
+    """Test correct response for an empty merge."""
+    assert merge() == {}
+
+
+def test_merge_single():
+    """Test merge command with a single input."""
+    assert merge({}) == {}
+
+    dic = {"test": "value", "test2": "value2"}
+
+    assert merge(dic) == dic
+
+
+def test_merge_double_no_overlap():
+    """Test merge command with two non-overlapping inputs."""
+    d1 = {"test": "value", "test2": "value2"}
+    d2 = {"test3": "value3"}
+    d_res = {"test": "value", "test2": "value2", "test3": "value3"}
+
+    assert merge(d1, d2) == d_res
+
+
+def test_merge_double_overlap():
+    """Test merge command with two overlapping inputs."""
+    d1 = {"test": "value", "test2": "value2"}
+    d2 = {"test2": "value3"}
+    d_res = {"test": "value", "test2": "value3"}
+
+    assert merge(d1, d2) == d_res
+
+
+def test_merge_triple_no_overlap():
+    """Test merge command with three non-overlapping inputs.
+
+    This test tells us that the recursion works.
+    """
+    d1 = {"test": "value", "test2": "value2"}
+    d2 = {"test3": "value3"}
+    d3 = {"test4": "value4"}
+    d_res = {
+        "test": "value",
+        "test2": "value2",
+        "test3": "value3",
+        "test4": "value4",
+    }
+
+    assert merge(d1, d2, d3) == d_res
+
+
+def test_merge_triple_overlap():
+    """Test merge command with three overlapping inputs.
+
+    This test tells us that the recursion works.
+    """
+    d1 = {"test": "value", "test2": "value2"}
+    d2 = {"test3": "value3"}
+    d3 = {"test2": "value4"}
+    d_res = {
+        "test": "value",
+        "test2": "value4",
+        "test3": "value3",
+    }
+
+    assert merge(d1, d2, d3) == d_res
diff --git a/tests/test_logger.py b/tests/test_logger.py
new file mode 100644
index 0000000..ff2debd
--- /dev/null
+++ b/tests/test_logger.py
@@ -0,0 +1,23 @@
+"""Tests for the logger module."""
+from app.logger import Logger
+from datetime import datetime
+
+
+def test_custom_stdout(capfd):
+    """Test the custom command."""
+    logger = Logger()
+
+    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+    logger.custom("a message", header="cewl")
+
+    out, _ = capfd.readouterr()
+
+    assert out == f"[{timestamp}][cewl] a message\n"
+
+
+def test_log_stdout(capfd):
+    """Test the log command with several levels."""
+
+    logger = Logger()
+
+    # TODO
diff --git a/tests/test_skeleton.py b/tests/test_skeleton.py
new file mode 100644
index 0000000..9ade87e
--- /dev/null
+++ b/tests/test_skeleton.py
@@ -0,0 +1,70 @@
+"""Tests whether the skeleton merge works."""
+from app.skeleton import merge_with_skeleton
+from app.exceptions import InvalidKeyError, MissingKeyError
+import pytest
+
+
+def test_single_invalid_key():
+    """Tests whether an InvalidKeyError is correctly thrown for a single key."""
+    data = {
+        "test": 1,
+        "test2": "test"
+    }
+    skel = {
+        "test": None,
+    }
+
+    with pytest.raises(InvalidKeyError) as e_info:
+        merge_with_skeleton(data, skel)
+
+    assert e_info.value.message == "Invalid key(s): test2"
+
+
+def test_multiple_invalid_keys():
+    """Tests whether an InvalidKeyError is thrown for multiple keys."""
+    data = {
+        "test": 1,
+        "test2": "test",
+        "test3": "test",
+    }
+    skel = {
+        "test": None,
+    }
+
+    with pytest.raises(InvalidKeyError) as e_info:
+        merge_with_skeleton(data, skel)
+
+    assert e_info.value.message == "Invalid key(s): test2, test3"
+
+
+def test_single_missing_key():
+    """Tests whether a MissingKeyError is correctly thrown for a single key."""
+    data = {
+        "test": 1,
+    }
+    skel = {
+        "test": None,
+        "test2": None,
+    }
+
+    with pytest.raises(MissingKeyError) as e_info:
+        merge_with_skeleton(data, skel)
+
+    assert e_info.value.message == "Missing key(s): test2"
+
+
+def test_multiple_missing_keys():
+    """Tests whether a MissingKeyError is correctly thrown for multiple keys."""
+    data = {
+        "test": 1,
+    }
+    skel = {
+        "test": None,
+        "test2": None,
+        "test3": None,
+    }
+
+    with pytest.raises(MissingKeyError) as e_info:
+        merge_with_skeleton(data, skel)
+
+    assert e_info.value.message == "Missing key(s): test2, test3"
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..847ba45
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,14 @@
+[tox]
+envlist = py36,py37,pypy37,py38,py39,lint
+
+[testenv]
+deps = .[test]
+commands =
+    pytest --cov=app --cov-fail-under=90 tests/
+
+[testenv:lint]
+basepython = python3.6
+deps = .[lint]
+commands =
+    black --check setup.py app
+    flake8 setup.py app