Added working backup system

parent bb33f7cbbc
commit 44764d30d9
@@ -8,7 +8,7 @@ def except_hook(ext_type, value, traceback):
     sys.stderr.write("{}: {}\n".format(ext_type.__name__, value))


-sys.excepthook = except_hook
+# sys.excepthook = except_hook


 # Define parser
@@ -0,0 +1,4 @@
+class Notifier:
+    # Placeholder
+    def __init__(*args, **kwargs):
+        pass
@@ -3,32 +3,33 @@ import yaml
 from pathlib import Path
 from typing import List, Union
 from specs import Spec, DirectorySpec
+import skeleton


 def read_specs_file(path: Union[str, Path]) -> List[Spec]:
     with open(path, "r") as yaml_file:
-        data = yaml.safe_load(yaml_file, Loader=yaml.FullLoader)
+        data = yaml.safe_load(yaml_file)

     categories = [("directories", DirectorySpec)]

     specs = []

     for key, class_name in categories:
-        if key not in data["specs"]:
+        if not data["specs"].get(key):
             continue

         # Check what defaults are present
-        defaults = []
+        defaults = {}
         if data.get("defaults"):
             if data["defaults"].get("all"):
-                defaults.append(data["defaults"]["all"])
+                defaults = skeleton.merge(defaults, data["defaults"]["all"])

             if data["defaults"].get(key):
-                defaults.append(data["defaults"][key])
+                defaults = skeleton.merge(defaults, data["defaults"][key])

         specs.extend(
             [
-                class_name.from_dict(name, spec, *defaults)
+                class_name.from_dict(name, spec, defaults)
                 for name, spec in data["specs"][key].items()
             ]
         )
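For orientation, a sketch of the parsed-config dict shape this function appears to expect after the change. The "defaults"/"specs"/"directories" keys come from the code above; the entry name and concrete values are invented for illustration.

# Hypothetical parsed config (values are illustrative, not from the repo)
data = {
    "defaults": {
        "all": {"limit": 5},                     # layered onto every category
        "directories": {"extension": "tar.gz"},  # layered onto this category only
    },
    "specs": {
        "directories": {
            "docs": {"source": "/home/user/docs", "destination": "/mnt/backups"},
        },
    },
}
# read_specs_file merges defaults["all"] and defaults["directories"] into a single
# defaults dict (later keys win) and hands it to DirectorySpec.from_dict per entry.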
@@ -15,7 +15,35 @@ class MissingKeyError(Exception):
         super().__init__(key)


-def combine(data: Dict, skel: Dict) -> Dict:
+def merge(*dicts: [Dict]) -> Dict:
+    # Base cases
+    if len(dicts) == 0:
+        return {}
+
+    if len(dicts) == 1:
+        return dicts[0]
+
+    # We merge the first two dicts
+    d1, d2 = dicts[0], dicts[1]
+
+    output = d1.copy()
+
+    for key, value in d2.items():
+        if type(value) == dict:
+            # Merge the two sub-dictionaries
+            output[key] = (
+                merge(output[key], value)
+                if type(output.get(key)) == dict
+                else value
+            )
+
+        else:
+            output[key] = value
+
+    return merge(output, *dicts[2:])
+
+
+def merge_with_skeleton(data: Dict, skel: Dict) -> Dict:
     """
     Compare a dict with a given skeleton dict, and fill in default values where
     needed.
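A quick illustration of how this recursive merge behaves, with made-up inputs: later dicts override earlier ones, and nested dicts are merged key-by-key instead of replaced wholesale.

merge()                                  # -> {}
merge({"a": 1})                          # -> {"a": 1}
merge({"a": 1, "b": {"x": 1}},
      {"b": {"y": 2}, "c": 3})           # -> {"a": 1, "b": {"x": 1, "y": 2}, "c": 3}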
@@ -42,6 +70,6 @@ def combine(data: Dict, skel: Dict) -> Dict:

         # Recurse into dicts
         elif type(value) == dict:
-            data[key] = combine_with_skeleton(data[key], value)
+            data[key] = merge_with_skeleton(data[key], value)

     return data
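The full body of merge_with_skeleton is not shown in this diff; going only by its docstring and the recursion visible above, it seems to complete a user dict with the skeleton's defaults. A guessed example, with invented values:

# Assumed behaviour, inferred from the docstring; not verified against the full function
merge_with_skeleton(
    {"destination": "/mnt/backups"},
    {"destination": None, "limit": 5, "extension": "tar.gz"},
)
# -> {"destination": "/mnt/backups", "limit": 5, "extension": "tar.gz"}
# A key whose skeleton value is None presumably must be supplied by the user,
# otherwise MissingKeyError (seen in the hunk header above) is raised.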
@@ -10,14 +10,9 @@ class DirectorySpec(Spec):
     A spec for backing up a local directory.
     """

-    __SKEL = {
-        "name": None,
+    _SKEL = {
         "source": None,
-        "destination": None,
-        "limit": None,
-        "notifier": None,
         "command": "tar -czf '{destination}/{filename}' .",
-        "extension": "tar.gz",
     }

     def __init__(
@@ -28,9 +23,9 @@ class DirectorySpec(Spec):
         limit: int,
         command: str,
         extension: str,
-        notifier=None,
+        notify=None,
     ):
-        super().__init__(name, destination, limit, extension, notifier)
+        super().__init__(name, destination, limit, extension, notify)

         self.source = source if type(source) == Path else Path(source)

@@ -58,6 +53,7 @@ class DirectorySpec(Spec):
                 filename=filename,
             ),
             cwd=self.source,
+            shell=True,
         )

         if self.notifier:
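The enclosing call is not fully visible in this hunk, but the arguments (cwd=, shell=True) look like a subprocess invocation of the formatted command string. A minimal sketch of that pattern, with placeholder values rather than the real call site:

import subprocess

# Sketch only: the destination, filename and source paths are placeholders
command = "tar -czf '{destination}/{filename}' ."
subprocess.run(
    command.format(destination="/mnt/backups", filename="docs_2021-07-14.tar.gz"),
    cwd="/home/user/docs",   # run tar from inside the source directory
    shell=True,              # required because the command is a single shell string
)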
@@ -3,6 +3,8 @@ from pathlib import Path
 from typing import Union, Dict
 import skeleton
 import os
+from notifier import Notifier
+import inspect


 class Spec:
@@ -10,11 +12,13 @@ class Spec:
     Base class for all other spec types.
     """

-    __SKEL = {
-        "name": None,
+    _SKEL = {
         "destination": None,
         "limit": None,
-        "notifier": None,
+        "notify": {
+            "title": "Backup Notification",
+            "events": ["backup_sucess"],
+        },
         "extension": "tar.gz",
     }

@@ -24,7 +28,7 @@ class Spec:
         destination: Union[Path, str],
         limit: int,
         extension: str,
-        notifier=None,
+        notify=None,
     ):
         """
         Args:
@@ -48,9 +52,15 @@ class Spec:
         )

         self.limit = limit
-        self.notifier = notifier
+        self.notifier = Notifier(*notify) if notify else None
         self.extension = extension

+    @classmethod
+    def skeleton(cls):
+        return skeleton.merge(
+            *[val._SKEL for val in reversed(inspect.getmro(cls)[:-1])]
+        )
+
     def remove_backups(self):
         """
         Remove all backups exceeding the limit
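To make the getmro trick above concrete, here is a small self-contained sketch of how per-class _SKEL dicts compose, base class first so subclass entries win. The Base/Child classes are hypothetical and the merge shown is a simplified, non-recursive stand-in for skeleton.merge:

import inspect


def merge(*dicts):
    # simplified stand-in for skeleton.merge (non-recursive)
    out = {}
    for d in dicts:
        out.update(d)
    return out


class Base:
    _SKEL = {"destination": None, "limit": None, "extension": "tar.gz"}


class Child(Base):
    _SKEL = {"source": None, "command": "tar -czf '{destination}/{filename}' ."}


# getmro(Child) == (Child, Base, object); [:-1] drops object, reversed() walks
# base-to-subclass so subclass entries override the base defaults.
skel = merge(*[val._SKEL for val in reversed(inspect.getmro(Child)[:-1])])
# -> {"destination": None, "limit": None, "extension": "tar.gz",
#     "source": None, "command": "tar -czf '{destination}/{filename}' ."}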
@@ -73,14 +83,15 @@ class Spec:
         raise NotImplementedError()

     @classmethod
-    def from_dict(cls, name, obj: Dict, *defaults: Dict) -> Spec:
+    def from_dict(cls, name, obj: Dict, defaults: Dict) -> Spec:
         # Combine defaults with skeleton, creating new skeleton
-        skel = cls.__SKEL
-
-        for default in defaults:
-            skel = skeleton.combine(defaults, skel)
+        skel = skeleton.merge(cls.skeleton(), defaults)
+        print(skel)

         # Then, combine actual values with new skeleton
-        obj = skeleton.combine(obj, skel)
+        obj = skeleton.merge_with_skeleton(obj, skel)

         return cls(name, **obj)
+
+    def to_dict(self):
+        raise NotImplementedError()
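Putting the pieces together, from_dict now appears to work in two merge steps. A rough trace with invented defaults and paths, assuming merge_with_skeleton fills gaps as its docstring says; it uses the repo's own modules, so it is not standalone-runnable:

# 1) defaults from the YAML are layered over the class skeleton
skel = skeleton.merge(DirectorySpec.skeleton(), {"limit": 5, "extension": "tar.gz"})

# 2) the user's spec entry is completed/checked against that skeleton
obj = skeleton.merge_with_skeleton(
    {"source": "/home/user/docs", "destination": "/mnt/backups"},
    skel,
)

# 3) the completed dict becomes the constructor's keyword arguments
spec = DirectorySpec("docs", **obj)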
@@ -36,3 +36,7 @@ specs:
       cmd: ''
       extension: 'tar.gz'
   directories:
+    test:
+      destination: "/home/jjr/test"
+      limit: 5
+      source: "/home/jjr"
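Finally, a guessed end-to-end use of the pieces this commit touches. read_specs_file and remove_backups come from the diff above; the config filename and the backup() entry point are assumptions not shown here.

# Hypothetical driver code; names marked below are assumptions
specs = read_specs_file("backup.yml")   # filename is an assumption

for spec in specs:
    spec.backup()           # assumed entry point; only hinted at by this diff
    spec.remove_backups()   # prune archives past the configured limit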