Compare commits
2 Commits
57a0248236 ... 0172b193a1
Author      | SHA1       | Date
------------|------------|-----
Jef Roosens | 0172b193a1 |
Jef Roosens | 94bd72ee39 |
@@ -1,2 +0,0 @@
__pycache__/
backup_tool
@@ -1,4 +0,0 @@
# Backups
I wrote this Python program to manage backups of the stuff running on our
server. I know there are probably better ways to do this, but I really liked
working on this and it works well enough for our use case.
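A minimal sketch of how the tool is meant to be invoked, based on the argument parser defined in the deleted entry point below (the file path and spec names are hypothetical):

    # Parse one or more spec files and back up only the named specs
    backup_tool -f /etc/backups/specs.yml nextcloud gitea

    # Print the parsed specs as JSON and exit
    backup_tool -f /etc/backups/specs.yml --json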
@@ -1,41 +0,0 @@
import argparse
import sys

from specs import parse_specs_file


# This just displays the error type and message, not the stack trace
def except_hook(ext_type, value, traceback):
    sys.stderr.write("{}: {}\n".format(ext_type.__name__, value))


sys.excepthook = except_hook


# Define parser
parser = argparse.ArgumentParser(
    description='Backup directories and Docker volumes.')
parser.add_argument('-f', '--file', action='append', dest='file',
                    help='File containing spec definitions.')
parser.add_argument('-j', '--json', action='store_const', const=True,
                    default=False, help='Print out the parsed specs as JSON '
                    'and exit')
parser.add_argument('spec', nargs='*',
                    help='The specs to process. Defaults to all.')

# Parse arguments
args = parser.parse_args()
specs = sum([parse_specs_file(path) for path in args.file], [])

# Filter specs if needed
if args.spec:
    specs = filter(lambda s: s.name in args.spec, specs)

# Dump parsed data as json
if args.json:
    import json

    print(json.dumps([spec.to_dict() for spec in specs], indent=4))

else:
    pass
    # Run the backups
    # for spec in specs:
    #     spec.backup()
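Thanks to the except_hook override above, a malformed specs file surfaces as a single line rather than a traceback; a run against a broken file (hypothetical file name) would exit with output roughly like:

    $ backup_tool -f broken.yml
    MissingKeyError: Missing key: source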
@@ -1,2 +0,0 @@
from .specs import Spec
from .parser import parse_specs_file
@@ -1,114 +0,0 @@
import yaml
from pathlib import Path
from specs import Spec
from typing import List, Dict


class InvalidKeyError(Exception):
    def __init__(self, key):
        message = "Invalid key: {}".format(key)

        super().__init__(message)


class MissingKeyError(Exception):
    def __init__(self, key):
        message = "Missing key: {}".format(key)

        super().__init__(message)


def parse_specs_file(path: Path) -> List[Spec]:
    """
    Parse a YAML file defining backup specs.

    Args:
        path: path to the specs file

    Returns:
        A list of specs
    """

    # Skeleton of a spec config
    # If a value is None, this means it doesn't have a default value and must
    # be defined
    spec_skel = {
        "source": None,
        "destination": None,
        "limit": None,
        "volume": False,
        "notify": {
            "title": "Backup Notification",
            "events": ["failure"]
        }
    }

    # Read YAML file
    with open(path, "r") as yaml_file:
        data = yaml.load(yaml_file, Loader=yaml.Loader)

    # Check specs section exists
    if "specs" not in data:
        raise MissingKeyError("specs")

    # Allow for default notify settings
    if "notify" in data:
        spec_skel["notify"] = data["notify"]

    specs = []
    # Check format for each spec
    for key in data["specs"]:
        specs.append(Spec.from_dict(key, combine_with_skeleton(
            data["specs"][key], spec_skel)
        ))

    return specs


def combine_with_skeleton(data: Dict, skel: Dict) -> Dict:
    """
    Compare a dict with a given skeleton dict, and fill in default values where
    needed.
    """

    # First, check for illegal keys
    for key in data:
        if key not in skel:
            raise InvalidKeyError(key)

    # Then, check the default values
    for key, value in skel.items():
        if key not in data:
            # Raise an error if there's no default value
            if value is None:
                raise MissingKeyError(key)

            # Replace with default value
            data[key] = value

        # Error if value is not the same type as the default value
        elif type(data[key]) != type(value) and value is not None:
            raise TypeError("Invalid value type")

        # Recurse into dicts
        elif type(value) == dict:
            data[key] = combine_with_skeleton(data[key], value)

    return data


# Test cases
if __name__ == "__main__":
    d1 = {
        "a": 5
    }
    s1 = {
        "a": 7,
        "b": 2
    }
    r1 = {
        "a": 5,
        "b": 2
    }

    assert combine_with_skeleton(d1, s1) == r1
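The recursion into nested dicts is what lets a spec override only part of the notify block while still inheriting the remaining defaults. A small sketch, using the same skeleton as parse_specs_file (the partial spec is invented):

    spec_skel = {
        "source": None, "destination": None, "limit": None, "volume": False,
        "notify": {"title": "Backup Notification", "events": ["failure"]},
    }
    partial = {
        "source": "/data", "destination": "/backups", "limit": 3,
        "notify": {"title": "Custom title"},
    }

    merged = combine_with_skeleton(partial, spec_skel)

    assert merged["volume"] is False
    assert merged["notify"] == {"title": "Custom title", "events": ["failure"]}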
@@ -1,146 +0,0 @@
from pathlib import Path
from datetime import datetime
import requests
import os
import yaml  # needed by Spec.from_file


class Spec:
    def __init__(self, name, destination, limit, title, events=None):
        self.name = name
        self.destination = Path(destination)
        self.limit = limit
        self.title = title
        self.events = [] if events is None else events

    def to_dict(self):
        return {
            "name": self.name,
            "destination": str(self.destination),
            "limit": self.limit,
            "notify": {
                "title": self.title,
                "events": self.events
            }
        }

    def backup(self):
        raise NotImplementedError()

    def remove_redundant(self):
        # Sort existing tarballs newest-first
        tarballs = sorted(self.destination.glob('*.tar.gz'),
                          key=os.path.getmtime, reverse=True)

        # Keep limit - 1 tarballs, leaving room for the new one
        if len(tarballs) >= self.limit:
            for path in tarballs[self.limit - 1:]:
                path.unlink()

    def notify(self, status_code):
        if status_code:
            if "failure" not in self.events:
                return

            message = "backup for {} failed.".format(self.name)

        else:
            if "success" not in self.events:
                return

            message = "backup for {} succeeded.".format(self.name)

        # Read API key from env vars
        try:
            key = os.environ["IFTTT_API_KEY"]

        # Don't send a notification if there's no API key defined
        except KeyError:
            return

        url = "https://maker.ifttt.com/trigger/{}/with/key/{}".format(
            "phone_notifications",
            key
        )

        data = {
            "value1": self.title,
            "value2": message
        }

        requests.post(url, data=data)

    def get_filename(self):
        return '{}_{}.tar.gz'.format(
            self.name,
            datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
        )

    @staticmethod
    def from_dict(name, data) -> "Spec":
        if data.get("volume", False):
            return VolumeSpec.from_dict(name, data)

        return DirSpec.from_dict(name, data)

    @staticmethod
    def from_file(path: str):
        with open(path, 'r') as yaml_file:
            data = yaml.load(yaml_file, Loader=yaml.Loader)

        return [Spec.from_dict(name, info)
                for name, info in data["specs"].items()]


class DirSpec(Spec):
    def __init__(self, name, source, destination, limit, title, events=None):
        super().__init__(name, destination, limit, title, events)

        self.source = Path(source)

    def backup(self):
        self.remove_redundant()

        status_code = os.system(
            "tar -C '{}' -czf '{}' -- .".format(
                self.source,
                self.destination / self.get_filename()
            )
        )

        self.notify(status_code)

    @staticmethod
    def from_dict(name, data):
        return DirSpec(
            name,
            data["source"],
            data["destination"],
            data["limit"],
            data["notify"]["title"],
            data["notify"]["events"]
        )


class VolumeSpec(Spec):
    def __init__(self, name, volume, destination, limit, title, events=None):
        super().__init__(name, destination, limit, title, events)

        self.volume = volume

    def backup(self):
        self.remove_redundant()

        status_code = os.system(
            "docker run --rm -v '{}:/from' -v '{}:/to' alpine:latest "
            "tar -C /from -czf '/to/{}' -- .".format(
                self.volume,
                self.destination,
                self.get_filename()
            )
        )

        self.notify(status_code)

    @staticmethod
    def from_dict(name, data):
        # The 'source' key doubles as the volume name when 'volume' is true
        return VolumeSpec(
            name,
            data["source"],
            data["destination"],
            data["limit"],
            data["notify"]["title"],
            data["notify"]["events"]
        )
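Spec.from_dict dispatches on the volume flag, so callers never pick a subclass themselves; the same key layout yields a DirSpec or a VolumeSpec. A sketch with invented spec data:

    spec = Spec.from_dict("media", {
        "source": "media-volume",       # volume name, since volume is true
        "destination": "/backups/media",
        "limit": 4,
        "volume": True,
        "notify": {"title": "Backup Notification", "events": ["failure"]},
    })

    assert isinstance(spec, VolumeSpec)
    print(spec.get_filename())  # e.g. media_2021-03-20_12-00-00.tar.gz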
@@ -1,15 +0,0 @@
notify:
  title: "title"
  events:
    - 'random'

specs:
  test-spec:
    source: '/some/path'
    destination: '/some/other/path'
    limit: 7

  test-2:
    source: '/path/to'
    destination: '/to/some/other/path'
    limit: 2
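After parsing, both specs inherit the top-level notify block, and volume defaults to false (and is omitted from to_dict). With the --json flag, test-spec would print roughly as:

    {
        "name": "test-spec",
        "destination": "/some/other/path",
        "limit": 7,
        "notify": {
            "title": "title",
            "events": ["random"]
        }
    }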
@@ -1,14 +0,0 @@
#!/usr/bin/env sh

# Zip app
(cd app && zip -r ../app.zip * -x "__pycache__/*" "**/__pycache__/*" ".vim/*" "**/.vim/*")

# Add shebang to top of file
echo "#!/usr/bin/env python3" | cat - app.zip > backup_tool
chmod a+x backup_tool

# Move executable over
mv backup_tool /usr/local/bin

# Remove zip
rm app.zip
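This works because CPython can execute a zip archive whose root contains a __main__.py; the script just bolts a shebang onto the archive by hand. The stdlib's zipapp module can do the same packaging in one step, though without the zip exclusion patterns, so a possible alternative (assuming __pycache__ has been cleaned out first) is:

    python3 -m zipapp app -p '/usr/bin/env python3' -o backup_tool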
@@ -63,7 +63,7 @@ DB_HOST=db
 DB_PORT=5432
 DB_DATABASE=firefly
 DB_USERNAME=firefly
-DB_PASSWORD=password
+DB_PASSWORD=firefly

 # MySQL supports SSL. You can configure it here.
 # If you use Docker or similar, you can set these variables from a file by appending them with _FILE
@@ -6,24 +6,23 @@ services:
       context: '.'
       args:
         - 'LOCALE=$DEFAULT_LOCALE'
-    image: 'firefly-iii-cron:latest'
+    image: 'chewingbever/firefly-iii-cron:latest'
     restart: 'always'

     healthcheck:
       test: 'curl -f localhost:8080 || exit 1'
       interval: '1m'
       timeout: '10s'
       retries: 3
       start_period: '10s'

     depends_on:
       db:
         condition: 'service_healthy'
       redis:
         condition: 'service_healthy'

     env_file:
       - '.env'
     labels:
       - 'com.centurylinklabs.watchtower.enable=true'
     networks:
       - 'nginx'
       - 'default'
@@ -31,25 +30,24 @@ services:
       - 'upload:/var/www/html/storage/upload'

   db:
-    image: 'postgres:13-alpine'
+    image: 'postgres:13.2-alpine'
     restart: 'always'
     healthcheck:
-      test: 'pg_isready -U $DB_USERNAME'
+      test: 'pg_isready -U firefly'
       interval: '10s'
       timeout: '5s'
       retries: 5
       start_period: '0s'

     environment:
-      - 'POSTGRES_DB=$DB_DATABASE'
-      - 'POSTGRES_PASSWORD=$DB_PASSWORD'
-      - 'POSTGRES_USER=$DB_USERNAME'
-    labels:
-      - 'com.centurylinklabs.watchtower.enable=true'
+      - 'POSTGRES_DB=firefly'
+      - 'POSTGRES_PASSWORD=firefly'
+      - 'POSTGRES_USER=firefly'
     volumes:
       - 'db-data:/var/lib/postgresql/data'

   redis:
-    image: 'redis:6-alpine'
+    image: 'redis:6.0.12-alpine'
     restart: 'always'
     healthcheck:
       test: 'redis-cli -h localhost ping'
@@ -57,9 +55,6 @@ services:
       timeout: '5s'
       retries: 3

-    labels:
-      - 'com.centurylinklabs.watchtower.enable=true'
-
 networks:
   nginx:
     external: true