Initial backup_tool code
commit 63bc3cc0f0 (parent ac8374f824)
@@ -0,0 +1 @@
__pycache__/
@@ -0,0 +1,4 @@
# Backups
I wrote this Python program to manage backups of the stuff running on our
server. I know there are probably better ways to do this, but I really liked
working on this and it works well enough for our use case.
@@ -0,0 +1,42 @@
import argparse
import sys
from specs import parse_specs_file


# This just displays the error type and message, not the stack trace
def except_hook(ext_type, value, traceback):
    sys.stderr.write("{}: {}\n".format(ext_type.__name__, value))

sys.excepthook = except_hook


# Define parser
parser = argparse.ArgumentParser(
    description='Backup directories and Docker volumes.')
parser.add_argument('-s', '--spec', action='append', dest='specs',
                    help='Spec to back up. If not specified, all specs in '
                         'the provided files are processed.')
parser.add_argument('-j', '--json', action='store_const', const=True,
                    default=False, help='Print out the parsed specs as JSON '
                                        'and exit')
parser.add_argument('file', nargs='+',
                    help='A YAML file containing specs.')

# Parse arguments
args = parser.parse_args()
specs = sum([parse_specs_file(path) for path in args.file], [])

# Filter specs if needed
if args.specs:
    specs = [spec for spec in specs if spec.name in args.specs]

# Dump parsed data as JSON
if args.json:
    import json
    print(json.dumps([spec.to_dict() for spec in specs], indent=4))

else:
    pass
    # Run the backups
    # for spec in specs:
    #     spec.backup()
@@ -0,0 +1,2 @@
from .specs import Spec
from .parser import parse_specs_file
@@ -0,0 +1,112 @@
import yaml
from pathlib import Path
from specs import Spec
from typing import List, Dict


class InvalidKeyError(Exception):
    def __init__(self, key):
        message = "Invalid key: {}".format(key)

        super().__init__(message)


class MissingKeyError(Exception):
    def __init__(self, key):
        message = "Missing key: {}".format(key)

        super().__init__(message)


def parse_specs_file(path: Path) -> List[Spec]:
    """
    Parse a YAML file defining backup specs.

    Args:
        path: path to the specs file

    Returns:
        A list of specs
    """

    # Skeleton of a spec config
    # If a value is None, this means it doesn't have a default value and must
    # be defined
    spec_skel = {
        "source": None,
        "destination": None,
        "limit": None,
        "volume": False,
        "notify": {
            "title": "Backup Notification",
            "events": ["success"]
        }
    }

    # Read YAML file
    with open(path, "r") as yaml_file:
        data = yaml.load(yaml_file, Loader=yaml.Loader)

    # Check that the specs section exists
    if "specs" not in data:
        raise MissingKeyError("specs")

    # TODO check if only specs section exists

    specs = []
    # Check format for each spec
    for key in data["specs"]:
        specs.append(Spec.from_dict(key, combine_with_skeleton(
            data["specs"][key], spec_skel)
        ))

    return specs


def combine_with_skeleton(data: Dict, skel: Dict) -> Dict:
    """
    Compare a dict with a given skeleton dict, and fill in default values where
    needed.
    """

    # First, check for illegal keys
    for key in data:
        if key not in skel:
            raise InvalidKeyError(key)

    # Then, check the default values
    for key, value in skel.items():
        if key not in data:
            # Raise an error if there's no default value
            if value is None:
                raise MissingKeyError(key)

            # Replace with default value
            data[key] = value

        # Error if value is not the same type as the default value
        elif type(data[key]) != type(value) and value is not None:
            raise TypeError("Invalid value type")

        # Recurse into dicts
        elif type(value) == dict:
            data[key] = combine_with_skeleton(data[key], value)

    return data


# Test cases
if __name__ == "__main__":
    d1 = {
        "a": 5
    }
    s1 = {
        "a": 7,
        "b": 2
    }
    r1 = {
        "a": 5,
        "b": 2
    }

    assert combine_with_skeleton(d1, s1) == r1
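
The test above only exercises the flat case. As a quick illustration of the recursive branch (a sketch, not part of the commit; the `specs.parser` import path is my assumption based on the package's `__init__.py`):

# Sketch: merging a partial 'notify' block against the skeleton defaults.
# Assumes combine_with_skeleton lives in the parser module of the 'specs'
# package, as implied above.
from specs.parser import combine_with_skeleton

skel = {
    "limit": None,
    "notify": {"title": "Backup Notification", "events": ["success"]}
}
data = {
    "limit": 3,
    "notify": {"events": ["failure"]}
}

# The missing notify title is filled in from the skeleton; the given
# events list and the limit are kept as-is.
assert combine_with_skeleton(data, skel) == {
    "limit": 3,
    "notify": {"title": "Backup Notification", "events": ["failure"]}
}
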
@@ -0,0 +1,119 @@
from pathlib import Path
from datetime import datetime
import os
import requests
import yaml


class Spec:
    def __init__(self, name, destination, limit, title, events=None):
        self.name        = name
        self.destination = Path(destination)
        self.limit       = limit
        self.title       = title
        self.events      = ["success"] if events is None else events

    def to_dict(self):
        return {
            "name": self.name,
            "destination": str(self.destination),
            "limit": self.limit,
            "notify": {
                "title": self.title,
                "events": self.events
            }
        }

    def backup(self):
        raise NotImplementedError()

    def notify(self, status_code):
        if status_code:
            if "failure" not in self.events:
                return

            message = "backup for {} failed.".format(self.name)

        else:
            if "success" not in self.events:
                return

            message = "backup for {} succeeded.".format(self.name)

        # Read API key from env vars
        try:
            key = os.environ["IFTTT_API_KEY"]

        # Don't send a notification if there's no API key defined
        except KeyError:
            return

        url = "https://maker.ifttt.com/trigger/{}/with/key/{}".format(
            "phone_notifications",
            key
        )

        data = {
            "value1": self.title,
            "value2": message
        }

        requests.post(url, data=data)

    def get_filename(self):
        return '{}_{}.tar.gz'.format(
            self.name,
            datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
        )

    @staticmethod
    def from_dict(name, data) -> "Spec":
        if data.get("volume", False):
            return VolumeSpec.from_dict(name, data)

        return DirSpec.from_dict(name, data)

    @staticmethod
    def from_file(path: str):
        with open(path, 'r') as yaml_file:
            data = yaml.load(yaml_file, Loader=yaml.Loader)

        return [Spec.from_dict(name, info)
                for name, info in data["specs"].items()]


class DirSpec(Spec):
    def __init__(self, name, source, destination, limit, title, events=None):
        super().__init__(name, destination, limit, title, events)

        self.source = Path(source)

    def backup(self):
        # glob() returns a generator, so turn it into a list before counting
        # or slicing; sort newest-first so old backups are the ones removed
        tarballs = sorted(self.destination.glob('*.tar.gz'), reverse=True)

        # Remove redundant tarballs, keeping the newest limit - 1
        if len(tarballs) >= self.limit:
            for path in tarballs[self.limit - 1:]:
                path.unlink()

        # Create new tarball
        status_code = os.system(
            "tar -C '{}' -czf '{}' -- .".format(
                self.source,
                self.destination / self.get_filename()
            )
        )

        self.notify(status_code)

    @staticmethod
    def from_dict(name, data):
        return DirSpec(
            name,
            data["source"],
            data["destination"],
            data["limit"],
            data["notify"]["title"],
            data["notify"]["events"]
        )


class VolumeSpec(Spec):
    pass
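
VolumeSpec is still an empty stub, so `volume: true` specs can't actually run yet. Purely as a sketch of where this could go (not part of the commit; the docker invocation and treating `source` as a Docker volume name are my assumptions), a DirSpec-style implementation might look roughly like this:

# Hypothetical sketch only: back up a named Docker volume by mounting it
# read-only in a throwaway container and tarring its contents into the
# destination directory. Relies on Spec and os from the module above;
# old-tarball pruning is omitted for brevity.
class VolumeSpec(Spec):
    def __init__(self, name, source, destination, limit, title, events=None):
        super().__init__(name, destination, limit, title, events)

        # Assumed to be a Docker volume name rather than a filesystem path
        self.volume = source

    def backup(self):
        status_code = os.system(
            "docker run --rm -v '{}':/backup:ro -v '{}':/out alpine "
            "tar -czf '/out/{}' -C /backup .".format(
                self.volume,
                self.destination,
                self.get_filename()
            )
        )

        self.notify(status_code)

    @staticmethod
    def from_dict(name, data):
        return VolumeSpec(
            name,
            data["source"],
            data["destination"],
            data["limit"],
            data["notify"]["title"],
            data["notify"]["events"]
        )
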
@@ -0,0 +1,13 @@
specs:
    test-spec:
        source: '/some/path'
        destination: '/some/other/path'
        limit: 7
        notify:
            events:
                - 'failure'

    test-2:
        source: '/path/to'
        destination: '/to/some/other/path'
        limit: 2
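
For reference, running the tool with `--json` on a file like this one prints the parsed specs after the skeleton defaults have been merged in. A small sketch of that (the `backups.yml` filename is made up, and it assumes the `specs` package is on the import path):

# Hypothetical: dump the parsed specs for the example file above as JSON.
import json
from specs import parse_specs_file

specs = parse_specs_file("backups.yml")
print(json.dumps([spec.to_dict() for spec in specs], indent=4))
# test-spec keeps its 'failure'-only events but gets the default title
# "Backup Notification"; test-2 gets both notify defaults filled in.
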
@@ -0,0 +1,16 @@
#!/usr/bin/env sh

# Zip app
(cd app && zip -r ../app.zip *)

# Add shebang to top of file
echo "#!/usr/bin/env python3" | cat - app.zip > backup_tool
chmod a+x backup_tool

# Move executable over
mv backup_tool /usr/local/bin

# Remove zip
rm app.zip
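
This works because CPython can run a zip archive directly as long as it contains a `__main__.py` at its top level, so prepending a shebang line to the zip yields a self-contained executable. The standard library's `zipapp` module does the same packaging in one call; a possible alternative, assuming the entry point lives at `app/__main__.py` (which this script implies but doesn't show):

# Sketch of an equivalent build step using the stdlib zipapp module.
import zipapp

zipapp.create_archive(
    "app",                               # directory containing __main__.py
    target="backup_tool",
    interpreter="/usr/bin/env python3",  # written out as the shebang line
)

Since create_archive sets the executable bit when an interpreter is given, the chmod step wouldn't be needed in that variant.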