#!/usr/local/bin/python
"""Back up files from /backup to a MinIO (S3-compatible) bucket.

Reads connection settings from /data/options.json, ensures the target
bucket exists, then uploads every local file that is not already present
in the bucket. Files present in both places are compared by size and a
warning is logged on mismatch (they are NOT re-uploaded).
"""
import json
import logging
import pathlib
import sys

import minio


def main():
    """Run one backup pass: sync new files from /backup into the bucket."""
    # Root logger at DEBUG, streaming to stdout so a supervisor can capture it.
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    # Load the user configuration (add-on style options file).
    with open("/data/options.json", "r") as f:
        config = json.load(f)

    client = minio.Minio(
        config["minio_url"],
        access_key=config["minio_access_key"],
        secret_key=config["minio_secret_key"],
    )
    logger.info("Created minio client")

    # Ensure the destination bucket exists before listing/uploading.
    found = client.bucket_exists(config["minio_bucket"])
    if not found:
        logger.warning("Creating bucket %s", config["minio_bucket"])
        client.make_bucket(config["minio_bucket"])
    else:
        logger.info("Bucket %s already exists.", config["minio_bucket"])

    backup_folder = pathlib.Path("/backup")
    # Only regular files can be uploaded; skip subdirectories etc.
    backup_files = [p for p in backup_folder.iterdir() if p.is_file()]
    logger.info("Found the following files in /backup: %s", backup_files)

    # Map object name -> object metadata for quick membership checks.
    objects = {x.object_name: x for x in client.list_objects(config["minio_bucket"])}
    logger.info("Found the following files in s3: %s", objects.keys())

    to_upload = []
    for file in backup_files:
        if file.name not in objects:
            to_upload.append(file)
        else:
            obj = objects[file.name]
            logger.info("File %s already exists in s3", file.name)
            # Object.size is a plain int; the original code indexed it
            # (obj.size[0]) which raised TypeError on every size mismatch.
            if obj.size != file.stat().st_size:
                logger.warning(
                    "Size in s3 (%s) is different to local size (%s)",
                    str(obj.size),
                    str(file.stat().st_size),
                )

    logger.warning(
        "The following files do not already exist and will be backed up: %s", to_upload
    )
    for file in to_upload:
        client.fput_object(
            config["minio_bucket"],
            file.name,
            str(file.resolve()),
        )
        logger.warning("Uploaded %s", file.name)

    logger.info("Done")


if __name__ == "__main__":
    main()