From 981344c5b2bcf4f06778860840ec81fdf9377d2e Mon Sep 17 00:00:00 2001
From: skullY
Date: Tue, 10 Apr 2018 14:03:51 -0700
Subject: [PATCH] Replace minio with boto3

---
 qmk_storage.py   | 107 ++++++++++++++++++++++++++++++-----------------
 requirements.txt |   2 +-
 2 files changed, 69 insertions(+), 40 deletions(-)

diff --git a/qmk_storage.py b/qmk_storage.py
index edd5240..2da358a 100644
--- a/qmk_storage.py
+++ b/qmk_storage.py
@@ -1,23 +1,20 @@
 import logging
-import minio.helpers
-from minio import Minio
-from minio.error import ResponseError, BucketAlreadyOwnedByYou, BucketAlreadyExists
 from os import environ, mkdir
 from os.path import dirname, exists
 from shutil import copyfile, copyfileobj
 
-# Ugly hack- disable minio's multipart upload feature
-minio.helpers.MIN_PART_SIZE = minio.helpers.MAX_MULTIPART_OBJECT_SIZE
+import boto3
+import botocore.exceptions
 
 # Configuration
-STORAGE_ENGINE = environ.get('STORAGE_ENGINE', 'minio') # 'minio' or 'filesystem'
+STORAGE_ENGINE = environ.get('STORAGE_ENGINE', 's3') # 's3' or 'filesystem'
 FILESYSTEM_PATH = environ.get('FILESYSTEM_PATH', 'firmwares')
-MINIO_HOST = environ.get('MINIO_HOST', 'lb.minio:9000')
-MINIO_LOCATION = environ.get('MINIO_LOCATION', 'us-east-1')
-MINIO_BUCKET = environ.get('MINIO_BUCKET', 'compiled-qmk-firmware')
-MINIO_ACCESS_KEY = environ.get('MINIO_ACCESS_KEY', '')
-MINIO_SECRET_KEY = environ.get('MINIO_SECRET_KEY', '')
-MINIO_SECURE = False
+S3_HOST = environ.get('S3_HOST', 'http://127.0.0.1:9000')
+S3_LOCATION = environ.get('S3_LOCATION', 'nyc3')
+S3_BUCKET = environ.get('S3_BUCKET', 'qmk')
+S3_ACCESS_KEY = environ.get('S3_ACCESS_KEY', 'minio_dev')
+S3_SECRET_KEY = environ.get('S3_SECRET_KEY', 'minio_dev_secret')
+S3_SECURE = False
 
 # The `keymap.c` template to use when a keyboard doesn't have its own
 DEFAULT_KEYMAP_C = """#include QMK_KEYBOARD_H
@@ -31,27 +28,22 @@
 """
 
 # Objects we need to instantiate
-minio = Minio(MINIO_HOST, access_key=MINIO_ACCESS_KEY, secret_key=MINIO_SECRET_KEY, secure=MINIO_SECURE)
+s3 = boto3.session.Session().client('s3', region_name=S3_LOCATION, endpoint_url=S3_HOST, aws_access_key_id=S3_ACCESS_KEY, aws_secret_access_key=S3_SECRET_KEY)
 
-# Make sure our minio store is properly setup
+# Make sure our s3 store is properly set up
 try:
-    minio.make_bucket(MINIO_BUCKET, location=MINIO_LOCATION)
-except BucketAlreadyOwnedByYou as err:
-    pass
-except BucketAlreadyExists as err:
-    pass
+    s3.create_bucket(Bucket=S3_BUCKET)
+except botocore.exceptions.ClientError as e:
+    if e.__class__.__name__ != 'BucketAlreadyOwnedByYou':
+        raise
 
 
 def save_fd(fd, filename, length, content_type='application/json'):
     """Store the contents of a file-like object in the configured storage engine.
     """
-    if STORAGE_ENGINE == 'minio':
-        logging.debug('Uploading %s to minio.', filename)
-        try:
-            minio.put_object(MINIO_BUCKET, filename, fd, length, content_type)
-        except ResponseError as err:
-            logging.error('Could not upload firmware binary to minio: %s', err)
-            logging.exception(err)
+    if STORAGE_ENGINE == 's3':
+        logging.debug('Uploading %s to s3.', filename)
+        s3.upload_fileobj(fd, S3_BUCKET, filename)
     else:
         logging.debug('Writing to %s/%s.', FILESYSTEM_PATH, filename)
         if FILESYSTEM_PATH[0] == '/':
@@ -65,13 +57,9 @@ def save_fd(fd, filename, length, content_type='application/json'):
 def save_file(local_filename, remote_filename, content_type='application/json'):
     """Store the contents of a file in the configured storage engine.
""" - if STORAGE_ENGINE == 'minio': - logging.debug('Uploading %s to minio: %s.', local_filename, remote_filename) - try: - minio.fput_object(MINIO_BUCKET, remote_filename, local_filename, content_type) - except ResponseError as err: - logging.error('Could not upload firmware binary to minio: %s', err) - logging.exception(err) + if STORAGE_ENGINE == 's3': + logging.debug('Uploading %s to s3: %s.', local_filename, remote_filename) + s3.upload_file(local_filename, S3_BUCKET, remote_filename) else: logging.debug('Writing to %s/%s.', FILESYSTEM_PATH, remote_filename) if FILESYSTEM_PATH[0] == '/': @@ -82,13 +70,54 @@ def save_file(local_filename, remote_filename, content_type='application/json'): copyfile(local_filename, remote_filename) -def get(filename): - """Returns the contents of a requested file. +def put(filename, value): + """Uploads an object to S3. """ - if STORAGE_ENGINE == 'minio': - object = minio.get_object(MINIO_BUCKET, filename) - return object.data.decode('utf-8') + if STORAGE_ENGINE == 's3': + try: + object = s3.put_object(Bucket=S3_BUCKET, Key=filename, Body=value) + return object + except botocore.exceptions.ClientError as e: + if e.response['Error']['Code'] == "404": + return False + else: + raise + else: + logging.debug('Writing to %s.', FILESYSTEM_PATH, filename) + if FILESYSTEM_PATH[0] == '/': + file_path = '%s/%s' % (FILESYSTEM_PATH, filename) + else: + file_path = '../%s/%s' % (FILESYSTEM_PATH, filename) + mkdir(dirname(file_path)) + open(file_path, 'w').write(value) + + +def get_fd(filename): + """Retrieve an object from S3 and return a file-like object + """ + if STORAGE_ENGINE == 's3': + s3_object = s3.get_object(Bucket=S3_BUCKET, Key=filename) + return s3_object['Body'] + else: file_path = '/'.join((FILESYSTEM_PATH, filename)) if exists(file_path): - return open(file_path).read().decode('utf-8') + return open(file_path) + else: + raise FileNotFoundError(filename) + + +def get(filename): + """Retrieve an object from S3 + """ + fd = get_fd(filename) + data = fd.read() + try: + return data.decode('utf-8') + except UnicodeDecodeError: + return data + + +if __name__ == '__main__': + print(1, put('foo', 'bar')) + print(2, get('foo')) diff --git a/requirements.txt b/requirements.txt index dce4b45..f2ff2c1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -minio +boto3 rq<0.9.0