diff --git a/requirements.txt b/requirements.txt index 3aa9629..0621547 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,7 @@ pydantic~=1.9.0 opencdms~=0.1.0 starlette~=0.17.1 setuptools~=57.0.0 -boto3~=1.20.49 backoff Babel -pyyaml \ No newline at end of file +pyyaml +minio \ No newline at end of file diff --git a/src/climsoft_api/api/s3_files/router.py b/src/climsoft_api/api/s3_files/router.py index 5e6d2ed..4a07a99 100644 --- a/src/climsoft_api/api/s3_files/router.py +++ b/src/climsoft_api/api/s3_files/router.py @@ -1,19 +1,28 @@ +import minio +import urllib3.response + from climsoft_api.config import settings -from climsoft_api.utils.s3 import get_s3_client -from fastapi import APIRouter +from climsoft_api.utils.s3 import get_minio_client +from fastapi import APIRouter, Request from fastapi.responses import Response from climsoft_api.utils.exception import handle_exceptions +from climsoft_api.utils.deployment import override_settings router = APIRouter() -@router.get("/s3/image/{object_key}") -@handle_exceptions -def get_s3_object(object_key): - s3_client = get_s3_client() - response = s3_client.get_object( - Bucket=settings.S3_BUCKET, - Key=object_key +@router.get("/cloud-storage/image/{object_key}") +def get_s3_object(object_key, request: Request): + try: + _settings = override_settings(request.state.settings_override) + except AttributeError: + _settings = settings + + client: minio.Minio = get_minio_client(_settings) + response: urllib3.response.HTTPResponse = client.get_object( + bucket_name=_settings.S3_BUCKET, + object_name=object_key ) - return Response(response["Body"].read(), + + return Response(response.read(), media_type=f"image/{object_key.split('.')[-1]}") diff --git a/src/climsoft_api/api/upload/router.py b/src/climsoft_api/api/upload/router.py index da236d5..c2dda01 100644 --- a/src/climsoft_api/api/upload/router.py +++ b/src/climsoft_api/api/upload/router.py @@ -1,14 +1,15 @@ +import json import logging from 
climsoft_api.api.upload.schema import ( FileUploadedToDiskResponse, FileUploadedToS3Response ) -from climsoft_api.config import settings from climsoft_api.services import file_upload_service from climsoft_api.utils.response import get_success_response, get_error_response -from fastapi import APIRouter, UploadFile, File +from fastapi import APIRouter, UploadFile, File, Request from climsoft_api.utils.response import translate_schema - +from climsoft_api.utils.deployment import override_settings +from climsoft_api.config import settings router = APIRouter() @@ -19,7 +20,11 @@ @router.post( "/file-upload/image" ) -async def upload_image(file: UploadFile = File(...)): +async def upload_image(request: Request, file: UploadFile = File(...)): + try: + _settings = override_settings(request.state.settings_override) + except AttributeError: + _settings = settings try: contents = await file.read() file_type = file.content_type @@ -27,7 +32,7 @@ async def upload_image(file: UploadFile = File(...)): raise TypeError(_("Only image files are supported.")) filepath = file_upload_service.save_file( - settings.FILE_STORAGE, + _settings, contents, file_type ) diff --git a/src/climsoft_api/config.py b/src/climsoft_api/config.py index 8651837..11f12a2 100644 --- a/src/climsoft_api/config.py +++ b/src/climsoft_api/config.py @@ -11,7 +11,7 @@ class Settings(BaseSettings): DATABASE_URI: AnyUrl = "mysql+mysqldb://root:password@mariadb/climsoft" FILE_STORAGE: str = "disk" UPLOAD_DIR: str = "/climsoft_uploads" - S3_BUCKET: str = "climsoft-paper-archive" + S3_BUCKET: str = "s3-bucket-name" AWS_REGION: str = "eu-west-2" AWS_ACCESS_KEY_ID: str = "replace it" AWS_SECRET_ACCESS_KEY: str = "replace it" diff --git a/src/climsoft_api/main.py b/src/climsoft_api/main.py index 01d898b..5ec9d62 100644 --- a/src/climsoft_api/main.py +++ b/src/climsoft_api/main.py @@ -39,6 +39,7 @@ def get_app(config=None): async def db_session_middleware(request: Request, call_next): try: request.state.get_session = 
get_session_local(config) + request.state.settings_override = deployment_configs.get(config) response = await call_next(request) except Exception as exc: logging.exception(exc) diff --git a/src/climsoft_api/services/file_upload_service.py b/src/climsoft_api/services/file_upload_service.py index 9102a3f..c1f0a0c 100644 --- a/src/climsoft_api/services/file_upload_service.py +++ b/src/climsoft_api/services/file_upload_service.py @@ -1,34 +1,38 @@ import io import uuid from pathlib import Path -from climsoft_api.config import settings -from climsoft_api.utils.s3 import get_s3_client +import minio +from climsoft_api.utils.s3 import get_minio_client +from climsoft_api.config import Settings -def save_file(storage, file, file_type): +def save_file(settings: Settings, file, file_type): file_name = f"{uuid.uuid4().hex}.{file_type.split('/')[-1]}" - if storage == "disk": - return save_file_to_disk(file, file_name) - elif storage == "s3": - return save_file_to_s3(file, file_name) + if settings.FILE_STORAGE == "disk": + return save_file_to_disk(settings, file, file_name) + elif settings.FILE_STORAGE == "cloud_storage": + return save_file_to_cloud_storage(settings, file, file_name) else: raise NotImplemented() -def save_file_to_s3(file, file_name): - s3_client = get_s3_client() - s3_client.upload_fileobj( - io.BytesIO(file), - settings.S3_BUCKET, - file_name +def save_file_to_cloud_storage(settings, file, file_name): + client: minio.Minio = get_minio_client(settings) + octet_stream = io.BytesIO(file) + client.put_object( + bucket_name=settings.S3_BUCKET, + object_name=file_name, + data=octet_stream, + length=len(octet_stream.getbuffer()) ) + return { - "storage": "s3", + "storage": "cloud_storage", "object_key": file_name } -def save_file_to_disk(file, file_name): +def save_file_to_disk(settings, file, file_name): target_file_path = Path(settings.UPLOAD_DIR).joinpath(file_name) with open(target_file_path, "wb") as target_file: target_file.write(file) diff --git 
a/src/climsoft_api/utils/deployment.py b/src/climsoft_api/utils/deployment.py index 845dec7..aee4dd3 100644 --- a/src/climsoft_api/utils/deployment.py +++ b/src/climsoft_api/utils/deployment.py @@ -1,6 +1,9 @@ +import copy + import yaml from pathlib import Path from typing import Dict +from climsoft_api.config import settings, Settings deployment_config_file = Path.resolve(Path("./deployment.yml")) @@ -12,3 +15,11 @@ def load_deployment_configs() -> Dict[str, Dict[str, str]]: with open(deployment_config_file, "r") as stream: deployment_configs = yaml.safe_load(stream=stream) return deployment_configs + + +def override_settings(overrides: Dict[str, str]) -> Settings: + overrides = {k: v for k, v in overrides.items() if k != "NAME"} + settings_copy = copy.deepcopy(settings) + for k, v in overrides.items(): + setattr(settings_copy, k, v) + return settings_copy diff --git a/src/climsoft_api/utils/s3.py b/src/climsoft_api/utils/s3.py index 25b6d61..7c01890 100644 --- a/src/climsoft_api/utils/s3.py +++ b/src/climsoft_api/utils/s3.py @@ -1,34 +1,28 @@ -import boto3 -from climsoft_api.config import settings +import minio -def get_s3_client(): - s3_client = boto3.client( - 's3', - aws_access_key_id=settings.AWS_ACCESS_KEY_ID, - aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, - region_name=settings.AWS_REGION +def get_minio_client(settings): + client = minio.Minio( + 's3.amazonaws.com', + access_key=settings.AWS_ACCESS_KEY_ID, + secret_key=settings.AWS_SECRET_ACCESS_KEY, + region=settings.AWS_REGION ) + return client - return s3_client - -def create_presigned_url(bucket_name, object_name, expiration=3600): +def create_presigned_url(settings, bucket_name, object_name): """Generate a presigned URL to share an S3 object :param bucket_name: string :param object_name: string - :param expiration: Time in seconds for the presigned URL to remain valid :return: Presigned URL as string. If error, returns None.
""" # Generate a presigned URL for the S3 object - s3_client = get_s3_client() - return s3_client.generate_presigned_url( - ClientMethod='get_object', - Params={ - 'Bucket': bucket_name, - 'Key': object_name - }, - ExpiresIn=expiration + client = get_minio_client(settings) + return client.get_presigned_url( + method='GET', + bucket_name=bucket_name, + object_name=object_name )