From 2bd7ae10b94a1785a324f92bf5f42b85d2025767 Mon Sep 17 00:00:00 2001 From: Vitor Hideyoshi Date: Thu, 10 Aug 2023 01:54:17 -0300 Subject: [PATCH] Implements Better Abstraction on Top of StorageService and File Handlers Adds Expiration Time to Config Reformats Project --- resize_image_service/config/config_s3.py | 4 ++ .../controller/s3_controller.py | 28 ++++---- .../depends/depend_s3_service.py | 16 ++++- .../{s3_service.py => amazon_s3_service.py} | 67 ++++++++----------- .../service/storage_service.py | 25 +++++++ resize_image_service/utils/enums/file_type.py | 3 - .../utils/enums/storage_type.py | 5 ++ .../utils/file_handler/__init__.py | 9 +++ .../utils/file_handler/handlers/__init__.py | 0 .../file_handler/handlers/image_handler.py | 20 ++++++ .../worker/s3_image_worker.py | 7 -- .../worker/storage_file_worker.py | 7 ++ 12 files changed, 125 insertions(+), 66 deletions(-) rename resize_image_service/service/{s3_service.py => amazon_s3_service.py} (58%) create mode 100644 resize_image_service/service/storage_service.py create mode 100644 resize_image_service/utils/enums/storage_type.py create mode 100644 resize_image_service/utils/file_handler/__init__.py create mode 100644 resize_image_service/utils/file_handler/handlers/__init__.py create mode 100644 resize_image_service/utils/file_handler/handlers/image_handler.py delete mode 100644 resize_image_service/worker/s3_image_worker.py create mode 100644 resize_image_service/worker/storage_file_worker.py diff --git a/resize_image_service/config/config_s3.py b/resize_image_service/config/config_s3.py index d5c7afe..31ce4db 100644 --- a/resize_image_service/config/config_s3.py +++ b/resize_image_service/config/config_s3.py @@ -1,3 +1,5 @@ +from resize_image_service.utils.enums.storage_type import StorageType + from dotenv import load_dotenv import os @@ -5,9 +7,11 @@ import os def get_config_s3(): load_dotenv() + return { "aws_access_key_id": os.environ.get("AWS_ACCESS_KEY_ID", None), "aws_secret_access_key": 
os.environ.get("AWS_SECRET_ACCESS_KEY", None), "region_name": os.environ.get("AWS_REGION_NAME", None), "bucket_name": os.environ.get("AWS_BUCKET_NAME", None), + "expires_in": int(os.environ.get("EXPIRES_IN", 3600)), } diff --git a/resize_image_service/controller/s3_controller.py b/resize_image_service/controller/s3_controller.py index 0c60b6d..9cd05cb 100644 --- a/resize_image_service/controller/s3_controller.py +++ b/resize_image_service/controller/s3_controller.py @@ -1,11 +1,11 @@ from resize_image_service.depends.depend_queue import dependency_queue from resize_image_service.depends.depend_s3_service import ( - dependency_s3_service, + dependency_storage_service, ) -from resize_image_service.service.s3_service import S3Service +from resize_image_service.service.storage_service import StorageService from resize_image_service.utils.enums.file_type import FileType from resize_image_service.utils.file_name_hash import file_name_hash -from resize_image_service.worker.s3_image_worker import s3_image_worker +from resize_image_service.worker.storage_file_worker import storage_file_worker from fastapi import Body, Depends, Form from fastapi_utils.cbv import cbv @@ -20,27 +20,29 @@ s3_router = InferringRouter() @cbv(s3_router) class S3Controller: queue: Queue = Depends(dependency_queue, use_cache=True) - s3_service: S3Service = Depends(dependency_s3_service, use_cache=True) + storage_service: StorageService = Depends(dependency_storage_service, use_cache=True) @s3_router.get("/new_file_url/", status_code=200) def new_file_url( self, - username: Annotated[str, Form()], - file_postfix: Annotated[str, Form()], - file_type: Annotated[FileType, Form()], + username: Annotated[str, Body(embed=True)], + file_postfix: Annotated[str, Body(embed=True)], + file_type: Annotated[FileType, Body(embed=True)], ) -> dict[str, str]: - return self.s3_service.get_temp_upload_link( + return self.storage_service.get_temp_upload_link( file_name_hash(username, file_postfix), file_type ) 
@s3_router.get("/file_url/", status_code=200) def file_url( - self, username: Annotated[str, Form()], file_postfix: Annotated[str, Form()] + self, + username: Annotated[str, Body(embed=True)], + file_postfix: Annotated[str, Body(embed=True)], ) -> dict[str, str]: - return self.s3_service.get_temp_read_link( + return self.storage_service.get_temp_read_link( file_name_hash(username, file_postfix) ) - @s3_router.post("/process_image/", status_code=200) - def process_image(self, string_url: Annotated[str, Body(embed=True)]): - self.queue.enqueue(s3_image_worker, string_url) + @s3_router.post("/process_file/", status_code=200) + def process_file(self, string_url: Annotated[str, Body(embed=True)]): + self.queue.enqueue(storage_file_worker, string_url) diff --git a/resize_image_service/depends/depend_s3_service.py b/resize_image_service/depends/depend_s3_service.py index 92bfb34..ef16b11 100644 --- a/resize_image_service/depends/depend_s3_service.py +++ b/resize_image_service/depends/depend_s3_service.py @@ -1,9 +1,19 @@ from resize_image_service.config.config_s3 import get_config_s3 -from resize_image_service.service.s3_service import S3Service +from resize_image_service.service.amazon_s3_service import AmazonS3Service +from resize_image_service.service.storage_service import StorageService +from resize_image_service.utils.enums.storage_type import StorageType +from dotenv import load_dotenv + +import os from functools import cache @cache -def dependency_s3_service() -> S3Service: - return S3Service(**get_config_s3()) +def dependency_storage_service() -> StorageService: + load_dotenv() + + if StorageType(os.environ["STORAGE_TYPE"]) == StorageType.S3_STORAGE: + return AmazonS3Service(**get_config_s3()) + + raise RuntimeError("Invalid Storage Type") diff --git a/resize_image_service/service/s3_service.py b/resize_image_service/service/amazon_s3_service.py similarity index 58% rename from resize_image_service/service/s3_service.py rename to 
resize_image_service/service/amazon_s3_service.py index a80f1ff..9555be3 100644 --- a/resize_image_service/service/s3_service.py +++ b/resize_image_service/service/amazon_s3_service.py @@ -1,18 +1,27 @@ -from resize_image_service.utils.enums.file_type import CONTENT_TYPE, FileType +from __future__ import annotations + +from resize_image_service.service.storage_service import StorageService +from resize_image_service.utils.enums.file_type import FileType +from resize_image_service.utils.file_handler import FILE_HANDLER import boto3 from PIL import Image import io -from typing import Any, Dict +from typing import Any -class S3Service: +class AmazonS3Service(StorageService): def __init__(self, **kwargs): + super().__init__(**kwargs) + self.__validate_config(**kwargs) self.bucket_name = kwargs.get("bucket_name") self.region_name = kwargs.get("region_name") + + self.expires_in = kwargs.get("expires_in") + self.s3 = boto3.client( "s3", aws_access_key_id=kwargs.get("aws_access_key_id"), @@ -24,69 +33,47 @@ class S3Service: self, file_name, file_type: FileType ) -> dict[str, str | Any]: return { - "presigned_url": self._get_presigned_right_url(file_name, file_type), + "presigned_url": self._get_presigned_write_url(file_name, file_type), "file_key": self._get_object_url(file_name), } def get_temp_read_link(self, file_name) -> dict[str, str | Any]: return {"presigned_url": self._get_presigned_read_url(file_name)} - def process_image(self, file_name) -> None: - img = self._get_image_obj(file_name) + def process_file(self, file_name: str, file_type: FileType = FileType.PNG) -> None: + file_bytes = self._get_file_obj(file_name) + handler = FILE_HANDLER[file_type]["handler"] - img = self._resize_img(img) - img = self._remove_img_metadata(img) + self._upload_file(file_name, handler(file_bytes)) - self._upload_image(file_name, img) - - def _get_object_url(self, file_name: str): + def _get_object_url(self, file_name: str) -> str: return
f"https://{self.bucket_name}.s3.{self.region_name}.amazonaws.com/{file_name}" - def _get_presigned_right_url(self, file_name, file_type: FileType): + def _get_presigned_write_url(self, file_name, file_type: FileType) -> str: return self.s3.generate_presigned_url( "put_object", Params={ "Bucket": self.bucket_name, "Key": file_name, - "ContentType": CONTENT_TYPE[file_type], + "ContentType": FILE_HANDLER[file_type]["content_type"], }, - ExpiresIn=3600, + ExpiresIn=self.expires_in, ) - def _get_presigned_read_url(self, file_name): + def _get_presigned_read_url(self, file_name) -> str: return self.s3.generate_presigned_url( "get_object", Params={"Bucket": self.bucket_name, "Key": file_name}, - ExpiresIn=3600, + ExpiresIn=self.expires_in, ) - def _get_image_obj(self, file_name: str): - object_byte = io.BytesIO( + def _get_file_obj(self, file_name: str) -> io.BytesIO: + return io.BytesIO( self.s3.get_object(Bucket=self.bucket_name, Key=file_name)["Body"].read() ) - return Image.open(object_byte) - - def _upload_image(self, file_name: str, img: Image): - new_byte_img = io.BytesIO() - img.save(new_byte_img, format="PNG") - - new_byte_img.seek(0) - self.s3.upload_fileobj(new_byte_img, Bucket=self.bucket_name, Key=file_name) - - @staticmethod - def _resize_img(img): - img.thumbnail((320, 320)) - - return img - - @staticmethod - def _remove_img_metadata(img): - data = list(img.getdata()) - image_without_exif = Image.new(img.mode, img.size) - image_without_exif.putdata(data) - - return image_without_exif + def _upload_file(self, file_name: str, file_bytes: io.BytesIO) -> None: + self.s3.upload_fileobj(file_bytes, Bucket=self.bucket_name, Key=file_name) @staticmethod def __validate_config(**kwargs): diff --git a/resize_image_service/service/storage_service.py b/resize_image_service/service/storage_service.py new file mode 100644 index 0000000..cce01f6 --- /dev/null +++ b/resize_image_service/service/storage_service.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +from 
resize_image_service.utils.enums.file_type import FileType + +from abc import ABC, abstractmethod +from typing import Any + + +class StorageService(ABC): + def __init__(self, **kwargs): + pass + + @abstractmethod + def get_temp_upload_link( + self, file_name, file_type: FileType + ) -> dict[str, str | Any]: + pass + + @abstractmethod + def get_temp_read_link(self, file_name) -> dict[str, str | Any]: + pass + + @abstractmethod + def process_file(self, file_name) -> None: + pass diff --git a/resize_image_service/utils/enums/file_type.py b/resize_image_service/utils/enums/file_type.py index dbb91f3..1b8ddc1 100644 --- a/resize_image_service/utils/enums/file_type.py +++ b/resize_image_service/utils/enums/file_type.py @@ -4,6 +4,3 @@ from enum import Enum class FileType(Enum): PNG = "png" JPEG = "jpeg" - - -CONTENT_TYPE = {FileType.PNG: "image/png", FileType.JPEG: "image/jpeg"} diff --git a/resize_image_service/utils/enums/storage_type.py b/resize_image_service/utils/enums/storage_type.py new file mode 100644 index 0000000..7183f7d --- /dev/null +++ b/resize_image_service/utils/enums/storage_type.py @@ -0,0 +1,5 @@ +from enum import Enum + + +class StorageType(Enum): + S3_STORAGE = "s3" diff --git a/resize_image_service/utils/file_handler/__init__.py b/resize_image_service/utils/file_handler/__init__.py new file mode 100644 index 0000000..e78370c --- /dev/null +++ b/resize_image_service/utils/file_handler/__init__.py @@ -0,0 +1,9 @@ +from resize_image_service.utils.enums.file_type import FileType +from resize_image_service.utils.file_handler.handlers.image_handler import ( + image_handler, +) + +FILE_HANDLER = { + FileType.PNG: {"content_type": "image/png", "handler": image_handler}, + FileType.JPEG: {"content_type": "image/jpeg", "handler": image_handler}, +} diff --git a/resize_image_service/utils/file_handler/handlers/__init__.py b/resize_image_service/utils/file_handler/handlers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/resize_image_service/utils/file_handler/handlers/image_handler.py b/resize_image_service/utils/file_handler/handlers/image_handler.py new file mode 100644 index 0000000..764bf64 --- /dev/null +++ b/resize_image_service/utils/file_handler/handlers/image_handler.py @@ -0,0 +1,20 @@ +from PIL import Image + +import io + + +def image_handler(file_bytes: io.BytesIO) -> io.BytesIO: + img = Image.open(file_bytes) + + img.thumbnail((320, 320)) + + data = list(img.getdata()) + image_without_exif = Image.new(img.mode, img.size) + image_without_exif.putdata(data) + + new_byte_img = io.BytesIO() + image_without_exif.save(new_byte_img, format="PNG") + + new_byte_img.seek(0) + + return new_byte_img diff --git a/resize_image_service/worker/s3_image_worker.py b/resize_image_service/worker/s3_image_worker.py deleted file mode 100644 index 5fd6e4d..0000000 --- a/resize_image_service/worker/s3_image_worker.py +++ /dev/null @@ -1,7 +0,0 @@ -from resize_image_service.depends.depend_s3_service import ( - dependency_s3_service, -) - - -def s3_image_worker(string_url: str) -> None: - dependency_s3_service().process_image(string_url) diff --git a/resize_image_service/worker/storage_file_worker.py b/resize_image_service/worker/storage_file_worker.py new file mode 100644 index 0000000..821c533 --- /dev/null +++ b/resize_image_service/worker/storage_file_worker.py @@ -0,0 +1,7 @@ +from resize_image_service.depends.depend_s3_service import ( + dependency_storage_service, +) + + +def storage_file_worker(string_url: str) -> None: + dependency_storage_service().process_file(string_url)