Implement a better abstraction on top of StorageService and file handlers.
Add an expiration time to the config; reformat the project.
This commit is contained in:
@@ -1,3 +1,5 @@
|
||||
from resize_image_service.utils.enums.storage_type import StorageType
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
import os
|
||||
@@ -5,9 +7,11 @@ import os
|
||||
|
||||
def get_config_s3():
    """Build the S3 configuration mapping from environment variables.

    Loads `.env` first so local development values are picked up. Missing
    credentials/region/bucket default to None (boto3 then falls back to its
    own credential chain).

    Returns:
        dict: kwargs consumed by AmazonS3Service — credentials, region,
        bucket name, and presigned-URL lifetime in seconds.
    """
    load_dotenv()

    return {
        "aws_access_key_id": os.environ.get("AWS_ACCESS_KEY_ID", None),
        "aws_secret_access_key": os.environ.get("AWS_SECRET_ACCESS_KEY", None),
        "region_name": os.environ.get("AWS_REGION_NAME", None),
        "bucket_name": os.environ.get("AWS_BUCKET_NAME", None),
        # BUG FIX: environment values are strings, but the default was the
        # int 3600 — coerce so boto3's ExpiresIn always receives an int.
        "expires_in": int(os.environ.get("EXPIRES_IN", 3600)),
    }
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
from resize_image_service.depends.depend_queue import dependency_queue
|
||||
from resize_image_service.depends.depend_s3_service import (
|
||||
dependency_s3_service,
|
||||
dependency_storage_service,
|
||||
)
|
||||
from resize_image_service.service.s3_service import S3Service
|
||||
from resize_image_service.service.storage_service import StorageService
|
||||
from resize_image_service.utils.enums.file_type import FileType
|
||||
from resize_image_service.utils.file_name_hash import file_name_hash
|
||||
from resize_image_service.worker.s3_image_worker import s3_image_worker
|
||||
from resize_image_service.worker.storage_file_worker import storage_file_worker
|
||||
|
||||
from fastapi import Body, Depends, Form
|
||||
from fastapi_utils.cbv import cbv
|
||||
@@ -20,27 +20,29 @@ s3_router = InferringRouter()
|
||||
@cbv(s3_router)
class S3Controller:
    """HTTP endpoints for issuing storage links and queueing file processing."""

    # Resolved via FastAPI dependency injection, cached for the app lifetime.
    queue: Queue = Depends(dependency_queue, use_cache=True)
    storage_service: StorageService = Depends(dependency_storage_service, use_cache=True)

    # NOTE(review): GET requests carrying a JSON body are non-standard and
    # many clients/proxies drop the body — consider POST for the two routes
    # below. Left as GET to preserve the existing public interface.
    @s3_router.get("/new_file_url/", status_code=200)
    def new_file_url(
        self,
        username: Annotated[str, Body(embed=True)],
        file_postfix: Annotated[str, Body(embed=True)],
        file_type: Annotated[FileType, Body(embed=True)],
    ) -> dict[str, str]:
        """Return a presigned upload URL plus the object key for the file."""
        return self.storage_service.get_temp_upload_link(
            file_name_hash(username, file_postfix), file_type
        )

    @s3_router.get("/file_url/", status_code=200)
    def file_url(
        self,
        username: Annotated[str, Body(embed=True)],
        file_postfix: Annotated[str, Body(embed=True)],
    ) -> dict[str, str]:
        """Return a presigned read URL for the user's stored file."""
        return self.storage_service.get_temp_read_link(
            file_name_hash(username, file_postfix)
        )

    @s3_router.post("/process_file/", status_code=200)
    def process_file(self, string_url: Annotated[str, Body(embed=True)]):
        """Enqueue background processing (resize / strip metadata) for the file."""
        self.queue.enqueue(storage_file_worker, string_url)
|
||||
|
||||
@@ -1,9 +1,19 @@
|
||||
from resize_image_service.config.config_s3 import get_config_s3
|
||||
from resize_image_service.service.s3_service import S3Service
|
||||
from resize_image_service.service.amazon_s3_service import AmazonS3Service
|
||||
from resize_image_service.service.storage_service import StorageService
|
||||
from resize_image_service.utils.enums.storage_type import StorageType
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
import os
|
||||
from functools import cache
|
||||
|
||||
|
||||
@cache
def dependency_storage_service() -> StorageService:
    """Resolve the configured StorageService backend (cached singleton).

    Reads STORAGE_TYPE from the environment (after loading `.env`) and
    constructs the matching implementation.

    Raises:
        KeyError: STORAGE_TYPE is not set.
        ValueError: STORAGE_TYPE is not a known StorageType value.
        RuntimeError: the storage type is valid but unsupported here.
    """
    load_dotenv()

    selected = StorageType(os.environ["STORAGE_TYPE"])
    if selected == StorageType.S3_STORAGE:
        return AmazonS3Service(**get_config_s3())

    raise RuntimeError("Invalid Storage Type")
|
||||
|
||||
@@ -1,18 +1,27 @@
|
||||
from resize_image_service.utils.enums.file_type import CONTENT_TYPE, FileType
|
||||
from __future__ import annotations
|
||||
|
||||
from resize_image_service.service.storage_service import StorageService
|
||||
from resize_image_service.utils.enums.file_type import FileType
|
||||
from resize_image_service.utils.file_handler import FILE_HANDLER
|
||||
|
||||
import boto3
|
||||
from PIL import Image
|
||||
|
||||
import io
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
|
||||
class S3Service:
|
||||
class AmazonS3Service(StorageService):
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
|
||||
self.__validate_config(**kwargs)
|
||||
|
||||
self.bucket_name = kwargs.get("bucket_name")
|
||||
self.region_name = kwargs.get("region_name")
|
||||
|
||||
self.expires_in = kwargs.get("expires_in")
|
||||
|
||||
self.s3 = boto3.client(
|
||||
"s3",
|
||||
aws_access_key_id=kwargs.get("aws_access_key_id"),
|
||||
@@ -24,69 +33,47 @@ class S3Service:
|
||||
self, file_name, file_type: FileType
|
||||
) -> dict[str, str | Any]:
|
||||
return {
|
||||
"presigned_url": self._get_presigned_right_url(file_name, file_type),
|
||||
"presigned_url": self._get_presigned_write_url(file_name, file_type),
|
||||
"file_key": self._get_object_url(file_name),
|
||||
}
|
||||
|
||||
def get_temp_read_link(self, file_name) -> dict[str, str | Any]:
    """Return a short-lived presigned URL for downloading `file_name`."""
    return {"presigned_url": self._get_presigned_read_url(file_name)}
|
||||
|
||||
def process_file(self, file_name: str, file_type: FileType) -> None:
    """Download `file_name`, run the type-specific handler, and re-upload.

    The handler (looked up in FILE_HANDLER) transforms the raw bytes —
    e.g. resizing and stripping metadata for images.

    NOTE(review): StorageService declares process_file(file_name) without
    file_type, and storage_file_worker calls it with one argument — confirm
    which signature is intended; as written the worker call would fail.
    """
    file_bytes = self._get_file_obj(file_name)
    handler = FILE_HANDLER[file_type]["handler"]

    self._upload_file(file_name, handler(file_bytes))
|
||||
|
||||
self._upload_image(file_name, img)
|
||||
|
||||
def _get_object_url(self, file_name: str) -> str:
    """Build the canonical HTTPS object URL for `file_name` in this bucket."""
    host = f"{self.bucket_name}.s3.{self.region_name}.amazonaws.com"
    return f"https://{host}/{file_name}"
|
||||
|
||||
def _get_presigned_write_url(self, file_name, file_type: FileType) -> str:
    """Create a presigned PUT URL so clients can upload directly to S3."""
    request_params = {
        "Bucket": self.bucket_name,
        "Key": file_name,
        # Content type must match what the client sends, or S3 rejects the PUT.
        "ContentType": FILE_HANDLER[file_type]["content_type"],
    }
    return self.s3.generate_presigned_url(
        "put_object", Params=request_params, ExpiresIn=self.expires_in
    )
|
||||
|
||||
def _get_presigned_read_url(self, file_name) -> str:
    """Create a presigned GET URL granting temporary read access."""
    request_params = {"Bucket": self.bucket_name, "Key": file_name}
    return self.s3.generate_presigned_url(
        "get_object", Params=request_params, ExpiresIn=self.expires_in
    )
|
||||
|
||||
def _get_file_obj(self, file_name: str) -> io.BytesIO:
    """Download `file_name` from the bucket into an in-memory byte buffer."""
    response = self.s3.get_object(Bucket=self.bucket_name, Key=file_name)
    return io.BytesIO(response["Body"].read())
|
||||
|
||||
def _upload_file(self, file_name: str, file_bytes: io.BytesIO) -> None:
    """Upload the in-memory buffer to the bucket under key `file_name`."""
    self.s3.upload_fileobj(file_bytes, Bucket=self.bucket_name, Key=file_name)
|
||||
|
||||
@staticmethod
|
||||
def __validate_config(**kwargs):
|
||||
25
resize_image_service/service/storage_service.py
Normal file
25
resize_image_service/service/storage_service.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from resize_image_service.utils.enums.file_type import FileType
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any
|
||||
|
||||
|
||||
class StorageService(ABC):
    """Abstract interface for file-storage backends (e.g. Amazon S3)."""

    def __init__(self, **kwargs):
        # Accepts backend-specific configuration; concrete classes consume it.
        pass

    @abstractmethod
    def get_temp_upload_link(
        self, file_name, file_type: FileType
    ) -> dict[str, str | Any]:
        """Return a short-lived URL that clients can upload `file_name` to."""

    @abstractmethod
    def get_temp_read_link(self, file_name) -> dict[str, str | Any]:
        """Return a short-lived URL for reading `file_name`."""

    @abstractmethod
    def process_file(self, file_name) -> None:
        """Fetch, transform, and re-store `file_name`."""
        # NOTE(review): AmazonS3Service.process_file also requires a
        # file_type argument — confirm which signature is intended.
|
||||
@@ -4,6 +4,3 @@ from enum import Enum
|
||||
class FileType(Enum):
|
||||
PNG = "png"
|
||||
JPEG = "jpeg"
|
||||
|
||||
|
||||
CONTENT_TYPE = {FileType.PNG: "image/png", FileType.JPEG: "image/jpeg"}
|
||||
|
||||
5
resize_image_service/utils/enums/storage_type.py
Normal file
5
resize_image_service/utils/enums/storage_type.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class StorageType(Enum):
    """Supported storage backends, keyed by the STORAGE_TYPE env value."""

    S3_STORAGE = "s3"
|
||||
9
resize_image_service/utils/file_handler/__init__.py
Normal file
9
resize_image_service/utils/file_handler/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from resize_image_service.utils.enums.file_type import FileType
|
||||
from resize_image_service.utils.file_handler.handlers.image_handler import (
|
||||
image_handler,
|
||||
)
|
||||
|
||||
# Maps each supported FileType to its HTTP content type and the callable
# that post-processes uploaded bytes of that type.
FILE_HANDLER = {
    FileType.PNG: {"content_type": "image/png", "handler": image_handler},
    FileType.JPEG: {"content_type": "image/jpeg", "handler": image_handler},
}
|
||||
@@ -0,0 +1,20 @@
|
||||
from PIL import Image
|
||||
|
||||
import io
|
||||
|
||||
|
||||
def image_handler(file_bytes: io.BytesIO) -> io.BytesIO:
    """Resize an image to fit within 320x320 and strip its metadata.

    Args:
        file_bytes: raw image bytes (any format Pillow can open).

    Returns:
        io.BytesIO: the processed image encoded as PNG, positioned at 0.
        NOTE(review): output is always PNG even for JPEG input — confirm
        that is intended, since the stored key/content type may say jpeg.
    """
    img = Image.open(file_bytes)
    img.thumbnail((320, 320))

    # Copy only pixel data into a fresh image so EXIF/metadata is dropped.
    data = list(img.getdata())
    image_without_exif = Image.new(img.mode, img.size)
    image_without_exif.putdata(data)

    new_byte_img = io.BytesIO()
    # BUG FIX: save the metadata-stripped copy — the original code saved
    # `img`, discarding the EXIF-stripped image it had just built.
    image_without_exif.save(new_byte_img, format="PNG")

    new_byte_img.seek(0)

    return new_byte_img
|
||||
@@ -1,7 +0,0 @@
|
||||
from resize_image_service.depends.depend_s3_service import (
|
||||
dependency_s3_service,
|
||||
)
|
||||
|
||||
|
||||
def s3_image_worker(string_url: str) -> None:
|
||||
dependency_s3_service().process_image(string_url)
|
||||
7
resize_image_service/worker/storage_file_worker.py
Normal file
7
resize_image_service/worker/storage_file_worker.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from resize_image_service.depends.depend_s3_service import (
|
||||
dependency_storage_service,
|
||||
)
|
||||
|
||||
|
||||
def storage_file_worker(string_url: str) -> None:
    """Queue worker entry point: process the stored file named by `string_url`."""
    # BUG FIX: StorageService defines process_file, not process_image —
    # calling process_image here would raise AttributeError when the job runs.
    dependency_storage_service().process_file(string_url)
|
||||
Reference in New Issue
Block a user