Refactor project to use a more descriptive package name
This commit is contained in:
0
storage_service/__init__.py
Normal file
0
storage_service/__init__.py
Normal file
14
storage_service/__main__.py
Normal file
14
storage_service/__main__.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from storage_service.config.config_server import get_config_server
|
||||
from storage_service.controller import app
|
||||
|
||||
import uvicorn
|
||||
|
||||
|
||||
def main():
    """Entry point: launch the FastAPI app with uvicorn using env-derived host/port."""
    server_config = get_config_server()
    uvicorn.run(app, host=server_config["host"], port=server_config["port"])


if __name__ == "__main__":
    main()
|
||||
0
storage_service/config/__init__.py
Normal file
0
storage_service/config/__init__.py
Normal file
14
storage_service/config/config_allowed_origins.py
Normal file
14
storage_service/config/config_allowed_origins.py
Normal file
@@ -0,0 +1,14 @@
|
||||
import os
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
|
||||
def get_allowed_origins():
    """Return the list of CORS origins from the ALLOWED_ORIGINS env var.

    ALLOWED_ORIGINS is a comma-separated list; unset or empty yields [].

    Fixes: previously an empty variable produced [""] and entries kept
    surrounding whitespace ("a, b" -> ["a", " b"]), neither of which is a
    valid CORS origin. Entries are now stripped and empty ones dropped.
    """
    load_dotenv()

    raw = os.environ.get("ALLOWED_ORIGINS")
    if not raw:
        return []

    return [origin.strip() for origin in raw.split(",") if origin.strip()]
|
||||
12
storage_service/config/config_redis.py
Normal file
12
storage_service/config/config_redis.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from dotenv import load_dotenv
|
||||
|
||||
import os
|
||||
|
||||
|
||||
def get_config_redis():
    """Build Redis connection kwargs from environment variables.

    Defaults mirror a local unauthenticated Redis. REDIS_PORT is coerced to
    int: environment values are always strings, but the previous code mixed
    a string from the env with an int default (6379), so the type of "port"
    depended on whether the variable was set.
    """
    load_dotenv()
    return {
        "host": os.environ.get("REDIS_HOST", "localhost"),
        "port": int(os.environ.get("REDIS_PORT", 6379)),
        "password": os.environ.get("REDIS_PASSWORD", None),
    }
|
||||
17
storage_service/config/config_s3.py
Normal file
17
storage_service/config/config_s3.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from storage_service.utils.enums.storage_type import StorageType
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
import os
|
||||
|
||||
|
||||
def get_config_s3():
    """Build AmazonS3Service config from environment variables.

    AWS_* values default to None (presence is validated later by
    AmazonS3Service.__validate_config). EXPIRES_IN is coerced to int:
    environment values are strings, while boto3's ExpiresIn expects an
    integer number of seconds — previously the type depended on whether
    the variable was set.
    """
    load_dotenv()

    return {
        "aws_access_key_id": os.environ.get("AWS_ACCESS_KEY_ID", None),
        "aws_secret_access_key": os.environ.get("AWS_SECRET_ACCESS_KEY", None),
        "region_name": os.environ.get("AWS_REGION_NAME", None),
        "bucket_name": os.environ.get("AWS_BUCKET_NAME", None),
        "expires_in": int(os.environ.get("EXPIRES_IN", 3600)),
    }
|
||||
11
storage_service/config/config_server.py
Normal file
11
storage_service/config/config_server.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from dotenv import load_dotenv
|
||||
|
||||
import os
|
||||
|
||||
|
||||
def get_config_server():
    """Read the HTTP server's bind host and port from the environment."""
    load_dotenv()

    host = os.environ.get("SERVER_HOST", "0.0.0.0")
    port = int(os.environ.get("SERVER_PORT", 8000))

    return {"host": host, "port": port}
|
||||
19
storage_service/controller/__init__.py
Normal file
19
storage_service/controller/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from storage_service.config.config_allowed_origins import get_allowed_origins
|
||||
from storage_service.controller.storage_controller import s3_router
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
|
||||
app = FastAPI()

# Allow browser clients from the configured origins to call the API with
# credentials, any method, and any header.
cors_options = {
    "allow_origins": get_allowed_origins(),
    "allow_credentials": True,
    "allow_methods": ["*"],
    "allow_headers": ["*"],
}
app.add_middleware(CORSMiddleware, **cors_options)

app.include_router(s3_router)
|
||||
46
storage_service/controller/storage_controller.py
Normal file
46
storage_service/controller/storage_controller.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from storage_service.depends.depend_queue import dependency_queue
|
||||
from storage_service.depends.depend_s3_service import (
|
||||
dependency_storage_service,
|
||||
)
|
||||
from storage_service.service.storage_service import StorageService
|
||||
from storage_service.utils.enums.file_type import FileType
|
||||
from storage_service.utils.file_name_hash import file_name_hash
|
||||
from storage_service.worker.storage_file_worker import storage_file_worker
|
||||
|
||||
from fastapi import Body, Depends, Form
|
||||
from fastapi_utils.cbv import cbv
|
||||
from fastapi_utils.inferring_router import InferringRouter
|
||||
from rq import Queue
|
||||
|
||||
from typing import Annotated
|
||||
|
||||
s3_router = InferringRouter()


@cbv(s3_router)
class StorageController:
    """HTTP endpoints for issuing presigned storage links and queueing file jobs."""

    # Cached per-request dependencies: background job queue and storage backend.
    queue: Queue = Depends(dependency_queue, use_cache=True)
    storage_service: StorageService = Depends(dependency_storage_service, use_cache=True)

    @s3_router.post("/new_file_url/", status_code=200)
    def new_file_url(
        self,
        username: Annotated[str, Body(embed=True)],
        file_postfix: Annotated[str, Body(embed=True)],
        file_type: Annotated[FileType, Body(embed=True)],
    ) -> dict[str, str]:
        """Issue a temporary upload link for the user's file."""
        hashed_name = file_name_hash(username, file_postfix)
        return self.storage_service.get_temp_upload_link(hashed_name, file_type)

    @s3_router.get("/file_url/", status_code=200)
    def file_url(self, username: str, file_postfix: str) -> dict[str, str]:
        """Issue a temporary read link for the user's file."""
        hashed_name = file_name_hash(username, file_postfix)
        return self.storage_service.get_temp_read_link(hashed_name)

    @s3_router.post("/process_file/", status_code=200)
    def process_file(
        self,
        username: Annotated[str, Body(embed=True)],
        file_postfix: Annotated[str, Body(embed=True)],
    ):
        """Enqueue background processing of the user's uploaded file."""
        self.queue.enqueue(storage_file_worker, username, file_postfix)
|
||||
0
storage_service/depends/__init__.py
Normal file
0
storage_service/depends/__init__.py
Normal file
8
storage_service/depends/depend_queue.py
Normal file
8
storage_service/depends/depend_queue.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from storage_service.config.config_redis import get_config_redis
|
||||
|
||||
from redis import Redis
|
||||
from rq import Queue
|
||||
|
||||
|
||||
def dependency_queue():
    """Dependency: an RQ queue backed by a Redis connection built from env config."""
    redis_connection = Redis(**get_config_redis())
    return Queue(connection=redis_connection)
|
||||
19
storage_service/depends/depend_s3_service.py
Normal file
19
storage_service/depends/depend_s3_service.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from storage_service.config.config_s3 import get_config_s3
|
||||
from storage_service.service.amazon_s3_service import AmazonS3Service
|
||||
from storage_service.service.storage_service import StorageService
|
||||
from storage_service.utils.enums.storage_type import StorageType
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
import os
|
||||
from functools import cache
|
||||
|
||||
|
||||
@cache
def dependency_storage_service() -> StorageService:
    """Dependency: the storage backend selected by the STORAGE_TYPE env var.

    Cached so the backend is constructed once per process. Raises
    RuntimeError for unsupported storage types (KeyError/ValueError if
    STORAGE_TYPE is unset or not a known StorageType value).
    """
    load_dotenv()

    storage_type = StorageType(os.environ["STORAGE_TYPE"])
    if storage_type is StorageType.S3_STORAGE:
        return AmazonS3Service(**get_config_s3())

    raise RuntimeError("Invalid Storage Type")
|
||||
0
storage_service/service/__init__.py
Normal file
0
storage_service/service/__init__.py
Normal file
93
storage_service/service/amazon_s3_service.py
Normal file
93
storage_service/service/amazon_s3_service.py
Normal file
@@ -0,0 +1,93 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from storage_service.service.storage_service import StorageService
|
||||
from storage_service.utils.enums.file_type import FileType
|
||||
from storage_service.utils.file_handler import FILE_HANDLER
|
||||
|
||||
import boto3
|
||||
from PIL import Image
|
||||
|
||||
import io
|
||||
from typing import Any
|
||||
|
||||
|
||||
class AmazonS3Service(StorageService):
    """StorageService implementation backed by Amazon S3 presigned URLs."""

    # Config keys that must be present and non-empty, in validation order.
    _REQUIRED_CONFIG_KEYS = (
        "bucket_name",
        "aws_access_key_id",
        "aws_secret_access_key",
        "region_name",
    )

    def __init__(self, **kwargs):
        """Create the service from config kwargs (see get_config_s3).

        Required: bucket_name, aws_access_key_id, aws_secret_access_key,
        region_name. Optional: expires_in (presigned-URL TTL in seconds).
        Raises RuntimeError when a required key is missing or empty.
        """
        super().__init__(**kwargs)

        self.__validate_config(**kwargs)

        self.bucket_name = kwargs.get("bucket_name")
        self.region_name = kwargs.get("region_name")
        self.expires_in = kwargs.get("expires_in")

        self.s3 = boto3.client(
            "s3",
            aws_access_key_id=kwargs.get("aws_access_key_id"),
            aws_secret_access_key=kwargs.get("aws_secret_access_key"),
            region_name=kwargs.get("region_name"),
        )

    def get_temp_upload_link(
        self, file_name, file_type: FileType
    ) -> dict[str, str | Any]:
        """Return a presigned PUT URL plus the object's final URL for file_name."""
        return {
            "presigned_url": self._get_presigned_write_url(file_name, file_type),
            "file_key": self._get_object_url(file_name),
        }

    def get_temp_read_link(self, file_name) -> dict[str, str | Any]:
        """Return a presigned GET URL for file_name."""
        return {"presigned_url": self._get_presigned_read_url(file_name)}

    def process_file(self, file_name: str, file_type: FileType = FileType.PNG) -> None:
        """Download file_name, apply the handler for file_type, and re-upload in place."""
        file_bytes = self._get_file_obj(file_name)
        handler = FILE_HANDLER[file_type]["handler"]

        self._upload_file(file_name, handler(file_bytes))

    def _get_object_url(self, file_name: str) -> str:
        # Virtual-hosted-style S3 object URL.
        return f"https://{self.bucket_name}.s3.{self.region_name}.amazonaws.com/{file_name}"

    def _get_presigned_write_url(self, file_name, file_type: FileType) -> str:
        """Presigned put_object URL constrained to the file type's content type."""
        return self.s3.generate_presigned_url(
            "put_object",
            Params={
                "Bucket": self.bucket_name,
                "Key": file_name,
                "ContentType": FILE_HANDLER[file_type]["content_type"],
            },
            ExpiresIn=self.expires_in,
        )

    def _get_presigned_read_url(self, file_name) -> str:
        """Presigned get_object URL for file_name."""
        return self.s3.generate_presigned_url(
            "get_object",
            Params={"Bucket": self.bucket_name, "Key": file_name},
            ExpiresIn=self.expires_in,
        )

    def _get_file_obj(self, file_name: str) -> io.BytesIO:
        """Fetch the object's bytes into an in-memory buffer."""
        return io.BytesIO(
            self.s3.get_object(Bucket=self.bucket_name, Key=file_name)["Body"].read()
        )

    def _upload_file(self, file_name: str, file_bytes: io.BytesIO) -> None:
        """Upload file_bytes to the bucket under file_name."""
        self.s3.upload_fileobj(file_bytes, Bucket=self.bucket_name, Key=file_name)

    @staticmethod
    def __validate_config(**kwargs):
        """Raise RuntimeError for any missing or empty required config key.

        Fixes: the previous version duplicated the bucket_name check and
        repeated the same if/raise block for each key.
        """
        for key in AmazonS3Service._REQUIRED_CONFIG_KEYS:
            if not kwargs.get(key):
                raise RuntimeError(f"{key} is required")
||||
25
storage_service/service/storage_service.py
Normal file
25
storage_service/service/storage_service.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from storage_service.utils.enums.file_type import FileType
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any
|
||||
|
||||
|
||||
class StorageService(ABC):
    """Abstract interface for object-storage backends (e.g. Amazon S3)."""

    def __init__(self, **kwargs):
        """Accept backend-specific config; the base class stores nothing."""

    @abstractmethod
    def get_temp_upload_link(
        self, file_name, file_type: FileType
    ) -> dict[str, str | Any]:
        """Return presigned upload data for file_name of the given type."""

    @abstractmethod
    def get_temp_read_link(self, file_name) -> dict[str, str | Any]:
        """Return presigned read data for file_name."""

    @abstractmethod
    def process_file(self, file_name: str, file_type: FileType) -> None:
        """Post-process the stored file (implementation-defined)."""
|
||||
0
storage_service/utils/__init__.py
Normal file
0
storage_service/utils/__init__.py
Normal file
0
storage_service/utils/enums/__init__.py
Normal file
0
storage_service/utils/enums/__init__.py
Normal file
6
storage_service/utils/enums/file_type.py
Normal file
6
storage_service/utils/enums/file_type.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class FileType(Enum):
    """Supported upload file formats, keyed by their extension string."""

    PNG = "png"
    JPEG = "jpeg"
|
||||
5
storage_service/utils/enums/storage_type.py
Normal file
5
storage_service/utils/enums/storage_type.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class StorageType(Enum):
    """Known storage backends, keyed by the STORAGE_TYPE env value."""

    S3_STORAGE = "s3"
|
||||
9
storage_service/utils/file_handler/__init__.py
Normal file
9
storage_service/utils/file_handler/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from storage_service.utils.enums.file_type import FileType
|
||||
from storage_service.utils.file_handler.handlers.image_handler import (
|
||||
image_handler,
|
||||
)
|
||||
|
||||
# MIME type per supported image format.
_IMAGE_CONTENT_TYPES = {
    FileType.PNG: "image/png",
    FileType.JPEG: "image/jpeg",
}

# Dispatch table: for each file type, the content type used when presigning
# uploads and the handler applied during post-processing.
FILE_HANDLER = {
    ftype: {"content_type": ctype, "handler": image_handler}
    for ftype, ctype in _IMAGE_CONTENT_TYPES.items()
}
|
||||
20
storage_service/utils/file_handler/handlers/image_handler.py
Normal file
20
storage_service/utils/file_handler/handlers/image_handler.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from PIL import Image
|
||||
|
||||
import io
|
||||
|
||||
|
||||
def image_handler(file_bytes: io.BytesIO) -> io.BytesIO:
    """Shrink an image to fit 320x320, strip its metadata, and return PNG bytes.

    Raises PIL.UnidentifiedImageError if file_bytes is not a valid image.
    """
    img = Image.open(file_bytes)

    # In-place, aspect-preserving resize to fit within 320x320.
    img.thumbnail((320, 320))

    # Rebuild the image from raw pixel data so EXIF/metadata is dropped.
    data = list(img.getdata())
    image_without_exif = Image.new(img.mode, img.size)
    image_without_exif.putdata(data)

    # BUGFIX: the original saved `img` (still carrying metadata) and discarded
    # the stripped copy; save the EXIF-free image instead.
    # NOTE(review): output is always PNG regardless of input format — confirm
    # that is intended for JPEG uploads.
    new_byte_img = io.BytesIO()
    image_without_exif.save(new_byte_img, format="PNG")

    new_byte_img.seek(0)

    return new_byte_img
|
||||
9
storage_service/utils/file_name_hash.py
Normal file
9
storage_service/utils/file_name_hash.py
Normal file
@@ -0,0 +1,9 @@
|
||||
import base64
|
||||
from hashlib import md5
|
||||
|
||||
|
||||
def file_name_hash(username: str, file_postfix: str) -> str:
    """Build a storage key: base64(md5(username)) + "_" + file_postfix.

    MD5 is used only for name obfuscation/uniqueness here, not for security.
    """
    digest = md5(username.encode("utf-8")).digest()
    encoded = base64.b64encode(digest).decode()
    return f"{encoded}_{file_postfix}"
|
||||
0
storage_service/worker/__init__.py
Normal file
0
storage_service/worker/__init__.py
Normal file
11
storage_service/worker/storage_file_worker.py
Normal file
11
storage_service/worker/storage_file_worker.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from storage_service.depends.depend_s3_service import (
|
||||
dependency_storage_service,
|
||||
)
|
||||
from storage_service.utils.enums.file_type import FileType
|
||||
from storage_service.utils.file_name_hash import file_name_hash
|
||||
|
||||
|
||||
def storage_file_worker(username: str, file_postfix: str) -> None:
    """RQ job: post-process the stored file belonging to (username, file_postfix)."""
    service = dependency_storage_service()
    service.process_file(file_name_hash(username, file_postfix))
|
||||
Reference in New Issue
Block a user