Refactor services and add initial test implementation
This commit is contained in:
@@ -10,10 +10,11 @@ from storage_service.model.storage.process_file_request import (
|
||||
)
|
||||
from storage_service.model.storage.signed_url_response import SignedUrlResponse
|
||||
from storage_service.service.storage.storage_service import StorageService
|
||||
from storage_service.utils.file_name_hash import file_name_hash
|
||||
from storage_service.utils.exceptions.file_not_found_exception import FileNotFoundException
|
||||
from storage_service.utils.file.file_hash_generator import generate_file_hash
|
||||
from storage_service.worker.storage_file_worker import storage_file_worker
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi_utils.cbv import cbv
|
||||
from rq import Queue
|
||||
|
||||
@@ -29,7 +30,7 @@ class StorageController:
|
||||
|
||||
@s3_router.post("/file", status_code=200)
|
||||
def new_file_url(self, new_file_request: NewFileURLRequest) -> SignedUrlResponse:
|
||||
hashed_file_name = file_name_hash(
|
||||
hashed_file_name = generate_file_hash(
|
||||
new_file_request.file_key, new_file_request.file_postfix
|
||||
)
|
||||
|
||||
@@ -39,13 +40,16 @@ class StorageController:
|
||||
|
||||
@s3_router.get("/file", status_code=200)
def file_url(self, file_key: str, file_postfix: str) -> SignedUrlResponse:
    """Return a temporary signed read link for a stored file.

    The public ``(file_key, file_postfix)`` pair is hashed into the
    internal object name before the lookup.

    Raises:
        FileNotFoundException: when the signed link cannot be produced.
    """
    try:
        return self.storage_service.get_temp_read_link(
            generate_file_hash(file_key, file_postfix)
        )
    except Exception as err:
        # NOTE(review): every failure — including credential/config
        # errors from the storage backend — is reported as "not found";
        # confirm this is intended. Chain the cause so the real error
        # is preserved in the traceback.
        raise FileNotFoundException("File not found") from err
|
||||
|
||||
@s3_router.delete("/file", status_code=204)
def delete_file(self, file_key: str, file_postfix: str):
    """Delete the stored object addressed by the hashed key/postfix pair."""
    hashed_name = generate_file_hash(file_key, file_postfix)
    return self.storage_service.delete_file(hashed_name)
|
||||
|
||||
@s3_router.post("/file/process", status_code=200)
|
||||
def process_file(self, process_file_request: ProcessFileRequest):
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
from .storage_service import StorageService
|
||||
from .amazon_s3_service import AmazonS3Service
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from storage_service.depends.depend_virus_checker_service import (
|
||||
dependency_virus_checker_service,
|
||||
)
|
||||
@@ -9,13 +11,15 @@ from storage_service.service.virus_checker.virus_checker_service import (
|
||||
VirusCheckerService,
|
||||
)
|
||||
from storage_service.utils.enums.file_type import FileType
|
||||
from storage_service.utils.file_handler import FILE_HANDLER
|
||||
|
||||
from botocore.client import BaseClient
|
||||
|
||||
import io
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AmazonS3Service(StorageService):
|
||||
virus_checker_service: VirusCheckerService
|
||||
|
||||
@@ -28,7 +32,7 @@ class AmazonS3Service(StorageService):
|
||||
self,
|
||||
s3_client: BaseClient,
|
||||
bucket_name: str,
|
||||
virus_checker_service=dependency_virus_checker_service(),
|
||||
virus_checker_service=None,
|
||||
**kwargs,
|
||||
):
|
||||
self.virus_checker_service = virus_checker_service
|
||||
@@ -41,6 +45,9 @@ class AmazonS3Service(StorageService):
|
||||
raise RuntimeError("Invalid S3 Config: Missing bucket_name")
|
||||
self.bucket_name = bucket_name
|
||||
|
||||
if virus_checker_service is None:
|
||||
self.virus_checker_service = dependency_virus_checker_service()
|
||||
|
||||
if "expires_in" in kwargs:
|
||||
self.expires_in = kwargs["expires_in"]
|
||||
|
||||
@@ -59,15 +66,30 @@ class AmazonS3Service(StorageService):
|
||||
def delete_file(self, file_name: str) -> None:
    """Delete *file_name* from the bucket (delegates to the private helper)."""
    self._delete_file(file_name)
|
||||
|
||||
def process_file(self, file_name: str, file_type: FileType = FileType.PNG) -> dict:
    """Virus-check and post-process a stored file, then re-upload it.

    The file is fetched, scanned, run through the type-specific validator
    (which may shrink it, e.g. thumbnailing), and written back in place.

    Args:
        file_name: hashed object name in the bucket.
        file_type: determines which validator is applied (defaults to PNG).

    Returns:
        dict with ``previous_size`` and ``current_size`` in bytes.

    Raises:
        FileNotFoundError: the object could not be fetched.
        ValueError: the virus scan flagged the file (the object is deleted).
        RuntimeError: the validator failed.
    """
    try:
        file_bytes = self._get_file_obj(file_name)
    except Exception as err:
        # Chain the cause so the underlying S3 error stays visible.
        raise FileNotFoundError("File not found") from err

    if not self.virus_checker_service.check_virus(file_bytes):
        # Never leave a flagged object in the bucket.
        self._delete_file(file_name)
        raise ValueError("Virus Detected")

    try:
        old_size = file_bytes.getbuffer().nbytes

        file_bytes = file_type.get_validator()(file_bytes)

        new_size = file_bytes.getbuffer().nbytes
    except Exception as err:
        raise RuntimeError("Error Processing") from err

    self._upload_file(file_name, file_bytes)

    return {
        "previous_size": old_size,
        "current_size": new_size,
    }
|
||||
|
||||
def _get_presigned_write_url(self, file_name, file_type: FileType) -> str:
|
||||
return self.s3_client.generate_presigned_url(
|
||||
@@ -75,7 +97,7 @@ class AmazonS3Service(StorageService):
|
||||
Params={
|
||||
"Bucket": self.bucket_name,
|
||||
"Key": file_name,
|
||||
"ContentType": FILE_HANDLER[file_type]["content_type"],
|
||||
"ContentType": file_type.get_content_type(),
|
||||
},
|
||||
ExpiresIn=self.expires_in,
|
||||
)
|
||||
@@ -91,7 +113,8 @@ class AmazonS3Service(StorageService):
|
||||
Params={"Bucket": self.bucket_name, "Key": file_name},
|
||||
ExpiresIn=self.expires_in,
|
||||
)
|
||||
return None
|
||||
|
||||
raise FileNotFoundError("File not found")
|
||||
|
||||
def _get_file_obj(self, file_name: str) -> io.BytesIO:
|
||||
return io.BytesIO(
|
||||
|
||||
@@ -20,5 +20,5 @@ class StorageService(ABC):
|
||||
pass
|
||||
|
||||
@abstractmethod
def process_file(self, file_name: str, file_type: FileType) -> dict:
    """Validate/process the stored file; return a dict of size statistics."""
    pass
|
||||
|
||||
@@ -34,7 +34,7 @@ class VirusTotalService(VirusCheckerService):
|
||||
@staticmethod
def _is_valid_file(file_stats: dict) -> bool:
    """Return True when every reported scan category count is zero.

    NOTE(review): "harmless" is also required to be 0 — presumably a
    clean scan reports everything as undetected; confirm against the
    scanner's stats schema.
    """
    return all(
        file_stats.get(category) == 0
        for category in ("malicious", "suspicious", "harmless")
    )
|
||||
|
||||
@@ -1,6 +1,26 @@
|
||||
from enum import Enum
|
||||
from io import BytesIO
|
||||
from typing import Callable
|
||||
|
||||
from storage_service.utils.file.validators import image_validator
|
||||
|
||||
|
||||
class FileType(Enum):
    """Supported upload file types, keyed by file extension."""

    PNG = "png"
    JPEG = "jpeg"

    def get_content_type(self) -> str:
        """Return the HTTP Content-Type header value for this type."""
        if self is FileType.PNG:
            return "image/png"
        if self is FileType.JPEG:
            return "image/jpeg"
        raise ValueError("File Type Not Implemented")

    def get_validator(self) -> Callable[[BytesIO], BytesIO]:
        """Return the callable used to validate/normalise this file type."""
        if self is FileType.PNG or self is FileType.JPEG:
            return image_validator
        raise ValueError("File Type Not Implemented")
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
|
||||
class FileNotFoundException(HTTPException):
    """HTTP error raised when a stored file cannot be located.

    Uses 404 Not Found: the request is well-formed but the resource is
    missing — 400 Bad Request would mis-describe that condition.
    """

    def __init__(self, message: str):
        super().__init__(
            status.HTTP_404_NOT_FOUND, detail=message
        )
|
||||
0
storage_service/utils/file/__init__.py
Normal file
0
storage_service/utils/file/__init__.py
Normal file
@@ -2,7 +2,7 @@ import base64
|
||||
from hashlib import md5
|
||||
|
||||
|
||||
def file_name_hash(file_key: str, file_postfix: str) -> str:
|
||||
def generate_file_hash(file_key: str, file_postfix: str) -> str:
|
||||
hashed_file_key = md5(file_key.encode("utf-8")).digest()
|
||||
hashed_file_key = base64.b64encode(hashed_file_key).decode()
|
||||
|
||||
1
storage_service/utils/file/validators/__init__.py
Normal file
1
storage_service/utils/file/validators/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .image_handler import image_validator
|
||||
@@ -3,10 +3,10 @@ from PIL import Image
|
||||
import io
|
||||
|
||||
|
||||
def image_handler(file_bytes: io.BytesIO) -> io.BytesIO:
|
||||
def image_validator(file_bytes: io.BytesIO) -> io.BytesIO:
|
||||
img = Image.open(file_bytes)
|
||||
|
||||
img.thumbnail((320, 320))
|
||||
img.thumbnail((180, 180))
|
||||
|
||||
data = list(img.getdata())
|
||||
image_without_exif = Image.new(img.mode, img.size)
|
||||
@@ -1,9 +0,0 @@
|
||||
from storage_service.utils.enums.file_type import FileType
|
||||
from storage_service.utils.file_handler.handlers.image_handler import (
|
||||
image_handler,
|
||||
)
|
||||
|
||||
FILE_HANDLER = {
|
||||
FileType.PNG: {"content_type": "image/png", "handler": image_handler},
|
||||
FileType.JPEG: {"content_type": "image/jpeg", "handler": image_handler},
|
||||
}
|
||||
@@ -1,9 +1,30 @@
|
||||
import logging
|
||||
|
||||
from storage_service.depends.depend_s3_service import (
|
||||
dependency_storage_service,
|
||||
)
|
||||
from storage_service.utils.enums.file_type import FileType
|
||||
from storage_service.utils.file_name_hash import file_name_hash
|
||||
from storage_service.utils.file.file_hash_generator import generate_file_hash
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def storage_file_worker(username: str, file_postfix: str) -> None:
    """RQ job: virus-check and post-process the file uploaded for *username*.

    On any processing failure the stored file is deleted so that no
    unvalidated object remains in the bucket.
    """
    storage_service = dependency_storage_service()

    file_name = generate_file_hash(username, file_postfix)
    try:
        stats = storage_service.process_file(file_name)

        # Use the module logger (not print) and single-quoted keys:
        # stats["..."] inside a double-quoted f-string is a SyntaxError
        # on Python <= 3.11.
        logger.info(
            "File processed: %s - Previous Size: %skb - New Size: %skb",
            file_name,
            stats["previous_size"] / 1_000,
            stats["current_size"] / 1_000,
        )
    except Exception:
        # Deliberate broad catch: a file that cannot be processed must
        # not remain in storage. logger.exception keeps the traceback.
        logger.exception("Error processing file. Deleting file: %s.", file_name)

        storage_service.delete_file(file_name)
|
||||
|
||||
Reference in New Issue
Block a user