Refactors Services and API Endpoints

This commit is contained in:
2024-05-20 22:36:42 -03:00
parent aeaa45bde6
commit 83f531c2d7
16 changed files with 172 additions and 120 deletions

View File

@@ -3,46 +3,58 @@ from __future__ import annotations
from storage_service.depends.depend_virus_checker_service import (
dependency_virus_checker_service,
)
from storage_service.model.storage.signed_url_response import SignedUrlResponse
from storage_service.service.storage.storage_service import StorageService
from storage_service.service.virus_checker.virus_checker_service import (
VirusCheckerService,
)
from storage_service.utils.enums.file_type import FileType
from storage_service.utils.file_handler import FILE_HANDLER
import boto3
from botocore.client import BaseClient
import io
from typing import Any
class AmazonS3Service(StorageService):
    """Storage backend that issues presigned Amazon S3 URLs through an
    injected boto3 client and delegates virus checking to an injected
    VirusCheckerService."""

    virus_checker_service: VirusCheckerService
    s3_client: BaseClient
    bucket_name: str
    # Default presigned-URL lifetime in seconds; overridable via kwargs.
    expires_in: int = 3600

    def __init__(
        self,
        s3_client: BaseClient,
        bucket_name: str,
        virus_checker_service: VirusCheckerService | None = None,
        **kwargs,
    ):
        """Validate required S3 configuration and store the collaborators.

        :param s3_client: pre-configured boto3 S3 client (required).
        :param bucket_name: target S3 bucket (required).
        :param virus_checker_service: optional override; resolved from
            ``dependency_virus_checker_service()`` when omitted.
        :raises RuntimeError: if ``s3_client`` or ``bucket_name`` is missing.
        """
        # BUG FIX: the previous signature used
        # ``virus_checker_service=dependency_virus_checker_service()`` — the
        # call runs once at class-definition (import) time and shares a single
        # service instance across every AmazonS3Service built without an
        # explicit argument. Resolve the default lazily, per instance.
        if virus_checker_service is None:
            virus_checker_service = dependency_virus_checker_service()
        self.virus_checker_service = virus_checker_service
        if s3_client is None:
            raise RuntimeError("Invalid S3 Config: Missing s3_client")
        self.s3_client = s3_client
        if bucket_name is None:
            raise RuntimeError("Invalid S3 Config: Missing bucket_name")
        self.bucket_name = bucket_name
        # BUG FIX: _get_object_url (used for get_temp_upload_link's
        # "file_key") reads self.region_name, but the refactored __init__
        # stopped assigning it. Keep accepting it through kwargs as before.
        self.region_name = kwargs.get("region_name")
        if "expires_in" in kwargs:
            self.expires_in = kwargs["expires_in"]
def get_temp_upload_link(
    self, file_name, file_type: FileType
) -> dict[str, str | Any]:
    """Build a one-time upload descriptor for *file_name*.

    Returns a dict with ``presigned_url`` (signed PUT URL) and ``file_key``
    (the object's public-style URL).
    """
    presigned = self._get_presigned_write_url(file_name, file_type)
    object_url = self._get_object_url(file_name)
    return {"presigned_url": presigned, "file_key": object_url}
def get_temp_read_link(self, file_name) -> SignedUrlResponse:
    """Return a short-lived signed read link for *file_name*.

    The signed URL is None when the object is absent (see
    _get_presigned_read_url); expires_in reports the configured lifetime.
    """
    url = self._get_presigned_read_url(file_name)
    return SignedUrlResponse(signed_url=url, expires_in=self.expires_in)
def delete_file(self, file_name: str) -> None:
    """Remove *file_name* from storage by delegating to the S3-specific helper."""
    self._delete_file(file_name)
@@ -57,11 +69,8 @@ class AmazonS3Service(StorageService):
self._upload_file(file_name, handler(file_bytes))
def _get_object_url(self, file_name: str) -> str:
return f"https://{self.bucket_name}.s3.{self.region_name}.amazonaws.com/{file_name}"
def _get_presigned_write_url(self, file_name, file_type: FileType) -> str:
return self.s3.generate_presigned_url(
return self.s3_client.generate_presigned_url(
"put_object",
Params={
"Bucket": self.bucket_name,
@@ -72,12 +81,12 @@ class AmazonS3Service(StorageService):
)
def _get_presigned_read_url(self, file_name) -> str | None:
result = self.s3.list_objects(Bucket=self.bucket_name, Prefix=file_name)
result = self.s3_client.list_objects(Bucket=self.bucket_name, Prefix=file_name)
if "Contents" in result and file_name in map(
lambda x: x["Key"], result["Contents"]
):
return self.s3.generate_presigned_url(
return self.s3_client.generate_presigned_url(
"get_object",
Params={"Bucket": self.bucket_name, "Key": file_name},
ExpiresIn=self.expires_in,
@@ -86,28 +95,15 @@ class AmazonS3Service(StorageService):
def _get_file_obj(self, file_name: str) -> io.BytesIO:
return io.BytesIO(
self.s3.get_object(Bucket=self.bucket_name, Key=file_name)["Body"].read()
self.s3_client.get_object(Bucket=self.bucket_name, Key=file_name)[
"Body"
].read()
)
def _upload_file(self, file_name: str, file_bytes: io.BytesIO) -> None:
self.s3.upload_fileobj(file_bytes, Bucket=self.bucket_name, Key=file_name)
self.s3_client.upload_fileobj(
file_bytes, Bucket=self.bucket_name, Key=file_name
)
def _delete_file(self, file_name: str) -> None:
self.s3.delete_object(Bucket=self.bucket_name, Key=file_name)
@staticmethod
def __validate_config(**kwargs):
if not kwargs.get("bucket_name"):
raise RuntimeError("bucket_name is required")
if not kwargs.get("aws_access_key_id"):
raise RuntimeError("aws_access_key_id is required")
if not kwargs.get("aws_secret_access_key"):
raise RuntimeError("aws_secret_access_key is required")
if not kwargs.get("region_name"):
raise RuntimeError("region_name is required")
if not kwargs.get("bucket_name"):
raise RuntimeError("bucket_name is required")
self.s3_client.delete_object(Bucket=self.bucket_name, Key=file_name)

View File

@@ -1,23 +1,18 @@
from __future__ import annotations
from storage_service.model.storage.signed_url_response import SignedUrlResponse
from storage_service.utils.enums.file_type import FileType
from abc import ABC, abstractmethod
from typing import Any
class StorageService(ABC):
def __init__(self, **kwargs):
@abstractmethod
def get_temp_upload_link(
    self, file_name, file_type: FileType
) -> dict[str, str | Any]:
    """Produce an upload descriptor: a presigned URL plus the file key."""
@abstractmethod
def get_temp_read_link(self, file_name) -> SignedUrlResponse:
    """Produce a temporary, signed read link for *file_name*."""
@abstractmethod