Reformats Endpoints Configuration and Start Script

This commit is contained in:
2024-05-21 20:36:16 -03:00
parent 000cb08e37
commit c59dc14810
10 changed files with 92 additions and 117 deletions

1
.gitignore vendored
View File

@@ -1,4 +1,5 @@
.env*
.coverage*
.idea

View File

@@ -1,64 +1,11 @@
# `python-base` sets up all our shared environment variables
FROM python:3.12-slim as python-base
FROM python:3.12
# python
ENV PYTHONUNBUFFERED=1 \
# prevents python creating .pyc files
PYTHONDONTWRITEBYTECODE=1 \
\
# pip
PIP_NO_CACHE_DIR=off \
PIP_DISABLE_PIP_VERSION_CHECK=on \
PIP_DEFAULT_TIMEOUT=100 \
\
# poetry
# https://python-poetry.org/docs/configuration/#using-environment-variables
POETRY_VERSION=1.5.1 \
# make poetry install to this location
POETRY_HOME="/opt/poetry" \
# make poetry create the virtual environment in the project's root
# it gets named `.venv`
POETRY_VIRTUALENVS_IN_PROJECT=true \
# do not ask any interactive question
POETRY_NO_INTERACTION=1 \
\
# paths
# this is where our requirements + virtual environment will live
PYSETUP_PATH="/opt/pysetup" \
VENV_PATH="/opt/pysetup/.venv"
WORKDIR /app
RUN pip install poetry
# prepend poetry and venv to path
ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH"
COPY ./ /app/
RUN poetry install
# `builder-base` stage is used to build deps + create our virtual environment
FROM python-base as builder-base
RUN apt-get update \
&& apt-get install --no-install-recommends -y \
# deps for installing poetry
curl \
# deps for building python deps
build-essential
# install poetry - respects $POETRY_VERSION & $POETRY_HOME
RUN curl -sSL https://install.python-poetry.org | python3 -
# copy project requirement files here to ensure they will be cached.
WORKDIR $PYSETUP_PATH
COPY . .
# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally
RUN poetry install --no-dev
# `production` stage copies the built virtual environment for the runtime image
FROM python-base as production
COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH
WORKDIR $PYSETUP_PATH
EXPOSE 5000-9000
# Run your app
CMD [ "./run-queue.sh" ]
ENTRYPOINT ["poetry", "run", "python", "-m", "storage_service"]

View File

@@ -33,6 +33,8 @@ pre-commit = "^3.7.1"
[tool.poe.tasks]
'run' = "python -m storage_service"
'run:queue' = "python -m storage_service --queue"
'run:dev' = "python -m storage_service --dev"
'create-hooks' = "bash .githooks/set-hooks.sh"
'test' = "coverage run -m unittest -v"

View File

@@ -1,11 +0,0 @@
#!/bin/bash
# Container start script: with --queue / -q run the RQ background worker,
# otherwise start the storage service API.
if [[ $1 == "--queue" || $1 == "-q" ]]; then
    # Blocking worker loop; scheduler enabled for delayed jobs.
    rq worker --with-scheduler
    exit 0
else
    python -m storage_service
fi
# NOTE(review): reached only after the server process exits in the non-queue
# branch; with no extra args `exec "$@"` is a no-op — confirm this is intended.
exec "$@"

View File

@@ -0,0 +1,28 @@
"""Application wiring for the storage service.

Builds the module-level FastAPI ``app`` that uvicorn serves (see
``storage_service.__main__``): custom exception handlers, CORS middleware,
and the health/storage routers.
"""
from storage_service.config.config_allowed_origins import get_allowed_origins
from storage_service.controller import health_router, storage_router
from storage_service.utils.exception_handler import (
    http_exception_handler,
    validation_exception_handler,
)
from fastapi import FastAPI, HTTPException
from fastapi.exceptions import RequestValidationError
from fastapi.middleware.cors import CORSMiddleware

# Single application instance; created at import time.
app = FastAPI()

# Route HTTPException and request-validation failures through the
# project's custom handlers instead of FastAPI's defaults.
app.add_exception_handler(HTTPException, http_exception_handler)
app.add_exception_handler(RequestValidationError, validation_exception_handler)

# CORS: allowed origins come from configuration; methods/headers unrestricted.
app.add_middleware(
    CORSMiddleware,
    allow_origins=get_allowed_origins(),
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Mount the API routes.
app.include_router(storage_router)
app.include_router(health_router)

View File

@@ -1,14 +1,42 @@
from storage_service.config.config_server import get_config_server
from storage_service.controller import app
from storage_service.depends.depend_queue import dependency_queue_worker
import uvicorn
import argparse
def main():
config = get_config_server()
uvicorn.run(app, host=config["host"], port=config["port"])
def main(is_queue=False, is_dev=False):
    """Run the service.

    Args:
        is_queue: start the RQ queue worker (with scheduler) instead of
            the HTTP API.
        is_dev: enable uvicorn auto-reload for development.
    """
    if is_queue:
        # Blocking call: processes queued jobs until interrupted.
        dependency_queue_worker().work(with_scheduler=True)
        return
    # Server path: merge the configured host/port with the reload flag.
    server_options = dict(get_config_server())
    server_options["reload"] = is_dev
    # uvicorn is given the app as an import string so it can re-import it.
    uvicorn.run("storage_service.__init__:app", **server_options)
if __name__ == "__main__":
    # CLI entry point. Fix: removed the stale unconditional `main()` call
    # (leftover from the pre-argparse version) that started the server
    # before arguments were parsed, ignoring --queue/--dev.
    parser = argparse.ArgumentParser(description="Storage Service")
    parser.add_argument(
        "-q",
        "--queue",
        dest="queue",
        default=False,
        action="store_true",
        help="Runs the worker to process the queue",
    )
    parser.add_argument(
        "-d",
        "--dev",
        dest="dev_mode",
        default=False,
        action="store_true",
        help="Run the server in development mode.",
    )
    args = parser.parse_args()
    main(args.queue, args.dev_mode)

View File

@@ -1,30 +1,2 @@
from storage_service.config.config_allowed_origins import get_allowed_origins
from storage_service.controller.health_checker_controller import health_router
from storage_service.controller.storage_controller import s3_router
from storage_service.utils.exception_handler import (
http_exception_handler,
validation_exception_handler,
)
from fastapi import FastAPI, HTTPException
from fastapi.exceptions import RequestValidationError
from fastapi.middleware.cors import CORSMiddleware
from starlette.responses import JSONResponse
app = FastAPI()
app.add_exception_handler(HTTPException, http_exception_handler)
app.add_exception_handler(RequestValidationError, validation_exception_handler)
app.add_middleware(
CORSMiddleware,
allow_origins=get_allowed_origins(),
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(s3_router)
app.include_router(health_router)
from .health_checker_controller import router as health_router
from .storage_controller import router as storage_router

View File

@@ -5,11 +5,11 @@ from storage_service.model.health_check.health_check_response import (
from fastapi import APIRouter
from fastapi_utils.cbv import cbv
health_router = APIRouter(tags=["health"])
router = APIRouter(tags=["health"])
@cbv(health_router)
@cbv(router)
class HealthCheckerController:
@health_router.get("/health", status_code=200)
@router.get("/health", status_code=200)
def health(self) -> HealthCheckResponse:
return HealthCheckResponse(status="healthy")

View File

@@ -20,17 +20,17 @@ from fastapi import APIRouter, Depends, HTTPException
from fastapi_utils.cbv import cbv
from rq import Queue
s3_router = APIRouter(tags=["storage"])
router = APIRouter(tags=["storage"])
@cbv(s3_router)
@cbv(router)
class StorageController:
queue: Queue = Depends(dependency_queue, use_cache=True)
storage_service: StorageService = Depends(
dependency_storage_service, use_cache=True
)
@s3_router.post("/file", status_code=200)
@router.post("/file", status_code=200)
def new_file_url(self, new_file_request: NewFileURLRequest) -> SignedUrlResponse:
hashed_file_name = generate_file_hash(
new_file_request.file_key, new_file_request.file_postfix
@@ -40,7 +40,7 @@ class StorageController:
hashed_file_name, new_file_request.file_type
)
@s3_router.get("/file", status_code=200)
@router.get("/file", status_code=200)
def file_url(self, file_key: str, file_postfix: str) -> SignedUrlResponse:
try:
return self.storage_service.get_temp_read_link(
@@ -49,13 +49,13 @@ class StorageController:
except Exception as _:
raise FileNotFoundException("File not found")
@s3_router.delete("/file", status_code=204)
@router.delete("/file", status_code=204)
def delete_file(self, file_key: str, file_postfix: str):
return self.storage_service.delete_file(
generate_file_hash(file_key, file_postfix)
)
@s3_router.post("/file/process", status_code=200)
@router.post("/file/process", status_code=200)
def process_file(self, process_file_request: ProcessFileRequest):
self.queue.enqueue(
storage_file_worker,

View File

@@ -1,8 +1,16 @@
from storage_service.config.config_redis import get_config_redis
from redis import Redis
from rq import Queue
from rq import Queue, Worker
from functools import cache
def dependency_queue():
return Queue(connection=Redis(**get_config_redis()))
@cache
def dependency_queue() -> Queue:
    """Return the RQ queue named "default" (one instance per process via @cache)."""
    return Queue(name="default", connection=Redis(**get_config_redis()))
@cache
def dependency_queue_worker() -> Worker:
    """Return a memoized RQ worker listening on the "default" queue."""
    return Worker(["default"], connection=Redis(**get_config_redis()))