Merge pull request #28 from HideyoshiSolutions/develop

develop - feat: better ci pipeline
This commit is contained in:
2025-11-08 21:06:38 -03:00
committed by GitHub
20 changed files with 1780 additions and 1137 deletions

28
.dockerignore Normal file
View File

@@ -0,0 +1,28 @@
# Project files
README.md
# Build and CI files
.k8s/
# Git files
.git/
.github/
.githooks/
.gitignore
# Test files
tests/
# IDE files
.vscode/
.idea/
# Docker files
Dockerfile
docker-compose.yml
.dockerignore

161
.github/workflows/deploy.yml vendored Normal file
View File

@@ -0,0 +1,161 @@
# CI pipeline: tests run on every push; image build and deploy are gated
# per-job (see the `if:` conditions on the docker and deploy jobs below).
name: ci
on:
# No branch filter: every push on every branch triggers the workflow.
push:
# Manual trigger; `tag` optionally pins which image tag the deploy job rolls out.
workflow_dispatch:
inputs:
tag:
description: 'Tag to deploy'
required: false
jobs:
# Unit-test job: restores Poetry/pip caches keyed on the lockfile, installs
# dependencies with Poetry, and runs the unittest suite.
run-tests:
  runs-on: ubuntu-latest
  steps:
    - uses: actions/checkout@v4
    - name: Cache Poetry dependencies
      uses: actions/cache@v4
      with:
        path: |
          ~/.cache/pypoetry
          ~/.cache/pip
        key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }}
        restore-keys: |
          ${{ runner.os }}-poetry-
    - name: Setup Python
      uses: actions/setup-python@v5
      with:
        python-version: '3.12'
    - name: Install dependencies
      run: |
        # Pin Poetry to the same version the Dockerfile installs (2.2.1) so
        # CI resolves/installs dependencies exactly like the image build.
        pip install poetry==2.2.1
        poetry sync
    - name: Run tests
      run: |
        poetry run python -m unittest
# Image build job: builds a multi-arch image and pushes it to GHCR with two
# tags — a branch-level tag (latest/dev) and an immutable sha-<short> tag
# that the deploy job consumes.
docker:
  runs-on: ubuntu-latest
  needs: [run-tests]
  # Only build images for pushes to the two long-lived branches.
  if: github.event_name == 'push' && (github.ref_name == 'main' || github.ref_name == 'develop')
  permissions:
    contents: read
    packages: write # required to push to ghcr.io
    id-token: write # optional for OIDC if you use it
  steps:
    # checkout@v4 to match the version used by the other jobs in this workflow.
    - uses: actions/checkout@v4
    - name: Set up QEMU
      uses: docker/setup-qemu-action@v3
    - name: Set up Docker Buildx
      uses: docker/setup-buildx-action@v3
    - name: Log in to GHCR
      uses: docker/login-action@v3
      with:
        registry: ghcr.io
        username: ${{ github.actor }}
        password: ${{ secrets.GITHUB_TOKEN }}
    - name: Prepare image tags
      run: |
        # GHCR requires lowercase repository paths.
        OWNER=$(echo "${GITHUB_REPOSITORY_OWNER}" | tr '[:upper:]' '[:lower:]')
        REPO=$(echo "${GITHUB_REPOSITORY#*/}" | tr '[:upper:]' '[:lower:]')
        # Determine tag: main -> latest, any other allowed branch -> dev.
        if [ "${GITHUB_REF_NAME}" = "main" ]; then
          TAG="latest"
        else
          TAG="dev"
        fi
        SHORT_SHA=$(echo "${GITHUB_SHA}" | cut -c1-7)
        IMAGE_BASE="ghcr.io/${OWNER}/${REPO}"
        echo "IMAGE_LATEST=${IMAGE_BASE}:${TAG}" >> $GITHUB_ENV
        echo "IMAGE_SHA=${IMAGE_BASE}:sha-${SHORT_SHA}" >> $GITHUB_ENV
    - name: Build and push Docker image (with registry cache)
      uses: docker/build-push-action@v5
      with:
        context: .
        push: true
        platforms: linux/amd64,linux/arm64
        tags: |
          ${{ env.IMAGE_LATEST }}
          ${{ env.IMAGE_SHA }}
        cache-from: type=gha
        cache-to: type=gha,mode=max
# Deploy job: decrypts SOPS secrets and applies the templated Kubernetes
# manifests, deploying either the image built for this commit or an
# explicitly requested tag (workflow_dispatch input).
deploy:
  needs: [docker]
  runs-on: ubuntu-latest
  # On workflow_dispatch the docker job is skipped (its `if` only allows push
  # events), and by default a job whose `needs` was skipped is skipped too —
  # which made manual deploys impossible. `!cancelled()` forces evaluation,
  # then we accept docker having either succeeded or been skipped.
  if: ${{ !cancelled() && (needs.docker.result == 'success' || needs.docker.result == 'skipped') && (github.event_name == 'workflow_dispatch' || (github.event_name == 'push' && github.ref_name == 'main')) }}
  environment:
    name: ${{ github.ref_name == 'main' && 'production' || 'dev' }}
    url: https://${{ vars.KUBE_DOMAIN }}
  env:
    # Kubernetes Specific
    KUBE_NAMESPACE: ${{ vars.KUBE_NAMESPACE }}
    KUBE_API_DOMAIN: ${{ vars.KUBE_API_DOMAIN }}
    WORKER_NODE_LABEL: ${{ vars.WORKER_NODE_LABEL }}
    # Application Specific
    # NOTE(review): FRONTEND_PATH looks copied from another service's pipeline —
    # confirm the storage manifests actually consume it.
    FRONTEND_PATH: ${{ vars.FRONTEND_PATH }}
  steps:
    - uses: actions/checkout@v4
    - uses: azure/setup-kubectl@v4
    - name: Set Up Kubeconfig
      uses: azure/k8s-set-context@v4
      with:
        kubeconfig: ${{ secrets.PORTFOLIO_KUBECONFIG }}
    - name: Prepare Image Tag
      run: |
        OWNER=$(echo "${GITHUB_REPOSITORY_OWNER}" | tr '[:upper:]' '[:lower:]')
        REPO=$(echo "${GITHUB_REPOSITORY#*/}" | tr '[:upper:]' '[:lower:]')
        SHORT_SHA=$(echo "${GITHUB_SHA}" | cut -c1-7)
        IMAGE_BASE="ghcr.io/${OWNER}/${REPO}"
        # Manual dispatch may pin a tag; otherwise deploy the sha tag the
        # docker job pushed for this commit.
        IMAGE_TAG="${{ github.event.inputs.tag || '' }}"
        if [ -z "$IMAGE_TAG" ]; then
          IMAGE_TAG="sha-$SHORT_SHA"
        fi
        echo "IMAGE_BASE=${IMAGE_BASE}" >> $GITHUB_ENV
        echo "IMAGE_TAG=${IMAGE_TAG}" >> $GITHUB_ENV
    - name: Import SOPS GPG Key
      run: |
        echo "${{ secrets.PORTFOLIO_GPG_PRIVATE_KEY }}" | gpg --import
    - name: Install SOPS
      run: |
        curl -L https://github.com/mozilla/sops/releases/download/v3.9.1/sops-v3.9.1.linux.amd64 -o /usr/local/bin/sops
        chmod +x /usr/local/bin/sops
    - name: Decrypt SOPS Secrets
      run: |
        cd .k8s
        sops -d secrets.enc.yml > secrets.yml
    - name: Apply Kubernetes Manifests - Configuration
      run: cat .k8s/config.template.yml | envsubst | kubectl apply -f -
    - name: Apply Kubernetes Manifests - Secrets
      run: cat .k8s/secrets.yml | envsubst | kubectl apply -f -
    - name: Apply Kubernetes Manifests - Redis Cluster
      run: cat .k8s/redis.template.yml | envsubst | kubectl apply -f -
    - name: Apply Kubernetes Manifests - Deployment
      run: |
        cat .k8s/deployment.template.yml | envsubst | kubectl apply -f -
        # Wait on the deployment this repo actually creates (storage-deployment);
        # the previous target (frontend-deployment) does not exist in these
        # manifests, so the rollout check could never succeed. Piping the
        # manifest into `rollout status` was also a no-op — the command names
        # its target explicitly and ignores stdin here.
        kubectl rollout status deployment/storage-deployment -n ${KUBE_NAMESPACE} --timeout=120s

View File

@@ -1,44 +0,0 @@
name: ci
on:
push:
branches:
- 'main'
jobs:
docker:
runs-on: ubuntu-latest
steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v2
with:
platforms: linux/amd64,linux/arm64
push: true
tags: yoshiunfriendly/storage-hideyoshi.com:latest
run-dispatcher:
needs: docker
runs-on: ubuntu-latest
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- name: Runs Infra-Hideyoshi.com Deployment Dispatcher
run: |
curl -X POST https://api.github.com/repos/HideyoshiSolutions/infra-hideyoshi.com/dispatches \
-H 'Accept: application/vnd.github.everest-preview+json' \
-u ${{ secrets.ACTIONS_KEY }} \
--data '{"event_type": "refresh-deployments", "client_payload": { "deployments": "storage-deployment storage-processor-deployment" }}'

View File

@@ -1,27 +0,0 @@
name: ci
on:
push
jobs:
run-tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: '3.12'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install poetry
poetry install
- name: Run tests
run: |
poetry run python -m unittest

6
.gitignore vendored
View File

@@ -3,4 +3,8 @@
.idea
**/__pycache__/
**/__pycache__/
.k8s/*.yml
!.k8s/*.template.yml
!.k8s/*.enc.yml

5
.k8s/.sops.yaml Normal file
View File

@@ -0,0 +1,5 @@
# SOPS configuration for the .k8s directory: encrypt only the values under
# data/stringData in secrets.yml / secrets.enc.yml with the listed PGP key.
creation_rules:
- path_regex: ^secrets(\.enc)?\.yml$
encrypted_regex: '^(data|stringData)$'
pgp: >-
8C8D94A7639C87559B0F2F64B7E1F62F69798EB6

12
.k8s/config.template.yml Normal file
View File

@@ -0,0 +1,12 @@
# ConfigMap consumed via envFrom by storage-deployment (deployment.template.yml).
# ${KUBE_NAMESPACE} is substituted by envsubst in the deploy workflow before apply.
apiVersion: v1
kind: ConfigMap
metadata:
name: storage-config
namespace: ${KUBE_NAMESPACE}
data:
SERVER_PORT: "8000"
# Points at the Service defined in redis.template.yml.
REDIS_HOST: "storage-redis-service"
REDIS_PORT: "6379"
# NOTE(review): units unclear — 1800000 reads like milliseconds, but the app's
# S3 config defaults EXPIRES_IN to "3600" (seconds). Confirm which unit the
# consumer expects.
EXPIRES_IN: "1800000"

View File

@@ -0,0 +1,63 @@
# Storage service Deployment. All ${...} placeholders are expanded by envsubst
# in the deploy workflow before kubectl apply.
apiVersion: apps/v1
kind: Deployment
metadata:
name: storage-deployment
namespace: ${KUBE_NAMESPACE}
spec:
replicas: 1
selector:
matchLabels:
app: storage
template:
metadata:
labels:
app: storage
spec:
nodeSelector:
# NOTE(review): envsubst must expand this to a complete "key: value" line —
# confirm WORKER_NODE_LABEL contains both the label key and value.
${WORKER_NODE_LABEL}
# Pull credentials for the private GHCR image.
imagePullSecrets:
- name: ghcr-secret
containers:
- name: storage
# Image reference is assembled in the deploy job (Prepare Image Tag step).
image: ${IMAGE_BASE}:${IMAGE_TAG}
imagePullPolicy: Always
resources:
requests:
memory: "256Mi"
cpu: "250m"
limits:
memory: "256Mi"
cpu: "1000m"
ports:
- containerPort: 8000
readinessProbe:
httpGet:
path: /health
port: 8000
initialDelaySeconds: 60
livenessProbe:
httpGet:
path: /health
port: 8000
initialDelaySeconds: 60
# Non-secret config from the ConfigMap, credentials from the SOPS-decrypted Secret.
envFrom:
- configMapRef:
name: storage-config
- secretRef:
name: storage-secret
---
# ClusterIP Service fronting the storage pods.
apiVersion: v1
kind: Service
metadata:
  namespace: ${KUBE_NAMESPACE}
  name: storage-service
spec:
  selector:
    # Must match the pod labels of storage-deployment (app: storage).
    # The previous selector (app: backend) matched no pods in these manifests,
    # leaving the Service with zero endpoints.
    app: storage
  ports:
    - port: 8000
      protocol: TCP
      targetPort: 8000
  type: ClusterIP

47
.k8s/redis.template.yml Normal file
View File

@@ -0,0 +1,47 @@
# Valkey (Redis-compatible) cache backing the storage service, plus its
# ClusterIP Service. ${KUBE_NAMESPACE} is expanded by envsubst before apply.
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: ${KUBE_NAMESPACE}
name: storage-redis-deployment
spec:
replicas: 1
selector:
matchLabels:
app: storage-redis
template:
metadata:
labels:
app: storage-redis
spec:
containers:
- name: redis
image: valkey/valkey:8.0.6-alpine
imagePullPolicy: "IfNotPresent"
resources:
requests:
memory: "256Mi"
cpu: "75m"
limits:
memory: "256Mi"
cpu: "256m"
ports:
- containerPort: 6379
# NOTE(review): the upstream valkey/valkey image does not document reading a
# VALKEY_PASSWORD env var (that is a Bitnami-image convention). Verify that
# auth is actually enforced here — otherwise the cache runs unauthenticated
# and this env var is silently ignored.
env:
- name: VALKEY_PASSWORD
valueFrom:
secretKeyRef:
name: storage-secret
key: REDIS_PASSWORD
---
apiVersion: v1
kind: Service
metadata:
namespace: ${KUBE_NAMESPACE}
name: storage-redis-service
spec:
selector:
app: storage-redis
ports:
- port: 6379
type: ClusterIP

38
.k8s/secrets.enc.yml Normal file
View File

@@ -0,0 +1,38 @@
apiVersion: v1
kind: Secret
metadata:
name: storage-secret
namespace: ${KUBE_NAMESPACE}
data:
REDIS_PASSWORD: ENC[AES256_GCM,data:QAjxHjGqOIun8jTN,iv:tfOsGEP0+kGfqvL90Ek6ZNOcueUFnYpYGE6f7r2EvoM=,tag:lAxPPRGE2dO//3hLFItOnA==,type:str]
AWS_ACCESS_KEY_ID: ENC[AES256_GCM,data:AtT8rcJxkxgWQ7SEPj5UEVvR+1b/CJt1Rc2zZQ==,iv:862ChLY0MkOwj+HG/sI+NqWFeK1mz6L3m8mWSKDqH4E=,tag:jpRNIXo7jMTYX/BAQG/7aQ==,type:str]
AWS_SECRET_ACCESS_KEY: ENC[AES256_GCM,data:vudblTMIDdRZiAmJeYqYtCXd4Jdv++QnvdIUH8PX3IJceW/vES2UybfcKwtKGJgMt6ZPnskOamM=,iv:JHbcu/Tv81d9NWPqPXcFZBSTmrml8xBEU4+G/pVgjGM=,tag:HnAf3tGcmv3CBUOA9px/rw==,type:str]
AWS_REGION_NAME: ENC[AES256_GCM,data:GfwdCPgAe7vAtrZPYRp0Ng==,iv:RkZ69hQkuobcOL9DSfhcmLIKsK0rnevHTWF+0Mbw7Dg=,tag:0XAm2oXhETKeUbuN2UPgyA==,type:str]
AWS_BUCKET_NAME: ENC[AES256_GCM,data:atzKtMWzlco3rZf5vsFmse1zd+f3xWHUYldeF8O2egmQOmkf,iv:lYw2tM65fszSG8+MhN6edZgvGlwxw0mhOIDvp93LWnE=,tag:iNR7JJzksrFkXrg4/87E/w==,type:str]
VIRUS_CHECKER_API_KEY: ENC[AES256_GCM,data:4Ick9JIfUIIZ/LEp09kMiJM9T4FaLmXKiksoeW0BiTGJlYcps5fZFgDEYBUzEcQTGMK8SD5yPZhEbytXXAvyNFanASsVa8rXX+sO98Xr6XFJqVZEjCJbiw==,iv:5/yl9sUEF7cFqf1RKhoV0oGWTLBxTeJDxShUns8/Qsg=,tag:LvEbdmPIeWQBw6G7wnWQkg==,type:str]
sops:
lastmodified: "2025-11-08T22:29:57Z"
mac: ENC[AES256_GCM,data:s15EYKavUjMl9CNqHY6D6XHJZMSwcWhdfW6IFeGhYtjHi6SXGnx/Vpj+L96011xuDca20gU8YS/m69ML2qPV3D5wTMogXYNYGW70LCnXm4YQK+QAIJmhRYPqjlZoQpI85c698+abWuh25zER4tukSZ/ZmmNxnNGZQEyOOnaw8oU=,iv:4nMkWcFmYzBFRoiqoFLO3pR0NTb3QXxqduBqQxfQ0Sc=,tag:uwcqo8+ErSMCOuZM/eUoQA==,type:str]
pgp:
- created_at: "2025-11-08T22:29:57Z"
enc: |-
-----BEGIN PGP MESSAGE-----
hQIMAwzdivR1H/BQAQ//WSCkM5pDyRMbSSMEP8lfREjwHszaXZLF0MkNfjlDsAEC
h1yVM2+v5snJj5clipD+8FRiTl69Jho7GGjCEDfoahNyhtgr7t/UH7XVDJKJeI8W
55JrWU/T8PBvXo+Ld0c3lKqqdNQ18nWYoEMJeHyUT0MBWYmYlJbvmxKlrgLyQPs5
EUBG77aJUXIn3j0O39GEUTTmHLNITNyincZQsx4Hndgi9T7IriJfGwqQP1+9WMYw
2Hzld8Qb3VeByrVn+Ybykem6vB8IR5aULpfTU9bKvOrQh1iSgVew9bSXZcqO34Kl
OA+DpfJ8LhspPd/00zfto+2h/pwo7tAxaaJtE3NjZfAysFuSF6lX+pkPqj7FOoxh
P/ps7wzsk3F3nI/z62ITh91m/O+WhSBVsOou4xAGIatDBapH2YYy9qF/31IP9u3W
OOiXNoY1AOOu1/+c9/2ROjQ9/ENhJxiDU/Vl921+PoA+E4EGsFtgXdYiulG5TqZu
rkF9eOYw/ds/ptviA4YafFOCmk2oKxDRwhb3oNChqEFQ5hGZ/FqwcwMU+NEI8Blu
nmA/UHR/MBdxAWH/H6HMf/DJ1CNLNSgMDaEkvAA/QLPMswuoecpPLstwpOeGC/nL
qULiv7+wqlXhggT6QcmekKaw6tjOkMOA/sjqpZMpVnXMA0RpV1lPbK52Z8jINufS
XgEyDLyvaqEPLzhG7rNx1IXNBDymXKPmySvopbzjVWXao75verYNK3o/cV+OFihL
sgPoqDMMRHhLA6oepxqw9fK5f5jLOVJEc4TPbpjANZIOTIaxvir3E6avoaO3Qx8=
=7oeD
-----END PGP MESSAGE-----
fp: 8C8D94A7639C87559B0F2F64B7E1F62F69798EB6
encrypted_regex: ^(data|stringData)$
version: 3.11.0

View File

@@ -1,11 +1,30 @@
FROM python:3.12
## ------------------------------- Builder Stage ------------------------------ ##
FROM python:3.12-slim-bookworm AS builder
RUN apt-get update && apt-get install -y --no-install-recommends \
gcc \
build-essential \
python3-dev \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
WORKDIR /app
RUN pip install poetry
RUN pip install poetry==2.2.1
COPY ./ /app/
COPY pyproject.toml poetry.lock poetry.toml ./
RUN poetry install
RUN poetry sync --no-root --without dev
ENTRYPOINT ["poetry", "run", "python", "-m", "storage_service"]
## ------------------------------- Final Stage ------------------------------ ##
FROM python:3.12-slim-bookworm AS production
WORKDIR /app
COPY --from=builder /app/.venv .venv
COPY . .
# Set up environment variables for production
ENV PATH="/app/.venv/bin:$PATH"
ENTRYPOINT ["python", "-m", "storage_service"]

2394
poetry.lock generated

File diff suppressed because it is too large Load Diff

2
poetry.toml Normal file
View File

@@ -0,0 +1,2 @@
[virtualenvs]
in-project = true

View File

@@ -3,7 +3,6 @@ name = "resize-image-service"
version = "0.1.0"
description = ""
authors = ["Vitor Hideyoshi <vitor.h.n.batista@gmail.com>"]
readme = "README.md"
packages = [{include = "storage_service"}]
[tool.poetry.dependencies]
@@ -18,9 +17,7 @@ uvicorn = "^0.29.0"
boto3 = "^1.34.109"
python-multipart = "^0.0.9"
virustotal-python = "^1.0.2"
fastapi-utils = "^0.6.0"
typing-inspect = "^0.9.0"
poethepoet = "^0.26.1"
fastapi-utils = {extras = ["all"], version = "^0.8.0"}
[tool.poetry.group.dev.dependencies]
@@ -28,6 +25,7 @@ isort = "^5.12.0"
black = "^23.7.0"
coverage = "^7.5.1"
pre-commit = "^3.7.1"
poethepoet = "^0.37.0"

View File

@@ -1,4 +1,3 @@
from storage_service.config.config_allowed_origins import get_allowed_origins
from storage_service.controller import health_router, storage_router
from storage_service.utils.exception_handler import (
http_exception_handler,
@@ -7,7 +6,6 @@ from storage_service.utils.exception_handler import (
from fastapi import FastAPI, HTTPException
from fastapi.exceptions import RequestValidationError
from fastapi.middleware.cors import CORSMiddleware
app = FastAPI()
@@ -15,14 +13,5 @@ app = FastAPI()
app.add_exception_handler(HTTPException, http_exception_handler)
app.add_exception_handler(RequestValidationError, validation_exception_handler)
app.add_middleware(
CORSMiddleware,
allow_origins=get_allowed_origins(),
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(storage_router)
app.include_router(health_router)

View File

@@ -1,14 +0,0 @@
from dotenv import load_dotenv
import os
def get_allowed_origins():
load_dotenv()
origins = os.environ.get("ALLOWED_ORIGINS", None)
if origins is None:
return []
return origins.split(",")

View File

@@ -1,5 +1,3 @@
from storage_service.utils.enums.storage_type import StorageType
from dotenv import load_dotenv
import os
@@ -14,5 +12,5 @@ def get_config_s3():
"aws_secret_access_key": os.environ.get("AWS_SECRET_ACCESS_KEY", None),
"region_name": os.environ.get("AWS_REGION_NAME", None),
"bucket_name": os.environ.get("AWS_BUCKET_NAME", None),
"expires_in": os.environ.get("EXPIRES_IN", 3600),
"expires_in": os.environ.get("EXPIRES_IN", "3600"),
}

View File

@@ -16,7 +16,7 @@ from storage_service.utils.exceptions.file_not_found_exception import (
from storage_service.utils.file.file_hash_generator import generate_file_hash
from storage_service.worker.storage_file_worker import storage_file_worker
from fastapi import APIRouter, Depends, HTTPException
from fastapi import APIRouter, Depends
from fastapi_utils.cbv import cbv
from rq import Queue

View File

@@ -37,12 +37,15 @@ def build_client_s3(config: dict) -> botocore.client.BaseClient:
def dependency_storage_service() -> StorageService:
load_dotenv()
if StorageType(os.environ["STORAGE_TYPE"]) == StorageType.S3_STORAGE:
s3_config = get_config_s3()
storage_type = StorageType(os.environ.get("STORAGE_TYPE", "s3"))
return AmazonS3Service(
build_client_s3(s3_config),
s3_config["bucket_name"],
)
match storage_type:
case StorageType.S3_STORAGE:
s3_config = get_config_s3()
raise RuntimeError("Invalid Storage Type")
return AmazonS3Service(
build_client_s3(s3_config),
s3_config["bucket_name"],
)
case _:
raise RuntimeError("Invalid Storage Type")

View File

@@ -23,12 +23,9 @@ from functools import cache
def dependency_virus_checker_service() -> VirusCheckerService:
load_dotenv()
try:
type = VirusCheckerType(os.environ["VIRUS_CHECKER_TYPE"])
except ValueError:
raise RuntimeError("Invalid Virus Checker Type")
checker_type = VirusCheckerType(os.environ.get("VIRUS_CHECKER_TYPE", "total_virus"))
match type:
match checker_type:
case VirusCheckerType.TOTAL_VIRUS:
virus_checker = Virustotal(get_virus_checker_api_key())
return VirusTotalService(virus_checker)