From 51d66eacf004c87453c1af41d3ba1604888f3e11 Mon Sep 17 00:00:00 2001
From: ronihdzz
Date: Sun, 11 May 2025 10:22:59 -0600
Subject: [PATCH 01/36] feat: add docker compose to run tests

---
 docker-compose.yml | 34 ++++++++++++++++++++++++++++++++++
 1 file changed, 34 insertions(+)
 create mode 100644 docker-compose.yml

diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..121171a
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,34 @@
+services:
+  db-postgres:
+    image: postgres:13
+    environment:
+      POSTGRES_USER: test
+      POSTGRES_PASSWORD: test
+      POSTGRES_DB: test_db
+    ports:
+      - 9999:5432
+
+  db-mongodb:
+    image: mongo:latest
+    ports:
+      - 27017:27017
+
+  db-redis:
+    image: redis:latest
+    ports:
+      - 6379:6379
+
+  testing:
+    build:
+      context: .
+      dockerfile: docker_images/testing/Dockerfile.testing
+    env_file:
+      - ./.envs/.env.testing.docker
+    environment:
+      POSTGRESQL_URL: postgresql://test:test@db-postgres:5432/test_db
+      MONGO_URL: mongodb://db-mongodb:27017/test_db
+      REDIS_URL: redis://db-redis:6379
+    depends_on:
+      - db-postgres
+      - db-mongodb
+      - db-redis
\ No newline at end of file

From fe656837a3ca5154ac565ab9040ede881f7ec6ef Mon Sep 17 00:00:00 2001
From: ronihdzz
Date: Sun, 11 May 2025 10:25:18 -0600
Subject: [PATCH 02/36] feat: add Dockerfile for running tests and settings
 for the testing image

---
 docker_images/testing/Dockerfile.testing | 44 +++++++++++++++
 docker_images/testing/ci.env.sh          | 39 ++++++++++++++
 docker_images/testing/entrypoint.sh      | 68 ++++++++++++++++++++++++
 3 files changed, 151 insertions(+)
 create mode 100644 docker_images/testing/Dockerfile.testing
 create mode 100644 docker_images/testing/ci.env.sh
 create mode 100644 docker_images/testing/entrypoint.sh

diff --git a/docker_images/testing/Dockerfile.testing b/docker_images/testing/Dockerfile.testing
new file mode 100644
index 0000000..665c9b1
--- /dev/null
+++ b/docker_images/testing/Dockerfile.testing
@@ -0,0 +1,44 @@
+# Dockerfile.testing
+
+FROM python:3.13-slim
+
+# Set the working directory
+WORKDIR /app
+
+# Install the required system tools
+RUN apt-get update && apt-get install -y \
+    gcc \
+    python3-dev \
+    libpq-dev \
+    curl \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Poetry via the official installer
+RUN curl -sSL https://install.python-poetry.org | python3 - && \
+    ln -s /root/.local/bin/poetry /usr/local/bin/poetry
+
+# Copy the dependency files first to take advantage of the Docker cache
+COPY pyproject.toml poetry.lock* /tmp/
+
+# Install the project dependencies
+RUN cd /tmp && \
+    poetry config virtualenvs.create false && \
+    poetry install --no-interaction --no-root --with dev
+
+# Copy the rest of the project
+COPY pyproject.toml poetry.lock* ./
+COPY docker_images/testing/ci.env.sh .
+COPY docker_images/testing/entrypoint.sh .
+COPY src/ ./src/
+
+# Make the scripts executable
+RUN chmod +x ci.env.sh entrypoint.sh
+
+# Set PYTHONPATH so imports resolve from /app
+ENV PYTHONPATH=/app
+
+# Use the custom entrypoint
+ENTRYPOINT ["./entrypoint.sh"]
+
+# No default arguments; the entrypoint runs the tests
+CMD []

diff --git a/docker_images/testing/ci.env.sh b/docker_images/testing/ci.env.sh
new file mode 100644
index 0000000..9b4a954
--- /dev/null
+++ b/docker_images/testing/ci.env.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+echo "📦 Running ci.env.sh..."
+
+if [ "$CI" = "true" ]; then
+    echo "✅ CI detected, overriding variables with GitHub Actions values..."
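+    # GitHub Actions sets CI=true on its runners, so this branch is taken
+    # automatically in CI; locally CI is normally unset and the else branch runs.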
+
+    # Here you can associate environment variables with GitHub Actions values
+    # Available variables in GitHub Actions:
+    #   - GITHUB_DATABASE_POSTGRESQL ==> connects to the PostgreSQL database running in GitHub Actions
+    #   - GITHUB_DATABASE_MONGODB    ==> connects to the MongoDB database running in GitHub Actions
+    #   - GITHUB_DATABASE_REDIS      ==> connects to the Redis database running in GitHub Actions
+    # Example:
+    #   export POSTGRESQL_URL="${GITHUB_DATABASE_POSTGRESQL:-$POSTGRESQL_URL}"
+    #   export MONGO_URL="${GITHUB_DATABASE_MONGODB:-$MONGO_URL}"
+    #   export REDIS_URL="${GITHUB_DATABASE_REDIS:-$REDIS_URL}"
+
+    # Add environment variable associations with GitHub Actions values here
+    # -----------------------------------------------------------------
+    export POSTGRESQL_URL="${GITHUB_DATABASE_POSTGRESQL:-$POSTGRESQL_URL}"
+    export MONGO_URL="${GITHUB_DATABASE_MONGODB:-$MONGO_URL}"
+    export REDIS_URL="${GITHUB_DATABASE_REDIS:-$REDIS_URL}"
+
+
+
+
+
+
+
+
+    echo "🔍 POSTGRESQL_URL=${POSTGRESQL_URL}"
+    echo "🔍 MONGO_URL=${MONGO_URL}"
+    echo "🔍 REDIS_URL=${REDIS_URL}"
+else
+    echo "🧪 Local mode: using local environment variables"
+fi
+
+echo "🎬 Variables ready. Continuing with tests..."
\ No newline at end of file
diff --git a/docker_images/testing/entrypoint.sh b/docker_images/testing/entrypoint.sh
new file mode 100644
index 0000000..6f41da1
--- /dev/null
+++ b/docker_images/testing/entrypoint.sh
@@ -0,0 +1,68 @@
+#!/bin/sh
+set -e
+
+echo "📦 Running entrypoint.sh..."
+
+# 🧬 Load variables if the file exists
+if [ -f ./ci.env.sh ]; then
+    echo "📦 Sourcing ci.env.sh..."
+    . ./ci.env.sh
+else
+    echo "⚠️ No ci.env.sh found. Skipping..."
+fi
+
+# 🗂️ Make sure the reports directory exists
+mkdir -p reports
+echo "📁 Reports directory created/verified at: $(pwd)/reports"
+
+# 🧪 Run the tests with coverage
+echo "🧪 Running tests with coverage..."
+poetry run coverage run -m pytest src/tests -s -v --lf --junitxml=reports/unittest_report.xml
+
+# 📄 Generate the coverage reports
+echo "📊 Generating coverage reports..."
+poetry run coverage xml -o reports/coverage.xml
+poetry run coverage report
+
+# 🖼️ Generate the SVG badge
+echo "🎨 Generating coverage badge..."
+poetry run coverage-badge -o reports/coverage.svg
+
+# Verify that the report files exist before copying them
+echo "🔍 Verifying that the report files exist:"
+ls -la reports/
+
+# 📤 Copy the selected files to /app/coverage-reports/
+echo "📤 Copying reports to /app/coverage-reports/"
+mkdir -p /app/coverage-reports/
+echo "📁 Destination directory created: /app/coverage-reports/"
+
+# Copy coverage.xml, with verification
+if [ -f reports/coverage.xml ]; then
+    cp reports/coverage.xml /app/coverage-reports/
+    echo "✅ coverage.xml copied successfully"
+else
+    echo "❌ reports/coverage.xml does not exist!"
+fi
+
+# Copy coverage.svg, with verification
+if [ -f reports/coverage.svg ]; then
+    cp reports/coverage.svg /app/coverage-reports/
+    echo "✅ coverage.svg copied successfully"
+else
+    echo "❌ reports/coverage.svg does not exist!"
+fi
+
+# Copy unittest_report.xml, with verification
+if [ -f reports/unittest_report.xml ]; then
+    cp reports/unittest_report.xml /app/coverage-reports/
+    echo "✅ unittest_report.xml copied successfully"
+else
+    echo "❌ reports/unittest_report.xml does not exist!"
+fi
+
+# Show the contents of the destination directory
+echo "📋 Contents of /app/coverage-reports/:"
+ls -la /app/coverage-reports/
+
+echo "✅ Entrypoint complete."
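+
+# Note: the reports land in /app/coverage-reports/ inside the container; they
+# are only visible on the host if that path is mounted as a volume or copied
+# out afterwards (e.g. with docker cp) - the compose file in this series does
+# not mount it by itself.
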
From 50371ecb5bd52a0b2fac106f659cc0e16156c7c7 Mon Sep 17 00:00:00 2001
From: ronihdzz
Date: Sun, 11 May 2025 10:31:01 -0600
Subject: [PATCH 03/36] feat: add environment files for running tests

---
 .envs/.env.testing.docker | 3 +++
 .envs/.env.testing.local  | 3 +++
 .gitignore                | 6 ++++++
 3 files changed, 12 insertions(+)
 create mode 100644 .envs/.env.testing.docker
 create mode 100644 .envs/.env.testing.local

diff --git a/.envs/.env.testing.docker b/.envs/.env.testing.docker
new file mode 100644
index 0000000..a56092e
--- /dev/null
+++ b/.envs/.env.testing.docker
@@ -0,0 +1,3 @@
+POSTGRESQL_URL=postgresql://test:test@db-postgres:5432/test_db
+MONGO_URL=mongodb://db-mongodb:27017/test_db
+REDIS_URL=redis://db-redis:6379
\ No newline at end of file
diff --git a/.envs/.env.testing.local b/.envs/.env.testing.local
new file mode 100644
index 0000000..023b8aa
--- /dev/null
+++ b/.envs/.env.testing.local
@@ -0,0 +1,3 @@
+POSTGRESQL_URL=postgresql://test:test@localhost:5432/test_db
+MONGO_URL=mongodb://localhost:27017/test_db
+REDIS_URL=redis://localhost:6379
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 0a19790..39df112 100644
--- a/.gitignore
+++ b/.gitignore
@@ -135,6 +135,7 @@ venv/
 ENV/
 env.bak/
 venv.bak/
+.envs/
 
 # Spyder project settings
 .spyderproject
@@ -172,3 +173,8 @@ cython_debug/
 
 # PyPI configuration file
 .pypirc
+
+
+# Not ignored files
+!.envs/.env.testing.docker
+!.envs/.env.testing.local
\ No newline at end of file

From 6c801e604535bd20a1a506051ee663613fc8db64 Mon Sep 17 00:00:00 2001
From: ronihdzz
Date: Sun, 11 May 2025 12:52:09 -0600
Subject: [PATCH 04/36] feat: add settings conventions

---
 .envs/.env.testing.local         |  3 --
 .gitignore                       |  2 +-
 .pre-commit-config.yaml          |  3 +-
 .vscode/launch.json              |  1 -
 src/core/__init__.py             |  0
 src/core/settings/__init__.py    | 83 ++++++++++++++++++++++++++++
 src/core/settings/base.py        | 61 +++++++++++++++++++++++
 src/core/settings/development.py |  4 ++
 src/core/settings/local.py       | 14 ++++++
 src/core/settings/production.py  |  4 ++
 src/core/settings/staging.py     |  4 ++
 src/core/settings/testing.py     |  4 ++
 src/main.py                      |  1 +
 src/shared/__init__.py           |  0
 src/shared/environment.py        | 40 +++++++++++++++
 src/shared/path.py               | 69 ++++++++++++++++++++++++++
 16 files changed, 287 insertions(+), 6 deletions(-)
 delete mode 100644 .envs/.env.testing.local
 create mode 100644 src/core/__init__.py
 create mode 100644 src/core/settings/__init__.py
 create mode 100644 src/core/settings/base.py
 create mode 100644 src/core/settings/development.py
 create mode 100644 src/core/settings/local.py
 create mode 100644 src/core/settings/production.py
 create mode 100644 src/core/settings/staging.py
 create mode 100644 src/core/settings/testing.py
 create mode 100644 src/shared/__init__.py
 create mode 100644 src/shared/environment.py
 create mode 100644 src/shared/path.py

diff --git a/.envs/.env.testing.local b/.envs/.env.testing.local
deleted file mode 100644
index 023b8aa..0000000
--- a/.envs/.env.testing.local
+++ /dev/null
@@ -1,3 +0,0 @@
-POSTGRESQL_URL=postgresql://test:test@localhost:5432/test_db
-MONGO_URL=mongodb://localhost:27017/test_db
-REDIS_URL=redis://localhost:6379
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 39df112..e3cb4d5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -177,4 +177,4 @@ cython_debug/
 
 # Not ignored files
 !.envs/.env.testing.docker
-!.envs/.env.testing.local
\ No newline at end of file
+!.envs/.env.test
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4564c0e..125181c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,4 +5,5 @@ repos:
       - id: mypy
         args: ["src","--config-file=mypy.ini"]
         language: system
-        pass_filenames: false
\ No newline at end of file
+        pass_filenames: false
+        exclude: ^(src/shared/path.py|src/shared/environment.py)$
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
index fc3dc81..6a7dda6 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -17,7 +17,6 @@
             "jinja": true,
             "cwd": "${workspaceFolder}/src",
             "env": {
-                "ENVIRONMENT": "development"
             },
             "python": "${workspaceFolder}/env/bin/python"
         }
diff --git a/src/core/__init__.py b/src/core/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/core/settings/__init__.py b/src/core/settings/__init__.py
new file mode 100644
index 0000000..6f5b2e7
--- /dev/null
+++ b/src/core/settings/__init__.py
@@ -0,0 +1,83 @@
+import logging
+import sys
+from typing import ClassVar
+
+import sentry_sdk
+from loguru import logger
+
+from core.settings.base import Settings
+from core.settings.development import DevelopmentSettings
+from core.settings.local import LocalSettings
+from core.settings.production import ProductionSettings
+from core.settings.staging import StagingSettings
+from core.settings.testing import TestingSettings
+from shared.environment import AppEnvironment
+from shared.path import APP_ENVIRONMENT
+
+
+class SettingsManager:
+
+    SETTINGS_CLASS_DICT: ClassVar[dict[str, type[Settings]]] = {
+        AppEnvironment.LOCAL.value: LocalSettings,
+        AppEnvironment.DEVELOPMENT.value: DevelopmentSettings,
+        AppEnvironment.STAGING.value: StagingSettings,
+        AppEnvironment.PRODUCTION.value: ProductionSettings,
+        AppEnvironment.TESTING.value: TestingSettings,
+    }
+
+    def __init__(self, environment: str):
+        self.environment = environment
+        self.settings: Settings = self._get_settings()
+        self._initialize_third_apps()
+        self._show_project_info()
+
+    def _initialize_third_apps(self) -> None:
+        self._initialize_sentry()
+        self._initialize_logger()
+
+    def _show_project_info(self) -> None:
+        logger.info(f"ENVIRONMENT: {self.settings.ENVIRONMENT}")
+        logger.info(f"PROJECT: {self.settings.PROJECT.NAME}")
+        logger.info(f"DESCRIPTION: {self.settings.PROJECT.DESCRIPTION}")
+        logger.info(f"VERSION: {self.settings.PROJECT.VERSION}")
+        logger.info(f"CODE: {self.settings.PROJECT.CODE}")
+        logger.info(f"AUTHORS: {self.settings.PROJECT.AUTHORS}")
+
+    def _initialize_sentry(self) -> None:
+        if self.environment in [
+            AppEnvironment.DEVELOPMENT,
+            AppEnvironment.STAGING,
+            AppEnvironment.PRODUCTION,
+        ]:
+            self._sentry_setup()
+
+    def _initialize_logger(self) -> None:
+        logger.remove()
+        level = logging.DEBUG if self.settings.LOG.DEBUG else logging.INFO
+        logger.add(
+            sink=sys.stdout,
+            level=level,
+            colorize=self.settings.LOG.COLORIZE,
+            enqueue=self.settings.LOG.ENQUEUE,
+            serialize=self.settings.LOG.SERIALIZE,
+        )
+
+    def _sentry_setup(self) -> None:
+        sentry_sdk.init(
+            dsn=self.settings.SENTRY_DSN,
+            environment=self.settings.ENVIRONMENT,
+            traces_sample_rate=0,
+        )
+
+    def _get_settings(self) -> Settings:
+        try:
+            settings_class: type[Settings] = self.SETTINGS_CLASS_DICT[self.environment]
+        except KeyError as exc:
+            raise ValueError(f"Unrecognized environment value: {self.environment}") from exc
+        return settings_class()
+
+
+settings: Settings = SettingsManager(environment=APP_ENVIRONMENT).settings
\ No newline at end of file
diff --git a/src/core/settings/base.py b/src/core/settings/base.py
new file mode 100644
index 0000000..6333e03
--- /dev/null
+++ b/src/core/settings/base.py
@@ -0,0 +1,61 @@
+from pydantic import BaseModel, Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+from shared.path import ENV_FILE_PATH, APP_ENVIRONMENT
+
+
+class ProjectSettings(BaseModel):
+    NAME: str
+    DESCRIPTION: str | None = None
+    VERSION: str = "1.0.0"
+    CODE: str
+    AUTHORS: str
+    LOGO_URL: str = "https://davidronihdz99.pythonanywhere.com/media/fotosPerfil/roni_3dqmEf6.jpg"
+
+
+class LogSettings(BaseModel):
+    DEBUG: bool = False
+    COLORIZE: bool = False
+    SERIALIZE: bool = False
+    ENQUEUE: bool = False
+
+
+class Settings(BaseSettings):
+    model_config = SettingsConfigDict(
+        env_file=ENV_FILE_PATH,
+        env_nested_delimiter="__",
+        case_sensitive=True,
+        extra="forbid"
+    )
+
+    # General settings
+    # ----------------------------------------------------------------
+
+    ENVIRONMENT: str = Field(
+        default=APP_ENVIRONMENT,
+        validate_default=True
+    )
+
+    SENTRY_DSN: str | None = None
+
+    # Project metadata
+    # ----------------------------------------------------------------
+
+    PROJECT: ProjectSettings = Field(
+        default=ProjectSettings(
+            NAME="Api",
+            DESCRIPTION="API implemented with FastAPI",
+            VERSION="1.0.0",
+            CODE="api-001",
+            AUTHORS="R2"
+        ),
+        validate_default=True
+    )
+
+    # Log settings
+    # ----------------------------------------------------------------
+
+    LOG: LogSettings = LogSettings(
+        DEBUG=False,
+        COLORIZE=False,
+        SERIALIZE=False,
+        ENQUEUE=False
+    )
+
diff --git a/src/core/settings/development.py b/src/core/settings/development.py
new file mode 100644
index 0000000..8d60e39
--- /dev/null
+++ b/src/core/settings/development.py
@@ -0,0 +1,4 @@
+from core.settings.base import Settings
+
+class DevelopmentSettings(Settings):
+    pass
\ No newline at end of file
diff --git a/src/core/settings/local.py b/src/core/settings/local.py
new file mode 100644
index 0000000..6ea5fc6
--- /dev/null
+++ b/src/core/settings/local.py
@@ -0,0 +1,14 @@
+from core.settings.base import Settings, LogSettings
+
+
+class LocalSettings(Settings):
+
+    # Log settings
+    # ----------------------------------------------------------------
+
+    LOG: LogSettings = LogSettings(
+        DEBUG=False,
+        COLORIZE=False,
+        SERIALIZE=False,
+        ENQUEUE=False
+    )
diff --git a/src/core/settings/production.py b/src/core/settings/production.py
new file mode 100644
index 0000000..7f895a8
--- /dev/null
+++ b/src/core/settings/production.py
@@ -0,0 +1,4 @@
+from core.settings.base import Settings
+
+class ProductionSettings(Settings):
+    pass
\ No newline at end of file
diff --git a/src/core/settings/staging.py b/src/core/settings/staging.py
new file mode 100644
index 0000000..7c85b11
--- /dev/null
+++ b/src/core/settings/staging.py
@@ -0,0 +1,4 @@
+from core.settings.base import Settings
+
+class StagingSettings(Settings):
+    pass
\ No newline at end of file
diff --git a/src/core/settings/testing.py b/src/core/settings/testing.py
new file mode 100644
index 0000000..bc78d1f
--- /dev/null
+++ b/src/core/settings/testing.py
@@ -0,0 +1,4 @@
+from core.settings.base import Settings
+
+class TestingSettings(Settings):
+    pass
\ No newline at end of file
diff --git a/src/main.py b/src/main.py
index 996a93f..f383741 100644
--- a/src/main.py
+++ b/src/main.py
@@ -1,3 +1,4 @@
+from core.settings import settings
 from fastapi import FastAPI
 from mangum import Mangum
 from fastapi.openapi.utils import get_openapi
diff --git a/src/shared/__init__.py b/src/shared/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/shared/environment.py b/src/shared/environment.py
new file mode 100644
index 0000000..1aef473
--- /dev/null
+++ b/src/shared/environment.py
@@ -0,0 +1,40 @@
+from enum import StrEnum
+
+class AppEnvironment(StrEnum):
+    LOCAL = "local", "local"
+    DEVELOPMENT = "development", "dev"
+    STAGING = "staging", "stg"
+    PRODUCTION = "production", "prod"
+    TESTING = "testing", "test"
+
+    def __new__(cls, value: str, suffix: str) -> "AppEnvironment":
+        obj = str.__new__(cls, value)
+        obj._value_ = value
+        obj._suffix = suffix  # type: ignore
+        return obj
+
+    @property
+    def suffix(self) -> str:
+        return self._suffix  # type: ignore
+
+    @property
+    def environment(self) -> str:
+        return self.value
+
+    def get_file_name(self) -> str:
+        return f".env.{self.suffix}"
+
+    @classmethod
+    def _is_valid_value(cls, value: str) -> bool:
+        return value in cls._value2member_map_
+
+    @classmethod
+    def _get_valid_values(cls) -> list[str]:
+        return [member.value for member in cls]
+
+    @classmethod
+    def check_value(cls, value: str) -> None:
+        if not cls._is_valid_value(value):
+            raise ValueError(
+                f"{value} is not a valid Environment value. Valid values are: {', '.join(cls._get_valid_values())}"
+            )
\ No newline at end of file
diff --git a/src/shared/path.py b/src/shared/path.py
new file mode 100644
index 0000000..3047723
--- /dev/null
+++ b/src/shared/path.py
@@ -0,0 +1,69 @@
+import os
+import sys
+from pathlib import Path
+from dotenv import load_dotenv
+from loguru import logger
+from .environment import AppEnvironment
+
+# Base project directory
+BASE_DIR = Path(__file__).resolve().parent.parent
+logger.debug(f"BASE_DIR resolved to: {BASE_DIR}")
+
+# Load extra paths
+# ----------------------------------------------------------------
+LIST_PATH_TO_ADD: list[str] = [
+    # Add additional paths here if you need them
+]
+
+if LIST_PATH_TO_ADD:
+    sys.path.extend(LIST_PATH_TO_ADD)
+    logger.info(f"Added to sys.path: {LIST_PATH_TO_ADD}")
+else:
+    logger.info("No extra paths to add to sys.path")
+
+
+# Load the base .env
+# ----------------------------------------------------------------
+ENVS_DIR = BASE_DIR.parent / ".envs"
+logger.debug(f"ENVS_DIR resolved to: {ENVS_DIR}")
+
+ENV_BASE_FILE_PATH = ENVS_DIR / ".env.base"
+logger.info(f"Loading base environment file from: {ENV_BASE_FILE_PATH}")
+if ENV_BASE_FILE_PATH.exists():
+    load_dotenv(ENV_BASE_FILE_PATH)
+    logger.success("Loaded .env.base successfully")
+else:
+    logger.warning(f".env.base not found at: {ENV_BASE_FILE_PATH}")
+
+
+# Read ENVIRONMENT (set via .env.base or the process environment)
+# ----------------------------------------------------------------
+try:
+    APP_ENVIRONMENT: str = os.environ["ENVIRONMENT"]
+except KeyError:
+    raise ValueError("ENVIRONMENT is not set")
+
+logger.info(f"Project environment: {APP_ENVIRONMENT}")
+
+
+# Validate the ENVIRONMENT value
+# ----------------------------------------------------------------
+try:
+    AppEnvironment.check_value(APP_ENVIRONMENT)
+    ENVIRONMENT_ENUM = AppEnvironment(APP_ENVIRONMENT)  # type: ignore
+    logger.success(f"Environment '{APP_ENVIRONMENT}' validated successfully")
+except ValueError as e:
+    logger.critical(f"Invalid ENVIRONMENT value: {APP_ENVIRONMENT} - {e}")
+    raise
+
+
+# Load the environment-specific env file
+# ----------------------------------------------------------------
+ENV_FILE_PATH = ENVS_DIR / ENVIRONMENT_ENUM.get_file_name()
+logger.info(f"Loading environment-specific file from: {ENV_FILE_PATH}")
+
+if ENV_FILE_PATH.exists():
+    load_dotenv(ENV_FILE_PATH)
+    logger.success(f"Loaded environment file for {APP_ENVIRONMENT} successfully")
successfully") +else: + logger.warning(f"Environment file not found: {ENV_FILE_PATH}") From e188480b4b1e13aacd3a4a81d0fd1b5482885738 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 13:26:04 -0600 Subject: [PATCH 05/36] feat: replace requirement.txt for poetry files --- poetry.lock | 1356 ++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 39 ++ requirements.txt | 35 -- 3 files changed, 1395 insertions(+), 35 deletions(-) create mode 100644 poetry.lock create mode 100644 pyproject.toml delete mode 100644 requirements.txt diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..9a599ed --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1356 @@ +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.9.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "certifi" +version = "2025.4.26" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, + {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "click" +version = "8.2.0" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "click-8.2.0-py3-none-any.whl", hash = "sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c"}, + {file = "click-8.2.0.tar.gz", hash = "sha256:f5452aeddd9988eefa20f90f05ab66f17fce1ee2a36907fd30b05bbb5953814d"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +markers = {main = "platform_system == \"Windows\" or sys_platform == \"win32\"", dev = "sys_platform == \"win32\""} + +[[package]] +name = "coverage" +version = "7.8.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, + {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f"}, + {file = "coverage-7.8.0-cp310-cp310-win32.whl", hash = "sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f"}, + {file = "coverage-7.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea"}, + {file = 
"coverage-7.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9"}, + {file = "coverage-7.8.0-cp311-cp311-win32.whl", hash = "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c"}, + {file = "coverage-7.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe"}, + {file = "coverage-7.8.0-cp312-cp312-win32.whl", hash = "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545"}, + {file = "coverage-7.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067"}, + {file = 
"coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3"}, + {file = "coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d"}, + {file = "coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883"}, + {file = "coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada"}, + {file = "coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd"}, + {file = 
"coverage-7.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899"}, + {file = "coverage-7.8.0-cp39-cp39-win32.whl", hash = "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f"}, + {file = "coverage-7.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3"}, + {file = "coverage-7.8.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd"}, + {file = "coverage-7.8.0-py3-none-any.whl", hash = "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7"}, + {file = "coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501"}, +] + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "coverage-badge" +version = "1.1.2" +description = "Generate coverage badges for Coverage.py." +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "coverage_badge-1.1.2-py2.py3-none-any.whl", hash = "sha256:d8413ce51c91043a1692b943616b450868cbeeb0ea6a0c54a32f8318c9c96ff7"}, + {file = "coverage_badge-1.1.2.tar.gz", hash = "sha256:fe7ed58a3b72dad85a553b64a99e963dea3847dcd0b8ddd2b38a00333618642c"}, +] + +[package.dependencies] +coverage = "*" +setuptools = "*" + +[[package]] +name = "distlib" +version = "0.3.9" +description = "Distribution utilities" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +description = "DNS toolkit" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "fastapi" +version = "0.115.12" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d"}, + {file = "fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.40.0,<0.47.0" +typing-extensions = ">=4.8.0" + +[package.extras] 
+all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "filelock" +version = "3.18.0" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, + {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] +typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] + +[[package]] +name = "greenlet" +version = "3.2.2" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" +files = [ + {file = "greenlet-3.2.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c49e9f7c6f625507ed83a7485366b46cbe325717c60837f7244fc99ba16ba9d6"}, + {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3cc1a3ed00ecfea8932477f729a9f616ad7347a5e55d50929efa50a86cb7be7"}, + {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c9896249fbef2c615853b890ee854f22c671560226c9221cfd27c995db97e5c"}, + {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7409796591d879425997a518138889d8d17e63ada7c99edc0d7a1c22007d4907"}, + {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7791dcb496ec53d60c7f1c78eaa156c21f402dda38542a00afc3e20cae0f480f"}, + {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8009ae46259e31bc73dc183e402f548e980c96f33a6ef58cc2e7865db012e13"}, + {file = "greenlet-3.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fd9fb7c941280e2c837b603850efc93c999ae58aae2b40765ed682a6907ebbc5"}, + {file = "greenlet-3.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:00cd814b8959b95a546e47e8d589610534cfb71f19802ea8a2ad99d95d702057"}, + {file = "greenlet-3.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:d0cb7d47199001de7658c213419358aa8937df767936506db0db7ce1a71f4a2f"}, + {file = "greenlet-3.2.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:dcb9cebbf3f62cb1e5afacae90761ccce0effb3adaa32339a0670fe7805d8068"}, + {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:bf3fc9145141250907730886b031681dfcc0de1c158f3cc51c092223c0f381ce"}, + {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:efcdfb9df109e8a3b475c016f60438fcd4be68cd13a365d42b35914cdab4bb2b"}, + {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd139e4943547ce3a56ef4b8b1b9479f9e40bb47e72cc906f0f66b9d0d5cab3"}, + {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71566302219b17ca354eb274dfd29b8da3c268e41b646f330e324e3967546a74"}, + {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3091bc45e6b0c73f225374fefa1536cd91b1e987377b12ef5b19129b07d93ebe"}, + {file = "greenlet-3.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:44671c29da26539a5f142257eaba5110f71887c24d40df3ac87f1117df589e0e"}, + {file = "greenlet-3.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c23ea227847c9dbe0b3910f5c0dd95658b607137614eb821e6cbaecd60d81cc6"}, + {file = "greenlet-3.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:0a16fb934fcabfdfacf21d79e6fed81809d8cd97bc1be9d9c89f0e4567143d7b"}, + {file = "greenlet-3.2.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:df4d1509efd4977e6a844ac96d8be0b9e5aa5d5c77aa27ca9f4d3f92d3fcf330"}, + {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da956d534a6d1b9841f95ad0f18ace637668f680b1339ca4dcfb2c1837880a0b"}, + {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c7b15fb9b88d9ee07e076f5a683027bc3befd5bb5d25954bb633c385d8b737e"}, + {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:752f0e79785e11180ebd2e726c8a88109ded3e2301d40abced2543aa5d164275"}, + {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ae572c996ae4b5e122331e12bbb971ea49c08cc7c232d1bd43150800a2d6c65"}, + {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02f5972ff02c9cf615357c17ab713737cccfd0eaf69b951084a9fd43f39833d3"}, + {file = "greenlet-3.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4fefc7aa68b34b9224490dfda2e70ccf2131368493add64b4ef2d372955c207e"}, + {file = "greenlet-3.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a31ead8411a027c2c4759113cf2bd473690517494f3d6e4bf67064589afcd3c5"}, + {file = "greenlet-3.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:b24c7844c0a0afc3ccbeb0b807adeefb7eff2b5599229ecedddcfeb0ef333bec"}, + {file = "greenlet-3.2.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59"}, + {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf"}, + {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325"}, + {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5"}, + {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825"}, + {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d"}, + {file = "greenlet-3.2.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf"}, + {file = "greenlet-3.2.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708"}, + {file = "greenlet-3.2.2-cp313-cp313-win_amd64.whl", hash = "sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421"}, + {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418"}, + {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4"}, + {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763"}, + {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b"}, + {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207"}, + {file = "greenlet-3.2.2-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8"}, + {file = "greenlet-3.2.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51"}, + {file = "greenlet-3.2.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240"}, + {file = "greenlet-3.2.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:1e4747712c4365ef6765708f948acc9c10350719ca0545e362c24ab973017370"}, + {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:782743700ab75716650b5238a4759f840bb2dcf7bff56917e9ffdf9f1f23ec59"}, + {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:354f67445f5bed6604e493a06a9a49ad65675d3d03477d38a4db4a427e9aad0e"}, + {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3aeca9848d08ce5eb653cf16e15bb25beeab36e53eb71cc32569f5f3afb2a3aa"}, + {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cb8553ee954536500d88a1a2f58fcb867e45125e600e80f586ade399b3f8819"}, + {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1592a615b598643dbfd566bac8467f06c8c8ab6e56f069e573832ed1d5d528cc"}, + {file = "greenlet-3.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1f72667cc341c95184f1c68f957cb2d4fc31eef81646e8e59358a10ce6689457"}, + {file = "greenlet-3.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a8fa80665b1a29faf76800173ff5325095f3e66a78e62999929809907aca5659"}, + {file = "greenlet-3.2.2-cp39-cp39-win32.whl", hash = "sha256:6629311595e3fe7304039c67f00d145cd1d38cf723bb5b99cc987b23c1433d61"}, + {file = "greenlet-3.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:eeb27bece45c0c2a5842ac4c5a1b5c2ceaefe5711078eed4e8043159fa05c834"}, + {file = "greenlet-3.2.2.tar.gz", hash = "sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = 
["objgraph", "psutil"] + +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.16" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.28.1" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" + +[package.extras] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "identify" +version = "2.6.10" +description = "File identification library for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "identify-2.6.10-py2.py3-none-any.whl", hash = "sha256:5f34248f54136beed1a7ba6a6b5c4b6cf21ff495aac7c359e1ef831ae3b8ab25"}, + {file = "identify-2.6.10.tar.gz", hash = "sha256:45e92fd704f3da71cc3880036633f48b4b7265fd4de2b57627cb157216eb7eb8"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "loguru" +version = "0.7.3" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = "<4.0,>=3.5" +groups = ["main"] +files = [ + {file = 
"loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, + {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] + +[[package]] +name = "mangum" +version = "0.19.0" +description = "AWS Lambda support for ASGI applications" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "mangum-0.19.0-py3-none-any.whl", hash = "sha256:e500b35f495d5e68ac98bc97334896d6101523f2ee2c57ba6a61893b65266e59"}, + {file = "mangum-0.19.0.tar.gz", hash = "sha256:e388e7c491b7b67970f8234e46fd4a7b21ff87785848f418de08148f71cf0bd6"}, +] + +[package.dependencies] +typing-extensions = "*" + +[[package]] +name = "mypy" +version = "1.15.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, +] + +[package.dependencies] +mypy_extensions = ">=1.0.0" +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.8" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, + {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "4.2.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd"}, + {file = "pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "psycopg2-binary" +version = "2.9.10" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, + {file = 
"psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, +] + +[[package]] +name = "pydantic" +version = "2.11.4" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb"}, + {file = "pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = 
"pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = 
"pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = 
"pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = 
"pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.9.1" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef"}, + {file = "pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" +typing-inspection = ">=0.4.0" + +[package.extras] +aws-secrets-manager = ["boto3 (>=1.35.0)", "boto3-stubs[secretsmanager]"] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +gcp-secret-manager = ["google-cloud-secret-manager (>=2.23.1)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "pymongo" +version = "4.12.1" +description = "Python driver for MongoDB " +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pymongo-4.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1897c64a11e19aae4e85126441f319c3bf3fb7b60d122f51528cab2b95caaad3"}, + {file = "pymongo-4.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0ba42b4f2046595f64c492ef73c92ac78c502db59024c9be0113d0a33ed60c15"}, + {file = "pymongo-4.12.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:777800dc731ea7713635a44dcfb93d88eb2be4b31883feb3238afce5d32ef6d5"}, + {file = "pymongo-4.12.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:670bb6c9163f2623d8e3c42ff029dc89d2e8bf41feeeea4c11a8a21f9a9b0df7"}, + {file = "pymongo-4.12.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c9d447042433b3574df8d7d1b3bb9b1f1277d019534b29a39fd92670ab72d4e"}, + {file = "pymongo-4.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0c8dbb6a10753cbbbcb3e8ab723f87cb520de855e667a32dd2889e73323e82f"}, + {file = "pymongo-4.12.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bd0cc14726baa07081abe8ecda309a1049992b84b37d3c50c5fbd7f935b8925"}, + {file = "pymongo-4.12.1-cp310-cp310-win32.whl", hash = "sha256:e75c42dedc5f59a985976f8bc2e2f0b90c44ce40fa9a2e99b147ec7e64c735a2"}, + {file = "pymongo-4.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:13953f8bbdbfee00530ac9f5c09a2474b81cd76648925012b5cfd2727293bd17"}, + {file = "pymongo-4.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72b45f7e72b2db4cd7abd40c38c57ed4105d7be0d4dce85a6b77a730e8a613f7"}, + {file = "pymongo-4.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0f3104bd97642f508f70a83af256b9d88e9a7319e8048c27f1c8ca6572ad7b7f"}, + {file = "pymongo-4.12.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:730a19d96ef902ee8d8f9e84738142d355096becb677ec82489dc9ad8e54d8e9"}, + {file = "pymongo-4.12.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40dd2b771387e3ac297399b7b4d9a4bfffbaabba6f17c79996e8462cde3e7c30"}, + {file = 
"pymongo-4.12.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5e5968da22f5534fc678dad58d3e9f7305bf53abc94968c800335b1f511ab8b"}, + {file = "pymongo-4.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc5fad32274a1de9dfe13d06da169cf2a405a98f049595aafda13af02921853e"}, + {file = "pymongo-4.12.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:808168f5f4398c0057d15f21b1453de323157447915179c7afedf4334d2a1815"}, + {file = "pymongo-4.12.1-cp311-cp311-win32.whl", hash = "sha256:ee69dba3e023e0fa1b547b4f7a41182618f2e612df09ff954bba32de0111a596"}, + {file = "pymongo-4.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:40e2812e5b546f7ceef4abf82c31d4790d9878f2a0d43a67a2645de3eb06bdca"}, + {file = "pymongo-4.12.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a7b771aa2f0854ddf7861e8ce2365f29df9159393543d047e43d8475bc4b8813"}, + {file = "pymongo-4.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34fd8681b6fa6e1025dd1000004f6b81cbf1961f145b8c58bd15e3957976068d"}, + {file = "pymongo-4.12.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981e19b8f1040247dee5f7879e45f640f7e21a4d87eabb19283ce5a2927dd2e7"}, + {file = "pymongo-4.12.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9a487dc1fe92736987a156325d3d9c66cbde6eac658b2875f5f222b6d82edca"}, + {file = "pymongo-4.12.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1525051c13984365c4a9b88ee2d63009fae277921bc89a0d323b52c51f91cbac"}, + {file = "pymongo-4.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ad689e0e4f364809084f9e5888b2dcd6f0431b682a1c68f3fdf241e20e14475"}, + {file = "pymongo-4.12.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f9b18abca210c2917041ab2a380c12f6ddd2810844f1d64afb39caf8a15425e"}, + {file = "pymongo-4.12.1-cp312-cp312-win32.whl", hash = "sha256:d9d90fec041c6d695a639c26ca83577aa74383f5e3744fd7931537b208d5a1b5"}, + {file = "pymongo-4.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:d004b13e4f03d73a3ad38505ba84b61a2c8ba0a304f02fe1b27bfc986c244192"}, + {file = "pymongo-4.12.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:90de2b060d69c22658ada162a5380a0f88cb8c0149023241b9e379732bd36152"}, + {file = "pymongo-4.12.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:edf4e05331ac875d3b27b4654b74d81e44607af4aa7d6bcd4a31801ca164e6fd"}, + {file = "pymongo-4.12.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa7a817c9afb7b8775d98c469ddb3fe9c17daf53225394c1a74893cf45d3ade9"}, + {file = "pymongo-4.12.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9d142ca531694e9324b3c9ba86c0e905c5f857599c4018a386c4dc02ca490fa"}, + {file = "pymongo-4.12.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5d4c0461f5cd84d9fe87d5a84b1bc16371c4dd64d56dcfe5e69b15c0545a5ac"}, + {file = "pymongo-4.12.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43afd2f39182731ac9fb81bbc9439d539e4bd2eda72cdee829d2fa906a1c4d37"}, + {file = "pymongo-4.12.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:827ac668c003da7b175b8e5f521850e2c182b4638a3dec96d97f0866d5508a1e"}, + {file = "pymongo-4.12.1-cp313-cp313-win32.whl", hash = "sha256:7c2269b37f034124a245eaeb34ce031cee64610437bd597d4a883304babda3cd"}, + 
{file = "pymongo-4.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:3b28ecd1305b89089be14f137ffbdf98a3b9f5c8dbbb2be4dec084f2813fbd5f"}, + {file = "pymongo-4.12.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f27b22a8215caff68bdf46b5b61ccd843a68334f2aa4658e8d5ecb5d3fbebb3b"}, + {file = "pymongo-4.12.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e9d23a3c290cf7409515466a7f11069b70e38ea2b786bbd7437bdc766c9e176"}, + {file = "pymongo-4.12.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efeb430f7ca8649a6544a50caefead343d1fd096d04b6b6a002c6ce81148a85c"}, + {file = "pymongo-4.12.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a34e4a08bbcff56fdee86846afbc9ce751de95706ca189463e01bf5de3dd9927"}, + {file = "pymongo-4.12.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b063344e0282537f05dbb11147591cbf58fc09211e24fc374749e343f880910a"}, + {file = "pymongo-4.12.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3f7941e01b3e5d4bfb3b4711425e809df8c471b92d1da8d6fab92c7e334a4cb"}, + {file = "pymongo-4.12.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b41235014031739f32be37ff13992f51091dae9a5189d3bcc22a5bf81fd90dae"}, + {file = "pymongo-4.12.1-cp313-cp313t-win32.whl", hash = "sha256:9a1f07fe83a8a34651257179bd38d0f87bd9d90577fcca23364145c5e8ba1bc0"}, + {file = "pymongo-4.12.1-cp313-cp313t-win_amd64.whl", hash = "sha256:46d86cf91ee9609d0713242a1d99fa9e9c60b4315e1a067b9a9e769bedae629d"}, + {file = "pymongo-4.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0517c363f31f770cfa450df7d52a73340168bde71fac423b2b3eea0336468f3e"}, + {file = "pymongo-4.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:07c6e9ade249fa811fa344467889f61221eb533b8465de7e1c467cca03b38a1e"}, + {file = "pymongo-4.12.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e95211e335a2a762fd9dfb084579e6ebaec59cd2c6848d7a898af3342ef63f06"}, + {file = "pymongo-4.12.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4285d7ffedc7adc0531949e66d5f884801c522e7a30cdfcf80e2727b9dbee8c"}, + {file = "pymongo-4.12.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71edcd51265e69d73d10f032164983701d3efa768c946a2736ec4d40793bf63e"}, + {file = "pymongo-4.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3e90b2114e876c0a2864f729f32b025114920c6f00898a6d5ef41dba98d8690"}, + {file = "pymongo-4.12.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5db1a20d0223af2bbbbfd5f8b7f1ff0f08628c245096bad12ddeee86db226925"}, + {file = "pymongo-4.12.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:457eed26aa307c8d92edaf9be2ba9551b54af72bc7cd555706644374f155331c"}, + {file = "pymongo-4.12.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27806c4310203a19af868f4aedd09615ffa613d4e13570954df10193b29f7fd3"}, + {file = "pymongo-4.12.1-cp39-cp39-win32.whl", hash = "sha256:3dc3c26f52214119b86decdd8ef4595610cfbff67401f47be14eb433afb1d838"}, + {file = "pymongo-4.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:7af466b5dc2c6dcdce78677b4d60886c48c70810c3ebe355f210a0f9ededb156"}, + {file = "pymongo-4.12.1.tar.gz", hash = "sha256:8921bac7f98cccb593d76c4d8eaa1447e7d537ba9a2a202973e92372a05bd1eb"}, +] + +[package.dependencies] +dnspython = ">=1.16.0,<3.0.0" + +[package.extras] +aws = 
["pymongo-auth-aws (>=1.1.0,<2.0.0)"] +docs = ["furo (==2024.8.6)", "readthedocs-sphinx-search (>=0.3,<1.0)", "sphinx (>=5.3,<9)", "sphinx-autobuild (>=2020.9.1)", "sphinx-rtd-theme (>=2,<4)", "sphinxcontrib-shellcheck (>=1,<2)"] +encryption = ["certifi ; os_name == \"nt\" or sys_platform == \"darwin\"", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.13.0,<2.0.0)"] +gssapi = ["pykerberos ; os_name != \"nt\"", "winkerberos (>=0.5.0) ; os_name == \"nt\""] +ocsp = ["certifi ; os_name == \"nt\" or sys_platform == \"darwin\"", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] +snappy = ["python-snappy"] +test = ["pytest (>=8.2)", "pytest-asyncio (>=0.24.0)"] +zstd = ["zstandard"] + +[[package]] +name = "pytest" +version = "8.3.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "6.1.1" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"}, + {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"}, +] + +[package.dependencies] +coverage = {version = ">=7.5", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "python-dotenv" +version = "1.1.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d"}, + {file = "python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash 
= "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "redis" +version = "6.0.0" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "redis-6.0.0-py3-none-any.whl", hash = "sha256:a2e040aee2cdd947be1fa3a32e35a956cd839cc4c1dbbe4b2cdee5b9623fd27c"}, + {file = "redis-6.0.0.tar.gz", hash = "sha256:5446780d2425b787ed89c91ddbfa1be6d32370a636c8fdb687f11b1c26c1fa88"}, +] + +[package.extras] +hiredis = ["hiredis (>=3.0.0)"] +jwt = ["pyjwt (>=2.9.0,<2.10.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (>=20.0.1)", "requests (>=2.31.0)"] + +[[package]] +name = "sentry-sdk" +version = "2.27.0" +description = "Python client for Sentry (https://sentry.io)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "sentry_sdk-2.27.0-py2.py3-none-any.whl", hash = "sha256:c58935bfff8af6a0856d37e8adebdbc7b3281c2b632ec823ef03cd108d216ff0"}, + {file = "sentry_sdk-2.27.0.tar.gz", hash = "sha256:90f4f883f9eff294aff59af3d58c2d1b64e3927b28d5ada2b9b41f5aeda47daf"}, +] + +[package.dependencies] +certifi = "*" +urllib3 = ">=1.26.11" + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +anthropic = ["anthropic (>=0.16)"] +arq = ["arq (>=0.23)"] +asyncpg = ["asyncpg (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +celery-redbeat = ["celery-redbeat (>=2)"] +chalice = ["chalice (>=1.16.0)"] +clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] +http2 = ["httpcore[http2] (==1.*)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +huggingface-hub = ["huggingface_hub (>=0.22)"] +langchain = ["langchain (>=0.0.210)"] +launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"] +litestar = ["litestar (>=2.0.0)"] +loguru = ["loguru (>=0.5)"] +openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] +openfeature = ["openfeature-sdk (>=0.7.1)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro"] +pure-eval = ["asttokens", "executing", "pure_eval"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] +statsig = ["statsig (>=0.55.3)"] +tornado = ["tornado (>=6)"] +unleash = ["UnleashClient (>=6.0.1)"] + +[[package]] +name = "setuptools" +version = "80.4.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "setuptools-80.4.0-py3-none-any.whl", hash = "sha256:6cdc8cb9a7d590b237dbe4493614a9b75d0559b888047c1f67d49ba50fc3edb2"}, + {file = "setuptools-80.4.0.tar.gz", hash = "sha256:5a78f61820bc088c8e4add52932ae6b8cf423da2aff268c23f813cfbb13b4006"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli 
(>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.40" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "SQLAlchemy-2.0.40-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win32.whl", hash = "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win_amd64.whl", hash = "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_aarch64.whl", hash 
= "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win32.whl", hash = "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win_amd64.whl", hash = "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win32.whl", hash = "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win_amd64.whl", hash = "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win32.whl", hash = "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win_amd64.whl", hash = "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win32.whl", hash = "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win_amd64.whl", hash = "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win32.whl", hash = "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win_amd64.whl", hash = "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win32.whl", hash = "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win_amd64.whl", hash = "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870"}, + {file = "sqlalchemy-2.0.40-py3-none-any.whl", hash = "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a"}, + {file = "sqlalchemy-2.0.40.tar.gz", hash = "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00"}, +] + +[package.dependencies] +greenlet = {version = ">=1", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == 
\"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "starlette" +version = "0.46.2" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35"}, + {file = "starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5"}, +] + +[package.dependencies] +anyio = ">=3.6.2,<5" + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] + +[[package]] +name = "typing-extensions" +version = "4.13.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "urllib3" +version = "2.4.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.34.2" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403"}, + {file = "uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "virtualenv" +version = "20.31.2" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11"}, + {file = "virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] + +[[package]] +name = "win32-setctime" +version = "1.2.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +groups = ["main"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, + {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, +] + +[package.extras] +dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.13,<4.0" +content-hash = "831b1156c4cfe365136aa34c8b4b67dbed2c6d37f8ed95eda398d38ebd9520a1" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 
0000000..31ba170 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,39 @@ +[project] +name = "aws-lambda-fastapi" +version = "0.1.0" +description = "" +authors = [ + {name = "ronihdzz",email = "ronaldo.runing_@hotmail.com"} +] +readme = "README.md" +requires-python = ">=3.13,<4.0" +dependencies = [ + "fastapi (>=0.115.12,<0.116.0)", + "sqlalchemy (>=2.0.40,<3.0.0)", + "psycopg2-binary (>=2.9.10,<3.0.0)", + "pydantic-settings (>=2.9.1,<3.0.0)", + "pymongo (>=4.12.1,<5.0.0)", + "redis (>=6.0.0,<7.0.0)", + "pydantic (>=2.11.4,<3.0.0)", + "uvicorn (>=0.34.2,<0.35.0)", + "sentry-sdk (>=2.27.0,<3.0.0)", + "loguru (>=0.7.3,<0.8.0)", + "mangum (>=0.19.0,<0.20.0)", + "httpx (>=0.28.1,<0.29.0)", +] + + +[build-system] +requires = ["poetry-core>=2.0.0,<3.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry.group.dev.dependencies] +pytest = "^8.3.5" +pytest-cov = "^6.1.1" +coverage = "^7.8.0" +coverage-badge = "^1.1.2" +pre-commit = "^4.2.0" +mypy = "^1.15.0" + +[tool.poetry] +package-mode = false diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 245ba3a..0000000 --- a/requirements.txt +++ /dev/null @@ -1,35 +0,0 @@ -annotated-types==0.7.0 -anyio==4.9.0 -certifi==2025.1.31 -cfgv==3.4.0 -charset-normalizer==3.4.1 -click==8.1.8 -distlib==0.3.9 -fastapi==0.115.11 -filelock==3.18.0 -h11==0.14.0 -httpcore==1.0.7 -httpx==0.28.1 -identify==2.6.9 -idna==3.10 -iniconfig==2.0.0 -loguru==0.7.3 -mangum==0.19.0 -mypy==1.15.0 -mypy-extensions==1.0.0 -nodeenv==1.9.1 -packaging==24.2 -platformdirs==4.3.7 -pluggy==1.5.0 -pre_commit==4.2.0 -pydantic==2.10.6 -pydantic_core==2.27.2 -pytest==8.3.5 -PyYAML==6.0.2 -requests==2.32.3 -sniffio==1.3.1 -starlette==0.46.1 -typing_extensions==4.12.2 -urllib3==2.3.0 -uvicorn==0.34.0 -virtualenv==20.29.3 From c8f4b0f4720bdbf35a1d1637f676b3139be1dad3 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 13:49:28 -0600 Subject: [PATCH 06/36] feat: add database connections tests --- .vscode/launch.json | 2 +- src/core/settings/__init__.py | 2 +- src/core/settings/base.py | 9 ++++++++- src/tests/test_database_connections.py | 28 ++++++++++++++++++++++++++ 4 files changed, 38 insertions(+), 3 deletions(-) create mode 100644 src/tests/test_database_connections.py diff --git a/.vscode/launch.json b/.vscode/launch.json index 6a7dda6..1b2e35e 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -12,7 +12,7 @@ "args": [ "main:app", "--reload", - "--port=9999" + "--port=9000" ], "jinja": true, "cwd": "${workspaceFolder}/src", diff --git a/src/core/settings/__init__.py b/src/core/settings/__init__.py index 6f5b2e7..4204d45 100644 --- a/src/core/settings/__init__.py +++ b/src/core/settings/__init__.py @@ -77,7 +77,7 @@ def _get_settings(self) -> Settings: settings_class: type[Settings] = self.SETTINGS_CLASS_DICT[self.environment] except KeyError as exc: raise ValueError(f"Unrecognized environment value: {self.environment}") from exc - return settings_class() + return settings_class() # type: ignore settings: Settings = SettingsManager(environment=APP_ENVIRONMENT).settings \ No newline at end of file diff --git a/src/core/settings/base.py b/src/core/settings/base.py index 6333e03..dd0e05b 100644 --- a/src/core/settings/base.py +++ b/src/core/settings/base.py @@ -1,6 +1,6 @@ from pydantic import BaseModel from pydantic_settings import BaseSettings, SettingsConfigDict -from pydantic import Field +from pydantic import Field, PostgresDsn, RedisDsn, MongoDsn from shared.path import ENV_FILE_PATH, APP_ENVIRONMENT class ProjectSettings(BaseModel): 
@@ -59,3 +59,10 @@ class Settings(BaseSettings): ENQUEUE=False ) + # Database settings + # ---------------------------------------------------------------- + + POSTGRESQL_URL: PostgresDsn + MONGO_URL: MongoDsn + REDIS_URL: RedisDsn + diff --git a/src/tests/test_database_connections.py b/src/tests/test_database_connections.py new file mode 100644 index 0000000..d237059 --- /dev/null +++ b/src/tests/test_database_connections.py @@ -0,0 +1,28 @@ +from unittest import TestCase +from sqlalchemy import create_engine, text +from pymongo import MongoClient +from redis import Redis +import os +from core.settings import settings + +class TestDatabaseConnections(TestCase): + + def test_postgresql_connection(self) -> None: + engine = create_engine(settings.POSTGRESQL_URL.unicode_string()) + self.assertIsNotNone(engine) + with engine.connect() as connection: + result = connection.execute(text("SELECT 1;")) + self.assertIsNotNone(result) + self.assertTrue(result.fetchone()[0] == 1) # type: ignore + engine.dispose() + + def test_mongodb_connection(self) -> None: + client = MongoClient(settings.MONGO_URL.unicode_string()) # type: ignore + server_info = client.server_info() + self.assertIn("version", server_info) + client.close() + + def test_redis_connection(self) -> None: + redis = Redis.from_url(settings.REDIS_URL.unicode_string()) + self.assertTrue(redis.ping()) + redis.close() \ No newline at end of file From 5aa39a65b21a00272f051b736d832e0a3eba66b7 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 14:02:10 -0600 Subject: [PATCH 07/36] feat: add environment TESTING_DOCKER --- .envs/.env.testing.docker | 3 --- .gitignore | 2 +- docker-compose.yml | 7 ++++--- src/core/settings/__init__.py | 1 + src/shared/environment.py | 1 + 5 files changed, 7 insertions(+), 7 deletions(-) delete mode 100644 .envs/.env.testing.docker diff --git a/.envs/.env.testing.docker b/.envs/.env.testing.docker deleted file mode 100644 index a56092e..0000000 --- a/.envs/.env.testing.docker +++ /dev/null @@ -1,3 +0,0 @@ -POSTGRESQL_URL=postgresql://test:test@db-postgres:5432/test_db -MONGO_URL=mongodb://db-mongodb:27017/test_db -REDIS_URL=redis://db-redis:6379 \ No newline at end of file diff --git a/.gitignore b/.gitignore index e3cb4d5..6d5deb5 100644 --- a/.gitignore +++ b/.gitignore @@ -176,5 +176,5 @@ cython_debug/ # Not ignored files -!.envs/.env.testing.docker +!.envs/.env.test.docker !.envs/.env.test \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 121171a..bd5501a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -23,11 +23,12 @@ services: context: . 
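       # NOTE: inside the compose network the databases are reached by service
       # name (db-postgres, db-mongodb, db-redis) rather than localhost; the
       # hunk below switches the Mongo and Redis URLs accordingly.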
dockerfile: docker_images/testing/Dockerfile.testing env_file: - - ./.envs/.env.testing.docker + - ./.envs/.env.test.docker environment: + ENVIRONMENT: testing_docker POSTGRESQL_URL: postgresql://test:test@db-postgres:5432/test_db - MONGO_URL: mongodb://localhost:27017/test_db - REDIS_URL: redis://localhost:6379 + MONGO_URL: mongodb://db-mongodb:27017/test_db + REDIS_URL: redis://db-redis:6379 depends_on: - db-postgres - db-mongodb diff --git a/src/core/settings/__init__.py b/src/core/settings/__init__.py index 4204d45..8efec15 100644 --- a/src/core/settings/__init__.py +++ b/src/core/settings/__init__.py @@ -23,6 +23,7 @@ class SettingsManager: AppEnvironment.STAGING.value: StagingSettings, AppEnvironment.PRODUCTION.value: ProductionSettings, AppEnvironment.TESTING.value: TestingSettings, + AppEnvironment.TESTING_DOCKER.value: TestingSettings, } def __init__(self, environment: str): diff --git a/src/shared/environment.py b/src/shared/environment.py index 1aef473..74dc464 100644 --- a/src/shared/environment.py +++ b/src/shared/environment.py @@ -6,6 +6,7 @@ class AppEnvironment(StrEnum): STAGING = "staging", "stg" PRODUCTION = "production", "prod" TESTING = "testing", "test" + TESTING_DOCKER = "testing_docker", "test.docker" def __new__(cls, value: str, suffix: str) -> "AppEnvironment": obj = str.__new__(cls, value) From afbed872e124f59eca14d9912277cfe68bd7bc3b Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 14:46:20 -0600 Subject: [PATCH 08/36] feat: add package pytz --- poetry.lock | 14 +++++++++++++- pyproject.toml | 1 + 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 9a599ed..0c518cc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -965,6 +965,18 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pytz" +version = "2025.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, +] + [[package]] name = "pyyaml" version = "6.0.2" @@ -1353,4 +1365,4 @@ dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] [metadata] lock-version = "2.1" python-versions = ">=3.13,<4.0" -content-hash = "831b1156c4cfe365136aa34c8b4b67dbed2c6d37f8ed95eda398d38ebd9520a1" +content-hash = "658762cdceef3aedc229940faac0dca324d3c265dbfa442c2e8d0028ffbdafcd" diff --git a/pyproject.toml b/pyproject.toml index 31ba170..3ab504d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,7 @@ dependencies = [ "loguru (>=0.7.3,<0.8.0)", "mangum (>=0.19.0,<0.20.0)", "httpx (>=0.28.1,<0.29.0)", + "pytz (>=2025.2,<2026.0)", ] From 10baba183b26cdef4797f076114de67e717d6ea7 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 14:49:19 -0600 Subject: [PATCH 09/36] feat: add function get_app_current_time --- src/core/settings/base.py | 2 ++ src/shared/utils_dates.py | 9 +++++++++ 2 files changed, 11 insertions(+) create mode 100644 src/shared/utils_dates.py diff --git a/src/core/settings/base.py b/src/core/settings/base.py index dd0e05b..f131979 100644 --- a/src/core/settings/base.py +++ b/src/core/settings/base.py @@ -35,6 +35,8 @@ class Settings(BaseSettings): SENTRY_DSN: str | None = None + TIME_ZONE: str = "America/Mexico_City" + # Project metadata # 
----------------------------------------------------------------
+
diff --git a/src/shared/utils_dates.py b/src/shared/utils_dates.py
new file mode 100644
index 0000000..0dcecce
--- /dev/null
+++ b/src/shared/utils_dates.py
@@ -0,0 +1,9 @@
+import datetime
+from pytz import timezone
+from core.settings import settings
+
+
+
+def get_app_current_time(tz: str = settings.TIME_ZONE) -> datetime.datetime:
+    return datetime.datetime.now(timezone(tz))
+

From be10d86db4f80e4f850f5d8e22433afc3ffbaecb Mon Sep 17 00:00:00 2001
From: ronihdzz
Date: Sun, 11 May 2025 14:52:33 -0600
Subject: [PATCH 10/36] feat: add mongo settings with use example

---
 src/db/mongo/__init__.py                |  4 ++
 src/db/mongo/base.py                    | 54 +++++++++++++++++++++++++
 src/db/mongo/connection.py              | 43 ++++++++++++++++++++
 src/db/mongo/models/__init__.py         |  0
 src/db/mongo/models/public/__init__.py  | 15 +++++++
 src/db/mongo/models/public/books.py     | 11 +++++
 src/db/mongo/models/public/constants.py |  6 +++
 src/db/mongo/models/public/schemas.py   |  8 ++++
 8 files changed, 141 insertions(+)
 create mode 100644 src/db/mongo/__init__.py
 create mode 100644 src/db/mongo/base.py
 create mode 100644 src/db/mongo/connection.py
 create mode 100644 src/db/mongo/models/__init__.py
 create mode 100644 src/db/mongo/models/public/__init__.py
 create mode 100644 src/db/mongo/models/public/books.py
 create mode 100644 src/db/mongo/models/public/constants.py
 create mode 100644 src/db/mongo/models/public/schemas.py

diff --git a/src/db/mongo/__init__.py b/src/db/mongo/__init__.py
new file mode 100644
index 0000000..a18c5ef
--- /dev/null
+++ b/src/db/mongo/__init__.py
@@ -0,0 +1,4 @@
+from .base import BaseMongoDocument, MongoAbstractRepository
+from .connection import MongoDBConnection
+
+__all__ = ["MongoDBConnection", "BaseMongoDocument", "MongoAbstractRepository"]
diff --git a/src/db/mongo/base.py b/src/db/mongo/base.py
new file mode 100644
index 0000000..6975ffa
--- /dev/null
+++ b/src/db/mongo/base.py
@@ -0,0 +1,54 @@
+import uuid
+from abc import ABC
+from datetime import datetime
+from uuid import UUID
+
+from pydantic import BaseModel, Field
+
+from shared.utils_dates import get_app_current_time
+
+from .connection import MongoDBConnection
+
+
+def default_mongodb_id():
+    return uuid.uuid4()
+
+
+def default_mongodb_created_at():
+    return get_app_current_time()
+
+
+class BaseMongoDocument(BaseModel):
+    id: UUID = Field(default_factory=default_mongodb_id, alias="_id")
+    created_at: datetime = Field(default_factory=default_mongodb_created_at)
+    updated_at: datetime = Field(default_factory=default_mongodb_created_at)
+    deleted_at: datetime | None = Field(default=None)
+
+    class Config:
+        allow_population_by_field_name = False
+        json_encoders = {
+            UUID: lambda v: str(v),
+        }
+
+
+class MongoAbstractRepository(ABC):
+    collection_name: str
+    document_model: type[BaseMongoDocument]
+
+    def __init__(self):
+        self._validate_attributes()
+        self._init_collection()
+
+    def _init_collection(self):
+        self.collection = MongoDBConnection.get_collection(self.collection_name)
+
+    def _validate_attributes(self):
+        if not self.collection_name:
+            raise ValueError("Collection name is required")
+        if not self.document_model:
+            raise ValueError("Document model is required")
+
+    def add(self, data: BaseMongoDocument) -> None:
+        if not isinstance(data, self.document_model):
+            raise TypeError(f"Expected {self.document_model}, got {type(data)}")
+        self.collection.insert_one(data.model_dump(mode="json"))
diff --git a/src/db/mongo/connection.py b/src/db/mongo/connection.py
new file mode 100644
index 0000000..85162a0
--- /dev/null
+++ b/src/db/mongo/connection.py
@@ -0,0 +1,43 @@
+import certifi
+from pymongo import MongoClient
+
+from core.settings import settings
+from shared.environment import AppEnvironment
+
+
+class MongoDBConnection:
+    _client = None
+    _db = None
+
+    @staticmethod
+    def get_db(mongo_url=None, force_update=False):  # noqa: FBT002
+        """
+        Returns the single instance of the database.
+        Parameters:
+        - mongo_url: Connection URL including the database.
+        - force_update: If True, forces the creation of a new connection.
+        """
+
+        if mongo_url is None:
+            # settings.MONGO_URL is a pydantic MongoDsn; pymongo needs a plain str
+            mongo_url = settings.MONGO_URL.unicode_string()
+        if MongoDBConnection._db is None or force_update:
+            # Create MongoDB client
+            MongoDBConnection._client = MongoDBConnection.get_mongo_client(mongo_url)
+            # Get the database from the URL
+            MongoDBConnection._db = MongoDBConnection._client.get_database()
+        return MongoDBConnection._db
+
+    @staticmethod
+    def get_collection(collection_name: str):
+        db = MongoDBConnection.get_db()
+        return db[collection_name]
+
+    @staticmethod
+    def get_mongo_client(mongo_url: str):
+        if settings.ENVIRONMENT != AppEnvironment.LOCAL:
+            ca = certifi.where()
+            client = MongoClient(mongo_url, tlsCAFile=ca)
+        else:
+            client = MongoClient(mongo_url)
+        return client
diff --git a/src/db/mongo/models/__init__.py b/src/db/mongo/models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/db/mongo/models/public/__init__.py b/src/db/mongo/models/public/__init__.py
new file mode 100644
index 0000000..1f9dab5
--- /dev/null
+++ b/src/db/mongo/models/public/__init__.py
@@ -0,0 +1,15 @@
+from .books import (
+    BookMongoRepository,
+)
+from .schemas import (
+    BookDocument,
+)
+from .constants import (
+    BookType,
+)
+
+__all__ = [
+    "BookMongoRepository",
+    "BookDocument",
+    "BookType",
+]
diff --git a/src/db/mongo/models/public/books.py b/src/db/mongo/models/public/books.py
new file mode 100644
index 0000000..7b9d223
--- /dev/null
+++ b/src/db/mongo/models/public/books.py
@@ -0,0 +1,11 @@
+from db.mongo.base import MongoAbstractRepository
+
+from .schemas import (
+    BookDocument,
+)
+
+
+class BookMongoRepository(MongoAbstractRepository):
+    collection_name = "books"
+    document_model = BookDocument
+
diff --git a/src/db/mongo/models/public/constants.py b/src/db/mongo/models/public/constants.py
new file mode 100644
index 0000000..f755797
--- /dev/null
+++ b/src/db/mongo/models/public/constants.py
@@ -0,0 +1,6 @@
+from enum import StrEnum
+
+class BookType(StrEnum):
+    ONLINE = "online"
+    FISICAL = "fisical"
+    BOTH = "both"
\ No newline at end of file
diff --git a/src/db/mongo/models/public/schemas.py b/src/db/mongo/models/public/schemas.py
new file mode 100644
index 0000000..a5990ba
--- /dev/null
+++ b/src/db/mongo/models/public/schemas.py
@@ -0,0 +1,8 @@
+from db.mongo import BaseMongoDocument
+from .constants import BookType
+
+class BookDocument(BaseMongoDocument):
+    title: str
+    author: str
+    year: int
+    type: BookType
\ No newline at end of file

From 0dcd5429f0cf4558a01d578ef0b44517e645a387 Mon Sep 17 00:00:00 2001
From: ronihdzz
Date: Sun, 11 May 2025 14:53:56 -0600
Subject: [PATCH 11/36] feat: add posgresql settings with use example

---
 src/db/__init__.py                          |  0
 src/db/posgresql/__init__.py                |  8 ++++++
 src/db/posgresql/base.py                    | 28 +++++++++++++++++++
 src/db/posgresql/connection.py              | 23 +++++++++++++++
 src/db/posgresql/models/public/books.py     | 16 ++++++++++++
 src/db/posgresql/models/public/constants.py |  6 +++++
 6 files changed, 81 
insertions(+) create mode 100644 src/db/__init__.py create mode 100644 src/db/posgresql/__init__.py create mode 100644 src/db/posgresql/base.py create mode 100644 src/db/posgresql/connection.py create mode 100644 src/db/posgresql/models/public/books.py create mode 100644 src/db/posgresql/models/public/constants.py diff --git a/src/db/__init__.py b/src/db/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/db/posgresql/__init__.py b/src/db/posgresql/__init__.py new file mode 100644 index 0000000..aabb7cc --- /dev/null +++ b/src/db/posgresql/__init__.py @@ -0,0 +1,8 @@ +from .connection import get_db_context +from .base import BaseModel, Base + +__all__ = [ + "get_db_context", + "BaseModel", + "Base", +] diff --git a/src/db/posgresql/base.py b/src/db/posgresql/base.py new file mode 100644 index 0000000..8a6fb08 --- /dev/null +++ b/src/db/posgresql/base.py @@ -0,0 +1,28 @@ +from sqlalchemy.orm import declarative_base +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import Column, DateTime, func + +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.ext.declarative import declared_attr +import uuid +from shared.utils_dates import get_app_current_time + +Base = declarative_base() + + +class BaseModel: + @declared_attr + def id(cls): + return Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + + @declared_attr + def created_at(cls): + return Column(DateTime, default=get_app_current_time) + + @declared_attr + def updated_at(cls): + return Column(DateTime, default=get_app_current_time, onupdate=get_app_current_time) + + @declared_attr + def deleted_at(cls): + return Column(DateTime, nullable=True) \ No newline at end of file diff --git a/src/db/posgresql/connection.py b/src/db/posgresql/connection.py new file mode 100644 index 0000000..1a1a2d1 --- /dev/null +++ b/src/db/posgresql/connection.py @@ -0,0 +1,23 @@ +from contextlib import contextmanager + +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from sqlalchemy.pool import NullPool + +from core.settings import settings + + +application_name = settings.APP_NAME.replace(" ", "-").lower() +engine = create_engine( + settings.POSTGRESQL_URL.unicode_string(), connect_args={"application_name": application_name}, poolclass=NullPool +) +SessionLocal = sessionmaker(autocommit=False, bind=engine) + + +@contextmanager +def get_db_context(): + db = SessionLocal() + try: + yield db + finally: + db.close() diff --git a/src/db/posgresql/models/public/books.py b/src/db/posgresql/models/public/books.py new file mode 100644 index 0000000..5302864 --- /dev/null +++ b/src/db/posgresql/models/public/books.py @@ -0,0 +1,16 @@ +from sqlalchemy import Column, Integer, String, Enum +from .constants import BookType + +from db.posgresql.base import Base, BaseModel + +class Book(Base, BaseModel): + __tablename__ = "books" + __table_args__ = {"schema": "public"} + + title: str = Column(String, nullable=False) + author: str = Column(String, nullable=False) + year: int = Column(Integer, nullable=False) + type: BookType = Column(Enum(BookType), nullable=False) + + + \ No newline at end of file diff --git a/src/db/posgresql/models/public/constants.py b/src/db/posgresql/models/public/constants.py new file mode 100644 index 0000000..f755797 --- /dev/null +++ b/src/db/posgresql/models/public/constants.py @@ -0,0 +1,6 @@ +from enum import StrEnum + +class BookType(StrEnum): + ONLINE = "online" + FISICAL = "fisical" + BOTH = "both" \ No newline at end of file From 
b61d4d0fec4f9292a9fe9a5e599e3c6ed0bd4572 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 14:54:21 -0600 Subject: [PATCH 12/36] feat: add settings tests --- src/tests/__init__.py | 14 ++++++++++++++ src/tests/create_databases.py | 23 +++++++++++++++++++++++ 2 files changed, 37 insertions(+) create mode 100644 src/tests/create_databases.py diff --git a/src/tests/__init__.py b/src/tests/__init__.py index e69de29..e22c3f8 100644 --- a/src/tests/__init__.py +++ b/src/tests/__init__.py @@ -0,0 +1,14 @@ +from loguru import logger + +from core.settings import settings +from db.posgresql.models.public import ( # import all models for create tables for database testing + Book +) +from shared.environment import AppEnvironment +from tests.create_databases import prepare_database + +if settings.ENVIRONMENT in [AppEnvironment.TESTING, AppEnvironment.TESTING_DOCKER]: + logger.info("Preparing database for tests") + prepare_database( + schemas_to_create=["public"], + ) \ No newline at end of file diff --git a/src/tests/create_databases.py b/src/tests/create_databases.py new file mode 100644 index 0000000..812aa33 --- /dev/null +++ b/src/tests/create_databases.py @@ -0,0 +1,23 @@ +from sqlalchemy import create_engine, text +from sqlalchemy.pool import NullPool + +from db.posgresql import Base +from core.settings import settings + + +def create_schema(engine, schema_name): + schema_format = "CREATE SCHEMA IF NOT EXISTS {}" + query_schema = text(schema_format.format(schema_name)) + with engine.connect() as conn, conn.begin(): + conn.execute(query_schema) + + +def create_schemas(engine, schemas_to_create: list[str]): + for schema in schemas_to_create: + create_schema(engine, schema) + + +def prepare_database(schemas_to_create: list[str]): + engine = create_engine(settings.POSTGRESQL_URL.unicode_string(), poolclass=NullPool) + create_schemas(engine, schemas_to_create) + Base.metadata.create_all(engine) \ No newline at end of file From c3a6468445c7d5d57e6e03d771e7a68b68857810 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 15:02:28 -0600 Subject: [PATCH 13/36] fix: database settings posgresql --- src/db/posgresql/base.py | 3 +-- src/db/posgresql/connection.py | 2 +- src/db/posgresql/models/public/__init__.py | 4 ++++ 3 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 src/db/posgresql/models/public/__init__.py diff --git a/src/db/posgresql/base.py b/src/db/posgresql/base.py index 8a6fb08..bf35622 100644 --- a/src/db/posgresql/base.py +++ b/src/db/posgresql/base.py @@ -1,6 +1,5 @@ from sqlalchemy.orm import declarative_base -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy import Column, DateTime, func +from sqlalchemy import Column, DateTime from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.ext.declarative import declared_attr diff --git a/src/db/posgresql/connection.py b/src/db/posgresql/connection.py index 1a1a2d1..3b32ff2 100644 --- a/src/db/posgresql/connection.py +++ b/src/db/posgresql/connection.py @@ -7,7 +7,7 @@ from core.settings import settings -application_name = settings.APP_NAME.replace(" ", "-").lower() +application_name = settings.PROJECT.NAME.replace(" ", "-").lower() engine = create_engine( settings.POSTGRESQL_URL.unicode_string(), connect_args={"application_name": application_name}, poolclass=NullPool ) diff --git a/src/db/posgresql/models/public/__init__.py b/src/db/posgresql/models/public/__init__.py new file mode 100644 index 0000000..8b0d45f --- /dev/null +++ b/src/db/posgresql/models/public/__init__.py @@ 
-0,0 +1,4 @@ +from .books import Book +from .constants import BookType + +__all__ = ["Book", "BookType"] \ No newline at end of file From c44f693782890c3b3ad2d71087661e932ca9c25c Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 16:17:54 -0600 Subject: [PATCH 14/36] feat: add actions tests and modulate in diferent jobs the steps --- .github/actions/save-coverage/action.yml | 186 +++++++++++++++++++ .github/workflows/main.yml | 222 +++++++++++++++++------ .pre-commit-config.yaml | 5 +- README.md | 7 + docker_images/testing/Dockerfile.testing | 4 + mypy.ini | 8 +- 6 files changed, 376 insertions(+), 56 deletions(-) create mode 100644 .github/actions/save-coverage/action.yml diff --git a/.github/actions/save-coverage/action.yml b/.github/actions/save-coverage/action.yml new file mode 100644 index 0000000..24416de --- /dev/null +++ b/.github/actions/save-coverage/action.yml @@ -0,0 +1,186 @@ +name: "Save Coverage Artifacts" +description: "Guarda los reportes de cobertura en la rama de artefactos" + +inputs: + gh-token: + description: "GitHub Token para poder pushear a la rama de artefactos" + required: true + artifacts-branch: + description: "Nombre de la rama donde se subirΓ‘n los artefactos (reportes de cobertura, etc.)" + required: false + default: "artifacts" + coverage-source: + description: "Ruta relativa al workspace donde se encuentran los reportes, o si is-artifact es true, nombre (o ruta) del archivo ZIP" + required: false + default: "coverage-reports" + is-artifact: + description: "Si es true, indica que el reporte viene de un artifact y se debe descargar; en caso contrario se copiarΓ‘ la carpeta directamente" + required: false + default: "false" + +runs: + using: "composite" + steps: + # ---------------------------------------------------------------------------------------- + # Paso 1: Hacer checkout del cΓ³digo en la carpeta "action-repo" + # ---------------------------------------------------------------------------------------- + - name: "Checkout code" + uses: actions/checkout@v3 + with: + path: "action-repo" + + # ---------------------------------------------------------------------------------------- + # Paso 2A (si is-artifact es true): Descargar el artifact de cobertura + # ---------------------------------------------------------------------------------------- + - name: "Descargar artifact de cobertura" + if: ${{ inputs.is-artifact == 'true' }} + uses: actions/download-artifact@v4 + with: + name: ${{ inputs.coverage-source }} + path: downloaded-coverage + + # ---------------------------------------------------------------------------------------- + # Paso 2B: Verificar el contenido de la cobertura + # - Si is-artifact es true se revisa "downloaded-coverage". + # - Si is-artifact es false se revisa el folder "coverage-source" directamente. + # ---------------------------------------------------------------------------------------- + - name: "Verificar artifact descargado de cobertura" + if: ${{ inputs.is-artifact == 'true' }} + shell: bash + env: + WORKSPACE_PATH: ${{ github.workspace }} + run: | + set -e + echo "▢️ Verificando artifact descargado..." + cd "${WORKSPACE_PATH}/downloaded-coverage" + if [ ! "$(ls -A .)" ]; then + echo "❌ Error: La carpeta 'downloaded-coverage' estΓ‘ vacΓ­a." + exit 1 + fi + echo "βœ… Artifact descargado y contiene archivos. Listando contenido:" + ls -lahR . 
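+          # Expose the resolved reports path to the following steps via $GITHUB_ENV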
+ echo "REPORTS_PATH=${WORKSPACE_PATH}/downloaded-coverage" >> "$GITHUB_ENV" + + - name: "Verificar carpeta de cobertura" + if: ${{ inputs.is-artifact != 'true' }} + shell: bash + env: + WORKSPACE_PATH: ${{ github.workspace }} + REPORTS_PATH: ${{ inputs.coverage-source }} + run: | + set -e + echo "▢️ Verificando la carpeta de cobertura..." + cd "${WORKSPACE_PATH}" + echo "πŸ” Revisando la carpeta '${REPORTS_PATH}' en '${WORKSPACE_PATH}'" + if [ ! -d "${REPORTS_PATH}" ]; then + echo "❌ Error: La carpeta '${WORKSPACE_PATH}/${REPORTS_PATH}' no existe." + exit 1 + fi + FILE_COUNT=$(find "${REPORTS_PATH}" | wc -l) + if [ "${FILE_COUNT}" -le 1 ]; then + echo "❌ Error: La carpeta '${WORKSPACE_PATH}/${REPORTS_PATH}' estΓ‘ vacΓ­a." + exit 1 + fi + echo "βœ… La carpeta '${REPORTS_PATH}' existe y contiene archivos. Listando contenido:" + ls -lahR "${REPORTS_PATH}" + echo "REPORTS_PATH=${WORKSPACE_PATH}/${REPORTS_PATH}" >> "$GITHUB_ENV" + + # ---------------------------------------------------------------------------------------- + # Paso 3: Clonar (o crear) la rama de artefactos en la carpeta "artifacts-repo" + # ---------------------------------------------------------------------------------------- + - name: "Clonar o crear la rama de artefactos" + shell: bash + env: + GH_TOKEN: ${{ inputs.gh-token }} + COMMIT_ID: ${{ github.sha }} + BRANCH_NAME: ${{ github.ref_name }} + REPO_NAME: ${{ github.repository }} + ARTIFACTS_BRANCH: ${{ inputs.artifacts-branch }} + WORKSPACE_PATH: ${{ github.workspace }} + run: | + set -e + echo "▢️ Iniciando clonaciΓ³n o creaciΓ³n de la rama '${ARTIFACTS_BRANCH}'" + # Configurar Git + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global init.defaultBranch "${ARTIFACTS_BRANCH}" + + echo "πŸ”Ž Verificando si la rama '${ARTIFACTS_BRANCH}' ya existe en remoto" + BRANCH_EXISTS=$(git ls-remote --heads "https://x-access-token:${GH_TOKEN}@github.com/${REPO_NAME}.git" "${ARTIFACTS_BRANCH}") + + if [ -n "${BRANCH_EXISTS}" ]; then + echo "βœ… La rama '${ARTIFACTS_BRANCH}' existe. ClonΓ‘ndola en la carpeta 'artifacts-repo'." + git clone --branch "${ARTIFACTS_BRANCH}" --single-branch "https://x-access-token:${GH_TOKEN}@github.com/${REPO_NAME}.git" "artifacts-repo" + else + echo "πŸ†• La rama '${ARTIFACTS_BRANCH}' NO existe. CreΓ‘ndola como rama huΓ©rfana." + mkdir -p "artifacts-repo" + cd "artifacts-repo" + git init + git checkout --orphan "${ARTIFACTS_BRANCH}" + git commit --allow-empty -m "Commit inicial para la rama ${ARTIFACTS_BRANCH}" + git remote add origin "https://x-access-token:${GH_TOKEN}@github.com/${REPO_NAME}.git" + git push --set-upstream origin "${ARTIFACTS_BRANCH}" + cd .. + fi + echo "πŸ“ Directorio final: $(pwd). Si existe 'artifacts-repo', se clonΓ³ o se inicializΓ³." + + # ---------------------------------------------------------------------------------------- + # Paso 4: Copiar los reportes de cobertura desde la carpeta REPORTS_PATH (descargada o local) + # a 'artifacts-repo' y hacer commit de los artefactos. 
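+    #         (the step below fails fast if REPORTS_PATH was never set)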
+ # ---------------------------------------------------------------------------------------- + - name: "Copiar cobertura y hacer commit" + shell: bash + env: + GH_TOKEN: ${{ inputs.gh-token }} + COMMIT_ID: ${{ github.sha }} + BRANCH_NAME: ${{ github.ref_name }} + REPO_NAME: ${{ github.repository }} + ARTIFACTS_BRANCH: ${{ inputs.artifacts-branch }} + WORKSPACE_PATH: ${{ github.workspace }} + run: | + set -e + # Verificar que la variable REPORTS_PATH estΓ© definida (desde pasos anteriores) + if [ -z "${REPORTS_PATH}" ]; then + echo "❌ Error: La variable REPORTS_PATH no estΓ‘ definida." + exit 1 + fi + echo "Usando REPORTS_PATH=${REPORTS_PATH}" + + # Ubicarnos en la carpeta clonada de la rama de artefactos + if [ -d "artifacts-repo" ]; then + cd "artifacts-repo" + else + echo "⚠️ No se encontrΓ³ 'artifacts-repo'. Revisar si hubo error previo en la clonaciΓ³n." + exit 1 + fi + + echo "▢️ Creando directorios para guardar la cobertura en 'artifacts-repo'." + mkdir -p "${BRANCH_NAME}/${COMMIT_ID}" + mkdir -p "${BRANCH_NAME}/latest" + + echo "πŸ”€ Regresando a '${WORKSPACE_PATH}' para copiar la cobertura." + cd "${WORKSPACE_PATH}" + + echo "πŸ“‚ Copiando contenido de '${REPORTS_PATH}' a 'artifacts-repo/${BRANCH_NAME}/${COMMIT_ID}'" + cp -r "${REPORTS_PATH}/." "${WORKSPACE_PATH}/artifacts-repo/${BRANCH_NAME}/${COMMIT_ID}/" + cp -r "${REPORTS_PATH}/." "${WORKSPACE_PATH}/artifacts-repo/${BRANCH_NAME}/latest/" + + echo "βœ… Cobertura copiada. Preparando commit..." + cd "${WORKSPACE_PATH}/artifacts-repo" + + echo "πŸ“ Creando (o editando) README.md" + echo "# Repositorio de Artefactos" > README.md + echo "" >> README.md + echo "Contiene reportes de test y otros artefactos del proyecto." >> README.md + + echo "πŸ“¦ Agregando todos los cambios al staging." + git add . + + COMMIT_MESSAGE="Agregar artefactos y README para commit ${COMMIT_ID} en la rama ${BRANCH_NAME}" + echo "πŸ“¦ Haciendo commit con mensaje: '${COMMIT_MESSAGE}'" + git commit -m "${COMMIT_MESSAGE}" || echo "⚠️ No hay cambios nuevos que comitear." + + echo "πŸš€ Haciendo push a la rama '${ARTIFACTS_BRANCH}'" + git push origin "${ARTIFACTS_BRANCH}" + + echo "βœ… Artefactos subidos exitosamente a la rama '${ARTIFACTS_BRANCH}'." diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 335d52b..2d8e80d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -2,96 +2,214 @@ name: FastAPI CI/CD on: push: - branches: - - main + branches: [main, development] + pull_request: + branches: [main, development] +permissions: + contents: write + +# Re‐usar versiones y directorios en todos los jobs +env: + PYTHON_VERSION: "3.13" + COVERAGE_REPORTS: coverage-reports + AWS_REGION: ${{ secrets.AWS_DEFAULT_REGION }} + +############################################################################### +# 1. 
LINTING – pre-commit +############################################################################### jobs: - CI: + lint: runs-on: ubuntu-latest + steps: - uses: actions/checkout@v3 - + - name: Set up Python uses: actions/setup-python@v4 with: - python-version: '3.12' - - - name: Cache pip dependencies + python-version: ${{ env.PYTHON_VERSION }} + + - name: Cache Poetry virtualenv uses: actions/cache@v3 with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements*.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - + path: ~/.cache/pypoetry + key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} + restore-keys: ${{ runner.os }}-poetry- + + - name: Install Poetry + run: | + curl -sSL https://install.python-poetry.org | python3 - + echo "$HOME/.local/bin" >> $GITHUB_PATH + - name: Install dependencies + run: poetry install --no-interaction --no-root --with dev + + - name: Run pre-commit + run: poetry run pre-commit run + + +############################################################################### +# 2. TEST – contenedores + cobertura +############################################################################### + test: + runs-on: ubuntu-latest + needs: lint # solo se ejecuta si lint pasa + + services: + postgres: + image: postgres:13 + env: + POSTGRES_USER: test + POSTGRES_PASSWORD: test + POSTGRES_DB: test_db + ports: + - 5432:5432 + options: >- + --health-cmd="pg_isready" + --health-interval=10s + --health-timeout=5s + --health-retries=5 + + mongodb: + image: mongo:4.4 + ports: + - 27017:27017 + options: >- + --health-cmd="mongo --eval 'db.runCommand({ ping: 1 })'" + --health-interval=10s + --health-timeout=5s + --health-retries=5 + + redis: + image: redis:6 + ports: + - 6379:6379 + options: >- + --health-cmd="redis-cli ping" + --health-interval=10s + --health-timeout=5s + --health-retries=5 + + steps: + - name: 🧾 Checkout code + uses: actions/checkout@v3 + + - name: πŸ—οΈ Build test image + run: | + docker build --progress=plain -t my-test-image -f docker_images/testing/Dockerfile.testing . + + - name: πŸš€ Run tests in container + run: | + docker run \ + --name my-tests \ + --network=host \ + -e CI=true \ + -e GITHUB_DATABASE_POSTGRESQL=postgresql://test:test@localhost:5432/test_db \ + -e GITHUB_DATABASE_MONGODB=mongodb://localhost:27017 \ + -e GITHUB_DATABASE_REDIS=redis://localhost:6379 \ + -v ${{ github.workspace }}/artifacts:/app/artifacts \ + my-test-image + + - name: πŸ“₯ Copiar reportes desde el contenedor + run: | + mkdir -p ${{ env.COVERAGE_REPORTS }} + docker cp my-tests:/app/coverage-reports/. ${{ env.COVERAGE_REPORTS }} + + echo "πŸ“„ Archivos copiados desde el contenedor:" + ls -lh ${{ env.COVERAGE_REPORTS }} + + - name: πŸ“€ Subir cobertura como artefacto + uses: actions/upload-artifact@v4 + with: + name: ${{ env.COVERAGE_REPORTS }} + path: ${{ env.COVERAGE_REPORTS }} + + - name: Guardar coverage + uses: ./.github/actions/save-coverage + with: + gh-token: ${{ secrets.GITHUB_TOKEN }} + artifacts-branch: 'artifacts' + coverage-source: ${{ env.COVERAGE_REPORTS }} + is-artifact: false + +############################################################################### +# 3. 
BUILD – empacar Lambda (o tu imagen de despliegue) +############################################################################### + build: + runs-on: ubuntu-latest + needs: test # solo si los tests pasaron + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Install deps run: | python -m pip install --upgrade pip - python -m venv venv - source venv/bin/activate pip install -r requirements.txt - - - name: Run pre-commit hooks - run: | - source venv/bin/activate - pre-commit run --all-files - - - name: Run Tests + + - name: Package Lambda zip run: | + python -m venv venv source venv/bin/activate - export PYTHONPATH=$PYTHONPATH:$(pwd)/src - echo "PYTHONPATH=$PYTHONPATH" - pytest src/tests - - - name: Package API - run: | - cd ./venv/lib/python3.12/site-packages + cd venv/lib/python${{ env.PYTHON_VERSION }}/site-packages zip -r9 ../../../../api.zip . cd ../../../../src zip -g ../api.zip -r . - + - name: Upload artifact uses: actions/upload-artifact@v4 with: - name: api + name: api-package path: api.zip - CD: +############################################################################### +# 4. DEPLOY – solo en rama main +############################################################################### + deploy: + if: github.ref == 'refs/heads/main' runs-on: ubuntu-latest - needs: [ CI ] + needs: build # espera a que el build termine + steps: - - name: Configurar credenciales de AWS + - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v2 with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ secrets.AWS_DEFAULT_REGION }} + aws-region: ${{ env.AWS_REGION }} - - name: Descargar el artefacto de Lambda + - name: Download package uses: actions/download-artifact@v4 with: - name: api - path: . 
- - - name: Verificar que el archivo existe - run: ls -lah api.zip + name: api-package - - name: Subir a S3 + - name: Upload to S3 run: aws s3 cp api.zip s3://${{ secrets.AWS_S3_BUCKET }}/api.zip - - name: Actualizar el cΓ³digo de la funciΓ³n Lambda - run: aws lambda update-function-code --function-name ${{ secrets.AWS_LAMBDA_FUNCTION_NAME }} --s3-bucket ${{ secrets.AWS_S3_BUCKET }} --s3-key api.zip + - name: Update Lambda code + run: | + aws lambda update-function-code \ + --function-name ${{ secrets.AWS_LAMBDA_FUNCTION_NAME }} \ + --s3-bucket ${{ secrets.AWS_S3_BUCKET }} \ + --s3-key api.zip - - name: Esperar a que termine la actualizaciΓ³n de Lambda + - name: Wait for Lambda update run: | while true; do - STATUS=$(aws lambda get-function-configuration --function-name ${{ secrets.AWS_LAMBDA_FUNCTION_NAME }} --query "LastUpdateStatus" --output text) - echo "Estado actual de Lambda: $STATUS" - if [[ "$STATUS" == "Successful" ]]; then - break - fi + STATUS=$(aws lambda get-function-configuration \ + --function-name ${{ secrets.AWS_LAMBDA_FUNCTION_NAME }} \ + --query "LastUpdateStatus" --output text) + echo "Lambda status: $STATUS" + [[ "$STATUS" == "Successful" ]] && break sleep 5 done - - name: Publicar nueva versiΓ³n de la Lambda - run: aws lambda publish-version --function-name ${{ secrets.AWS_LAMBDA_FUNCTION_NAME }} + - name: Publish new version + run: aws lambda publish-version \ + --function-name ${{ secrets.AWS_LAMBDA_FUNCTION_NAME }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 125181c..45e34ac 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,6 @@ repos: rev: v1.8.0 hooks: - id: mypy - args: ["src","--config-file=mypy.ini"] + args: ["--config-file=mypy.ini"] language: system - pass_filenames: false - exclude: ^(src/shared/path.py|src/shared/environment.py)$ \ No newline at end of file + pass_filenames: true \ No newline at end of file diff --git a/README.md b/README.md index 97c849f..bb72739 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,12 @@ # aws-lambda-fastapi +| CI Environment | Coverage | +|-----------|----------| +| main| ![Coverage Badge](https://github.com/ronihdzz/aws-lambda-fastapi/tree/artifacts/main/latest/coverage.svg) | +| development| ![Coverage Badge](https://github.com/ronihdzz/aws-lambda-fastapi/tree/artifacts/development/latest/coverage.svg) | + + + ## Run project ``` diff --git a/docker_images/testing/Dockerfile.testing b/docker_images/testing/Dockerfile.testing index 665c9b1..5b9bdd3 100644 --- a/docker_images/testing/Dockerfile.testing +++ b/docker_images/testing/Dockerfile.testing @@ -2,6 +2,10 @@ FROM python:3.13-slim +# Establecer variables de entorno + +ENV ENVIRONMENT=testing + # Establecer directorio de trabajo WORKDIR /app diff --git a/mypy.ini b/mypy.ini index 3e81927..53eb59b 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,3 +1,9 @@ [mypy] ignore_missing_imports = True -strict = True \ No newline at end of file +strict = True + +[mypy-src.shared.path] +ignore_errors = True + +[mypy-src.shared.environment] +ignore_errors = True From 253c24f9520ce7f855a2f5d73c5324f4ed10e0f8 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 17:01:52 -0600 Subject: [PATCH 15/36] refactor: only execute jobs: build, deploy for directly push to branch main or development --- .github/workflows/main.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 2d8e80d..dec7d01 100644 --- a/.github/workflows/main.yml +++ 
b/.github/workflows/main.yml @@ -135,8 +135,9 @@ jobs: ############################################################################### # 3. BUILD – empacar Lambda (o tu imagen de despliegue) -############################################################################### +############################################# #1 from ronihdzz/feat/docker_deploy################################## build: + if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/development' runs-on: ubuntu-latest needs: test # solo si los tests pasaron @@ -172,7 +173,7 @@ jobs: # 4. DEPLOY – solo en rama main ############################################################################### deploy: - if: github.ref == 'refs/heads/main' + if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/development' runs-on: ubuntu-latest needs: build # espera a que el build termine From c5673b2c2ab819e9441176df60690eb847762ba1 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 17:18:21 -0600 Subject: [PATCH 16/36] fix: build step --- .github/workflows/main.yml | 27 ++++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index dec7d01..55c6b8c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -13,7 +13,6 @@ permissions: env: PYTHON_VERSION: "3.13" COVERAGE_REPORTS: coverage-reports - AWS_REGION: ${{ secrets.AWS_DEFAULT_REGION }} ############################################################################### # 1. LINTING – pre-commit @@ -136,10 +135,11 @@ jobs: ############################################################################### # 3. BUILD – empacar Lambda (o tu imagen de despliegue) ############################################# #1 from ronihdzz/feat/docker_deploy################################## + build: if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/development' runs-on: ubuntu-latest - needs: test # solo si los tests pasaron + needs: test steps: - uses: actions/checkout@v3 @@ -149,15 +149,27 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - - name: Install deps + # ➊ Instalar Poetry y plugin de export + - name: Install Poetry and export plugin run: | - python -m pip install --upgrade pip - pip install -r requirements.txt + curl -sSL https://install.python-poetry.org | python3 - + echo "$HOME/.local/bin" >> $GITHUB_PATH + poetry self add poetry-plugin-export - - name: Package Lambda zip + # βž‹ Exportar las deps a requirements.txt + - name: Export dependencies + run: poetry export --without-hashes -f requirements.txt -o requirements.txt + + # ➌ Instalar en un venv para luego empaquetar + - name: Install deps (pip) run: | python -m venv venv source venv/bin/activate + pip install --upgrade pip + pip install -r requirements.txt + + - name: Package Lambda zip + run: | cd venv/lib/python${{ env.PYTHON_VERSION }}/site-packages zip -r9 ../../../../api.zip . cd ../../../../src @@ -169,6 +181,7 @@ jobs: name: api-package path: api.zip + ############################################################################### # 4. 
DEPLOY – solo en rama main ############################################################################### @@ -183,7 +196,7 @@ jobs: with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ env.AWS_REGION }} + aws-region: ${{ secrets.AWS_DEFAULT_REGION }} - name: Download package uses: actions/download-artifact@v4 From a2021165189d3d7ff75ffc1ddea527f59eaee09e Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 17:51:44 -0600 Subject: [PATCH 17/36] feat: add support for python-version from 3.12 --- .github/workflows/main.yml | 2 +- docker_images/testing/Dockerfile.testing | 2 +- poetry.lock | 7 ++++--- pyproject.toml | 2 +- 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 55c6b8c..69e6a67 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -11,7 +11,7 @@ permissions: # Re‐usar versiones y directorios en todos los jobs env: - PYTHON_VERSION: "3.13" + PYTHON_VERSION: "3.12" COVERAGE_REPORTS: coverage-reports ############################################################################### diff --git a/docker_images/testing/Dockerfile.testing b/docker_images/testing/Dockerfile.testing index 5b9bdd3..3f53e3f 100644 --- a/docker_images/testing/Dockerfile.testing +++ b/docker_images/testing/Dockerfile.testing @@ -1,6 +1,6 @@ # Dockerfile.testing -FROM python:3.13-slim +FROM python:3.12-slim # Establecer variables de entorno diff --git a/poetry.lock b/poetry.lock index 0c518cc..55fe4e4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -27,6 +27,7 @@ files = [ [package.dependencies] idna = ">=2.8" sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] @@ -255,7 +256,7 @@ description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version == \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" +markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" files = [ {file = "greenlet-3.2.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c49e9f7c6f625507ed83a7485366b46cbe325717c60837f7244fc99ba16ba9d6"}, {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3cc1a3ed00ecfea8932477f729a9f616ad7347a5e55d50929efa50a86cb7be7"}, @@ -1364,5 +1365,5 @@ dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] [metadata] lock-version = "2.1" -python-versions = ">=3.13,<4.0" -content-hash = "658762cdceef3aedc229940faac0dca324d3c265dbfa442c2e8d0028ffbdafcd" +python-versions = ">=3.12,<4.0" +content-hash = "83ef40bfa8837c231a75f75ae85301b4a5b746337a0fdaf3dbbca70e7ae93c49" diff --git a/pyproject.toml b/pyproject.toml index 3ab504d..2161516 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ authors = [ {name = "ronihdzz",email = "ronaldo.runing_@hotmail.com"} ] readme = "README.md" -requires-python = 
">=3.13,<4.0" +requires-python = ">=3.12,<4.0" dependencies = [ "fastapi (>=0.115.12,<0.116.0)", "sqlalchemy (>=2.0.40,<3.0.0)", From 2ea6bd1c4db5df2b16483c7e171c56cdd6a9e274 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 18:02:57 -0600 Subject: [PATCH 18/36] ci: fix deploy job --- .github/workflows/main.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 69e6a67..8bd1d7f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -225,5 +225,9 @@ jobs: done - name: Publish new version - run: aws lambda publish-version \ - --function-name ${{ secrets.AWS_LAMBDA_FUNCTION_NAME }} + env: + FUNCTION_NAME: ${{ secrets.AWS_LAMBDA_FUNCTION_NAME }} + run: | + set -euo pipefail + aws lambda publish-version \ + --function-name "$FUNCTION_NAME" \ No newline at end of file From 9fcff09652fc9453e30fd12095daa36cd227f09b Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 22:19:18 -0600 Subject: [PATCH 19/36] feat: add docker image lambda deploy --- README.md | 23 +++++++++++++++++++++++ docker-compose.yml | 16 +++++++++++++++- docker_images/deploy/Dockerfile.deploy | 24 ++++++++++++++++++++++++ src/core/settings/__init__.py | 1 + src/shared/environment.py | 2 +- 5 files changed, 64 insertions(+), 2 deletions(-) create mode 100644 docker_images/deploy/Dockerfile.deploy diff --git a/README.md b/README.md index bb72739..9d4aa35 100644 --- a/README.md +++ b/README.md @@ -25,3 +25,26 @@ Run mympi: mypy src ``` + + +Check docker deploy lambda + +``` +curl -Xcurl -X POST "http://localhost:9100/2015-03-31/functions/function/invocations" \ + -H "Content-Type: application/json" \ + -d '{ + "version": "2.0", + "routeKey": "GET /", + "rawPath": "/", + "rawQueryString": "", + "headers": {}, + "requestContext": { + "http": { + "method": "GET", + "path": "/", + "sourceIp": "127.0.0.1" + } + }, + "isBase64Encoded": false + }' +``` diff --git a/docker-compose.yml b/docker-compose.yml index bd5501a..e25385c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -32,4 +32,18 @@ services: depends_on: - db-postgres - db-mongodb - - db-redis \ No newline at end of file + - db-redis + + lambda-app: + build: + context: . + dockerfile: docker_images/deploy/Dockerfile.deploy + environment: + - ENVIRONMENT=local_docker + - POSTGRESQL_URL=postgresql://test:test@db-postgres:5432/test_db + - MONGO_URL=mongodb://db-mongodb:27017/test_db + - REDIS_URL=redis://db-redis:6379 + env_file: + - ./.envs/.env.local.docker + ports: + - 9100:8080 diff --git a/docker_images/deploy/Dockerfile.deploy b/docker_images/deploy/Dockerfile.deploy new file mode 100644 index 0000000..feec6bd --- /dev/null +++ b/docker_images/deploy/Dockerfile.deploy @@ -0,0 +1,24 @@ +# --- Base runtime de AWS Lambda --- +FROM public.ecr.aws/lambda/python:3.12 + +# 1️⃣ Ubicamos la raΓ­z de la app aquΓ­ +WORKDIR /app + +# 2️⃣ Instalamos Poetry (sin virtualenv para dejar deps en la capa global) +RUN curl -sSL https://install.python-poetry.org | python3 - && \ + ln -s /root/.local/bin/poetry /usr/local/bin/poetry + +# 3️⃣ ResoluciΓ³n de dependencias +COPY pyproject.toml poetry.lock* /tmp/ +RUN cd /tmp && \ + poetry config virtualenvs.create false && \ + poetry install --no-interaction --no-root --without dev + +# 4️⃣ Copiamos el cΓ³digo. β–Έ /app/api /app/core /app/shared … +COPY src/ . 
+ +# 5️⃣ Aseguramos que /app estΓ© en el path de Python (opcional, ya lo estΓ‘) +ENV PYTHONPATH="/app" + +# 6️⃣ Lambda buscarΓ‘ api/main.py y llamarΓ‘ a handler() +CMD ["main.handler"] \ No newline at end of file diff --git a/src/core/settings/__init__.py b/src/core/settings/__init__.py index 8efec15..5bd20e0 100644 --- a/src/core/settings/__init__.py +++ b/src/core/settings/__init__.py @@ -24,6 +24,7 @@ class SettingsManager: AppEnvironment.PRODUCTION.value: ProductionSettings, AppEnvironment.TESTING.value: TestingSettings, AppEnvironment.TESTING_DOCKER.value: TestingSettings, + AppEnvironment.LOCAL_DOCKER.value: LocalSettings, } def __init__(self, environment: str): diff --git a/src/shared/environment.py b/src/shared/environment.py index 74dc464..384b630 100644 --- a/src/shared/environment.py +++ b/src/shared/environment.py @@ -7,7 +7,7 @@ class AppEnvironment(StrEnum): PRODUCTION = "production", "prod" TESTING = "testing", "test" TESTING_DOCKER = "testing_docker", "test.docker" - + LOCAL_DOCKER = "local_docker", "local.docker" def __new__(cls, value: str, suffix: str) -> "AppEnvironment": obj = str.__new__(cls, value) obj._value_ = value From 935790beed4193c90481603cd054c8484e47e9c7 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Sun, 11 May 2025 22:38:20 -0600 Subject: [PATCH 20/36] refactor: readme.md --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 9d4aa35..9db1f9a 100644 --- a/README.md +++ b/README.md @@ -2,9 +2,8 @@ | CI Environment | Coverage | |-----------|----------| -| main| ![Coverage Badge](https://github.com/ronihdzz/aws-lambda-fastapi/tree/artifacts/main/latest/coverage.svg) | -| development| ![Coverage Badge](https://github.com/ronihdzz/aws-lambda-fastapi/tree/artifacts/development/latest/coverage.svg) | - +| main| ![Coverage Badge](https://github.com/ronihdzz/aws-lambda-fastapi/blob/artifacts/main/latest/coverage.svg) | +| development| ![Coverage Badge](https://github.com/ronihdzz/aws-lambda-fastapi/blob/artifacts/development/latest/coverage.svg) | ## Run project From 151fcbd31c174fc67e6d9f42fc630729c4a41cef Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Wed, 14 May 2025 18:33:15 -0600 Subject: [PATCH 21/36] ci: removed old zip-based deployment and implemented environment-based Docker image build and ECR push --- .github/workflows/main.yml | 117 ++++++++++++++----------------------- 1 file changed, 43 insertions(+), 74 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 8bd1d7f..cbbcd5a 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -141,93 +141,62 @@ jobs: runs-on: ubuntu-latest needs: test + env: + IMAGE_NAME: my-fastapi-lambda + ECR_REGISTRY: ${{ secrets.ECR_REGISTRY }} # ej. 284789984208.dkr.ecr.us-east-1.amazonaws.com + ECR_REPO_NAME: ${{ secrets.ECR_REPO_NAME }} # ej. 
my-fastapi-lambda + BRANCH_ENV_MAP: '{"main": "prod", "development": "dev", "staging": "stg", "testing": "tst"}' + steps: - uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYTHON_VERSION }} - - # ➊ Instalar Poetry y plugin de export - - name: Install Poetry and export plugin - run: | - curl -sSL https://install.python-poetry.org | python3 - - echo "$HOME/.local/bin" >> $GITHUB_PATH - poetry self add poetry-plugin-export + - name: πŸ› οΈ Instalar jq + run: sudo apt-get update && sudo apt-get install -y jq - # βž‹ Exportar las deps a requirements.txt - - name: Export dependencies - run: poetry export --without-hashes -f requirements.txt -o requirements.txt + - name: πŸ” Configurar credenciales AWS + uses: aws-actions/configure-aws-credentials@v2 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ secrets.AWS_DEFAULT_REGION }} - # ➌ Instalar en un venv para luego empaquetar - - name: Install deps (pip) - run: | - python -m venv venv - source venv/bin/activate - pip install --upgrade pip - pip install -r requirements.txt + - name: πŸ” Login a ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v1 - - name: Package Lambda zip + - name: 🧠 Mapear rama a entorno y generar tags run: | - cd venv/lib/python${{ env.PYTHON_VERSION }}/site-packages - zip -r9 ../../../../api.zip . - cd ../../../../src - zip -g ../api.zip -r . - - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: api-package - path: api.zip + echo "πŸ“¦ Configurando entorno y versionado..." + BRANCH_NAME="${GITHUB_REF#refs/heads/}" + echo "πŸ” Rama detectada: $BRANCH_NAME" + echo "🧭 Mapeo de ramas a entornos: $BRANCH_ENV_MAP" -############################################################################### -# 4. DEPLOY – solo en rama main -############################################################################### - deploy: - if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/development' - runs-on: ubuntu-latest - needs: build # espera a que el build termine + ENVIRONMENT=$(echo "$BRANCH_ENV_MAP" | jq -r --arg branch "$BRANCH_NAME" '.[$branch]') + if [[ "$ENVIRONMENT" == "null" || -z "$ENVIRONMENT" ]]; then + echo "❌ Error: rama '$BRANCH_NAME' no estΓ‘ mapeada a un entorno vΓ‘lido." + exit 1 + fi - steps: - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v2 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ secrets.AWS_DEFAULT_REGION }} + VERSION=$(date -u +"%Y%m%dT%H%M%SZ") - - name: Download package - uses: actions/download-artifact@v4 - with: - name: api-package + echo "🏷️ ENVIRONMENT=$ENVIRONMENT" + echo "πŸ“… VERSION=$VERSION" - - name: Upload to S3 - run: aws s3 cp api.zip s3://${{ secrets.AWS_S3_BUCKET }}/api.zip + echo "ENVIRONMENT=$ENVIRONMENT" >> $GITHUB_ENV + echo "VERSION=$VERSION" >> $GITHUB_ENV + echo "IMAGE_VERSION=$ECR_REGISTRY/$ECR_REPO_NAME:$ENVIRONMENT-$VERSION" >> $GITHUB_ENV + echo "IMAGE_LATEST=$ECR_REGISTRY/$ECR_REPO_NAME:$ENVIRONMENT-latest" >> $GITHUB_ENV - - name: Update Lambda code + - name: πŸ—οΈ Build de imagen Docker para Lambda run: | - aws lambda update-function-code \ - --function-name ${{ secrets.AWS_LAMBDA_FUNCTION_NAME }} \ - --s3-bucket ${{ secrets.AWS_S3_BUCKET }} \ - --s3-key api.zip + docker build \ + -f docker_images/deploy/Dockerfile.deploy \ + -t $IMAGE_VERSION . 
- - name: Wait for Lambda update - run: | - while true; do - STATUS=$(aws lambda get-function-configuration \ - --function-name ${{ secrets.AWS_LAMBDA_FUNCTION_NAME }} \ - --query "LastUpdateStatus" --output text) - echo "Lambda status: $STATUS" - [[ "$STATUS" == "Successful" ]] && break - sleep 5 - done - - - name: Publish new version - env: - FUNCTION_NAME: ${{ secrets.AWS_LAMBDA_FUNCTION_NAME }} + docker tag $IMAGE_VERSION $IMAGE_LATEST + + - name: πŸš€ Push de imΓ‘genes a ECR run: | - set -euo pipefail - aws lambda publish-version \ - --function-name "$FUNCTION_NAME" \ No newline at end of file + docker push $IMAGE_VERSION + docker push $IMAGE_LATEST From 4725e17e6b280355f67ab91612b79927a9afdf76 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Wed, 14 May 2025 18:47:03 -0600 Subject: [PATCH 22/36] feat: add deploy job --- .github/workflows/main.yml | 65 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 63 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index cbbcd5a..e7fc725 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -133,8 +133,8 @@ jobs: is-artifact: false ############################################################################### -# 3. BUILD – empacar Lambda (o tu imagen de despliegue) -############################################# #1 from ronihdzz/feat/docker_deploy################################## +# 3. BUILD +############################################################################### build: if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/development' @@ -200,3 +200,64 @@ jobs: run: | docker push $IMAGE_VERSION docker push $IMAGE_LATEST + + - name: πŸ“€ Guardar IMAGE_LATEST como artifact + run: echo "IMAGE_LATEST=$IMAGE_LATEST" > image.env + + - name: πŸ“¦ Subir artifact con IMAGE_LATEST + uses: actions/upload-artifact@v4 + with: + name: image-env + path: image.env + +############################################################################## +# 4. 
DEPLOY
+##############################################################################
+
+  deploy:
+    if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/development'
+    runs-on: ubuntu-latest
+    needs: build
+
+    steps:
+      - name: πŸ” Configurar credenciales AWS
+        uses: aws-actions/configure-aws-credentials@v2
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
+
+      - name: πŸ“₯ Descargar artifact con IMAGE_LATEST
+        uses: actions/download-artifact@v4
+        with:
+          name: image-env
+
+      - name: πŸ§ͺ Cargar variable IMAGE_LATEST
+        run: |
+          set -a
+          source image.env
+          set +a
+          echo "πŸ“¦ Imagen a desplegar: $IMAGE_LATEST"
+          echo "IMAGE_LATEST=$IMAGE_LATEST" >> $GITHUB_ENV
+
+      - name: πŸš€ Actualizar Lambda con nueva imagen
+        run: |
+          aws lambda update-function-code \
+            --function-name "${{ secrets.AWS_LAMBDA_FUNCTION_NAME }}" \
+            --image-uri "$IMAGE_LATEST"
+
+      - name: ⏳ Esperar a que termine la actualizaciΓ³n
+        run: |
+          while true; do
+            STATUS=$(aws lambda get-function-configuration \
+              --function-name "${{ secrets.AWS_LAMBDA_FUNCTION_NAME }}" \
+              --query "LastUpdateStatus" --output text)
+            echo "Lambda status: $STATUS"
+            [[ "$STATUS" == "Successful" ]] && break
+            sleep 5
+          done
+
+      - name: πŸ“¦ Publicar nueva versiΓ³n
+        run: |
+          aws lambda publish-version \
+            --function-name "${{ secrets.AWS_LAMBDA_FUNCTION_NAME }}"
From 17441e04610f913b90952a3cbe0c1f77ee4a22bd Mon Sep 17 00:00:00 2001
From: ronihdzz
Date: Thu, 15 May 2025 22:36:15 -0600
Subject: [PATCH 23/36] ci: refactor to deploy the Lambda to different
 environments
---
 .github/workflows/main.yml | 44 +++++++++++++++++++++++++++++++-------
 1 file changed, 36 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index e7fc725..2f66ffa 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -132,6 +132,7 @@ jobs:
       coverage-source: ${{ env.COVERAGE_REPORTS }}
       is-artifact: false
 
+
 ###############################################################################
 # 3. BUILD
 ###############################################################################
@@ -201,15 +202,18 @@ jobs:
           docker push $IMAGE_VERSION
           docker push $IMAGE_LATEST
 
-      - name: πŸ“€ Guardar IMAGE_LATEST como artifact
-        run: echo "IMAGE_LATEST=$IMAGE_LATEST" > image.env
+      - name: πŸ“€ Guardar variables como artifact
+        run: |
+          echo "IMAGE_LATEST=$IMAGE_LATEST" > image.env
+          echo "ENVIRONMENT=$ENVIRONMENT" >> image.env
 
       - name: πŸ“¦ Subir artifact con IMAGE_LATEST
         uses: actions/upload-artifact@v4
         with:
-          name: image-env
+          name: image-env-${{ github.ref_name }}
           path: image.env
 
+
 ##############################################################################
 # 4. DEPLOY
 ##############################################################################
@@ -230,27 +234,51 @@ jobs:
       - name: πŸ“₯ Descargar artifact con IMAGE_LATEST
         uses: actions/download-artifact@v4
         with:
-          name: image-env
+          name: image-env-${{ github.ref_name }}
+
 
-      - name: πŸ§ͺ Cargar variable IMAGE_LATEST
+      - name: πŸ§ͺ Cargar y validar variables de entorno del build
         run: |
           set -a
           source image.env
           set +a
+
           echo "πŸ“¦ Imagen a desplegar: $IMAGE_LATEST"
+          echo "🌎 Entorno detectado: $ENVIRONMENT"
+
+          # Validar que ENVIRONMENT no estΓ© vacΓ­o
+          if [[ -z "$ENVIRONMENT" ]]; then
+            echo "❌ ENVIRONMENT no fue cargado correctamente."
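+            # Fail fast: without a mapped environment there is no valid image tag to deploy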
+            exit 1
+          fi
+
+          # Validar que IMAGE_LATEST no estΓ© vacΓ­o y sea una URI vΓ‘lida
+          if [[ -z "$IMAGE_LATEST" || "$IMAGE_LATEST" != *".dkr.ecr."* ]]; then
+            echo "❌ IMAGE_LATEST no es una URI vΓ‘lida de imagen ECR: $IMAGE_LATEST"
+            exit 1
+          fi
+
           echo "IMAGE_LATEST=$IMAGE_LATEST" >> $GITHUB_ENV
+          echo "ENVIRONMENT=$ENVIRONMENT" >> $GITHUB_ENV
+
+
+      - name: 🧩 Construir nombre completo de la Lambda
+        run: |
+          FULL_LAMBDA_NAME="${{ secrets.AWS_LAMBDA_BASE_NAME }}-${ENVIRONMENT}"
+          echo "πŸ‘ Lambda completa: $FULL_LAMBDA_NAME"
+          echo "FULL_LAMBDA_NAME=$FULL_LAMBDA_NAME" >> $GITHUB_ENV
 
       - name: πŸš€ Actualizar Lambda con nueva imagen
         run: |
           aws lambda update-function-code \
-            --function-name "${{ secrets.AWS_LAMBDA_FUNCTION_NAME }}" \
+            --function-name "$FULL_LAMBDA_NAME" \
             --image-uri "$IMAGE_LATEST"
 
       - name: ⏳ Esperar a que termine la actualizaciΓ³n
         run: |
           while true; do
             STATUS=$(aws lambda get-function-configuration \
-              --function-name "${{ secrets.AWS_LAMBDA_FUNCTION_NAME }}" \
+              --function-name "$FULL_LAMBDA_NAME" \
               --query "LastUpdateStatus" --output text)
             echo "Lambda status: $STATUS"
             [[ "$STATUS" == "Successful" ]] && break
             sleep 5
           done
 
       - name: πŸ“¦ Publicar nueva versiΓ³n
         run: |
           aws lambda publish-version \
-            --function-name "${{ secrets.AWS_LAMBDA_FUNCTION_NAME }}"
+            --function-name "$FULL_LAMBDA_NAME"
\ No newline at end of file
From f709ef15fce2e3fb05dda242c50072e8d025f759 Mon Sep 17 00:00:00 2001
From: ronihdzz
Date: Thu, 15 May 2025 23:32:43 -0600
Subject: [PATCH 24/36] ci: fix deploy job
---
 .github/workflows/main.yml | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 2f66ffa..a276a76 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -223,6 +223,9 @@
     runs-on: ubuntu-latest
     needs: build
 
+    env:
+      ARTIFACT_NAME: image-env-${{ github.ref_name }}
+
     steps:
       - name: πŸ” Configurar credenciales AWS
         uses: aws-actions/configure-aws-credentials@v2
@@ -234,9 +237,9 @@
       - name: πŸ“₯ Descargar artifact con IMAGE_LATEST
         uses: actions/download-artifact@v4
         with:
-          name: image-env-${{ github.ref_name }}
-
+          name: ${{ env.ARTIFACT_NAME }}
+
+
       - name: πŸ§ͺ Cargar y validar variables de entorno del build
         run: |
           set -a
From d4dfba9c6ab8497097d06bd237036e8119e1f4eb Mon Sep 17 00:00:00 2001
From: ronihdzz
Date: Fri, 16 May 2025 06:46:22 -0600
Subject: [PATCH 25/36] feat: add base templates for contextvars, exceptions,
 responses and internal_errors
---
 mypy.ini                           | 13 +++++++
 src/shared/base_contextvars.py     |  4 +++
 src/shared/base_exceptions.py      | 31 +++++++++++++++++
 src/shared/base_internal_errors.py | 16 +++++++++
 src/shared/base_responses.py       | 55 ++++++++++++++++++++++++++++++
 5 files changed, 119 insertions(+)
 create mode 100644 src/shared/base_contextvars.py
 create mode 100644 src/shared/base_exceptions.py
 create mode 100644 src/shared/base_internal_errors.py
 create mode 100644 src/shared/base_responses.py

diff --git a/mypy.ini b/mypy.ini
index 53eb59b..33f7120 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -7,3 +7,16 @@ ignore_errors = True
 
 [mypy-src.shared.environment]
 ignore_errors = True
+
+[mypy-src.shared.base_internal_errors]
+ignore_errors = True
+
+[mypy-src.shared.base_contextvars]
+ignore_errors = True
+
+[mypy-src.shared.base_responses]
+ignore_errors = True
+
+[mypy-src.shared.base_exceptions]
+ignore_errors = True
+
diff --git a/src/shared/base_contextvars.py b/src/shared/base_contextvars.py
new file mode 100644
index 0000000..cb5b926
--- /dev/null
+++ 
b/src/shared/base_contextvars.py @@ -0,0 +1,4 @@ +from contextvars import ContextVar + +ctx_trace_id = ContextVar("ctx_trace_id", default=None) +ctx_caller_id = ContextVar("ctx_caller_id", default=None) \ No newline at end of file diff --git a/src/shared/base_exceptions.py b/src/shared/base_exceptions.py new file mode 100644 index 0000000..04b4962 --- /dev/null +++ b/src/shared/base_exceptions.py @@ -0,0 +1,31 @@ +import fastapi +from typing import Optional, Any, Dict +from loguru import logger +from shared.base_internal_errors import ErrorCodes + +class BaseApiRestException(Exception): + GENERAL_STATUS_CODE_HTTP = fastapi.status.HTTP_400_BAD_REQUEST + GENERAL_ERROR_CODE = ErrorCodes.UNKNOW + + def __init__(self, + status_code_http: int = None, + error_code: ErrorCodes = None, + message: Optional[str] = None, + data: Optional[Dict[str, Any]] = None): + super().__init__(message) + self.status_code_http = status_code_http if status_code_http else self.GENERAL_STATUS_CODE_HTTP + self.error_code = error_code if error_code else self.GENERAL_ERROR_CODE + self.data = data + self.message = message + logger.warning(self.__str__()) + + def __str__(self): + return f"[{self.status_code_http}] {self.error_code.description}: {self.message}" + + + +class UserException(BaseApiRestException): + GENERAL_ERROR_CODE = ErrorCodes.USER_ERROR + +class TokenException(BaseApiRestException): + GENERAL_ERROR_CODE = ErrorCodes.TOKEN_ERROR \ No newline at end of file diff --git a/src/shared/base_internal_errors.py b/src/shared/base_internal_errors.py new file mode 100644 index 0000000..9ab39a5 --- /dev/null +++ b/src/shared/base_internal_errors.py @@ -0,0 +1,16 @@ +from enum import Enum + +class ErrorCodes(Enum): + UNKNOW = 100, "Unknown Error" + PYDANTIC_VALIDATIONS_REQUEST = 8001, "Failed pydantic validations on request" + + + def __new__(cls, value: int, description: str) -> 'ErrorCodes': + obj = object.__new__(cls) + obj._value_ = value + obj._description = description + return obj + + @property + def description(self) -> str: + return self._description \ No newline at end of file diff --git a/src/shared/base_responses.py b/src/shared/base_responses.py new file mode 100644 index 0000000..1ff7d12 --- /dev/null +++ b/src/shared/base_responses.py @@ -0,0 +1,55 @@ +from typing import Any, Dict, Optional +from pydantic import BaseModel +from shared.base_contextvars import ctx_trace_id +from fastapi.responses import JSONResponse +from shared.base_internal_errors import ErrorCodes +import fastapi +import json + +class EnvelopeResponse(BaseModel): + success: bool + message: str + data: Dict[str, Any] | None = None + trace_id: str | None = None + +class ErrorDetailResponse(BaseModel): + internal_error: Dict[str, Any] + details: Dict[str, Any] + + @staticmethod + def from_error_code(error_code: ErrorCodes, details: Optional[Dict[str, Any]] = None) -> 'ErrorDetailResponse': + return ErrorDetailResponse( + internal_error={ + "code": error_code.value, + "description": error_code.description, + }, + details=details or {} + ).model_dump() + +def create_response_for_fast_api( + status_code_http: int = fastapi.status.HTTP_200_OK, + data: Any = None, + error_code: Optional[ErrorCodes] = ErrorCodes.UNKNOW, + message: Optional[str] = None +) -> JSONResponse: + success = 200 <= status_code_http < 300 + message = message or ("Operation successful" if success else "An error occurred") + + if isinstance(data, BaseModel): + data = data.model_dump_json() + data = json.loads(data) + + if not success: + data = 
ErrorDetailResponse.from_error_code(error_code=error_code, details=data) + + envelope_response = EnvelopeResponse( + success=success, + message=message, + data=data, + trace_id=ctx_trace_id.get() + ) + + return JSONResponse( + content=envelope_response.model_dump(), + status_code=status_code_http + ) \ No newline at end of file From df398a6dcae11a5aa7afb1c20cb6b7c7402c791f Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Mon, 19 May 2025 21:50:38 -0600 Subject: [PATCH 26/36] feat: add endpoint ruy --- src/api/v1/endpoints.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/api/v1/endpoints.py b/src/api/v1/endpoints.py index 9bcc618..609e8da 100644 --- a/src/api/v1/endpoints.py +++ b/src/api/v1/endpoints.py @@ -62,4 +62,11 @@ async def delete_book(book_id: int) -> None: logger.info(f"Successfully deleted book with ID: {book_id}") return logger.error(f"Book with ID {book_id} not found for deletion") - raise HTTPException(status_code=404, detail="Book not found") \ No newline at end of file + raise HTTPException(status_code=404, detail="Book not found") + + + +@router.get("/ruy") +async def get_ruy(): + logger.info("Received request to get Ruy") + return {"name": "ruy"} From 53dcfd4052f30eb71927f060b0254c5b4e6c4eb8 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Wed, 28 May 2025 11:09:30 -0600 Subject: [PATCH 27/36] fix: delete test endpoint --- src/api/v1/endpoints.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/src/api/v1/endpoints.py b/src/api/v1/endpoints.py index 609e8da..77fa21b 100644 --- a/src/api/v1/endpoints.py +++ b/src/api/v1/endpoints.py @@ -63,10 +63,3 @@ async def delete_book(book_id: int) -> None: return logger.error(f"Book with ID {book_id} not found for deletion") raise HTTPException(status_code=404, detail="Book not found") - - - -@router.get("/ruy") -async def get_ruy(): - logger.info("Received request to get Ruy") - return {"name": "ruy"} From 37c0fe4162efc0a3422dc3c46975b42488fb0229 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Wed, 28 May 2025 11:30:16 -0600 Subject: [PATCH 28/36] chore: resolve call services from swagger --- src/core/settings/base.py | 1 + src/main.py | 16 ++++++++++++---- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/src/core/settings/base.py b/src/core/settings/base.py index f131979..cbcd203 100644 --- a/src/core/settings/base.py +++ b/src/core/settings/base.py @@ -32,6 +32,7 @@ class Settings(BaseSettings): default=APP_ENVIRONMENT, validate_default=True ) + ROOT_PATH: str | None = "" SENTRY_DSN: str | None = None diff --git a/src/main.py b/src/main.py index f383741..c645e31 100644 --- a/src/main.py +++ b/src/main.py @@ -6,20 +6,28 @@ from api.routers import api_v1_router from api.endpoints import index_router from typing import Any +from core.settings import settings -app = FastAPI(root_path=os.getenv("ROOT_PATH","")) +app = FastAPI( + title=settings.PROJECT.NAME, + version=settings.PROJECT.VERSION, + description=settings.PROJECT.DESCRIPTION, + root_path=settings.ROOT_PATH, + middleware=[] +) def custom_openapi() -> dict[str, Any]: if app.openapi_schema: return app.openapi_schema openapi_schema = get_openapi( - title="Books API", - version="1.0.0", - description="A sample API for books", + title=app.title, + version=app.version, + description=app.description, routes=app.routes, ) openapi_schema["openapi"] = "3.0.3" + openapi_schema["servers"] = [{"url": settings.ROOT_PATH}] app.openapi_schema = openapi_schema return app.openapi_schema From 2a886077fa0286f7a262e62c22de89c025773e9c Mon Sep 17 
00:00:00 2001 From: ronihdzz Date: Wed, 28 May 2025 11:37:28 -0600 Subject: [PATCH 29/36] fix: test_openapi_schema --- src/core/settings/testing.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/core/settings/testing.py b/src/core/settings/testing.py index bc78d1f..49f5ef7 100644 --- a/src/core/settings/testing.py +++ b/src/core/settings/testing.py @@ -1,4 +1,15 @@ from core.settings.base import Settings +from core.settings.base import ProjectSettings +from pydantic import Field class TestingSettings(Settings): - pass \ No newline at end of file + PROJECT: ProjectSettings = Field( + default=ProjectSettings( + NAME="Books API", + DESCRIPTION="API implemented with FastAPI", + VERSION="1.0.0", + CODE="api-001", + AUTHORS="R2" + ), + validate_default=True + ) \ No newline at end of file From 496247813f62820575ac1e793d80a0895f7d6889 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Wed, 28 May 2025 13:35:09 -0600 Subject: [PATCH 30/36] feat: change settings internal_codes --- src/core/internal_codes.py | 7 +++++++ src/shared/base_exceptions.py | 18 +++++------------ src/shared/base_internal_codes.py | 30 ++++++++++++++++++++++++++++ src/shared/base_internal_errors.py | 16 --------------- src/shared/base_responses.py | 32 ++++++++++++++++++++---------- 5 files changed, 64 insertions(+), 39 deletions(-) create mode 100644 src/core/internal_codes.py create mode 100644 src/shared/base_internal_codes.py delete mode 100644 src/shared/base_internal_errors.py diff --git a/src/core/internal_codes.py b/src/core/internal_codes.py new file mode 100644 index 0000000..ff782eb --- /dev/null +++ b/src/core/internal_codes.py @@ -0,0 +1,7 @@ +from shared.base_internal_codes import InternalCodeBase + +class InternalCodesApiBook(InternalCodeBase): + BOOK_API_ERROR = 1000, "Book API error" + BOOK_NOT_FOUND = 1001, "Book not found" + + \ No newline at end of file diff --git a/src/shared/base_exceptions.py b/src/shared/base_exceptions.py index 04b4962..9e29c33 100644 --- a/src/shared/base_exceptions.py +++ b/src/shared/base_exceptions.py @@ -1,17 +1,16 @@ import fastapi -from typing import Optional, Any, Dict from loguru import logger -from shared.base_internal_errors import ErrorCodes +from shared.base_internal_codes import CommonInternalCode, InternalCode class BaseApiRestException(Exception): GENERAL_STATUS_CODE_HTTP = fastapi.status.HTTP_400_BAD_REQUEST - GENERAL_ERROR_CODE = ErrorCodes.UNKNOW + GENERAL_ERROR_CODE = CommonInternalCode.UNKNOWN def __init__(self, status_code_http: int = None, - error_code: ErrorCodes = None, - message: Optional[str] = None, - data: Optional[Dict[str, Any]] = None): + error_code: InternalCode = None, + message: str | None = None, + data: dict[str, any] | None = None): super().__init__(message) self.status_code_http = status_code_http if status_code_http else self.GENERAL_STATUS_CODE_HTTP self.error_code = error_code if error_code else self.GENERAL_ERROR_CODE @@ -22,10 +21,3 @@ def __init__(self, def __str__(self): return f"[{self.status_code_http}] {self.error_code.description}: {self.message}" - - -class UserException(BaseApiRestException): - GENERAL_ERROR_CODE = ErrorCodes.USER_ERROR - -class TokenException(BaseApiRestException): - GENERAL_ERROR_CODE = ErrorCodes.TOKEN_ERROR \ No newline at end of file diff --git a/src/shared/base_internal_codes.py b/src/shared/base_internal_codes.py new file mode 100644 index 0000000..a7d89f1 --- /dev/null +++ b/src/shared/base_internal_codes.py @@ -0,0 +1,30 @@ +from enum import IntEnum +from typing import 
Protocol, runtime_checkable + + +@runtime_checkable +class InternalCode(Protocol): + value: int + description: str + + def to_dict(self) -> dict[str, any]: ... + + +class InternalCodeBase(IntEnum): + def __new__(cls, value: int, description: str): + obj = int.__new__(cls, value) + obj._value_ = value + obj._description = description + return obj + + @property + def description(self) -> str: + return self._description + + def to_dict(self) -> dict: + return {"code": int(self), "description": self.description} + + +class CommonInternalCode(InternalCodeBase): + UNKNOWN = 100, "Unknown error" + PYDANTIC_VALIDATIONS_REQUEST = 8001, "Failed Pydantic validations on request" diff --git a/src/shared/base_internal_errors.py b/src/shared/base_internal_errors.py deleted file mode 100644 index 9ab39a5..0000000 --- a/src/shared/base_internal_errors.py +++ /dev/null @@ -1,16 +0,0 @@ -from enum import Enum - -class ErrorCodes(Enum): - UNKNOW = 100, "Unknown Error" - PYDANTIC_VALIDATIONS_REQUEST = 8001, "Failed pydantic validations on request" - - - def __new__(cls, value: int, description: str) -> 'ErrorCodes': - obj = object.__new__(cls) - obj._value_ = value - obj._description = description - return obj - - @property - def description(self) -> str: - return self._description \ No newline at end of file diff --git a/src/shared/base_responses.py b/src/shared/base_responses.py index 1ff7d12..c0cb17a 100644 --- a/src/shared/base_responses.py +++ b/src/shared/base_responses.py @@ -1,23 +1,26 @@ -from typing import Any, Dict, Optional +from typing import TypeVar, Any from pydantic import BaseModel from shared.base_contextvars import ctx_trace_id from fastapi.responses import JSONResponse -from shared.base_internal_errors import ErrorCodes import fastapi -import json +import json +from shared.base_internal_codes import InternalCode +from shared.base_internal_codes import CommonInternalCode as CC + +T = TypeVar("T", bound=InternalCode) class EnvelopeResponse(BaseModel): success: bool message: str - data: Dict[str, Any] | None = None + data: dict[str, Any] | list | None = None trace_id: str | None = None class ErrorDetailResponse(BaseModel): - internal_error: Dict[str, Any] - details: Dict[str, Any] + internal_error: dict[str, Any] + details: dict[str, Any] @staticmethod - def from_error_code(error_code: ErrorCodes, details: Optional[Dict[str, Any]] = None) -> 'ErrorDetailResponse': + def from_error_code(error_code: T | None = CC.UNKNOWN, details: dict[str, Any] | None = None) -> 'ErrorDetailResponse': return ErrorDetailResponse( internal_error={ "code": error_code.value, @@ -29,13 +32,22 @@ def from_error_code(error_code: ErrorCodes, details: Optional[Dict[str, Any]] = def create_response_for_fast_api( status_code_http: int = fastapi.status.HTTP_200_OK, data: Any = None, - error_code: Optional[ErrorCodes] = ErrorCodes.UNKNOW, - message: Optional[str] = None + error_code: T | None = CC.UNKNOWN, + message: str | None = None ) -> JSONResponse: success = 200 <= status_code_http < 300 message = message or ("Operation successful" if success else "An error occurred") - if isinstance(data, BaseModel): + if isinstance(data,list): + if len(data) == 0: + data = None + else: + first_element = data[0] + if isinstance(first_element,BaseModel): + data = [element.model_dump() for element in data] + + + elif isinstance(data, BaseModel): data = data.model_dump_json() data = json.loads(data) From 95d95b76e7a3dd0df13faed636b46e51554d10f0 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Wed, 28 May 2025 13:35:46 -0600 Subject: [PATCH 
31/36] feat: add catcher for standard error responses
---
 src/shared/middlewares/__init__.py           |  4 ++
 src/shared/middlewares/catcher_exceptions.py | 42 +++++++++++++++++++
 .../middlewares/catcher_pydantic_errors.py   | 24 +++++++++++
 3 files changed, 70 insertions(+)
 create mode 100644 src/shared/middlewares/__init__.py
 create mode 100644 src/shared/middlewares/catcher_exceptions.py
 create mode 100644 src/shared/middlewares/catcher_pydantic_errors.py

diff --git a/src/shared/middlewares/__init__.py b/src/shared/middlewares/__init__.py
new file mode 100644
index 0000000..2d168b6
--- /dev/null
+++ b/src/shared/middlewares/__init__.py
@@ -0,0 +1,4 @@
+from .catcher_exceptions import CatcherExceptions
+from .catcher_pydantic_errors import CatcherExceptionsPydantic
+
+__all__ = ["CatcherExceptions", "CatcherExceptionsPydantic"]
diff --git a/src/shared/middlewares/catcher_exceptions.py b/src/shared/middlewares/catcher_exceptions.py
new file mode 100644
index 0000000..5afb23f
--- /dev/null
+++ b/src/shared/middlewares/catcher_exceptions.py
@@ -0,0 +1,42 @@
+from fastapi import HTTPException, Request, status
+from fastapi.responses import JSONResponse
+from sqlalchemy.orm.exc import NoResultFound
+from starlette.middleware.base import BaseHTTPMiddleware
+
+from shared.base_exceptions import BaseApiRestException
+from shared.base_responses import create_response_for_fast_api
+from shared.base_internal_codes import CommonInternalCode
+
+class CatcherExceptions(BaseHTTPMiddleware):
+    def __init__(self, app):
+        super().__init__(app)
+
+    async def dispatch(self, request: Request, call_next):
+        try:
+            return await call_next(request)
+        except Exception as e:  # noqa: BLE001
+            internal_error = CommonInternalCode.UNKNOWN
+            error_message = None
+            error_data = None
+            status_code_http = status.HTTP_500_INTERNAL_SERVER_ERROR
+            if isinstance(e, HTTPException):
+                error_data = {"detail": str(e.detail)}
+                status_code_http = e.status_code
+            elif isinstance(e, NoResultFound):
+                error_data = {"detail": f"Not found: {e}"}
+                status_code_http = status.HTTP_404_NOT_FOUND
+            elif isinstance(e, BaseApiRestException):
+                error_data = e.data
+                error_message = e.message
+                status_code_http = e.status_code_http
+                internal_error = e.error_code
+            else:
+                error_data = {"detail": str(e)}
+                status_code_http = status.HTTP_500_INTERNAL_SERVER_ERROR
+
+            return create_response_for_fast_api(
+                status_code_http=status_code_http,
+                data=error_data,
+                error_code=internal_error,
+                message=error_message
+            )
\ No newline at end of file
diff --git a/src/shared/middlewares/catcher_pydantic_errors.py b/src/shared/middlewares/catcher_pydantic_errors.py
new file mode 100644
index 0000000..47c2905
--- /dev/null
+++ b/src/shared/middlewares/catcher_pydantic_errors.py
@@ -0,0 +1,24 @@
+from collections import defaultdict
+
+from fastapi import FastAPI, status
+from fastapi.exceptions import RequestValidationError
+from fastapi.requests import Request
+from shared.base_internal_codes import CommonInternalCode
+
+from shared.base_responses import create_response_for_fast_api
+
+
+def CatcherExceptionsPydantic(app: FastAPI):
+    @app.exception_handler(RequestValidationError)
+    async def validate(_: Request, exc: Exception):
+        error_detail = defaultdict(list)
+        for error in exc.errors():
+            field = error["loc"][1] if "loc" in error else None
+            error_msg = error["msg"]
+            error_detail[field].append(error_msg)
+
+        return create_response_for_fast_api(
+            data=error_detail,
+            status_code_http=status.HTTP_400_BAD_REQUEST,
+            
error_code=CommonInternalCode.PYDANTIC_VALIDATIONS_REQUEST + ) \ No newline at end of file From c8c2df10f8f16475666d621ce8a670d74bbd1d20 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Wed, 28 May 2025 13:36:48 -0600 Subject: [PATCH 32/36] feat: apply standars responses,exceptions,internal_codes to api --- src/api/v1/endpoints.py | 80 +++++++++++++++++++++++++++++++---------- src/core/exceptions.py | 7 ++++ src/main.py | 11 ++++-- 3 files changed, 77 insertions(+), 21 deletions(-) create mode 100644 src/core/exceptions.py diff --git a/src/api/v1/endpoints.py b/src/api/v1/endpoints.py index 77fa21b..d3eed73 100644 --- a/src/api/v1/endpoints.py +++ b/src/api/v1/endpoints.py @@ -1,10 +1,11 @@ -from fastapi import HTTPException from api.v1.schema import Book, BookCreate from loguru import logger - +from shared.base_responses import create_response_for_fast_api,EnvelopeResponse +from core.exceptions import BookException from fastapi import ( APIRouter, ) +from fastapi import Response router = APIRouter(prefix="/books",tags=["Books"]) @@ -15,43 +16,77 @@ # Get all books -@router.get("", response_model=list[Book]) -async def get_books() -> list[Book]: +@router.get("", response_model=EnvelopeResponse) +async def get_books(): logger.info("Retrieving all books") - return books_db + return create_response_for_fast_api( + data=books_db or None, + status_code_http=200 + ) -@router.post("", response_model=Book, status_code=201) -async def create_book(book: BookCreate) -> Book: +# Get all books +@router.get("", response_model=EnvelopeResponse) +async def get_books() -> EnvelopeResponse: + logger.info("Retrieving all books") + return create_response_for_fast_api( + data=books_db, + status_code_http=200 + ) + +@router.post("", response_model=EnvelopeResponse) +async def create_book(book: BookCreate) -> EnvelopeResponse: global counter_id counter_id += 1 new_book = Book(id=counter_id, **book.model_dump()) books_db.append(new_book) logger.info(f"Created new book with ID: {counter_id}") - return new_book + return create_response_for_fast_api( + data=new_book, + status_code_http=201 + ) # Get book by ID -@router.get("/{book_id}", response_model=Book) -async def get_book(book_id: int) -> Book: +@router.get("/{book_id}", response_model=EnvelopeResponse) +async def get_book(book_id: int) -> EnvelopeResponse: logger.info(f"Retrieving book with ID: {book_id}") for book in books_db: if book.id == book_id: - return book + return create_response_for_fast_api( + data=book, + status_code_http=201 + ) logger.error(f"Book with ID {book_id} not found") - raise HTTPException(status_code=404, detail="Book not found") + raise BookException( + message=f"Book with ID {book_id} not found", + data={ + "payload": { + "book_id": book_id + } + } + ) # Update a book -@router.put("/{book_id}", response_model=Book) -async def update_book(book_id: int, updated: BookCreate) -> Book: +@router.put("/{book_id}", response_model=EnvelopeResponse) +async def update_book(book_id: int, updated: BookCreate) -> EnvelopeResponse: logger.info(f"Attempting to update book with ID: {book_id}") for i, book in enumerate(books_db): if book.id == book_id: updated_book = Book(id=book_id, **updated.model_dump()) books_db[i] = updated_book logger.info(f"Successfully updated book with ID: {book_id}") - return updated_book + return create_response_for_fast_api( + data=updated_book, + status_code_http=200 + ) logger.error(f"Book with ID {book_id} not found for update") - raise HTTPException(status_code=404, detail="Book not found") - + raise BookException( + 
message=f"Book with ID {book_id} not found for update", + data={ + "payload": { + "book_id": book_id + } + } + ) # Delete a book @router.delete("/{book_id}", status_code=204) async def delete_book(book_id: int) -> None: @@ -60,6 +95,13 @@ async def delete_book(book_id: int) -> None: if book.id == book_id: books_db.pop(i) logger.info(f"Successfully deleted book with ID: {book_id}") - return + return Response(status_code=204) logger.error(f"Book with ID {book_id} not found for deletion") - raise HTTPException(status_code=404, detail="Book not found") + raise BookException( + message=f"Book with ID {book_id} not found for deletion", + data={ + "payload": { + "book_id": book_id + } + } + ) \ No newline at end of file diff --git a/src/core/exceptions.py b/src/core/exceptions.py new file mode 100644 index 0000000..26dbd4c --- /dev/null +++ b/src/core/exceptions.py @@ -0,0 +1,7 @@ +from shared.base_exceptions import BaseApiRestException +from core.internal_codes import InternalCodesApiBook +import fastapi + +class BookException(BaseApiRestException): + GENERAL_ERROR_CODE = InternalCodesApiBook.BOOK_API_ERROR + GENERAL_STATUS_CODE_HTTP = fastapi.status.HTTP_400_BAD_REQUEST \ No newline at end of file diff --git a/src/main.py b/src/main.py index c645e31..c44f3b4 100644 --- a/src/main.py +++ b/src/main.py @@ -7,13 +7,20 @@ from api.endpoints import index_router from typing import Any from core.settings import settings +from shared.middlewares import ( + CatcherExceptions, + CatcherExceptionsPydantic +) +from fastapi.middleware import Middleware app = FastAPI( title=settings.PROJECT.NAME, version=settings.PROJECT.VERSION, description=settings.PROJECT.DESCRIPTION, root_path=settings.ROOT_PATH, - middleware=[] + middleware=[ + Middleware(CatcherExceptions) + ] ) @@ -34,5 +41,5 @@ def custom_openapi() -> dict[str, Any]: app.openapi = custom_openapi # type: ignore app.include_router(api_v1_router) app.include_router(index_router) - +CatcherExceptionsPydantic(app) handler = Mangum(app) From f158461b455e925de71ee58d10f2bcf5d19a1c6e Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Wed, 28 May 2025 14:31:42 -0600 Subject: [PATCH 33/36] feat: add repositories to save data in posgresql database --- src/api/v1/endpoints.py | 142 +++++++++--------------- src/api/v1/repositories.py | 49 ++++++++ src/api/v1/schema.py | 17 ++- src/db/posgresql/base.py | 8 +- src/db/posgresql/models/public/books.py | 5 +- src/shared/base_responses.py | 4 +- 6 files changed, 124 insertions(+), 101 deletions(-) create mode 100644 src/api/v1/repositories.py diff --git a/src/api/v1/endpoints.py b/src/api/v1/endpoints.py index d3eed73..17e2bc9 100644 --- a/src/api/v1/endpoints.py +++ b/src/api/v1/endpoints.py @@ -1,107 +1,69 @@ -from api.v1.schema import Book, BookCreate +from api.v1.schema import BookSchema, BookCreateSchema from loguru import logger -from shared.base_responses import create_response_for_fast_api,EnvelopeResponse +from shared.base_responses import create_response_for_fast_api, EnvelopeResponse from core.exceptions import BookException -from fastapi import ( - APIRouter, -) -from fastapi import Response +from fastapi import APIRouter, Response +from api.v1.repositories import BookRepository +from uuid import UUID -router = APIRouter(prefix="/books",tags=["Books"]) +router = APIRouter(prefix="/books", tags=["Books"]) -# Simulated database -books_db: list[Book] = [] -counter_id = 0 - - -# Get all books -@router.get("", response_model=EnvelopeResponse) -async def get_books(): - logger.info("Retrieving all books") - return 
create_response_for_fast_api( - data=books_db or None, - status_code_http=200 - ) - -# Get all books @router.get("", response_model=EnvelopeResponse) async def get_books() -> EnvelopeResponse: logger.info("Retrieving all books") - return create_response_for_fast_api( - data=books_db, - status_code_http=200 - ) + success, list_books = BookRepository.get_all() + list_books_schema = [BookSchema(**book.to_dict()) for book in list_books] + return create_response_for_fast_api(data=list_books_schema if success else None) + @router.post("", response_model=EnvelopeResponse) -async def create_book(book: BookCreate) -> EnvelopeResponse: - global counter_id - counter_id += 1 - new_book = Book(id=counter_id, **book.model_dump()) - books_db.append(new_book) - logger.info(f"Created new book with ID: {counter_id}") - return create_response_for_fast_api( - data=new_book, - status_code_http=201 - ) +async def create_book(book: BookCreateSchema) -> EnvelopeResponse: + logger.info("Creating new book") + success, new_book = BookRepository.create(book) + if not success: + logger.error("Book creation failed") + raise BookException(message="Failed to create book") + logger.info(f"Book created with ID: {new_book.id}") + return create_response_for_fast_api(data=BookSchema(**new_book.to_dict()), status_code_http=201) + -# Get book by ID @router.get("/{book_id}", response_model=EnvelopeResponse) -async def get_book(book_id: int) -> EnvelopeResponse: +async def get_book(book_id: UUID) -> EnvelopeResponse: logger.info(f"Retrieving book with ID: {book_id}") - for book in books_db: - if book.id == book_id: - return create_response_for_fast_api( - data=book, - status_code_http=201 - ) - logger.error(f"Book with ID {book_id} not found") - raise BookException( - message=f"Book with ID {book_id} not found", - data={ - "payload": { - "book_id": book_id - } - } - ) + success, book = BookRepository.get_by_id(book_id) + if not success: + logger.error(f"Book with ID {book_id} not found") + raise BookException( + message=f"Book with ID {book_id} not found", + data={"payload": {"book_id": str(book_id)}} + ) + return create_response_for_fast_api(data=BookSchema(**book.to_dict())) + -# Update a book @router.put("/{book_id}", response_model=EnvelopeResponse) -async def update_book(book_id: int, updated: BookCreate) -> EnvelopeResponse: - logger.info(f"Attempting to update book with ID: {book_id}") - for i, book in enumerate(books_db): - if book.id == book_id: - updated_book = Book(id=book_id, **updated.model_dump()) - books_db[i] = updated_book - logger.info(f"Successfully updated book with ID: {book_id}") - return create_response_for_fast_api( - data=updated_book, - status_code_http=200 - ) - logger.error(f"Book with ID {book_id} not found for update") - raise BookException( - message=f"Book with ID {book_id} not found for update", - data={ - "payload": { - "book_id": book_id - } - } - ) -# Delete a book +async def update_book(book_id: UUID, updated: BookCreateSchema) -> EnvelopeResponse: + logger.info(f"Updating book with ID: {book_id}") + success, updated_book = BookRepository.update(book_id, updated) + if not success: + logger.error(f"Book with ID {book_id} not found for update") + raise BookException( + message=f"Book with ID {book_id} not found for update", + data={"payload": {"book_id": str(book_id)}} + ) + logger.info(f"Successfully updated book with ID: {book_id}") + return create_response_for_fast_api(data=BookSchema(**updated_book.to_dict())) + + @router.delete("/{book_id}", status_code=204) -async def delete_book(book_id: 
int) -> None: +async def delete_book(book_id: UUID) -> None: logger.info(f"Attempting to delete book with ID: {book_id}") - for i, book in enumerate(books_db): - if book.id == book_id: - books_db.pop(i) - logger.info(f"Successfully deleted book with ID: {book_id}") - return Response(status_code=204) - logger.error(f"Book with ID {book_id} not found for deletion") - raise BookException( - message=f"Book with ID {book_id} not found for deletion", - data={ - "payload": { - "book_id": book_id - } - } - ) \ No newline at end of file + success, _ = BookRepository.delete(book_id) + if not success: + logger.error(f"Book with ID {book_id} not found for deletion") + raise BookException( + message=f"Book with ID {book_id} not found for deletion", + data={"payload": {"book_id": str(book_id)}} + ) + logger.info(f"Successfully deleted book with ID: {book_id}") + return Response(status_code=204) diff --git a/src/api/v1/repositories.py b/src/api/v1/repositories.py new file mode 100644 index 0000000..e59ac07 --- /dev/null +++ b/src/api/v1/repositories.py @@ -0,0 +1,49 @@ +from db.posgresql import get_db_context +from db.posgresql.models.public import Book +from api.v1.schema import BookCreateSchema +from sqlalchemy import select + +class BookRepository: + + @staticmethod + def get_all() -> tuple[bool, list[Book]]: + with get_db_context() as session: + books = session.scalars(select(Book)).all() + return True, books + + @staticmethod + def get_by_id(book_id: int) -> tuple[bool, Book | None]: + with get_db_context() as session: + book = session.get(Book, book_id) + return (True, book) if book else (False, None) + + @staticmethod + def create(book_create: BookCreateSchema) -> tuple[bool, Book]: + with get_db_context() as session: + new_book = Book(**book_create.model_dump()) + session.add(new_book) + session.commit() + session.refresh(new_book) + return True, new_book + + @staticmethod + def update(book_id: int, book_update: BookCreateSchema) -> tuple[bool, Book | None]: + with get_db_context() as session: + book = session.get(Book, book_id) + if not book: + return False, None + for key, value in book_update.model_dump().items(): + setattr(book, key, value) + session.commit() + session.refresh(book) + return True, book + + @staticmethod + def delete(book_id: int) -> tuple[bool, None]: + with get_db_context() as session: + book = session.get(Book, book_id) + if not book: + return False, None + session.delete(book) + session.commit() + return True, None diff --git a/src/api/v1/schema.py b/src/api/v1/schema.py index 7b986c3..c69def7 100644 --- a/src/api/v1/schema.py +++ b/src/api/v1/schema.py @@ -1,15 +1,24 @@ from pydantic import BaseModel +from db.posgresql.models.public import BookType +from uuid import UUID # Data model -class Book(BaseModel): - id: int +class BookSchema(BaseModel): + id: UUID title: str author: str year: int + type: BookType + class Config: + allow_population_by_field_name = False + json_encoders = { + UUID: lambda v: str(v), + } # Create a new book -class BookCreate(BaseModel): +class BookCreateSchema(BaseModel): title: str author: str - year: int \ No newline at end of file + year: int + type: BookType \ No newline at end of file diff --git a/src/db/posgresql/base.py b/src/db/posgresql/base.py index bf35622..1701886 100644 --- a/src/db/posgresql/base.py +++ b/src/db/posgresql/base.py @@ -24,4 +24,10 @@ def updated_at(cls): @declared_attr def deleted_at(cls): - return Column(DateTime, nullable=True) \ No newline at end of file + return Column(DateTime, nullable=True) + + def to_dict(self): 
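+        # Serializes the mapped table columns only; relationships are not included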
+ return { + column.name: getattr(self, column.name) + for column in self.__table__.columns + } \ No newline at end of file diff --git a/src/db/posgresql/models/public/books.py b/src/db/posgresql/models/public/books.py index 5302864..b58d8d3 100644 --- a/src/db/posgresql/models/public/books.py +++ b/src/db/posgresql/models/public/books.py @@ -10,7 +10,4 @@ class Book(Base, BaseModel): title: str = Column(String, nullable=False) author: str = Column(String, nullable=False) year: int = Column(Integer, nullable=False) - type: BookType = Column(Enum(BookType), nullable=False) - - - \ No newline at end of file + type: BookType = Column(Enum(BookType), nullable=False) \ No newline at end of file diff --git a/src/shared/base_responses.py b/src/shared/base_responses.py index c0cb17a..68eea48 100644 --- a/src/shared/base_responses.py +++ b/src/shared/base_responses.py @@ -44,11 +44,11 @@ def create_response_for_fast_api( else: first_element = data[0] if isinstance(first_element,BaseModel): - data = [element.model_dump() for element in data] + data = [element.model_dump(mode="json") for element in data] elif isinstance(data, BaseModel): - data = data.model_dump_json() + data = data.model_dump_json(mode="json") data = json.loads(data) if not success: From 848c69f4f875ee5bd17c354105e6c0389bdaccaf Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Wed, 28 May 2025 15:06:50 -0600 Subject: [PATCH 34/36] refactor: add services.py --- src/api/v1/endpoints.py | 51 +++++++++++--------------------- src/api/v1/services.py | 57 ++++++++++++++++++++++++++++++++++++ src/shared/base_responses.py | 2 +- 3 files changed, 75 insertions(+), 35 deletions(-) create mode 100644 src/api/v1/services.py diff --git a/src/api/v1/endpoints.py b/src/api/v1/endpoints.py index 17e2bc9..e4f08f8 100644 --- a/src/api/v1/endpoints.py +++ b/src/api/v1/endpoints.py @@ -1,10 +1,15 @@ -from api.v1.schema import BookSchema, BookCreateSchema +from api.v1.schema import BookCreateSchema from loguru import logger from shared.base_responses import create_response_for_fast_api, EnvelopeResponse -from core.exceptions import BookException from fastapi import APIRouter, Response -from api.v1.repositories import BookRepository from uuid import UUID +from api.v1.services import ( + BooksListService, + BookCreateService, + BookRetrieveService, + BookUpdateService, + BookDeleteService, +) router = APIRouter(prefix="/books", tags=["Books"]) @@ -12,58 +17,36 @@ @router.get("", response_model=EnvelopeResponse) async def get_books() -> EnvelopeResponse: logger.info("Retrieving all books") - success, list_books = BookRepository.get_all() - list_books_schema = [BookSchema(**book.to_dict()) for book in list_books] - return create_response_for_fast_api(data=list_books_schema if success else None) + books = BooksListService.list() + return create_response_for_fast_api(data=books) @router.post("", response_model=EnvelopeResponse) async def create_book(book: BookCreateSchema) -> EnvelopeResponse: logger.info("Creating new book") - success, new_book = BookRepository.create(book) - if not success: - logger.error("Book creation failed") - raise BookException(message="Failed to create book") + new_book = BookCreateService.create(book) logger.info(f"Book created with ID: {new_book.id}") - return create_response_for_fast_api(data=BookSchema(**new_book.to_dict()), status_code_http=201) + return create_response_for_fast_api(data=new_book, status_code_http=201) @router.get("/{book_id}", response_model=EnvelopeResponse) async def get_book(book_id: UUID) -> EnvelopeResponse: 
logger.info(f"Retrieving book with ID: {book_id}") - success, book = BookRepository.get_by_id(book_id) - if not success: - logger.error(f"Book with ID {book_id} not found") - raise BookException( - message=f"Book with ID {book_id} not found", - data={"payload": {"book_id": str(book_id)}} - ) - return create_response_for_fast_api(data=BookSchema(**book.to_dict())) + book = BookRetrieveService.retrieve(book_id) + return create_response_for_fast_api(data=book) @router.put("/{book_id}", response_model=EnvelopeResponse) async def update_book(book_id: UUID, updated: BookCreateSchema) -> EnvelopeResponse: logger.info(f"Updating book with ID: {book_id}") - success, updated_book = BookRepository.update(book_id, updated) - if not success: - logger.error(f"Book with ID {book_id} not found for update") - raise BookException( - message=f"Book with ID {book_id} not found for update", - data={"payload": {"book_id": str(book_id)}} - ) + updated_book = BookUpdateService.update(book_id, updated) logger.info(f"Successfully updated book with ID: {book_id}") - return create_response_for_fast_api(data=BookSchema(**updated_book.to_dict())) + return create_response_for_fast_api(data=updated_book) @router.delete("/{book_id}", status_code=204) async def delete_book(book_id: UUID) -> None: logger.info(f"Attempting to delete book with ID: {book_id}") - success, _ = BookRepository.delete(book_id) - if not success: - logger.error(f"Book with ID {book_id} not found for deletion") - raise BookException( - message=f"Book with ID {book_id} not found for deletion", - data={"payload": {"book_id": str(book_id)}} - ) + BookDeleteService.delete(book_id) logger.info(f"Successfully deleted book with ID: {book_id}") return Response(status_code=204) diff --git a/src/api/v1/services.py b/src/api/v1/services.py new file mode 100644 index 0000000..bfece7f --- /dev/null +++ b/src/api/v1/services.py @@ -0,0 +1,57 @@ +from api.v1.repositories import BookRepository +from api.v1.schema import BookSchema, BookCreateSchema +from core.exceptions import BookException +from uuid import UUID + + +class BooksListService: + @staticmethod + def list() -> list[BookSchema]: + success, list_books = BookRepository.get_all() + if not success: + raise BookException(message="Failed to retrieve books") + return [BookSchema(**book.to_dict()) for book in list_books] + + +class BookCreateService: + @staticmethod + def create(book_data: BookCreateSchema) -> BookSchema: + success, new_book = BookRepository.create(book_data) + if not success: + raise BookException(message="Failed to create book") + return BookSchema(**new_book.to_dict()) + + +class BookRetrieveService: + @staticmethod + def retrieve(book_id: UUID) -> BookSchema: + success, book = BookRepository.get_by_id(book_id) + if not success: + raise BookException( + message=f"Book with ID {book_id} not found", + data={"payload": {"book_id": str(book_id)}} + ) + return BookSchema(**book.to_dict()) + + +class BookUpdateService: + @staticmethod + def update(book_id: UUID, book_data: BookCreateSchema) -> BookSchema: + success, updated_book = BookRepository.update(book_id, book_data) + if not success: + raise BookException( + message=f"Book with ID {book_id} not found for update", + data={"payload": {"book_id": str(book_id)}} + ) + return BookSchema(**updated_book.to_dict()) + + +class BookDeleteService: + @staticmethod + def delete(book_id: UUID) -> None: + success, _ = BookRepository.delete(book_id) + if not success: + raise BookException( + message=f"Book with ID {book_id} not found for deletion", + 
data={"payload": {"book_id": str(book_id)}} + ) diff --git a/src/shared/base_responses.py b/src/shared/base_responses.py index 68eea48..e0e17b5 100644 --- a/src/shared/base_responses.py +++ b/src/shared/base_responses.py @@ -48,7 +48,7 @@ def create_response_for_fast_api( elif isinstance(data, BaseModel): - data = data.model_dump_json(mode="json") + data = data.model_dump_json() data = json.loads(data) if not success: From fddce0330a9efbd1f8ffd6a9ffa197e149b5a356 Mon Sep 17 00:00:00 2001 From: ronihdzz Date: Wed, 28 May 2025 15:20:22 -0600 Subject: [PATCH 35/36] feat: add tests --- src/api/v1/schema.py | 12 ++-- src/db/mongo/base.py | 13 ++-- src/tests/__init__.py | 3 +- src/tests/test_init.py | 108 --------------------------------- src/tests/test_repositories.py | 38 ++++++++++++ src/tests/test_responses.py | 17 ++++++ src/tests/test_services.py | 51 ++++++++++++++++ src/tests/tests_endpoints.py | 94 ++++++++++++++++++++++++++++ src/tests/utils.py | 50 +++++++++++++++ 9 files changed, 265 insertions(+), 121 deletions(-) delete mode 100644 src/tests/test_init.py create mode 100644 src/tests/test_repositories.py create mode 100644 src/tests/test_responses.py create mode 100644 src/tests/test_services.py create mode 100644 src/tests/tests_endpoints.py create mode 100644 src/tests/utils.py diff --git a/src/api/v1/schema.py b/src/api/v1/schema.py index c69def7..d020da4 100644 --- a/src/api/v1/schema.py +++ b/src/api/v1/schema.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict, field_serializer from db.posgresql.models.public import BookType from uuid import UUID @@ -10,11 +10,11 @@ class BookSchema(BaseModel): year: int type: BookType - class Config: - allow_population_by_field_name = False - json_encoders = { - UUID: lambda v: str(v), - } + model_config = ConfigDict(validate_by_name=False) + + @field_serializer('id') + def serialize_id(self, v: UUID, _info): + return str(v) # Create a new book class BookCreateSchema(BaseModel): diff --git a/src/db/mongo/base.py b/src/db/mongo/base.py index 6975ffa..4ce31b2 100644 --- a/src/db/mongo/base.py +++ b/src/db/mongo/base.py @@ -3,7 +3,7 @@ from datetime import datetime from uuid import UUID -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, ConfigDict, field_serializer from shared.utils_dates import get_app_current_time @@ -24,11 +24,12 @@ class BaseMongoDocument(BaseModel): updated_at: datetime = Field(default_factory=default_mongodb_created_at) deleted_at: datetime | None = Field(default=None) - class Config: - allow_population_by_field_name = False - json_encoders = { - UUID: lambda v: str(v), - } + + model_config = ConfigDict(validate_by_name=False) + + @field_serializer('id') + def serialize_id(self, v: UUID, _info): + return str(v) class MongoAbstractRepository(ABC): diff --git a/src/tests/__init__.py b/src/tests/__init__.py index e22c3f8..bad3e76 100644 --- a/src/tests/__init__.py +++ b/src/tests/__init__.py @@ -7,7 +7,8 @@ from shared.environment import AppEnvironment from tests.create_databases import prepare_database -if settings.ENVIRONMENT in [AppEnvironment.TESTING, AppEnvironment.TESTING_DOCKER]: +logger.info(f"Check if ENVIRONMENT=testing: {settings.ENVIRONMENT} in {AppEnvironment.TESTING.value} or {AppEnvironment.TESTING_DOCKER.value}") +if settings.ENVIRONMENT in [AppEnvironment.TESTING.value, AppEnvironment.TESTING_DOCKER.value]: logger.info("Preparing database for tests") prepare_database( schemas_to_create=["public"], diff --git a/src/tests/test_init.py 
deleted file mode 100644
index a2befce..0000000
--- a/src/tests/test_init.py
+++ /dev/null
@@ -1,108 +0,0 @@
-import unittest
-from fastapi.testclient import TestClient
-from main import app
-from api.v1.endpoints import books_db, counter_id
-from unittest import TestCase
-
-class TestBooksAPI(TestCase):
-    def setUp(self) -> None:
-        # Reset global state before each test
-        global books_db, counter_id
-        books_db.clear()
-        counter_id = 0
-        self.client = TestClient(app)
-
-    def test_read_root(self) -> None:
-        response = self.client.get("/")
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json(), {"message": "Books API πŸ“š"})
-
-    def test_get_books_empty(self) -> None:
-        response = self.client.get("/v1/books")
-        self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json(), [])
-
-    def test_create_book(self) -> None:
-        payload = {"title": "Test Book", "author": "Author A", "year": 2020}
-        response = self.client.post("/v1/books", json=payload)
-        self.assertEqual(response.status_code, 201)
-        data = response.json()
-        self.assertEqual(data["id"], 1)
-        self.assertEqual(data["title"], payload["title"])
-        self.assertEqual(data["author"], payload["author"])
-        self.assertEqual(data["year"], payload["year"])
-
-    def test_get_book(self) -> None:
-        # First create a book
-        payload = {"title": "Test Book", "author": "Author A", "year": 2020}
-        create_response = self.client.post("/v1/books", json=payload)
-        book_id = create_response.json()["id"]
-
-        # Retrieve the created book
-        response = self.client.get(f"/v1/books/{book_id}")
-        self.assertEqual(response.status_code, 200)
-        data = response.json()
-        self.assertEqual(data["id"], book_id)
-        self.assertEqual(data["title"], payload["title"])
-
-    def test_update_book(self) -> None:
-        # Create a book
-        payload = {"title": "Old Title", "author": "Author A", "year": 2020}
-        create_response = self.client.post("/v1/books", json=payload)
-        book_id = create_response.json()["id"]
-
-        # Update the book
-        updated_payload = {"title": "New Title", "author": "Author B", "year": 2021}
-        response = self.client.put(f"/v1/books/{book_id}", json=updated_payload)
-        self.assertEqual(response.status_code, 200)
-        data = response.json()
-        self.assertEqual(data["id"], book_id)
-        self.assertEqual(data["title"], updated_payload["title"])
-        self.assertEqual(data["author"], updated_payload["author"])
-        self.assertEqual(data["year"], updated_payload["year"])
-
-    def test_delete_book(self) -> None:
-        # Create a book to delete later
-        payload = {"title": "Delete Me", "author": "Author A", "year": 2020}
-        create_response = self.client.post("/v1/books", json=payload)
-        book_id = create_response.json()["id"]
-
-        # Delete the book
-        response = self.client.delete(f"/v1/books/{book_id}")
-        self.assertEqual(response.status_code, 204)
-
-        # Verify the book no longer exists
-        get_response = self.client.get(f"/v1/books/{book_id}")
-        self.assertEqual(get_response.status_code, 404)
-        self.assertEqual(get_response.json()["detail"], "Book not found")
-
-    def test_get_nonexistent_book(self) -> None:
-        response = self.client.get("/v1/books/999")
-        self.assertEqual(response.status_code, 404)
-        self.assertEqual(response.json()["detail"], "Book not found")
-
-    def test_update_nonexistent_book(self) -> None:
-        updated_payload = {"title": "New Title", "author": "Author B", "year": 2021}
-        response = self.client.put("/v1/books/999", json=updated_payload)
-        self.assertEqual(response.status_code, 404)
-        self.assertEqual(response.json()["detail"], "Book not found")
"Book not found") - - def test_delete_nonexistent_book(self) -> None: - response = self.client.delete("/v1/books/999") - self.assertEqual(response.status_code, 404) - self.assertEqual(response.json()["detail"], "Book not found") - - def test_openapi_schema(self) -> None: - response = self.client.get("/openapi.json") - self.assertEqual(response.status_code, 200) - data = response.json() - # Verify that the openapi version is the forced one - self.assertEqual(data["openapi"], "3.0.3") - self.assertEqual(data["info"]["title"], "Books API") - self.assertEqual(data["info"]["version"], "1.0.0") - - def tearDown(self) -> None: - # Reset global state after each test - global books_db, counter_id - books_db.clear() - counter_id = 0 diff --git a/src/tests/test_repositories.py b/src/tests/test_repositories.py new file mode 100644 index 0000000..4239e43 --- /dev/null +++ b/src/tests/test_repositories.py @@ -0,0 +1,38 @@ +import unittest +from api.v1.repositories import BookRepository +from .utils import DBMixin + +# ───────────────────────── TESTS REPOSITORIO ────────────────────────── # + +class TestBooksRepository(DBMixin, unittest.TestCase): + + def test_repository_crud(self): + # CREATE + ok, book = BookRepository.create(self.create_schema()) + self.assertTrue(ok) + self.assertIsNotNone(book.id) + + # GET ALL + ok, all_books = BookRepository.get_all() + self.assertTrue(ok) + self.assertEqual(len(all_books), 1) + + # GET BY ID + ok, fetched = BookRepository.get_by_id(book.id) + self.assertTrue(ok) + self.assertEqual(fetched.title, book.title) + + # UPDATE + new_schema = self.create_schema(title="Pragmatic Programmer", year=1999) + ok, updated = BookRepository.update(book.id, new_schema) + self.assertTrue(ok) + self.assertEqual(updated.title, "Pragmatic Programmer") + + # DELETE + ok, _ = BookRepository.delete(book.id) + self.assertTrue(ok) + + # Ya no existe + ok, none = BookRepository.get_by_id(book.id) + self.assertFalse(ok) + self.assertIsNone(none) diff --git a/src/tests/test_responses.py b/src/tests/test_responses.py new file mode 100644 index 0000000..20160d4 --- /dev/null +++ b/src/tests/test_responses.py @@ -0,0 +1,17 @@ +import unittest +from .utils import DBMixin + +class TestEnvelopeFormat(DBMixin, unittest.TestCase): + """Comprueba estructura genΓ©rica del sobre de respuesta.""" + + def test_envelope_keys(self): + res = self.client.get("/v1/books") + env = res.json() + self.assertSetEqual( + set(env.keys()), {"success", "message", "data", "trace_id"} + ) + self.assertIsInstance(env["success"], bool) + self.assertTrue(env["message"]) + # trace_id puede ser None o str + self.assertTrue(env["trace_id"] is None or isinstance(env["trace_id"], str)) + diff --git a/src/tests/test_services.py b/src/tests/test_services.py new file mode 100644 index 0000000..ddb049d --- /dev/null +++ b/src/tests/test_services.py @@ -0,0 +1,51 @@ +import json +import uuid +import unittest +from typing import Any + +from fastapi.testclient import TestClient +from sqlalchemy import text + +from main import app +from db.posgresql import get_db_context +from db.posgresql.models.public import BookType +from api.v1.schema import BookCreateSchema +from api.v1.repositories import BookRepository +from api.v1.services import ( + BooksListService, + BookCreateService, + BookRetrieveService, + BookUpdateService, + BookDeleteService, +) +from .utils import DBMixin + +# ───────────────────────── TESTS SERVICIOS ────────────────────────── # + +class TestBooksServices(DBMixin, unittest.TestCase): + + def 
+    def test_service_create_retrieve_update_delete_flow(self):
+        # CREATE
+        schema = self.create_schema()
+        created = BookCreateService.create(schema)
+        self.assertEqual(created.title, schema.title)
+
+        # LIST must contain the new book
+        ids = [b.id for b in BooksListService.list()]
+        self.assertIn(created.id, ids)
+
+        # RETRIEVE
+        fetched = BookRetrieveService.retrieve(created.id)
+        self.assertEqual(fetched.author, schema.author)
+
+        # UPDATE
+        upd_schema = self.create_schema(title="DDD Updated", year=2004)
+        updated = BookUpdateService.update(created.id, upd_schema)
+        self.assertEqual(updated.title, "DDD Updated")
+        self.assertEqual(updated.year, 2004)
+
+        # DELETE
+        BookDeleteService.delete(created.id)
+        with self.assertRaises(Exception):
+            BookRetrieveService.retrieve(created.id)
+
diff --git a/src/tests/tests_endpoints.py b/src/tests/tests_endpoints.py
new file mode 100644
index 0000000..e33dafc
--- /dev/null
+++ b/src/tests/tests_endpoints.py
@@ -0,0 +1,94 @@
+import uuid
+import unittest
+from .utils import DBMixin
+
+
+# ───────────────────────── ENDPOINT TESTS ────────────────────────── #
+
+class TestBooksEndpoints(DBMixin, unittest.TestCase):
+
+    # ---------- GET /books empty ---------- #
+    def test_list_books_empty(self):
+        res = self.client.get("/v1/books")
+        self.assertEqual(res.status_code, 200)
+        env = res.json()
+        self.assertTrue(env["success"])
+        self.assertIsNone(env["data"])
+
+    # ---------- POST /books ---------- #
+    def test_create_book_success(self):
+        res = self.client.post("/v1/books", json=self.payload())
+        self.assertEqual(res.status_code, 201)
+        env = res.json()
+        book = env["data"]
+        # valid id
+        uuid.UUID(book["id"])
+        self.assertEqual(book["title"], "Clean Code")
+
+    def test_create_book_missing_field(self):
+        bad = self.payload()
+        bad.pop("title")
+        res = self.client.post("/v1/books", json=bad)
+        self.assertEqual(res.status_code, 422)  # validation error
+
+    # ---------- GET /books/{id} ---------- #
+    def test_get_book_success(self):
+        book_id = self.client.post("/v1/books", json=self.payload()) \
+                             .json()["data"]["id"]
+
+        res = self.client.get(f"/v1/books/{book_id}")
+        self.assertEqual(res.status_code, 200)
+        self.assertEqual(res.json()["data"]["id"], book_id)
+
+    def test_get_book_not_found(self):
+        fake = "11111111-1111-1111-1111-111111111111"
+        res = self.client.get(f"/v1/books/{fake}")
+        self.assertEqual(res.status_code, 404)
+        env = res.json()
+        self.assertFalse(env["success"])
+        self.assertEqual(
+            env["data"]["internal_error"]["code"], "BOOK_NOT_FOUND"
+        )
+
+    # ---------- PUT /books/{id} ---------- #
+    def test_update_book_success(self):
+        book_id = self.client.post("/v1/books", json=self.payload()) \
+                             .json()["data"]["id"]
+
+        new_payload = self.payload(title="Clean Architecture", year=2017)
+        res = self.client.put(f"/v1/books/{book_id}", json=new_payload)
+        self.assertEqual(res.status_code, 200)
+        book = res.json()["data"]
+        self.assertEqual(book["title"], "Clean Architecture")
+        self.assertEqual(book["year"], 2017)
+
+    def test_update_book_not_found(self):
+        fake = "22222222-2222-2222-2222-222222222222"
+        res = self.client.put(f"/v1/books/{fake}", json=self.payload())
+        self.assertEqual(res.status_code, 404)
+
+    # ---------- DELETE /books/{id} ---------- #
+    def test_delete_book_success(self):
+        book_id = self.client.post("/v1/books", json=self.payload()) \
+                             .json()["data"]["id"]
+
+        res = self.client.delete(f"/v1/books/{book_id}")
+        self.assertEqual(res.status_code, 204)
+
+        # it no longer exists
+        res = self.client.get(f"/v1/books/{book_id}")
+        self.assertEqual(res.status_code, 404)
+
+    def test_delete_book_not_found(self):
+        fake = "33333333-3333-3333-3333-333333333333"
+        res = self.client.delete(f"/v1/books/{fake}")
+        self.assertEqual(res.status_code, 404)
+
+    # ---------- OPENAPI ---------- #
+    def test_openapi_schema_version(self):
+        res = self.client.get("/openapi.json")
+        self.assertEqual(res.status_code, 200)
+        data = res.json()
+        self.assertEqual(data["openapi"], "3.0.3")
+        self.assertEqual(data["info"]["title"], "Books API")
+
diff --git a/src/tests/utils.py b/src/tests/utils.py
new file mode 100644
index 0000000..3e78219
--- /dev/null
+++ b/src/tests/utils.py
@@ -0,0 +1,50 @@
+from typing import Any
+
+from fastapi.testclient import TestClient
+from sqlalchemy import text
+
+from main import app
+from db.posgresql import get_db_context
+from db.posgresql.models.public import BookType
+from api.v1.schema import BookCreateSchema
+
+class DBMixin:
+    """Helper methods to clean the `book` table between tests."""
+
+    @staticmethod
+    def _truncate_books() -> None:
+        with get_db_context() as session:
+            session.execute(
+                text("TRUNCATE TABLE public.books RESTART IDENTITY CASCADE;")
+            )
+            session.commit()
+
+    def setUp(self) -> None:  # runs before *each* test
+        self._truncate_books()
+        self.client = TestClient(app)
+
+    def tearDown(self) -> None:  # final cleanup
+        self._truncate_books()
+
+    # ---------- helper data ---------- #
+    @staticmethod
+    def payload(**overrides: Any) -> dict[str, Any]:
+        data = {
+            "title": "Clean Code",
+            "author": "Robert C. Martin",
+            "year": 2008,
+            "type": BookType.ONLINE.value,  # Enum β†’ string for JSON
+        }
+        data.update(overrides)
+        return data
+
+    @staticmethod
+    def create_schema(**overrides: Any) -> BookCreateSchema:
+        base = dict(
+            title="Domain-Driven Design",
+            author="Eric Evans",
+            year=2003,
+            type=BookType.ONLINE,
+        )
+        base.update(overrides)
+        return BookCreateSchema(**base)

From 0554bef947838a3a7370eeae3ed22feb16a26545 Mon Sep 17 00:00:00 2001
From: ronihdzz
Date: Wed, 28 May 2025 19:19:25 -0600
Subject: [PATCH 36/36] refactor: reorganize code

---
 src/api/routers.py                                  |  2 +-
 src/api/v1/books/__init__.py                        |  0
 src/api/v1/{ => books}/endpoints.py                 |  4 ++--
 src/api/v1/{ => books}/repositories.py              |  2 +-
 src/api/v1/{ => books}/schema.py                    |  0
 src/api/v1/{ => books}/services.py                  |  4 ++--
 src/tests/__init__.py                               |  2 +-
 src/tests/common/__init__.py                        |  0
 src/tests/{ => common}/test_database_connections.py |  0
 src/tests/utils/__init__.py                         |  0
 src/tests/{ => utils}/create_databases.py           |  0
 src/tests/v1/__init__.py                            |  0
 src/tests/v1/test_books/__init__.py                 |  0
 src/tests/{ => v1/test_books}/test_repositories.py  |  2 +-
 src/tests/{ => v1/test_books}/test_responses.py     |  0
 src/tests/{ => v1/test_books}/test_services.py      |  6 +++---
 src/tests/{ => v1/test_books}/tests_endpoints.py    |  0
 src/tests/{ => v1/test_books}/utils.py              |  2 +-
 18 files changed, 12 insertions(+), 12 deletions(-)
 create mode 100644 src/api/v1/books/__init__.py
 rename src/api/v1/{ => books}/endpoints.py (95%)
 rename src/api/v1/{ => books}/repositories.py (97%)
 rename src/api/v1/{ => books}/schema.py (100%)
 rename src/api/v1/{ => books}/services.py (94%)
 create mode 100644 src/tests/common/__init__.py
 rename src/tests/{ => common}/test_database_connections.py (100%)
 create mode 100644 src/tests/utils/__init__.py
 rename src/tests/{ => utils}/create_databases.py (100%)
 create mode 100644 src/tests/v1/__init__.py
 create mode 100644 src/tests/v1/test_books/__init__.py
 rename src/tests/{ => v1/test_books}/test_repositories.py (95%)
 rename src/tests/{ => v1/test_books}/test_responses.py (100%)
 rename src/tests/{ => v1/test_books}/test_services.py (91%)
 rename src/tests/{ => v1/test_books}/tests_endpoints.py (100%)
 rename src/tests/{ => v1/test_books}/utils.py (96%)

diff --git a/src/api/routers.py b/src/api/routers.py
index 26cb481..22670a4 100644
--- a/src/api/routers.py
+++ b/src/api/routers.py
@@ -1,5 +1,5 @@
 from fastapi import APIRouter
-from api.v1.endpoints import router as books_endpoints
+from api.v1.books.endpoints import router as books_endpoints
 
 api_v1_router = APIRouter(prefix="/v1")
diff --git a/src/api/v1/books/__init__.py b/src/api/v1/books/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/api/v1/endpoints.py b/src/api/v1/books/endpoints.py
similarity index 95%
rename from src/api/v1/endpoints.py
rename to src/api/v1/books/endpoints.py
index e4f08f8..6729a63 100644
--- a/src/api/v1/endpoints.py
+++ b/src/api/v1/books/endpoints.py
@@ -1,9 +1,9 @@
-from api.v1.schema import BookCreateSchema
+from api.v1.books.schema import BookCreateSchema
 from loguru import logger
 from shared.base_responses import create_response_for_fast_api, EnvelopeResponse
 from fastapi import APIRouter, Response
 from uuid import UUID
-from api.v1.services import (
+from api.v1.books.services import (
     BooksListService,
     BookCreateService,
     BookRetrieveService,
diff --git a/src/api/v1/repositories.py b/src/api/v1/books/repositories.py
similarity index 97%
rename from src/api/v1/repositories.py
rename to src/api/v1/books/repositories.py
index e59ac07..e9824cc 100644
--- a/src/api/v1/repositories.py
+++ b/src/api/v1/books/repositories.py
@@ -1,6 +1,6 @@
 from db.posgresql import get_db_context
 from db.posgresql.models.public import Book
-from api.v1.schema import BookCreateSchema
+from api.v1.books.schema import BookCreateSchema
 from sqlalchemy import select
 
 class BookRepository:
diff --git a/src/api/v1/schema.py b/src/api/v1/books/schema.py
similarity index 100%
rename from src/api/v1/schema.py
rename to src/api/v1/books/schema.py
diff --git a/src/api/v1/services.py b/src/api/v1/books/services.py
similarity index 94%
rename from src/api/v1/services.py
rename to src/api/v1/books/services.py
index bfece7f..6b67621 100644
--- a/src/api/v1/services.py
+++ b/src/api/v1/books/services.py
@@ -1,5 +1,5 @@
-from api.v1.repositories import BookRepository
-from api.v1.schema import BookSchema, BookCreateSchema
+from api.v1.books.repositories import BookRepository
+from api.v1.books.schema import BookSchema, BookCreateSchema
 from core.exceptions import BookException
 from uuid import UUID
 
diff --git a/src/tests/__init__.py b/src/tests/__init__.py
index bad3e76..e72f245 100644
--- a/src/tests/__init__.py
+++ b/src/tests/__init__.py
@@ -5,7 +5,7 @@
     Book
 )
 from shared.environment import AppEnvironment
-from tests.create_databases import prepare_database
+from tests.utils.create_databases import prepare_database
 
 logger.info(f"Check if ENVIRONMENT=testing: {settings.ENVIRONMENT} in {AppEnvironment.TESTING.value} or {AppEnvironment.TESTING_DOCKER.value}")
 if settings.ENVIRONMENT in [AppEnvironment.TESTING.value, AppEnvironment.TESTING_DOCKER.value]:
diff --git a/src/tests/common/__init__.py b/src/tests/common/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/tests/test_database_connections.py b/src/tests/common/test_database_connections.py
similarity index 100%
rename from src/tests/test_database_connections.py
rename to src/tests/common/test_database_connections.py
diff --git a/src/tests/utils/__init__.py b/src/tests/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/tests/create_databases.py b/src/tests/utils/create_databases.py
similarity index 100%
rename from src/tests/create_databases.py
rename to src/tests/utils/create_databases.py
diff --git a/src/tests/v1/__init__.py b/src/tests/v1/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/tests/v1/test_books/__init__.py b/src/tests/v1/test_books/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/tests/test_repositories.py b/src/tests/v1/test_books/test_repositories.py
similarity index 95%
rename from src/tests/test_repositories.py
rename to src/tests/v1/test_books/test_repositories.py
index 4239e43..a87f50b 100644
--- a/src/tests/test_repositories.py
+++ b/src/tests/v1/test_books/test_repositories.py
@@ -1,5 +1,5 @@
 import unittest
-from api.v1.repositories import BookRepository
+from api.v1.books.repositories import BookRepository
 from .utils import DBMixin
 
 # ───────────────────────── REPOSITORY TESTS ────────────────────────── #
diff --git a/src/tests/test_responses.py b/src/tests/v1/test_books/test_responses.py
similarity index 100%
rename from src/tests/test_responses.py
rename to src/tests/v1/test_books/test_responses.py
diff --git a/src/tests/test_services.py b/src/tests/v1/test_books/test_services.py
similarity index 91%
rename from src/tests/test_services.py
rename to src/tests/v1/test_books/test_services.py
index ddb049d..3db7cde 100644
--- a/src/tests/test_services.py
+++ b/src/tests/v1/test_books/test_services.py
@@ -9,9 +9,9 @@
 from main import app
 from db.posgresql import get_db_context
 from db.posgresql.models.public import BookType
-from api.v1.schema import BookCreateSchema
-from api.v1.repositories import BookRepository
-from api.v1.services import (
+from api.v1.books.schema import BookCreateSchema
+from api.v1.books.repositories import BookRepository
+from api.v1.books.services import (
     BooksListService,
     BookCreateService,
     BookRetrieveService,
diff --git a/src/tests/tests_endpoints.py b/src/tests/v1/test_books/tests_endpoints.py
similarity index 100%
rename from src/tests/tests_endpoints.py
rename to src/tests/v1/test_books/tests_endpoints.py
diff --git a/src/tests/utils.py b/src/tests/v1/test_books/utils.py
similarity index 96%
rename from src/tests/utils.py
rename to src/tests/v1/test_books/utils.py
index 3e78219..d730aab 100644
--- a/src/tests/utils.py
+++ b/src/tests/v1/test_books/utils.py
@@ -6,7 +6,7 @@
 from main import app
 from db.posgresql import get_db_context
 from db.posgresql.models.public import BookType
-from api.v1.schema import BookCreateSchema
+from api.v1.books.schema import BookCreateSchema
 
 class DBMixin:
     """Helper methods to clean the `book` table between tests."""
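
---
The series above ends with the layering endpoints -> services -> repositories -> SQLAlchemy session. The snippet below is a minimal usage sketch, not one of the patches: it assumes the databases from docker-compose.yml are reachable, src/ is on PYTHONPATH, and the title/author values are placeholder data.

    # Usage sketch: drive the service layer directly, bypassing HTTP.
    # Assumes the test databases are up and src/ is on PYTHONPATH.
    from api.v1.books.schema import BookCreateSchema
    from api.v1.books.services import (
        BookCreateService,
        BookRetrieveService,
        BookDeleteService,
    )
    from core.exceptions import BookException
    from db.posgresql.models.public import BookType

    # Create a book; the service returns a BookSchema, never a raw ORM row.
    created = BookCreateService.create(
        BookCreateSchema(
            title="Refactoring",          # placeholder data
            author="Martin Fowler",
            year=1999,
            type=BookType.ONLINE,
        )
    )

    # Retrieve it again; a missing id raises BookException instead of
    # returning the repository's (False, None) tuple.
    fetched = BookRetrieveService.retrieve(created.id)
    assert fetched.title == "Refactoring"

    # Delete, then confirm the not-found path maps to BookException.
    BookDeleteService.delete(created.id)
    try:
        BookRetrieveService.retrieve(created.id)
    except BookException:
        print("book gone, as expected")

The point of the sketch: the (success, payload) tuple convention from BookRepository never leaks past the service layer; callers see either a schema instance or a BookException, which is what the endpoint tests above rely on for the 404 envelope.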