From d63645fd2f7b3adcb3844bb6b31e1c1ba1cf2ba5 Mon Sep 17 00:00:00 2001 From: medniy <20140819+Medniy2000@users.noreply.github.com> Date: Sat, 6 Sep 2025 11:46:28 +0300 Subject: [PATCH 01/13] feature/refactor (#8) --- .env.example | 22 +- .launch/api/Dockerfile | 2 +- README.rst | 248 +++++++++++++------ beautify.sh | 2 +- local_prepare.sh | 60 +++++ local_run.sh | 144 +++++++++++ src/app/application/services/auth_service.py | 36 +-- src/app/config/settings.py | 2 +- 8 files changed, 404 insertions(+), 112 deletions(-) create mode 100644 local_prepare.sh create mode 100644 local_run.sh diff --git a/.env.example b/.env.example index e2d5379..7a4058d 100644 --- a/.env.example +++ b/.env.example @@ -1,6 +1,7 @@ # Base settings # ------------------------------------------------------------------------------ -PROJECT_NAME=Prototype +PROJECT_NAME=APP_DDD +PROJECT_NAME_SLUG=app_ddd TEAM_CONTACT_EMAIL=dream_team@gmail.com SECRET_KEY=OpypRaBHOiRSEQRp-wHEEEnq1euQQNqcmQBrD-Lb-G42lrXgREsnFQ3jKSugRKun @@ -20,17 +21,22 @@ API_LIMIT_ALLOWED_VALUES_LIST=[1,5,10,15,25,50] SHOW_API_DOCS=True CORS_ORIGIN_WHITELIST=["*"] +# GRPC settings +# ------------------------------------------------------------------------------ +GRPC_HOST=0.0.0.0 +GRPC_PORT=50051 + # Auth settings # ------------------------------------------------------------------------------ -ACCESS_TOKEN_EXPIRES_MINUTES=30 +ACCESS_TOKEN_EXPIRES_MINUTES=5 REFRESH_TOKEN_EXPIRES_DAYS=5 # DATABASE settings # ------------------------------------------------------------------------------ SHOW_SQL=False DB_HOST=127.0.0.1 -DB_PORT=5442 -DB_NAME=prototype +DB_PORT=5440 +DB_NAME=proto DB_USER=dev DB_PASSWORD=dev CONNECTIONS_POOL_MIN_SIZE=10 @@ -38,15 +44,15 @@ CONNECTIONS_POOL_MAX_OVERFLOW=25 # Redis # ------------------------------------------------------------------------------ -REDIS_URL=redis://127.0.0.1:6379/0 +REDIS_URL=redis://127.0.0.1:6380/0 # Celery # ------------------------------------------------------------------------------ -CELERY_BROKER_URL=redis://127.0.0.1/11 -CELERY_RESULT_BACKEND=redis://127.0.0.1/12 +CELERY_BROKER_URL=redis://127.0.0.1:6380/11 +CELERY_RESULT_BACKEND=redis://127.0.0.1:6380/12 # Mesage Broker # ------------------------------------------------------------------------------ -MESSAGE_BROKER_URL=amqp://dev:dev@127.0.0.1:5672 +MESSAGE_BROKER_URL=amqp://dev:dev@0.0.0.0:5672 DEFAULT_EXCHANGER=YOUR_DEFAULT_EXCHANGER DEFAULT_QUEUE=YOUR_DEFAULT_QUEUE diff --git a/.launch/api/Dockerfile b/.launch/api/Dockerfile index fffba8c..b91dee9 100644 --- a/.launch/api/Dockerfile +++ b/.launch/api/Dockerfile @@ -15,5 +15,5 @@ CMD alembic upgrade head ENV PYTHONPATH=/app -CMD alembic upgrade head && gunicorn --config src/app/gunicorn_config.py src.app.interfaces.cli.main:app +CMD alembic upgrade head && gunicorn --config src/app/interfaces/cli/gunicorn_config.py src.app.interfaces.cli.main:app EXPOSE 8081 diff --git a/README.rst b/README.rst index 127d1b9..8e1de7b 100644 --- a/README.rst +++ b/README.rst @@ -1,86 +1,178 @@ Readme -======= +====== -Real world project example based on FastAPI, Celery, Consumer[RabbitMQ, Kafka] +A Python project template built with **Domain-Driven Design (DDD)** and the **Onion Architecture**. -.. 
image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/ambv/black
-   :alt: Black code style
-
-
-To run app local use::
-
-    $ cd /{your_project_name}
-    $ poetry env use 3.12
-    $ poetry install
-    $ poetry update
-    $ cp .env.example .env
-
-    # to run app perform:
-    * install, run postgres, message broker, actualize .env
-
-    # Run API
-    $ uvicorn src.app.main:app --reload --port 8081
-
-    # Run Celery
-    # celery -A src.app.extensions.celery_ext.celery_app worker -l INFO -E -B -Q default_queue --concurrency=2 -n default@%h
-
-    # Run consumer
-    # python -m src.app.consume
-
-
-API docs here::
-
-    # http:///docs
-
-To make db schema::
-
-    # use commands
+.. note::
+   While not yet production-ready (several components are still incomplete), this template provides a solid foundation that can be extended into future production-grade projects.

-    $ alembic revision --autogenerate -m "some message"
-    $ alembic upgrade head
-
-
-To check code quality[black, flake8, mypy]::
-
-    # use commands
-    $ bash beautify.sh
-
-
-Docs commands::
-
-    # before:
-    # install sphinx
-    # mkdir docs
-    # cd docs
-    # pip sphinx-quickstart
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+   :target: https://github.com/ambv/black
+   :alt: Black code style
+
+Tech Stack
+==========
+
+- **API Layer**
+  - `FastAPI <https://fastapi.tiangolo.com/>`_
+  - gRPC
+- **Background / Async Tasks**
+  - `Celery <https://docs.celeryq.dev/>`_ (with Redis)
+- **Messaging (Publish / Consume)**
+  - RabbitMQ
+  - Kafka
+- **Database**
+  - `SQLAlchemy <https://www.sqlalchemy.org/>`_
+  - Alembic
+  - PostgreSQL
+- **Tests**
+  - `Pytest <https://docs.pytest.org/>`_
+- **Code Quality**
+  - `Mypy <https://mypy-lang.org/>`_
+  - `Flake8 <https://flake8.pycqa.org/>`_
+  - `Black <https://black.readthedocs.io/>`_
+- **Documentation**
+  - `Sphinx <https://www.sphinx-doc.org/>`_
+- **Containerization**
+  - Docker
+
+Project Structure
+=================
+
+.. code-block:: text
+
+    fastapi_prototype/
+    ├── .launch/                  # Docker launch configurations
+    │   ├── api/
+    │   ├── celery/
+    │   ├── consume/
+    │   └── tests/
+    ├── docs/                     # Documentation
+    │   └── source/
+    ├── src/                      # Main source code
+    │   └── app/
+    │       ├── application/      # Application layer
+    │       │   ├── common/
+    │       │   └── services/
+    │       ├── config/           # Configuration
+    │       ├── domain/           # Domain layer
+    │       ├── infrastructure/   # Infrastructure layer
+    │       │   ├── common/
+    │       │   ├── extensions/
+    │       │   ├── messaging/
+    │       │   ├── persistence/
+    │       │   ├── repositories/
+    │       │   ├── tasks/
+    │       │   └── utils/
+    │       └── interfaces/       # API interfaces
+    ├── static/                   # Static files
+    ├── tests/                    # Test files
+    │   ├── application/
+    │   ├── domain/
+    │   ├── fixtures/
+    │   └── infrastructure/
+    ├── .env.example              # Environment variables example
+    ├── pyproject.toml            # Python dependencies
+    ├── README.rst                # Project documentation
+    ├── alembic.ini               # Database migrations
+    └── docker-compose-tests.yml  # Test environment
+
+Running the App via Scripts (Docker)::
+
+    # launch required infrastructure containers
+    bash local_prepare.sh
+    # use --recreate to remove and recreate the containers
+    # example: bash local_prepare.sh --recreate
+
+    # launch app
+    bash local_run.sh
+    # use flags:
+    # --recreate to remove and rebuild containers and images
+    # --run_api to also start the API container
+    # example: bash local_run.sh --recreate --run_api
+
+
+
+Running the App locally::
+
+    cd /{your_project_name}
+    poetry env use 3.12
+    poetry install
+    poetry update
+    bash local_prepare.sh  # launch required containers
+    # !! make sure all required containers are up before running the app.
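+    # a quick readiness check (a sketch; the container names assume
+    # PROJECT_NAME_SLUG=app_ddd from .env.example):
+    docker ps --filter "name=app_ddd" --format "table {{.Names}}\t{{.Status}}"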
+
+
+    # run API
+    uvicorn src.app.interfaces.cli.main:app --reload --port 8081
+
+    # run Celery
+    celery -A src.app.interfaces.cli.celery_app worker -l INFO -E -B -Q default_queue --concurrency=2 -n default@%h
+
+    # run Consumer
+    python -m src.app.consume
+
+
+API Documentation::
+
+    http:///docs
+
+Database Migrations::
+
+    alembic revision --autogenerate -m "some message"
+    alembic upgrade head
+
+Code Quality Checks::
+
+    bash beautify.sh
+
+
+Documentation Commands::
+
+    # Initial setup
+    pip install sphinx
+    mkdir docs
+    cd docs
+    sphinx-quickstart

-    $ cd /docs
+    # Build documentation
+    cd /docs
     # describe your docs in /docs/source/*.rst
-    $ make html
+    make html
     # open /docs/build/index.html

-To run app using Docker::
-
-    $ cd /{your_project_name}
-
-    # build, run celery
-    $ docker run -d --name my_local_redis -p 6379:6379 redis:latest
-    $ docker build -t celery_img --no-cache -f .launch/celery/Dockerfile .
-    $ docker run -d --name my_local_celery --shm-size="512m" --cpus=2 --env-file ./.env -e CELERY_ARGS="worker -l INFO -E -B -Q default_queue --concurrency=2 -n default@%h" celery_img
-    $ docker run -d --name my_local_flower -e broker_url=redis://172.17.0.1:6379/11 -e CELERY_BROKER_URL=redis://172.17.0.1:6379/11 -e CELERY_BROKER_API=redis://172.17.0.1:6379/12 -p 5555:5555 mher/flower
-
-    # build, run API
-    $ docker build -t api_img --no-cache -f .launch/api/Dockerfile .
-    $ docker run -d --env-file ./.env --name my_local_api --shm-size="1g" --cpus=1 -p 8081:8081 api_img
-
-    # build, run Consumer
-    $ docker build -t consume_img --no-cache -f .launch/consume/Dockerfile .
-    $ docker run -d --env-file .env --name my_local_consume --shm-size="512m" --cpus=1 consume_img
-
-Run tests::
-
-    # run tests
-    $ docker-compose -f docker-compose-tests.yml up --abort-on-container-exit
-    # clean up
-    $ docker-compose -f docker-compose-tests.yml rm -fsv && docker rmi $(docker images '*x_test*' -a -q) && docker system prune
+Docker Commands::
+
+    docker build -t api_img --no-cache -f .launch/api/Dockerfile .
+    docker build -t celery_img --no-cache -f .launch/celery/Dockerfile .
+    docker build -t consume_img --no-cache -f .launch/consume/Dockerfile .
+
+    docker run -d --name my_local_api \
+        --env-file ./.env \
+        --shm-size="1g" \
+        --cpus=1 -p 8081:8081 \
+        api_img
+
+    docker run -d --name my_local_celery \
+        --shm-size="512m" \
+        --cpus=2 \
+        --env-file ./.env \
+        -e CELERY_ARGS="worker -l INFO -E -B -Q default_queue --concurrency=2 -n default@%h" \
+        celery_img
+
+    docker run -d --name my_local_consume \
+        --env-file .env \
+        --shm-size="512m" \
+        --cpus=1 \
+        consume_img
+
+    docker run -d --name my_local_flower \
+        -e broker_url=redis://172.17.0.1:6379/11 \
+        -e CELERY_BROKER_URL=redis://172.17.0.1:6379/11 \
+        -e CELERY_BROKER_API=redis://172.17.0.1:6379/12 \
+        -p 5555:5555 mher/flower
+
+Running Tests::
+
+    docker-compose -f docker-compose-tests.yml up --abort-on-container-exit
+    docker-compose -f docker-compose-tests.yml rm -fsv && docker rmi $(docker images '*x_test*' -a -q) && docker system prune
diff --git a/beautify.sh b/beautify.sh
index 3066f44..f54610a 100644
--- a/beautify.sh
+++ b/beautify.sh
@@ -1,4 +1,4 @@
-
+#!/bin/sh

 pip install --upgrade pip

diff --git a/local_prepare.sh b/local_prepare.sh
new file mode 100644
index 0000000..909b946
--- /dev/null
+++ b/local_prepare.sh
@@ -0,0 +1,60 @@
+#!/bin/sh
+
+# Load environment variables
+. 
./.env.example # or 'source .env' in bash
+
+# Config
+REDIS_PORT=6380
+MESSAGE_BROKER_USER="dev"
+MESSAGE_BROKER_PASSWORD="dev"
+MESSAGE_BROKER_PORT=5672
+
+RECREATE=false
+
+# Parse arguments
+while [ "$#" -gt 0 ]; do
+    case $1 in
+        --recreate) RECREATE=true ;;
+    esac
+    shift
+done
+
+echo "🥁 Infrastructure preparing..."
+
+# Remove old containers if requested
+if [ "$RECREATE" = true ]; then
+    docker ps -a --filter "name=${PROJECT_NAME_SLUG}*" --format "{{.ID}}" | xargs -r docker rm -f
+    echo " 🗑️ removed old"
+fi
+
+# Redis
+if [ ! "$(docker ps -aq -f name=${PROJECT_NAME_SLUG}_redis)" ]; then
+    docker run -d --name "${PROJECT_NAME_SLUG}_redis" \
+        -p $REDIS_PORT:6379 \
+        redis:latest || true
+fi
+echo " ✅ ${PROJECT_NAME_SLUG}_redis UP"
+
+# RabbitMQ
+if [ ! "$(docker ps -aq -f name=${PROJECT_NAME_SLUG}_rabbitmq)" ]; then
+    docker run -d --name "${PROJECT_NAME_SLUG}_rabbitmq" \
+        -p 15672:15672 \
+        -p $MESSAGE_BROKER_PORT:5672 \
+        -e RABBITMQ_DEFAULT_USER=$MESSAGE_BROKER_USER \
+        -e RABBITMQ_DEFAULT_PASS=$MESSAGE_BROKER_PASSWORD \
+        rabbitmq:3.11.6-management || true
+fi
+echo " ✅ ${PROJECT_NAME_SLUG}_rabbitmq UP"
+
+# Postgres
+if [ ! "$(docker ps -aq -f name=${PROJECT_NAME_SLUG}_postgres)" ]; then
+    docker run -d --name "${PROJECT_NAME_SLUG}_postgres" \
+        -e POSTGRES_DB=$DB_NAME \
+        -e POSTGRES_USER=$DB_USER \
+        -e POSTGRES_PASSWORD=$DB_PASSWORD \
+        -p $DB_PORT:5432 \
+        postgres:latest || true
+fi
+echo " ✅ ${PROJECT_NAME_SLUG}_postgres UP"
+
+echo "✅ Infrastructure UP"
diff --git a/local_run.sh b/local_run.sh
new file mode 100644
index 0000000..bc8f040
--- /dev/null
+++ b/local_run.sh
@@ -0,0 +1,144 @@
+#!/bin/bash
+set -euo pipefail
+
+# ----------------------------------------------------------------------
+# Load environment variables
+# ----------------------------------------------------------------------
+. ./.env.example # or 'source .env' in bash
+[ -f .env ] || cp .env.example .env # create local .env file if it doesn't exist
+
+INPUT_ENV_FILE=".env"
+OUTPUT_ENV_FILE=".env_docker"
+
+# ----------------------------------------------------------------------
+# Generate .env_docker for Docker containers
+# ----------------------------------------------------------------------
+SEARCH="127.0.0.1"
+REPLACE="172.17.0.1"
+
+# Create or overwrite output file
+> "$OUTPUT_ENV_FILE"
+
+# Read .env line by line
+while IFS= read -r line || [[ -n "$line" ]]; do
+    # Preserve empty lines and comments
+    if [[ -z "$line" || "$line" =~ ^# ]]; then
+        echo "$line" >> "$OUTPUT_ENV_FILE"
+        continue
+    fi
+
+    # Split key and value
+    key="${line%%=*}"
+    value="${line#*=}"
+
+    # Replace specified substring in the value
+    new_value="${value//$SEARCH/$REPLACE}"
+
+    # Write updated line to output file
+    echo "$key=$new_value" >> "$OUTPUT_ENV_FILE"
+done < "$INPUT_ENV_FILE"
+
+
+. 
./$OUTPUT_ENV_FILE # or 'source .env_docker' in bash + +# ---------------------------------------------------------------------- +# Docker configuration +# ---------------------------------------------------------------------- +DOCKER_PREFIX="local" +RECREATE=false +RUN_API=false + +# Parse script arguments +while [ "$#" -gt 0 ]; do + case $1 in + --recreate) RECREATE=true ;; + --run_api) RUN_API=true ;; + esac + shift +done + +# ---------------------------------------------------------------------- +# Recreate containers and images if requested +# ---------------------------------------------------------------------- +if [ "$RECREATE" = true ]; then + # Remove old containers + docker ps -a --filter "name=${DOCKER_PREFIX}*" --format "{{.ID}}" | xargs -r docker rm -f + + # Remove old images + docker rmi celery_img + docker rmi consume_img + docker rmi api_img + + echo " 🗑️ Removed old containers and images" + + # Build new images + docker build -t celery_img --no-cache -f .launch/celery/Dockerfile . + echo " 🏗️ Built celery_img image" + + docker build -t consume_img --no-cache -f .launch/consume/Dockerfile . + echo " 🏗️ Built consume_img image" + + if [ "$RUN_API" = true ]; then + docker build -t api_img --no-cache -f .launch/api/Dockerfile . + echo " 🏗️ Built api_img image" + fi +fi + +# ---------------------------------------------------------------------- +# Run Docker containers +# ---------------------------------------------------------------------- +# Celery container +if [ ! "$(docker ps -aq -f name=${DOCKER_PREFIX}_celery)" ]; then + docker run -d \ + --name "${DOCKER_PREFIX}_celery" \ + --env-file ./.env_docker \ + --shm-size="512m" \ + --cpus=2 \ + -e CELERY_ARGS="worker -l INFO -E -B -Q default_queue --concurrency=2 -n default@%h" \ + celery_img || true + + docker run -d --name "${DOCKER_PREFIX}_flower" \ + -e broker_url=$CELERY_BROKER_URL \ + -e CELERY_BROKER_URL=$CELERY_BROKER_URL \ + -e CELERY_BROKER_API=$CELERY_RESULT_BACKEND \ + -p 5555:5555 mher/flower +fi +echo " ✅ ${DOCKER_PREFIX}_celery UP" +echo " ✅ ${DOCKER_PREFIX}_flower UP" + +# Consume container +if [ ! 
"$(docker ps -aq -f name=${DOCKER_PREFIX}_consume)" ]; then + docker run -d \ + --name "${DOCKER_PREFIX}_consume" \ + --env-file ./.env_docker \ + --shm-size="512m" \ + --cpus=1 \ + consume_img || true +fi +echo " ✅ ${DOCKER_PREFIX}_consume UP" + +# API container (optional) +if [ -z "$(docker ps -aq -f name=${DOCKER_PREFIX}_api)" ] && [ "$RUN_API" = true ]; then + docker run -d \ + --name "${DOCKER_PREFIX}_api" \ + --env-file ./.env_docker \ + --shm-size="1g" \ + --cpus=1 \ + -p 8081:8081 \ + api_img || true +fi +if [ "$RUN_API" = true ]; then +echo " ✅ ${DOCKER_PREFIX}_api UP" +fi + + +# ---------------------------------------------------------------------- +# Print results +# ---------------------------------------------------------------------- +echo "" +echo "-----------------------------------------------------------------" +if [ "$RUN_API" = true ]; then +echo " 💎 http://0.0.0.0:8081/docs - API" +fi +echo " ⚙️ http://0.0.0.0:5555 - Flower[celery monitoring]" +echo "----------------------------------------------------------------" diff --git a/src/app/application/services/auth_service.py b/src/app/application/services/auth_service.py index aa8ce46..27fd0e0 100644 --- a/src/app/application/services/auth_service.py +++ b/src/app/application/services/auth_service.py @@ -1,45 +1,33 @@ from copy import deepcopy from typing import Any -import bcrypt from fastapi import HTTPException, status -from passlib.context import CryptContext from pydantic import validate_email from src.app.application.common.services.base import AbstractBaseApplicationService -from src.app.application.container import container as services_container +from src.app.application.container import container as app_services_container, ApplicationServicesContainer +from src.app.domain.users.container import container as domain_services_container, DomainServicesContainer class AuthService(AbstractBaseApplicationService): - users_service = services_container.users_service + app_svc_container: ApplicationServicesContainer = app_services_container + dom_svc_container: DomainServicesContainer = domain_services_container - pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") auth_exception = HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail="Could not validate credentials", headers={"WWW-Authenticate": "Bearer"}, ) - @classmethod - def verify_password(cls, plain_password: str, hashed_password: str) -> bool: - password_byte_enc = plain_password.encode("utf-8") - hashed_password_enc = hashed_password.encode("utf-8") - return bcrypt.checkpw(password=password_byte_enc, hashed_password=hashed_password_enc) - - @classmethod - def get_password_hashed(cls, password: str) -> str: - pwd_bytes = password.encode("utf-8") - salt = bcrypt.gensalt() - hashed_password = bcrypt.hashpw(password=pwd_bytes, salt=salt) - return hashed_password.decode("utf-8") - @classmethod async def create_auth_user(cls, data: dict) -> Any: data_ = deepcopy(data) email = data_.get("email") or "" try: email_validated = validate_email(email)[1] - is_email_exists = await cls.users_service.is_exists(filter_data={"email": email_validated}) + is_email_exists = await cls.app_svc_container.users_service.is_exists( + filter_data={"email": email_validated} + ) if is_email_exists or not email: raise HTTPException(status_code=422, detail="User already exists with email") data["email"] = email_validated @@ -47,9 +35,9 @@ async def create_auth_user(cls, data: dict) -> Any: raise HTTPException(status_code=422, detail="Invalid value for email") password = 
data_.pop("password", None) or "" - password_hashed = cls.get_password_hashed(password) + password_hashed = cls.dom_svc_container.auth_service.get_password_hashed(password) data_["password_hashed"] = password_hashed - return await cls.users_service.create(data_, is_return_require=True) + return await cls.app_svc_container.users_service.create(data_, is_return_require=True) @classmethod async def get_auth_user(cls, email: str, password: str) -> Any: @@ -58,8 +46,10 @@ async def get_auth_user(cls, email: str, password: str) -> Any: except Exception: raise HTTPException(status_code=422, detail=f"Invalid value {email}") - user = await cls.users_service.get_first(filter_data={"email": email_validated}) - is_password_verified = cls.verify_password(password, getattr(user, "password_hashed")) + user = await cls.app_svc_container.users_service.get_first(filter_data={"email": email_validated}) + is_password_verified = cls.dom_svc_container.auth_service.verify_password( + password, getattr(user, "password_hashed") + ) if not user or not is_password_verified: raise HTTPException(status_code=422, detail="username or password is incorrect") return user diff --git a/src/app/config/settings.py b/src/app/config/settings.py index d79d4fd..b8872ef 100644 --- a/src/app/config/settings.py +++ b/src/app/config/settings.py @@ -23,7 +23,7 @@ class SettingsBase(PydanticSettings): # Base Settings # -------------------------------------------------------------------------- PROJECT_NAME: str = env.str("PROJECT_NAME", "Project") - PROJECT_NAME_SLUG: str = slugify(PROJECT_NAME) + PROJECT_NAME_SLUG: str = env.str("PROJECT_NAME_SLUG", "") or slugify(PROJECT_NAME) TEAM_CONTACT_EMAIL: str = env.str("TEAM_CONTACT_EMAIL", "yourteam@example.com") SECRET_KEY: str = env.str("SECRET_KEY", secrets.token_urlsafe(32)) From ad05d828b863c420b351a272c0072bb2fb1fd6cb Mon Sep 17 00:00:00 2001 From: medniy <20140819+Medniy2000@users.noreply.github.com> Date: Sun, 7 Sep 2025 12:29:26 +0300 Subject: [PATCH 02/13] feature/init gRPC (#10) --- .env.example | 3 +- .flake8 | 2 +- .launch/api/Dockerfile | 2 +- .launch/grpc/Dockerfile | 18 + README.rst | 25 ++ local_run.sh | 60 ++- poetry.lock | 396 +++++++++++++----- pyproject.toml | 5 + src/app/config/settings.py | 9 +- src/app/interfaces/cli/gunicorn_config.py | 6 +- src/app/interfaces/grpc/client.py | 32 ++ src/app/interfaces/grpc/pb/__init__.py | 0 src/app/interfaces/grpc/pb/debug/debug_pb2.py | 41 ++ .../grpc/pb/debug/debug_pb2_grpc.py | 100 +++++ .../interfaces/grpc/pb/example/__init__.py | 0 .../interfaces/grpc/pb/example/example_pb2.py | 34 ++ .../grpc/pb/example/example_pb2_grpc.py | 100 +++++ src/app/interfaces/grpc/protos/debug.proto | 27 ++ src/app/interfaces/grpc/protos/example.proto | 16 + src/app/interfaces/grpc/server.py | 31 ++ src/app/interfaces/grpc/services/__init__.py | 0 .../interfaces/grpc/services/debug_service.py | 25 ++ .../grpc/services/example_service.py | 7 + 23 files changed, 804 insertions(+), 135 deletions(-) create mode 100644 .launch/grpc/Dockerfile create mode 100644 src/app/interfaces/grpc/client.py create mode 100644 src/app/interfaces/grpc/pb/__init__.py create mode 100644 src/app/interfaces/grpc/pb/debug/debug_pb2.py create mode 100644 src/app/interfaces/grpc/pb/debug/debug_pb2_grpc.py create mode 100644 src/app/interfaces/grpc/pb/example/__init__.py create mode 100644 src/app/interfaces/grpc/pb/example/example_pb2.py create mode 100644 src/app/interfaces/grpc/pb/example/example_pb2_grpc.py create mode 100644 src/app/interfaces/grpc/protos/debug.proto 
create mode 100644 src/app/interfaces/grpc/protos/example.proto create mode 100644 src/app/interfaces/grpc/server.py create mode 100644 src/app/interfaces/grpc/services/__init__.py create mode 100644 src/app/interfaces/grpc/services/debug_service.py create mode 100644 src/app/interfaces/grpc/services/example_service.py diff --git a/.env.example b/.env.example index 7a4058d..8f5b77c 100644 --- a/.env.example +++ b/.env.example @@ -16,6 +16,7 @@ DEFAULT_BATCH_SIZE=500 # API settings # ------------------------------------------------------------------------------ +API_PORT=8082 API_DEFAULT_LIMIT=25 API_LIMIT_ALLOWED_VALUES_LIST=[1,5,10,15,25,50] SHOW_API_DOCS=True @@ -53,6 +54,6 @@ CELERY_RESULT_BACKEND=redis://127.0.0.1:6380/12 # Mesage Broker # ------------------------------------------------------------------------------ -MESSAGE_BROKER_URL=amqp://dev:dev@0.0.0.0:5672 +MESSAGE_BROKER_URL=amqp://dev:dev@127.0.0.1:5672 DEFAULT_EXCHANGER=YOUR_DEFAULT_EXCHANGER DEFAULT_QUEUE=YOUR_DEFAULT_QUEUE diff --git a/.flake8 b/.flake8 index 9aa7860..94d9bd1 100644 --- a/.flake8 +++ b/.flake8 @@ -1,4 +1,4 @@ [flake8] -exclude = .git,__pycache__, *env,.venv,*venv,migrations,logs +exclude = .git,__pycache__, *env,.venv,*venv,migrations,logs,src/app/interfaces/grpc max-line-length = 115 max-complexity = 8 diff --git a/.launch/api/Dockerfile b/.launch/api/Dockerfile index b91dee9..46289ce 100644 --- a/.launch/api/Dockerfile +++ b/.launch/api/Dockerfile @@ -16,4 +16,4 @@ CMD alembic upgrade head ENV PYTHONPATH=/app CMD alembic upgrade head && gunicorn --config src/app/interfaces/cli/gunicorn_config.py src.app.interfaces.cli.main:app -EXPOSE 8081 +EXPOSE $API_PORT diff --git a/.launch/grpc/Dockerfile b/.launch/grpc/Dockerfile new file mode 100644 index 0000000..92eb352 --- /dev/null +++ b/.launch/grpc/Dockerfile @@ -0,0 +1,18 @@ +FROM python:3.12.0-slim + +# env variables +ENV PYTHONDONTWRITEBYTECODE 1 +ENV PYTHONUNBUFFERED 1 + +RUN mkdir /app +WORKDIR /app +COPY . 
/app/
+RUN pip install --upgrade pip
+RUN pip install poetry
+RUN poetry config virtualenvs.create false
+RUN poetry install --no-root
+CMD alembic upgrade head
+
+ENV PYTHONPATH=/app
+
+CMD alembic upgrade head && python -m src.app.interfaces.grpc.server
diff --git a/README.rst b/README.rst
index 8e1de7b..f423d06 100644
--- a/README.rst
+++ b/README.rst
@@ -89,6 +89,7 @@ Running the App via Scripts (Docker)::
     # use flags:
     # --recreate to remove and rebuild containers and images
     # --run_api to also start the API container
+    # --run_grpc to also start the gRPC container
     # example: bash local_run.sh --recreate --run_api


@@ -112,6 +113,9 @@ Running the App locally::
     # run Consumer
     python -m src.app.consume

+    # run gRPC server
+    python -m src.app.interfaces.grpc.server
+

 API Documentation::

@@ -122,6 +126,27 @@ Database Migrations::
     alembic revision --autogenerate -m "some message"
     alembic upgrade head

+gRPC::
+
+    # Generate the Python stubs from the .proto definitions
+
+    python -m grpc_tools.protoc \
+        --proto_path ./src/app/interfaces/grpc/protos \
+        --python_out=./src/app/interfaces/grpc/pb/debug \
+        --grpc_python_out=./src/app/interfaces/grpc/pb/debug ./src/app/interfaces/grpc/protos/debug.proto
+
+    python -m grpc_tools.protoc \
+        --proto_path ./src/app/interfaces/grpc/protos \
+        --python_out=./src/app/interfaces/grpc/pb/example \
+        --grpc_python_out=./src/app/interfaces/grpc/pb/example ./src/app/interfaces/grpc/protos/example.proto
+
+    # Run server
+    python -m src.app.interfaces.grpc.server
+
+    # Run test client
+    python -m src.app.interfaces.grpc.client
+
+
 Code Quality Checks::

     bash beautify.sh
diff --git a/local_run.sh b/local_run.sh
index bc8f040..7419fd0 100644
--- a/local_run.sh
+++ b/local_run.sh
@@ -47,41 +47,54 @@ done < "$INPUT_ENV_FILE"
 DOCKER_PREFIX="local"
 RECREATE=false
 RUN_API=false
+RUN_GRPC=false

 # Parse script arguments
 while [ "$#" -gt 0 ]; do
     case $1 in
         --recreate) RECREATE=true ;;
         --run_api) RUN_API=true ;;
+        --run_grpc) RUN_GRPC=true ;;
     esac
     shift
 done
+
+IMAGE_CELERY="celery_img"
+IMAGE_CONSUME="consume_img"
+IMAGE_API="api_img"
+IMAGE_GRPC="grpc_img"

 # ----------------------------------------------------------------------
-# Recreate containers and images if requested
+# Rebuild images, removing old containers and images, if requested
 # ----------------------------------------------------------------------
 if [ "$RECREATE" = true ]; then
     # Remove old containers
     docker ps -a --filter "name=${DOCKER_PREFIX}*" --format "{{.ID}}" | xargs -r docker rm -f

     # Remove old images
-    docker rmi celery_img
-    docker rmi consume_img
-    docker rmi api_img
+    docker rmi $IMAGE_CELERY || true
+    docker rmi $IMAGE_CONSUME || true
+    docker rmi $IMAGE_API || true
+    docker rmi $IMAGE_GRPC || true

     echo " 🗑️ Removed old containers and images"

     # Build new images
-    docker build -t celery_img --no-cache -f .launch/celery/Dockerfile .
+    docker build -t $IMAGE_CELERY --no-cache -f .launch/celery/Dockerfile .
     echo " 🏗️ Built celery_img image"

-    docker build -t consume_img --no-cache -f .launch/consume/Dockerfile .
+    docker build -t $IMAGE_CONSUME --no-cache -f .launch/consume/Dockerfile .
     echo " 🏗️ Built consume_img image"

     if [ "$RUN_API" = true ]; then
-        docker build -t api_img --no-cache -f .launch/api/Dockerfile .
+        docker build -t $IMAGE_API --no-cache -f .launch/api/Dockerfile .
         echo " 🏗️ Built api_img image"
     fi
+
+    if [ "$RUN_GRPC" = true ]; then
+        docker build -t $IMAGE_GRPC --no-cache -f .launch/grpc/Dockerfile .
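+        # optional sanity check (a sketch): warn early if the gRPC image build failed;
+        # "docker image inspect" exits non-zero when the image is missing
+        docker image inspect $IMAGE_GRPC >/dev/null 2>&1 || echo " ⚠️ $IMAGE_GRPC image missing"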
+ echo " 🏗️ Built grpc_img image" + fi fi # ---------------------------------------------------------------------- @@ -95,7 +108,7 @@ if [ ! "$(docker ps -aq -f name=${DOCKER_PREFIX}_celery)" ]; then --shm-size="512m" \ --cpus=2 \ -e CELERY_ARGS="worker -l INFO -E -B -Q default_queue --concurrency=2 -n default@%h" \ - celery_img || true + $IMAGE_CELERY || true docker run -d --name "${DOCKER_PREFIX}_flower" \ -e broker_url=$CELERY_BROKER_URL \ @@ -113,7 +126,7 @@ if [ ! "$(docker ps -aq -f name=${DOCKER_PREFIX}_consume)" ]; then --env-file ./.env_docker \ --shm-size="512m" \ --cpus=1 \ - consume_img || true + $IMAGE_CONSUME || true fi echo " ✅ ${DOCKER_PREFIX}_consume UP" @@ -124,21 +137,38 @@ if [ -z "$(docker ps -aq -f name=${DOCKER_PREFIX}_api)" ] && [ "$RUN_API" = true --env-file ./.env_docker \ --shm-size="1g" \ --cpus=1 \ - -p 8081:8081 \ - api_img || true + -p $API_PORT:$API_PORT \ + $IMAGE_API || true fi if [ "$RUN_API" = true ]; then echo " ✅ ${DOCKER_PREFIX}_api UP" fi +# gRpc container (optional) +if [ -z "$(docker ps -aq -f name=${DOCKER_PREFIX}_grpc)" ] && [ "$RUN_GRPC" = true ]; then + docker run -d \ + --name "${DOCKER_PREFIX}_grpc" \ + --env-file ./.env_docker \ + --shm-size="1g" \ + --cpus=1 \ + -p $GRPC_PORT:$GRPC_PORT \ + $IMAGE_GRPC || true +fi +if [ "$RUN_GRPC" = true ]; then +echo " ✅ ${DOCKER_PREFIX}_grpc UP" +fi + # ---------------------------------------------------------------------- # Print results # ---------------------------------------------------------------------- echo "" -echo "-----------------------------------------------------------------" +echo "-------------------------------------------------------------------------------" if [ "$RUN_API" = true ]; then -echo " 💎 http://0.0.0.0:8081/docs - API" +echo " 💎 http://0.0.0.0:${API_PORT}/docs **** API" +fi +if [ "$RUN_GRPC" = true ]; then +echo " 💎 http://0.0.0.0:${GRPC_PORT}/ ******* gRpc" fi -echo " ⚙️ http://0.0.0.0:5555 - Flower[celery monitoring]" -echo "----------------------------------------------------------------" +echo " ⚙️ http://0.0.0.0:5555 ********* Flower[celery monitoring]" +echo "-------------------------------------------------------------------------------" diff --git a/poetry.lock b/poetry.lock index ae4b91f..d0b2bd9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -95,13 +95,13 @@ files = [ [[package]] name = "alembic" -version = "1.16.4" +version = "1.16.5" description = "A database migration tool for SQLAlchemy." 
optional = false python-versions = ">=3.9" files = [ - {file = "alembic-1.16.4-py3-none-any.whl", hash = "sha256:b05e51e8e82efc1abd14ba2af6392897e145930c3e0a2faf2b0da2f7f7fd660d"}, - {file = "alembic-1.16.4.tar.gz", hash = "sha256:efab6ada0dd0fae2c92060800e0bf5c1dc26af15a10e02fb4babff164b4725e2"}, + {file = "alembic-1.16.5-py3-none-any.whl", hash = "sha256:e845dfe090c5ffa7b92593ae6687c5cb1a101e91fa53868497dbd79847f9dbe3"}, + {file = "alembic-1.16.5.tar.gz", hash = "sha256:a88bb7f6e513bd4301ecf4c7f2206fe93f9913f9b48dac3b78babde2d6fe765e"}, ] [package.dependencies] @@ -605,99 +605,99 @@ files = [ [[package]] name = "coverage" -version = "7.10.4" +version = "7.10.6" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d92d6edb0ccafd20c6fbf9891ca720b39c2a6a4b4a6f9cf323ca2c986f33e475"}, - {file = "coverage-7.10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7202da14dc0236884fcc45665ffb2d79d4991a53fbdf152ab22f69f70923cc22"}, - {file = "coverage-7.10.4-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ada418633ae24ec8d0fcad5efe6fc7aa3c62497c6ed86589e57844ad04365674"}, - {file = "coverage-7.10.4-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b828e33eca6c3322adda3b5884456f98c435182a44917ded05005adfa1415500"}, - {file = "coverage-7.10.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:802793ba397afcfdbe9f91f89d65ae88b958d95edc8caf948e1f47d8b6b2b606"}, - {file = "coverage-7.10.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d0b23512338c54101d3bf7a1ab107d9d75abda1d5f69bc0887fd079253e4c27e"}, - {file = "coverage-7.10.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f36b7dcf72d06a8c5e2dd3aca02be2b1b5db5f86404627dff834396efce958f2"}, - {file = "coverage-7.10.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fce316c367a1dc2c411821365592eeb335ff1781956d87a0410eae248188ba51"}, - {file = "coverage-7.10.4-cp310-cp310-win32.whl", hash = "sha256:8c5dab29fc8070b3766b5fc85f8d89b19634584429a2da6d42da5edfadaf32ae"}, - {file = "coverage-7.10.4-cp310-cp310-win_amd64.whl", hash = "sha256:4b0d114616f0fccb529a1817457d5fb52a10e106f86c5fb3b0bd0d45d0d69b93"}, - {file = "coverage-7.10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:05d5f98ec893d4a2abc8bc5f046f2f4367404e7e5d5d18b83de8fde1093ebc4f"}, - {file = "coverage-7.10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9267efd28f8994b750d171e58e481e3bbd69e44baed540e4c789f8e368b24b88"}, - {file = "coverage-7.10.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4456a039fdc1a89ea60823d0330f1ac6f97b0dbe9e2b6fb4873e889584b085fb"}, - {file = "coverage-7.10.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c2bfbd2a9f7e68a21c5bd191be94bfdb2691ac40d325bac9ef3ae45ff5c753d9"}, - {file = "coverage-7.10.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ab7765f10ae1df7e7fe37de9e64b5a269b812ee22e2da3f84f97b1c7732a0d8"}, - {file = "coverage-7.10.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a09b13695166236e171ec1627ff8434b9a9bae47528d0ba9d944c912d33b3d2"}, - {file = "coverage-7.10.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5c9e75dfdc0167d5675e9804f04a56b2cf47fb83a524654297000b578b8adcb7"}, - {file = 
"coverage-7.10.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c751261bfe6481caba15ec005a194cb60aad06f29235a74c24f18546d8377df0"}, - {file = "coverage-7.10.4-cp311-cp311-win32.whl", hash = "sha256:051c7c9e765f003c2ff6e8c81ccea28a70fb5b0142671e4e3ede7cebd45c80af"}, - {file = "coverage-7.10.4-cp311-cp311-win_amd64.whl", hash = "sha256:1a647b152f10be08fb771ae4a1421dbff66141e3d8ab27d543b5eb9ea5af8e52"}, - {file = "coverage-7.10.4-cp311-cp311-win_arm64.whl", hash = "sha256:b09b9e4e1de0d406ca9f19a371c2beefe3193b542f64a6dd40cfcf435b7d6aa0"}, - {file = "coverage-7.10.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a1f0264abcabd4853d4cb9b3d164adbf1565da7dab1da1669e93f3ea60162d79"}, - {file = "coverage-7.10.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:536cbe6b118a4df231b11af3e0f974a72a095182ff8ec5f4868c931e8043ef3e"}, - {file = "coverage-7.10.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9a4c0d84134797b7bf3f080599d0cd501471f6c98b715405166860d79cfaa97e"}, - {file = "coverage-7.10.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7c155fc0f9cee8c9803ea0ad153ab6a3b956baa5d4cd993405dc0b45b2a0b9e0"}, - {file = "coverage-7.10.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5f2ab6e451d4b07855d8bcf063adf11e199bff421a4ba57f5bb95b7444ca62"}, - {file = "coverage-7.10.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:685b67d99b945b0c221be0780c336b303a7753b3e0ec0d618c795aada25d5e7a"}, - {file = "coverage-7.10.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0c079027e50c2ae44da51c2e294596cbc9dbb58f7ca45b30651c7e411060fc23"}, - {file = "coverage-7.10.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3749aa72b93ce516f77cf5034d8e3c0dfd45c6e8a163a602ede2dc5f9a0bb927"}, - {file = "coverage-7.10.4-cp312-cp312-win32.whl", hash = "sha256:fecb97b3a52fa9bcd5a7375e72fae209088faf671d39fae67261f37772d5559a"}, - {file = "coverage-7.10.4-cp312-cp312-win_amd64.whl", hash = "sha256:26de58f355626628a21fe6a70e1e1fad95702dafebfb0685280962ae1449f17b"}, - {file = "coverage-7.10.4-cp312-cp312-win_arm64.whl", hash = "sha256:67e8885408f8325198862bc487038a4980c9277d753cb8812510927f2176437a"}, - {file = "coverage-7.10.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b8e1d2015d5dfdbf964ecef12944c0c8c55b885bb5c0467ae8ef55e0e151233"}, - {file = "coverage-7.10.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:25735c299439018d66eb2dccf54f625aceb78645687a05f9f848f6e6c751e169"}, - {file = "coverage-7.10.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:715c06cb5eceac4d9b7cdf783ce04aa495f6aff657543fea75c30215b28ddb74"}, - {file = "coverage-7.10.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e017ac69fac9aacd7df6dc464c05833e834dc5b00c914d7af9a5249fcccf07ef"}, - {file = "coverage-7.10.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bad180cc40b3fccb0f0e8c702d781492654ac2580d468e3ffc8065e38c6c2408"}, - {file = "coverage-7.10.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:becbdcd14f685fada010a5f792bf0895675ecf7481304fe159f0cd3f289550bd"}, - {file = "coverage-7.10.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0b485ca21e16a76f68060911f97ebbe3e0d891da1dbbce6af7ca1ab3f98b9097"}, - {file = "coverage-7.10.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:6c1d098ccfe8e1e0a1ed9a0249138899948afd2978cbf48eb1cc3fcd38469690"}, - {file = "coverage-7.10.4-cp313-cp313-win32.whl", hash = "sha256:8630f8af2ca84b5c367c3df907b1706621abe06d6929f5045fd628968d421e6e"}, - {file = "coverage-7.10.4-cp313-cp313-win_amd64.whl", hash = "sha256:f68835d31c421736be367d32f179e14ca932978293fe1b4c7a6a49b555dff5b2"}, - {file = "coverage-7.10.4-cp313-cp313-win_arm64.whl", hash = "sha256:6eaa61ff6724ca7ebc5326d1fae062d85e19b38dd922d50903702e6078370ae7"}, - {file = "coverage-7.10.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:702978108876bfb3d997604930b05fe769462cc3000150b0e607b7b444f2fd84"}, - {file = "coverage-7.10.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e8f978e8c5521d9c8f2086ac60d931d583fab0a16f382f6eb89453fe998e2484"}, - {file = "coverage-7.10.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:df0ac2ccfd19351411c45e43ab60932b74472e4648b0a9edf6a3b58846e246a9"}, - {file = "coverage-7.10.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:73a0d1aaaa3796179f336448e1576a3de6fc95ff4f07c2d7251d4caf5d18cf8d"}, - {file = "coverage-7.10.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:873da6d0ed6b3ffc0bc01f2c7e3ad7e2023751c0d8d86c26fe7322c314b031dc"}, - {file = "coverage-7.10.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c6446c75b0e7dda5daa876a1c87b480b2b52affb972fedd6c22edf1aaf2e00ec"}, - {file = "coverage-7.10.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6e73933e296634e520390c44758d553d3b573b321608118363e52113790633b9"}, - {file = "coverage-7.10.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:52073d4b08d2cb571234c8a71eb32af3c6923149cf644a51d5957ac128cf6aa4"}, - {file = "coverage-7.10.4-cp313-cp313t-win32.whl", hash = "sha256:e24afb178f21f9ceb1aefbc73eb524769aa9b504a42b26857243f881af56880c"}, - {file = "coverage-7.10.4-cp313-cp313t-win_amd64.whl", hash = "sha256:be04507ff1ad206f4be3d156a674e3fb84bbb751ea1b23b142979ac9eebaa15f"}, - {file = "coverage-7.10.4-cp313-cp313t-win_arm64.whl", hash = "sha256:f3e3ff3f69d02b5dad67a6eac68cc9c71ae343b6328aae96e914f9f2f23a22e2"}, - {file = "coverage-7.10.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a59fe0af7dd7211ba595cf7e2867458381f7e5d7b4cffe46274e0b2f5b9f4eb4"}, - {file = "coverage-7.10.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3a6c35c5b70f569ee38dc3350cd14fdd0347a8b389a18bb37538cc43e6f730e6"}, - {file = "coverage-7.10.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:acb7baf49f513554c4af6ef8e2bd6e8ac74e6ea0c7386df8b3eb586d82ccccc4"}, - {file = "coverage-7.10.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a89afecec1ed12ac13ed203238b560cbfad3522bae37d91c102e690b8b1dc46c"}, - {file = "coverage-7.10.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:480442727f464407d8ade6e677b7f21f3b96a9838ab541b9a28ce9e44123c14e"}, - {file = "coverage-7.10.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a89bf193707f4a17f1ed461504031074d87f035153239f16ce86dfb8f8c7ac76"}, - {file = "coverage-7.10.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:3ddd912c2fc440f0fb3229e764feec85669d5d80a988ff1b336a27d73f63c818"}, - {file = "coverage-7.10.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8a538944ee3a42265e61c7298aeba9ea43f31c01271cf028f437a7b4075592cf"}, - {file = 
"coverage-7.10.4-cp314-cp314-win32.whl", hash = "sha256:fd2e6002be1c62476eb862b8514b1ba7e7684c50165f2a8d389e77da6c9a2ebd"}, - {file = "coverage-7.10.4-cp314-cp314-win_amd64.whl", hash = "sha256:ec113277f2b5cf188d95fb66a65c7431f2b9192ee7e6ec9b72b30bbfb53c244a"}, - {file = "coverage-7.10.4-cp314-cp314-win_arm64.whl", hash = "sha256:9744954bfd387796c6a091b50d55ca7cac3d08767795b5eec69ad0f7dbf12d38"}, - {file = "coverage-7.10.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5af4829904dda6aabb54a23879f0f4412094ba9ef153aaa464e3c1b1c9bc98e6"}, - {file = "coverage-7.10.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7bba5ed85e034831fac761ae506c0644d24fd5594727e174b5a73aff343a7508"}, - {file = "coverage-7.10.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d57d555b0719834b55ad35045de6cc80fc2b28e05adb6b03c98479f9553b387f"}, - {file = "coverage-7.10.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ba62c51a72048bb1ea72db265e6bd8beaabf9809cd2125bbb5306c6ce105f214"}, - {file = "coverage-7.10.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0acf0c62a6095f07e9db4ec365cc58c0ef5babb757e54745a1aa2ea2a2564af1"}, - {file = "coverage-7.10.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e1033bf0f763f5cf49ffe6594314b11027dcc1073ac590b415ea93463466deec"}, - {file = "coverage-7.10.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:92c29eff894832b6a40da1789b1f252305af921750b03ee4535919db9179453d"}, - {file = "coverage-7.10.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:822c4c830989c2093527e92acd97be4638a44eb042b1bdc0e7a278d84a070bd3"}, - {file = "coverage-7.10.4-cp314-cp314t-win32.whl", hash = "sha256:e694d855dac2e7cf194ba33653e4ba7aad7267a802a7b3fc4347d0517d5d65cd"}, - {file = "coverage-7.10.4-cp314-cp314t-win_amd64.whl", hash = "sha256:efcc54b38ef7d5bfa98050f220b415bc5bb3d432bd6350a861cf6da0ede2cdcd"}, - {file = "coverage-7.10.4-cp314-cp314t-win_arm64.whl", hash = "sha256:6f3a3496c0fa26bfac4ebc458747b778cff201c8ae94fa05e1391bab0dbc473c"}, - {file = "coverage-7.10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:48fd4d52600c2a9d5622e52dfae674a7845c5e1dceaf68b88c99feb511fbcfd6"}, - {file = "coverage-7.10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:56217b470d09d69e6b7dcae38200f95e389a77db801cb129101697a4553b18b6"}, - {file = "coverage-7.10.4-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:44ac3f21a6e28c5ff7f7a47bca5f87885f6a1e623e637899125ba47acd87334d"}, - {file = "coverage-7.10.4-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3387739d72c84d17b4d2f7348749cac2e6700e7152026912b60998ee9a40066b"}, - {file = "coverage-7.10.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f111ff20d9a6348e0125be892608e33408dd268f73b020940dfa8511ad05503"}, - {file = "coverage-7.10.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:01a852f0a9859734b018a3f483cc962d0b381d48d350b1a0c47d618c73a0c398"}, - {file = "coverage-7.10.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:225111dd06759ba4e37cee4c0b4f3df2b15c879e9e3c37bf986389300b9917c3"}, - {file = "coverage-7.10.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2178d4183bd1ba608f0bb12e71e55838ba1b7dbb730264f8b08de9f8ef0c27d0"}, - {file = "coverage-7.10.4-cp39-cp39-win32.whl", hash = "sha256:93d175fe81913aee7a6ea430abbdf2a79f1d9fd451610e12e334e4fe3264f563"}, - 
{file = "coverage-7.10.4-cp39-cp39-win_amd64.whl", hash = "sha256:2221a823404bb941c7721cf0ef55ac6ee5c25d905beb60c0bba5e5e85415d353"}, - {file = "coverage-7.10.4-py3-none-any.whl", hash = "sha256:065d75447228d05121e5c938ca8f0e91eed60a1eb2d1258d42d5084fecfc3302"}, - {file = "coverage-7.10.4.tar.gz", hash = "sha256:25f5130af6c8e7297fd14634955ba9e1697f47143f289e2a23284177c0061d27"}, + {file = "coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70e7bfbd57126b5554aa482691145f798d7df77489a177a6bef80de78860a356"}, + {file = "coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e41be6f0f19da64af13403e52f2dec38bbc2937af54df8ecef10850ff8d35301"}, + {file = "coverage-7.10.6-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c61fc91ab80b23f5fddbee342d19662f3d3328173229caded831aa0bd7595460"}, + {file = "coverage-7.10.6-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10356fdd33a7cc06e8051413140bbdc6f972137508a3572e3f59f805cd2832fd"}, + {file = "coverage-7.10.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80b1695cf7c5ebe7b44bf2521221b9bb8cdf69b1f24231149a7e3eb1ae5fa2fb"}, + {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2e4c33e6378b9d52d3454bd08847a8651f4ed23ddbb4a0520227bd346382bbc6"}, + {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c8a3ec16e34ef980a46f60dc6ad86ec60f763c3f2fa0db6d261e6e754f72e945"}, + {file = "coverage-7.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7d79dabc0a56f5af990cc6da9ad1e40766e82773c075f09cc571e2076fef882e"}, + {file = "coverage-7.10.6-cp310-cp310-win32.whl", hash = "sha256:86b9b59f2b16e981906e9d6383eb6446d5b46c278460ae2c36487667717eccf1"}, + {file = "coverage-7.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:e132b9152749bd33534e5bd8565c7576f135f157b4029b975e15ee184325f528"}, + {file = "coverage-7.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c706db3cabb7ceef779de68270150665e710b46d56372455cd741184f3868d8f"}, + {file = "coverage-7.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e0c38dc289e0508ef68ec95834cb5d2e96fdbe792eaccaa1bccac3966bbadcc"}, + {file = "coverage-7.10.6-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:752a3005a1ded28f2f3a6e8787e24f28d6abe176ca64677bcd8d53d6fe2ec08a"}, + {file = "coverage-7.10.6-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:689920ecfd60f992cafca4f5477d55720466ad2c7fa29bb56ac8d44a1ac2b47a"}, + {file = "coverage-7.10.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec98435796d2624d6905820a42f82149ee9fc4f2d45c2c5bc5a44481cc50db62"}, + {file = "coverage-7.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b37201ce4a458c7a758ecc4efa92fa8ed783c66e0fa3c42ae19fc454a0792153"}, + {file = "coverage-7.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2904271c80898663c810a6b067920a61dd8d38341244a3605bd31ab55250dad5"}, + {file = "coverage-7.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5aea98383463d6e1fa4e95416d8de66f2d0cb588774ee20ae1b28df826bcb619"}, + {file = "coverage-7.10.6-cp311-cp311-win32.whl", hash = "sha256:e3fb1fa01d3598002777dd259c0c2e6d9d5e10e7222976fc8e03992f972a2cba"}, + {file = "coverage-7.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:f35ed9d945bece26553d5b4c8630453169672bea0050a564456eb88bdffd927e"}, + {file = 
"coverage-7.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:99e1a305c7765631d74b98bf7dbf54eeea931f975e80f115437d23848ee8c27c"}, + {file = "coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b2dd6059938063a2c9fee1af729d4f2af28fd1a545e9b7652861f0d752ebcea"}, + {file = "coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:388d80e56191bf846c485c14ae2bc8898aa3124d9d35903fef7d907780477634"}, + {file = "coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:90cb5b1a4670662719591aa92d0095bb41714970c0b065b02a2610172dbf0af6"}, + {file = "coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:961834e2f2b863a0e14260a9a273aff07ff7818ab6e66d2addf5628590c628f9"}, + {file = "coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf9a19f5012dab774628491659646335b1928cfc931bf8d97b0d5918dd58033c"}, + {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99c4283e2a0e147b9c9cc6bc9c96124de9419d6044837e9799763a0e29a7321a"}, + {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:282b1b20f45df57cc508c1e033403f02283adfb67d4c9c35a90281d81e5c52c5"}, + {file = "coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cdbe264f11afd69841bd8c0d83ca10b5b32853263ee62e6ac6a0ab63895f972"}, + {file = "coverage-7.10.6-cp312-cp312-win32.whl", hash = "sha256:a517feaf3a0a3eca1ee985d8373135cfdedfbba3882a5eab4362bda7c7cf518d"}, + {file = "coverage-7.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:856986eadf41f52b214176d894a7de05331117f6035a28ac0016c0f63d887629"}, + {file = "coverage-7.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:acf36b8268785aad739443fa2780c16260ee3fa09d12b3a70f772ef100939d80"}, + {file = "coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ffea0575345e9ee0144dfe5701aa17f3ba546f8c3bb48db62ae101afb740e7d6"}, + {file = "coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d91d7317cde40a1c249d6b7382750b7e6d86fad9d8eaf4fa3f8f44cf171e80"}, + {file = "coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e23dd5408fe71a356b41baa82892772a4cefcf758f2ca3383d2aa39e1b7a003"}, + {file = "coverage-7.10.6-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0f3f56e4cb573755e96a16501a98bf211f100463d70275759e73f3cbc00d4f27"}, + {file = "coverage-7.10.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db4a1d897bbbe7339946ffa2fe60c10cc81c43fab8b062d3fcb84188688174a4"}, + {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8fd7879082953c156d5b13c74aa6cca37f6a6f4747b39538504c3f9c63d043d"}, + {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:28395ca3f71cd103b8c116333fa9db867f3a3e1ad6a084aa3725ae002b6583bc"}, + {file = "coverage-7.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:61c950fc33d29c91b9e18540e1aed7d9f6787cc870a3e4032493bbbe641d12fc"}, + {file = "coverage-7.10.6-cp313-cp313-win32.whl", hash = "sha256:160c00a5e6b6bdf4e5984b0ef21fc860bc94416c41b7df4d63f536d17c38902e"}, + {file = "coverage-7.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:628055297f3e2aa181464c3808402887643405573eb3d9de060d81531fa79d32"}, + {file = "coverage-7.10.6-cp313-cp313-win_arm64.whl", hash = 
"sha256:df4ec1f8540b0bcbe26ca7dd0f541847cc8a108b35596f9f91f59f0c060bfdd2"}, + {file = "coverage-7.10.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c9a8b7a34a4de3ed987f636f71881cd3b8339f61118b1aa311fbda12741bff0b"}, + {file = "coverage-7.10.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dd5af36092430c2b075cee966719898f2ae87b636cefb85a653f1d0ba5d5393"}, + {file = "coverage-7.10.6-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b0353b0f0850d49ada66fdd7d0c7cdb0f86b900bb9e367024fd14a60cecc1e27"}, + {file = "coverage-7.10.6-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d6b9ae13d5d3e8aeca9ca94198aa7b3ebbc5acfada557d724f2a1f03d2c0b0df"}, + {file = "coverage-7.10.6-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:675824a363cc05781b1527b39dc2587b8984965834a748177ee3c37b64ffeafb"}, + {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:692d70ea725f471a547c305f0d0fc6a73480c62fb0da726370c088ab21aed282"}, + {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:851430a9a361c7a8484a36126d1d0ff8d529d97385eacc8dfdc9bfc8c2d2cbe4"}, + {file = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d9369a23186d189b2fc95cc08b8160ba242057e887d766864f7adf3c46b2df21"}, + {file = "coverage-7.10.6-cp313-cp313t-win32.whl", hash = "sha256:92be86fcb125e9bda0da7806afd29a3fd33fdf58fba5d60318399adf40bf37d0"}, + {file = "coverage-7.10.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6b3039e2ca459a70c79523d39347d83b73f2f06af5624905eba7ec34d64d80b5"}, + {file = "coverage-7.10.6-cp313-cp313t-win_arm64.whl", hash = "sha256:3fb99d0786fe17b228eab663d16bee2288e8724d26a199c29325aac4b0319b9b"}, + {file = "coverage-7.10.6-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6008a021907be8c4c02f37cdc3ffb258493bdebfeaf9a839f9e71dfdc47b018e"}, + {file = "coverage-7.10.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5e75e37f23eb144e78940b40395b42f2321951206a4f50e23cfd6e8a198d3ceb"}, + {file = "coverage-7.10.6-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0f7cb359a448e043c576f0da00aa8bfd796a01b06aa610ca453d4dde09cc1034"}, + {file = "coverage-7.10.6-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c68018e4fc4e14b5668f1353b41ccf4bc83ba355f0e1b3836861c6f042d89ac1"}, + {file = "coverage-7.10.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cd4b2b0707fc55afa160cd5fc33b27ccbf75ca11d81f4ec9863d5793fc6df56a"}, + {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cec13817a651f8804a86e4f79d815b3b28472c910e099e4d5a0e8a3b6a1d4cb"}, + {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f2a6a8e06bbda06f78739f40bfb56c45d14eb8249d0f0ea6d4b3d48e1f7c695d"}, + {file = "coverage-7.10.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:081b98395ced0d9bcf60ada7661a0b75f36b78b9d7e39ea0790bb4ed8da14747"}, + {file = "coverage-7.10.6-cp314-cp314-win32.whl", hash = "sha256:6937347c5d7d069ee776b2bf4e1212f912a9f1f141a429c475e6089462fcecc5"}, + {file = "coverage-7.10.6-cp314-cp314-win_amd64.whl", hash = "sha256:adec1d980fa07e60b6ef865f9e5410ba760e4e1d26f60f7e5772c73b9a5b0713"}, + {file = "coverage-7.10.6-cp314-cp314-win_arm64.whl", hash = "sha256:a80f7aef9535442bdcf562e5a0d5a5538ce8abe6bb209cfbf170c462ac2c2a32"}, + {file = 
"coverage-7.10.6-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:0de434f4fbbe5af4fa7989521c655c8c779afb61c53ab561b64dcee6149e4c65"}, + {file = "coverage-7.10.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6e31b8155150c57e5ac43ccd289d079eb3f825187d7c66e755a055d2c85794c6"}, + {file = "coverage-7.10.6-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:98cede73eb83c31e2118ae8d379c12e3e42736903a8afcca92a7218e1f2903b0"}, + {file = "coverage-7.10.6-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f863c08f4ff6b64fa8045b1e3da480f5374779ef187f07b82e0538c68cb4ff8e"}, + {file = "coverage-7.10.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b38261034fda87be356f2c3f42221fdb4171c3ce7658066ae449241485390d5"}, + {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e93b1476b79eae849dc3872faeb0bf7948fd9ea34869590bc16a2a00b9c82a7"}, + {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ff8a991f70f4c0cf53088abf1e3886edcc87d53004c7bb94e78650b4d3dac3b5"}, + {file = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ac765b026c9f33044419cbba1da913cfb82cca1b60598ac1c7a5ed6aac4621a0"}, + {file = "coverage-7.10.6-cp314-cp314t-win32.whl", hash = "sha256:441c357d55f4936875636ef2cfb3bee36e466dcf50df9afbd398ce79dba1ebb7"}, + {file = "coverage-7.10.6-cp314-cp314t-win_amd64.whl", hash = "sha256:073711de3181b2e204e4870ac83a7c4853115b42e9cd4d145f2231e12d670930"}, + {file = "coverage-7.10.6-cp314-cp314t-win_arm64.whl", hash = "sha256:137921f2bac5559334ba66122b753db6dc5d1cf01eb7b64eb412bb0d064ef35b"}, + {file = "coverage-7.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90558c35af64971d65fbd935c32010f9a2f52776103a259f1dee865fe8259352"}, + {file = "coverage-7.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8953746d371e5695405806c46d705a3cd170b9cc2b9f93953ad838f6c1e58612"}, + {file = "coverage-7.10.6-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c83f6afb480eae0313114297d29d7c295670a41c11b274e6bca0c64540c1ce7b"}, + {file = "coverage-7.10.6-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7eb68d356ba0cc158ca535ce1381dbf2037fa8cb5b1ae5ddfc302e7317d04144"}, + {file = "coverage-7.10.6-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b15a87265e96307482746d86995f4bff282f14b027db75469c446da6127433b"}, + {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fc53ba868875bfbb66ee447d64d6413c2db91fddcfca57025a0e7ab5b07d5862"}, + {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efeda443000aa23f276f4df973cb82beca682fd800bb119d19e80504ffe53ec2"}, + {file = "coverage-7.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9702b59d582ff1e184945d8b501ffdd08d2cee38d93a2206aa5f1365ce0b8d78"}, + {file = "coverage-7.10.6-cp39-cp39-win32.whl", hash = "sha256:2195f8e16ba1a44651ca684db2ea2b2d4b5345da12f07d9c22a395202a05b23c"}, + {file = "coverage-7.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:f32ff80e7ef6a5b5b606ea69a36e97b219cd9dc799bcf2963018a4d8f788cfbf"}, + {file = "coverage-7.10.6-py3-none-any.whl", hash = "sha256:92c4ecf6bf11b2e85fd4d8204814dc26e6a19f0c9d938c207c5cb0eadfcabbe3"}, + {file = "coverage-7.10.6.tar.gz", hash = "sha256:f644a3ae5933a552a29dbb9aa2f90c677a875f80ebea028e5a52a4f429044b90"}, ] [package.extras] @@ 
-754,13 +754,13 @@ gmpy2 = ["gmpy2"] [[package]] name = "email-validator" -version = "2.2.0" +version = "2.3.0" description = "A robust email address syntax and deliverability validation library." optional = false python-versions = ">=3.8" files = [ - {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, - {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, + {file = "email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4"}, + {file = "email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426"}, ] [package.dependencies] @@ -906,6 +906,134 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil", "setuptools"] +[[package]] +name = "grpcio" +version = "1.74.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.9" +files = [ + {file = "grpcio-1.74.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:85bd5cdf4ed7b2d6438871adf6afff9af7096486fcf51818a81b77ef4dd30907"}, + {file = "grpcio-1.74.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:68c8ebcca945efff9d86d8d6d7bfb0841cf0071024417e2d7f45c5e46b5b08eb"}, + {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:e154d230dc1bbbd78ad2fdc3039fa50ad7ffcf438e4eb2fa30bce223a70c7486"}, + {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8978003816c7b9eabe217f88c78bc26adc8f9304bf6a594b02e5a49b2ef9c11"}, + {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3d7bd6e3929fd2ea7fbc3f562e4987229ead70c9ae5f01501a46701e08f1ad9"}, + {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:136b53c91ac1d02c8c24201bfdeb56f8b3ac3278668cbb8e0ba49c88069e1bdc"}, + {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe0f540750a13fd8e5da4b3eaba91a785eea8dca5ccd2bc2ffe978caa403090e"}, + {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4e4181bfc24413d1e3a37a0b7889bea68d973d4b45dd2bc68bb766c140718f82"}, + {file = "grpcio-1.74.0-cp310-cp310-win32.whl", hash = "sha256:1733969040989f7acc3d94c22f55b4a9501a30f6aaacdbccfaba0a3ffb255ab7"}, + {file = "grpcio-1.74.0-cp310-cp310-win_amd64.whl", hash = "sha256:9e912d3c993a29df6c627459af58975b2e5c897d93287939b9d5065f000249b5"}, + {file = "grpcio-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31"}, + {file = "grpcio-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4"}, + {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce"}, + {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3"}, + {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182"}, + {file = "grpcio-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d"}, + {file = "grpcio-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f"}, + {file = "grpcio-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4"}, + {file = "grpcio-1.74.0-cp311-cp311-win32.whl", hash = "sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b"}, + {file = "grpcio-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11"}, + {file = "grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8"}, + {file = "grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6"}, + {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5"}, + {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49"}, + {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7"}, + {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3"}, + {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707"}, + {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b"}, + {file = "grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c"}, + {file = "grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc"}, + {file = "grpcio-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89"}, + {file = "grpcio-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01"}, + {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e"}, + {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91"}, + {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249"}, + {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362"}, + {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f"}, + {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20"}, + {file = "grpcio-1.74.0-cp313-cp313-win32.whl", hash = "sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa"}, + {file = "grpcio-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24"}, + {file = "grpcio-1.74.0-cp39-cp39-linux_armv7l.whl", hash = 
"sha256:4bc5fca10aaf74779081e16c2bcc3d5ec643ffd528d9e7b1c9039000ead73bae"}, + {file = "grpcio-1.74.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:6bab67d15ad617aff094c382c882e0177637da73cbc5532d52c07b4ee887a87b"}, + {file = "grpcio-1.74.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:655726919b75ab3c34cdad39da5c530ac6fa32696fb23119e36b64adcfca174a"}, + {file = "grpcio-1.74.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a2b06afe2e50ebfd46247ac3ba60cac523f54ec7792ae9ba6073c12daf26f0a"}, + {file = "grpcio-1.74.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f251c355167b2360537cf17bea2cf0197995e551ab9da6a0a59b3da5e8704f9"}, + {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f7b5882fb50632ab1e48cb3122d6df55b9afabc265582808036b6e51b9fd6b7"}, + {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:834988b6c34515545b3edd13e902c1acdd9f2465d386ea5143fb558f153a7176"}, + {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:22b834cef33429ca6cc28303c9c327ba9a3fafecbf62fae17e9a7b7163cc43ac"}, + {file = "grpcio-1.74.0-cp39-cp39-win32.whl", hash = "sha256:7d95d71ff35291bab3f1c52f52f474c632db26ea12700c2ff0ea0532cb0b5854"}, + {file = "grpcio-1.74.0-cp39-cp39-win_amd64.whl", hash = "sha256:ecde9ab49f58433abe02f9ed076c7b5be839cf0153883a6d23995937a82392fa"}, + {file = "grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.74.0)"] + +[[package]] +name = "grpcio-tools" +version = "1.74.0" +description = "Protobuf code generator for gRPC" +optional = false +python-versions = ">=3.9" +files = [ + {file = "grpcio_tools-1.74.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:796796b4d7e83a9cdd03bb95c6774fca060fd209d83fb9af5f043e9c6f06a1fa"}, + {file = "grpcio_tools-1.74.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d576b7786207359b63c2c2e3c387639b4177cf53b1e43d020b005deead32049e"}, + {file = "grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d73686934bfdd868be0dbfbfcba2a5f50a8b0b71362e86a133e8efcbdc5cad5d"}, + {file = "grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:187f99fd22de6e63fbf4f30b2e054a2e3c4fb80beec73b1f4716ea86192050f5"}, + {file = "grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bef8a16c34e68aaa2d246cd358629f8103730cb96cfc521f720378995f218282"}, + {file = "grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e41084adbae7176097aa9d08a13d98c189895ec8c967f5461975750d3537625a"}, + {file = "grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6b61337b47d981b4d270e3caa83607a900169617478c034e6f6baf16ab22d333"}, + {file = "grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7e920982b4eaab253affbd45ec6d5ec12d895f5c143374ef4c3eadef49162373"}, + {file = "grpcio_tools-1.74.0-cp310-cp310-win32.whl", hash = "sha256:b966f3b93f9d24151591d096ecf9c3fdb419a50d486761f7d28a9a69b028b627"}, + {file = "grpcio_tools-1.74.0-cp310-cp310-win_amd64.whl", hash = "sha256:03787990b56f5c3b3f72c722a7e74fbc5a3b769bbc31ad426e2c6f6a28a9d7c8"}, + {file = "grpcio_tools-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:9d9e28fbbab9b9e923c3d286949e8ff81ebbb402458698f0a2b1183b539779db"}, + {file = "grpcio_tools-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = 
"sha256:41040eb1b5d1e582687f6f19cf2efc4c191b6eab56b16f6fba50ac085c5ca4dd"}, + {file = "grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:1fdc013118e4e9054b6e1a64d16a0d4a17a4071042e674ada8673406ddb26e59"}, + {file = "grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f037414c527a2c4a3af15451d9e58d7856d0a62b3f6dd3f5b969ecba82f5e843"}, + {file = "grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536f53a6a8d1ba1c469d085066cfa0dd3bb51f07013b71857bc3ad1eabe3ab49"}, + {file = "grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1e23ff54dea7f6e9543dcebd2c0f4b7c9af39812966c05e1c5289477cb2bf2f7"}, + {file = "grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76072dee9fa99b33eb0c334a16e70d694df762df705c7a2481f702af33d81a28"}, + {file = "grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bdf91eb722f2990085b1342c277e212ec392e37bd493a2a21d9eb9238f28c3e"}, + {file = "grpcio_tools-1.74.0-cp311-cp311-win32.whl", hash = "sha256:a036cd2a4223901e7a9f6a9b394326a9352a4ad70bdd3f1d893f1b231fcfdf7e"}, + {file = "grpcio_tools-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:d1fdf245178158a92a2dc78e3545b6d13b6c917d9b80931fc85cfb3e9534a07d"}, + {file = "grpcio_tools-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:61d84f6050d7170712600f7ee1dac8849f5dc0bfe0044dd71132ee1e7aa2b373"}, + {file = "grpcio_tools-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:f0129a62711dbc1f1efd51d069d2ce0631d69e033bf3a046606c623acf935e08"}, + {file = "grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:5ec661f3bb41f0d2a30125ea382f4d5c874bf4f26d4d8e3839bb7e3b3c037b3e"}, + {file = "grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7970a9cf3002bec2eff5a449ac7398b77e5d171cbb534c47258c72409d0aea74"}, + {file = "grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f56d67b04790f84e216353341c6b298f1aeb591e1797fe955f606516c640936"}, + {file = "grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3d0c33cc984d21525f190cb1af479f8da46370df5f2ced1a4e50769ababd0c0"}, + {file = "grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:88e535c1cf349e57e371529ea9918f811c5eff88161f322bbc06d6222bad6d50"}, + {file = "grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c3cf9401ce72bc49582c2d80e0a2ee0e573e1c3c998c8bc5f739db8845e8e148"}, + {file = "grpcio_tools-1.74.0-cp312-cp312-win32.whl", hash = "sha256:b63e250da44b15c67b9a34c5c30c81059bde528fc8af092d7f43194469f7c719"}, + {file = "grpcio_tools-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:519d7cae085ae6695a8031bb990bf7766a922332b0a531e51342abc5431b78b5"}, + {file = "grpcio_tools-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:e2e22460355adbd0f25fdd7ed8b9ae53afb3875b9d5f34cdf1cf12559418245e"}, + {file = "grpcio_tools-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:0cab5a2c6ae75b555fee8a1a9a9b575205171e1de392fe2d4139a29e67d8f5bb"}, + {file = "grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:9b18afca48b55832402a716ea4634ef2b68927a8a17ddf4038f51812299255c9"}, + {file = "grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85f442a9e89e276bf89a0c9c76ea71647a927d967759333c1fa40300c27f7bd"}, + {file = 
"grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051ce925b0b99ae2daf61b3cba19962b8655cc2a72758ce4081b89272206f5a3"}, + {file = "grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:98c7b8eb0de6984cd7fa7335ce3383b3bb9a1559edc238c811df88008d5d3593"}, + {file = "grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f8f7d17b7573b9a2a6b4183fa4a56a2ab17370c8d0541e1424cf0c9c6f863434"}, + {file = "grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:db08b91ea0cd66dc4b1b929100e7aa84c9c10c51573c8282ec1ba05b41f887ef"}, + {file = "grpcio_tools-1.74.0-cp313-cp313-win32.whl", hash = "sha256:4b6c5efb331ae9e5f614437f4a5938459a8a5a1ab3dfe133d2bbdeaba39b894d"}, + {file = "grpcio_tools-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8324cd67f61f7900d227b36913ee5f0302ba3ba8777c8bc705afa8174098d28"}, + {file = "grpcio_tools-1.74.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:39045d07f2582b35685858e1616761b7ad45085e446941c8f9f7c6da523f83c3"}, + {file = "grpcio_tools-1.74.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:406ec87e2fd4cb6a40229fbecebcd11973afd4747484bfd5c2bc2ebe81545b7a"}, + {file = "grpcio_tools-1.74.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:70725de8cf724c54040502f199ea28df0e8bc480175eacbed8c999c9ad4c0ffe"}, + {file = "grpcio_tools-1.74.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:333003e6a9dc304da9e6b086294a8d25212c542284e60699a72b456c515f114c"}, + {file = "grpcio_tools-1.74.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5274a4f227e4bd244e3890a9238bda47b169765421ea87f157e4955ea39b4326"}, + {file = "grpcio_tools-1.74.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f476f1ec637888a49402a1acff52bb641ec01a8672f60b57c5ee0a1d0e0763d2"}, + {file = "grpcio_tools-1.74.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0e8c22e390800175417ec646fac99acaadcbd2f5cdb1a27694995ca86d3bbfd3"}, + {file = "grpcio_tools-1.74.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77b400d3c87b1f85be505366e299e00214e2266f604ab58616fc77d016336a24"}, + {file = "grpcio_tools-1.74.0-cp39-cp39-win32.whl", hash = "sha256:fc572f8af2d8f13db4b0091dcf518d6ca5c82ea6f59e8716683bd8aeb729b203"}, + {file = "grpcio_tools-1.74.0-cp39-cp39-win_amd64.whl", hash = "sha256:700d8933684f66dd8edc0324590fa61930bed8f9fb66322a48f5c7ba08386810"}, + {file = "grpcio_tools-1.74.0.tar.gz", hash = "sha256:88ab9eb18b6ac1b4872add6b394073bd8d44eee7c32e4dc60a022e25ffaffb95"}, +] + +[package.dependencies] +grpcio = ">=1.74.0" +protobuf = ">=6.31.1,<7.0.0" +setuptools = "*" + [[package]] name = "gunicorn" version = "23.0.0" @@ -1191,18 +1319,18 @@ files = [ [[package]] name = "marshmallow" -version = "4.0.0" +version = "4.0.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.9" files = [ - {file = "marshmallow-4.0.0-py3-none-any.whl", hash = "sha256:e7b0528337e9990fd64950f8a6b3a1baabed09ad17a0dfb844d701151f92d203"}, - {file = "marshmallow-4.0.0.tar.gz", hash = "sha256:3b6e80aac299a7935cfb97ed01d1854fb90b5079430969af92118ea1b12a8d55"}, + {file = "marshmallow-4.0.1-py3-none-any.whl", hash = "sha256:72f14ef346f81269dbddee891bac547dda1501e9e08b6a809756ea3dbb7936a1"}, + {file = "marshmallow-4.0.1.tar.gz", hash = "sha256:e1d860bd262737cb2d34e1541b84cb52c32c72c9474e3fe6f30f137ef8b0d97f"}, ] [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] -docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.2.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.1)", "sphinxext-opengraph (==0.10.0)"] +docs = ["autodocsumm (==0.2.14)", "furo (==2025.7.19)", "sphinx (==8.2.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.1)", "sphinxext-opengraph (==0.12.0)"] tests = ["pytest", "simplejson"] [[package]] @@ -1478,13 +1606,13 @@ files = [ [[package]] name = "platformdirs" -version = "4.3.8" +version = "4.4.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.9" files = [ - {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, - {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, + {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, + {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, ] [package.extras] @@ -1509,13 +1637,13 @@ testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "prompt-toolkit" -version = "3.0.51" +version = "3.0.52" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.8" files = [ - {file = "prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07"}, - {file = "prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed"}, + {file = "prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955"}, + {file = "prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855"}, ] [package.dependencies] @@ -1628,6 +1756,24 @@ files = [ {file = "propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168"}, ] +[[package]] +name = "protobuf" +version = "6.32.0" +description = "" +optional = false +python-versions = ">=3.9" +files = [ + {file = "protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741"}, + {file = "protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e"}, + {file = "protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0"}, + {file = "protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1"}, + {file = 
"protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c"}, + {file = "protobuf-6.32.0-cp39-cp39-win32.whl", hash = "sha256:7db8ed09024f115ac877a1427557b838705359f047b2ff2f2b2364892d19dacb"}, + {file = "protobuf-6.32.0-cp39-cp39-win_amd64.whl", hash = "sha256:15eba1b86f193a407607112ceb9ea0ba9569aed24f93333fe9a497cf2fda37d3"}, + {file = "protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783"}, + {file = "protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2"}, +] + [[package]] name = "psycopg2-binary" version = "2.9.10" @@ -1886,13 +2032,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" -version = "8.4.1" +version = "8.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.9" files = [ - {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"}, - {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"}, + {file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"}, + {file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"}, ] [package.dependencies] @@ -2148,6 +2294,26 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" +[[package]] +name = "setuptools" +version = "80.9.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +files = [ + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] +core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] + [[package]] name = "six" version = "1.17.0" @@ -2467,13 +2633,13 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.47.2" +version = "0.47.3" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.9" files = [ - {file = "starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b"}, - {file = "starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8"}, + {file = "starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51"}, + {file = "starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9"}, ] [package.dependencies] @@ -2496,13 +2662,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.14.1" +version = "4.15.0" description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" files = [ - {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, - {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] [[package]] @@ -2732,4 +2898,4 @@ propcache = ">=0.2.1" [metadata] lock-version = "2.0" python-versions = "3.12.0" -content-hash = "94fb6318c0fcbef4293adb15036cfefec3ae1cfccd6d0e042cb5dd1ff8fab863" +content-hash = "f9823e90e9e466fb0763a2100ce5f14969d57d84fcbe203e80eddae88f46d19d" diff --git a/pyproject.toml b/pyproject.toml index e039711..79e13e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,8 @@ celery = "^5.5.3" redis = "^6.4.0" aiokafka = "^0.12.0" pytz = "^2025.2" +grpcio = "^1.69.0" +grpcio-tools = "^1.69.0" [tool.poetry.group.dev.dependencies] pytest = "^8.3.4" @@ -61,3 +63,6 @@ target-version = ['py311'] python_version=3.12 disallow_untyped_defs=1 ignore_missing_imports=1 +exclude = [ + "src/app/interfaces/grpc/" +] diff --git a/src/app/config/settings.py b/src/app/config/settings.py index b8872ef..237402f 100644 --- a/src/app/config/settings.py +++ b/src/app/config/settings.py @@ -2,7 +2,7 @@ import pathlib import secrets from enum import Enum -from typing import List, Union +from typing import List, Optional, Union from environs import Env from pydantic.v1 import BaseSettings as PydanticSettings @@ -35,11 +35,18 @@ class SettingsBase(PydanticSettings): # API Settings # -------------------------------------------------------------------------- API: str = "/api" + API_PORT: int = env.int("API_PORT", 8000) CORS_ORIGIN_WHITELIST: List[str] = env.list("CORS_ORIGIN_WHITELIST", ["*"]) API_DEFAULT_LIMIT: int = env.int("API_DEFAULT_LIMIT", 25) API_LIMIT_ALLOWED_VALUES_LIST: List[int] = env.list("API_LIMIT_ALLOWED_VALUES_LIST", [1, 5, 10, 15, 25]) SHOW_API_DOCS: bool = env.bool("SHOW_API_DOCS", False) + # GRPC Settings + # -------------------------------------------------------------------------- + GRPC_HOST: Optional[str] = env.str("GRPC_HOST", "") + GRPC_PORT: Optional[int] = env.int("GRPC_PORT", None) + GRPC_URL: str = f"{GRPC_HOST}:{GRPC_PORT}" if GRPC_PORT else ""  # avoid int(None) at import time when GRPC_PORT is unset + # Auth settings # -------------------------------------------------------------------------- ALGORITHM = "HS256" diff --git a/src/app/interfaces/cli/gunicorn_config.py b/src/app/interfaces/cli/gunicorn_config.py index 11adfef..076dad3 100644 ---
a/src/app/interfaces/cli/gunicorn_config.py +++ b/src/app/interfaces/cli/gunicorn_config.py @@ -1,6 +1,10 @@ import multiprocessing +from src.app.config.settings import LaunchMode, settings + name = "Gunicorn Config" -bind = "0.0.0.0:8081" +bind = f"0.0.0.0:{settings.API_PORT}" worker_class = "uvicorn.workers.UvicornWorker" workers = multiprocessing.cpu_count() * 2 + 1 +if settings.LAUNCH_MODE != LaunchMode.PRODUCTION.value: + workers = 2 diff --git a/src/app/interfaces/grpc/client.py b/src/app/interfaces/grpc/client.py new file mode 100644 index 0000000..b3344d2 --- /dev/null +++ b/src/app/interfaces/grpc/client.py @@ -0,0 +1,32 @@ +import grpc + +from src.app.config.settings import settings +from src.app.interfaces.grpc.pb.debug import debug_pb2_grpc +from src.app.interfaces.grpc.pb.debug import debug_pb2 as pb2 +from google.protobuf.any_pb2 import Any + + +def run() -> None: + with grpc.insecure_channel(settings.GRPC_URL) as channel: + stub = debug_pb2_grpc.DebugServiceStub(channel) + data = pb2.SayMeqDataReq()  # type: ignore + + # Pack the SayMeqDataReq message into the Any field + any_data = Any() + any_data.Pack(data) + + # Send the request + stub.SendMessage(pb2.MessageReq(event="say_meow", data=any_data))  # type: ignore + + data_2 = pb2.TestDataReq(year="2025", month="05")  # type: ignore + + # Pack the TestDataReq message into the Any field + any_data_2 = Any() + any_data_2.Pack(data_2) + + # Send the request + stub.SendMessage(pb2.MessageReq(event="test_event", data=any_data_2))  # type: ignore + + +if __name__ == "__main__": + run() diff --git a/src/app/interfaces/grpc/pb/__init__.py b/src/app/interfaces/grpc/pb/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/interfaces/grpc/pb/debug/debug_pb2.py b/src/app/interfaces/grpc/pb/debug/debug_pb2.py new file mode 100644 index 0000000..0b3b748 --- /dev/null +++ b/src/app/interfaces/grpc/pb/debug/debug_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT!
+# NO CHECKED-IN PROTOBUF GENCODE +# source: debug.proto +# Protobuf Python Version: 5.29.0 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +_runtime_version.ValidateProtobufRuntimeVersion(_runtime_version.Domain.PUBLIC, 5, 29, 0, "", "debug.proto") +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x0b\x64\x65\x62ug.proto\x12\rgrpc.pb.debug\x1a\x19google/protobuf/any.proto"\x0f\n\rSayMeqDataReq"*\n\x0bTestDataReq\x12\x0c\n\x04year\x18\x01 \x01(\t\x12\r\n\x05month\x18\x02 \x01(\t"?\n\nMessageReq\x12\r\n\x05\x65vent\x18\x01 \x01(\t\x12"\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any".\n\x0bMessageResp\x12\x0e\n\x06status\x18\x01 \x01(\x08\x12\x0f\n\x07message\x18\x02 \x01(\t2V\n\x0c\x44\x65\x62ugService\x12\x46\n\x0bSendMessage\x12\x19.grpc.pb.debug.MessageReq\x1a\x1a.grpc.pb.debug.MessageResp"\x00\x62\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "debug_pb2", _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals["_SAYMEQDATAREQ"]._serialized_start = 57 + _globals["_SAYMEQDATAREQ"]._serialized_end = 72 + _globals["_TESTDATAREQ"]._serialized_start = 74 + _globals["_TESTDATAREQ"]._serialized_end = 116 + _globals["_MESSAGEREQ"]._serialized_start = 118 + _globals["_MESSAGEREQ"]._serialized_end = 181 + _globals["_MESSAGERESP"]._serialized_start = 183 + _globals["_MESSAGERESP"]._serialized_end = 229 + _globals["_DEBUGSERVICE"]._serialized_start = 231 + _globals["_DEBUGSERVICE"]._serialized_end = 317 +# @@protoc_insertion_point(module_scope) diff --git a/src/app/interfaces/grpc/pb/debug/debug_pb2_grpc.py b/src/app/interfaces/grpc/pb/debug/debug_pb2_grpc.py new file mode 100644 index 0000000..647e9b4 --- /dev/null +++ b/src/app/interfaces/grpc/pb/debug/debug_pb2_grpc.py @@ -0,0 +1,100 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +import src.app.interfaces.grpc.pb.debug.debug_pb2 as debug__pb2 + +GRPC_GENERATED_VERSION = "1.70.0" +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f"The grpc package installed is at version {GRPC_VERSION}," + + " but the generated code in debug_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + ) + + +class DebugServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.SendMessage = channel.unary_unary( + "/grpc.pb.debug.DebugService/SendMessage", + request_serializer=debug__pb2.MessageReq.SerializeToString, + response_deserializer=debug__pb2.MessageResp.FromString, + _registered_method=True, + ) + + +class DebugServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def SendMessage(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_DebugServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + "SendMessage": grpc.unary_unary_rpc_method_handler( + servicer.SendMessage, + request_deserializer=debug__pb2.MessageReq.FromString, + response_serializer=debug__pb2.MessageResp.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler("grpc.pb.debug.DebugService", rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers("grpc.pb.debug.DebugService", rpc_method_handlers) + + +# This class is part of an EXPERIMENTAL API. +class DebugService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def SendMessage( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/grpc.pb.debug.DebugService/SendMessage", + debug__pb2.MessageReq.SerializeToString, + debug__pb2.MessageResp.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) diff --git a/src/app/interfaces/grpc/pb/example/__init__.py b/src/app/interfaces/grpc/pb/example/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/interfaces/grpc/pb/example/example_pb2.py b/src/app/interfaces/grpc/pb/example/example_pb2.py new file mode 100644 index 0000000..9aada88 --- /dev/null +++ b/src/app/interfaces/grpc/pb/example/example_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: example.proto +# Protobuf Python Version: 5.29.0 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +_runtime_version.ValidateProtobufRuntimeVersion(_runtime_version.Domain.PUBLIC, 5, 29, 0, "", "example.proto") +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\rexample.proto\x12\rgrpc.messages" \n\nExampleReq\x12\x12\n\nexample_id\x18\x01 \x01(\x05"&\n\x0b\x45xampleResp\x12\x17\n\x0f\x65xample_message\x18\x01 \x01(\t2W\n\x0e\x45xampleService\x12\x45\n\nGetExample\x12\x19.grpc.messages.ExampleReq\x1a\x1a.grpc.messages.ExampleResp"\x00\x62\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "example_pb2", _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals["_EXAMPLEREQ"]._serialized_start = 32 + _globals["_EXAMPLEREQ"]._serialized_end = 64 + _globals["_EXAMPLERESP"]._serialized_start = 66 + _globals["_EXAMPLERESP"]._serialized_end = 104 + _globals["_EXAMPLESERVICE"]._serialized_start = 106 + _globals["_EXAMPLESERVICE"]._serialized_end = 193 +# @@protoc_insertion_point(module_scope) diff --git a/src/app/interfaces/grpc/pb/example/example_pb2_grpc.py b/src/app/interfaces/grpc/pb/example/example_pb2_grpc.py new file mode 100644 index 0000000..29cfe90 --- /dev/null +++ b/src/app/interfaces/grpc/pb/example/example_pb2_grpc.py @@ -0,0 +1,100 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +import src.app.interfaces.grpc.pb.example.example_pb2 as example__pb2 + +GRPC_GENERATED_VERSION = "1.70.0" +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f"The grpc package installed is at version {GRPC_VERSION}," + + " but the generated code in example_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + ) + + +class ExampleServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.GetExample = channel.unary_unary( + "/grpc.messages.ExampleService/GetExample", + request_serializer=example__pb2.ExampleReq.SerializeToString, + response_deserializer=example__pb2.ExampleResp.FromString, + _registered_method=True, + ) + + +class ExampleServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetExample(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_ExampleServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + "GetExample": grpc.unary_unary_rpc_method_handler( + servicer.GetExample, + request_deserializer=example__pb2.ExampleReq.FromString, + response_serializer=example__pb2.ExampleResp.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler("grpc.messages.ExampleService", rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers("grpc.messages.ExampleService", rpc_method_handlers) + + +# This class is part of an EXPERIMENTAL API. +class ExampleService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetExample( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/grpc.messages.ExampleService/GetExample", + example__pb2.ExampleReq.SerializeToString, + example__pb2.ExampleResp.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) diff --git a/src/app/interfaces/grpc/protos/debug.proto b/src/app/interfaces/grpc/protos/debug.proto new file mode 100644 index 0000000..6e76996 --- /dev/null +++ b/src/app/interfaces/grpc/protos/debug.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; + +import "google/protobuf/any.proto"; + +package grpc.pb.debug; + +message SayMeqDataReq { + +} +message TestDataReq { + string year = 1; + string month = 2; +} + +message MessageReq{ + string event = 1; + google.protobuf.Any data = 2; +} + +message MessageResp { + bool status = 1; + string message = 2; +} + +service DebugService { + rpc SendMessage(MessageReq) returns (MessageResp) {} +} diff --git a/src/app/interfaces/grpc/protos/example.proto b/src/app/interfaces/grpc/protos/example.proto new file mode 100644 index 0000000..1f66a52 --- /dev/null +++ b/src/app/interfaces/grpc/protos/example.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; + +package grpc.pb.example; + + +message ExampleReq{ + int32 example_id = 1; +} + +message ExampleResp { + string example_message = 1; +} + +service ExampleService { + rpc GetExample(ExampleReq) returns (ExampleResp) {} +} diff --git a/src/app/interfaces/grpc/server.py b/src/app/interfaces/grpc/server.py new file mode 100644 index 0000000..b5ef3b2 --- /dev/null +++ b/src/app/interfaces/grpc/server.py @@ -0,0 +1,31 @@ +import os + +import grpc +import asyncio +from concurrent import futures +from loguru import logger +from src.app.config.settings import settings, LaunchMode +from src.app.interfaces.grpc.pb.debug import debug_pb2_grpc +from src.app.interfaces.grpc.pb.example import example_pb2_grpc +from src.app.interfaces.grpc.services.debug_service import 
DebugService +from src.app.interfaces.grpc.services.example_service import ExampleService + + +async def serve() -> None: + interceptors: list = [] + max_workers = int(os.cpu_count() * 1.5)  # type: ignore + if settings.LAUNCH_MODE != LaunchMode.PRODUCTION.value: + max_workers = 2 + server = grpc.aio.server( + migration_thread_pool=futures.ThreadPoolExecutor(max_workers=max_workers), interceptors=interceptors + ) + debug_pb2_grpc.add_DebugServiceServicer_to_server(DebugService(), server) + example_pb2_grpc.add_ExampleServiceServicer_to_server(ExampleService(), server) + server.add_insecure_port(settings.GRPC_URL) + await server.start() + logger.info(f"GRPC server started at {settings.GRPC_URL} with {max_workers} workers") + await server.wait_for_termination() + + +if __name__ == "__main__": + asyncio.run(serve()) diff --git a/src/app/interfaces/grpc/services/__init__.py b/src/app/interfaces/grpc/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/interfaces/grpc/services/debug_service.py b/src/app/interfaces/grpc/services/debug_service.py new file mode 100644 index 0000000..faa0305 --- /dev/null +++ b/src/app/interfaces/grpc/services/debug_service.py @@ -0,0 +1,25 @@ +import json + +from loguru import logger + +from src.app.config.settings import settings +from src.app.infrastructure.messaging.mq_client import mq_client +from src.app.interfaces.grpc.pb.debug import debug_pb2 as pb2 +from google.protobuf.json_format import MessageToJson + +from src.app.interfaces.grpc.pb.debug.debug_pb2_grpc import DebugServiceServicer + + +class DebugService(DebugServiceServicer): + async def SendMessage(self, request, context) -> pb2.MessageResp:  # type: ignore + data_raw = MessageToJson(request.data) or "{}" + data = json.loads(data_raw) + data_type_ = data.pop("@type", "").rsplit("/", 1)[-1]  # message type name; tolerates a missing "@type" key + event = request.event + await mq_client.produce_messages( + messages=[{"event": event, "data": data}], + queue_name=settings.DEFAULT_QUEUE, + exchanger_name=settings.DEFAULT_EXCHANGER, + ) + logger.debug(f"Sent message `{event}` with data {str(data)}") + return pb2.MessageResp(status=True, message="OK")  # type: ignore diff --git a/src/app/interfaces/grpc/services/example_service.py b/src/app/interfaces/grpc/services/example_service.py new file mode 100644 index 0000000..3086aee --- /dev/null +++ b/src/app/interfaces/grpc/services/example_service.py @@ -0,0 +1,7 @@ +from src.app.interfaces.grpc.pb.example import example_pb2 as pb2 +from src.app.interfaces.grpc.pb.example.example_pb2_grpc import ExampleServiceServicer + + +class ExampleService(ExampleServiceServicer): + async def GetExample(self, request, context) -> pb2.ExampleResp: + return pb2.ExampleResp(example_message="OK") From 7721181c17f45372e517bd01a9ecc028fdc45d5f Mon Sep 17 00:00:00 2001 From: medniy <20140819+Medniy2000@users.noreply.github.com> Date: Sun, 7 Sep 2025 14:12:15 +0300 Subject: [PATCH 03/13] feature/tests_refactor (#12) --- .env.example | 2 +- .../users/services/test_users_service.py | 68 +++++++------- tests/domain/users/aggregates/common.py | 8 +- tests/fixtures/constants.py | 12 +-- .../repositories/test_users_repository.py | 94 ++++++++++++------- 5 files changed, 105 insertions(+), 79 deletions(-) diff --git a/.env.example b/.env.example index a2ca1f1..2c8d9a8 100644 --- a/.env.example +++ b/.env.example @@ -16,7 +16,7 @@ DEFAULT_BATCH_SIZE=500 # API settings # ------------------------------------------------------------------------------ -API_PORT=8082 +API_PORT=8081 API_DEFAULT_LIMIT=25
API_LIMIT_ALLOWED_VALUES_LIST=[1,5,10,15,25,50] SHOW_API_DOCS=True diff --git a/tests/application/users/services/test_users_service.py b/tests/application/users/services/test_users_service.py index 5718869..18daa13 100644 --- a/tests/application/users/services/test_users_service.py +++ b/tests/application/users/services/test_users_service.py @@ -1,7 +1,7 @@ import uuid from asyncio import AbstractEventLoop from copy import deepcopy -from datetime import datetime +import datetime as dt from typing import Any, List, Type import pytest @@ -56,13 +56,13 @@ def test_users_repository_create(e_loop: AbstractEventLoop, users: Any) -> None: user_data_raw = { "id": 999, "uuid": uuid.uuid4().__str__(), - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n", "last_name": "last_name_n", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n", "last_name": "last_name_n", @@ -88,7 +88,7 @@ def test_users_repository_create_without_id_and_uuid(e_loop: AbstractEventLoop, "first_name": "first_name_n", "last_name": "last_name_n", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n", "last_name": "last_name_n", @@ -117,7 +117,7 @@ def test_users_repository_create_is_not_return(e_loop: AbstractEventLoop, users: "first_name": "first_name_n", "last_name": "last_name_n", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n", "last_name": "last_name_n", @@ -140,13 +140,13 @@ def test_users_repository_create_bulk(e_loop: AbstractEventLoop, users: Any) -> user_data_raw_1 = { "id": 998, "uuid": uuid.uuid4().__str__(), - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n1", "last_name": "last_name_n1", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n1", "last_name": "last_name_n1", @@ -155,13 +155,13 @@ def test_users_repository_create_bulk(e_loop: AbstractEventLoop, users: Any) -> user_data_raw_2 = { "id": 999, "uuid": uuid.uuid4().__str__(), - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n2", "last_name": "last_name_n2", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n2", "last_name": "last_name_n2", @@ -189,26 +189,26 @@ def test_users_repository_create_bulk_without_id_uuid(e_loop: AbstractEventLoop, users_service = service_container.users_service user_data_raw_1 = { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n1", "last_name": "last_name_n1", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, 
"first_name": "first_name_n1", "last_name": "last_name_n1", "email": "n1" + generate_str(5) + "@gmail.com", } user_data_raw_2 = { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n2", "last_name": "last_name_n2", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n2", "last_name": "last_name_n2", @@ -238,26 +238,26 @@ def test_users_repository_create_bulk_is_not_return(e_loop: AbstractEventLoop, u users_service = service_container.users_service user_data_raw_1 = { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n1", "last_name": "last_name_n1", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n1", "last_name": "last_name_n1", "email": "n1" + generate_str(5) + "@gmail.com", } user_data_raw_2 = { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n2", "last_name": "last_name_n2", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n2", "last_name": "last_name_n2", @@ -299,7 +299,7 @@ def test_users_repository_update_partial(e_loop: AbstractEventLoop, users: Any) user_data = deepcopy(USERS[0]) user_data_raw = { - "updated_at": datetime.utcnow(), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "first_name": "updated_first_name_1", "last_name": "updated_last_name_1", } @@ -324,7 +324,7 @@ def test_users_repository_update_without_return(e_loop: AbstractEventLoop, users user_data = deepcopy(USERS[0]) user_data_raw = { - "updated_at": datetime.utcnow(), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "first_name": "updated_first_name_1", "last_name": "updated_last_name_1", } @@ -376,13 +376,13 @@ def test_users_repository_bulk_update_partial(e_loop: AbstractEventLoop, users: item_1_to_update = { "id": USERS[0]["id"], "uuid": str(USERS[0]["uuid"]), - "updated_at": datetime.utcnow(), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "first_name": "updated_1_first_name", } item_2_to_update = { "id": USERS[1]["id"], "uuid": str(USERS[1]["uuid"]), - "updated_at": datetime.utcnow(), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "first_name": "updated_2_first_name", } @@ -444,26 +444,26 @@ def test_users_repository_bulk_without_return(e_loop: AbstractEventLoop, users: USERS[0], USERS[1], { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n1", "last_name": "last_name_n1", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n1", "last_name": "last_name_n1", "email": "n1" + generate_str(5) + "@gmail.com", }, { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + 
"created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n2", "last_name": "last_name_n2", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n2", "last_name": "last_name_n2", diff --git a/tests/domain/users/aggregates/common.py b/tests/domain/users/aggregates/common.py index 69fb621..64c6dd5 100644 --- a/tests/domain/users/aggregates/common.py +++ b/tests/domain/users/aggregates/common.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from datetime import datetime +import datetime as dt from typing import Any, Dict @@ -8,9 +8,9 @@ class UserTestAggregate: id: int uuid: str meta: Dict[str, Any] | Any - created_at: datetime - updated_at: datetime - birthday: datetime | None + created_at: dt.datetime + updated_at: dt.datetime + birthday: dt.datetime | None first_name: str | None last_name: str | None email: str diff --git a/tests/fixtures/constants.py b/tests/fixtures/constants.py index e95e185..d57756d 100644 --- a/tests/fixtures/constants.py +++ b/tests/fixtures/constants.py @@ -1,13 +1,13 @@ import random import uuid -from datetime import datetime +import datetime as dt from typing import List from dateutil.relativedelta import relativedelta from src.app.infrastructure.utils.common import generate_str -USER_CREATED_AT = datetime.utcnow() - relativedelta(months=6) +USER_CREATED_AT = dt.datetime.now(dt.UTC).replace(tzinfo=None) - relativedelta(months=6) USER_UPDATED_AT = USER_CREATED_AT USERS: List = [ { @@ -19,7 +19,7 @@ "first_name": "first_name_1", "last_name": "last_name_1", }, - "birthday": datetime.utcnow() - relativedelta(months=random.randint(2, 12)), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None) - relativedelta(months=random.randint(2, 12)), "is_active": True, "first_name": "first_name_1", "last_name": "last_name_1", @@ -34,7 +34,7 @@ "first_name": "first_name_2", "last_name": "last_name_2", }, - "birthday": datetime.utcnow() - relativedelta(months=random.randint(2, 12)), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None) - relativedelta(months=random.randint(2, 12)), "is_active": True, "first_name": "first_name_2", "last_name": "last_name_2", @@ -49,7 +49,7 @@ "first_name": "first_name_3", "last_name": "last_name_3", }, - "birthday": datetime.utcnow() - relativedelta(months=random.randint(2, 12)), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None) - relativedelta(months=random.randint(2, 12)), "is_active": True, "first_name": "first_name_3", "last_name": "last_name_3", @@ -64,7 +64,7 @@ "first_name": "first_name_4", "last_name": "last_name_4", }, - "birthday": datetime.utcnow() - relativedelta(months=random.randint(2, 12)), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None) - relativedelta(months=random.randint(2, 12)), "is_active": True, "first_name": "first_name_4", "last_name": "last_name_4", diff --git a/tests/infrastructure/repositories/test_users_repository.py b/tests/infrastructure/repositories/test_users_repository.py index 54bf780..0c8fec2 100644 --- a/tests/infrastructure/repositories/test_users_repository.py +++ b/tests/infrastructure/repositories/test_users_repository.py @@ -1,7 +1,7 @@ import uuid from asyncio import AbstractEventLoop from copy import deepcopy -from datetime import datetime +import datetime as dt from typing import Any, List, Type import pytest @@ -42,6 +42,21 @@ def test_users_repository_get_first_by_attr_name(e_loop: 
AbstractEventLoop, user assert getattr(user, key) == value +def test_users_repository_get_first_by_attr_name_without_out_dataclass( + e_loop: AbstractEventLoop, users: Any +) -> None: + users_repository = repo_container.users_repository + attr_names = ["id", "uuid", "email"] + for user_raw in USERS: + for attr_name in attr_names: + user: Type[UserTestAggregate] | None = e_loop.run_until_complete( + users_repository.get_first(filter_data={attr_name: user_raw.get(attr_name)}) + ) + assert isinstance(user, UserTestAggregate) is False + for key, value in user_raw.items(): + assert getattr(user, key) == value + + def test_users_get_list_basic(e_loop: AbstractEventLoop, users: Any) -> None: users_repository = repo_container.users_repository items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( @@ -55,19 +70,30 @@ def test_users_get_list_basic(e_loop: AbstractEventLoop, users: Any) -> None: assert user.id in raw_ids +def test_users_get_list_without_out_dataclass(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + items: List[Type[UserTestAggregate]] = e_loop.run_until_complete(users_repository.get_list()) + assert isinstance(items, list) is True + assert len(items) == len(USERS) + raw_ids = [i["id"] for i in USERS] + for user in items: + assert isinstance(user, UserTestAggregate) is False + assert user.id in raw_ids + + def test_users_repository_create(e_loop: AbstractEventLoop, users: Any) -> None: users_repository = repo_container.users_repository user_data_raw = { "id": 999, "uuid": uuid.uuid4().__str__(), - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n", "last_name": "last_name_n", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n", "last_name": "last_name_n", @@ -93,7 +119,7 @@ def test_users_repository_create_without_id_and_uuid(e_loop: AbstractEventLoop, "first_name": "first_name_n", "last_name": "last_name_n", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n", "last_name": "last_name_n", @@ -122,7 +148,7 @@ def test_users_repository_create_is_not_return(e_loop: AbstractEventLoop, users: "first_name": "first_name_n", "last_name": "last_name_n", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n", "last_name": "last_name_n", @@ -145,13 +171,13 @@ def test_users_repository_create_bulk(e_loop: AbstractEventLoop, users: Any) -> user_data_raw_1 = { "id": 998, "uuid": uuid.uuid4().__str__(), - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n1", "last_name": "last_name_n1", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n1", "last_name": "last_name_n1", @@ -160,13 +186,13 @@ def test_users_repository_create_bulk(e_loop: AbstractEventLoop, users: Any) -> user_data_raw_2 = { "id": 999, "uuid": uuid.uuid4().__str__(), - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": 
dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n2", "last_name": "last_name_n2", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n2", "last_name": "last_name_n2", @@ -194,26 +220,26 @@ def test_users_repository_create_bulk_without_id_uuid(e_loop: AbstractEventLoop, users_repository = repo_container.users_repository user_data_raw_1 = { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n1", "last_name": "last_name_n1", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n1", "last_name": "last_name_n1", "email": "n1" + generate_str(5) + "@gmail.com", } user_data_raw_2 = { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n2", "last_name": "last_name_n2", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n2", "last_name": "last_name_n2", @@ -243,26 +269,26 @@ def test_users_repository_create_bulk_is_not_return(e_loop: AbstractEventLoop, u users_repository = repo_container.users_repository user_data_raw_1 = { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n1", "last_name": "last_name_n1", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n1", "last_name": "last_name_n1", "email": "n1" + generate_str(5) + "@gmail.com", } user_data_raw_2 = { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n2", "last_name": "last_name_n2", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n2", "last_name": "last_name_n2", @@ -309,7 +335,7 @@ def test_users_repository_update_partial(e_loop: AbstractEventLoop, users: Any) user_data = deepcopy(USERS[0]) user_data_raw = { - "updated_at": datetime.utcnow(), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "first_name": "updated_first_name_1", "last_name": "updated_last_name_1", } @@ -339,7 +365,7 @@ def test_users_repository_update_without_return(e_loop: AbstractEventLoop, users user_data = deepcopy(USERS[0]) user_data_raw = { - "updated_at": datetime.utcnow(), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "first_name": "updated_first_name_1", "last_name": "updated_last_name_1", } @@ -393,13 +419,13 @@ def test_users_repository_bulk_update_partial(e_loop: AbstractEventLoop, users: item_1_to_update = { "id": USERS[0]["id"], "uuid": str(USERS[0]["uuid"]), - "updated_at": datetime.utcnow(), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "first_name": "updated_1_first_name", } 
item_2_to_update = { "id": USERS[1]["id"], "uuid": str(USERS[1]["uuid"]), - "updated_at": datetime.utcnow(), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "first_name": "updated_2_first_name", } @@ -465,26 +491,26 @@ def test_users_repository_bulk_without_return(e_loop: AbstractEventLoop, users: USERS[0], USERS[1], { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n1", "last_name": "last_name_n1", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n1", "last_name": "last_name_n1", "email": "n1" + generate_str(5) + "@gmail.com", }, { - "created_at": datetime.utcnow(), - "updated_at": datetime.utcnow(), + "created_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), "meta": { "first_name": "first_name_n2", "last_name": "last_name_n2", }, - "birthday": datetime.utcnow(), + "birthday": dt.datetime.now(dt.UTC).replace(tzinfo=None), "is_active": True, "first_name": "first_name_n2", "last_name": "last_name_n2", From 2b857a0cbf9f7161ca70cbc585d91f0ad6fd1973 Mon Sep 17 00:00:00 2001 From: medniy <20140819+Medniy2000@users.noreply.github.com> Date: Thu, 11 Sep 2025 13:10:17 +0300 Subject: [PATCH 04/13] launch app via base docker image (#14) Co-authored-by: Medniy2000 <{ID}+{username}@users.noreply.github.com> --- .launch/Dockerfile_base | 14 ++++++++++++++ .launch/api/Dockerfile | 14 ++------------ .launch/celery/Dockerfile | 14 ++------------ .launch/consume/Dockerfile | 14 ++------------ .launch/grpc/Dockerfile | 14 ++------------ README.rst | 7 ++++--- local_run.sh | 30 ++++++++++++++++++------------ 7 files changed, 44 insertions(+), 63 deletions(-) create mode 100644 .launch/Dockerfile_base diff --git a/.launch/Dockerfile_base b/.launch/Dockerfile_base new file mode 100644 index 0000000..b797013 --- /dev/null +++ b/.launch/Dockerfile_base @@ -0,0 +1,14 @@ +FROM python:3.12.0-slim + +# env variables +ENV PYTHONDONTWRITEBYTECODE 1 +ENV PYTHONUNBUFFERED 1 + +RUN mkdir /app +WORKDIR /app +COPY . /app/ +RUN pip install --upgrade pip +RUN pip install poetry +RUN poetry config virtualenvs.create false +RUN poetry install --no-root + diff --git a/.launch/api/Dockerfile b/.launch/api/Dockerfile index 46289ce..d054224 100644 --- a/.launch/api/Dockerfile +++ b/.launch/api/Dockerfile @@ -1,16 +1,6 @@ -FROM python:3.12.0-slim +ARG BASE_IMAGE +FROM $BASE_IMAGE -# env variables -ENV PYTHONDONTWRITEBYTECODE 1 -ENV PYTHONUNBUFFERED 1 - -RUN mkdir /app -WORKDIR /app -COPY . /app/ -RUN pip install --upgrade pip -RUN pip install poetry -RUN poetry config virtualenvs.create false -RUN poetry install --no-root CMD alembic upgrade head ENV PYTHONPATH=/app diff --git a/.launch/celery/Dockerfile b/.launch/celery/Dockerfile index da01c08..f185e67 100644 --- a/.launch/celery/Dockerfile +++ b/.launch/celery/Dockerfile @@ -1,16 +1,6 @@ -FROM python:3.12.0-slim +ARG BASE_IMAGE +FROM $BASE_IMAGE -# env variables -ENV PYTHONDONTWRITEBYTECODE 1 -ENV PYTHONUNBUFFERED 1 - -RUN mkdir /app -WORKDIR /app -COPY . 
/app/ -RUN pip install --upgrade pip -RUN pip install poetry -RUN poetry config virtualenvs.create false -RUN poetry install --no-root CMD alembic upgrade head diff --git a/.launch/consume/Dockerfile b/.launch/consume/Dockerfile index 266b891..84e99be 100644 --- a/.launch/consume/Dockerfile +++ b/.launch/consume/Dockerfile @@ -1,16 +1,6 @@ -FROM python:3.12.0-slim +ARG BASE_IMAGE +FROM $BASE_IMAGE -# env variables -ENV PYTHONDONTWRITEBYTECODE 1 -ENV PYTHONUNBUFFERED 1 - -RUN mkdir /app -WORKDIR /app -COPY . /app/ -RUN pip install --upgrade pip -RUN pip install poetry -RUN poetry config virtualenvs.create false -RUN poetry install --no-root CMD alembic upgrade head ENV PYTHONPATH=/app diff --git a/.launch/grpc/Dockerfile b/.launch/grpc/Dockerfile index 92eb352..454fbc8 100644 --- a/.launch/grpc/Dockerfile +++ b/.launch/grpc/Dockerfile @@ -1,16 +1,6 @@ -FROM python:3.12.0-slim +ARG BASE_IMAGE +FROM $BASE_IMAGE -# env variables -ENV PYTHONDONTWRITEBYTECODE 1 -ENV PYTHONUNBUFFERED 1 - -RUN mkdir /app -WORKDIR /app -COPY . /app/ -RUN pip install --upgrade pip -RUN pip install poetry -RUN poetry config virtualenvs.create false -RUN poetry install --no-root CMD alembic upgrade head ENV PYTHONPATH=/app diff --git a/README.rst b/README.rst index f423d06..0c7bd94 100644 --- a/README.rst +++ b/README.rst @@ -168,9 +168,10 @@ Documentation Commands:: Docker Commands:: - docker build -t api_img --no-cache -f .launch/api/Dockerfile . - docker build -t celery_img --no-cache -f .launch/celery/Dockerfile . - docker build -t consume_img --no-cache -f .launch/consume/Dockerfile . + docker build -t base_img --no-cache -f .launch/Dockerfile_base . + docker build --build-arg BASE_IMAGE=base_img -t api_img --no-cache -f .launch/api/Dockerfile . + docker build --build-arg BASE_IMAGE=base_img -t celery_img --no-cache -f .launch/celery/Dockerfile . + docker build --build-arg BASE_IMAGE=base_img -t consume_img --no-cache -f .launch/consume/Dockerfile . docker run -d --name my_local_api \ --env-file ./.env \ diff --git a/local_run.sh b/local_run.sh index 7419fd0..b8548bc 100644 --- a/local_run.sh +++ b/local_run.sh @@ -60,6 +60,7 @@ while [ "$#" -gt 0 ]; do done +BASE_IMAGE="base_img" IMAGE_CELERY="celery_img" IMAGE_CONSUME="consume_img" IMAGE_API="api_img" @@ -72,6 +73,7 @@ if [ "$RECREATE" = true ]; then docker ps -a --filter "name=${DOCKER_PREFIX}*" --format "{{.ID}}" | xargs -r docker rm -f # Remove old images + docker rmi $BASE_IMAGE || true docker rmi $IMAGE_CELERY || true docker rmi $IMAGE_CONSUME || true docker rmi $IMAGE_API || true @@ -80,20 +82,24 @@ if [ "$RECREATE" = true ]; then echo " 🗑️ Removed old containers and images" # Build new images - docker build -t $IMAGE_CELERY --no-cache -f .launch/celery/Dockerfile . + docker build -t $BASE_IMAGE --no-cache -f .launch/Dockerfile_base . + echo " 🏗️ Built ${BASE_IMAGE} image" + + + docker build --build-arg BASE_IMAGE=$BASE_IMAGE -t $IMAGE_CELERY --no-cache -f .launch/celery/Dockerfile . echo " 🏗️ Built celery_img image" - docker build -t $IMAGE_CONSUME --no-cache -f .launch/consume/Dockerfile . - echo " 🏗️ Built consume_img image" + docker build --build-arg BASE_IMAGE=$BASE_IMAGE -t $IMAGE_CONSUME --no-cache -f .launch/consume/Dockerfile . + echo " 🏗️ Built ${IMAGE_CONSUME} image" if [ "$RUN_API" = true ]; then - docker build -t $IMAGE_API --no-cache -f .launch/api/Dockerfile . - echo " 🏗️ Built api_img image" + docker build --build-arg BASE_IMAGE=$BASE_IMAGE -t $IMAGE_API --no-cache -f .launch/api/Dockerfile .
+ echo " 🏗️ Built ${IMAGE_API} image" fi if [ "$RUN_GRPC" = true ]; then - docker build -t $IMAGE_GRPC --no-cache -f .launch/grpc/Dockerfile . - echo " 🏗️ Built grpc_img image" + docker build --build-arg BASE_IMAGE=$BASE_IMAGE -t $IMAGE_GRPC --no-cache -f .launch/grpc/Dockerfile . + echo " 🏗️ Built ${IMAGE_GRPC} image" fi fi @@ -116,8 +122,8 @@ if [ ! "$(docker ps -aq -f name=${DOCKER_PREFIX}_celery)" ]; then -e CELERY_BROKER_API=$CELERY_RESULT_BACKEND \ -p 5555:5555 mher/flower fi -echo " ✅ ${DOCKER_PREFIX}_celery UP" -echo " ✅ ${DOCKER_PREFIX}_flower UP" +echo " ✅ ${DOCKER_PREFIX}_celery UP" +echo " ✅ ${DOCKER_PREFIX}_flower UP" # Consume container if [ ! "$(docker ps -aq -f name=${DOCKER_PREFIX}_consume)" ]; then @@ -128,7 +134,7 @@ if [ ! "$(docker ps -aq -f name=${DOCKER_PREFIX}_consume)" ]; then --cpus=1 \ $IMAGE_CONSUME || true fi -echo " ✅ ${DOCKER_PREFIX}_consume UP" +echo " ✅ ${DOCKER_PREFIX}_consume UP" # API container (optional) if [ -z "$(docker ps -aq -f name=${DOCKER_PREFIX}_api)" ] && [ "$RUN_API" = true ]; then @@ -141,7 +147,7 @@ if [ -z "$(docker ps -aq -f name=${DOCKER_PREFIX}_api)" ] && [ "$RUN_API" = true $IMAGE_API || true fi if [ "$RUN_API" = true ]; then -echo " ✅ ${DOCKER_PREFIX}_api UP" +echo " ✅ ${DOCKER_PREFIX}_api UP" fi # gRPC container (optional) @@ -155,7 +161,7 @@ if [ -z "$(docker ps -aq -f name=${DOCKER_PREFIX}_grpc)" ] && [ "$RUN_GRPC" = tr $IMAGE_GRPC || true fi if [ "$RUN_GRPC" = true ]; then -echo " ✅ ${DOCKER_PREFIX}_grpc UP" +echo " ✅ ${DOCKER_PREFIX}_grpc UP" fi From d71be4cec4cb36000d676f8ebdd409da5f0021ab Mon Sep 17 00:00:00 2001 From: medniy <20140819+Medniy2000@users.noreply.github.com> Date: Tue, 16 Sep 2025 11:14:47 +0300 Subject: [PATCH 05/13] feature/init github actions (#16) --- .github/workflows/tests.yml | 24 ++++++++++++++++++++++++ .launch/tests/.test_env | 12 ++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 .github/workflows/tests.yml diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..688bc3f --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,24 @@ +name: Run Tests + +on: + push: + branches: [ main, stage, dev ] + pull_request: + branches: [ main, stage, dev ] + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Build and run tests + run: | + docker compose -f docker-compose-tests.yml up --build --abort-on-container-exit --exit-code-from x_test_service + + - name: Clean up + if: always() + run: | + docker compose -f docker-compose-tests.yml down -v + docker system prune -f diff --git a/.launch/tests/.test_env b/.launch/tests/.test_env index 0784657..e2de1cd 100644 --- a/.launch/tests/.test_env +++ b/.launch/tests/.test_env @@ -21,6 +21,11 @@ DB_USER=dev DB_PASSWORD=dev DB_NAME=test_db +# GRPC settings +# ------------------------------------------------------------------------------ +GRPC_HOST=0.0.0.0 +GRPC_PORT=50051 + # Redis # ------------------------------------------------------------------------------ @@ -30,3 +35,10 @@ REDIS_URL=redis://172.17.0.1:6389/0 # ------------------------------------------------------------------------------ CELERY_BROKER_URL=redis://172.17.0.1:6389/11 CELERY_RESULT_BACKEND=redis://172.17.0.1:6389/12 + + +# Message Broker +# ------------------------------------------------------------------------------ +MESSAGE_BROKER_URL=amqp://dev:dev@0.0.0.0:5672 +DEFAULT_EXCHANGER=YOUR_DEFAULT_EXCHANGER +DEFAULT_QUEUE=YOUR_DEFAULT_QUEUE From 5c66c1c0d56b5efa62de24d8a022df3bcdd271f2 Mon Sep 17
00:00:00 2001 From: medniy <20140819+Medniy2000@users.noreply.github.com> Date: Fri, 19 Sep 2025 10:00:52 +0300 Subject: [PATCH 06/13] feature/improve dockerfiles (#18) --- .dockerignore | 71 +++++ .launch/Dockerfile_base | 58 +++- .launch/api/Dockerfile | 4 - .launch/celery/Dockerfile | 7 +- .launch/tests/api/Dockerfile | 2 +- README.rst | 2 +- docker-compose-tests.yml | 2 - poetry.lock | 537 +++++++++++++++++++++-------------- pyproject.toml | 3 +- 9 files changed, 451 insertions(+), 235 deletions(-) create mode 100644 .dockerignore diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..0be3b56 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,71 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# Virtual environments +venv/ +env/ +ENV/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# Git +.git/ +.gitignore + +# Documentation +docs/ +*.md +README* + +# Tests +tests/ +.pytest_cache/ +.coverage +htmlcov/ +.tox/ + +# OS +.DS_Store +Thumbs.db + +# Docker +Dockerfile* +docker-compose* +.dockerignore + +# CI/CD +.github/ +.gitlab-ci.yml + +# Logs +*.log +logs/ + +# Temporary files +tmp/ +temp/ +.tmp/ \ No newline at end of file diff --git a/.launch/Dockerfile_base b/.launch/Dockerfile_base index b797013..7d13e54 100644 --- a/.launch/Dockerfile_base +++ b/.launch/Dockerfile_base @@ -1,14 +1,52 @@ -FROM python:3.12.0-slim +# Build stage +ARG PYTHON_VERSION=3.12.11 +FROM python:${PYTHON_VERSION}-slim AS builder -# env variables -ENV PYTHONDONTWRITEBYTECODE 1 -ENV PYTHONUNBUFFERED 1 +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 -RUN mkdir /app WORKDIR /app -COPY . /app/ -RUN pip install --upgrade pip -RUN pip install poetry -RUN poetry config virtualenvs.create false -RUN poetry install --no-root + +# Install build dependencies +RUN apt-get update && apt-get install -y \ + build-essential \ + && rm -rf /var/lib/apt/lists/* + +# Install poetry +RUN pip install --no-cache-dir poetry + +# Copy dependency files +COPY pyproject.toml poetry.lock ./ + +# Configure poetry and install dependencies +RUN poetry config virtualenvs.create false \ + && poetry install --no-root --only=main + +# Production stage +ARG PYTHON_VERSION=3.12 +FROM python:${PYTHON_VERSION}-slim + +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 +ENV PYTHONPATH=/app + +# Install runtime dependencies only +RUN apt-get update && apt-get install -y \ + libpq5 \ + && rm -rf /var/lib/apt/lists/* \ + && apt-get clean + +WORKDIR /app + +# Get Python version for dynamic path +ARG PYTHON_VERSION=3.12 +ENV PYTHON_SITE_PACKAGES=/usr/local/lib/python${PYTHON_VERSION}/site-packages + +# Copy installed packages from builder stage +COPY --from=builder ${PYTHON_SITE_PACKAGES} ${PYTHON_SITE_PACKAGES} +COPY --from=builder /usr/local/bin /usr/local/bin + +# Copy application code +COPY src/ ./src/ +COPY alembic.ini ./ diff --git a/.launch/api/Dockerfile b/.launch/api/Dockerfile index d054224..834f4a4 100644 --- a/.launch/api/Dockerfile +++ b/.launch/api/Dockerfile @@ -1,9 +1,5 @@ ARG BASE_IMAGE FROM $BASE_IMAGE -CMD alembic upgrade head - -ENV PYTHONPATH=/app - CMD alembic upgrade head && gunicorn --config src/app/interfaces/cli/gunicorn_config.py src.app.interfaces.cli.main:app EXPOSE $API_PORT diff --git a/.launch/celery/Dockerfile b/.launch/celery/Dockerfile index f185e67..cb4cf1b 100644 --- a/.launch/celery/Dockerfile +++ b/.launch/celery/Dockerfile @@ -1,9 +1,4 @@
ARG BASE_IMAGE FROM $BASE_IMAGE -CMD alembic upgrade head - - -ENV PYTHONPATH=/app - -CMD alembic upgrade head && poetry run celery -A src.app.interfaces.cli.celery_app $CELERY_ARGS +CMD alembic upgrade head && celery -A src.app.interfaces.cli.celery_app $CELERY_ARGS diff --git a/.launch/tests/api/Dockerfile b/.launch/tests/api/Dockerfile index cfeaf72..bf665a5 100644 --- a/.launch/tests/api/Dockerfile +++ b/.launch/tests/api/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12.0-slim +FROM python:3.12.11-slim # env variables ENV PYTHONDONTWRITEBYTECODE 1 diff --git a/README.rst b/README.rst index 0c7bd94..438c94f 100644 --- a/README.rst +++ b/README.rst @@ -201,4 +201,4 @@ Docker Commands:: Running Tests:: docker-compose -f docker-compose-tests.yml up --abort-on-container-exit - docker-compose -f docker-compose-tests.yml rm -fsv && docker rmi $(docker images '*x_test*' -a -q) && docker system prune + docker-compose -f docker-compose-tests.yml rm -fsv && docker images '*x_test*' -a -q | xargs -r docker rmi && docker system prune diff --git a/docker-compose-tests.yml b/docker-compose-tests.yml index f706318..2583767 100644 --- a/docker-compose-tests.yml +++ b/docker-compose-tests.yml @@ -1,5 +1,3 @@ -version: '3' - services: x_test_service: container_name: service_test_container diff --git a/poetry.lock b/poetry.lock index d0b2bd9..9064d73 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.0 and should not be changed by hand. [[package]] name = "aio-pika" @@ -6,6 +6,7 @@ version = "9.5.7" description = "Wrapper around the aiormq for asyncio and humans" optional = false python-versions = "<4.0,>=3.10" +groups = ["main"] files = [ {file = "aio_pika-9.5.7-py3-none-any.whl", hash = "sha256:684316a0e92157754bb2d6927c5568fd997518b123add342e97405aa9066772b"}, {file = "aio_pika-9.5.7.tar.gz", hash = "sha256:0569b59d3c7b36ca76abcb213cdc3677e2a4710a3c371dd27359039f9724f4ee"}, @@ -21,6 +22,7 @@ version = "0.12.0" description = "Kafka integration with asyncio" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "aiokafka-0.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da8938eac2153ca767ac0144283b3df7e74bb4c0abc0c9a722f3ae63cfbf3a42"}, {file = "aiokafka-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a5c827c8883cfe64bc49100de82862225714e1853432df69aba99f135969bb1b"}, @@ -73,6 +75,7 @@ version = "6.9.0" description = "Pure python AMQP asynchronous client library" optional = false python-versions = "<4.0,>=3.9" +groups = ["main"] files = [ {file = "aiormq-6.9.0-py3-none-any.whl", hash = "sha256:e1d88db819d197646cabaea6d6b53497a5ba358a5b6ae8f45f61dcb446821fa6"}, {file = "aiormq-6.9.0.tar.gz", hash = "sha256:1c31f2098ad2beee6e95d0ad969c836876c1e3113e8c67142eb58565fedcab4c"}, @@ -88,6 +91,7 @@ version = "1.0.0" description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.10" +groups = ["dev"] files = [ {file = "alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b"}, {file = "alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e"}, @@ -99,6 +103,7 @@ version = "1.16.5" description = "A database migration tool for SQLAlchemy." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "alembic-1.16.5-py3-none-any.whl", hash = "sha256:e845dfe090c5ffa7b92593ae6687c5cb1a101e91fa53868497dbd79847f9dbe3"}, {file = "alembic-1.16.5.tar.gz", hash = "sha256:a88bb7f6e513bd4301ecf4c7f2206fe93f9913f9b48dac3b78babde2d6fe765e"}, @@ -118,6 +123,7 @@ version = "5.3.1" description = "Low-level AMQP client for Python (fork of amqplib)." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2"}, {file = "amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432"}, @@ -132,6 +138,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -143,6 +150,7 @@ version = "4.10.0" description = "High-level concurrency and networking framework on top of asyncio or Trio" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1"}, {file = "anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6"}, @@ -162,6 +170,7 @@ version = "3.3.11" description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.9.0" +groups = ["dev"] files = [ {file = "astroid-3.3.11-py3-none-any.whl", hash = "sha256:54c760ae8322ece1abd213057c4b5bba7c49818853fc901ef09719a60dbf9dec"}, {file = "astroid-3.3.11.tar.gz", hash = "sha256:1e5a5011af2920c7c67a53f65d536d65bfa7116feeaf2354d8b94f29573bb0ce"}, @@ -173,6 +182,7 @@ version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -184,6 +194,7 @@ version = "0.30.0" description = "An asyncio PostgreSQL driver" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, @@ -238,8 +249,8 @@ files = [ [package.extras] docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] -gssauth = ["gssapi", "sspilib"] -test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi", "k5test", "mypy (>=1.8.0,<1.9.0)", "sspilib", "uvloop (>=0.15.3)"] +gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == 
\"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] [[package]] name = "babel" @@ -247,13 +258,14 @@ version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] [[package]] name = "bcrypt" @@ -261,6 +273,7 @@ version = "4.3.0" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"}, {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"}, @@ -325,6 +338,7 @@ version = "4.2.1" description = "Python multiprocessing fork with improvements and bugfixes" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb"}, {file = "billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f"}, @@ -336,6 +350,7 @@ version = "24.10.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, @@ -380,6 +395,7 @@ version = "5.5.3" description = "Distributed Task Queue." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "celery-5.5.3-py3-none-any.whl", hash = "sha256:0b5761a07057acee94694464ca482416b959568904c9dfa41ce8413a7d65d525"}, {file = "celery-5.5.3.tar.gz", hash = "sha256:6c972ae7968c2b5281227f01c3a3f984037d21c5129d07bf3550cc2afc6b10a5"}, @@ -399,33 +415,33 @@ vine = ">=5.1.0,<6.0" arangodb = ["pyArango (>=2.0.2)"] auth = ["cryptography (==44.0.2)"] azureblockblob = ["azure-identity (>=1.19.0)", "azure-storage-blob (>=12.15.0)"] -brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] +brotli = ["brotli (>=1.0.0) ; platform_python_implementation == \"CPython\"", "brotlipy (>=0.7.0) ; platform_python_implementation == \"PyPy\""] cassandra = ["cassandra-driver (>=3.25.0,<4)"] consul = ["python-consul2 (==0.1.5)"] cosmosdbsql = ["pydocumentdb (==2.3.5)"] -couchbase = ["couchbase (>=3.0.0)"] +couchbase = ["couchbase (>=3.0.0) ; platform_python_implementation != \"PyPy\" and (platform_system != \"Windows\" or python_version < \"3.10\")"] couchdb = ["pycouchdb (==1.16.0)"] django = ["Django (>=2.2.28)"] dynamodb = ["boto3 (>=1.26.143)"] elasticsearch = ["elastic-transport (<=8.17.1)", "elasticsearch (<=8.17.2)"] -eventlet = ["eventlet (>=0.32.0)"] +eventlet = ["eventlet (>=0.32.0) ; python_version < \"3.10\""] gcs = ["google-cloud-firestore (==2.20.1)", "google-cloud-storage (>=2.10.0)", "grpcio (==1.67.0)"] gevent = ["gevent (>=1.5.0)"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -memcache = ["pylibmc (==1.6.3)"] +librabbitmq = ["librabbitmq (>=2.0.0) ; python_version < \"3.11\""] +memcache = ["pylibmc (==1.6.3) ; platform_system != \"Windows\""] mongodb = ["kombu[mongodb]"] msgpack = ["kombu[msgpack]"] pydantic = ["pydantic (>=2.4)"] pymemcache = ["python-memcached (>=1.61)"] -pyro = ["pyro4 (==4.82)"] +pyro = ["pyro4 (==4.82) ; python_version < \"3.11\""] pytest = ["pytest-celery[all] (>=1.2.0,<1.3.0)"] redis = ["kombu[redis]"] s3 = ["boto3 (>=1.26.143)"] slmq = ["softlayer_messaging (>=1.0.3)"] -solar = ["ephem (==4.2)"] +solar = ["ephem (==4.2) ; platform_python_implementation != \"PyPy\""] sqlalchemy = ["kombu[sqlalchemy]"] sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.5.0)", "urllib3 (>=1.26.16)"] -tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] +tblib = ["tblib (>=1.3.0) ; python_version < \"3.8.0\"", "tblib (>=1.5.0) ; python_version >= \"3.8.0\""] yaml = ["kombu[yaml]"] zookeeper = ["kazoo (>=1.3.1)"] zstd = ["zstandard (==0.23.0)"] @@ -436,6 +452,7 @@ version = "2025.8.3" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"}, {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"}, @@ -447,6 +464,7 @@ version = "3.4.3" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72"}, {file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe"}, @@ -535,6 +553,7 @@ version = "8.2.1" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" +groups = ["main", "dev"] files = [ {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, @@ -549,6 +568,7 @@ version = "0.3.1" description = "Enables git-like *did-you-mean* feature in click" optional = false python-versions = ">=3.6.2" +groups = ["main"] files = [ {file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"}, {file = "click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"}, @@ -563,6 +583,7 @@ version = "1.1.1.2" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6"}, {file = "click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261"}, @@ -580,6 +601,7 @@ version = "0.3.0" description = "REPL plugin for Click" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, @@ -598,6 +620,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -609,6 +633,7 @@ version = "7.10.6" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70e7bfbd57126b5554aa482691145f798d7df77489a177a6bef80de78860a356"}, {file = "coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e41be6f0f19da64af13403e52f2dec38bbc2937af54df8ecef10850ff8d35301"}, @@ -701,27 +726,28 @@ files = [ ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "dnspython" -version = "2.7.0" +version = "2.8.0" description = "DNS toolkit" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, - {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, + {file = "dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af"}, + {file = "dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f"}, ] [package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=43)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=1.0.0)"] -idna = ["idna (>=3.7)"] -trio = ["trio (>=0.23)"] -wmi = ["wmi (>=1.5.1)"] +dev = ["black (>=25.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.17.0)", "mypy (>=1.17)", "pylint (>=3)", "pytest (>=8.4)", "pytest-cov (>=6.2.0)", "quart-trio (>=0.12.0)", "sphinx (>=8.2.0)", "sphinx-rtd-theme (>=3.0.0)", "twine (>=6.1.0)", "wheel (>=0.45.0)"] +dnssec = ["cryptography (>=45)"] +doh = ["h2 (>=4.2.0)", "httpcore (>=1.0.0)", "httpx (>=0.28.0)"] +doq = ["aioquic (>=1.2.0)"] +idna = ["idna (>=3.10)"] +trio = ["trio (>=0.30)"] +wmi = ["wmi (>=1.5.1) ; platform_system == \"Windows\""] [[package]] name = "docutils" @@ -729,6 +755,7 @@ version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, @@ -740,6 +767,7 @@ version = "0.19.1" description = "ECDSA cryptographic signature library (pure python)" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.6" +groups = ["main"] files = [ {file = "ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3"}, {file = 
"ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61"}, @@ -758,6 +786,7 @@ version = "2.3.0" description = "A robust email address syntax and deliverability validation library." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4"}, {file = "email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426"}, @@ -773,6 +802,7 @@ version = "14.3.0" description = "simplified environment variable parsing" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "environs-14.3.0-py3-none-any.whl", hash = "sha256:91e4c4ea964be277855cdd83a588f6375f10fad9fa452660ecb9f503c230f26a"}, {file = "environs-14.3.0.tar.gz", hash = "sha256:20672d92db325ce8114872b1989104eb84f083486325b5a44bcddff56472a384"}, @@ -785,22 +815,23 @@ python-dotenv = "*" [package.extras] dev = ["environs[tests]", "pre-commit (>=4.0,<5.0)", "tox"] django = ["dj-database-url", "dj-email-url", "django-cache-url"] -tests = ["backports.strenum", "environs[django]", "packaging", "pytest"] +tests = ["backports.strenum ; python_version < \"3.11\"", "environs[django]", "packaging", "pytest"] [[package]] name = "fastapi" -version = "0.116.1" +version = "0.116.2" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565"}, - {file = "fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143"}, + {file = "fastapi-0.116.2-py3-none-any.whl", hash = "sha256:c3a7a8fb830b05f7e087d920e0d786ca1fc9892eb4e9a84b227be4c1bc7569db"}, + {file = "fastapi-0.116.2.tar.gz", hash = "sha256:231a6af2fe21cfa2c32730170ad8514985fc250bec16c9b242d3b94c835ef529"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.40.0,<0.48.0" +starlette = ">=0.40.0,<0.49.0" typing-extensions = ">=4.8.0" [package.extras] @@ -814,6 +845,7 @@ version = "7.3.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e"}, {file = "flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872"}, @@ -830,6 +862,7 @@ version = "2.1.4" description = "URL manipulation made simple." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "furl-2.1.4-py2.py3-none-any.whl", hash = "sha256:da34d0b34e53ffe2d2e6851a7085a05d96922b5b578620a37377ff1dbeeb11c8"}, {file = "furl-2.1.4.tar.gz", hash = "sha256:877657501266c929269739fb5f5980534a41abd6bbabcb367c136d1d3b2a6015"}, @@ -845,6 +878,8 @@ version = "3.2.4" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" files = [ {file = "greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c"}, {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590"}, @@ -908,129 +943,134 @@ test = ["objgraph", "psutil", "setuptools"] [[package]] name = "grpcio" -version = "1.74.0" +version = "1.75.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.9" -files = [ - {file = "grpcio-1.74.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:85bd5cdf4ed7b2d6438871adf6afff9af7096486fcf51818a81b77ef4dd30907"}, - {file = "grpcio-1.74.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:68c8ebcca945efff9d86d8d6d7bfb0841cf0071024417e2d7f45c5e46b5b08eb"}, - {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:e154d230dc1bbbd78ad2fdc3039fa50ad7ffcf438e4eb2fa30bce223a70c7486"}, - {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8978003816c7b9eabe217f88c78bc26adc8f9304bf6a594b02e5a49b2ef9c11"}, - {file = "grpcio-1.74.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3d7bd6e3929fd2ea7fbc3f562e4987229ead70c9ae5f01501a46701e08f1ad9"}, - {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:136b53c91ac1d02c8c24201bfdeb56f8b3ac3278668cbb8e0ba49c88069e1bdc"}, - {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe0f540750a13fd8e5da4b3eaba91a785eea8dca5ccd2bc2ffe978caa403090e"}, - {file = "grpcio-1.74.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4e4181bfc24413d1e3a37a0b7889bea68d973d4b45dd2bc68bb766c140718f82"}, - {file = "grpcio-1.74.0-cp310-cp310-win32.whl", hash = "sha256:1733969040989f7acc3d94c22f55b4a9501a30f6aaacdbccfaba0a3ffb255ab7"}, - {file = "grpcio-1.74.0-cp310-cp310-win_amd64.whl", hash = "sha256:9e912d3c993a29df6c627459af58975b2e5c897d93287939b9d5065f000249b5"}, - {file = "grpcio-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31"}, - {file = "grpcio-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4"}, - {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce"}, - {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3"}, - {file = "grpcio-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182"}, - {file = 
"grpcio-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d"}, - {file = "grpcio-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f"}, - {file = "grpcio-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4"}, - {file = "grpcio-1.74.0-cp311-cp311-win32.whl", hash = "sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b"}, - {file = "grpcio-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11"}, - {file = "grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8"}, - {file = "grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6"}, - {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5"}, - {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49"}, - {file = "grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7"}, - {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3"}, - {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707"}, - {file = "grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b"}, - {file = "grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c"}, - {file = "grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc"}, - {file = "grpcio-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89"}, - {file = "grpcio-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01"}, - {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e"}, - {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91"}, - {file = "grpcio-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249"}, - {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362"}, - {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f"}, - {file = "grpcio-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20"}, - {file = "grpcio-1.74.0-cp313-cp313-win32.whl", hash = "sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa"}, - {file = 
"grpcio-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24"}, - {file = "grpcio-1.74.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:4bc5fca10aaf74779081e16c2bcc3d5ec643ffd528d9e7b1c9039000ead73bae"}, - {file = "grpcio-1.74.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:6bab67d15ad617aff094c382c882e0177637da73cbc5532d52c07b4ee887a87b"}, - {file = "grpcio-1.74.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:655726919b75ab3c34cdad39da5c530ac6fa32696fb23119e36b64adcfca174a"}, - {file = "grpcio-1.74.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a2b06afe2e50ebfd46247ac3ba60cac523f54ec7792ae9ba6073c12daf26f0a"}, - {file = "grpcio-1.74.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f251c355167b2360537cf17bea2cf0197995e551ab9da6a0a59b3da5e8704f9"}, - {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f7b5882fb50632ab1e48cb3122d6df55b9afabc265582808036b6e51b9fd6b7"}, - {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:834988b6c34515545b3edd13e902c1acdd9f2465d386ea5143fb558f153a7176"}, - {file = "grpcio-1.74.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:22b834cef33429ca6cc28303c9c327ba9a3fafecbf62fae17e9a7b7163cc43ac"}, - {file = "grpcio-1.74.0-cp39-cp39-win32.whl", hash = "sha256:7d95d71ff35291bab3f1c52f52f474c632db26ea12700c2ff0ea0532cb0b5854"}, - {file = "grpcio-1.74.0-cp39-cp39-win_amd64.whl", hash = "sha256:ecde9ab49f58433abe02f9ed076c7b5be839cf0153883a6d23995937a82392fa"}, - {file = "grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1"}, +groups = ["main"] +files = [ + {file = "grpcio-1.75.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:1ec9cbaec18d9597c718b1ed452e61748ac0b36ba350d558f9ded1a94cc15ec7"}, + {file = "grpcio-1.75.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:7ee5ee42bfae8238b66a275f9ebcf6f295724375f2fa6f3b52188008b6380faf"}, + {file = "grpcio-1.75.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9146e40378f551eed66c887332afc807fcce593c43c698e21266a4227d4e20d2"}, + {file = "grpcio-1.75.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0c40f368541945bb664857ecd7400acb901053a1abbcf9f7896361b2cfa66798"}, + {file = "grpcio-1.75.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:50a6e43a9adc6938e2a16c9d9f8a2da9dd557ddd9284b73b07bd03d0e098d1e9"}, + {file = "grpcio-1.75.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dce15597ca11913b78e1203c042d5723e3ea7f59e7095a1abd0621be0e05b895"}, + {file = "grpcio-1.75.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:851194eec47755101962da423f575ea223c9dd7f487828fe5693920e8745227e"}, + {file = "grpcio-1.75.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ca123db0813eef80625a4242a0c37563cb30a3edddebe5ee65373854cf187215"}, + {file = "grpcio-1.75.0-cp310-cp310-win32.whl", hash = "sha256:222b0851e20c04900c63f60153503e918b08a5a0fad8198401c0b1be13c6815b"}, + {file = "grpcio-1.75.0-cp310-cp310-win_amd64.whl", hash = "sha256:bb58e38a50baed9b21492c4b3f3263462e4e37270b7ea152fc10124b4bd1c318"}, + {file = "grpcio-1.75.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:7f89d6d0cd43170a80ebb4605cad54c7d462d21dc054f47688912e8bf08164af"}, + {file = "grpcio-1.75.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:cb6c5b075c2d092f81138646a755f0dad94e4622300ebef089f94e6308155d82"}, + {file = 
"grpcio-1.75.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:494dcbade5606128cb9f530ce00331a90ecf5e7c5b243d373aebdb18e503c346"}, + {file = "grpcio-1.75.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:050760fd29c8508844a720f06c5827bb00de8f5e02f58587eb21a4444ad706e5"}, + {file = "grpcio-1.75.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:266fa6209b68a537b2728bb2552f970e7e78c77fe43c6e9cbbe1f476e9e5c35f"}, + {file = "grpcio-1.75.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:06d22e1d8645e37bc110f4c589cb22c283fd3de76523065f821d6e81de33f5d4"}, + {file = "grpcio-1.75.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9880c323595d851292785966cadb6c708100b34b163cab114e3933f5773cba2d"}, + {file = "grpcio-1.75.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:55a2d5ae79cd0f68783fb6ec95509be23746e3c239290b2ee69c69a38daa961a"}, + {file = "grpcio-1.75.0-cp311-cp311-win32.whl", hash = "sha256:352dbdf25495eef584c8de809db280582093bc3961d95a9d78f0dfb7274023a2"}, + {file = "grpcio-1.75.0-cp311-cp311-win_amd64.whl", hash = "sha256:678b649171f229fb16bda1a2473e820330aa3002500c4f9fd3a74b786578e90f"}, + {file = "grpcio-1.75.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:fa35ccd9501ffdd82b861809cbfc4b5b13f4b4c5dc3434d2d9170b9ed38a9054"}, + {file = "grpcio-1.75.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:0fcb77f2d718c1e58cc04ef6d3b51e0fa3b26cf926446e86c7eba105727b6cd4"}, + {file = "grpcio-1.75.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:36764a4ad9dc1eb891042fab51e8cdf7cc014ad82cee807c10796fb708455041"}, + {file = "grpcio-1.75.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:725e67c010f63ef17fc052b261004942763c0b18dcd84841e6578ddacf1f9d10"}, + {file = "grpcio-1.75.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91fbfc43f605c5ee015c9056d580a70dd35df78a7bad97e05426795ceacdb59f"}, + {file = "grpcio-1.75.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a9337ac4ce61c388e02019d27fa837496c4b7837cbbcec71b05934337e51531"}, + {file = "grpcio-1.75.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ee16e232e3d0974750ab5f4da0ab92b59d6473872690b5e40dcec9a22927f22e"}, + {file = "grpcio-1.75.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:55dfb9122973cc69520b23d39867726722cafb32e541435707dc10249a1bdbc6"}, + {file = "grpcio-1.75.0-cp312-cp312-win32.whl", hash = "sha256:fb64dd62face3d687a7b56cd881e2ea39417af80f75e8b36f0f81dfd93071651"}, + {file = "grpcio-1.75.0-cp312-cp312-win_amd64.whl", hash = "sha256:6b365f37a9c9543a9e91c6b4103d68d38d5bcb9965b11d5092b3c157bd6a5ee7"}, + {file = "grpcio-1.75.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:1bb78d052948d8272c820bb928753f16a614bb2c42fbf56ad56636991b427518"}, + {file = "grpcio-1.75.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:9dc4a02796394dd04de0b9673cb79a78901b90bb16bf99ed8cb528c61ed9372e"}, + {file = "grpcio-1.75.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:437eeb16091d31498585d73b133b825dc80a8db43311e332c08facf820d36894"}, + {file = "grpcio-1.75.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:c2c39984e846bd5da45c5f7bcea8fafbe47c98e1ff2b6f40e57921b0c23a52d0"}, + {file = "grpcio-1.75.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38d665f44b980acdbb2f0e1abf67605ba1899f4d2443908df9ec8a6f26d2ed88"}, + {file = 
"grpcio-1.75.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2e8e752ab5cc0a9c5b949808c000ca7586223be4f877b729f034b912364c3964"}, + {file = "grpcio-1.75.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3a6788b30aa8e6f207c417874effe3f79c2aa154e91e78e477c4825e8b431ce0"}, + {file = "grpcio-1.75.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc33e67cab6141c54e75d85acd5dec616c5095a957ff997b4330a6395aa9b51"}, + {file = "grpcio-1.75.0-cp313-cp313-win32.whl", hash = "sha256:c8cfc780b7a15e06253aae5f228e1e84c0d3c4daa90faf5bc26b751174da4bf9"}, + {file = "grpcio-1.75.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c91d5b16eff3cbbe76b7a1eaaf3d91e7a954501e9d4f915554f87c470475c3d"}, + {file = "grpcio-1.75.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:0b85f4ebe6b56d2a512201bb0e5f192c273850d349b0a74ac889ab5d38959d16"}, + {file = "grpcio-1.75.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:68c95b1c1e3bf96ceadf98226e9dfe2bc92155ce352fa0ee32a1603040e61856"}, + {file = "grpcio-1.75.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:153c5a7655022c3626ad70be3d4c2974cb0967f3670ee49ece8b45b7a139665f"}, + {file = "grpcio-1.75.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:53067c590ac3638ad0c04272f2a5e7e32a99fec8824c31b73bc3ef93160511fa"}, + {file = "grpcio-1.75.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:78dcc025a144319b66df6d088bd0eda69e1719eb6ac6127884a36188f336df19"}, + {file = "grpcio-1.75.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1ec2937fd92b5b4598cbe65f7e57d66039f82b9e2b7f7a5f9149374057dde77d"}, + {file = "grpcio-1.75.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:597340a41ad4b619aaa5c9b94f7e6ba4067885386342ab0af039eda945c255cd"}, + {file = "grpcio-1.75.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0aa795198b28807d28570c0a5f07bb04d5facca7d3f27affa6ae247bbd7f312a"}, + {file = "grpcio-1.75.0-cp39-cp39-win32.whl", hash = "sha256:585147859ff4603798e92605db28f4a97c821c69908e7754c44771c27b239bbd"}, + {file = "grpcio-1.75.0-cp39-cp39-win_amd64.whl", hash = "sha256:eafbe3563f9cb378370a3fa87ef4870539cf158124721f3abee9f11cd8162460"}, + {file = "grpcio-1.75.0.tar.gz", hash = "sha256:b989e8b09489478c2d19fecc744a298930f40d8b27c3638afbfe84d22f36ce4e"}, ] +[package.dependencies] +typing-extensions = ">=4.12,<5.0" + [package.extras] -protobuf = ["grpcio-tools (>=1.74.0)"] +protobuf = ["grpcio-tools (>=1.75.0)"] [[package]] name = "grpcio-tools" -version = "1.74.0" +version = "1.75.0" description = "Protobuf code generator for gRPC" optional = false python-versions = ">=3.9" -files = [ - {file = "grpcio_tools-1.74.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:796796b4d7e83a9cdd03bb95c6774fca060fd209d83fb9af5f043e9c6f06a1fa"}, - {file = "grpcio_tools-1.74.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d576b7786207359b63c2c2e3c387639b4177cf53b1e43d020b005deead32049e"}, - {file = "grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d73686934bfdd868be0dbfbfcba2a5f50a8b0b71362e86a133e8efcbdc5cad5d"}, - {file = "grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:187f99fd22de6e63fbf4f30b2e054a2e3c4fb80beec73b1f4716ea86192050f5"}, - {file = "grpcio_tools-1.74.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bef8a16c34e68aaa2d246cd358629f8103730cb96cfc521f720378995f218282"}, - {file = "grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:e41084adbae7176097aa9d08a13d98c189895ec8c967f5461975750d3537625a"}, - {file = "grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6b61337b47d981b4d270e3caa83607a900169617478c034e6f6baf16ab22d333"}, - {file = "grpcio_tools-1.74.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7e920982b4eaab253affbd45ec6d5ec12d895f5c143374ef4c3eadef49162373"}, - {file = "grpcio_tools-1.74.0-cp310-cp310-win32.whl", hash = "sha256:b966f3b93f9d24151591d096ecf9c3fdb419a50d486761f7d28a9a69b028b627"}, - {file = "grpcio_tools-1.74.0-cp310-cp310-win_amd64.whl", hash = "sha256:03787990b56f5c3b3f72c722a7e74fbc5a3b769bbc31ad426e2c6f6a28a9d7c8"}, - {file = "grpcio_tools-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:9d9e28fbbab9b9e923c3d286949e8ff81ebbb402458698f0a2b1183b539779db"}, - {file = "grpcio_tools-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:41040eb1b5d1e582687f6f19cf2efc4c191b6eab56b16f6fba50ac085c5ca4dd"}, - {file = "grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:1fdc013118e4e9054b6e1a64d16a0d4a17a4071042e674ada8673406ddb26e59"}, - {file = "grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f037414c527a2c4a3af15451d9e58d7856d0a62b3f6dd3f5b969ecba82f5e843"}, - {file = "grpcio_tools-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536f53a6a8d1ba1c469d085066cfa0dd3bb51f07013b71857bc3ad1eabe3ab49"}, - {file = "grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1e23ff54dea7f6e9543dcebd2c0f4b7c9af39812966c05e1c5289477cb2bf2f7"}, - {file = "grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76072dee9fa99b33eb0c334a16e70d694df762df705c7a2481f702af33d81a28"}, - {file = "grpcio_tools-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bdf91eb722f2990085b1342c277e212ec392e37bd493a2a21d9eb9238f28c3e"}, - {file = "grpcio_tools-1.74.0-cp311-cp311-win32.whl", hash = "sha256:a036cd2a4223901e7a9f6a9b394326a9352a4ad70bdd3f1d893f1b231fcfdf7e"}, - {file = "grpcio_tools-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:d1fdf245178158a92a2dc78e3545b6d13b6c917d9b80931fc85cfb3e9534a07d"}, - {file = "grpcio_tools-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:61d84f6050d7170712600f7ee1dac8849f5dc0bfe0044dd71132ee1e7aa2b373"}, - {file = "grpcio_tools-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:f0129a62711dbc1f1efd51d069d2ce0631d69e033bf3a046606c623acf935e08"}, - {file = "grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:5ec661f3bb41f0d2a30125ea382f4d5c874bf4f26d4d8e3839bb7e3b3c037b3e"}, - {file = "grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7970a9cf3002bec2eff5a449ac7398b77e5d171cbb534c47258c72409d0aea74"}, - {file = "grpcio_tools-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f56d67b04790f84e216353341c6b298f1aeb591e1797fe955f606516c640936"}, - {file = "grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3d0c33cc984d21525f190cb1af479f8da46370df5f2ced1a4e50769ababd0c0"}, - {file = "grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:88e535c1cf349e57e371529ea9918f811c5eff88161f322bbc06d6222bad6d50"}, - {file = "grpcio_tools-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c3cf9401ce72bc49582c2d80e0a2ee0e573e1c3c998c8bc5f739db8845e8e148"}, - {file = "grpcio_tools-1.74.0-cp312-cp312-win32.whl", hash = 
"sha256:b63e250da44b15c67b9a34c5c30c81059bde528fc8af092d7f43194469f7c719"}, - {file = "grpcio_tools-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:519d7cae085ae6695a8031bb990bf7766a922332b0a531e51342abc5431b78b5"}, - {file = "grpcio_tools-1.74.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:e2e22460355adbd0f25fdd7ed8b9ae53afb3875b9d5f34cdf1cf12559418245e"}, - {file = "grpcio_tools-1.74.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:0cab5a2c6ae75b555fee8a1a9a9b575205171e1de392fe2d4139a29e67d8f5bb"}, - {file = "grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:9b18afca48b55832402a716ea4634ef2b68927a8a17ddf4038f51812299255c9"}, - {file = "grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85f442a9e89e276bf89a0c9c76ea71647a927d967759333c1fa40300c27f7bd"}, - {file = "grpcio_tools-1.74.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051ce925b0b99ae2daf61b3cba19962b8655cc2a72758ce4081b89272206f5a3"}, - {file = "grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:98c7b8eb0de6984cd7fa7335ce3383b3bb9a1559edc238c811df88008d5d3593"}, - {file = "grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f8f7d17b7573b9a2a6b4183fa4a56a2ab17370c8d0541e1424cf0c9c6f863434"}, - {file = "grpcio_tools-1.74.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:db08b91ea0cd66dc4b1b929100e7aa84c9c10c51573c8282ec1ba05b41f887ef"}, - {file = "grpcio_tools-1.74.0-cp313-cp313-win32.whl", hash = "sha256:4b6c5efb331ae9e5f614437f4a5938459a8a5a1ab3dfe133d2bbdeaba39b894d"}, - {file = "grpcio_tools-1.74.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8324cd67f61f7900d227b36913ee5f0302ba3ba8777c8bc705afa8174098d28"}, - {file = "grpcio_tools-1.74.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:39045d07f2582b35685858e1616761b7ad45085e446941c8f9f7c6da523f83c3"}, - {file = "grpcio_tools-1.74.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:406ec87e2fd4cb6a40229fbecebcd11973afd4747484bfd5c2bc2ebe81545b7a"}, - {file = "grpcio_tools-1.74.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:70725de8cf724c54040502f199ea28df0e8bc480175eacbed8c999c9ad4c0ffe"}, - {file = "grpcio_tools-1.74.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:333003e6a9dc304da9e6b086294a8d25212c542284e60699a72b456c515f114c"}, - {file = "grpcio_tools-1.74.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5274a4f227e4bd244e3890a9238bda47b169765421ea87f157e4955ea39b4326"}, - {file = "grpcio_tools-1.74.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f476f1ec637888a49402a1acff52bb641ec01a8672f60b57c5ee0a1d0e0763d2"}, - {file = "grpcio_tools-1.74.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0e8c22e390800175417ec646fac99acaadcbd2f5cdb1a27694995ca86d3bbfd3"}, - {file = "grpcio_tools-1.74.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77b400d3c87b1f85be505366e299e00214e2266f604ab58616fc77d016336a24"}, - {file = "grpcio_tools-1.74.0-cp39-cp39-win32.whl", hash = "sha256:fc572f8af2d8f13db4b0091dcf518d6ca5c82ea6f59e8716683bd8aeb729b203"}, - {file = "grpcio_tools-1.74.0-cp39-cp39-win_amd64.whl", hash = "sha256:700d8933684f66dd8edc0324590fa61930bed8f9fb66322a48f5c7ba08386810"}, - {file = "grpcio_tools-1.74.0.tar.gz", hash = "sha256:88ab9eb18b6ac1b4872add6b394073bd8d44eee7c32e4dc60a022e25ffaffb95"}, +groups = ["main"] +files = [ + {file = "grpcio_tools-1.75.0-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:5ca29b0ae735044c6a48072cf7bf53e34ce9ab03eec66acaf2173071d4f66d8a"}, + {file = "grpcio_tools-1.75.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d1a224887f70981683dfcaacc253c08f3680b919c0b2353fbb57f89b27e1c9b9"}, + {file = "grpcio_tools-1.75.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c49649d2b46a5a09419631adec105b05bcb016e5727c8f1b08ac8e16d9b0e3e0"}, + {file = "grpcio_tools-1.75.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:c944610bc009185f3da399030a2a8a9d550ae3246f93ad20ff63593fa883ddfb"}, + {file = "grpcio_tools-1.75.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:186c11fe9c8ef90b0862013b61876693644c952fda8fffef6ab0de0a83f90479"}, + {file = "grpcio_tools-1.75.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:688668666265a8f3e5eb86f73694e8adac2d2cc5f40c90249ce80bf6c6cec9ea"}, + {file = "grpcio_tools-1.75.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9083fe53cbe17b972d9ede47b1e6c82ec532a91770d41c790c4f9b39291041c3"}, + {file = "grpcio_tools-1.75.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3072b10f4ad82739650aa9d667b536de8d4973083236215b7bf2389ba75bb507"}, + {file = "grpcio_tools-1.75.0-cp310-cp310-win32.whl", hash = "sha256:c42fc86ab55018ba5afe2aa95d6d34e2e763da06eff23c08bed487a556341071"}, + {file = "grpcio_tools-1.75.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e0c8d5d4bdce7f32e2fef3e2304cdca1fbb16a6469c7d3bce38884ee4c449d1"}, + {file = "grpcio_tools-1.75.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:6c3b8dbe8b2ad7df4ba661b5ee29ae8fe79d2715aade519847deaef26f5c1a06"}, + {file = "grpcio_tools-1.75.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:cdbccc5a4809ef9414b7c434dd1aabc94b66a01c01c13ecc1edba9f8f4277b44"}, + {file = "grpcio_tools-1.75.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:16a9597d1bd4143a71bfae341a32952a64c094a63d3d0bdd24b21fdc8b843846"}, + {file = "grpcio_tools-1.75.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:899c46520446ad1935f5899729746b390e13085e9757d043401298b18fa37d99"}, + {file = "grpcio_tools-1.75.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:53c116d0d5df70845330eefb98ef4242ff09be264a22bc5e18f171a3047c9e66"}, + {file = "grpcio_tools-1.75.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:495ce168f996d4c42328e17b788d51d808fc585a80612fe70943c00ac16d0fca"}, + {file = "grpcio_tools-1.75.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:26f1f3cedebe465f97b5aad312fb775a4bd53a0e88d08c4000e588c195519eca"}, + {file = "grpcio_tools-1.75.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:82692be482cdcf7ac9b79563dbea99333835aaa3f5e7f0641689766b64b91543"}, + {file = "grpcio_tools-1.75.0-cp311-cp311-win32.whl", hash = "sha256:fd038847974aeb883ee0f3b5b535d85618ad32789c15c9bf24af6c12a44f67f1"}, + {file = "grpcio_tools-1.75.0-cp311-cp311-win_amd64.whl", hash = "sha256:5c5465cd7b83c34f3c987a235fe3b04012411502d4bc66de5a34b238617ded4c"}, + {file = "grpcio_tools-1.75.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:6ded12c79fb56ceae0ce60e653453159bfc2ccb044922b7e7d721de6c8e04506"}, + {file = "grpcio_tools-1.75.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ebdac7cc820459874f3b19eddddae19c0c7e7cdf228aee8e7567cec1fddb2ae3"}, + {file = "grpcio_tools-1.75.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:509ec0ce7c4269c2bea6015efcdcde00a5d55d97c88ad17587b4247cdc3d2fe8"}, + {file = 
"grpcio_tools-1.75.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:a68a8dcbcbd1df33e7c08c2ceeb69ed8fd53e235784ac680dfe3fc1e89aac2ac"}, + {file = "grpcio_tools-1.75.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ac8a663e955bf3188f76d93d7fdc656f346ff54ea7e512eb034374c6fd61b50"}, + {file = "grpcio_tools-1.75.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c30cb36ae1a4ed5fb1960f4bc0000548fecb9ff21a51d78a1f54e3424f971c0"}, + {file = "grpcio_tools-1.75.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:35d4368794506db2b0acde60e7e2bae21255cc0d05db9ffc078510ab6a84ff4f"}, + {file = "grpcio_tools-1.75.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:edefbb90bb7ddc4eadac3463d5f7084e1d43b1d713254f668dd55c25db5b5ef2"}, + {file = "grpcio_tools-1.75.0-cp312-cp312-win32.whl", hash = "sha256:c2bad23bd0d43acd9d7032b6ffb04f5eb176d853cd32967eb2c4a39044c81cfe"}, + {file = "grpcio_tools-1.75.0-cp312-cp312-win_amd64.whl", hash = "sha256:0f4f31035a5178acd924a052b8954d5ac71319092b57e3711438ca6518b71017"}, + {file = "grpcio_tools-1.75.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:69742254df93323275b7ee5ac017e3b9fdba8ecc6dca00bd6b2cd1c70c80a9c2"}, + {file = "grpcio_tools-1.75.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a07aa71ad96103b18bb84dc069dd139897356116d2aaa68d3df84d4d59701ae8"}, + {file = "grpcio_tools-1.75.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:dcfb12654fb1d6ce84f4a55d3dfbc267a04d53dc9b52ee0974b2110d02f68dac"}, + {file = "grpcio_tools-1.75.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:990d183fee5a2ef9d4f3a220b6506f5da740271da175efcb7e4e34ebc3191a12"}, + {file = "grpcio_tools-1.75.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39c6ff052960a3301cd920549384a2ad7cb3165c778feed601cae2a2131b63f8"}, + {file = "grpcio_tools-1.75.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:60bd449814fe3cebeda11c0cda3a3adffd81941559aa254e6d153751baa0cffc"}, + {file = "grpcio_tools-1.75.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:91e430e9368afc38e94645f744840ab06995cfb7312233623c5d7370f8c0dd7c"}, + {file = "grpcio_tools-1.75.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3351acef4b8897e99bdceae5cfcc300e1e5c1d88c0fc2ffc2b5ca1bd5ce4ced8"}, + {file = "grpcio_tools-1.75.0-cp313-cp313-win32.whl", hash = "sha256:1241f8c65f2429f00d9e15e819aca2138c5aa571f0ac644ab658a0281dc177d9"}, + {file = "grpcio_tools-1.75.0-cp313-cp313-win_amd64.whl", hash = "sha256:193ce6aef33417849289cbb518402fe60c00d0fa66d68ea9a30c98cb8818280c"}, + {file = "grpcio_tools-1.75.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:b9f64ab078f1e8ea09ceb72c3f7a55b9cbec515fd20e804aea78491adf785503"}, + {file = "grpcio_tools-1.75.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:7154a35243a49704782b39e8780d9a0adb393a9cedba2ab65c352e94ff42fe8c"}, + {file = "grpcio_tools-1.75.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9a620de24caa85b102d2416c3f679260d1d4103edcc2806d7dda43aad1913e01"}, + {file = "grpcio_tools-1.75.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:08cc1b8a1364a5b8f975e6a7273684d13630caab76c209a201464ad05f826eb9"}, + {file = "grpcio_tools-1.75.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a0c899175dd23e96f61b3ab8153642e0ae0182b9c9a582cd0cc4702a056d845"}, + {file = "grpcio_tools-1.75.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:8707b63acb1e08c4031e959936af45487bc185a3fa1ae37fdac465e8ab311774"}, + {file = "grpcio_tools-1.75.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4d28cb03efb871a0ce13dc0fe1416c237ed6d70c42f19a64cef24aba88dd7c5f"}, + {file = "grpcio_tools-1.75.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:93b297f77a3f9fe99ea30597e98fd62d3d40bc2520f3e6c6c12b202710a2581d"}, + {file = "grpcio_tools-1.75.0-cp39-cp39-win32.whl", hash = "sha256:05087b1879b3f32a2182f1365e34233236c22e1a1e8cc448b5d29ea58d661846"}, + {file = "grpcio_tools-1.75.0-cp39-cp39-win_amd64.whl", hash = "sha256:aaec9c9b1cb0ff3823961e74b6cf0a1e6b0e7a82fa2fb0b2bc7b312978bd34f7"}, + {file = "grpcio_tools-1.75.0.tar.gz", hash = "sha256:eb5e4025034d92da3c81fd5e3468c33d5ae7571b07a72c385b5ec1746658573f"}, ] [package.dependencies] -grpcio = ">=1.74.0" +grpcio = ">=1.75.0" protobuf = ">=6.31.1,<7.0.0" setuptools = "*" @@ -1040,6 +1080,7 @@ version = "23.0.0" description = "WSGI HTTP Server for UNIX" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"}, {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"}, @@ -1061,6 +1102,7 @@ version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, @@ -1072,6 +1114,7 @@ version = "1.0.9" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, @@ -1093,6 +1136,7 @@ version = "0.28.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -1105,7 +1149,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -1117,6 +1161,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -1131,6 +1176,7 @@ version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -1142,6 +1188,7 @@ version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, @@ -1153,6 +1200,7 @@ version = "2.2.0" description = "Safely pass data to untrusted environments and back." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, @@ -1164,6 +1212,7 @@ version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -1181,6 +1230,7 @@ version = "5.5.4" description = "Messaging library for Python." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "kombu-5.5.4-py3-none-any.whl", hash = "sha256:a12ed0557c238897d8e518f1d1fdf84bd1516c5e305af2dacd85c2015115feb8"}, {file = "kombu-5.5.4.tar.gz", hash = "sha256:886600168275ebeada93b888e831352fe578168342f0d1d5833d88ba0d847363"}, @@ -1198,7 +1248,7 @@ azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6. 
confluentkafka = ["confluent-kafka (>=2.2.0)"] consul = ["python-consul2 (==0.1.5)"] gcpubsub = ["google-cloud-monitoring (>=2.16.0)", "google-cloud-pubsub (>=2.18.4)", "grpcio (==1.67.0)", "protobuf (==4.25.5)"] -librabbitmq = ["librabbitmq (>=2.0.0)"] +librabbitmq = ["librabbitmq (>=2.0.0) ; python_version < \"3.11\""] mongodb = ["pymongo (==4.10.1)"] msgpack = ["msgpack (==1.1.0)"] pyro = ["pyro4 (==4.82)"] @@ -1216,6 +1266,7 @@ version = "0.7.3" description = "Python logging made (stupidly) simple" optional = false python-versions = "<4.0,>=3.5" +groups = ["main"] files = [ {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, @@ -1226,7 +1277,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==8.1.3)", "build (==1.2.2)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.5.0)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.13.0)", "mypy (==v1.4.1)", "myst-parser (==4.0.0)", "pre-commit (==4.0.1)", "pytest (==6.1.2)", "pytest (==8.3.2)", "pytest-cov (==2.12.1)", "pytest-cov (==5.0.0)", "pytest-cov (==6.0.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.1.0)", "sphinx-rtd-theme (==3.0.2)", "tox (==3.27.1)", "tox (==4.23.2)", "twine (==6.0.1)"] +dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] [[package]] name = "mako" @@ -1234,6 +1285,7 @@ version = "1.3.10" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"}, {file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"}, @@ -1253,6 +1305,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1323,6 +1376,7 @@ version = "4.0.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "marshmallow-4.0.1-py3-none-any.whl", hash = "sha256:72f14ef346f81269dbddee891bac547dda1501e9e08b6a809756ea3dbb7936a1"}, {file = "marshmallow-4.0.1.tar.gz", hash = "sha256:e1d860bd262737cb2d34e1541b84cb52c32c72c9474e3fe6f30f137ef8b0d97f"}, @@ -1339,6 +1393,7 @@ version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -1350,6 +1405,7 @@ version = "6.6.4" description = "multidict implementation" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "multidict-6.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b8aa6f0bd8125ddd04a6593437bad6a7e70f300ff4180a531654aa2ab3f6d58f"}, {file = "multidict-6.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b9e5853bbd7264baca42ffc53391b490d65fe62849bf2c690fa3f6273dbcd0cb"}, @@ -1465,49 +1521,50 @@ files = [ [[package]] name = "mypy" -version = "1.17.1" +version = "1.18.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" -files = [ - {file = "mypy-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3fbe6d5555bf608c47203baa3e72dbc6ec9965b3d7c318aa9a4ca76f465bd972"}, - {file = "mypy-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80ef5c058b7bce08c83cac668158cb7edea692e458d21098c7d3bce35a5d43e7"}, - {file = "mypy-1.17.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a580f8a70c69e4a75587bd925d298434057fe2a428faaf927ffe6e4b9a98df"}, - {file = "mypy-1.17.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd86bb649299f09d987a2eebb4d52d10603224500792e1bee18303bbcc1ce390"}, - {file = "mypy-1.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a76906f26bd8d51ea9504966a9c25419f2e668f012e0bdf3da4ea1526c534d94"}, - {file = "mypy-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:e79311f2d904ccb59787477b7bd5d26f3347789c06fcd7656fa500875290264b"}, - {file = "mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58"}, - {file = "mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5"}, - {file = "mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd"}, - {file = "mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b"}, - {file = 
"mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5"}, - {file = "mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b"}, - {file = "mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb"}, - {file = "mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403"}, - {file = "mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056"}, - {file = "mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341"}, - {file = "mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb"}, - {file = "mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19"}, - {file = "mypy-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7"}, - {file = "mypy-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81"}, - {file = "mypy-1.17.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6"}, - {file = "mypy-1.17.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849"}, - {file = "mypy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14"}, - {file = "mypy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a"}, - {file = "mypy-1.17.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:15a83369400454c41ed3a118e0cc58bd8123921a602f385cb6d6ea5df050c733"}, - {file = "mypy-1.17.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:55b918670f692fc9fba55c3298d8a3beae295c5cded0a55dccdc5bbead814acd"}, - {file = "mypy-1.17.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:62761474061feef6f720149d7ba876122007ddc64adff5ba6f374fda35a018a0"}, - {file = "mypy-1.17.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c49562d3d908fd49ed0938e5423daed8d407774a479b595b143a3d7f87cdae6a"}, - {file = "mypy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:397fba5d7616a5bc60b45c7ed204717eaddc38f826e3645402c426057ead9a91"}, - {file = "mypy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:9d6b20b97d373f41617bd0708fd46aa656059af57f2ef72aa8c7d6a2b73b74ed"}, - {file = "mypy-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5d1092694f166a7e56c805caaf794e0585cabdbf1df36911c414e4e9abb62ae9"}, - {file = "mypy-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79d44f9bfb004941ebb0abe8eff6504223a9c1ac51ef967d1263c6572bbebc99"}, - {file = "mypy-1.17.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b01586eed696ec905e61bd2568f48740f7ac4a45b3a468e6423a03d3788a51a8"}, - {file = "mypy-1.17.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43808d9476c36b927fbcd0b0255ce75efe1b68a080154a38ae68a7e62de8f0f8"}, - {file = "mypy-1.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:feb8cc32d319edd5859da2cc084493b3e2ce5e49a946377663cc90f6c15fb259"}, - {file = "mypy-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d7598cf74c3e16539d4e2f0b8d8c318e00041553d83d4861f87c7a72e95ac24d"}, - {file = "mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9"}, - {file = "mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01"}, +groups = ["dev"] +files = [ + {file = "mypy-1.18.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2761b6ae22a2b7d8e8607fb9b81ae90bc2e95ec033fd18fa35e807af6c657763"}, + {file = "mypy-1.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b10e3ea7f2eec23b4929a3fabf84505da21034a4f4b9613cda81217e92b74f3"}, + {file = "mypy-1.18.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:261fbfced030228bc0f724d5d92f9ae69f46373bdfd0e04a533852677a11dbea"}, + {file = "mypy-1.18.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4dc6b34a1c6875e6286e27d836a35c0d04e8316beac4482d42cfea7ed2527df8"}, + {file = "mypy-1.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1cabb353194d2942522546501c0ff75c4043bf3b63069cb43274491b44b773c9"}, + {file = "mypy-1.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:738b171690c8e47c93569635ee8ec633d2cdb06062f510b853b5f233020569a9"}, + {file = "mypy-1.18.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c903857b3e28fc5489e54042684a9509039ea0aedb2a619469438b544ae1961"}, + {file = "mypy-1.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a0c8392c19934c2b6c65566d3a6abdc6b51d5da7f5d04e43f0eb627d6eeee65"}, + {file = "mypy-1.18.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f85eb7efa2ec73ef63fc23b8af89c2fe5bf2a4ad985ed2d3ff28c1bb3c317c92"}, + {file = "mypy-1.18.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:82ace21edf7ba8af31c3308a61dc72df30500f4dbb26f99ac36b4b80809d7e94"}, + {file = "mypy-1.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a2dfd53dfe632f1ef5d161150a4b1f2d0786746ae02950eb3ac108964ee2975a"}, + {file = "mypy-1.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:320f0ad4205eefcb0e1a72428dde0ad10be73da9f92e793c36228e8ebf7298c0"}, + {file = "mypy-1.18.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:502cde8896be8e638588b90fdcb4c5d5b8c1b004dfc63fd5604a973547367bb9"}, + {file = "mypy-1.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7509549b5e41be279afc1228242d0e397f1af2919a8f2877ad542b199dc4083e"}, + {file = "mypy-1.18.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5956ecaabb3a245e3f34100172abca1507be687377fe20e24d6a7557e07080e2"}, + {file = "mypy-1.18.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8750ceb014a96c9890421c83f0db53b0f3b8633e2864c6f9bc0a8e93951ed18d"}, + {file = "mypy-1.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fb89ea08ff41adf59476b235293679a6eb53a7b9400f6256272fb6029bec3ce5"}, + {file = 
"mypy-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:2657654d82fcd2a87e02a33e0d23001789a554059bbf34702d623dafe353eabf"}, + {file = "mypy-1.18.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d70d2b5baf9b9a20bc9c730015615ae3243ef47fb4a58ad7b31c3e0a59b5ef1f"}, + {file = "mypy-1.18.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b8367e33506300f07a43012fc546402f283c3f8bcff1dc338636affb710154ce"}, + {file = "mypy-1.18.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:913f668ec50c3337b89df22f973c1c8f0b29ee9e290a8b7fe01cc1ef7446d42e"}, + {file = "mypy-1.18.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a0e70b87eb27b33209fa4792b051c6947976f6ab829daa83819df5f58330c71"}, + {file = "mypy-1.18.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c378d946e8a60be6b6ede48c878d145546fb42aad61df998c056ec151bf6c746"}, + {file = "mypy-1.18.1-cp313-cp313-win_amd64.whl", hash = "sha256:2cd2c1e0f3a7465f22731987fff6fc427e3dcbb4ca5f7db5bbeaff2ff9a31f6d"}, + {file = "mypy-1.18.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ba24603c58e34dd5b096dfad792d87b304fc6470cbb1c22fd64e7ebd17edcc61"}, + {file = "mypy-1.18.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ed36662fb92ae4cb3cacc682ec6656208f323bbc23d4b08d091eecfc0863d4b5"}, + {file = "mypy-1.18.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:040ecc95e026f71a9ad7956fea2724466602b561e6a25c2e5584160d3833aaa8"}, + {file = "mypy-1.18.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:937e3ed86cb731276706e46e03512547e43c391a13f363e08d0fee49a7c38a0d"}, + {file = "mypy-1.18.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1f95cc4f01c0f1701ca3b0355792bccec13ecb2ec1c469e5b85a6ef398398b1d"}, + {file = "mypy-1.18.1-cp314-cp314-win_amd64.whl", hash = "sha256:e4f16c0019d48941220ac60b893615be2f63afedaba6a0801bdcd041b96991ce"}, + {file = "mypy-1.18.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e37763af63a8018308859bc83d9063c501a5820ec5bd4a19f0a2ac0d1c25c061"}, + {file = "mypy-1.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:51531b6e94f34b8bd8b01dee52bbcee80daeac45e69ec5c36e25bce51cbc46e6"}, + {file = "mypy-1.18.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dbfdea20e90e9c5476cea80cfd264d8e197c6ef2c58483931db2eefb2f7adc14"}, + {file = "mypy-1.18.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99f272c9b59f5826fffa439575716276d19cbf9654abc84a2ba2d77090a0ba14"}, + {file = "mypy-1.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8c05a7f8c00300a52f3a4fcc95a185e99bf944d7e851ff141bae8dcf6dcfeac4"}, + {file = "mypy-1.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:2fbcecbe5cf213ba294aa8c0b8c104400bf7bb64db82fb34fe32a205da4b3531"}, + {file = "mypy-1.18.1-py3-none-any.whl", hash = "sha256:b76a4de66a0ac01da1be14ecc8ae88ddea33b8380284a9e3eae39d57ebcbe26e"}, + {file = "mypy-1.18.1.tar.gz", hash = "sha256:9e988c64ad3ac5987f43f5154f884747faf62141b7f842e87465b45299eea5a9"}, ] [package.dependencies] @@ -1528,6 +1585,7 @@ version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, @@ -1539,6 +1597,7 @@ version = "1.0.1" description = "Ordered Multivalue Dictionary" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "orderedmultidict-1.0.1-py2.py3-none-any.whl", hash = "sha256:43c839a17ee3cdd62234c47deca1a8508a3f2ca1d0678a3bf791c87cf84adbf3"}, {file = "orderedmultidict-1.0.1.tar.gz", hash = "sha256:04070bbb5e87291cc9bfa51df413677faf2141c73c61d2a5f7b26bea3cd882ad"}, @@ -1553,6 +1612,7 @@ version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, @@ -1564,6 +1624,7 @@ version = "3.3.0" description = "RabbitMQ Focused AMQP low-level library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pamqp-3.3.0-py2.py3-none-any.whl", hash = "sha256:c901a684794157ae39b52cbf700db8c9aae7a470f13528b9d7b4e5f7202f8eb0"}, {file = "pamqp-3.3.0.tar.gz", hash = "sha256:40b8795bd4efcf2b0f8821c1de83d12ca16d5760f4507836267fd7a02b06763b"}, @@ -1579,6 +1640,7 @@ version = "1.7.4" description = "comprehensive password hashing framework supporting over 30 schemes" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, @@ -1599,6 +1661,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -1610,6 +1673,7 @@ version = "4.4.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, @@ -1626,6 +1690,7 @@ version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, @@ -1641,6 +1706,7 @@ version = "3.0.52" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955"}, {file = "prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855"}, @@ -1655,6 +1721,7 @@ version = "0.3.2" description = "Accelerated property cache" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770"}, {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3"}, @@ -1758,20 +1825,21 @@ files = [ [[package]] name = "protobuf" -version = "6.32.0" +version = "6.32.1" description = "" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741"}, - {file = "protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e"}, - {file = "protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0"}, - {file = "protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1"}, - {file = "protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c"}, - {file = "protobuf-6.32.0-cp39-cp39-win32.whl", hash = "sha256:7db8ed09024f115ac877a1427557b838705359f047b2ff2f2b2364892d19dacb"}, - {file = "protobuf-6.32.0-cp39-cp39-win_amd64.whl", hash = "sha256:15eba1b86f193a407607112ceb9ea0ba9569aed24f93333fe9a497cf2fda37d3"}, - {file = "protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783"}, - {file = "protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2"}, + {file = "protobuf-6.32.1-cp310-abi3-win32.whl", hash = "sha256:a8a32a84bc9f2aad712041b8b366190f71dde248926da517bde9e832e4412085"}, + {file = "protobuf-6.32.1-cp310-abi3-win_amd64.whl", hash = "sha256:b00a7d8c25fa471f16bc8153d0e53d6c9e827f0953f3c09aaa4331c718cae5e1"}, + {file = "protobuf-6.32.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d8c7e6eb619ffdf105ee4ab76af5a68b60a9d0f66da3ea12d1640e6d8dab7281"}, + {file = "protobuf-6.32.1-cp39-abi3-manylinux2014_aarch64.whl", hash = 
"sha256:2f5b80a49e1eb7b86d85fcd23fe92df154b9730a725c3b38c4e43b9d77018bf4"}, + {file = "protobuf-6.32.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:b1864818300c297265c83a4982fd3169f97122c299f56a56e2445c3698d34710"}, + {file = "protobuf-6.32.1-cp39-cp39-win32.whl", hash = "sha256:68ff170bac18c8178f130d1ccb94700cf72852298e016a2443bdb9502279e5f1"}, + {file = "protobuf-6.32.1-cp39-cp39-win_amd64.whl", hash = "sha256:d0975d0b2f3e6957111aa3935d08a0eb7e006b1505d825f862a1fffc8348e122"}, + {file = "protobuf-6.32.1-py3-none-any.whl", hash = "sha256:2601b779fc7d32a866c6b4404f9d42a3f67c5b9f3f15b4db3cccabe06b95c346"}, + {file = "protobuf-6.32.1.tar.gz", hash = "sha256:ee2469e4a021474ab9baafea6cd070e5bf27c7d29433504ddea1a4ee5850f68d"}, ] [[package]] @@ -1780,6 +1848,7 @@ version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, @@ -1857,6 +1926,7 @@ version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -1868,6 +1938,7 @@ version = "2.14.0" description = "Python style guide checker" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d"}, {file = "pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783"}, @@ -1875,13 +1946,14 @@ files = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.11.9" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, - {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, + {file = "pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2"}, + {file = "pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2"}, ] [package.dependencies] @@ -1892,7 +1964,7 @@ typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -1900,6 +1972,7 @@ version = "2.33.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, @@ 
-2011,6 +2084,7 @@ version = "3.4.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f"}, {file = "pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58"}, @@ -2022,6 +2096,7 @@ version = "2.19.2" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, @@ -2036,6 +2111,7 @@ version = "8.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"}, {file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"}, @@ -2057,6 +2133,7 @@ version = "0.25.3" description = "Pytest support for asyncio" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, {file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, @@ -2071,13 +2148,14 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" -version = "6.2.1" +version = "6.3.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5"}, - {file = "pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2"}, + {file = "pytest_cov-6.3.0-py3-none-any.whl", hash = "sha256:440db28156d2468cafc0415b4f8e50856a0d11faefa38f30906048fe490f1749"}, + {file = "pytest_cov-6.3.0.tar.gz", hash = "sha256:35c580e7800f87ce892e687461166e1ac2bcb8fb9e13aea79032518d6e503ff2"}, ] [package.dependencies] @@ -2094,6 +2172,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2108,6 +2187,7 @@ version = "1.1.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc"}, {file = "python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab"}, @@ -2122,6 +2202,7 @@ version = "3.5.0" description = "JOSE implementation in Python" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771"}, {file = "python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b"}, @@ -2144,6 +2225,7 @@ version = "8.0.4" description = "A Python slugify application that also handles Unicode" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856"}, {file = "python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8"}, @@ -2161,6 +2243,7 @@ version = "2025.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, @@ -2172,6 +2255,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2234,6 +2318,7 @@ version = "6.4.0" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f"}, {file = 
"redis-6.4.0.tar.gz", hash = "sha256:b01bc7282b8444e28ec36b261df5375183bb47a07eb9c603f284e89cbc5ef010"}, @@ -2250,6 +2335,7 @@ version = "2.32.5" description = "Python HTTP for Humans." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, @@ -2271,6 +2357,7 @@ version = "3.1.0" description = "Manipulate well-formed Roman numerals" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c"}, {file = "roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d"}, @@ -2286,6 +2373,7 @@ version = "4.9.1" description = "Pure-Python RSA implementation" optional = false python-versions = "<4,>=3.6" +groups = ["main"] files = [ {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, @@ -2300,19 +2388,20 @@ version = "80.9.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] -core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; 
python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" @@ -2320,6 +2409,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -2331,6 +2421,7 @@ version = "0.0.1" description = "A generic slugifier." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "slugify-0.0.1.tar.gz", hash = "sha256:c5703cc11c1a6947536f3ce8bb306766b8bb5a84a53717f5a703ce0f18235e4c"}, ] @@ -2341,6 +2432,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -2352,6 +2444,7 @@ version = "3.0.1" description = "This package provides 32 stemmers for 30 languages generated from Snowball algorithms." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" +groups = ["dev"] files = [ {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, @@ -2363,6 +2456,7 @@ version = "8.2.3" description = "Python documentation generator" optional = false python-versions = ">=3.11" +groups = ["dev"] files = [ {file = "sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3"}, {file = "sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348"}, @@ -2398,6 +2492,7 @@ version = "3.6.0" description = "Sphinx API documentation generator" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "sphinx_autoapi-3.6.0-py3-none-any.whl", hash = "sha256:f3b66714493cab140b0e896d33ce7137654a16ac1edb6563edcbd47bf975f711"}, {file = "sphinx_autoapi-3.6.0.tar.gz", hash = "sha256:c685f274e41d0842ae7e199460c322c4bd7fec816ccc2da8d806094b4f64af06"}, @@ -2415,6 +2510,7 @@ version = "3.0.2" description = "Read the Docs theme for Sphinx" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13"}, {file = "sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85"}, @@ -2434,6 +2530,7 @@ version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -2450,6 +2547,7 @@ version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -2466,6 +2564,7 @@ version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -2482,6 +2581,7 @@ version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" optional = false python-versions = ">=2.7" +groups = ["dev"] files = [ {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, @@ -2496,6 +2596,7 @@ version = "1.0.1" description = "A sphinx extension which renders display math in HTML via 
JavaScript" optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -2510,6 +2611,7 @@ version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -2526,6 +2628,7 @@ version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -2542,6 +2645,7 @@ version = "2.0.43" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "SQLAlchemy-2.0.43-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21ba7a08a4253c5825d1db389d4299f64a100ef9800e4624c8bf70d8f136e6ed"}, {file = "SQLAlchemy-2.0.43-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11b9503fa6f8721bef9b8567730f664c5a5153d25e247aadc69247c4bc605227"}, @@ -2633,13 +2737,14 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.47.3" +version = "0.48.0" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51"}, - {file = "starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9"}, + {file = "starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659"}, + {file = "starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46"}, ] [package.dependencies] @@ -2655,6 +2760,7 @@ version = "1.3" description = "The most basic Text::Unidecode port" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, @@ -2666,6 +2772,7 @@ version = "4.15.0" description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, @@ -2677,6 +2784,7 @@ version = "0.4.1" description = "Runtime typing introspection tools" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, @@ -2691,6 +2799,7 @@ version = "2025.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] files = [ {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, @@ -2702,13 +2811,14 @@ version = "2.5.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2719,6 +2829,7 @@ version = "1.30" description = "UUID object and generation functions (Python 2.3 or higher)" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "uuid-1.30.tar.gz", hash = "sha256:1f87cc004ac5120466f36c5beae48b4c48cc411968eed0eaecd3da82aa96193f"}, ] @@ -2729,6 +2840,7 @@ version = "0.35.0" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a"}, {file = "uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01"}, @@ -2739,7 +2851,7 @@ click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "vine" @@ -2747,6 +2859,7 @@ version = "5.1.0" description = "Python promises." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"}, {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, @@ -2758,6 +2871,7 @@ version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -2769,13 +2883,15 @@ version = "1.2.0" description = "A small Python utility to set file creation time on Windows" optional = false python-versions = ">=3.5" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, ] [package.extras] -dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] +dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] [[package]] name = "yarl" @@ -2783,6 +2899,7 @@ version = "1.20.1" description = "Yet another URL library" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4"}, {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a"}, @@ -2896,6 +3013,6 @@ multidict = ">=4.0" propcache = ">=0.2.1" [metadata] -lock-version = "2.0" -python-versions = "3.12.0" -content-hash = "f9823e90e9e466fb0763a2100ce5f14969d57d84fcbe203e80eddae88f46d19d" +lock-version = "2.1" +python-versions = "3.12.11" +content-hash = "553fb8d81f67ea7c51950d31416b6355494cf7c79584deb13e013d41463c28da" diff --git a/pyproject.toml b/pyproject.toml index 79e13e8..15957bd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ description = "" authors = ["Your Team "] [tool.poetry.dependencies] -python = "3.12.0" +python = "3.12.11" fastapi = "^0.116.1" uvicorn = "0.35.0" gunicorn = "^23.0.0" @@ -32,6 +32,7 @@ pytz = "^2025.2" grpcio = "^1.69.0" grpcio-tools = "^1.69.0" + [tool.poetry.group.dev.dependencies] pytest = 
"^8.3.4" pytest-asyncio = "^0.25.0" From f704cfc665d677c8867c0c98e4669e838e90ed48 Mon Sep 17 00:00:00 2001 From: medniy <20140819+Medniy2000@users.noreply.github.com> Date: Tue, 23 Sep 2025 22:17:54 +0300 Subject: [PATCH 07/13] feature/health check (#20) * fix readme issue * init health check api endpoint * implemented is_healthy for messaging * test mq_client is_healthy for rabbit_mq and kafka * refactor repositories * improve repository * extend users repository tests --- .launch/tests/.test_env | 8 +- README.rst | 2 +- docker-compose-tests.yml | 35 +- src/app/application/common/services/base.py | 2 +- src/app/config/settings.py | 2 +- .../messaging/clients/kafka_client.py | 16 +- .../messaging/clients/rabbitmq_client.py | 9 + src/app/infrastructure/messaging/mq_client.py | 6 + .../repositories/base/__init__.py | 0 .../repositories/base/abstract.py | 87 ++++ .../{base.py => base/base_psql_repository.py} | 144 ++----- .../infrastructure/repositories/container.py | 6 +- .../repositories/users_repository.py | 4 +- .../api/v1/endpoints/debug/resources.py | 9 +- tests/infrastructure/messaging/__init__.py | 0 .../messaging/test_kafka_client.py | 28 ++ .../messaging/test_rabbit_mq_client.py | 29 ++ .../repositories/test_repository_general.py | 109 +++-- .../repositories/test_users_repository.py | 401 ++++++++++++++++++ 19 files changed, 743 insertions(+), 154 deletions(-) create mode 100644 src/app/infrastructure/repositories/base/__init__.py create mode 100644 src/app/infrastructure/repositories/base/abstract.py rename src/app/infrastructure/repositories/{base.py => base/base_psql_repository.py} (85%) create mode 100644 tests/infrastructure/messaging/__init__.py create mode 100644 tests/infrastructure/messaging/test_kafka_client.py create mode 100644 tests/infrastructure/messaging/test_rabbit_mq_client.py diff --git a/.launch/tests/.test_env b/.launch/tests/.test_env index e2de1cd..090d435 100644 --- a/.launch/tests/.test_env +++ b/.launch/tests/.test_env @@ -29,16 +29,16 @@ GRPC_PORT=50051 # Redis # ------------------------------------------------------------------------------ -REDIS_URL=redis://172.17.0.1:6389/0 +REDIS_URL=redis://172.17.0.1:6382/0 # Celery # ------------------------------------------------------------------------------ -CELERY_BROKER_URL=redis://172.17.0.1:6389/11 -CELERY_RESULT_BACKEND=redis://172.17.0.1:6389/12 +CELERY_BROKER_URL=redis://172.17.0.1:6382/11 +CELERY_RESULT_BACKEND=redis://172.17.0.1:6382/12 # Mesage Broker # ------------------------------------------------------------------------------ -MESSAGE_BROKER_URL=amqp://dev:dev@0.0.0.0:5672 +MESSAGE_BROKER_URL=amqp://dev:dev@172.17.0.1:5682 DEFAULT_EXCHANGER=YOUR_DEFAULT_EXCHANGER DEFAULT_QUEUE=YOUR_DEFAULT_QUEUE diff --git a/README.rst b/README.rst index 438c94f..8910c7c 100644 --- a/README.rst +++ b/README.rst @@ -111,7 +111,7 @@ Running the App locally:: celery -A src.app.interfaces.cli.celery_app worker -l INFO -E -B -Q default_queue --concurrency=2 -n default@%h # run Consumer - python -m src.app.consume + python -m src.app.interfaces.cli.consume # run gRPC server python -m src.app.interfaces.grpc.server diff --git a/docker-compose-tests.yml b/docker-compose-tests.yml index 2583767..b62685a 100644 --- a/docker-compose-tests.yml +++ b/docker-compose-tests.yml @@ -14,6 +14,7 @@ services: - x_test_redis_service - x_test_rabbit_mq_service - x_test_psql_db_service + - x_test_kafka_service x_test_psql_db_service: image: postgres:latest @@ -31,6 +32,11 @@ services: env_file: - ./.launch/tests/.test_env + 
x_test_redis_service: + image: redis:latest + ports: + - "6382:6379" + x_test_rabbit_mq_service: image: rabbitmq:3.11.6-management ports: @@ -39,7 +45,30 @@ services: RABBITMQ_DEFAULT_USER: dev RABBITMQ_DEFAULT_PASS: dev - x_test_redis_service: - image: redis:latest + x_test_kafka_service: + image: confluentinc/cp-kafka:7.4.0 ports: - - "6382:6379" + - "9094:9094" + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: x_test_zookeeper_service:2181 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://x_test_kafka_service:29092,PLAINTEXT_HOST://localhost:9094 + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 + KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + depends_on: + - x_test_zookeeper_service + healthcheck: + test: ["CMD", "kafka-topics", "--bootstrap-server", "localhost:9094", "--list"] + interval: 5s + timeout: 10s + retries: 5 + + x_test_zookeeper_service: + image: confluentinc/cp-zookeeper:latest + ports: + - "2183:2181" + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 diff --git a/src/app/application/common/services/base.py b/src/app/application/common/services/base.py index ebadd08..20e0466 100644 --- a/src/app/application/common/services/base.py +++ b/src/app/application/common/services/base.py @@ -1,7 +1,7 @@ from abc import ABC from typing import Any, Dict, Generic, List, Optional, Tuple, Type -from src.app.infrastructure.repositories.base import AbstractBaseRepository, OuterGenericType +from src.app.infrastructure.repositories.base.abstract import AbstractBaseRepository, OuterGenericType class AbstractBaseApplicationService(ABC): diff --git a/src/app/config/settings.py b/src/app/config/settings.py index 237402f..2168d60 100644 --- a/src/app/config/settings.py +++ b/src/app/config/settings.py @@ -74,7 +74,7 @@ class SettingsBase(PydanticSettings): # Message Broker Settings # -------------------------------------------------------------------------- - MESSAGE_BROKER_URL: str | None = env.str("MESSAGE_BROKER_URL", None) + MESSAGE_BROKER_URL: str = env.str("MESSAGE_BROKER_URL") or "" DEFAULT_EXCHANGER: str = env.str("DEFAULT_EXCHANGER", "default_exchanger") DEFAULT_QUEUE: str = env.str("DEFAULT_QUEUE", "default_queue") diff --git a/src/app/infrastructure/messaging/clients/kafka_client.py b/src/app/infrastructure/messaging/clients/kafka_client.py index 4db3890..44949f3 100644 --- a/src/app/infrastructure/messaging/clients/kafka_client.py +++ b/src/app/infrastructure/messaging/clients/kafka_client.py @@ -2,7 +2,7 @@ import datetime as dt from typing import Callable, List from loguru import logger -from aiokafka import AIOKafkaConsumer, AIOKafkaProducer, ConsumerRecord, TopicPartition +from aiokafka import AIOKafkaConsumer, AIOKafkaProducer, ConsumerRecord, TopicPartition, AIOKafkaClient class KafkaClient: @@ -14,6 +14,20 @@ class KafkaClient: def __init__(self, message_broker_url: str) -> None: self.message_broker_url = message_broker_url + async def is_healthy(self) -> bool: + client = AIOKafkaClient(bootstrap_servers=self.message_broker_url) + try: + await client.bootstrap() + metadata = await client.fetch_all_metadata() + # Check if brokers is a method or property + brokers = metadata.brokers() if callable(metadata.brokers) else metadata.brokers + return len(brokers) > 0 + except Exception as ex: + logger.error(f"{ex}") + return False + finally: + await client.close() + async def produce_messages(self, topic: str, partition: int, 
messages: List[dict], **kwargs: dict) -> None: producer = AIOKafkaProducer( bootstrap_servers=self.message_broker_url, diff --git a/src/app/infrastructure/messaging/clients/rabbitmq_client.py b/src/app/infrastructure/messaging/clients/rabbitmq_client.py index 3c383a2..0cf7127 100644 --- a/src/app/infrastructure/messaging/clients/rabbitmq_client.py +++ b/src/app/infrastructure/messaging/clients/rabbitmq_client.py @@ -27,6 +27,15 @@ def __init__(self, message_broker_url: str) -> None: self.__connection_pool: Pool = Pool(self.__get_connection, max_size=self.__connections_pool_max_size) self.__channel_pool: Pool = Pool(self.__get_channel, max_size=self.__channel_pool_max_size) + async def is_healthy(self) -> bool: + try: + connection_ = await aio_pika.connect_robust(self.message_broker_url) + await connection_.close() + return True + except Exception as ex: + logger.error(f"{ex}") + return False + async def __get_connection(self) -> AbstractRobustConnection: while True: try: diff --git a/src/app/infrastructure/messaging/mq_client.py b/src/app/infrastructure/messaging/mq_client.py index c94de07..49385cf 100644 --- a/src/app/infrastructure/messaging/mq_client.py +++ b/src/app/infrastructure/messaging/mq_client.py @@ -7,6 +7,9 @@ class MessageBrokerProtocol(Protocol): + + async def is_healthy(self) -> bool: ... + async def produce_messages(self, **kwargs: Any) -> None: ... async def consume(self, **kwargs: Any) -> None: ... @@ -31,6 +34,9 @@ def __init__(self, message_broker_type: str, message_broker_url: str) -> None: self._client = client_class(message_broker_url) self.message_broker_type = message_broker_type + async def is_healthy(self) -> bool: + return await self._client.is_healthy() + async def produce_messages( self, exchanger_name: str, # Exchange name, Topic diff --git a/src/app/infrastructure/repositories/base/__init__.py b/src/app/infrastructure/repositories/base/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/infrastructure/repositories/base/abstract.py b/src/app/infrastructure/repositories/base/abstract.py new file mode 100644 index 0000000..c0e0345 --- /dev/null +++ b/src/app/infrastructure/repositories/base/abstract.py @@ -0,0 +1,87 @@ +from abc import ABC +from dataclasses import dataclass +from typing import Any, Dict, Generic, List, Optional, Tuple, Type, TypeVar + +from src.app.infrastructure.extensions.psql_ext.psql_ext import Base + + +class AbstractRepository(ABC): + pass + + +@dataclass +class BaseOutputEntity(ABC): + pass + + +BaseModel = TypeVar("BaseModel", bound=Base) +OuterGenericType = TypeVar("OuterGenericType", bound=BaseOutputEntity) + + +class AbstractBaseRepository(AbstractRepository, Generic[OuterGenericType]): + MODEL: Optional[Type[Base]] = None + + @classmethod + async def count(cls, filter_data: dict) -> int: + raise NotImplementedError + + @classmethod + async def is_exists(cls, filter_data: dict) -> bool: + raise NotImplementedError + + @classmethod + async def get_first( + cls, filter_data: dict, out_dataclass: Optional[OuterGenericType] = None + ) -> OuterGenericType | None: + raise NotImplementedError + + @classmethod + async def get_list( + cls, filter_data: dict, order_data: Tuple[str] = ("id",), out_dataclass: Optional[OuterGenericType] = None + ) -> List[OuterGenericType]: + raise NotImplementedError + + @classmethod + async def create( + cls, data: dict, is_return_require: bool = False, out_dataclass: Optional[OuterGenericType] = None + ) -> OuterGenericType | None: + raise NotImplementedError + + @classmethod + async 
def create_bulk( + cls, items: List[dict], is_return_require: bool = False, out_dataclass: Optional[OuterGenericType] = None + ) -> List[OuterGenericType] | None: + raise NotImplementedError + + @classmethod + async def update( + cls, + filter_data: dict, + data: Dict[str, Any], + is_return_require: bool = False, + out_dataclass: Optional[OuterGenericType] = None, + ) -> OuterGenericType | None: + raise NotImplementedError + + @classmethod + async def update_bulk( + cls, items: List[dict], is_return_require: bool = False, out_dataclass: Optional[OuterGenericType] = None + ) -> List[OuterGenericType] | None: + raise NotImplementedError + + @classmethod + async def update_or_create( + cls, + filter_data: dict, + data: Dict[str, Any], + is_return_require: bool = False, + out_dataclass: Optional[OuterGenericType] = None, + ) -> OuterGenericType | None: + raise NotImplementedError + + @classmethod + async def remove( + cls, + filter_data: dict, + ) -> None: + raise NotImplementedError diff --git a/src/app/infrastructure/repositories/base.py b/src/app/infrastructure/repositories/base/base_psql_repository.py similarity index 85% rename from src/app/infrastructure/repositories/base.py rename to src/app/infrastructure/repositories/base/base_psql_repository.py index 8f8b66b..c61e5db 100644 --- a/src/app/infrastructure/repositories/base.py +++ b/src/app/infrastructure/repositories/base/base_psql_repository.py @@ -1,98 +1,16 @@ -from abc import ABC -from copy import deepcopy import datetime as dt -from dataclasses import dataclass, fields, make_dataclass -from typing import Any, Callable, Dict, Generic, List, Optional, Tuple, Type, TypeVar +from copy import deepcopy +from dataclasses import fields, make_dataclass +from typing import Any, Callable, Dict, Generic, List, Optional, Tuple, Type from sqlalchemy import delete, exists, func, insert, inspect, select, Select, String, text, update -from src.app.infrastructure.utils.common import generate_str from src.app.infrastructure.extensions.psql_ext.psql_ext import Base, get_session +from src.app.infrastructure.repositories.base.abstract import AbstractBaseRepository, OuterGenericType, BaseModel +from src.app.infrastructure.utils.common import generate_str -class AbstractRepository(ABC): - pass - - -@dataclass -class BaseOutputEntity(ABC): - pass - - -BaseModel = TypeVar("BaseModel", bound=Base) -OuterGenericType = TypeVar("OuterGenericType", bound=BaseOutputEntity) - - -class AbstractBaseRepository(AbstractRepository, Generic[OuterGenericType]): - MODEL: Optional[Type[Base]] = None - - @classmethod - async def count(cls, filter_data: dict) -> int: - raise NotImplementedError - - @classmethod - async def is_exists(cls, filter_data: dict) -> bool: - raise NotImplementedError - - @classmethod - async def get_first( - cls, filter_data: dict, out_dataclass: Optional[OuterGenericType] = None - ) -> OuterGenericType | None: - raise NotImplementedError - - @classmethod - async def get_list( - cls, filter_data: dict, order_data: Tuple[str] = ("id",), out_dataclass: Optional[OuterGenericType] = None - ) -> List[OuterGenericType]: - raise NotImplementedError - - @classmethod - async def create( - cls, data: dict, is_return_require: bool = False, out_dataclass: Optional[OuterGenericType] = None - ) -> OuterGenericType | None: - raise NotImplementedError - - @classmethod - async def create_bulk( - cls, items: List[dict], is_return_require: bool = False, out_dataclass: Optional[OuterGenericType] = None - ) -> List[OuterGenericType] | None: - raise NotImplementedError - 
- @classmethod - async def update( - cls, - filter_data: dict, - data: Dict[str, Any], - is_return_require: bool = False, - out_dataclass: Optional[OuterGenericType] = None, - ) -> OuterGenericType | None: - raise NotImplementedError - - @classmethod - async def update_bulk( - cls, items: List[dict], is_return_require: bool = False, out_dataclass: Optional[OuterGenericType] = None - ) -> List[OuterGenericType] | None: - raise NotImplementedError - - @classmethod - async def update_or_create( - cls, - filter_data: dict, - data: Dict[str, Any], - is_return_require: bool = False, - out_dataclass: Optional[OuterGenericType] = None, - ) -> OuterGenericType | None: - raise NotImplementedError - - @classmethod - async def remove( - cls, - filter_data: dict, - ) -> None: - raise NotImplementedError - - -class BaseSQLAsyncDrivenBaseRepository(AbstractBaseRepository[OuterGenericType], Generic[OuterGenericType]): +class BasePSQLRepository(AbstractBaseRepository[OuterGenericType], Generic[OuterGenericType]): MODEL: Optional[Type[Base]] = None __ATR_SEPARATOR: str = "__" @@ -106,13 +24,9 @@ class BaseSQLAsyncDrivenBaseRepository(AbstractBaseRepository[OuterGenericType], "in": lambda stmt, key1, _, v: stmt.where(key1.in_(v)), # does not work with None "not_in": lambda stmt, key1, _, v: stmt.where(key1.not_in(v)), # does not work with None "like": lambda stmt, key1, _, v: stmt.filter(key1.cast(String).like(f"%{str(v)}%")), - "not_like_all": lambda stmt, key1, _, v: BaseSQLAsyncDrivenBaseRepository.__not_like_all(stmt, key1, v), - "jsonb_like": lambda stmt, key1, key_2, v: BaseSQLAsyncDrivenBaseRepository.__jsonb_like( - stmt, key1, key_2, v - ), - "jsonb_not_like": lambda stmt, key1, key_2, v: BaseSQLAsyncDrivenBaseRepository.__jsonb_not_like( - stmt, key1, key_2, v - ), + "not_like_all": lambda stmt, key1, _, v: BasePSQLRepository.__not_like_all(stmt, key1, v), + "jsonb_like": lambda stmt, key1, key_2, v: BasePSQLRepository.__jsonb_like(stmt, key1, key_2, v), + "jsonb_not_like": lambda stmt, key1, key_2, v: BasePSQLRepository.__jsonb_not_like(stmt, key1, key_2, v), } @staticmethod @@ -372,6 +286,9 @@ async def create_bulk( # Add timestamps to all items cls._set_timestamps_on_create(items=items_copy) + # Normalize data to handle mixed completeness + cls._normalize_bulk_data(items=items_copy) + async with get_session(expire_on_commit=True) as session: model_class = cls.model() # type: ignore model_table = model_class.__table__ # type: ignore @@ -487,6 +404,34 @@ def _set_timestamps_on_create(cls, items: List[dict]) -> None: if hasattr(cls.model(), "created_at") and "created_at" not in item: item["created_at"] = dt_ + @classmethod + def _normalize_bulk_data(cls, items: List[dict]) -> None: + """Normalize bulk data to handle mixed field completeness""" + if not items: + return + + # Get all unique keys from all items + all_keys: set[str] = set() + for item in items: + all_keys.update(item.keys()) + + # Get model column defaults and nullable info + model_class = cls.model() # type: ignore + model_table = model_class.__table__ # type: ignore + + # For each item, ensure it has all fields with appropriate defaults + for item in items: + for key in all_keys: + if key not in item: + # Check if column exists in model + if hasattr(model_class, key): + column = getattr(model_table.c, key, None) + if column is not None: + # Only add explicit None if column is nullable and has no default + if column.nullable and column.default is None and column.server_default is None: + item[key] = None + # Don't add anything for 
columns with defaults - let database handle it + @classmethod async def _bulk_update_with_returning( cls, session: Any, items: List[dict], out_dataclass: Optional[OuterGenericType] = None @@ -522,10 +467,15 @@ async def update_or_create( data_tmp = deepcopy(data) data_tmp.pop("id", None) data_tmp.pop("uuid", None) - item = await cls.update(filter_data=filter_data, data=data_tmp, is_return_require=is_return_require) + item = await cls.update( + filter_data=filter_data, + data=data_tmp, + is_return_require=is_return_require, + out_dataclass=out_dataclass, + ) return item else: - item = await cls.create(data=data, is_return_require=is_return_require) + item = await cls.create(data=data, is_return_require=is_return_require, out_dataclass=out_dataclass) return item @classmethod diff --git a/src/app/infrastructure/repositories/container.py b/src/app/infrastructure/repositories/container.py index 0f52e10..ce765e9 100644 --- a/src/app/infrastructure/repositories/container.py +++ b/src/app/infrastructure/repositories/container.py @@ -1,10 +1,10 @@ from typing import NamedTuple, Type -from src.app.infrastructure.repositories.users_repository import UsersRepository +from src.app.infrastructure.repositories.users_repository import UsersPSQLRepository class RepositoriesContainer(NamedTuple): - users_repository: Type[UsersRepository] + users_repository: Type[UsersPSQLRepository] -container = RepositoriesContainer(users_repository=UsersRepository) +container = RepositoriesContainer(users_repository=UsersPSQLRepository) diff --git a/src/app/infrastructure/repositories/users_repository.py b/src/app/infrastructure/repositories/users_repository.py index ab213b0..5e55071 100644 --- a/src/app/infrastructure/repositories/users_repository.py +++ b/src/app/infrastructure/repositories/users_repository.py @@ -1,6 +1,6 @@ from src.app.infrastructure.persistence.models.container import container as models_container -from src.app.infrastructure.repositories.base import BaseSQLAsyncDrivenBaseRepository +from src.app.infrastructure.repositories.base.base_psql_repository import BasePSQLRepository -class UsersRepository(BaseSQLAsyncDrivenBaseRepository): +class UsersPSQLRepository(BasePSQLRepository): MODEL = models_container.user diff --git a/src/app/interfaces/api/v1/endpoints/debug/resources.py b/src/app/interfaces/api/v1/endpoints/debug/resources.py index d241d88..b3d7be2 100644 --- a/src/app/interfaces/api/v1/endpoints/debug/resources.py +++ b/src/app/interfaces/api/v1/endpoints/debug/resources.py @@ -1,4 +1,4 @@ -from typing import Annotated +from typing import Annotated, Dict from fastapi import APIRouter, Body, Request @@ -36,3 +36,10 @@ async def send_message( await mq_client.produce_messages( messages=[request_body], queue_name=settings.DEFAULT_QUEUE, exchanger_name=settings.DEFAULT_EXCHANGER or "" ) + + +@router.get("/health-check/", status_code=200) +async def health_check( + request: Request, +) -> Dict[str, str]: + return {"status": "ok"} diff --git a/tests/infrastructure/messaging/__init__.py b/tests/infrastructure/messaging/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/infrastructure/messaging/test_kafka_client.py b/tests/infrastructure/messaging/test_kafka_client.py new file mode 100644 index 0000000..565bedb --- /dev/null +++ b/tests/infrastructure/messaging/test_kafka_client.py @@ -0,0 +1,28 @@ +from asyncio import AbstractEventLoop + +import pytest + +from src.app.infrastructure.messaging.mq_client import MQClientProxy + +MESSAGE_BROKER_URLS = ["x_test_kafka_service:29092"] 
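+# NOTE: "x_test_kafka_service:29092" is the in-network PLAINTEXT listener
+# advertised in docker-compose-tests.yml; code running on the host would
+# target the "localhost:9094" PLAINTEXT_HOST listener instead. A minimal
+# standalone sketch of the health check introduced in this patch (same
+# MQClientProxy API as the tests below; asyncio.run stands in for the
+# e_loop fixture):
+#
+#     import asyncio
+#     from src.app.infrastructure.messaging.mq_client import MQClientProxy
+#
+#     client = MQClientProxy(message_broker_type="kafka",
+#                            message_broker_url="x_test_kafka_service:29092")
+#     assert asyncio.run(client.is_healthy())
+#
+# Internally KafkaClient.is_healthy() bootstraps an AIOKafkaClient, fetches
+# the cluster metadata, and reports healthy only when at least one broker is
+# visible; any exception is logged and mapped to False.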
+ + +@pytest.mark.parametrize("broker_url", MESSAGE_BROKER_URLS, scope="function") +def test_mq_is_healthy_kafka(e_loop: AbstractEventLoop, broker_url: str) -> None: + mq_client = MQClientProxy(message_broker_type="kafka", message_broker_url=broker_url) + + is_healthy = e_loop.run_until_complete(mq_client.is_healthy()) + + assert is_healthy is True + + +MESSAGE_BROKER_URLS_FAKE = ["x_test_kafka_service:29092".replace("92", "99")] + + +@pytest.mark.parametrize("broker_url", MESSAGE_BROKER_URLS_FAKE, scope="function") +def test_mq_is_not_healthy_kafka(e_loop: AbstractEventLoop, broker_url: str) -> None: + mq_client = MQClientProxy(message_broker_type="kafka", message_broker_url=broker_url) + + is_healthy = e_loop.run_until_complete(mq_client.is_healthy()) + + assert is_healthy is not True diff --git a/tests/infrastructure/messaging/test_rabbit_mq_client.py b/tests/infrastructure/messaging/test_rabbit_mq_client.py new file mode 100644 index 0000000..6e2878e --- /dev/null +++ b/tests/infrastructure/messaging/test_rabbit_mq_client.py @@ -0,0 +1,29 @@ +from asyncio import AbstractEventLoop + +import pytest + +from src.app.config.settings import settings +from src.app.infrastructure.messaging.mq_client import MQClientProxy + +MESSAGE_BROKER_URLS = [settings.MESSAGE_BROKER_URL] + + +@pytest.mark.parametrize("broker_url", MESSAGE_BROKER_URLS, scope="function") +def test_mq_is_healthy_rabbit_mq(e_loop: AbstractEventLoop, broker_url: str) -> None: + mq_client = MQClientProxy(message_broker_type="rabbitmq", message_broker_url=broker_url) + + is_healthy = e_loop.run_until_complete(mq_client.is_healthy()) + + assert is_healthy is True + + +MESSAGE_BROKER_URLS_FAKE = [settings.MESSAGE_BROKER_URL.replace("dev", "non_dev")] + + +@pytest.mark.parametrize("broker_url", MESSAGE_BROKER_URLS_FAKE, scope="function") +def test_mq_is_not_healthy_rabbit_mq(e_loop: AbstractEventLoop, broker_url: str) -> None: + mq_client = MQClientProxy(message_broker_type="rabbitmq", message_broker_url=broker_url) + + is_healthy = e_loop.run_until_complete(mq_client.is_healthy()) + + assert is_healthy is not True diff --git a/tests/infrastructure/repositories/test_repository_general.py b/tests/infrastructure/repositories/test_repository_general.py index cb48ac6..472ef4e 100644 --- a/tests/infrastructure/repositories/test_repository_general.py +++ b/tests/infrastructure/repositories/test_repository_general.py @@ -10,110 +10,139 @@ def test_users_get_list_limit_offset_case_1(e_loop: AbstractEventLoop, users: Any) -> None: + """Test pagination with limit larger than remaining items""" users_repository = repo_container.users_repository - count = len(USERS) - offset = count - 1 + total_users = len(USERS) + offset = total_users - 1 limit = 10 - expected_count = 1 # noqa + expected_count = 1 items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( users_repository.get_list(filter_data={"limit": limit, "offset": offset}, out_dataclass=UserTestAggregate), ) - assert isinstance(items, list) is True + + assert isinstance(items, list) assert len(items) == expected_count - raw_ids = [i["id"] for i in USERS] + # Verify returned item is valid + expected_ids = {user["id"] for user in USERS} for user in items: - assert isinstance(user, UserTestAggregate) is True - assert user.id in raw_ids + assert isinstance(user, UserTestAggregate) + assert user.id in expected_ids def test_users_get_list_limit_offset_case_2(e_loop: AbstractEventLoop, users: Any) -> None: + """Test pagination with offset near end of dataset""" users_repository = 
repo_container.users_repository - count = len(USERS) - offset = count - 2 + total_users = len(USERS) + offset = total_users - 2 limit = 10 expected_count = 2 - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( # noqa + items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( users_repository.get_list(filter_data={"limit": limit, "offset": offset}, out_dataclass=UserTestAggregate) ) - assert isinstance(items, list) is True + + assert isinstance(items, list) assert len(items) == expected_count - raw_ids = [i["id"] for i in USERS] + expected_ids = {user["id"] for user in USERS} for user in items: - assert isinstance(user, UserTestAggregate) is True - assert user.id in raw_ids + assert isinstance(user, UserTestAggregate) + assert user.id in expected_ids def test_users_get_list_limit_offset_case_3(e_loop: AbstractEventLoop, users: Any) -> None: + """Test pagination with small limit and large offset""" users_repository = repo_container.users_repository - count = len(USERS) - offset = count - 2 + + total_users = len(USERS) + offset = total_users - 2 limit = 1 - expected_count = 1 # noqa + expected_count = 1 + items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( users_repository.get_list(filter_data={"limit": limit, "offset": offset}, out_dataclass=UserTestAggregate) ) - assert isinstance(items, list) is True + + assert isinstance(items, list) assert len(items) == expected_count - raw_ids = [i["id"] for i in USERS] + expected_ids = {user["id"] for user in USERS} for user in items: - assert isinstance(user, UserTestAggregate) is True - assert user.id in raw_ids + assert isinstance(user, UserTestAggregate) + assert user.id in expected_ids -def test_users_get_list_limit_offset_case_4(e_loop: AbstractEventLoop, users: Any) -> None: +def test_users_get_list_with_offset_only(e_loop: AbstractEventLoop, users: Any) -> None: + """Test pagination with offset but no limit""" users_repository = repo_container.users_repository - count = len(USERS) + + total_users = len(USERS) offset = 1 - expected_count = count - offset + expected_count = total_users - offset + items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( users_repository.get_list(filter_data={"offset": offset}, out_dataclass=UserTestAggregate) ) - assert isinstance(items, list) is True + + assert isinstance(items, list) assert len(items) == expected_count - raw_ids = [i["id"] for i in USERS] + expected_ids = {user["id"] for user in USERS} for user in items: - assert isinstance(user, UserTestAggregate) is True - assert user.id in raw_ids + assert isinstance(user, UserTestAggregate) + assert user.id in expected_ids def test_users_get_list_order_by_id_asc(e_loop: AbstractEventLoop, users: Any) -> None: + """Test ordering users by ID in ascending order""" users_repository = repo_container.users_repository - USERS_RAW = sorted(USERS, key=lambda i: i["id"]) - count = len(USERS_RAW) + + users_sorted = sorted(USERS, key=lambda i: i["id"]) + expected_count = len(users_sorted) items: List[UserTestAggregate] = e_loop.run_until_complete( users_repository.get_list(order_data=("id",), out_dataclass=UserTestAggregate) ) - assert isinstance(items, list) is True - assert len(items) == count + assert isinstance(items, list) + assert len(items) == expected_count + + # Verify ordering is correct for index, user in enumerate(items): - assert isinstance(user, UserTestAggregate) is True - assert user.id == USERS_RAW[index]["id"] + assert isinstance(user, UserTestAggregate) + assert user.id == users_sorted[index]["id"] + + # 
Verify items are in ascending order + user_ids = [user.id for user in items] + assert user_ids == sorted(user_ids) def test_users_get_list_order_by_id_desc(e_loop: AbstractEventLoop, users: Any) -> None: + """Test ordering users by ID in descending order""" users_repository = repo_container.users_repository - USERS_RAW = sorted(USERS, key=lambda i: i["id"], reverse=True) - count = len(USERS_RAW) + + users_sorted = sorted(USERS, key=lambda i: i["id"], reverse=True) + expected_count = len(users_sorted) items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( users_repository.get_list(order_data=("-id",), out_dataclass=UserTestAggregate) ) - assert isinstance(items, list) is True - assert len(items) == count + assert isinstance(items, list) + assert len(items) == expected_count + + # Verify ordering is correct for index, user in enumerate(items): - assert isinstance(user, UserTestAggregate) is True - assert user.id == USERS_RAW[index]["id"] + assert isinstance(user, UserTestAggregate) + assert user.id == users_sorted[index]["id"] + + # Verify items are in descending order + user_ids = [user.id for user in items] + assert user_ids == sorted(user_ids, reverse=True) USERS_IN_LOOKUP = [ diff --git a/tests/infrastructure/repositories/test_users_repository.py b/tests/infrastructure/repositories/test_users_repository.py index 0c8fec2..91e55bd 100644 --- a/tests/infrastructure/repositories/test_users_repository.py +++ b/tests/infrastructure/repositories/test_users_repository.py @@ -574,3 +574,404 @@ def test_remove_all(e_loop: AbstractEventLoop, users: Any) -> None: assert count_before == len(USERS) assert count_after == 0 + + +# Validation and error handling test cases +def test_allow_create_user_with_duplicate_email(e_loop: AbstractEventLoop, users: Any) -> None: + """Test documents that the system currently allows duplicate emails""" + users_repository = repo_container.users_repository + + user_data_raw = { + "first_name": "duplicate_test", + "last_name": "duplicate_test", + "email": USERS[0]["email"], # Using existing email + "is_active": True, + } + + count_before = e_loop.run_until_complete(users_repository.count()) + + # The system currently allows duplicate emails - this documents the actual behavior + created_user = e_loop.run_until_complete( + users_repository.create(data=user_data_raw, is_return_require=True, out_dataclass=UserTestAggregate) + ) + + count_after = e_loop.run_until_complete(users_repository.count()) + + # Verify the user was created successfully with duplicate email + assert created_user is not None + assert created_user.email == USERS[0]["email"] + assert created_user.first_name == "duplicate_test" + assert count_after == count_before + 1 + + +@pytest.mark.parametrize( + "invalid_email", + [ + "invalid", + "invalid@", + "@invalid.com", + "invalid@.com", + "", + "spaces in email@test.com", + "missing-domain@", + "@missing-local.com", + ], +) +def test_allow_create_user_with_invalid_email_format( + e_loop: AbstractEventLoop, users: Any, invalid_email: str +) -> None: + users_repository = repo_container.users_repository + + user_data_raw = { + "first_name": "test_user", + "last_name": "test_user", + "email": invalid_email, + "is_active": True, + } + + # Test either validates and fails, or creates successfully (depends on implementation) + # This test documents the actual behavior rather than assuming validation + result = e_loop.run_until_complete( + users_repository.create(data=user_data_raw, is_return_require=True, out_dataclass=UserTestAggregate) + ) + + # If 
creation succeeds, verify the email was stored as-is + if result is not None: + assert result.email == invalid_email + + +def test_update_user_with_invalid_id(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + non_existent_id = 99999 + user_data_raw = { + "first_name": "updated_name", + "last_name": "updated_last_name", + } + + updated_user = e_loop.run_until_complete( + users_repository.update( + filter_data={"id": non_existent_id}, + data=user_data_raw, + is_return_require=True, + out_dataclass=UserTestAggregate, + ) + ) + + assert updated_user is None + + +def test_get_first_with_non_existent_filter(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + user = e_loop.run_until_complete( + users_repository.get_first( + filter_data={"email": "nonexistent@example.com"}, out_dataclass=UserTestAggregate + ) + ) + + assert user is None + + +def test_is_exists_with_non_existent_data(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + exists = e_loop.run_until_complete( + users_repository.is_exists(filter_data={"email": "nonexistent@example.com"}) + ) + + assert exists is False + + +def test_remove_non_existent_user(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + count_before = e_loop.run_until_complete(users_repository.count()) + e_loop.run_until_complete(users_repository.remove(filter_data={"id": 99999})) + count_after = e_loop.run_until_complete(users_repository.count()) + + assert count_before == count_after + + +# Data integrity test cases +def test_create_user_with_minimal_required_fields(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + minimal_user_data = { + "email": "minimal" + generate_str(5) + "@example.com", + "is_active": True, + } + + count_before = e_loop.run_until_complete(users_repository.count()) + created_user = e_loop.run_until_complete( + users_repository.create(data=minimal_user_data, is_return_require=True, out_dataclass=UserTestAggregate) + ) + count_after = e_loop.run_until_complete(users_repository.count()) + + assert isinstance(created_user, UserTestAggregate) + assert created_user.email == minimal_user_data["email"] + assert created_user.is_active == minimal_user_data["is_active"] + assert created_user.id is not None + assert created_user.uuid is not None + assert count_after == count_before + 1 + + +def test_update_user_timestamps_are_handled(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + original_user_data = deepcopy(USERS[0]) + update_data = { + "first_name": "updated_timestamp_test", + "updated_at": dt.datetime.now(dt.UTC).replace(tzinfo=None), + } + + updated_user = e_loop.run_until_complete( + users_repository.update( + filter_data={"id": original_user_data["id"]}, + data=update_data, + is_return_require=True, + out_dataclass=UserTestAggregate, + ) + ) + assert isinstance(updated_user, UserTestAggregate) + + assert updated_user.first_name == update_data["first_name"] + assert updated_user.updated_at == update_data["updated_at"] + assert updated_user.created_at == original_user_data["created_at"] + + +def test_create_user_with_complex_meta_data(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + complex_meta = { + "first_name": "complex_first", + "last_name": "complex_last", + 
"preferences": {"theme": "dark", "language": "en", "notifications": True}, + "profile": {"bio": "Test user bio", "social_links": ["twitter.com/test", "linkedin.com/test"]}, + } + + user_data_raw = { + "email": "complex" + generate_str(5) + "@example.com", + "is_active": True, + "meta": complex_meta, + "first_name": "complex_first", + "last_name": "complex_last", + } + + created_user = e_loop.run_until_complete( + users_repository.create(data=user_data_raw, is_return_require=True, out_dataclass=UserTestAggregate) + ) + + assert isinstance(created_user, UserTestAggregate) + assert created_user.meta == complex_meta + assert created_user.first_name == user_data_raw["first_name"] + assert created_user.last_name == user_data_raw["last_name"] + + +# Bulk operations edge cases +def test_create_bulk_with_empty_list(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + count_before = e_loop.run_until_complete(users_repository.count()) + created_items = e_loop.run_until_complete( + users_repository.create_bulk(items=[], is_return_require=True, out_dataclass=UserTestAggregate) + ) + count_after = e_loop.run_until_complete(users_repository.count()) + + assert created_items == [] + assert count_before == count_after + + +def test_update_bulk_with_mixed_valid_invalid_ids(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + items_to_update = [ + { + "id": USERS[0]["id"], # Valid ID + "uuid": str(USERS[0]["uuid"]), + "first_name": "updated_valid", + }, + { + "id": 99999, # Invalid ID + "uuid": str(uuid.uuid4()), + "first_name": "updated_invalid", + }, + ] + + updated_items = e_loop.run_until_complete( + users_repository.update_bulk( + items=items_to_update, is_return_require=True, out_dataclass=UserTestAggregate + ) + ) + + # Check behavior with mixed valid/invalid IDs + if updated_items: + assert len(updated_items) == 1 # Should have at most one valid update + assert updated_items[0].id == items_to_update[0]["id"] + assert updated_items[0].first_name == items_to_update[0]["first_name"] + + +# Transaction and consistency tests +def test_update_or_create_creates_new_user(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + new_email = "create_new" + generate_str(5) + "@example.com" + user_data = { + "email": new_email, + "first_name": "new_user", + "last_name": "new_user", + "is_active": True, + } + + count_before = e_loop.run_until_complete(users_repository.count()) + result_user = e_loop.run_until_complete( + users_repository.update_or_create( + filter_data={"email": new_email}, + data=user_data, + is_return_require=True, + out_dataclass=UserTestAggregate, + ) + ) + count_after = e_loop.run_until_complete(users_repository.count()) + + assert count_after == count_before + 1 + assert isinstance(result_user, UserTestAggregate) is True + assert result_user is not None + assert result_user.email == new_email + assert result_user.first_name == "new_user" + + +def test_update_or_create_updates_existing_user(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + existing_email = USERS[0]["email"] + update_data = { + "email": existing_email, + "first_name": "updated_existing", + "last_name": "updated_existing", + "is_active": False, + } + + count_before = e_loop.run_until_complete(users_repository.count()) + result_user = e_loop.run_until_complete( + users_repository.update_or_create( + filter_data={"email": 
existing_email}, + data=update_data, + is_return_require=True, + out_dataclass=UserTestAggregate, + ) + ) + count_after = e_loop.run_until_complete(users_repository.count()) + + assert count_after == count_before # No new user created + assert isinstance(result_user, UserTestAggregate) is True + assert result_user is not None + assert result_user.email == existing_email + assert result_user.first_name == "updated_existing" + assert result_user.is_active is False + + +# Performance and boundary tests +def test_get_list_with_large_limit(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + large_limit = 1000 + items = e_loop.run_until_complete( + users_repository.get_list(filter_data={"limit": large_limit}, out_dataclass=UserTestAggregate) + ) + + assert len(items) == len(USERS) # Should return all available users, not more + + +def test_get_list_with_zero_limit(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + items = e_loop.run_until_complete( + users_repository.get_list(filter_data={"limit": 0}, out_dataclass=UserTestAggregate) + ) + + assert len(items) == 0 + + +def test_get_list_with_large_offset(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + large_offset = 1000 + items = e_loop.run_until_complete( + users_repository.get_list(filter_data={"offset": large_offset}, out_dataclass=UserTestAggregate) + ) + + assert len(items) == 0 # Should return empty list + + +# Data type and format tests +def test_create_user_with_special_characters_in_name(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + special_chars_data = { + "email": "special" + generate_str(5) + "@example.com", + "first_name": "José María", + "last_name": "O'Connor-Smith", + "is_active": True, + } + + created_user = e_loop.run_until_complete( + users_repository.create(data=special_chars_data, is_return_require=True, out_dataclass=UserTestAggregate) + ) + assert isinstance(created_user, UserTestAggregate) + + assert created_user.first_name == "José María" + assert created_user.last_name == "O'Connor-Smith" + + +def test_create_user_with_long_strings(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + long_string = "a" * 100 # 100 character string + long_data = { + "email": "long" + generate_str(5) + "@example.com", + "first_name": long_string, + "last_name": long_string, + "is_active": True, + } + + # Test documents actual behavior - either succeeds or fails with constraints + created_user = e_loop.run_until_complete( + users_repository.create(data=long_data, is_return_require=True, out_dataclass=UserTestAggregate) + ) + assert isinstance(created_user, UserTestAggregate) + + # If creation succeeds, verify the data was stored correctly + assert created_user.first_name == long_string + assert created_user.last_name == long_string + + +# State management tests +def test_user_activation_deactivation_cycle(e_loop: AbstractEventLoop, users: Any) -> None: + users_repository = repo_container.users_repository + + user_id = USERS[0]["id"] + + # Deactivate user + deactivated_user = e_loop.run_until_complete( + users_repository.update( + filter_data={"id": user_id}, + data={"is_active": False}, + is_return_require=True, + out_dataclass=UserTestAggregate, + ) + ) + assert isinstance(deactivated_user, UserTestAggregate) + assert deactivated_user.is_active is False + + # 
Reactivate user + reactivated_user = e_loop.run_until_complete( + users_repository.update( + filter_data={"id": user_id}, + data={"is_active": True}, + is_return_require=True, + out_dataclass=UserTestAggregate, + ) + ) + assert isinstance(reactivated_user, UserTestAggregate) + assert reactivated_user.is_active is True From fd100628817dcfb5bcc59046217c9efab33d95e8 Mon Sep 17 00:00:00 2001 From: medniy <20140819+Medniy2000@users.noreply.github.com> Date: Wed, 24 Sep 2025 14:59:43 +0300 Subject: [PATCH 08/13] feature/health check part2 (#22) * init health check grpc * init CommonApplicationService with is_healthy * after beautify * test common_service is_healthy --- poetry.lock | 3 +- pyproject.toml | 1 + src/app/application/container.py | 6 + .../application/services/common_service.py | 23 ++++ .../base/base_redis_repository.py | 46 +++++++ .../repositories/common_psql_repository.py | 16 +++ .../repositories/common_redis_repository.py | 12 ++ .../infrastructure/repositories/container.py | 11 +- .../api/v1/endpoints/debug/resources.py | 6 +- src/app/interfaces/grpc/pb/debug/debug_pb2.py | 14 +- .../grpc/pb/debug/debug_pb2_grpc.py | 54 +++++++- src/app/interfaces/grpc/protos/debug.proto | 11 ++ .../interfaces/grpc/services/debug_service.py | 6 + .../test_common_service_is_healthy.py | 128 ++++++++++++++++++ 14 files changed, 324 insertions(+), 13 deletions(-) create mode 100644 src/app/application/services/common_service.py create mode 100644 src/app/infrastructure/repositories/base/base_redis_repository.py create mode 100644 src/app/infrastructure/repositories/common_psql_repository.py create mode 100644 src/app/infrastructure/repositories/common_redis_repository.py create mode 100644 tests/application/users/services/test_common_service_is_healthy.py diff --git a/poetry.lock b/poetry.lock index 9064d73..a36de1e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -879,7 +879,6 @@ description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" files = [ {file = "greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c"}, {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590"}, @@ -3015,4 +3014,4 @@ propcache = ">=0.2.1" [metadata] lock-version = "2.1" python-versions = "3.12.11" -content-hash = "553fb8d81f67ea7c51950d31416b6355494cf7c79584deb13e013d41463c28da" +content-hash = "dfea5f63160f7a2d1f8a211084d156e8c02252db4a3e678e1b5389e363ddd1a9" diff --git a/pyproject.toml b/pyproject.toml index 15957bd..03f9095 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,7 @@ aiokafka = "^0.12.0" pytz = "^2025.2" grpcio = "^1.69.0" grpcio-tools = "^1.69.0" +greenlet = "^3.2.4" [tool.poetry.group.dev.dependencies] diff --git a/src/app/application/container.py b/src/app/application/container.py index a0f9dab..563fbff 100644 --- a/src/app/application/container.py +++ b/src/app/application/container.py @@ -17,5 +17,11 @@ def auth_service(self) -> Type["src.app.application.services.auth_service.AuthSe return AuthService + @property + def common_service(self) -> 
Type["src.app.application.services.common_service.CommonApplicationService"]: + from src.app.application.services.common_service import CommonApplicationService + + return CommonApplicationService + container = ApplicationServicesContainer() diff --git a/src/app/application/services/common_service.py b/src/app/application/services/common_service.py new file mode 100644 index 0000000..4a76a16 --- /dev/null +++ b/src/app/application/services/common_service.py @@ -0,0 +1,23 @@ +from src.app.infrastructure.messaging.mq_client import mq_client +from src.app.infrastructure.repositories.container import container as repo_container +from src.app.application.common.services.base import AbstractBaseApplicationService +from loguru import logger + + +class CommonApplicationService(AbstractBaseApplicationService): + + @classmethod + async def is_healthy(cls) -> bool: + """Checks if app infrastructure is up and healthy.""" + try: + is_psql_healthy = await repo_container.common_psql_repository.is_healthy() + + is_redis_healthy = await repo_container.common_redis_repository.is_healthy() + + is_message_broker_healthy = await mq_client.is_healthy() + + except Exception as ex: + logger.error(f"Application is not healthy. Reason: {ex}") + return False + + return all([is_psql_healthy, is_redis_healthy, is_message_broker_healthy]) diff --git a/src/app/infrastructure/repositories/base/base_redis_repository.py b/src/app/infrastructure/repositories/base/base_redis_repository.py new file mode 100644 index 0000000..3ac08e5 --- /dev/null +++ b/src/app/infrastructure/repositories/base/base_redis_repository.py @@ -0,0 +1,46 @@ +import json +from typing import Any + +import redis.asyncio as redis + +from src.app.infrastructure.extensions.redis_ext.redis_ext import redis_client +from src.app.infrastructure.repositories.base.abstract import AbstractRepository + + +class BaseRedisRepository(AbstractRepository): + client: redis.Redis = redis_client + + @classmethod + def get_client(cls) -> redis.Redis: + return cls.client + + @classmethod + async def set(cls, key: str, value: dict, expire_in_seconds: int) -> None: + client = cls.get_client() + value_ = json.dumps(value, default=str) + await client.setex(name=key, value=value_, time=expire_in_seconds) + + @classmethod + async def get(cls, key: str) -> Any: + client = cls.get_client() + value_ = await client.get(name=key) + if value_: + return json.loads(value_) + return None + + @classmethod + async def delete(cls, keys: list) -> Any: + client = cls.get_client() + for key in keys: + await client.delete(key) + return None + + @classmethod + async def exists(cls, key: str) -> bool: + client = cls.get_client() + return await client.exists(key) + + @classmethod + async def flush_db(cls) -> None: + client = cls.get_client() + await client.flushdb(asynchronous=True) diff --git a/src/app/infrastructure/repositories/common_psql_repository.py b/src/app/infrastructure/repositories/common_psql_repository.py new file mode 100644 index 0000000..27da23b --- /dev/null +++ b/src/app/infrastructure/repositories/common_psql_repository.py @@ -0,0 +1,16 @@ +from sqlalchemy import text + +from src.app.infrastructure.extensions.psql_ext.psql_ext import get_session +from src.app.infrastructure.repositories.base.abstract import AbstractRepository + + +class CommonPSQLRepository(AbstractRepository): + + @classmethod + async def is_healthy(cls) -> bool: + stmt = """SELECT 1;""" + + async with get_session() as session: + result = await session.execute(statement=text(stmt), params={}) + result = 
result.scalars().first() + return result == 1 diff --git a/src/app/infrastructure/repositories/common_redis_repository.py b/src/app/infrastructure/repositories/common_redis_repository.py new file mode 100644 index 0000000..512e5bb --- /dev/null +++ b/src/app/infrastructure/repositories/common_redis_repository.py @@ -0,0 +1,12 @@ +from src.app.infrastructure.extensions.redis_ext.redis_ext import redis_client +from src.app.infrastructure.repositories.base.base_redis_repository import BaseRedisRepository + + +class CommonRedisRepository(BaseRedisRepository): + client = redis_client + + @classmethod + async def is_healthy(cls) -> bool: + client = cls.get_client() + result = await client.ping() + return result diff --git a/src/app/infrastructure/repositories/container.py b/src/app/infrastructure/repositories/container.py index ce765e9..fb94d0d 100644 --- a/src/app/infrastructure/repositories/container.py +++ b/src/app/infrastructure/repositories/container.py @@ -1,10 +1,19 @@ from typing import NamedTuple, Type +from src.app.infrastructure.repositories.common_psql_repository import CommonPSQLRepository +from src.app.infrastructure.repositories.common_redis_repository import CommonRedisRepository from src.app.infrastructure.repositories.users_repository import UsersPSQLRepository class RepositoriesContainer(NamedTuple): + + common_psql_repository: Type[CommonPSQLRepository] + common_redis_repository: Type[CommonRedisRepository] users_repository: Type[UsersPSQLRepository] -container = RepositoriesContainer(users_repository=UsersPSQLRepository) +container = RepositoriesContainer( + common_psql_repository=CommonPSQLRepository, + common_redis_repository=CommonRedisRepository, + users_repository=UsersPSQLRepository, +) diff --git a/src/app/interfaces/api/v1/endpoints/debug/resources.py b/src/app/interfaces/api/v1/endpoints/debug/resources.py index b3d7be2..b981c05 100644 --- a/src/app/interfaces/api/v1/endpoints/debug/resources.py +++ b/src/app/interfaces/api/v1/endpoints/debug/resources.py @@ -1,7 +1,7 @@ from typing import Annotated, Dict from fastapi import APIRouter, Body, Request - +from src.app.application.container import container as services_container from src.app.interfaces.api.v1.endpoints.debug.schemas.req_schemas import MessageReq from src.app.config.settings import settings from src.app.infrastructure.messaging.mq_client import mq_client @@ -42,4 +42,6 @@ async def send_message( async def health_check( request: Request, ) -> Dict[str, str]: - return {"status": "ok"} + is_healthy = await services_container.common_service.is_healthy() + status = "OK" if is_healthy else "NOT OK" + return {"status": status} diff --git a/src/app/interfaces/grpc/pb/debug/debug_pb2.py b/src/app/interfaces/grpc/pb/debug/debug_pb2.py index 0b3b748..2c1c084 100644 --- a/src/app/interfaces/grpc/pb/debug/debug_pb2.py +++ b/src/app/interfaces/grpc/pb/debug/debug_pb2.py @@ -2,7 +2,7 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
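 # The /health-check change above makes the FastAPI handler report real
 # infrastructure state ("OK"/"NOT OK") via CommonApplicationService. A
 # minimal probe sketch — the mount path /api/v1/debug/health-check and
 # port 8081 are assumptions; adjust to the actual router setup:
 #
 #     import httpx
 #
 #     resp = httpx.get("http://127.0.0.1:8081/api/v1/debug/health-check")
 #     assert resp.json()["status"] in ("OK", "NOT OK")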
# NO CHECKED-IN PROTOBUF GENCODE # source: debug.proto -# Protobuf Python Version: 5.29.0 +# Protobuf Python Version: 6.31.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -10,7 +10,7 @@ from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion(_runtime_version.Domain.PUBLIC, 5, 29, 0, "", "debug.proto") +_runtime_version.ValidateProtobufRuntimeVersion(_runtime_version.Domain.PUBLIC, 6, 31, 1, "", "debug.proto") # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -20,7 +20,7 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x0b\x64\x65\x62ug.proto\x12\rgrpc.pb.debug\x1a\x19google/protobuf/any.proto"\x0f\n\rSayMeqDataReq"*\n\x0bTestDataReq\x12\x0c\n\x04year\x18\x01 \x01(\t\x12\r\n\x05month\x18\x02 \x01(\t"?\n\nMessageReq\x12\r\n\x05\x65vent\x18\x01 \x01(\t\x12"\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any".\n\x0bMessageResp\x12\x0e\n\x06status\x18\x01 \x01(\x08\x12\x0f\n\x07message\x18\x02 \x01(\t2V\n\x0c\x44\x65\x62ugService\x12\x46\n\x0bSendMessage\x12\x19.grpc.pb.debug.MessageReq\x1a\x1a.grpc.pb.debug.MessageResp"\x00\x62\x06proto3' + b'\n\x0b\x64\x65\x62ug.proto\x12\rgrpc.pb.debug\x1a\x19google/protobuf/any.proto"\x0f\n\rSayMeqDataReq"*\n\x0bTestDataReq\x12\x0c\n\x04year\x18\x01 \x01(\t\x12\r\n\x05month\x18\x02 \x01(\t"?\n\nMessageReq\x12\r\n\x05\x65vent\x18\x01 \x01(\t\x12"\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any".\n\x0bMessageResp\x12\x0e\n\x06status\x18\x01 \x01(\x08\x12\x0f\n\x07message\x18\x02 \x01(\t"\x10\n\x0eHealthCheckReq"!\n\x0fHealthCheckResp\x12\x0e\n\x06status\x18\x01 \x01(\t2\xa6\x01\n\x0c\x44\x65\x62ugService\x12\x46\n\x0bSendMessage\x12\x19.grpc.pb.debug.MessageReq\x1a\x1a.grpc.pb.debug.MessageResp"\x00\x12N\n\x0bHealthCheck\x12\x1d.grpc.pb.debug.HealthCheckReq\x1a\x1e.grpc.pb.debug.HealthCheckResp"\x00\x62\x06proto3' ) _globals = globals() @@ -36,6 +36,10 @@ _globals["_MESSAGEREQ"]._serialized_end = 181 _globals["_MESSAGERESP"]._serialized_start = 183 _globals["_MESSAGERESP"]._serialized_end = 229 - _globals["_DEBUGSERVICE"]._serialized_start = 231 - _globals["_DEBUGSERVICE"]._serialized_end = 317 + _globals["_HEALTHCHECKREQ"]._serialized_start = 231 + _globals["_HEALTHCHECKREQ"]._serialized_end = 247 + _globals["_HEALTHCHECKRESP"]._serialized_start = 249 + _globals["_HEALTHCHECKRESP"]._serialized_end = 282 + _globals["_DEBUGSERVICE"]._serialized_start = 285 + _globals["_DEBUGSERVICE"]._serialized_end = 451 # @@protoc_insertion_point(module_scope) diff --git a/src/app/interfaces/grpc/pb/debug/debug_pb2_grpc.py b/src/app/interfaces/grpc/pb/debug/debug_pb2_grpc.py index 647e9b4..dee7faf 100644 --- a/src/app/interfaces/grpc/pb/debug/debug_pb2_grpc.py +++ b/src/app/interfaces/grpc/pb/debug/debug_pb2_grpc.py @@ -1,10 +1,11 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
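 # The stub changes below pair with the HealthCheck RPC added to debug.proto;
 # regeneration goes through grpcio-tools, roughly as follows (proto and
 # output paths are assumptions based on the repo layout):
 #
 #     python -m grpc_tools.protoc -I src/app/interfaces/grpc/protos \
 #         --python_out=src/app/interfaces/grpc/pb/debug \
 #         --grpc_python_out=src/app/interfaces/grpc/pb/debug debug.proto
 #
 # A sketch of calling the new RPC from an async client, assuming the
 # GRPC_HOST/GRPC_PORT defaults from .env.example:
 #
 #     import asyncio
 #     import grpc
 #     from src.app.interfaces.grpc.pb.debug import debug_pb2, debug_pb2_grpc
 #
 #     async def probe() -> None:
 #         async with grpc.aio.insecure_channel("127.0.0.1:50051") as channel:
 #             stub = debug_pb2_grpc.DebugServiceStub(channel)
 #             resp = await stub.HealthCheck(debug_pb2.HealthCheckReq())
 #             print(resp.status)  # "OK" only if psql, redis and the broker respond
 #
 #     asyncio.run(probe())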
"""Client and server classes corresponding to protobuf-defined services.""" import grpc +import warnings -import src.app.interfaces.grpc.pb.debug.debug_pb2 as debug__pb2 +from src.app.interfaces.grpc.pb.debug import debug_pb2 as debug__pb2 -GRPC_GENERATED_VERSION = "1.70.0" +GRPC_GENERATED_VERSION = "1.75.0" GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -18,7 +19,7 @@ if _version_not_supported: raise RuntimeError( f"The grpc package installed is at version {GRPC_VERSION}," - + " but the generated code in debug_pb2_grpc.py depends on" + + f" but the generated code in debug_pb2_grpc.py depends on" + f" grpcio>={GRPC_GENERATED_VERSION}." + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." @@ -40,6 +41,12 @@ def __init__(self, channel): response_deserializer=debug__pb2.MessageResp.FromString, _registered_method=True, ) + self.HealthCheck = channel.unary_unary( + "/grpc.pb.debug.DebugService/HealthCheck", + request_serializer=debug__pb2.HealthCheckReq.SerializeToString, + response_deserializer=debug__pb2.HealthCheckResp.FromString, + _registered_method=True, + ) class DebugServiceServicer(object): @@ -51,6 +58,12 @@ def SendMessage(self, request, context): context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") + def HealthCheck(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + def add_DebugServiceServicer_to_server(servicer, server): rpc_method_handlers = { @@ -59,6 +72,11 @@ def add_DebugServiceServicer_to_server(servicer, server): request_deserializer=debug__pb2.MessageReq.FromString, response_serializer=debug__pb2.MessageResp.SerializeToString, ), + "HealthCheck": grpc.unary_unary_rpc_method_handler( + servicer.HealthCheck, + request_deserializer=debug__pb2.HealthCheckReq.FromString, + response_serializer=debug__pb2.HealthCheckResp.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler("grpc.pb.debug.DebugService", rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) @@ -98,3 +116,33 @@ def SendMessage( metadata, _registered_method=True, ) + + @staticmethod + def HealthCheck( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/grpc.pb.debug.DebugService/HealthCheck", + debug__pb2.HealthCheckReq.SerializeToString, + debug__pb2.HealthCheckResp.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) diff --git a/src/app/interfaces/grpc/protos/debug.proto b/src/app/interfaces/grpc/protos/debug.proto index 6e76996..1bcd131 100644 --- a/src/app/interfaces/grpc/protos/debug.proto +++ b/src/app/interfaces/grpc/protos/debug.proto @@ -22,6 +22,17 @@ message MessageResp { string message = 2; } + +message HealthCheckReq { +} + +message HealthCheckResp { + string status = 1; +} + + service DebugService { rpc SendMessage(MessageReq) returns (MessageResp) {} + + rpc HealthCheck(HealthCheckReq) returns (HealthCheckResp) {} } diff --git a/src/app/interfaces/grpc/services/debug_service.py 
b/src/app/interfaces/grpc/services/debug_service.py
index faa0305..fee8e1f 100644
--- a/src/app/interfaces/grpc/services/debug_service.py
+++ b/src/app/interfaces/grpc/services/debug_service.py
@@ -8,6 +8,7 @@
 from google.protobuf.json_format import MessageToJson
 
 from src.app.interfaces.grpc.pb.debug.debug_pb2_grpc import DebugServiceServicer
+from src.app.application.container import container as services_container
 
 
 class DebugService(DebugServiceServicer):
@@ -23,3 +24,8 @@ async def SendMessage(self, request, context) -> pb2.MessageResp:  # type: ignor
     )
         logger.debug(f"Sent message `{event}` with data {str(data)}")
         return pb2.MessageResp(status=True, message="OK")  # type: ignore
+
+    async def HealthCheck(self, request, context) -> pb2.HealthCheckResp:  # type: ignore
+        is_healthy = await services_container.common_service.is_healthy()
+        status = "OK" if is_healthy else "NOT OK"
+        return pb2.HealthCheckResp(status=status)  # type: ignore
diff --git a/tests/application/users/services/test_common_service_is_healthy.py b/tests/application/users/services/test_common_service_is_healthy.py
new file mode 100644
index 0000000..de6415b
--- /dev/null
+++ b/tests/application/users/services/test_common_service_is_healthy.py
@@ -0,0 +1,128 @@
+from asyncio import AbstractEventLoop
+from typing import Generator, Tuple, Any
+from unittest.mock import patch
+
+import pytest
+
+from src.app.application.container import container as service_container
+
+
+@pytest.fixture
+def mock_health_services() -> Generator[Tuple[Any, Any, Any], None, None]:
+    with (
+        patch(
+            "src.app.infrastructure.repositories.common_psql_repository.CommonPSQLRepository.is_healthy"
+        ) as mock_psql,
+        patch(
+            "src.app.infrastructure.repositories.common_redis_repository.CommonRedisRepository.is_healthy"
+        ) as mock_redis,
+        patch("src.app.infrastructure.messaging.mq_client.mq_client.is_healthy") as mock_mq,
+    ):
+        yield mock_psql, mock_redis, mock_mq
+
+
+IS_HEALTHY_CASES = [
+    {"psql_val": True, "redis_val": True, "mq_val": True, "expected": True},
+    {"psql_val": False, "redis_val": True, "mq_val": True, "expected": False},
+    {"psql_val": True, "redis_val": False, "mq_val": True, "expected": False},
+    {"psql_val": True, "redis_val": True, "mq_val": False, "expected": False},
+    {"psql_val": True, "redis_val": False, "mq_val": False, "expected": False},
+    {"psql_val": False, "redis_val": True, "mq_val": False, "expected": False},
+]
+
+
+@pytest.mark.parametrize("data", IS_HEALTHY_CASES, scope="function")
+def test_common_service_is_healthy(
+    e_loop: AbstractEventLoop, mock_health_services: Tuple[Any, Any, Any], data: dict
+) -> None:
+    mock_psql, mock_redis, mock_mq = mock_health_services
+    expected_val, psql_val, redis_val, mq_val = (
+        data["expected"],
+        data["psql_val"],
+        data["redis_val"],
+        data["mq_val"],
+    )
+    mock_psql.return_value = psql_val
+    mock_redis.return_value = redis_val
+    mock_mq.return_value = mq_val
+
+    result = e_loop.run_until_complete(service_container.common_service.is_healthy())
+
+    assert result is expected_val
+
+
+IS_HEALTHY_CASES_FAILED = [
+    {"psql_val": True, "redis_val": True, "mq_val": False, "expected": False},
+    {"psql_val": True, "redis_val": False, "mq_val": False, "expected": False},
+    {"psql_val": False, "redis_val": True, "mq_val": False, "expected": False},
+]
+
+
+@pytest.mark.parametrize("data", IS_HEALTHY_CASES_FAILED, scope="function")
+def test_common_service_is_healthy_with_psql_exception(
+    e_loop: AbstractEventLoop, mock_health_services: Tuple[Any, Any, Any], data: dict
+) -> None:
+    
mock_psql, mock_redis, mock_mq = mock_health_services + expected_val, psql_val, redis_val, mq_val = ( + data["expected"], + data["psql_val"], + data["redis_val"], + data["mq_val"], + ) + mock_psql.return_value = psql_val + mock_redis.return_value = redis_val + mock_mq.return_value = mq_val + mock_psql.side_effect = Exception("Connection failed") + + result = e_loop.run_until_complete(service_container.common_service.is_healthy()) + + assert result is expected_val + + +@pytest.mark.parametrize("data", IS_HEALTHY_CASES_FAILED, scope="function") +def test_common_service_is_healthy_with_mq_exception( + e_loop: AbstractEventLoop, mock_health_services: Tuple[Any, Any, Any], data: dict +) -> None: + mock_psql, mock_redis, mock_mq = mock_health_services + expected_val, psql_val, redis_val, mq_val = ( + data["expected"], + data["psql_val"], + data["redis_val"], + data["mq_val"], + ) + mock_psql.return_value = psql_val + mock_redis.return_value = redis_val + mock_mq.return_value = mq_val + mock_mq.side_effect = Exception("Connection failed") + + result = e_loop.run_until_complete(service_container.common_service.is_healthy()) + + assert result is expected_val + + +@pytest.mark.parametrize("data", IS_HEALTHY_CASES_FAILED, scope="function") +def test_common_service_is_healthy_with_redis_exception( + e_loop: AbstractEventLoop, mock_health_services: Tuple[Any, Any, Any], data: dict +) -> None: + mock_psql, mock_redis, mock_mq = mock_health_services + expected_val, psql_val, redis_val, mq_val = ( + data["expected"], + data["psql_val"], + data["redis_val"], + data["mq_val"], + ) + mock_psql.return_value = psql_val + mock_redis.return_value = redis_val + mock_mq.return_value = mq_val + mock_redis.side_effect = Exception("Connection failed") + + result = e_loop.run_until_complete(service_container.common_service.is_healthy()) + + assert result is expected_val + + +def test_common_service_is_healthy_real_infrastructure(e_loop: AbstractEventLoop) -> None: + result = e_loop.run_until_complete(service_container.common_service.is_healthy()) + + assert isinstance(result, bool) + assert result is True From e06bfe58abafb143e9e2f5e9b733809ebbd8aed7 Mon Sep 17 00:00:00 2001 From: medniy <20140819+Medniy2000@users.noreply.github.com> Date: Fri, 26 Sep 2025 12:22:52 +0300 Subject: [PATCH 09/13] feature/improve repository (#24) --- .../repositories/base/abstract.py | 4 + .../repositories/base/base_psql_repository.py | 993 +++++++++++++----- .../repositories/test_repository_general.py | 260 +++-- .../repositories/test_users_repository.py | 11 +- 4 files changed, 930 insertions(+), 338 deletions(-) diff --git a/src/app/infrastructure/repositories/base/abstract.py b/src/app/infrastructure/repositories/base/abstract.py index c0e0345..ee3e04e 100644 --- a/src/app/infrastructure/repositories/base/abstract.py +++ b/src/app/infrastructure/repositories/base/abstract.py @@ -5,6 +5,10 @@ from src.app.infrastructure.extensions.psql_ext.psql_ext import Base +class RepositoryError(Exception): + pass + + class AbstractRepository(ABC): pass diff --git a/src/app/infrastructure/repositories/base/base_psql_repository.py b/src/app/infrastructure/repositories/base/base_psql_repository.py index c61e5db..0c9f88e 100644 --- a/src/app/infrastructure/repositories/base/base_psql_repository.py +++ b/src/app/infrastructure/repositories/base/base_psql_repository.py @@ -1,133 +1,642 @@ import datetime as dt +import re from copy import deepcopy +from datetime import datetime from dataclasses import fields, make_dataclass from typing import Any, 
Callable, Dict, Generic, List, Optional, Tuple, Type -from sqlalchemy import delete, exists, func, insert, inspect, select, Select, String, text, update +from sqlalchemy import ( + delete, + exists, + func, + insert, + inspect, + select, + Select, + String, + text, + update, + Column, + JSON, + DateTime, + Boolean, + Float, + Integer, +) from src.app.infrastructure.extensions.psql_ext.psql_ext import Base, get_session -from src.app.infrastructure.repositories.base.abstract import AbstractBaseRepository, OuterGenericType, BaseModel +from src.app.infrastructure.repositories.base.abstract import ( + AbstractBaseRepository, + OuterGenericType, + BaseModel, + RepositoryError, +) from src.app.infrastructure.utils.common import generate_str -class BasePSQLRepository(AbstractBaseRepository[OuterGenericType], Generic[OuterGenericType]): - MODEL: Optional[Type[Base]] = None - - __ATR_SEPARATOR: str = "__" +class PSQLLookupRegistry: LOOKUP_MAP = { - "gt": lambda stmt, key1, _, v: stmt.where(key1 > v), - "gte": lambda stmt, key1, _, v: stmt.where(key1 >= v), - "lt": lambda stmt, key1, _, v: stmt.where(key1 < v), - "lte": lambda stmt, key1, _, v: stmt.where(key1 <= v), - "e": lambda stmt, key1, _, v: stmt.where(key1 == v), - "ne": lambda stmt, key1, _, v: stmt.where(key1 != v), - "in": lambda stmt, key1, _, v: stmt.where(key1.in_(v)), # does not work with None - "not_in": lambda stmt, key1, _, v: stmt.where(key1.not_in(v)), # does not work with None - "like": lambda stmt, key1, _, v: stmt.filter(key1.cast(String).like(f"%{str(v)}%")), - "not_like_all": lambda stmt, key1, _, v: BasePSQLRepository.__not_like_all(stmt, key1, v), - "jsonb_like": lambda stmt, key1, key_2, v: BasePSQLRepository.__jsonb_like(stmt, key1, key_2, v), - "jsonb_not_like": lambda stmt, key1, key_2, v: BasePSQLRepository.__jsonb_not_like(stmt, key1, key_2, v), + "gt": lambda stmt, key1, _, v: PSQLLookupRegistry._greater_than(stmt, key1, v), + "gte": lambda stmt, key1, _, v: PSQLLookupRegistry._greater_than_equal(stmt, key1, v), + "lt": lambda stmt, key1, _, v: PSQLLookupRegistry._less_than(stmt, key1, v), + "lte": lambda stmt, key1, _, v: PSQLLookupRegistry._less_than_equal(stmt, key1, v), + "e": lambda stmt, key1, _, v: PSQLLookupRegistry._equal(stmt, key1, v), + "ne": lambda stmt, key1, _, v: PSQLLookupRegistry._not_equal(stmt, key1, v), + "in": lambda stmt, key1, _, v: PSQLLookupRegistry._in(stmt, key1, v), # does not work with None + "not_in": lambda stmt, key1, _, v: PSQLLookupRegistry._not_in(stmt, key1, v), # does not work with None + "ilike": lambda stmt, key1, _, v: PSQLLookupRegistry._ilike(stmt, key1, v), + "like": lambda stmt, key1, _, v: PSQLLookupRegistry._like(stmt, key1, v), + "not_like_all": lambda stmt, key1, _, v: PSQLLookupRegistry._not_like_all(stmt, key1, v), + "jsonb_like": lambda stmt, key1, key_2, v: PSQLLookupRegistry._jsonb_like(stmt, key1, key_2, v), + "jsonb_not_like": lambda stmt, key1, key_2, v: PSQLLookupRegistry._jsonb_not_like(stmt, key1, key_2, v), } + _JSONB_LOOKUPS = ( + "jsonb_like", + "jsonb_not_like", + ) + + @classmethod + def get_operation(cls, name: str) -> Callable: + """Get lookup operation by name""" + operation = cls.LOOKUP_MAP.get(name, None) + if not operation: + raise RepositoryError(f"Unknown lookup operation: '{name}'. 
Available: {list(cls.LOOKUP_MAP.keys())}")
+        return operation
+
+    @classmethod
+    def apply_lookup(
+        cls, stmt: Any, column: Any, lookup: str, value: Any, jsonb_field: Optional[str] = None
+    ) -> Any:
+        """Apply lookup operation to statement"""
+        operation = cls.get_operation(lookup)
+
+        # Every LOOKUP_MAP entry shares the (stmt, column, jsonb_field, value)
+        # signature, so a single dispatch covers both plain and JSONB
+        # lookups; non-JSONB operations simply ignore jsonb_field.
+        return operation(stmt, column, jsonb_field, value)
+
+    # Core lookup operations
+    @staticmethod
+    def _equal(stmt: Any, column: Any, value: Any) -> Any:
+        """Equal comparison: column = value"""
+        return stmt.where(column == value)
+
+    @staticmethod
+    def _not_equal(stmt: Any, column: Any, value: Any) -> Any:
+        """Not equal comparison: column != value"""
+        return stmt.where(column != value)
+
+    @staticmethod
+    def _greater_than(stmt: Any, column: Any, value: Any) -> Any:
+        """Greater than comparison: column > value"""
+        return stmt.where(column > value)
+
+    @staticmethod
+    def _greater_than_equal(stmt: Any, column: Any, value: Any) -> Any:
+        """Greater than or equal comparison: column >= value"""
+        return stmt.where(column >= value)
+
+    @staticmethod
+    def _less_than(stmt: Any, column: Any, value: Any) -> Any:
+        """Less than comparison: column < value"""
+        return stmt.where(column < value)
+
+    @staticmethod
+    def _less_than_equal(stmt: Any, column: Any, value: Any) -> Any:
+        """Less than or equal comparison: column <= value"""
+        return stmt.where(column <= value)
+
+    @staticmethod
+    def _in(stmt: Any, column: Any, value: List[Any]) -> Any:
+        """IN comparison: column IN (values)"""
+        if not isinstance(value, (list, tuple)):
+            raise RepositoryError("IN lookup requires list or tuple value")
+        return stmt.where(column.in_(value))
+
+    @staticmethod
+    def _not_in(stmt: Any, column: Any, value: List[Any]) -> Any:
+        """NOT IN comparison: column NOT IN (values)"""
+        if not isinstance(value, (list, tuple)):
+            raise RepositoryError("NOT_IN lookup requires list or tuple value")
+        return stmt.where(column.not_in(value))
+
+    @staticmethod
+    def _like(stmt: Any, column: Any, value: Any) -> Any:
+        """LIKE comparison: column LIKE %value%"""
+        return stmt.filter(column.cast(String).like(f"%{str(value)}%"))
 
     @staticmethod
-    def __not_like_all(stmt: Any, k: Any, v: Any) -> Select:
-        for item in v:
-            stmt = stmt.filter(k.cast(String).like(f"%{str(item)}%"))
+    def _ilike(stmt: Any, column: Any, value: Any) -> Any:
+        """Case-insensitive LIKE comparison: column ILIKE %value%"""
+        return stmt.filter(column.cast(String).ilike(f"%{str(value)}%"))
+
+    @staticmethod
+    def _not_like_all(stmt: Any, column: Any, value: List[str]) -> Select:
+        """NOT LIKE ALL: the column must match none of the given substrings"""
+        if not isinstance(value, (list, tuple)):
+            raise RepositoryError("NOT_LIKE_ALL lookup requires list or tuple value")
+
+        for item in value:
+            stmt = stmt.filter(~column.cast(String).like(f"%{str(item)}%"))
         return stmt
 
     @staticmethod
-    def __jsonb_like(stmt: Any, key_1: Any, key_2: Any, v: Any) -> Select:
+    def _jsonb_like(stmt: Any, key_1: Any, key_2: Any, v: Any) -> Select:
         if not key_2:
             return stmt.where(key_1.cast(String).like(f"%{v}%"))
         else:
-            key_ = "jsonb_like" + generate_str(size=4)
+            value_param = "jsonb_like_val_" + generate_str(size=8)
             return stmt.where(
-                text(f"{key_1}->>'{key_2}' LIKE CONCAT('%', CAST(:{key_} AS TEXT), '%')").params(**{key_: str(v)})
+                text(f"{key_1.name}->>:jsonb_key LIKE CONCAT('%', CAST(:{value_param} AS TEXT), '%')").params(
+                    jsonb_key=str(key_2), **{value_param: str(v)}
+                )
             )
 
     @staticmethod
-    def 
__jsonb_not_like(stmt: Any, key_1: Any, key_2: Any, v: Any) -> Select: + def _jsonb_not_like(stmt: Any, key_1: Any, key_2: Any, v: Any) -> Select: if not key_2: return stmt.where(~key_1.cast(String).like(f"%{v}%")) else: - key_ = "jsonb_n_like" + generate_str(size=4) + value_param = "jsonb_not_like_val_" + generate_str(size=8) return stmt.where( - text(f"{key_1}->>'{key_2}' NOT LIKE CONCAT('%', CAST(:{key_} AS TEXT), '%')").params( - **{key_: str(v)} + text(f"{key_1.name}->>:jsonb_key NOT LIKE CONCAT('%', CAST(:{value_param} AS TEXT), '%')").params( + jsonb_key=str(key_2), **{value_param: str(v)} ) ) + +# ========================================== +# SECURITY AND VALIDATION CLASSES +# ========================================== + + +class SecurityConfig: + """Security configuration constants and patterns""" + + MAX_FILTER_COMPLEXITY = 50 + MAX_STRING_LENGTH = 5000 + MAX_LIST_LENGTH = 500 + KEY_MAX_LENGTH = 50 + DANGEROUS_STRINGS = [";", "--", "/*", "*/", "xp_", "sp_"] + ALLOWED_ORDER_PATTERN = re.compile(r"^-?[a-zA-Z_][a-zA-Z0-9_]*$") + ALLOWED_KEY_PATTERN = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*$") + + +class FilterKeyParser: + """Handles parsing of filter keys with lookup operations""" + + ATTRIBUTE_SEPARATOR = "__" + @classmethod - def _parse_filter_key(cls, key: str) -> Tuple[str, str, str]: # type: ignore - splitted: list = key.split(cls.__ATR_SEPARATOR) + def parse(cls, key: str) -> Tuple[str, str, str]: + """ + Parse filter key into components. + + Returns: + Tuple[column_name, jsonb_field, lookup_operation] + + Examples: + "name" -> ("name", "", "e") + "name__ilike" -> ("name", "", "ilike") + "meta__preferences__jsonb_like" -> ("meta", "preferences", "jsonb_like") + """ + parts = key.split(cls.ATTRIBUTE_SEPARATOR) + + if len(parts) == 1: + return parts[0], "", "e" + elif len(parts) == 2: + return parts[0], "", parts[1] + elif len(parts) == 3: + return parts[0], parts[1], parts[2] + else: + raise RepositoryError(f"Invalid filter key format: '{key}'. Too many separators.") + - if len(splitted) == 1: - key_1 = splitted[0] - key_2 = "" - return key_1, key_2, "e" - elif len(splitted) == 2: - key_1 = splitted[0] - key_2 = "" - lookup = splitted[1] - return key_1, key_2, lookup - elif len(splitted) == 3: - key_1 = splitted[0] - key_2 = splitted[1] - lookup = splitted[2] - return key_1, key_2, lookup +class SecurityValidator: + """Handles all security validation for query building""" @classmethod - def _parse_order_data(cls, order_data: Optional[Tuple[str]] = None) -> tuple: - if not order_data: - order_data = () # type: ignore - parsed_order_data = [] - - for order_item in order_data: # type: ignore - order_item_tmp = order_item - if order_item_tmp.startswith("-"): - order_item_tmp = order_item[1:] - parsed_order_data.append(getattr(cls.model(), order_item_tmp).desc()) - else: - parsed_order_data.append(getattr(cls.model(), order_item_tmp).asc()) + def validate_filter_complexity(cls, filter_data: Dict[str, Any]) -> None: + """Validate filter complexity to prevent DoS attacks""" + if len(filter_data) > SecurityConfig.MAX_FILTER_COMPLEXITY: + raise RepositoryError( + f"Filter complexity exceeds maximum allowed ({SecurityConfig.MAX_FILTER_COMPLEXITY})" + ) + + @classmethod + def validate_key_security(cls, key: str) -> None: + """Validate filter key for security""" + if len(key) > SecurityConfig.KEY_MAX_LENGTH: + raise RepositoryError(f"Key too long. 
Maximum {SecurityConfig.KEY_MAX_LENGTH} characters allowed.") + + if not SecurityConfig.ALLOWED_KEY_PATTERN.match(str(key)): + raise RepositoryError( + f"Invalid key format: '{key}'. Only alphanumeric characters and underscores allowed." + ) - return tuple(parsed_order_data) + # Check for potentially dangerous characters + if any(char in key for char in SecurityConfig.DANGEROUS_STRINGS): + raise RepositoryError(f"Key contains dangerous characters: '{key}'") @classmethod - def _parse_order_data_for_target(cls, target: Base, order_data: Optional[Tuple[str]] = None) -> tuple: - if not order_data: - order_data = () # type: ignore - parsed_order_data = [] - - for order_item in order_data: # type: ignore - order_item_tmp = order_item - if order_item_tmp.startswith("-"): - order_item_tmp = order_item[1:] - parsed_order_data.append(getattr(target, order_item_tmp).desc()) - else: - parsed_order_data.append(getattr(target, order_item_tmp).asc()) + def validate_value_security(cls, value: Any) -> None: + """Validate filter value for security""" + if isinstance(value, str): + if len(value) > SecurityConfig.MAX_STRING_LENGTH: + raise RepositoryError( + f"String value too long. Maximum {SecurityConfig.MAX_STRING_LENGTH} characters allowed." + ) + elif isinstance(value, (list, tuple)): + if len(value) > SecurityConfig.MAX_LIST_LENGTH: + raise RepositoryError( + f"List value too long. Maximum {SecurityConfig.MAX_LIST_LENGTH} items allowed." + ) + for item in value: + cls.validate_value_security(item) + + @classmethod + def validate_order_field(cls, order_field: str) -> None: + """Validate order field for security""" + if not SecurityConfig.ALLOWED_ORDER_PATTERN.match(order_field): + raise RepositoryError( + f"Invalid order field format: '{order_field}'. " + f"Only alphanumeric characters, underscores, and optional leading dash allowed." + ) + + if len(order_field) > SecurityConfig.KEY_MAX_LENGTH: + raise RepositoryError( + f"Order field too long. Maximum {SecurityConfig.KEY_MAX_LENGTH} characters allowed." 
+ ) + + +class TypeValidator: + """Handles type validation for different column types""" + + TYPE_VALIDATORS = { + String: "_validate_string_type", + Integer: "_validate_integer_type", + Float: "_validate_float_type", + Boolean: "_validate_boolean_type", + DateTime: "_validate_datetime_type", + JSON: "_validate_json_type", + } + + @classmethod + def validate_value_type(cls, key: str, value: Any, column_type: Any) -> None: + """Validate a single value against column type""" + for type_class, validator_method in cls.TYPE_VALIDATORS.items(): + if isinstance(column_type, type_class): + getattr(cls, validator_method)(key, value) + return + + @classmethod + def _validate_string_type(cls, key: str, value: Any) -> None: + """Validate string type value""" + if not isinstance(value, str): + raise RepositoryError(f"Column '{key}' expects string value, got {type(value).__name__}") + + @classmethod + def _validate_integer_type(cls, key: str, value: Any) -> None: + """Validate integer type value""" + if not isinstance(value, int): + raise RepositoryError(f"Column '{key}' expects integer value, got {type(value).__name__}") + + @classmethod + def _validate_float_type(cls, key: str, value: Any) -> None: + """Validate float type value""" + if not isinstance(value, (int, float)): + raise RepositoryError(f"Column '{key}' expects numeric value, got {type(value).__name__}") + + @classmethod + def _validate_boolean_type(cls, key: str, value: Any) -> None: + """Validate boolean type value""" + if not isinstance(value, bool): + raise RepositoryError(f"Column '{key}' expects boolean value, got {type(value).__name__}") + + @classmethod + def _validate_datetime_type(cls, key: str, value: Any) -> None: + """Validate datetime type value""" + + if not isinstance(value, datetime): + raise RepositoryError(f"Column '{key}' expects datetime value, got {type(value).__name__}") + + @classmethod + def _validate_json_type(cls, key: str, value: Any) -> None: + """Validate JSON type value""" + if not isinstance(value, (dict, list, str, int, float, bool)): + raise RepositoryError(f"Column '{key}' expects JSON-compatible value, got {type(value).__name__}") + + +# ========================================== +# MAIN QUERY BUILDER CLASS +# ========================================== + + +class QueryBuilder: + """ + Main query builder class responsible for constructing SQL queries with security validations. 
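+
+    Illustrative use (a sketch; ``User`` stands in for any mapped model)::
+
+        stmt = select(User)
+        stmt = QueryBuilder.apply_where(stmt, {"email__ilike": "@gmail"}, User)
+        stmt = QueryBuilder.apply_ordering(stmt, ("-id",), User)
+        stmt = QueryBuilder.apply_pagination(stmt, {"limit": 10, "offset": 0})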
+ + This class is organized into logical sections: + - Configuration and Setup + - Column Management + - Filter Processing + - Ordering and Pagination + - Validation Orchestration + """ + + # ========================================== + # CONFIGURATION + # ========================================== + + LOOKUP_REGISTRY_CLASS = PSQLLookupRegistry + PAGINATION_KEYS = ["limit", "offset"] + _MODEL_COLUMNS_CACHE: Dict[str, Dict[str, Column]] = {} + + # ========================================== + # CORE QUERY BUILDING METHODS + # ========================================== + + @classmethod + def lookup_registry(cls) -> Type[PSQLLookupRegistry]: + """Get the lookup registry for SQL operations""" + return cls.LOOKUP_REGISTRY_CLASS + + @classmethod + def _get_model_columns(cls, model_class: Type[Base]) -> Dict[str, Column]: + """Get all columns from the model with caching""" + model_name = model_class.__name__ + + if model_name not in cls._MODEL_COLUMNS_CACHE: + inspector = inspect(model_class) + cls._MODEL_COLUMNS_CACHE[model_name] = {col.name: col for col in inspector.columns} + + return cls._MODEL_COLUMNS_CACHE[model_name] + + @classmethod + def validate_model_key(cls, key: str, model_class: Type[Base]) -> Column: + """Validate that a key exists in the model and return the column""" + column_ = cls._get_model_columns(model_class).get(key, None) + if column_ is None: + raise RepositoryError(f"Column '{key}' does not exist in model {model_class.__name__}") + return column_ + + # ========================================== + # FILTER VALIDATION METHODS + # ========================================== + + @classmethod + def validate_filter_value(cls, column: Column, key: str, value: Any, lookup: str) -> None: + """ + Comprehensive validation of filter values. + + Validates: + - None values against nullable columns + - Security constraints + - Type compatibility + - Lookup-specific requirements + """ + # Check nullable constraints + if cls._is_none_value_invalid(column, value): + raise RepositoryError(f"Column '{key}' cannot be None (not nullable)") + + if value is None: + return # None is valid for nullable columns + + # Security validation + SecurityValidator.validate_value_security(value) + + # Lookup-specific validation + if cls._is_list_based_lookup(lookup): + cls._validate_list_lookup_values(key, value, column.type) + elif cls._is_string_convertible_lookup(lookup): + cls._validate_string_convertible_lookup(key, value, lookup) + else: + # Type validation for single values + TypeValidator.validate_value_type(key, value, column.type) + + @classmethod + def _is_none_value_invalid(cls, column: Column, value: Any) -> bool: + """Check if None value is invalid for the column""" + return value is None and not column.nullable + + @classmethod + def _is_list_based_lookup(cls, lookup: str) -> bool: + """Check if lookup requires list/tuple values""" + return lookup in ("in", "not_in") - return tuple(parsed_order_data) + @classmethod + def _is_string_convertible_lookup(cls, lookup: str) -> bool: + """Check if lookup converts values to strings""" + return lookup in ("not_like_all", "like", "jsonb_like", "jsonb_not_like", "ilike") + + @classmethod + def _validate_list_lookup_values(cls, key: str, value: Any, column_type: Any) -> None: + """Validate values for list-based lookups (IN, NOT IN)""" + if not isinstance(value, (list, tuple)): + raise RepositoryError(f"List-based lookup for column '{key}' requires list/tuple value") + + # Validate each item in the list + for item in value: + if item is not None: + 
TypeValidator.validate_value_type(key, item, column_type) @classmethod - def _apply_where(cls, stmt: Any, filter_data: dict) -> Any: + def _validate_string_convertible_lookup(cls, key: str, value: Any, lookup: str) -> None: + """Validate values for string-convertible lookups (LIKE, ILIKE, etc.)""" + if lookup == "not_like_all" and not isinstance(value, (list, tuple)): + raise RepositoryError(f"Lookup 'not_like_all' for column '{key}' requires list/tuple value") + + # ========================================== + # MAIN QUERY PROCESSING METHODS + # ========================================== + + @classmethod + def apply_where(cls, stmt: Any, filter_data: Optional[Dict[str, Any]], model_class: Type[Base]) -> Any: + """ + Apply WHERE clauses to a SQL statement based on filter data. + + Args: + stmt: SQLAlchemy statement to modify + filter_data: Dictionary of filter conditions + model_class: SQLAlchemy model class for validation + + Returns: + Modified SQLAlchemy statement with WHERE clauses applied + """ + if not filter_data: + return stmt + + # Security validation + SecurityValidator.validate_filter_complexity(filter_data) + + # Process each filter for key, value in filter_data.items(): - key_1, key_2, lookup = cls._parse_filter_key(key) - key_1_ = getattr(cls.model(), key_1, None) - key_2_ = key_2 - if "jsonb" in lookup and key_2: - key_1_ = key_1 - key_2_ = key_2 - stmt = cls.LOOKUP_MAP[lookup](stmt, key_1_, key_2_, value) + if key in cls.PAGINATION_KEYS: + continue + + # Parse and validate the filter key + column_name, jsonb_field, lookup = FilterKeyParser.parse(key) + + # Security validation + SecurityValidator.validate_key_security(column_name) + if jsonb_field: + SecurityValidator.validate_key_security(jsonb_field) + + # Validate column exists and get column object + column = cls.validate_model_key(column_name, model_class) + + # Comprehensive value validation + cls.validate_filter_value(column, key, value, lookup) + + # Apply the lookup operation + stmt = cls.lookup_registry().apply_lookup( + stmt=stmt, column=column, lookup=lookup, value=value, jsonb_field=jsonb_field + ) + return stmt + @classmethod + def apply_ordering(cls, stmt: Any, order_data: Optional[Tuple[str, ...]], model_class: Type[Base]) -> Any: + """ + Apply ORDER BY clause to statement. + + Args: + stmt: SQLAlchemy statement to modify + order_data: Tuple of field names for ordering (prefix with "-" for DESC) + model_class: SQLAlchemy model class for validation + + Returns: + Modified SQLAlchemy statement with ORDER BY clause applied + """ + if not order_data: + return stmt + + try: + parsed_order = cls._parse_order_data(order_data, model_class) + return stmt.order_by(*parsed_order) + except Exception as e: + raise RepositoryError(f"Failed to apply ordering: {str(e)}") + + @classmethod + def apply_pagination(cls, stmt: Any, filter_data: Optional[Dict[str, Any]] = None) -> Any: + """ + Apply LIMIT and OFFSET to statement for pagination. 
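+
+        For example (illustrative values), ``{"limit": 25, "offset": 50}``
+        resolves to ``stmt.offset(50).limit(25)``; both keys are optional.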
+ + Args: + stmt: SQLAlchemy statement to modify + filter_data: Dictionary containing "limit" and "offset" keys + + Returns: + Modified SQLAlchemy statement with pagination applied + """ + if not filter_data: + return stmt + + # Extract and validate pagination parameters + limit = filter_data.get("limit") + offset = filter_data.get("offset", 0) + + # Apply offset if provided + if offset: + if not isinstance(offset, int) or offset < 0: + raise RepositoryError(f"Offset must be non-negative integer, got: {offset}") + stmt = stmt.offset(offset) + + # Apply limit if provided + if limit is not None: + if not isinstance(limit, int) or limit <= 0: + raise RepositoryError(f"Limit must be positive integer, got: {limit}") + stmt = stmt.limit(limit) + + return stmt + + # ========================================== + # HELPER METHODS + # ========================================== + + @classmethod + def _parse_order_data(cls, order_data: Tuple[str, ...], model_class: Type[Base]) -> List[Any]: + """ + Parse order data into SQLAlchemy order clauses. + + Args: + order_data: Tuple of field names, optionally prefixed with "-" for descending order + model_class: SQLAlchemy model class for validation + + Returns: + List of SQLAlchemy order clauses + + Example: + ("name", "-created_at") -> [Column.asc(), Column.desc()] + """ + parsed_order = [] + + for order_item in order_data: + if not isinstance(order_item, str): + raise RepositoryError(f"Order field must be string, got: {type(order_item).__name__}") + + # Security validation + SecurityValidator.validate_order_field(order_item) + + # Parse direction and field name + if order_item.startswith("-"): + field_name = order_item[1:] + direction = "desc" + else: + field_name = order_item + direction = "asc" + + # Validate field exists in model and create order clause + try: + column = cls.validate_model_key(field_name, model_class) + parsed_order.append(getattr(column, direction)()) + except Exception as e: + raise RepositoryError(f"Invalid order field '{field_name}': {str(e)}") + + return parsed_order + + +class BasePSQLRepository(AbstractBaseRepository[OuterGenericType], Generic[OuterGenericType]): + """ + Base PostgreSQL repository with CRUD operations and bulk operations support. 
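+
+    A concrete repository only needs a model binding; a minimal sketch
+    (``User`` and ``UserAggregate`` are assumed names)::
+
+        class UsersPSQLRepository(BasePSQLRepository[UserAggregate]):
+            MODEL = User
+
+        first = await UsersPSQLRepository.get_first(filter_data={"id__gte": 1})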
+ + Organized into logical sections: + - Configuration and Setup + - Dataclass Helpers + - Read Operations + - Write Operations + - Bulk Operations + - Utility Methods + """ + + MODEL: Optional[Type[Base]] = None + _QUERY_BUILDER_CLASS: Type[QueryBuilder] = QueryBuilder + + # ========================================== + # CONFIGURATION AND SETUP + # ========================================== + + @classmethod + def query_builder(cls) -> Type[QueryBuilder]: + """Get the query builder class for this repository""" + if not cls._QUERY_BUILDER_CLASS: + raise AttributeError("Query builder class not configured") + return cls._QUERY_BUILDER_CLASS + @classmethod def model(cls) -> Type[BaseModel]: + """Get the SQLAlchemy model class for this repository""" if not cls.MODEL: - raise AttributeError + raise AttributeError("Model class not configured") return cls.MODEL + # ========================================== + # DATACLASS HELPERS + # ========================================== + @classmethod - def __make_out_dataclass(cls) -> Tuple[Callable, List[str]]: + def _create_dynamic_dataclass(cls) -> Tuple[Callable, List[str]]: + """Create a dynamic dataclass from the model structure""" model = cls.model() # type: ignore columns = inspect(model).c field_names = [column.name for column in columns] @@ -146,25 +655,29 @@ def __make_out_dataclass(cls) -> Tuple[Callable, List[str]]: def out_dataclass_with_columns( cls, out_dataclass: Optional[OuterGenericType] = None ) -> Tuple[Callable, List[str]]: + """Get output dataclass and column names for result conversion""" if not out_dataclass: - out_dataclass_, columns = cls.__make_out_dataclass() + out_dataclass_, columns = cls._create_dynamic_dataclass() else: out_dataclass_ = out_dataclass # type: ignore columns = [f.name for f in fields(out_dataclass_)] # type: ignore return out_dataclass_, columns + # ========================================== + # CRUD OPERATIONS + # ========================================== + @classmethod async def count(cls, filter_data: Optional[dict] = None) -> int: + """Count records matching the filter criteria""" if not filter_data: filter_data = {} - filter_data_ = deepcopy(filter_data) - filter_data_.pop("limit", "") - filter_data_.pop("offset", "") + filter_data_ = filter_data.copy() if filter_data else {} stmt: Select = select(func.count(cls.model().id)) # type: ignore - stmt = cls._apply_where(stmt, filter_data=filter_data_) + stmt = cls.query_builder().apply_where(stmt, filter_data=filter_data_, model_class=cls.model()) async with get_session(expire_on_commit=True) as session: result = await session.execute(stmt) @@ -172,13 +685,11 @@ async def count(cls, filter_data: Optional[dict] = None) -> int: @classmethod async def is_exists(cls, filter_data: dict) -> bool: - - filter_data_ = deepcopy(filter_data) - filter_data_.pop("limit", "") - filter_data_.pop("offset", "") + """Check if any records exist matching the filter criteria""" + filter_data_ = filter_data.copy() stmt = select(exists(cls.model())) - stmt = cls._apply_where(stmt, filter_data=filter_data_) + stmt = cls.query_builder().apply_where(stmt, filter_data=filter_data_, model_class=cls.model()) async with get_session() as session: result = await session.execute(stmt) @@ -189,12 +700,11 @@ async def is_exists(cls, filter_data: dict) -> bool: async def get_first( cls, filter_data: dict, out_dataclass: Optional[OuterGenericType] = None ) -> OuterGenericType | None: - filter_data_ = deepcopy(filter_data) - filter_data_.pop("limit", "") - filter_data_.pop("offset", "") + """Get 
the first record matching the filter criteria""" + filter_data_ = filter_data.copy() stmt: Select = select(cls.model()) - stmt = cls._apply_where(stmt, filter_data=filter_data_) + stmt = cls.query_builder().apply_where(stmt, filter_data=filter_data_, model_class=cls.model()) async with get_session(expire_on_commit=True) as session: result = await session.execute(stmt) @@ -213,17 +723,15 @@ async def get_list( order_data: Optional[Tuple[str]] = ("id",), out_dataclass: Optional[OuterGenericType] = None, ) -> List[OuterGenericType]: + """Get a list of records matching the filter criteria with pagination and ordering""" if not filter_data: filter_data = {} - limit = filter_data.pop("limit", None) - offset = filter_data.pop("offset", 0) + filter_data_ = filter_data.copy() stmt: Select = select(cls.model()) - stmt = cls._apply_where(stmt, filter_data=filter_data) - stmt = stmt.order_by(*cls._parse_order_data(order_data)) - stmt = stmt.offset(offset) - if limit is not None: - stmt = stmt.limit(limit) + stmt = cls.query_builder().apply_where(stmt, filter_data=filter_data_, model_class=cls.model()) + stmt = cls.query_builder().apply_ordering(stmt, order_data=order_data, model_class=cls.model()) + stmt = cls.query_builder().apply_pagination(stmt, filter_data=filter_data_) async with get_session(expire_on_commit=True) as session: result = await session.execute(stmt) @@ -241,6 +749,7 @@ async def get_list( async def create( cls, data: dict, is_return_require: bool = False, out_dataclass: Optional[OuterGenericType] = None ) -> OuterGenericType | None: + """Create a single record""" data_copy = data.copy() # Handle explicit ID if provided, otherwise let database auto-increment @@ -274,10 +783,78 @@ async def create( return None + @classmethod + async def update( + cls, + filter_data: dict, + data: Dict[str, Any], + is_return_require: bool = False, + out_dataclass: Optional[OuterGenericType] = None, + ) -> OuterGenericType | None: + """Update records matching the filter criteria""" + data_copy = data.copy() + + stmt = update(cls.model()) + stmt = cls.query_builder().apply_where(stmt, filter_data=filter_data, model_class=cls.model()) + + cls._set_timestamps_on_update(items=[data_copy]) + + stmt = stmt.values(**data_copy) + stmt.execution_options(synchronize_session="fetch") + + async with get_session(expire_on_commit=True) as session: + await session.execute(stmt) + await session.commit() + + if is_return_require: + return await cls.get_first(filter_data=filter_data, out_dataclass=out_dataclass) + return None + + @classmethod + async def update_or_create( + cls, + filter_data: dict, + data: Dict[str, Any], + is_return_require: bool = False, + out_dataclass: Optional[OuterGenericType] = None, + ) -> OuterGenericType | None: + """Update existing record or create new one if not found""" + is_exists = await cls.is_exists(filter_data=filter_data) + if is_exists: + data_tmp = deepcopy(data) + data_tmp.pop("id", None) + data_tmp.pop("uuid", None) + item = await cls.update( + filter_data=filter_data, + data=data_tmp, + is_return_require=is_return_require, + out_dataclass=out_dataclass, + ) + return item + else: + item = await cls.create(data=data, is_return_require=is_return_require, out_dataclass=out_dataclass) + return item + + @classmethod + async def remove( + cls, + filter_data: Dict[str, Any], + ) -> None: + """Delete records matching the filter criteria""" + if not filter_data: + filter_data = {} + stmt = delete(cls.model()) + stmt = cls.query_builder().apply_where(stmt, filter_data=filter_data, 
model_class=cls.model()) + + async with get_session() as session: + await session.execute(stmt) + await session.commit() + @classmethod async def create_bulk( cls, items: List[dict], is_return_require: bool = False, out_dataclass: Optional[OuterGenericType] = None ) -> List[OuterGenericType] | None: + """Create multiple records in a single operation""" if not items: return [] @@ -286,9 +863,6 @@ async def create_bulk( # Add timestamps to all items cls._set_timestamps_on_create(items=items_copy) - # Normalize data to handle mixed completeness - cls._normalize_bulk_data(items=items_copy) - async with get_session(expire_on_commit=True) as session: model_class = cls.model() # type: ignore model_table = model_class.__table__ # type: ignore @@ -314,36 +888,85 @@ async def create_bulk( return None @classmethod - async def update( - cls, - filter_data: dict, - data: Dict[str, Any], - is_return_require: bool = False, - out_dataclass: Optional[OuterGenericType] = None, - ) -> OuterGenericType | None: - data_copy = deepcopy(data) + async def update_bulk( + cls, items: List[dict], is_return_require: bool = False, out_dataclass: Optional[OuterGenericType] = None + ) -> List[OuterGenericType] | None: + """Update multiple records in optimized bulk operation - stmt = update(cls.model()) - stmt = cls._apply_where(stmt, filter_data=filter_data) + Note: Currently uses 2 queries for returning case: + - Option 1: Keep current ORM approach (cleaner, 2 queries for returning) + - Option 2: Go back to raw SQL (1 query, but more complex) + - Option 3: Hybrid approach - use ORM for non-returning, raw SQL for returning + """ + if not items: + return None - cls._set_timestamps_on_update(items=[data_copy]) + items_copy = deepcopy(items) - stmt = stmt.values(**data_copy) - stmt.execution_options(synchronize_session="fetch") + cls._set_timestamps_on_update(items=items_copy) async with get_session(expire_on_commit=True) as session: - await session.execute(stmt) - await session.commit() + if is_return_require: + return await cls._bulk_update_with_returning(session, items_copy, out_dataclass) + else: + await cls._bulk_update_without_returning(session, items_copy) + return None - if is_return_require: - return await cls.get_first(filter_data=filter_data, out_dataclass=out_dataclass) - return None + # ========================================== + # BULK OPERATION HELPERS + # ========================================== + + @classmethod + async def _bulk_update_with_returning( + cls, session: Any, items: List[dict], out_dataclass: Optional[OuterGenericType] = None + ) -> List[OuterGenericType]: + """Perform bulk update with RETURNING for result collection using ORM""" + if not items: + return [] + + model_class = cls.model() # type: ignore + + # Use SQLAlchemy's bulk_update_mappings with synchronize_session=False for performance + await session.execute(update(model_class), items, execution_options={"synchronize_session": False}) + await session.commit() + + # Get updated items by their IDs + updated_ids = [item["id"] for item in items if "id" in item] + if not updated_ids: + return [] + + # Query the updated records + stmt = select(model_class).where(model_class.id.in_(updated_ids)) + result = await session.execute(stmt) + updated_records = result.scalars().all() + + # Convert to output dataclass + out_entity_, _ = cls.out_dataclass_with_columns(out_dataclass=out_dataclass) + updated_items = [] + + for record in updated_records: + entity_data = {c.key: getattr(record, c.key) for c in inspect(record).mapper.column_attrs} + 
updated_items.append(out_entity_(**entity_data)) + + return updated_items + + @classmethod + async def _bulk_update_without_returning(cls, session: Any, items: List[dict]) -> None: + """Perform bulk update without RETURNING using SQLAlchemy's bulk operations""" + if not items: + return + + model_class = cls.model() # type: ignore + + # Use SQLAlchemy's built-in bulk update method + await session.execute(update(model_class), items, execution_options={"synchronize_session": False}) + await session.commit() @classmethod async def _update_single_with_returning( cls, session: Any, item_data: dict, out_entity_: Callable ) -> OuterGenericType | None: - """Update a single item and return the updated entity""" + """Update a single item and return the updated entity (legacy method)""" if "id" not in item_data: return None @@ -366,36 +989,13 @@ async def _update_single_with_returning( return out_entity_(**entity_data) return None - @classmethod - async def update_bulk( - cls, items: List[dict], is_return_require: bool = False, out_dataclass: Optional[OuterGenericType] = None - ) -> List[OuterGenericType] | None: - if not items: - return None - - items_copy = deepcopy(items) - - cls._set_timestamps_on_update(items=items_copy) - - async with get_session(expire_on_commit=True) as session: - if is_return_require: - return await cls._bulk_update_with_returning(session, items_copy, out_dataclass) - else: - await cls._bulk_update_without_returning(session, items_copy) - return None - - @classmethod - def _set_timestamps_on_update(cls, items: List[dict]) -> None: - """Set updated_at on update""" - if hasattr(cls.model(), "updated_at"): - dt_ = dt.datetime.now(dt.UTC).replace(tzinfo=None) - for item in items: - if "updated_at" not in item: - item["updated_at"] = dt_ + # ========================================== + # UTILITY METHODS + # ========================================== @classmethod def _set_timestamps_on_create(cls, items: List[dict]) -> None: - """Set created_at, updated_at on create""" + """Set created_at, updated_at timestamps on create operations""" if hasattr(cls.model(), "updated_at") or hasattr(cls.model(), "created_at"): dt_ = dt.datetime.now(dt.UTC).replace(tzinfo=None) for item in items: @@ -405,89 +1005,10 @@ def _set_timestamps_on_create(cls, items: List[dict]) -> None: item["created_at"] = dt_ @classmethod - def _normalize_bulk_data(cls, items: List[dict]) -> None: - """Normalize bulk data to handle mixed field completeness""" - if not items: - return - - # Get all unique keys from all items - all_keys: set[str] = set() - for item in items: - all_keys.update(item.keys()) - - # Get model column defaults and nullable info - model_class = cls.model() # type: ignore - model_table = model_class.__table__ # type: ignore - - # For each item, ensure it has all fields with appropriate defaults - for item in items: - for key in all_keys: - if key not in item: - # Check if column exists in model - if hasattr(model_class, key): - column = getattr(model_table.c, key, None) - if column is not None: - # Only add explicit None if column is nullable and has no default - if column.nullable and column.default is None and column.server_default is None: - item[key] = None - # Don't add anything for columns with defaults - let database handle it - - @classmethod - async def _bulk_update_with_returning( - cls, session: Any, items: List[dict], out_dataclass: Optional[OuterGenericType] = None - ) -> List[OuterGenericType]: - """Perform bulk update with RETURNING for result collection""" - updated_items = [] - 
out_entity_, _ = cls.out_dataclass_with_columns(out_dataclass=out_dataclass) - - for item_data in items: - updated_item = await cls._update_single_with_returning(session, item_data, out_entity_) - if updated_item: - updated_items.append(updated_item) - - await session.commit() - return updated_items - - @classmethod - async def _bulk_update_without_returning(cls, session: Any, items: List[dict]) -> None: - """Perform bulk update without RETURNING for better performance""" - await session.execute(update(cls.model()), items) - await session.commit() - - @classmethod - async def update_or_create( - cls, - filter_data: dict, - data: Dict[str, Any], - is_return_require: bool = False, - out_dataclass: Optional[OuterGenericType] = None, - ) -> OuterGenericType | None: - is_exists = await cls.is_exists(filter_data=filter_data) - if is_exists: - data_tmp = deepcopy(data) - data_tmp.pop("id", None) - data_tmp.pop("uuid", None) - item = await cls.update( - filter_data=filter_data, - data=data_tmp, - is_return_require=is_return_require, - out_dataclass=out_dataclass, - ) - return item - else: - item = await cls.create(data=data, is_return_require=is_return_require, out_dataclass=out_dataclass) - return item - - @classmethod - async def remove( - cls, - filter_data: Dict[str, Any], - ) -> None: - if not filter_data: - filter_data = {} - stmt = delete(cls.model()) - stmt = cls._apply_where(stmt, filter_data=filter_data) - - async with get_session() as session: - await session.execute(stmt) - await session.commit() + def _set_timestamps_on_update(cls, items: List[dict]) -> None: + """Set updated_at timestamp on update operations""" + if hasattr(cls.model(), "updated_at"): + dt_ = dt.datetime.now(dt.UTC).replace(tzinfo=None) + for item in items: + if "updated_at" not in item: + item["updated_at"] = dt_ diff --git a/tests/infrastructure/repositories/test_repository_general.py b/tests/infrastructure/repositories/test_repository_general.py index 472ef4e..86f78c6 100644 --- a/tests/infrastructure/repositories/test_repository_general.py +++ b/tests/infrastructure/repositories/test_repository_general.py @@ -9,17 +9,20 @@ from tests.fixtures.constants import USERS -def test_users_get_list_limit_offset_case_1(e_loop: AbstractEventLoop, users: Any) -> None: +repository = repo_container.users_repository +out_dataclass = UserTestAggregate + + +def test_get_list_limit_offset_case_1(e_loop: AbstractEventLoop, users: Any) -> None: """Test pagination with limit larger than remaining items""" - users_repository = repo_container.users_repository total_users = len(USERS) offset = total_users - 1 limit = 10 expected_count = 1 - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={"limit": limit, "offset": offset}, out_dataclass=UserTestAggregate), + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={"limit": limit, "offset": offset}, out_dataclass=out_dataclass), ) assert isinstance(items, list) @@ -28,21 +31,20 @@ def test_users_get_list_limit_offset_case_1(e_loop: AbstractEventLoop, users: An # Verify returned item is valid expected_ids = {user["id"] for user in USERS} for user in items: - assert isinstance(user, UserTestAggregate) + assert isinstance(user, out_dataclass) assert user.id in expected_ids -def test_users_get_list_limit_offset_case_2(e_loop: AbstractEventLoop, users: Any) -> None: +def test_get_list_limit_offset_case_2(e_loop: AbstractEventLoop, users: Any) -> None: """Test pagination with offset near end of 
dataset""" - users_repository = repo_container.users_repository total_users = len(USERS) offset = total_users - 2 limit = 10 expected_count = 2 - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={"limit": limit, "offset": offset}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={"limit": limit, "offset": offset}, out_dataclass=out_dataclass) ) assert isinstance(items, list) @@ -50,21 +52,20 @@ def test_users_get_list_limit_offset_case_2(e_loop: AbstractEventLoop, users: An expected_ids = {user["id"] for user in USERS} for user in items: - assert isinstance(user, UserTestAggregate) + assert isinstance(user, out_dataclass) assert user.id in expected_ids -def test_users_get_list_limit_offset_case_3(e_loop: AbstractEventLoop, users: Any) -> None: +def test_get_list_limit_offset_case_3(e_loop: AbstractEventLoop, users: Any) -> None: """Test pagination with small limit and large offset""" - users_repository = repo_container.users_repository total_users = len(USERS) offset = total_users - 2 limit = 1 expected_count = 1 - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={"limit": limit, "offset": offset}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={"limit": limit, "offset": offset}, out_dataclass=out_dataclass) ) assert isinstance(items, list) @@ -72,20 +73,19 @@ def test_users_get_list_limit_offset_case_3(e_loop: AbstractEventLoop, users: An expected_ids = {user["id"] for user in USERS} for user in items: - assert isinstance(user, UserTestAggregate) + assert isinstance(user, out_dataclass) assert user.id in expected_ids -def test_users_get_list_with_offset_only(e_loop: AbstractEventLoop, users: Any) -> None: +def test_get_list_with_offset_only(e_loop: AbstractEventLoop, users: Any) -> None: """Test pagination with offset but no limit""" - users_repository = repo_container.users_repository total_users = len(USERS) offset = 1 expected_count = total_users - offset - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={"offset": offset}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={"offset": offset}, out_dataclass=out_dataclass) ) assert isinstance(items, list) @@ -93,19 +93,18 @@ def test_users_get_list_with_offset_only(e_loop: AbstractEventLoop, users: Any) expected_ids = {user["id"] for user in USERS} for user in items: - assert isinstance(user, UserTestAggregate) + assert isinstance(user, out_dataclass) assert user.id in expected_ids -def test_users_get_list_order_by_id_asc(e_loop: AbstractEventLoop, users: Any) -> None: +def test_get_list_order_by_id_asc(e_loop: AbstractEventLoop, users: Any) -> None: """Test ordering users by ID in ascending order""" - users_repository = repo_container.users_repository users_sorted = sorted(USERS, key=lambda i: i["id"]) expected_count = len(users_sorted) - items: List[UserTestAggregate] = e_loop.run_until_complete( - users_repository.get_list(order_data=("id",), out_dataclass=UserTestAggregate) + items: List[out_dataclass] = e_loop.run_until_complete( + repository.get_list(order_data=("id",), out_dataclass=out_dataclass) ) assert isinstance(items, list) @@ -113,7 +112,7 @@ def test_users_get_list_order_by_id_asc(e_loop: AbstractEventLoop, 
users: Any) - # Verify ordering is correct for index, user in enumerate(items): - assert isinstance(user, UserTestAggregate) + assert isinstance(user, out_dataclass) assert user.id == users_sorted[index]["id"] # Verify items are in ascending order @@ -121,15 +120,14 @@ def test_users_get_list_order_by_id_asc(e_loop: AbstractEventLoop, users: Any) - assert user_ids == sorted(user_ids) -def test_users_get_list_order_by_id_desc(e_loop: AbstractEventLoop, users: Any) -> None: +def test_get_list_order_by_id_desc(e_loop: AbstractEventLoop, users: Any) -> None: """Test ordering users by ID in descending order""" - users_repository = repo_container.users_repository users_sorted = sorted(USERS, key=lambda i: i["id"], reverse=True) expected_count = len(users_sorted) - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(order_data=("-id",), out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(order_data=("-id",), out_dataclass=out_dataclass) ) assert isinstance(items, list) @@ -137,7 +135,7 @@ def test_users_get_list_order_by_id_desc(e_loop: AbstractEventLoop, users: Any) # Verify ordering is correct for index, user in enumerate(items): - assert isinstance(user, UserTestAggregate) + assert isinstance(user, out_dataclass) assert user.id == users_sorted[index]["id"] # Verify items are in descending order @@ -153,14 +151,14 @@ def test_users_get_list_order_by_id_desc(e_loop: AbstractEventLoop, users: Any) @pytest.mark.parametrize("data", USERS_IN_LOOKUP, scope="function") -def test_users_get_list_in_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: - users_repository = repo_container.users_repository +def test_get_list_in_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: + users_repository = repository field = data["key"] lookup = f"{field}__in" expected_values = data["value"] - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass) ) assert isinstance(items, list) is True @@ -178,13 +176,11 @@ def test_users_get_list_in_lookup(e_loop: AbstractEventLoop, users: Any, data: d @pytest.mark.parametrize("data", USERS_GT_LOOKUP, scope="function") -def test_users_get_list_gt_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: - users_repository = repo_container.users_repository - +def test_get_list_gt_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: field = data["key"] lookup = f"{field}__gt" - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass) ) assert isinstance(items, list) is True @@ -202,13 +198,12 @@ def test_users_get_list_gt_lookup(e_loop: AbstractEventLoop, users: Any, data: d @pytest.mark.parametrize("data", USERS_GTE_LOOKUP, scope="function") -def test_users_get_list_gte_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: - users_repository = repo_container.users_repository +def test_get_list_gte_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: field = data["key"] - lookup 
= f"{field}__gt" - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=UserTestAggregate) + lookup = f"{field}__gte" + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass) ) assert isinstance(items, list) is True @@ -226,13 +221,12 @@ def test_users_get_list_gte_lookup(e_loop: AbstractEventLoop, users: Any, data: @pytest.mark.parametrize("data", USERS_LT_LOOKUP, scope="function") -def test_users_get_list_lt_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: - users_repository = repo_container.users_repository +def test_get_list_lt_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: field = data["key"] lookup = f"{field}__lt" - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass) ) assert isinstance(items, list) is True @@ -250,13 +244,11 @@ def test_users_get_list_lt_lookup(e_loop: AbstractEventLoop, users: Any, data: d @pytest.mark.parametrize("data", USERS_LTE_LOOKUP, scope="function") -def test_users_get_list_lte_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: - users_repository = repo_container.users_repository - +def test_get_list_lte_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: field = data["key"] lookup = f"{field}__lte" - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass) ) assert isinstance(items, list) is True @@ -275,13 +267,12 @@ def test_users_get_list_lte_lookup(e_loop: AbstractEventLoop, users: Any, data: @pytest.mark.parametrize("data", USERS_E_LOOKUP, scope="function") -def test_users_get_list_e_lookup_case_1(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: - users_repository = repo_container.users_repository +def test_get_list_e_lookup_case_1(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: field = data["key"] lookup = f"{field}__e" - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass) ) assert isinstance(items, list) is True @@ -292,13 +283,12 @@ def test_users_get_list_e_lookup_case_1(e_loop: AbstractEventLoop, users: Any, d @pytest.mark.parametrize("data", USERS_E_LOOKUP, scope="function") -def test_users_get_list_e_lookup_case_2(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: - users_repository = repo_container.users_repository +def test_get_list_e_lookup_case_2(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: field = data["key"] lookup = field - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + 
repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass)
     )
 
     assert isinstance(items, list) is True
@@ -317,13 +307,12 @@ def test_users_get_list_e_lookup_case_2(e_loop: AbstractEventLoop, users: Any, d
 
 
 @pytest.mark.parametrize("data", USERS_NE_LOOKUP, scope="function")
-def test_users_get_list_ne_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None:
-    users_repository = repo_container.users_repository
+def test_get_list_ne_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None:
     field = data["key"]
     lookup = f"{field}__ne"
-    items: List[Type[UserTestAggregate]] = e_loop.run_until_complete(
-        users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=UserTestAggregate)
+    items: List[Type[out_dataclass]] = e_loop.run_until_complete(
+        repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass)
     )
 
     assert isinstance(items, list) is True
@@ -342,13 +331,12 @@ def test_users_get_list_ne_lookup(e_loop: AbstractEventLoop, users: Any, data: d
 
 
 @pytest.mark.parametrize("data", USERS_NOT_IN_LOOKUP, scope="function")
-def test_users_get_list_not_in_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None:
-    users_repository = repo_container.users_repository
+def test_get_list_not_in_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None:
     field = data["key"]
     lookup = f"{field}__not_in"
-    items: List[Type[UserTestAggregate]] = e_loop.run_until_complete(
-        users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=UserTestAggregate)
+    items: List[Type[out_dataclass]] = e_loop.run_until_complete(
+        repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass)
     )
 
     assert isinstance(items, list) is True
@@ -370,24 +358,35 @@ def test_users_get_list_not_in_lookup(e_loop: AbstractEventLoop, users: Any, dat
         "value": "gmail",
     },
     {"key": "birthday", "value": USERS[3]["birthday"]},
+    {"key": "email", "value": "%gmail%"},
+    {"key": "first_name", "value": "first%"},
+    {"key": "last_name", "value": "%name%"},
 ]
 
 
 @pytest.mark.parametrize("data", USERS_LIKE_LOOKUP, scope="function")
-def test_users_get_list_like_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None:
-    users_repository = repo_container.users_repository
+def test_get_list_like_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None:
     field = data["key"]
     lookup = f"{field}__like"
-    items: List[Type[UserTestAggregate]] = e_loop.run_until_complete(
-        users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=UserTestAggregate)
+    items: List[Type[out_dataclass]] = e_loop.run_until_complete(
+        repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass)
     )
 
     assert isinstance(items, list) is True
 
     for index, user in enumerate(items):
-        value = getattr(user, field)
-        assert str(data["value"]) in str(value)
+        value = str(getattr(user, field))
+        pattern = str(data["value"])
+        if "%" in pattern:
+            if pattern.startswith("%") and pattern.endswith("%"):
+                assert pattern[1:-1] in value
+            elif pattern.startswith("%"):
+                assert value.endswith(pattern[1:])
+            elif pattern.endswith("%"):
+                assert value.startswith(pattern[:-1])
+            else:
+                assert pattern in value
+        else:
+            # plain value without wildcards: keep the original substring check
+            assert pattern in value
 
 
 USERS_NOT_LIKE_ALL_LOOKUP = [
@@ -401,13 +400,12 @@ def test_users_get_list_like_lookup(e_loop: AbstractEventLoop, users: Any, data:
 
 
 @pytest.mark.parametrize("data", USERS_NOT_LIKE_ALL_LOOKUP, scope="function")
-def test_users_get_list_not_like_all_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> 
None: - users_repository = repo_container.users_repository +def test_get_list_not_like_all_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: field = data["key"] lookup = f"{field}__not_like_all" - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass) ) assert isinstance(items, list) is True @@ -426,16 +424,15 @@ def test_users_get_list_not_like_all_lookup(e_loop: AbstractEventLoop, users: An @pytest.mark.parametrize("data", USERS_JSONB_LIKE_LOOKUP, scope="function") -def test_users_get_list_jsonb_like_lookup_case_1(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: - users_repository = repo_container.users_repository +def test_get_list_jsonb_like_lookup_case_1(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: field = data["key"] lookup = f"{field}__jsonb_like" for value in data["value"]: - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list( + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list( filter_data={lookup: value}, - out_dataclass=UserTestAggregate, + out_dataclass=out_dataclass, ) ) @@ -448,14 +445,13 @@ def test_users_get_list_jsonb_like_lookup_case_1(e_loop: AbstractEventLoop, user @pytest.mark.parametrize("data", USERS_JSONB_LIKE_LOOKUP, scope="function") -def test_users_get_list_jsonb_like_lookup_case_2(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: - users_repository = repo_container.users_repository +def test_get_list_jsonb_like_lookup_case_2(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: field = data["key"] lookup = f"meta__{field}__jsonb_like" for value in data["value"]: - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={lookup: value}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={lookup: value}, out_dataclass=out_dataclass) ) assert isinstance(items, list) is True @@ -475,14 +471,13 @@ def test_users_get_list_jsonb_like_lookup_case_2(e_loop: AbstractEventLoop, user @pytest.mark.parametrize("data", USERS_JSONB_NOT_LIKE_LOOKUP, scope="function") -def test_users_get_list_jsonb_not_like_lookup_case_1(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: - users_repository = repo_container.users_repository +def test_get_list_jsonb_not_like_lookup_case_1(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: field = data["key"] lookup = f"{field}__jsonb_not_like" for value in data["value"]: - items: List[Type[UserTestAggregate]] = e_loop.run_until_complete( - users_repository.get_list(filter_data={lookup: value}, out_dataclass=UserTestAggregate) + items: List[Type[out_dataclass]] = e_loop.run_until_complete( + repository.get_list(filter_data={lookup: value}, out_dataclass=out_dataclass) ) assert isinstance(items, list) is True @@ -495,14 +490,13 @@ def test_users_get_list_jsonb_not_like_lookup_case_1(e_loop: AbstractEventLoop, @pytest.mark.parametrize("data", USERS_JSONB_NOT_LIKE_LOOKUP, scope="function") -def test_users_get_list_jsonb_not_like_lookup_case_2(e_loop: AbstractEventLoop, users: Any, data: dict) -> None: - users_repository = repo_container.users_repository +def test_get_list_jsonb_not_like_lookup_case_2(e_loop: 
AbstractEventLoop, users: Any, data: dict) -> None:
     field = data["key"]
     lookup = f"meta__{field}__jsonb_not_like"
 
     for value in data["value"]:
-        items: List[Type[UserTestAggregate]] = e_loop.run_until_complete(
-            users_repository.get_list(filter_data={lookup: value}, out_dataclass=UserTestAggregate)
+        items: List[Type[out_dataclass]] = e_loop.run_until_complete(
+            repository.get_list(filter_data={lookup: value}, out_dataclass=out_dataclass)
         )
 
         assert isinstance(items, list) is True
@@ -511,3 +505,77 @@ def test_users_get_list_jsonb_not_like_lookup_case_2(e_loop: AbstractEventLoop,
         data = getattr(user, "meta", {})
         data_value = data.get(field)
         assert str(value) not in str(data_value)
+
+
+USERS_ILIKE_LOOKUP = [
+    {"key": "first_name", "value": "FIRST_NAME_1"},
+    {"key": "first_name", "value": "first_name_2"},
+    {"key": "email", "value": "GMAIL"},
+    {"key": "email", "value": "1"},
+    {"key": "last_name", "value": "LAST_NAME_3"},
+    {"key": "email", "value": "%gmail%"},
+    {"key": "first_name", "value": "first%"},
+    {"key": "last_name", "value": "%name%"},
+    {"key": "email", "value": "%.com"},
+]
+
+
+@pytest.mark.parametrize("data", USERS_ILIKE_LOOKUP, scope="function")
+def test_get_list_ilike_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None:
+    field = data["key"]
+    lookup = f"{field}__ilike"
+    items: List[Type[out_dataclass]] = e_loop.run_until_complete(
+        repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass)
+    )
+
+    assert isinstance(items, list) is True
+
+    for user in items:
+        value = str(getattr(user, field)).lower()
+        pattern = str(data["value"]).lower()
+        if "%" in pattern:
+            if pattern.startswith("%") and pattern.endswith("%"):
+                assert pattern[1:-1] in value
+            elif pattern.startswith("%"):
+                assert value.endswith(pattern[1:])
+            elif pattern.endswith("%"):
+                assert value.startswith(pattern[:-1])
+            else:
+                assert pattern in value
+        else:
+            # ILIKE without wildcards: case-insensitive substring check
+            assert pattern in value
+
+
+USERS_EMPTY_FIELDS_LOOKUP = [
+    {"key": "first_name", "value": ""},
+    {"key": "email", "value": ""},
+    {"key": "last_name", "value": ""},
+]
+
+
+@pytest.mark.parametrize("data", USERS_EMPTY_FIELDS_LOOKUP, scope="function")
+def test_get_list_empty_string_lookup(e_loop: AbstractEventLoop, users: Any, data: dict) -> None:
+    field = data["key"]
+    lookup = f"{field}__e"
+    items: List[Type[out_dataclass]] = e_loop.run_until_complete(
+        repository.get_list(filter_data={lookup: data["value"]}, out_dataclass=out_dataclass)
+    )
+
+    assert isinstance(items, list) is True
+    assert len(items) == 0
+
+
+USERS_COMBINED_FILTERS_LOOKUP = [
+    {"filters": {"first_name__like": "first_name", "id__gt": 1}, "expected_min_count": 1},
+    {"filters": {"email__ilike": "GMAIL", "id__lte": 3}, "expected_min_count": 1},
+    {"filters": {"first_name__e": USERS[0]["first_name"], "id__in": [1, 2, 3]}, "expected_min_count": 1},
+]
+
+
+@pytest.mark.parametrize("data", USERS_COMBINED_FILTERS_LOOKUP, scope="function")
+def test_get_list_combined_filters(e_loop: AbstractEventLoop, users: Any, data: dict) -> None:
+    items: List[Type[out_dataclass]] = e_loop.run_until_complete(
+        repository.get_list(filter_data=data["filters"], out_dataclass=out_dataclass)
+    )
+
+    assert isinstance(items, list) is True
+    assert len(items) >= data["expected_min_count"]
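
The suite above pins down the contract of the lookup DSL: a plain key is an equality filter, while suffixes such as ``__ne``, ``__in``, ``__not_in``, ``__gt``/``__gte``, ``__lt``/``__lte``, ``__like``/``__ilike`` and the ``jsonb_*`` variants select other operators, and ``limit``/``offset`` travel through the same ``filter_data`` dict. A minimal sketch of calling the same API from application code (module paths as in the tests; combining ``filter_data`` with ``order_data`` in one call is an assumption based on the separate tests above)::

    import asyncio

    from src.app.infrastructure.repositories.container import container as repo_container
    from tests.domain.users.aggregates.common import UserTestAggregate


    async def newest_gmail_users() -> list:
        # case-insensitive email match, id range and pagination in one filter_data dict
        return await repo_container.users_repository.get_list(
            filter_data={"email__ilike": "%gmail%", "id__gte": 1, "limit": 10},
            order_data=("-id",),
            out_dataclass=UserTestAggregate,
        )


    users = asyncio.run(newest_gmail_users())
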
diff --git a/tests/infrastructure/repositories/test_users_repository.py b/tests/infrastructure/repositories/test_users_repository.py
index 91e55bd..fe29453 100644
--- a/tests/infrastructure/repositories/test_users_repository.py
+++ b/tests/infrastructure/repositories/test_users_repository.py
@@ -6,7 +6,7 @@
 import pytest
 
-
+from src.app.infrastructure.repositories.base.abstract import RepositoryError
 from src.app.infrastructure.utils.common import generate_str
 from src.app.infrastructure.repositories.container import container as repo_container
 from tests.domain.users.aggregates.common import UserTestAggregate
@@ -886,11 +886,10 @@ def test_get_list_with_large_limit(e_loop: AbstractEventLoop, users: Any) -> Non
 
 def test_get_list_with_zero_limit(e_loop: AbstractEventLoop, users: Any) -> None:
     users_repository = repo_container.users_repository
-    items = e_loop.run_until_complete(
-        users_repository.get_list(filter_data={"limit": 0}, out_dataclass=UserTestAggregate)
-    )
-
-    assert len(items) == 0
+    with pytest.raises(RepositoryError):
+        e_loop.run_until_complete(
+            users_repository.get_list(filter_data={"limit": 0}, out_dataclass=UserTestAggregate)
+        )
 
 
 def test_get_list_with_large_offset(e_loop: AbstractEventLoop, users: Any) -> None:

From 4d1f7bbdf3a852c6f19fb0a9c234b2842ada5139 Mon Sep 17 00:00:00 2001
From: medniy <20140819+Medniy2000@users.noreply.github.com>
Date: Fri, 26 Sep 2025 17:36:06 +0300
Subject: [PATCH 10/13] small fixes (#26)

---
 .dockerignore                                  |  3 ++-
 local_prepare.sh                               |  2 +-
 .../messaging/clients/kafka_client.py          |  2 +-
 .../messaging/clients/rabbitmq_client.py       |  2 +-
 src/app/interfaces/cli/consume.py              | 22 +++++++++++++++++++
 5 files changed, 27 insertions(+), 4 deletions(-)

diff --git a/.dockerignore b/.dockerignore
index 0be3b56..f4932db 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,6 +1,7 @@
 # Python
-__pycache__/
+**/__pycache__/
 *.py[cod]
+*.pyo
 *$py.class
 *.so
 .Python
diff --git a/local_prepare.sh b/local_prepare.sh
index 909b946..aacc190 100644
--- a/local_prepare.sh
+++ b/local_prepare.sh
@@ -42,7 +42,7 @@ if [ ! 
"$(docker ps -aq -f name=${PROJECT_NAME_SLUG}_rabbitmq)" ]; then -p $MESSAGE_BROKER_PORT:5672 \ -e RABBITMQ_DEFAULT_USER=$MESSAGE_BROKER_USER \ -e RABBITMQ_DEFAULT_PASS=$MESSAGE_BROKER_PASSWORD \ - rabbitmq:3.11.6-management || true + rabbitmq:4.1.4-management-alpine || true fi echo " ✅ ${PROJECT_NAME_SLUG}_rabbitmq UP" diff --git a/src/app/infrastructure/messaging/clients/kafka_client.py b/src/app/infrastructure/messaging/clients/kafka_client.py index 44949f3..4a543d1 100644 --- a/src/app/infrastructure/messaging/clients/kafka_client.py +++ b/src/app/infrastructure/messaging/clients/kafka_client.py @@ -23,7 +23,7 @@ async def is_healthy(self) -> bool: brokers = metadata.brokers() if callable(metadata.brokers) else metadata.brokers return len(brokers) > 0 except Exception as ex: - logger.error(f"{ex}") + logger.warning(f"{ex}") return False finally: await client.close() diff --git a/src/app/infrastructure/messaging/clients/rabbitmq_client.py b/src/app/infrastructure/messaging/clients/rabbitmq_client.py index 0cf7127..6192e12 100644 --- a/src/app/infrastructure/messaging/clients/rabbitmq_client.py +++ b/src/app/infrastructure/messaging/clients/rabbitmq_client.py @@ -33,7 +33,7 @@ async def is_healthy(self) -> bool: await connection_.close() return True except Exception as ex: - logger.error(f"{ex}") + logger.warning(f"{ex}") return False async def __get_connection(self) -> AbstractRobustConnection: diff --git a/src/app/interfaces/cli/consume.py b/src/app/interfaces/cli/consume.py index 5e2f9a2..fa5f96f 100644 --- a/src/app/interfaces/cli/consume.py +++ b/src/app/interfaces/cli/consume.py @@ -45,6 +45,28 @@ async def queue_processing_aggregator(data: dict, handlers_by_event: Dict[str, D asyncio.set_event_loop(e_loop) try: + + # ================================================= + # WAIT FOR READINESS + # ================================================= + sleep_before = 120 + slept = 10 + logger.info("Waiting for readiness ..") + e_loop.run_until_complete(asyncio.sleep(slept)) + + is_healthy = e_loop.run_until_complete(mq_client.is_healthy()) + while slept < sleep_before and not is_healthy: + logger.info(f"Waiting for readiness {slept}/{sleep_before} sec..") + sleep_ = 15 + e_loop.run_until_complete(asyncio.sleep(sleep_)) + slept += sleep_ + is_healthy = e_loop.run_until_complete(mq_client.is_healthy()) + logger.info("READY.." 
if is_healthy else "NOT READY!") + + # ================================================= + # RUN CONSUMER + # ================================================= + handlers_by_event_ = HANDLERS_MAP aggregator_ = queue_processing_aggregator e_loop.run_until_complete( From e65cf794b18b9964b2cfe0bca8b0ede726dd23c8 Mon Sep 17 00:00:00 2001 From: medniy <20140819+Medniy2000@users.noreply.github.com> Date: Fri, 26 Sep 2025 20:03:29 +0300 Subject: [PATCH 11/13] beautify Dockerfile_base (#27) --- .launch/Dockerfile_base | 42 +++++++++++++++-------------------------- 1 file changed, 15 insertions(+), 27 deletions(-) diff --git a/.launch/Dockerfile_base b/.launch/Dockerfile_base index 7d13e54..fd967e6 100644 --- a/.launch/Dockerfile_base +++ b/.launch/Dockerfile_base @@ -1,52 +1,40 @@ # Build stage -ARG PYTHON_VERSION=3.12.11 -FROM python:${PYTHON_VERSION}-slim AS builder +FROM python:3.12-slim AS builder ENV PYTHONDONTWRITEBYTECODE=1 -ENV PYTHONUNBUFFERED=1 - WORKDIR /app -# Install build dependencies -RUN apt-get update && apt-get install -y \ - build-essential \ - && rm -rf /var/lib/apt/lists/* - -# Install poetry -RUN pip install --no-cache-dir poetry +# Install build tools and poetry +RUN apt-get update && apt-get install -y build-essential \ + && rm -rf /var/lib/apt/lists/* \ + && pip install --no-cache-dir poetry -# Copy dependency files +# Install dependencies COPY pyproject.toml poetry.lock ./ - -# Configure poetry and install dependencies RUN poetry config virtualenvs.create false \ && poetry install --no-root --only=main # Production stage -ARG PYTHON_VERSION=3.12 -FROM python:${PYTHON_VERSION}-slim +FROM python:3.12-slim ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONUNBUFFERED=1 ENV PYTHONPATH=/app -# Install runtime dependencies only -RUN apt-get update && apt-get install -y \ - libpq5 \ - && rm -rf /var/lib/apt/lists/* \ - && apt-get clean +# Install runtime dependencies +RUN apt-get update && apt-get install -y libpq5 \ + && rm -rf /var/lib/apt/lists/* WORKDIR /app -# Get Python version for dynamic path -ARG PYTHON_VERSION=3.12 -ENV PYTHON_SITE_PACKAGES=/usr/local/lib/python${PYTHON_VERSION}/site-packages - -# Copy installed packages from builder stage -COPY --from=builder ${PYTHON_SITE_PACKAGES} ${PYTHON_SITE_PACKAGES} +# Copy Python packages and binaries from builder +COPY --from=builder /usr/local/lib/python3.12/site-packages /usr/local/lib/python3.12/site-packages COPY --from=builder /usr/local/bin /usr/local/bin # Copy application code COPY src/ ./src/ COPY alembic.ini ./ +# Clean any cache files (safety backup) +RUN find ./src -name "*.pyc" -delete 2>/dev/null || true + From c748baa01f47bb3cd8c517e4d736c85510b6a0cc Mon Sep 17 00:00:00 2001 From: medniy <20140819+Medniy2000@users.noreply.github.com> Date: Tue, 30 Sep 2025 13:04:18 +0300 Subject: [PATCH 12/13] feature/health check improve (#29) --- .../interfaces/api/v1/endpoints/debug/resources.py | 14 ++++++++++---- src/app/interfaces/grpc/services/debug_service.py | 2 +- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/app/interfaces/api/v1/endpoints/debug/resources.py b/src/app/interfaces/api/v1/endpoints/debug/resources.py index b981c05..ded5bde 100644 --- a/src/app/interfaces/api/v1/endpoints/debug/resources.py +++ b/src/app/interfaces/api/v1/endpoints/debug/resources.py @@ -1,10 +1,13 @@ -from typing import Annotated, Dict +from typing import Annotated from fastapi import APIRouter, Body, Request +from fastapi.responses import JSONResponse +from starlette.status import HTTP_200_OK, HTTP_400_BAD_REQUEST + from 
src.app.application.container import container as services_container
-from src.app.interfaces.api.v1.endpoints.debug.schemas.req_schemas import MessageReq
 from src.app.config.settings import settings
 from src.app.infrastructure.messaging.mq_client import mq_client
+from src.app.interfaces.api.v1.endpoints.debug.schemas.req_schemas import MessageReq
 
 router = APIRouter(prefix="/debug")
 
@@ -41,7 +44,10 @@ async def send_message(
 @router.get("/health-check/", status_code=200)
 async def health_check(
     request: Request,
-) -> Dict[str, str]:
+) -> JSONResponse:
     is_healthy = await services_container.common_service.is_healthy()
     status = "OK" if is_healthy else "NOT OK"
-    return {"status": status}
+    status_code = HTTP_200_OK if is_healthy else HTTP_400_BAD_REQUEST
+    resp = JSONResponse(content={"status": status}, status_code=status_code)
+
+    return resp
diff --git a/src/app/interfaces/grpc/services/debug_service.py b/src/app/interfaces/grpc/services/debug_service.py
index fee8e1f..0330b87 100644
--- a/src/app/interfaces/grpc/services/debug_service.py
+++ b/src/app/interfaces/grpc/services/debug_service.py
@@ -27,5 +27,5 @@ async def SendMessage(self, request, context) -> pb2.MessageResp:  # type: ignor
 
     async def HealthCheck(self, request, context) -> pb2.HealthCheckResp:  # type: ignore
         is_healthy = await services_container.common_service.is_healthy()
-        status = "OK" if is_healthy else "NOT OK"
+        status = "SERVING" if is_healthy else "NOT_SERVING"
         return pb2.HealthCheckResp(status=status)  # type: ignore

From bee975dd18c6b1c77a0ae8573118b19e6e7d97d6 Mon Sep 17 00:00:00 2001
From: medniy <20140819+Medniy2000@users.noreply.github.com>
Date: Thu, 9 Oct 2025 14:09:52 +0300
Subject: [PATCH 13/13] feature/improve db session (#30)

* improve psql ext

* after beautify

* repository refactor

* update .env.example

---------

Co-authored-by: medniy2000
---
 .env.example                                      |  4 +-
 src/app/config/settings.py                        |  4 +-
 .../extensions/psql_ext/psql_ext.py               | 55 ++++++++++++----
 .../repositories/base/base_psql_repository.py     | 62 ++++++-------------
 4 files changed, 69 insertions(+), 56 deletions(-)

diff --git a/.env.example b/.env.example
index 2c8d9a8..5b5e87c 100644
--- a/.env.example
+++ b/.env.example
@@ -41,7 +41,9 @@ DB_NAME=proto
 DB_USER=dev
 DB_PASSWORD=dev
 CONNECTIONS_POOL_MIN_SIZE=10
-CONNECTIONS_POOL_MAX_OVERFLOW=25
+CONNECTIONS_POOL_MAX_OVERFLOW=30
+CONNECTIONS_POOL_RECYCLE=3600
+CONNECTIONS_POOL_TIMEOUT=30
 
 # Redis
 # ------------------------------------------------------------------------------
diff --git a/src/app/config/settings.py b/src/app/config/settings.py
index 2168d60..44e64cf 100644
--- a/src/app/config/settings.py
+++ b/src/app/config/settings.py
@@ -66,7 +66,9 @@ class SettingsBase(PydanticSettings):
     DB_URL_SYNC: str = f"postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
 
     CONNECTIONS_POOL_MIN_SIZE: int = env.int("CONNECTIONS_POOL_MIN_SIZE", 5)
-    CONNECTIONS_POOL_MAX_OVERFLOW: int = env.int("CONNECTIONS_POOL_MAX_OVERFLOW", 25)
+    CONNECTIONS_POOL_MAX_OVERFLOW: int = env.int("CONNECTIONS_POOL_MAX_OVERFLOW", 35)
+    CONNECTIONS_POOL_RECYCLE: int = env.int("CONNECTIONS_POOL_RECYCLE", 3600)  # 1 hour in seconds
+    CONNECTIONS_POOL_TIMEOUT: int = env.int("CONNECTIONS_POOL_TIMEOUT", 30)  # seconds
 
     # Redis Settings
     # --------------------------------------------------------------------------
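
The psql_ext refactor below threads these pool settings into ``create_async_engine`` and adds an optional per-session isolation level, validated against ``ALLOWED_ISOLATION_LEVELS`` before it is interpolated into ``SET TRANSACTION ISOLATION LEVEL``. A usage sketch (the calling function is hypothetical; only ``get_session`` and the level names come from the patch)::

    from sqlalchemy import text

    from src.app.infrastructure.extensions.psql_ext.psql_ext import get_session


    async def read_consistent_snapshot() -> None:
        # "REPEATABLE READ" is in the allow-list, so validation passes
        async with get_session(isolation_level="REPEATABLE READ") as session:
            await session.execute(text("SELECT 1"))

    # Anything outside the allow-list raises ValueError before reaching the DB,
    # e.g. get_session(isolation_level="READ COMMITTED; DROP TABLE x")
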
diff --git a/src/app/infrastructure/extensions/psql_ext/psql_ext.py b/src/app/infrastructure/extensions/psql_ext/psql_ext.py
index 3c2550a..cbc83e7 100644
--- a/src/app/infrastructure/extensions/psql_ext/psql_ext.py
+++ b/src/app/infrastructure/extensions/psql_ext/psql_ext.py
@@ -1,38 +1,71 @@
 from contextlib import asynccontextmanager
 from typing import AsyncGenerator
 
-from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
-from sqlalchemy import create_engine
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import sessionmaker
+from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
+from sqlalchemy import create_engine, text
+from sqlalchemy.orm import DeclarativeBase, sessionmaker
 
 from src.app.config.settings import settings
 
-Base = declarative_base()
+
+class Base(DeclarativeBase):
+    pass
+
+
+CONNECTIONS_POOL_USE_LIFO: bool = True  # LIFO for better connection reuse
+DB_JIT_DISABLED: bool = True  # Disable JIT
+DB_ISOLATION_LEVEL: str = "READ COMMITTED"
 
 # Init connection for own database ...
 default_engine = create_async_engine(
     settings.DB_URL,
     pool_size=settings.CONNECTIONS_POOL_MIN_SIZE,
     max_overflow=settings.CONNECTIONS_POOL_MAX_OVERFLOW,
-    pool_recycle=60 * 60 * 3,  # recycle after 3 hours
+    pool_recycle=settings.CONNECTIONS_POOL_RECYCLE,
+    pool_timeout=settings.CONNECTIONS_POOL_TIMEOUT,
+    pool_use_lifo=CONNECTIONS_POOL_USE_LIFO,
     pool_pre_ping=True,
     future=True,
     echo_pool=True,
     echo=settings.SHOW_SQL,
-    connect_args={"server_settings": {"jit": "off"}},
+    isolation_level=DB_ISOLATION_LEVEL,
+    connect_args={"server_settings": {"jit": "off" if DB_JIT_DISABLED else "on"}},
 )
 
-default_session = sessionmaker(
-    default_engine,  # type: ignore
-    class_=AsyncSession,  # type: ignore
+default_session = async_sessionmaker(
+    default_engine,
+    class_=AsyncSession,
+    expire_on_commit=True,
 )
 
+# Allowed isolation levels for validation
+ALLOWED_ISOLATION_LEVELS = {
+    "READ UNCOMMITTED",
+    "READ COMMITTED",
+    "REPEATABLE READ",
+    "SERIALIZABLE",
+}
+
+
 @asynccontextmanager
-async def get_session(expire_on_commit: bool = False) -> AsyncGenerator:
+async def get_session(
+    expire_on_commit: bool = False,
+    isolation_level: str | None = None,
+) -> AsyncGenerator:
+
+    # Validate isolation level to prevent SQL injection
+    if isolation_level and isolation_level not in ALLOWED_ISOLATION_LEVELS:
+        raise ValueError(
+            f"Invalid isolation level: '{isolation_level}'. 
" + f"Allowed values: {', '.join(sorted(ALLOWED_ISOLATION_LEVELS))}" + ) + try: async with default_session(expire_on_commit=expire_on_commit) as session: + if isolation_level: + # Safe to use string formatting after validation + await session.execute(text(f"SET TRANSACTION ISOLATION LEVEL {isolation_level}")) yield session except Exception as e: await session.rollback() diff --git a/src/app/infrastructure/repositories/base/base_psql_repository.py b/src/app/infrastructure/repositories/base/base_psql_repository.py index 0c9f88e..b4e0842 100644 --- a/src/app/infrastructure/repositories/base/base_psql_repository.py +++ b/src/app/infrastructure/repositories/base/base_psql_repository.py @@ -28,7 +28,6 @@ from src.app.infrastructure.repositories.base.abstract import ( AbstractBaseRepository, OuterGenericType, - BaseModel, RepositoryError, ) from src.app.infrastructure.utils.common import generate_str @@ -624,7 +623,7 @@ def query_builder(cls) -> Type[QueryBuilder]: return cls._QUERY_BUILDER_CLASS @classmethod - def model(cls) -> Type[BaseModel]: + def model(cls) -> Type[Base]: """Get the SQLAlchemy model class for this repository""" if not cls.MODEL: raise AttributeError("Model class not configured") @@ -679,7 +678,7 @@ async def count(cls, filter_data: Optional[dict] = None) -> int: stmt: Select = select(func.count(cls.model().id)) # type: ignore stmt = cls.query_builder().apply_where(stmt, filter_data=filter_data_, model_class=cls.model()) - async with get_session(expire_on_commit=True) as session: + async with get_session(expire_on_commit=False) as session: result = await session.execute(stmt) return result.scalars().first() @@ -706,7 +705,7 @@ async def get_first( stmt: Select = select(cls.model()) stmt = cls.query_builder().apply_where(stmt, filter_data=filter_data_, model_class=cls.model()) - async with get_session(expire_on_commit=True) as session: + async with get_session(expire_on_commit=False) as session: result = await session.execute(stmt) raw = result.scalars().first() @@ -733,7 +732,7 @@ async def get_list( stmt = cls.query_builder().apply_ordering(stmt, order_data=order_data, model_class=cls.model()) stmt = cls.query_builder().apply_pagination(stmt, filter_data=filter_data_) - async with get_session(expire_on_commit=True) as session: + async with get_session(expire_on_commit=False) as session: result = await session.execute(stmt) raw_items = result.scalars().all() @@ -757,7 +756,7 @@ async def create( cls._set_timestamps_on_create(items=[data_copy]) - async with get_session(expire_on_commit=True) as session: + async with get_session(expire_on_commit=False) as session: if is_return_require: # Use RETURNING to get specific columns instead of the whole model model_class = cls.model() # type: ignore @@ -802,7 +801,7 @@ async def update( stmt = stmt.values(**data_copy) stmt.execution_options(synchronize_session="fetch") - async with get_session(expire_on_commit=True) as session: + async with get_session(expire_on_commit=False) as session: await session.execute(stmt) await session.commit() @@ -863,15 +862,17 @@ async def create_bulk( # Add timestamps to all items cls._set_timestamps_on_create(items=items_copy) - async with get_session(expire_on_commit=True) as session: + # No need to keep objects attached, we use RETURNING clause + async with get_session(expire_on_commit=False) as session: model_class = cls.model() # type: ignore model_table = model_class.__table__ # type: ignore if is_return_require: - # Use RETURNING to get created records efficiently + # Use RETURNING to get created 
records efficiently in single query stmt = insert(model_class).values(items_copy).returning(*model_table.columns.values()) result = await session.execute(stmt) await session.commit() + # Process results immediately after commit, before session closes raw_items = result.fetchall() out_entity_, _ = cls.out_dataclass_with_columns(out_dataclass=out_dataclass) created_items = [] @@ -891,12 +892,13 @@ async def create_bulk( async def update_bulk( cls, items: List[dict], is_return_require: bool = False, out_dataclass: Optional[OuterGenericType] = None ) -> List[OuterGenericType] | None: - """Update multiple records in optimized bulk operation + """ + Update multiple records in optimized bulk operation. - Note: Currently uses 2 queries for returning case: - - Option 1: Keep current ORM approach (cleaner, 2 queries for returning) - - Option 2: Go back to raw SQL (1 query, but more complex) - - Option 3: Hybrid approach - use ORM for non-returning, raw SQL for returning + Performance notes: + - Uses expire_on_commit=False to avoid unnecessary object expiration + - When is_return_require=True: 2 queries (bulk update + select) + - When is_return_require=False: 1 query (bulk update only) """ if not items: return None @@ -905,7 +907,8 @@ async def update_bulk( cls._set_timestamps_on_update(items=items_copy) - async with get_session(expire_on_commit=True) as session: + # expire_on_commit=False for better performance, no ORM objects to track + async with get_session(expire_on_commit=False) as session: if is_return_require: return await cls._bulk_update_with_returning(session, items_copy, out_dataclass) else: @@ -936,7 +939,7 @@ async def _bulk_update_with_returning( return [] # Query the updated records - stmt = select(model_class).where(model_class.id.in_(updated_ids)) + stmt = select(model_class).where(model_class.id.in_(updated_ids)) # type: ignore[attr-defined] result = await session.execute(stmt) updated_records = result.scalars().all() @@ -962,33 +965,6 @@ async def _bulk_update_without_returning(cls, session: Any, items: List[dict]) - await session.execute(update(model_class), items, execution_options={"synchronize_session": False}) await session.commit() - @classmethod - async def _update_single_with_returning( - cls, session: Any, item_data: dict, out_entity_: Callable - ) -> OuterGenericType | None: - """Update a single item and return the updated entity (legacy method)""" - if "id" not in item_data: - return None - - model_class = cls.model() # type: ignore - model_table = model_class.__table__ # type: ignore - - item_id = item_data.pop("id") - stmt = ( - update(model_class) - .where(model_class.id == item_id) # type: ignore - .values(**item_data) - .returning(*model_table.columns.values()) - ) - result = await session.execute(stmt) - raw = result.fetchone() - if raw: - # Convert Row to dict using column names - column_names = [col.name for col in model_table.columns.values()] - entity_data = dict(zip(column_names, raw)) - return out_entity_(**entity_data) - return None - # ========================================== # UTILITY METHODS # ==========================================
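
With the single-row legacy path removed, ``update_bulk`` issues at most two statements: one executemany-style ``UPDATE`` keyed on ``id``, plus one ``SELECT ... WHERE id IN (...)`` when the caller asks for the updated entities back. A sketch of the calling side (repository and aggregate names borrowed from the tests; the field values are illustrative)::

    import asyncio

    from src.app.infrastructure.repositories.container import container as repo_container
    from tests.domain.users.aggregates.common import UserTestAggregate


    async def rename_users(ids: list) -> list:
        items = [{"id": id_, "first_name": f"user_{id_}"} for id_ in ids]
        # one bulk UPDATE, then one SELECT of the touched ids
        updated = await repo_container.users_repository.update_bulk(
            items, is_return_require=True, out_dataclass=UserTestAggregate
        )
        return updated or []


    asyncio.run(rename_users([1, 2, 3]))
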