From b4d5d35cc67ce7ab512f47a9b94eb88c404edc2d Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Tue, 3 Jun 2025 17:30:24 +0300 Subject: [PATCH 01/25] refactoring: ruff checks --- .github/workflows/checks.yml | 4 +- .kerberos/config_server.py | 14 +- Makefile | 5 +- app/alembic/env.py | 6 +- .../275222846605_initial_ldap_schema.py | 8 +- app/alembic/versions/fafc3d0b11ec_.py | 10 +- app/api/__init__.py | 14 +- app/ldap_protocol/asn1parser.py | 6 +- app/ldap_protocol/dependency.py | 2 +- app/ldap_protocol/dns.py | 10 +- app/ldap_protocol/kerberos/__init__.py | 12 +- app/ldap_protocol/kerberos/base.py | 2 +- app/ldap_protocol/kerberos/utils.py | 4 +- app/ldap_protocol/ldap_requests/__init__.py | 2 +- app/ldap_protocol/ldap_requests/base.py | 9 +- .../ldap_requests/bind_methods/__init__.py | 10 +- .../ldap_requests/bind_methods/sasl_gssapi.py | 2 +- app/ldap_protocol/ldap_requests/modify.py | 4 +- app/ldap_protocol/ldap_requests/search.py | 10 +- app/ldap_protocol/ldap_responses.py | 8 +- app/ldap_protocol/policies/access_policy.py | 4 +- app/ldap_protocol/policies/password_policy.py | 6 +- app/ldap_protocol/server.py | 2 +- app/ldap_protocol/session_storage.py | 8 +- app/ldap_protocol/utils/pagination.py | 8 +- .../utils/raw_definition_parser.py | 4 +- app/models.py | 16 +- app/schedule.py | 6 +- pyproject.toml | 24 ++- tests/test_api/test_auth/test_sessions.py | 22 +-- .../test_object_class_router.py | 4 +- .../test_main/test_router/test_add.py | 2 +- tests/test_ldap/test_util/test_add.py | 40 ++--- tests/test_ldap/test_util/test_delete.py | 26 ++- tests/test_ldap/test_util/test_modify.py | 156 ++++++++---------- 35 files changed, 223 insertions(+), 247 deletions(-) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 01b92ef8b..e49355fd8 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -26,7 +26,7 @@ jobs: - name: Run linters env: NEW_TAG: linter - run: docker run $NEW_TAG ruff check --output-format=github . + run: docker run $NEW_TAG ruff check --output-format=github . --preview ruff_format: runs-on: ubuntu-latest @@ -86,4 +86,4 @@ jobs: - name: Run tests env: TAG: tests - run: docker compose -f docker-compose.remote.test.yml up --no-log-prefix --attach md-test --exit-code-from md-test \ No newline at end of file + run: docker compose -f docker-compose.remote.test.yml up --no-log-prefix --attach md-test --exit-code-from md-test diff --git a/.kerberos/config_server.py b/.kerberos/config_server.py index 28ab8c66d..ca6a5158e 100644 --- a/.kerberos/config_server.py +++ b/.kerberos/config_server.py @@ -120,7 +120,11 @@ async def change_password(self, name: str, new_password: str) -> None: """ @abstractmethod - async def create_or_update_princ_pw(self, name: str, new_password) -> None: + async def create_or_update_princ_pw( + self, + name: str, + new_password: str, + ) -> None: """Create new principal or update password. :param str name: principal @@ -262,7 +266,11 @@ async def change_password(self, name: str, new_password: str) -> None: new_password, ) - async def create_or_update_princ_pw(self, name: str, new_password) -> None: + async def create_or_update_princ_pw( + self, + name: str, + new_password: str, + ) -> None: """Create new principal or update password. 
:param str name: principal @@ -343,7 +351,7 @@ async def try_set_kadmin(app: FastAPI) -> None: logging.info("Successfully connected to kadmin local") return - loop.create_task(try_set_kadmin(app)) + await loop.create_task(try_set_kadmin(app)) yield if kadmind := getattr(app.state, "kadmind", None): await kadmind.disconnect() diff --git a/Makefile b/Makefile index 2fd10c0ed..a986f4d8e 100644 --- a/Makefile +++ b/Makefile @@ -4,8 +4,9 @@ help: ## show help message before_pr: ruff format . - ruff check . --fix --unsafe-fixes - mypy app + ruff check . --preview --fix --unsafe-fixes + ruff format . + mypy . build: ## build app and manually generate self-signed cert make down diff --git a/app/alembic/env.py b/app/alembic/env.py index 667f64d3e..c6ca0a787 100644 --- a/app/alembic/env.py +++ b/app/alembic/env.py @@ -4,7 +4,7 @@ from logging.config import fileConfig from alembic import context -from sqlalchemy.ext.asyncio import create_async_engine +from sqlalchemy.ext.asyncio import AsyncConnection, create_async_engine from config import Settings from models import Base @@ -21,7 +21,7 @@ target_metadata = Base.metadata -def do_run_migrations(connection): +def do_run_migrations(connection: AsyncConnection): """Run sync migrations.""" context.configure( connection=connection, @@ -34,7 +34,7 @@ def do_run_migrations(connection): context.run_migrations() -async def run_async_migrations(settings): +async def run_async_migrations(settings: Settings): """Run async migrations.""" engine = create_async_engine(str(settings.POSTGRES_URI)) diff --git a/app/alembic/versions/275222846605_initial_ldap_schema.py b/app/alembic/versions/275222846605_initial_ldap_schema.py index 47fb2fc95..3a913fcf8 100644 --- a/app/alembic/versions/275222846605_initial_ldap_schema.py +++ b/app/alembic/versions/275222846605_initial_ldap_schema.py @@ -11,7 +11,7 @@ import sqlalchemy as sa from alembic import op from ldap3.protocol.schemas.ad2012R2 import ad_2012_r2_schema -from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.ext.asyncio import AsyncConnection, AsyncSession from sqlalchemy.orm import Session from ldap_protocol.ldap_schema.attribute_type_crud import ( @@ -179,7 +179,7 @@ def upgrade() -> None: session.commit() # NOTE: Load objectClasses into the database - async def _create_object_classes(connection): + async def _create_object_classes(connection: AsyncConnection): session = AsyncSession(bind=connection) await session.begin() @@ -237,7 +237,7 @@ async def _create_object_classes(connection): op.run_async(_create_object_classes) - async def _create_attribute_types(connection): + async def _create_attribute_types(connection: AsyncConnection): session = AsyncSession(bind=connection) await session.begin() @@ -259,7 +259,7 @@ async def _create_attribute_types(connection): op.run_async(_create_attribute_types) - async def _modify_object_classes(connection): + async def _modify_object_classes(connection: AsyncConnection): session = AsyncSession(bind=connection) await session.begin() diff --git a/app/alembic/versions/fafc3d0b11ec_.py b/app/alembic/versions/fafc3d0b11ec_.py index b795d0001..226edf4d0 100644 --- a/app/alembic/versions/fafc3d0b11ec_.py +++ b/app/alembic/versions/fafc3d0b11ec_.py @@ -9,7 +9,7 @@ from alembic import op from sqlalchemy import delete, exists, select from sqlalchemy.exc import DBAPIError, IntegrityError -from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.ext.asyncio import AsyncConnection, AsyncSession from ldap_protocol.policies.access_policy import create_access_policy from 
ldap_protocol.utils.queries import ( @@ -29,7 +29,9 @@ def upgrade() -> None: """Upgrade.""" - async def _create_readonly_grp_and_plcy(connection) -> None: + async def _create_readonly_grp_and_plcy( + connection: AsyncConnection, + ) -> None: session = AsyncSession(bind=connection) await session.begin() base_dn_list = await get_base_directories(session) @@ -81,7 +83,9 @@ async def _create_readonly_grp_and_plcy(connection) -> None: def downgrade() -> None: """Downgrade.""" - async def _delete_readonly_grp_and_plcy(connection) -> None: + async def _delete_readonly_grp_and_plcy( + connection: AsyncConnection, + ) -> None: session = AsyncSession(bind=connection) await session.begin() base_dn_list = await get_base_directories(session) diff --git a/app/api/__init__.py b/app/api/__init__.py index c33f0e25c..b364cf44b 100644 --- a/app/api/__init__.py +++ b/app/api/__init__.py @@ -17,17 +17,15 @@ from .shadow.router import shadow_router __all__ = [ - "auth_router", - "session_router", - "entry_router", - "network_router", - "mfa_router", - "pwd_router", "access_policy_router", - "ldap_schema_router", + "auth_router", "dns_router", - "krb5_router", "entry_router", + "krb5_router", + "ldap_schema_router", + "mfa_router", "network_router", + "pwd_router", + "session_router", "shadow_router", ] diff --git a/app/ldap_protocol/asn1parser.py b/app/ldap_protocol/asn1parser.py index b1a40bd7e..06ce77c5d 100644 --- a/app/ldap_protocol/asn1parser.py +++ b/app/ldap_protocol/asn1parser.py @@ -7,7 +7,7 @@ from contextlib import suppress from dataclasses import dataclass from enum import IntEnum -from typing import Annotated, Generic, TypeVar +from typing import Annotated, TypeVar from asn1 import Classes, Decoder, Encoder, Numbers, Tag, Types from pydantic import AfterValidator @@ -65,7 +65,7 @@ class SubstringTag(IntEnum): @dataclass -class ASN1Row(Generic[T]): +class ASN1Row[T: "ASN1Row | list[ASN1Row] | str | bytes | int | float"]: """Row with metadata.""" class_id: int @@ -218,7 +218,7 @@ def serialize(self, obj: "ASN1Row | T | None" = None) -> str: # noqa: C901 elif isinstance(obj, str): return obj - elif isinstance(obj, int) or isinstance(obj, float): + elif isinstance(obj, int | float): return str(obj) else: diff --git a/app/ldap_protocol/dependency.py b/app/ldap_protocol/dependency.py index 8ec0e8448..8f903bcd2 100644 --- a/app/ldap_protocol/dependency.py +++ b/app/ldap_protocol/dependency.py @@ -12,7 +12,7 @@ T = TypeVar("T", bound=Callable) -async def resolve_deps(func: T, container: AsyncContainer) -> T: +async def resolve_deps[T: Callable](func: T, container: AsyncContainer) -> T: """Provide async dependencies. 
:param T func: Awaitable diff --git a/app/ldap_protocol/dns.py b/app/ldap_protocol/dns.py index f71c05b3c..6b92e44e1 100644 --- a/app/ldap_protocol/dns.py +++ b/app/ldap_protocol/dns.py @@ -9,8 +9,8 @@ from abc import ABC, abstractmethod from collections import defaultdict from dataclasses import dataclass -from enum import Enum, StrEnum -from typing import Any, Awaitable, Callable +from enum import StrEnum +from typing import Awaitable, Callable from dns.asyncquery import inbound_xfr as make_inbound_xfr, tcp as asynctcp from dns.asyncresolver import Resolver as AsyncResolver @@ -53,7 +53,7 @@ def wrapper(func: Callable) -> Callable: bus_type = " stub " if is_stub else " " @functools.wraps(func) - async def wrapped(*args: str, **kwargs: str) -> Any: + async def wrapped(*args: str, **kwargs: str) -> object: logger = log.opt(depth=1) logger.info(f"Calling{bus_type}'{name}'") @@ -77,7 +77,7 @@ class DNSConnectionError(ConnectionError): """API Error.""" -class DNSRecordType(str, Enum): +class DNSRecordType(StrEnum): """DNS record types.""" a = "A" @@ -416,7 +416,7 @@ async def get_dns_manager_settings( CatalogueSetting.name == DNS_MANAGER_IP_ADDRESS_NAME, CatalogueSetting.name == DNS_MANAGER_TSIG_KEY_NAME, ) - ), # fmt: skip + ), ): settings_dict[setting.name] = setting.value diff --git a/app/ldap_protocol/kerberos/__init__.py b/app/ldap_protocol/kerberos/__init__.py index c685d6c8c..d20ac4faa 100644 --- a/app/ldap_protocol/kerberos/__init__.py +++ b/app/ldap_protocol/kerberos/__init__.py @@ -23,13 +23,13 @@ async def get_kerberos_class(session: AsyncSession) -> type[AbstractKadmin]: __all__ = [ - "get_kerberos_class", - "KerberosMDAPIClient", - "StubKadminMDADPIClient", + "KERBEROS_STATE_NAME", "AbstractKadmin", - "KerberosState", "KRBAPIError", - "unlock_principal", - "KERBEROS_STATE_NAME", + "KerberosMDAPIClient", + "KerberosState", + "StubKadminMDADPIClient", + "get_kerberos_class", "set_state", + "unlock_principal", ] diff --git a/app/ldap_protocol/kerberos/base.py b/app/ldap_protocol/kerberos/base.py index c9c2e5e5c..baeaece4f 100644 --- a/app/ldap_protocol/kerberos/base.py +++ b/app/ldap_protocol/kerberos/base.py @@ -168,7 +168,7 @@ async def add_principal( self, name: str, password: str | None, - timeout: int | float = 1, + timeout: float = 1, ) -> None: ... 
@abstractmethod diff --git a/app/ldap_protocol/kerberos/utils.py b/app/ldap_protocol/kerberos/utils.py index f08f4bb02..588e85a0c 100644 --- a/app/ldap_protocol/kerberos/utils.py +++ b/app/ldap_protocol/kerberos/utils.py @@ -1,7 +1,7 @@ """Utils for kadmin.""" from functools import wraps -from typing import Any, Callable +from typing import Callable import httpx from sqlalchemy import delete, select, update @@ -24,7 +24,7 @@ def wrapper(func: Callable) -> Callable: bus_type = " stub " if is_stub else " " @wraps(func) - async def wrapped(*args: str, **kwargs: str) -> Any: + async def wrapped(*args: str, **kwargs: str) -> object: logger = log.opt(depth=1) try: principal = args[1] diff --git a/app/ldap_protocol/ldap_requests/__init__.py b/app/ldap_protocol/ldap_requests/__init__.py index 90ff4cdd8..246e3baf0 100644 --- a/app/ldap_protocol/ldap_requests/__init__.py +++ b/app/ldap_protocol/ldap_requests/__init__.py @@ -32,4 +32,4 @@ } -__all__ = ["protocol_id_map", "BaseRequest"] +__all__ = ["BaseRequest", "protocol_id_map"] diff --git a/app/ldap_protocol/ldap_requests/base.py b/app/ldap_protocol/ldap_requests/base.py index f48f37534..462e2960f 100644 --- a/app/ldap_protocol/ldap_requests/base.py +++ b/app/ldap_protocol/ldap_requests/base.py @@ -27,8 +27,8 @@ colorize=False, ) -type handler = Callable[..., AsyncGenerator[BaseResponse, None]] -type serializer = Callable[..., "BaseRequest"] +type Handler = Callable[..., AsyncGenerator[BaseResponse, None]] +type Serializer = Callable[..., BaseRequest] if TYPE_CHECKING: @@ -40,6 +40,7 @@ async def _handle_api( self, container: AsyncContainer, ) -> list[BaseResponse] | BaseResponse: ... + else: class _APIProtocol: ... @@ -48,8 +49,8 @@ class _APIProtocol: ... class BaseRequest(ABC, _APIProtocol, BaseModel): """Base request builder.""" - handle: ClassVar[handler] - from_data: ClassVar[serializer] + handle: ClassVar[Handler] + from_data: ClassVar[Serializer] @property @abstractmethod diff --git a/app/ldap_protocol/ldap_requests/bind_methods/__init__.py b/app/ldap_protocol/ldap_requests/bind_methods/__init__.py index 88eb06bbc..a2683cbad 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/__init__.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/__init__.py @@ -25,15 +25,15 @@ } __all__ = [ - "get_bad_response", - "sasl_mechanism_map", + "GSSAPISL", "AbstractLDAPAuth", + "GSSAPIAuthStatus", + "LDAPBindErrors", "SASLMethod", "SaslAuthentication", "SaslGSSAPIAuthentication", "SaslPLAINAuthentication", "SimpleAuthentication", - "GSSAPIAuthStatus", - "GSSAPISL", - "LDAPBindErrors", + "get_bad_response", + "sasl_mechanism_map", ] diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py index 05ba8aaf8..40c18a530 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py @@ -61,7 +61,7 @@ class SaslGSSAPIAuthentication(SaslAuthentication): 2. Intermediate Requests: - The client sends kerberos AP-REQ token - The server processes token - - Сontinues until context is established + - Continues until context is established - The client sends empty token 3. 
Final Handshake: diff --git a/app/ldap_protocol/ldap_requests/modify.py b/app/ldap_protocol/ldap_requests/modify.py index 2626946ba..bbfcfb4bf 100644 --- a/app/ldap_protocol/ldap_requests/modify.py +++ b/app/ldap_protocol/ldap_requests/modify.py @@ -4,7 +4,7 @@ License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE """ -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta from enum import IntEnum from typing import AsyncGenerator, ClassVar @@ -140,7 +140,7 @@ async def _update_password_expiration( if policy.maximum_password_age_days == 0: return - now = datetime.now(timezone.utc) + now = datetime.now(UTC) now += timedelta(days=policy.maximum_password_age_days) change.modification.vals[0] = now.strftime("%Y%m%d%H%M%SZ") diff --git a/app/ldap_protocol/ldap_requests/search.py b/app/ldap_protocol/ldap_requests/search.py index d722348c7..d2f070d48 100644 --- a/app/ldap_protocol/ldap_requests/search.py +++ b/app/ldap_protocol/ldap_requests/search.py @@ -98,7 +98,7 @@ class Config: ignored_types = (cached_property,) @field_serializer("filter") - def serialize_filter(self, val: ASN1Row | None, _info: Any) -> str | None: + def serialize_filter(self, val: ASN1Row | None, _info: Any) -> str | None: # noqa: ANN401 """Serialize filter field.""" return val.to_ldap_filter() if isinstance(val, ASN1Row) else None @@ -411,7 +411,7 @@ async def paginate_query( end = start + self.size_limit query = query.offset(start).limit(end) - return query, int(ceil(count / float(self.size_limit))), count + return query, ceil(count / float(self.size_limit)), count async def tree_view( # noqa: C901 self, @@ -460,10 +460,8 @@ async def tree_view( # noqa: C901 attrs["authTimestamp"].append(directory.user.last_logon) if ( - self.member_of - and "group" in obj_classes - or "user" in obj_classes - ): + self.member_of and "group" in obj_classes + ) or "user" in obj_classes: for group in directory.groups: attrs["memberOf"].append(group.directory.path_dn) diff --git a/app/ldap_protocol/ldap_responses.py b/app/ldap_protocol/ldap_responses.py index e8e8f3642..5f1302c5e 100644 --- a/app/ldap_protocol/ldap_responses.py +++ b/app/ldap_protocol/ldap_responses.py @@ -38,9 +38,7 @@ class Config: populate_by_name = True arbitrary_types_allowed = True - json_encoders = { - bytes: lambda value: value.hex(), - } + json_encoders: ClassVar[dict] = {bytes: lambda value: value.hex()} class BaseEncoder(BaseModel): @@ -116,9 +114,7 @@ class Config: """Allow class to use property.""" arbitrary_types_allowed = True - json_encoders = { - bytes: lambda value: value.hex(), - } + json_encoders: ClassVar[dict] = {bytes: lambda value: value.hex()} class SearchResultEntry(BaseResponse): diff --git a/app/ldap_protocol/policies/access_policy.py b/app/ldap_protocol/policies/access_policy.py index fde5769eb..7a1f8ec89 100644 --- a/app/ldap_protocol/policies/access_policy.py +++ b/app/ldap_protocol/policies/access_policy.py @@ -21,7 +21,7 @@ from models import AccessPolicy, Directory, Group T = TypeVar("T", bound=Select) -__all__ = ["get_policies", "create_access_policy", "mutate_ap"] +__all__ = ["create_access_policy", "get_policies", "mutate_ap"] async def get_policies(session: AsyncSession) -> list[AccessPolicy]: @@ -75,7 +75,7 @@ async def create_access_policy( await session.flush() -def mutate_ap( +def mutate_ap[T: Select]( query: T, user: UserSchema, action: Literal["add", "read", "modify", "del"] = "read", diff --git a/app/ldap_protocol/policies/password_policy.py 
b/app/ldap_protocol/policies/password_policy.py index 25c789697..82f6e5386 100644 --- a/app/ldap_protocol/policies/password_policy.py +++ b/app/ldap_protocol/policies/password_policy.py @@ -242,9 +242,9 @@ async def validate_password_with_policy( errors.append("password minimum length violation") regex = ( - re.search("[A-ZА-Я]", password) is not None, - re.search("[a-zа-я]", password) is not None, - re.search("[0-9]", password) is not None, + re.search(r"[A-ZА-Я]", password) is not None, # noqa: RUF001 + re.search(r"[a-zа-я]", password) is not None, # noqa: RUF001 + re.search(r"[0-9]", password) is not None, password.lower() not in _COMMON_PASSWORDS, ) diff --git a/app/ldap_protocol/server.py b/app/ldap_protocol/server.py index 44f8ebad2..48afb9dc1 100644 --- a/app/ldap_protocol/server.py +++ b/app/ldap_protocol/server.py @@ -184,7 +184,7 @@ async def recieve( self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter, - return_addr: Literal[True, False] | bool = False, + return_addr: bool = False, ) -> tuple[IPv4Address | IPv6Address, bytes] | bytes: """Read N packets by 1kB. diff --git a/app/ldap_protocol/session_storage.py b/app/ldap_protocol/session_storage.py index 1183892c1..227be6d29 100644 --- a/app/ldap_protocol/session_storage.py +++ b/app/ldap_protocol/session_storage.py @@ -7,7 +7,7 @@ import json from abc import ABC, abstractmethod from collections import defaultdict -from datetime import datetime, timezone +from datetime import UTC, datetime from secrets import token_hex from typing import Iterable, Literal, Self @@ -203,7 +203,7 @@ def _generate_session_data( signature = self._sign(session_id, settings) data = {"id": uid, "sign": signature} | extra_data - data["issued"] = datetime.now(timezone.utc).isoformat() + data["issued"] = datetime.now(UTC).isoformat() return session_id, signature, data @abstractmethod @@ -693,7 +693,7 @@ async def create_ldap_session( :param str key: session key :param dict data: any data """ - data["issued"] = datetime.now(timezone.utc).isoformat() + data["issued"] = datetime.now(UTC).isoformat() ldap_sessions_key = self._get_user_session_key(uid, "ldap") ip_sessions_key = None @@ -723,7 +723,7 @@ async def check_rekey(self, session_id: str, rekey_interval: int) -> bool: data = await self.get(session_id) issued = datetime.fromisoformat(data.get("issued")) # type: ignore - return (datetime.now(timezone.utc) - issued).seconds > rekey_interval + return (datetime.now(UTC) - issued).seconds > rekey_interval async def _rekey_session(self, session_id: str, settings: Settings) -> str: """Rekey session. diff --git a/app/ldap_protocol/utils/pagination.py b/app/ldap_protocol/utils/pagination.py index ea96ad8fa..0402b6a3f 100644 --- a/app/ldap_protocol/utils/pagination.py +++ b/app/ldap_protocol/utils/pagination.py @@ -8,7 +8,7 @@ from abc import abstractmethod from dataclasses import dataclass from math import ceil -from typing import Generic, Sequence, TypeVar +from typing import Sequence, TypeVar from pydantic import BaseModel, Field from sqlalchemy import func, select @@ -48,7 +48,7 @@ class PaginationMetadata: total_pages: int | None = None -class BasePaginationSchema(BaseModel, Generic[P]): +class BasePaginationSchema[P: BaseModel](BaseModel): """Paginator Schema.""" metadata: PaginationMetadata @@ -60,7 +60,7 @@ class Config: arbitrary_types_allowed = True -class BaseSchemaModel(BaseModel, Generic[S]): +class BaseSchemaModel[S: Base](BaseModel): """Model for Schema. Schema is used for serialization and deserialization. 
@@ -73,7 +73,7 @@ def from_db(cls, sqla_instance: S) -> "BaseSchemaModel[S]": @dataclass -class PaginationResult(Generic[S]): +class PaginationResult[S: Base]: """Paginator. Paginator contains metadata about pagination and chunk of items. diff --git a/app/ldap_protocol/utils/raw_definition_parser.py b/app/ldap_protocol/utils/raw_definition_parser.py index 75954dd9e..465f4a25e 100644 --- a/app/ldap_protocol/utils/raw_definition_parser.py +++ b/app/ldap_protocol/utils/raw_definition_parser.py @@ -25,12 +25,12 @@ def _list_to_string(data: list[str]) -> str | None: @staticmethod def _get_attribute_type_info(raw_definition: str) -> AttributeTypeInfo: tmp = AttributeTypeInfo.from_definition(definitions=[raw_definition]) - return list(tmp.values())[0] + return next(iter(tmp.values())) @staticmethod def get_object_class_info(raw_definition: str) -> ObjectClassInfo: tmp = ObjectClassInfo.from_definition(definitions=[raw_definition]) - return list(tmp.values())[0] + return next(iter(tmp.values())) @staticmethod async def _get_attribute_types_by_names( diff --git a/app/models.py b/app/models.py index da23e5dc6..468227219 100644 --- a/app/models.py +++ b/app/models.py @@ -9,7 +9,7 @@ import enum import uuid from collections import defaultdict -from datetime import datetime, timezone +from datetime import UTC, datetime from ipaddress import IPv4Address, IPv4Network from typing import Annotated, ClassVar, Literal @@ -279,13 +279,13 @@ def attributes_dict(self) -> defaultdict[str, list[str]]: ), ) - search_fields = { + search_fields: ClassVar[dict[str, str]] = { "name": "name", "objectguid": "objectGUID", "objectsid": "objectSid", } - ro_fields = { + ro_fields: ClassVar[set[str]] = { "uid", "whenCreated", "lastLogon", @@ -327,7 +327,7 @@ def create_path( ) -> None: """Create path from a new directory.""" pre_path: list[str] = parent.path if parent else [] - self.path = pre_path + [self.get_dn(dn)] + self.path = [*pre_path, self.get_dn(dn)] self.depth = len(self.path) self.rdname = dn @@ -393,7 +393,7 @@ class User(Base): DateTime(timezone=True), ) - search_fields = { + search_fields: ClassVar[dict[str, str]] = { "mail": "mail", "samaccountname": "sAMAccountName", "userprincipalname": "userPrincipalName", @@ -402,7 +402,7 @@ class User(Base): "accountexpires": "accountExpires", } - fields = { + fields: ClassVar[dict[str, str]] = { "loginshell": "loginShell", "uidnumber": "uidNumber", "homedirectory": "homeDirectory", @@ -443,8 +443,8 @@ def is_expired(self) -> bool: if self.account_exp is None: return False - now = datetime.now(tz=timezone.utc) - user_account_exp = self.account_exp.astimezone(timezone.utc) + now = datetime.now(tz=UTC) + user_account_exp = self.account_exp.astimezone(UTC) return now > user_account_exp diff --git a/app/schedule.py b/app/schedule.py index 48a3f9ac9..cd013ecad 100644 --- a/app/schedule.py +++ b/app/schedule.py @@ -15,9 +15,9 @@ from ioc import MainProvider from ldap_protocol.dependency import resolve_deps -type task_type = Callable[..., Coroutine] +type TaskType = Callable[..., Coroutine] -_TASKS: set[tuple[task_type, float]] = { +_TASKS: set[tuple[TaskType, float]] = { (disable_accounts, 600.0), (principal_block_sync, 60.0), (check_ldap_principal, -1.0), @@ -26,7 +26,7 @@ async def _schedule( - task: task_type, + task: TaskType, wait: float, container: AsyncContainer, ) -> None: diff --git a/pyproject.toml b/pyproject.toml index facc3873c..a98522526 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -104,6 +104,8 @@ line-ending = "lf" skip-magic-trailing-comma = false # 
default: false [tool.ruff.lint] +extend-select = [] + select = [ "F", # Pyflakes. Must have "E", # pycodestyle (Error), check tool.ruff.lint.pycodestyle. Must have @@ -121,7 +123,7 @@ select = [ "COM", # flake8-commas # "CPY", # flake8-copyright TODO uncomment, ruff fix and fix error "PIE", # flake8-pie - # "PYI", # flake8-pyi TODO uncomment, ruff fix and fix error + "PYI", # flake8-pyi "PT", # flake8-pytest "Q", # flake8-quotes # "RET", # flake8-return TODO uncomment, ruff fix and fix error @@ -134,25 +136,19 @@ select = [ "ERA", # eradicate # "PGH", # pygrep-hooks TODO does we need it? uncomment, ruff fix and fix error # "PL", # Pylint TODO uncomment, ruff fix and fix error - # "DOC", # pydoclint TODO uncomment, ruff fix and fix error - # "RUF", # Ruff-specific rules TODO uncomment, ruff fix and fix error - "RUF100", # Ruff100-specific rule TODO delete that and uncomment "RUF"-rule in line up. + "DOC", # pydoclint TODO uncomment, ruff fix and fix error + "RUF", # Ruff-specific rules + "FAST", # FastAPI checks ] # Gradually remove all values marked 'TODO' and fix errors. ignore = [ - "D102", # TODO delete that and fix all errors - "D104", # TODO delete that and fix all errors "D203", # this is necessary. Conflict with `D211` "D213", # this is necessary. Conflict with `D212` "D301", # this is necessary. - "UP017", # TODO delete that and fix all errors - "UP034", # TODO delete that and fix all errors "UP035", # this is necessary. We allowed deprecated import - "ANN001", # TODO delete that and fix all errors "ANN002", # this is necessary. "ANN003", # this is necessary. - "ANN401", # TODO delete that and fix all errors "ASYNC109", "ASYNC230", "S311", # this is necessary. @@ -161,17 +157,19 @@ ignore = [ "TC001", # this is necessary. "TC002", # this is necessary. "TC003", # this is necessary. - "SIM101", # analogue simplify-boolean-expressions IF100 "B905", # this is necessary. 
get-attr-with-constant + "RUF029", ] -extend-select = [] - fixable = ["ALL"] unfixable = [ "T20", # dont auto delete print/pprint lines in code ] +[tool.ruff.lint.pydocstyle] +convention = "google" +ignore-var-parameters = true + [tool.ruff.lint.flake8-unused-arguments] ignore-variadic-names = true diff --git a/tests/test_api/test_auth/test_sessions.py b/tests/test_api/test_auth/test_sessions.py index c47f5d822..711e130ab 100644 --- a/tests/test_api/test_auth/test_sessions.py +++ b/tests/test_api/test_auth/test_sessions.py @@ -40,7 +40,7 @@ async def test_session_creation( assert sessions - key = list(sessions.keys())[0] + key = next(iter(sessions.keys())) assert sessions[key]["id"] == user.id assert sessions[key]["issued"] @@ -69,13 +69,13 @@ async def test_session_rekey( ) sessions = await storage.get_user_sessions(user.id) - old_key = list(sessions.keys())[0] + old_key = next(iter(sessions.keys())) old_session = sessions[old_key] await storage.rekey_session(old_key, settings) sessions = await storage.get_user_sessions(user.id) - new_key = list(sessions.keys())[0] + new_key = next(iter(sessions.keys())) new_session = sessions[new_key] assert len(sessions) == 1 @@ -117,7 +117,7 @@ async def test_session_creation_ldap_bind_unbind( assert sessions - key = list(sessions.keys())[0] + key = next(iter(sessions.keys())) assert sessions[key]["id"] == user.id assert sessions[key]["issued"] @@ -208,7 +208,7 @@ async def test_session_api_delete_detail( response = await http_client.get(f"sessions/{creds.un}") assert response.status_code == 200 - session_id = list(response.json().keys())[0] + session_id = next(iter(response.json().keys())) assert len(await storage.get_user_sessions(user.id)) == 1 @@ -339,36 +339,36 @@ async def test_get_sessions_by_protocol( all_sessions = await storage.get_user_sessions(uid) assert len(all_sessions) == 2 - key = list(all_sessions.keys())[0] + key = next(iter(all_sessions.keys())) assert all_sessions[key]["id"] == user.id http_sessions = await storage.get_user_sessions(uid, "http") assert len(http_sessions) == 1 - key = list(http_sessions.keys())[0] + key = next(iter(http_sessions.keys())) assert http_sessions[key]["id"] == user.id assert http_sessions[key]["ip"] == http_ip ldap_sessions = await storage.get_user_sessions(uid, "ldap") assert len(ldap_sessions) == 1 - key = list(ldap_sessions.keys())[0] + key = next(iter(ldap_sessions.keys())) assert ldap_sessions[key]["id"] == user.id assert ldap_sessions[key]["ip"] == ldap_ip ip_all_sessions = await storage.get_ip_sessions(http_ip) assert len(ip_all_sessions) == 1 - key = list(ip_all_sessions.keys())[0] + key = next(iter(ip_all_sessions.keys())) assert ip_all_sessions[key]["id"] == user.id assert ip_all_sessions[key]["ip"] == http_ip ip_http_sessions = await storage.get_ip_sessions(http_ip, "http") assert len(ip_http_sessions) == 1 - key = list(ip_http_sessions.keys())[0] + key = next(iter(ip_http_sessions.keys())) assert ip_http_sessions[key]["id"] == user.id assert ip_http_sessions[key]["ip"] == http_ip ip_ldap_sessions = await storage.get_ip_sessions(ldap_ip, "ldap") assert len(ip_ldap_sessions) == 1 - key = list(ip_ldap_sessions.keys())[0] + key = next(iter(ip_ldap_sessions.keys())) assert ip_ldap_sessions[key]["id"] == user.id assert ip_ldap_sessions[key]["ip"] == ldap_ip diff --git a/tests/test_api/test_ldap_schema/test_object_class_router.py b/tests/test_api/test_ldap_schema/test_object_class_router.py index 41c3c14e0..537d26e0e 100644 --- a/tests/test_api/test_ldap_schema/test_object_class_router.py +++ 
b/tests/test_api/test_ldap_schema/test_object_class_router.py @@ -95,10 +95,10 @@ async def test_modify_one_object_class( object_class = response.json() assert set(object_class.get("attribute_type_names_must")) == set( new_statement.get("attribute_type_names_must") - ) # type: ignore + ) assert set(object_class.get("attribute_type_names_may")) == set( new_statement.get("attribute_type_names_may") - ) # type: ignore + ) @pytest.mark.parametrize( diff --git a/tests/test_api/test_main/test_router/test_add.py b/tests/test_api/test_main/test_router/test_add.py index d011587d9..fb0c36f93 100644 --- a/tests/test_api/test_main/test_router/test_add.py +++ b/tests/test_api/test_main/test_router/test_add.py @@ -245,7 +245,7 @@ async def test_api_correct_add_double_member_of( assert data.get("resultCode") == LDAPCodes.SUCCESS assert data["search_result"][0]["object_name"] == user - created_groups = groups + ["cn=domain users,cn=groups,dc=md,dc=test"] + created_groups = [*groups, "cn=domain users,cn=groups,dc=md,dc=test"] for attr in data["search_result"][0]["partial_attributes"]: if attr["type"] == "memberOf": diff --git a/tests/test_ldap/test_util/test_add.py b/tests/test_ldap/test_util/test_add.py index fe42a153b..2977df513 100644 --- a/tests/test_ldap/test_util/test_add.py +++ b/tests/test_ldap/test_util/test_add.py @@ -36,14 +36,12 @@ async def test_ldap_root_add( search_path = get_search_path(dn) with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {dn}\n" - "name: test\n" - "cn: test\n" - "objectClass: organization\n" - "objectClass: top\n" - "memberOf: cn=domain admins,cn=groups,dc=md,dc=test\n" - ) + f"dn: {dn}\n" + "name: test\n" + "cn: test\n" + "objectClass: organization\n" + "objectClass: top\n" + "memberOf: cn=domain admins,cn=groups,dc=md,dc=test\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( @@ -164,14 +162,12 @@ async def test_ldap_user_add_group_with_group( with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {child_group_dn}\n" - "name: twisted\n" - "cn: twisted\n" - "objectClass: group\n" - "objectClass: top\n" - f"memberOf: {group_dn}\n" - ) + f"dn: {child_group_dn}\n" + "name: twisted\n" + "cn: twisted\n" + "objectClass: group\n" + "objectClass: top\n" + f"memberOf: {group_dn}\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( @@ -245,13 +241,11 @@ async def test_ldap_add_access_control( async def try_add() -> int: with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {dn}\n" - "name: test\n" - "cn: test\n" - "objectClass: organization\n" - "objectClass: top\n" - ) + f"dn: {dn}\n" + "name: test\n" + "cn: test\n" + "objectClass: organization\n" + "objectClass: top\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( diff --git a/tests/test_ldap/test_util/test_delete.py b/tests/test_ldap/test_util/test_delete.py index 187dd183c..3bccc3d1a 100644 --- a/tests/test_ldap/test_util/test_delete.py +++ b/tests/test_ldap/test_util/test_delete.py @@ -30,14 +30,12 @@ async def test_ldap_delete( with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {dn}\n" - "name: test\n" - "cn: test\n" - "objectClass: organization\n" - "objectClass: top\n" - "memberOf: cn=domain admins,cn=groups,dc=md,dc=test\n" - ) + f"dn: {dn}\n" + "name: test\n" + "cn: test\n" + "objectClass: organization\n" + "objectClass: top\n" + "memberOf: cn=domain admins,cn=groups,dc=md,dc=test\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( @@ -94,13 +92,11 @@ async def test_ldap_delete_w_access_control( with 
tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {dn}\n" - "name: test\n" - "cn: test\n" - "objectClass: organization\n" - "objectClass: top\n" - ) + f"dn: {dn}\n" + "name: test\n" + "cn: test\n" + "objectClass: organization\n" + "objectClass: top\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( # Add as Admin diff --git a/tests/test_ldap/test_util/test_modify.py b/tests/test_ldap/test_util/test_modify.py index 66cb1a342..7e5e720d9 100644 --- a/tests/test_ldap/test_util/test_modify.py +++ b/tests/test_ldap/test_util/test_modify.py @@ -53,24 +53,22 @@ async def test_ldap_base_modify( with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {dn}\n" - "changetype: modify\n" - "replace: mail\n" - "mail: modme@student.of.life.edu\n" - "-\n" - "add: title\n" - "title: Grand Poobah\n" - "title: Grand Poobah1\n" - "title: Grand Poobah2\n" - "title: Grand Poobah3\n" - "-\n" - "add: jpegPhoto\n" - "jpegPhoto: modme.jpeg\n" - "-\n" - "delete: posixEmail\n" - "-\n" - ) + f"dn: {dn}\n" + "changetype: modify\n" + "replace: mail\n" + "mail: modme@student.of.life.edu\n" + "-\n" + "add: title\n" + "title: Grand Poobah\n" + "title: Grand Poobah1\n" + "title: Grand Poobah2\n" + "title: Grand Poobah3\n" + "-\n" + "add: jpegPhoto\n" + "jpegPhoto: modme.jpeg\n" + "-\n" + "delete: posixEmail\n" + "-\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( @@ -139,7 +137,7 @@ async def test_ldap_membersip_user_delete( assert directory.groups with tempfile.NamedTemporaryFile("w") as file: - file.write((f"dn: {dn}\nchangetype: modify\ndelete: memberOf\n-\n")) + file.write(f"dn: {dn}\nchangetype: modify\ndelete: memberOf\n-\n") file.seek(0) proc = await asyncio.create_subprocess_exec( "ldapmodify", @@ -190,13 +188,11 @@ async def test_ldap_membersip_user_add( with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {dn}\n" - "changetype: modify\n" - "add: memberOf\n" - "memberOf: cn=domain admins,cn=groups,dc=md,dc=test\n" - "-\n" - ) + f"dn: {dn}\n" + "changetype: modify\n" + "add: memberOf\n" + "memberOf: cn=domain admins,cn=groups,dc=md,dc=test\n" + "-\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( @@ -247,14 +243,12 @@ async def test_ldap_membersip_user_replace( # add new group with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {new_group_dn}" - "name: twisted\n" - "cn: twisted\n" - "objectClass: group\n" - "objectClass: top\n" - "memberOf: cn=domain admins,cn=groups,dc=md,dc=test\n" - ) + f"dn: {new_group_dn}" + "name: twisted\n" + "cn: twisted\n" + "objectClass: group\n" + "objectClass: top\n" + "memberOf: cn=domain admins,cn=groups,dc=md,dc=test\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( @@ -279,13 +273,11 @@ async def test_ldap_membersip_user_replace( with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {dn}\n" - "changetype: modify\n" - "replace: memberOf\n" - "memberOf: cn=twisted,cn=groups,dc=md,dc=test\n" - "-\n" - ) + f"dn: {dn}\n" + "changetype: modify\n" + "replace: memberOf\n" + "memberOf: cn=twisted,cn=groups,dc=md,dc=test\n" + "-\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( @@ -341,13 +333,11 @@ async def test_ldap_membersip_grp_replace( # add new group with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - "dn: cn=twisted1,cn=groups,dc=md,dc=test\n" - "name: twisted\n" - "cn: twisted\n" - "objectClass: group\n" - "objectClass: top\n" - ) + "dn: cn=twisted1,cn=groups,dc=md,dc=test\n" + "name: twisted\n" + "cn: twisted\n" + 
"objectClass: group\n" + "objectClass: top\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( @@ -372,13 +362,11 @@ async def test_ldap_membersip_grp_replace( with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {dn}\n" - "changetype: modify\n" - "replace: memberOf\n" - "memberOf: cn=twisted1,cn=groups,dc=md,dc=test\n" - "-\n" - ) + f"dn: {dn}\n" + "changetype: modify\n" + "replace: memberOf\n" + "memberOf: cn=twisted1,cn=groups,dc=md,dc=test\n" + "-\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( @@ -418,13 +406,11 @@ async def test_ldap_modify_dn( with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {dn}\n" - "changetype: modrdn\n" - "newrdn: cn=user2\n" - "deleteoldrdn: 1\n" - "newsuperior: ou=users,dc=md,dc=test\n" - ) + f"dn: {dn}\n" + "changetype: modrdn\n" + "newrdn: cn=user2\n" + "deleteoldrdn: 1\n" + "newsuperior: ou=users,dc=md,dc=test\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( @@ -465,13 +451,11 @@ async def test_ldap_modify_password_change( with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {dn}\n" - "changetype: modify\n" - "replace: userPassword\n" - f"userPassword: {new_password}\n" - "-\n" - ) + f"dn: {dn}\n" + "changetype: modify\n" + "replace: userPassword\n" + f"userPassword: {new_password}\n" + "-\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( @@ -535,24 +519,22 @@ async def test_ldap_modify_with_ap( async def try_modify() -> int: with tempfile.NamedTemporaryFile("w") as file: file.write( - ( - f"dn: {dn}\n" - "changetype: modify\n" - "replace: mail\n" - "mail: modme@student.of.life.edu\n" - "-\n" - "add: title\n" - "title: Grand Poobah\n" - "title: Grand Poobah1\n" - "title: Grand Poobah2\n" - "title: Grand Poobah3\n" - "-\n" - "add: jpegPhoto\n" - "jpegPhoto: modme.jpeg\n" - "-\n" - "delete: posixEmail\n" - "-\n" - ) + f"dn: {dn}\n" + "changetype: modify\n" + "replace: mail\n" + "mail: modme@student.of.life.edu\n" + "-\n" + "add: title\n" + "title: Grand Poobah\n" + "title: Grand Poobah1\n" + "title: Grand Poobah2\n" + "title: Grand Poobah3\n" + "-\n" + "add: jpegPhoto\n" + "jpegPhoto: modme.jpeg\n" + "-\n" + "delete: posixEmail\n" + "-\n" ) file.seek(0) proc = await asyncio.create_subprocess_exec( From feb5d980bda6cd44255ef6e39f221018d394b2a5 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Tue, 3 Jun 2025 17:30:41 +0300 Subject: [PATCH 02/25] refactor: convert to google task_508 --- .kerberos/config_server.py | 4 +- app/api/auth/oauth2.py | 28 +- app/api/auth/router.py | 50 ++-- app/api/auth/router_mfa.py | 64 +++-- app/api/auth/utils.py | 23 +- app/api/ldap_schema/attribute_type_router.py | 72 +++-- app/api/ldap_schema/object_class_router.py | 70 +++-- app/api/main/ap_router.py | 6 +- app/api/main/krb5_router.py | 32 ++- app/api/network/router.py | 84 ++++-- app/api/network/schema.py | 7 +- app/api/network/utils.py | 7 +- app/api/shadow/router.py | 16 +- app/config.py | 3 +- app/extra/scripts/check_ldap_principal.py | 7 +- .../scripts/principal_block_user_sync.py | 9 +- app/extra/scripts/uac_sync.py | 3 +- app/ioc.py | 47 ++-- app/ldap_protocol/dependency.py | 9 +- app/ldap_protocol/kerberos/__init__.py | 7 +- app/ldap_protocol/kerberos/base.py | 15 +- app/ldap_protocol/kerberos/client.py | 21 +- app/ldap_protocol/kerberos/utils.py | 12 +- app/ldap_protocol/ldap_requests/add.py | 7 +- app/ldap_protocol/ldap_requests/base.py | 9 +- app/ldap_protocol/ldap_requests/bind.py | 11 +- .../ldap_requests/bind_methods/base.py | 13 +- 
.../ldap_requests/bind_methods/sasl_gssapi.py | 65 +++-- .../ldap_requests/bind_methods/sasl_plain.py | 7 +- .../ldap_requests/bind_methods/simple.py | 7 +- app/ldap_protocol/ldap_requests/extended.py | 7 +- app/ldap_protocol/ldap_requests/search.py | 26 +- .../ldap_schema/attribute_type_crud.py | 67 +++-- .../ldap_schema/object_class_crud.py | 67 +++-- app/ldap_protocol/messages.py | 13 +- app/ldap_protocol/multifactor.py | 62 +++-- app/ldap_protocol/policies/access_policy.py | 21 +- app/ldap_protocol/policies/network_policy.py | 38 ++- app/ldap_protocol/policies/password_policy.py | 76 ++++-- app/ldap_protocol/server.py | 56 ++-- app/ldap_protocol/session_storage.py | 245 ++++++++++++------ app/ldap_protocol/user_account_control.py | 24 +- app/ldap_protocol/utils/cte.py | 9 +- app/ldap_protocol/utils/helpers.py | 40 ++- app/ldap_protocol/utils/queries.py | 74 ++++-- app/multidirectory.py | 9 +- app/schedule.py | 7 +- app/security.py | 16 +- tests/conftest.py | 14 +- tests/test_api/test_auth/test_router.py | 9 +- tests/test_api/test_main/test_kadmin.py | 50 ++-- 51 files changed, 1091 insertions(+), 554 deletions(-) diff --git a/.kerberos/config_server.py b/.kerberos/config_server.py index ca6a5158e..6c66fb493 100644 --- a/.kerberos/config_server.py +++ b/.kerberos/config_server.py @@ -108,7 +108,7 @@ async def get_princ(self, name: str) -> Principal | None: """Get principal. :param str name: principal - :return kadmin.Principal: Principal + :return: kadmin.Principal: Principal """ @abstractmethod @@ -248,7 +248,7 @@ async def get_princ(self, name: str) -> Principal: """Get principal. :param str name: principal - :return kadmin.Principal: Principal + :return: kadmin.Principal: Principal """ principal = await self._get_raw_principal(name) return Principal.model_validate(principal, from_attributes=True) diff --git a/app/api/auth/oauth2.py b/app/api/auth/oauth2.py index 4f2e9671a..c34345263 100644 --- a/app/api/auth/oauth2.py +++ b/app/api/auth/oauth2.py @@ -38,10 +38,13 @@ async def authenticate_user( ) -> User | None: """Get user and verify password. - :param AsyncSession session: sa session - :param str username: any str - :param str password: any str - :return User | None: User model (pydantic) + Args: + session (AsyncSession): sa session + username (str): any str + password (str): any str + + Returns: + User | None: User model (pydantic) """ user = await get_user(session, username) @@ -68,14 +71,17 @@ async def get_current_user( request's cookies, verifies the session, and returns the user schema. Makes a rekey of the session if necessary. 
- :param FromDishka[Settings] settings: settings - :param FromDishka[AsyncSession] session: db session - :param FromDishka[SessionStorage] session_storage: session storage - :param Request request: request - :param Response response: response + Args: + settings (FromDishka[Settings]): settings + session (FromDishka[AsyncSession]): db session + session_storage (FromDishka[SessionStorage]): session storage + request (Request): request + response (Response): response + user_agent (Annotated[str]): user agent :param Annotated[IPv4Address | IPv6Address] ip: ip address - :param Annotated[str] user_agent: user agent - :return UserSchema: user schema + + Returns: + UserSchema: user schema """ session_key = request.cookies.get("id", "") try: diff --git a/app/api/auth/router.py b/app/api/auth/router.py index 02acc54c7..a84e13068 100644 --- a/app/api/auth/router.py +++ b/app/api/auth/router.py @@ -68,20 +68,26 @@ async def login( \f :param Annotated[OAuth2Form, Depends form: login form - :param FromDishka[AsyncSession] session: db - :param FromDishka[Settings] settings: app settings - :param FromDishka[MultifactorAPI] mfa: mfa api wrapper - :param FromDishka[SessionStorage] storage: session storage - :param Response response: FastAPI response + + Args: + session (FromDishka[AsyncSession]): db + settings (FromDishka[Settings]): app settings + mfa (FromDishka[MultifactorAPI]): mfa api wrapper + storage (FromDishka[SessionStorage]): session storage + response (Response): FastAPI response :param Annotated[IPv4Address | IPv6Address, Depends ip: client ip - :raises HTTPException: 401 if incorrect username or password - :raises HTTPException: 403 if user not part of domain admins - :raises HTTPException: 403 if user account is disabled - :raises HTTPException: 403 if user account is expired - :raises HTTPException: 403 if ip is not provided - :raises HTTPException: 403 if user not part of network policy - :raises HTTPException: 426 if mfa required - :return None: None + + Raises: + HTTPException: 401 if incorrect username or password + HTTPException: 403 if user not part of domain admins + HTTPException: 403 if user account is disabled + HTTPException: 403 if user account is expired + HTTPException: 403 if ip is not provided + HTTPException: 403 if user not part of network policy + HTTPException: 426 if mfa required + + Returns: + None: None """ user = await authenticate_user(session, form.username, form.password) @@ -182,14 +188,20 @@ async def password_reset( `userPrincipalName`, `saMAccountName` or `DN` - **new_password**: password to set \f - :param FromDishka[AsyncSession] session: db - :param FromDishka[AbstractKadmin] kadmin: kadmin api + + Args: + session (FromDishka[AsyncSession]): db + kadmin (FromDishka[AbstractKadmin]): kadmin api :param Annotated[str, Body identity: reset target user :param Annotated[str, Body new_password: new password for user - :raises HTTPException: 404 if user not found - :raises HTTPException: 422 if password not valid - :raises HTTPException: 424 if kerberos password update failed - :return None: None + + Raises: + HTTPException: 404 if user not found + HTTPException: 422 if password not valid + HTTPException: 424 if kerberos password update failed + + Returns: + None: None """ user = await get_user(session, identity) diff --git a/app/api/auth/router_mfa.py b/app/api/auth/router_mfa.py index 72ce39632..da1b577b7 100644 --- a/app/api/auth/router_mfa.py +++ b/app/api/auth/router_mfa.py @@ -64,9 +64,13 @@ async def setup_mfa( """Set mfa credentials, rewrites if exists. 
\f - :param MFACreateRequest mfa: MuliFactor credentials - :param FromDishka[AsyncSession] session: db - :return bool: status + + Args: + mfa (MFACreateRequest): MuliFactor credentials + session (FromDishka[AsyncSession]): db + + Returns: + bool: status """ async with session.begin_nested(): await session.execute( @@ -117,7 +121,9 @@ async def get_mfa( """Get MFA creds. \f - :return MFAGetResponse: response. + + Returns: + MFAGetResponse: response. """ if not mfa_creds: mfa_creds = MFA_HTTP_Creds(Creds(None, None)) @@ -149,15 +155,21 @@ async def callback_mfa( Callback endpoint for MFA. \f - :param FromDishka[AsyncSession] session: db - :param FromDishka[SessionStorage] storage: session storage - :param FromDishka[Settings] settings: app settings - :param FromDishka[MFA_HTTP_Creds] mfa_creds: - creds for multifactor (http app) + + Args: + session (FromDishka[AsyncSession]): db + storage (FromDishka[SessionStorage]): session storage + settings (FromDishka[Settings]): app settings + mfa_creds (FromDishka[MFA_HTTP_Creds]): creds for multifactor + (http app) :param Annotated[IPv4Address | IPv6Address, Depends ip: client ip :param Annotated[str, Form access_token: token from multifactor callback - :raises HTTPException: if mfa not set up - :return RedirectResponse: on bypass or success + + Raises: + HTTPException: if mfa not set up + + Returns: + RedirectResponse: on bypass or success """ if not mfa_creds: raise HTTPException(status.HTTP_404_NOT_FOUND) @@ -207,19 +219,25 @@ async def two_factor_protocol( \f :param Annotated[OAuth2Form, Depends form: password form - :param Request request: FastAPI request - :param FromDishka[AsyncSession] session: db - :param FromDishka[MultifactorAPI] api: wrapper for MFA DAO - :param FromDishka[Settings] settings: app settings - :param FromDishka[SessionStorage] storage: redis storage - :param Response response: FastAPI response + + Args: + request (Request): FastAPI request + session (FromDishka[AsyncSession]): db + api (FromDishka[MultifactorAPI]): wrapper for MFA DAO + settings (FromDishka[Settings]): app settings + storage (FromDishka[SessionStorage]): redis storage + response (Response): FastAPI response :param Annotated[IPv4Address | IPv6Address, Depends ip: client ip - :raises HTTPException: Missing API credentials - :raises HTTPException: Invalid credentials - :raises HTTPException: network policy violation - :raises HTTPException: Multifactor error - :return MFAChallengeResponse: - {'status': 'pending', 'message': https://example.com}. + + Raises: + HTTPException: Missing API credentials + HTTPException: Invalid credentials + HTTPException: network policy violation + HTTPException: Multifactor error + + Returns: + MFAChallengeResponse: {'status': 'pending', 'message': + https://example.com}. """ if not api: raise HTTPException( diff --git a/app/api/auth/utils.py b/app/api/auth/utils.py index dd78b6400..dd3aeae02 100644 --- a/app/api/auth/utils.py +++ b/app/api/auth/utils.py @@ -18,8 +18,11 @@ def get_ip_from_request(request: Request) -> IPv4Address | IPv6Address: """Get IP address from request. - :param Request request: The incoming request object. - :return IPv4Address | None: The IP address or None. + Args: + request (Request): The incoming request object. + + Returns: + IPv4Address | None: The IP address or None. 
""" forwarded_for = request.headers.get("X-Forwarded-For") if forwarded_for: @@ -35,8 +38,11 @@ def get_ip_from_request(request: Request) -> IPv4Address | IPv6Address: def get_user_agent_from_request(request: Request) -> str: """Get user agent from request. - :param Request request: The incoming request object. - :return str: The user agent header. + Args: + request (Request): The incoming request object. + + Returns: + str: The user agent header. """ user_agent_header = request.headers.get("User-Agent") return user_agent_header if user_agent_header else "" @@ -56,10 +62,11 @@ async def create_and_set_session_key( Update the user's last logon time and set the appropriate cookies in the response. - :param User user: db user - :param AsyncSession session: db session - :param Settings settings: app settings - :param Response response: fastapi response object + Args: + user (User): db user + session (AsyncSession): db session + settings (Settings): app settings + response (Response): fastapi response object """ await set_last_logon_user(user, session, settings.TIMEZONE) diff --git a/app/api/ldap_schema/attribute_type_router.py b/app/api/ldap_schema/attribute_type_router.py index 1f68e9318..caa587a2b 100644 --- a/app/api/ldap_schema/attribute_type_router.py +++ b/app/api/ldap_schema/attribute_type_router.py @@ -39,9 +39,14 @@ async def create_one_attribute_type( """Create a new attribute type. \f - :param AttributeTypeSchema request_data: Data for creating attribute type. - :param FromDishka[AsyncSession] session: Database session. - :return None. + + Args: + request_data (AttributeTypeSchema): Data for creating attribute + type. + session (FromDishka[AsyncSession]): Database session. + + Returns: + None. """ await create_attribute_type( oid=request_data.oid, @@ -66,10 +71,16 @@ async def get_one_attribute_type( """Retrieve a one attribute types. \f - :param str attribute_type_name: name of the Attribute Type. - :param FromDishka[AsyncSession] session: Database session. - :raise HTTP_404_NOT_FOUND: If Attribute Type not found. - :return AttributeTypeSchema: One Attribute Type Schemas. + + Args: + attribute_type_name (str): name of the Attribute Type. + session (FromDishka[AsyncSession]): Database session. + + Raises: + HTTP_404_NOT_FOUND: If Attribute Type not found. + + Returns: + AttributeTypeSchema: One Attribute Type Schemas. """ attribute_type = await get_attribute_type_by_name( attribute_type_name, @@ -98,10 +109,14 @@ async def get_list_attribute_types_with_pagination( """Retrieve a list of all attribute types with paginate. \f - :param int page_number: number of page. - :param FromDishka[AsyncSession] session: Database session. - :param int page_size: number of items per page. - :return AttributeTypePaginationSchema: Paginator. + + Args: + page_number (int): number of page. + session (FromDishka[AsyncSession]): Database session. + page_size (int): number of items per page. + + Returns: + AttributeTypePaginationSchema: Paginator. """ params = PaginationParams( page_number=page_number, @@ -133,12 +148,20 @@ async def modify_one_attribute_type( """Modify an Attribute Type. \f - :param str attribute_type_name: name of the attribute type for modifying. - :param AttributeTypeUpdateSchema request_data: Changed data. - :param FromDishka[AsyncSession] session: Database session. - :raise HTTP_404_NOT_FOUND: If attribute type not found. - :raise HTTP_400_BAD_REQUEST: If attribute type is system->cannot be changed - :return None. 
+ + Args: + attribute_type_name (str): name of the attribute type for + modifying. + request_data (AttributeTypeUpdateSchema): Changed data. + session (FromDishka[AsyncSession]): Database session. + + Raises: + HTTP_404_NOT_FOUND: If attribute type not found. + HTTP_400_BAD_REQUEST: If attribute type is system->cannot be + changed + + Returns: + None. """ attribute_type = await get_attribute_type_by_name( attribute_type_name, @@ -176,10 +199,17 @@ async def delete_bulk_attribute_types( """Delete attribute types by their names. \f - :param list[str] attribute_types_names: List of attribute types names. - :param FromDishka[AsyncSession] session: Database session. - :raise HTTP_400_BAD_REQUEST: If nothing to delete. - :return None: None + + Args: + attribute_types_names (list[str]): List of attribute types + names. + session (FromDishka[AsyncSession]): Database session. + + Raises: + HTTP_400_BAD_REQUEST: If nothing to delete. + + Returns: + None: None """ if not attribute_types_names: raise HTTPException( diff --git a/app/api/ldap_schema/object_class_router.py b/app/api/ldap_schema/object_class_router.py index 64c04c294..fd3359e45 100644 --- a/app/api/ldap_schema/object_class_router.py +++ b/app/api/ldap_schema/object_class_router.py @@ -37,9 +37,14 @@ async def create_one_object_class( """Create a new Object Class. \f - :param ObjectClassSchema request_data: Data for creating Object Class. - :param FromDishka[AsyncSession] session: Database session. - :return None. + + Args: + request_data (ObjectClassSchema): Data for creating Object + Class. + session (FromDishka[AsyncSession]): Database session. + + Returns: + None. """ await create_object_class( oid=request_data.oid, @@ -65,10 +70,16 @@ async def get_one_object_class( """Retrieve a one object class. \f - :param str object_class_name: name of the Object Class. - :param FromDishka[AsyncSession] session: Database session. - :raise HTTP_404_NOT_FOUND: If Object Class not found. - :return ObjectClassSchema: One Object Class Schemas. + + Args: + object_class_name (str): name of the Object Class. + session (FromDishka[AsyncSession]): Database session. + + Raises: + HTTP_404_NOT_FOUND: If Object Class not found. + + Returns: + ObjectClassSchema: One Object Class Schemas. """ object_class = await get_object_class_by_name( object_class_name, @@ -97,10 +108,14 @@ async def get_list_object_classes_with_pagination( """Retrieve a list of all object classes with paginate. \f - :param int page_number: number of page. - :param FromDishka[AsyncSession] session: Database session. - :param int page_size: number of items per page. - :return ObjectClassPaginationSchema: Paginator. + + Args: + page_number (int): number of page. + session (FromDishka[AsyncSession]): Database session. + page_size (int): number of items per page. + + Returns: + ObjectClassPaginationSchema: Paginator. """ params = PaginationParams( page_number=page_number, @@ -132,12 +147,19 @@ async def modify_one_object_class( """Modify an Object Class. \f - :param str object_class_name: Name of the Object Class for modifying. - :param ObjectClassUpdateSchema request_data: Changed data. - :param FromDishka[AsyncSession] session: Database session. - :raise HTTP_404_NOT_FOUND: If nothing to delete. - :raise HTTP_400_BAD_REQUEST: If object class is system->cannot be changed - :return None. + + Args: + object_class_name (str): Name of the Object Class for modifying. + request_data (ObjectClassUpdateSchema): Changed data. + session (FromDishka[AsyncSession]): Database session. 
+ + Raises: + HTTP_404_NOT_FOUND: If nothing to delete. + HTTP_400_BAD_REQUEST: If object class is system->cannot be + changed + + Returns: + None. """ object_class = await get_object_class_by_name(object_class_name, session) if not object_class: @@ -170,10 +192,16 @@ async def delete_bulk_object_classes( """Delete Object Classes by their names. \f - :param list[str] object_classes_names: List of Object Classes names. - :param FromDishka[AsyncSession] session: Database session. - :raise HTTP_400_BAD_REQUEST: If nothing to delete. - :return None: None + + Args: + object_classes_names (list[str]): List of Object Classes names. + session (FromDishka[AsyncSession]): Database session. + + Raises: + HTTP_400_BAD_REQUEST: If nothing to delete. + + Returns: + None: None """ if not object_classes_names: raise HTTPException( diff --git a/app/api/main/ap_router.py b/app/api/main/ap_router.py index e315a3477..2045b50bf 100644 --- a/app/api/main/ap_router.py +++ b/app/api/main/ap_router.py @@ -27,8 +27,10 @@ async def get_access_policies( """Get APs. \f - :param AccessPolicySchema policy: ap - :param FromDishka[AsyncSession] session: db. + + Args: + policy (AccessPolicySchema): ap + session (FromDishka[AsyncSession]): db. """ return [ MaterialAccessPolicySchema( diff --git a/app/api/main/krb5_router.py b/app/api/main/krb5_router.py index bf0b98cbd..d877e917b 100644 --- a/app/api/main/krb5_router.py +++ b/app/api/main/krb5_router.py @@ -69,7 +69,9 @@ async def setup_krb_catalogue( :param Annotated[AsyncSession, Depends session: db :param Annotated[EmailStr, Body mail: krbadmin email :param Annotated[SecretStr, Body krbadmin_password: pw - :raises HTTPException: on conflict + + Raises: + HTTPException: on conflict """ base_dn_list = await get_base_directories(session) base_dn = base_dn_list[0].path_dn @@ -274,7 +276,9 @@ async def ktadd( """Create keytab from kadmin server. :param Annotated[LDAPSession, Depends ldap_session: ldap - :return bytes: file + + Returns: + bytes: file """ try: response = await kadmin.ktadd(names) @@ -298,7 +302,9 @@ async def get_krb_status( :param Annotated[AsyncSession, Depends session: db :param Annotated[LDAPSession, Depends ldap_session: ldap - :return KerberosState: state + + Returns: + KerberosState: state """ db_state = await get_krb_server_state(session) try: @@ -323,7 +329,9 @@ async def add_principal( \f :param Annotated[str, Body principal_name: upn :param Annotated[LDAPSession, Depends ldap_session: ldap - :raises HTTPException: on failed kamin request. + + Raises: + HTTPException: on failed kamin request. """ try: await kadmin.add_principal(f"{primary}/{instance}", None) @@ -346,7 +354,9 @@ async def rename_principal( :param Annotated[str, Body principal_name: upn :param Annotated[LIMITED_STR, Body principal_new_name: _description_ :param Annotated[LDAPSession, Depends ldap_session: ldap - :raises HTTPException: on failed kamin request. + + Raises: + HTTPException: on failed kamin request. """ try: await kadmin.rename_princ(principal_name, principal_new_name) @@ -369,7 +379,9 @@ async def reset_principal_pw( :param Annotated[str, Body principal_name: upn :param Annotated[LIMITED_STR, Body new_password: _description_ :param Annotated[LDAPSession, Depends ldap_session: ldap - :raises HTTPException: on failed kamin request. + + Raises: + HTTPException: on failed kamin request. 
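The hunks above, like the rest of this patch, swap Sphinx-style :param/:return/:raises fields for Google-style Args/Returns/Raises sections; presumably this pairs with a ruff pydocstyle convention = "google" setting, though that configuration is not shown in this section. For reference, a minimal sketch of the target convention on a hypothetical helper (not a function from this repository):

def get_attribute(name: str, default: str | None = None) -> str | None:
    """Return a single attribute value by name.

    Args:
        name (str): attribute name to look up.
        default (str | None): value returned when the attribute is missing.

    Returns:
        str | None: the attribute value, or default if not found.

    Raises:
        ValueError: if name is empty.
    """
    if not name:
        raise ValueError("name must not be empty")
    return default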
""" try: await kadmin.change_principal_password(principal_name, new_password) @@ -389,8 +401,12 @@ async def delete_principal( \f :param Annotated[str, Body principal_name: upn - :param FromDishka[AbstractKadmin] kadmin: _description_ - :raises HTTPException: on failed kamin request + + Args: + kadmin (FromDishka[AbstractKadmin]): _description_ + + Raises: + HTTPException: on failed kamin request """ try: await kadmin.del_principal(principal_name) diff --git a/app/api/network/router.py b/app/api/network/router.py index 661bbe5cf..08492213b 100644 --- a/app/api/network/router.py +++ b/app/api/network/router.py @@ -44,10 +44,16 @@ async def add_network_policy( """Add policy. \f - :param Policy policy: policy to add - :raises HTTPException: 422 invalid group DN - :raises HTTPException: 422 Entry already exists - :return PolicyResponse: Ready policy + + Args: + policy (Policy): policy to add + + Raises: + HTTPException: 422 invalid group DN + HTTPException: 422 Entry already exists + + Returns: + PolicyResponse: Ready policy """ new_policy = NetworkPolicy( name=policy.name, @@ -110,7 +116,9 @@ async def get_list_network_policies( """Get network. \f - :return list[PolicyResponse]: all policies + + Returns: + list[PolicyResponse]: all policies """ groups = selectinload(NetworkPolicy.groups).selectinload(Group.directory) mfa_groups = selectinload(NetworkPolicy.mfa_groups).selectinload( @@ -157,12 +165,18 @@ async def delete_network_policy( """Delete policy. \f - :param int policy_id: id - :param User user: requires login - :raises HTTPException: 404 - :raises HTTPException: 422 On last active policy, - at least 1 should be in database. - :return bool: status of delete + + Args: + policy_id (int): id + user (User): requires login + + Raises: + HTTPException: 404 + HTTPException: 422 On last active policy, at least 1 should be + in database. + + Returns: + bool: status of delete """ policy = await session.get(NetworkPolicy, policy_id, with_for_update=True) @@ -199,12 +213,18 @@ async def switch_network_policy( - **policy_id**: int, policy to switch \f - :param int policy_id: id - :param User user: requires login - :raises HTTPException: 404 - :raises HTTPException: 422 On last active policy, - at least 1 should be active - :return bool: status of update + + Args: + policy_id (int): id + user (User): requires login + + Raises: + HTTPException: 404 + HTTPException: 422 On last active policy, at least 1 should be + active + + Returns: + bool: status of update """ policy = await session.get(NetworkPolicy, policy_id, with_for_update=True) @@ -227,11 +247,17 @@ async def update_network_policy( """Update network policy. 
\f - :param PolicyUpdate policy: update request - :raises HTTPException: 404 policy not found - :raises HTTPException: 422 Invalid group DN - :raises HTTPException: 422 Entry already exists - :return PolicyResponse: Policy from database + + Args: + policy (PolicyUpdate): update request + + Raises: + HTTPException: 404 policy not found + HTTPException: 422 Invalid group DN + HTTPException: 422 Entry already exists + + Returns: + PolicyResponse: Policy from database """ selected_policy = await session.get( NetworkPolicy, @@ -311,10 +337,16 @@ async def swap_network_policy( - **first_policy_id**: policy to swap - **second_policy_id**: policy to swap \f - :param int first_policy_id: policy to swap - :param int second_policy_id: policy to swap - :raises HTTPException: 404 - :return SwapResponse: policy new priorities + + Args: + first_policy_id (int): policy to swap + second_policy_id (int): policy to swap + + Raises: + HTTPException: 404 + + Returns: + SwapResponse: policy new priorities """ policy1 = await session.get( NetworkPolicy, diff --git a/app/api/network/schema.py b/app/api/network/schema.py index 5c62ae8b7..9fe7a35d9 100644 --- a/app/api/network/schema.py +++ b/app/api/network/schema.py @@ -79,8 +79,11 @@ def netmasks_serialize( ) -> list[str | dict]: """Serialize netmasks to list. - :param IPv4IntefaceListType netmasks: ip masks - :return list[str | dict]: ready to json serialized + Args: + netmasks (IPv4IntefaceListType): ip masks + + Returns: + list[str | dict]: ready to json serialized """ values: list[str | dict] = [] diff --git a/app/api/network/utils.py b/app/api/network/utils.py index c5a96370e..77db7dda3 100644 --- a/app/api/network/utils.py +++ b/app/api/network/utils.py @@ -14,8 +14,11 @@ async def check_policy_count(session: AsyncSession) -> None: """Check if policy count euqals 1. - :param AsyncSession session: db - :raises HTTPException: 422 + Args: + session (AsyncSession): db + + Raises: + HTTPException: 422 """ count = await session.scalars( ( diff --git a/app/api/shadow/router.py b/app/api/shadow/router.py index b45386950..f93ebec6f 100644 --- a/app/api/shadow/router.py +++ b/app/api/shadow/router.py @@ -87,13 +87,19 @@ async def sync_password( - **principal**: user upn - **new_password**: password to set \f - :param FromDishka[AsyncSession] session: db - :param FromDishka[AbstractKadmin] kadmin: kadmin api + + Args: + session (FromDishka[AsyncSession]): db + kadmin (FromDishka[AbstractKadmin]): kadmin api :param Annotated[str, Body principal: reset target user :param Annotated[str, Body new_password: new password for user - :raises HTTPException: 404 if user not found - :raises HTTPException: 422 if password not valid - :return None: None + + Raises: + HTTPException: 404 if user not found + HTTPException: 422 if password not valid + + Returns: + None: None """ user = await get_user(session, principal) diff --git a/app/config.py b/app/config.py index 53c86fc61..f47b62dfb 100644 --- a/app/config.py +++ b/app/config.py @@ -133,7 +133,8 @@ def create_tz(cls, tz: str) -> ZoneInfo: # noqa: N805 def MFA_API_URI(self) -> str: # noqa: N802 """Multifactor API url. 
- :return str: url + Returns: + str: url """ if self.MFA_API_SOURCE == "dev": return "https://api.multifactor.dev" diff --git a/app/extra/scripts/check_ldap_principal.py b/app/extra/scripts/check_ldap_principal.py index cc13bc4dc..e97d7e740 100644 --- a/app/extra/scripts/check_ldap_principal.py +++ b/app/extra/scripts/check_ldap_principal.py @@ -25,9 +25,10 @@ async def check_ldap_principal( ) -> None: """Check ldap principal and keytab existence. - :param AbstractKadmin kadmin: kadmin - :param AsyncSession session: db - :param Settings settings: settings + Args: + kadmin (AbstractKadmin): kadmin + session (AsyncSession): db + settings (Settings): settings """ logger.info("Checking ldap principal and keytab existence.") diff --git a/app/extra/scripts/principal_block_user_sync.py b/app/extra/scripts/principal_block_user_sync.py index e42ce8473..3aeccaec5 100644 --- a/app/extra/scripts/principal_block_user_sync.py +++ b/app/extra/scripts/principal_block_user_sync.py @@ -91,9 +91,12 @@ async def principal_block_sync( def _find_krb_exp_attr(directory: Directory) -> Attribute | None: """Find krbprincipalexpiration attribute in directory. - :param Directory directory: the directory object - :return Atrribute | None: the attribute with - the name 'krbprincipalexpiration', or None if not found. + Args: + directory (Directory): the directory object + + Returns: + Atrribute | None: the attribute with the name + 'krbprincipalexpiration', or None if not found. """ for attr in directory.attributes: if attr.name == "krbprincipalexpiration": diff --git a/app/extra/scripts/uac_sync.py b/app/extra/scripts/uac_sync.py index d5ccf8454..2b911431c 100644 --- a/app/extra/scripts/uac_sync.py +++ b/app/extra/scripts/uac_sync.py @@ -22,7 +22,8 @@ async def disable_accounts( ) -> None: """Update userAccountControl attr. - :param AsyncSession session: db + Args: + session (AsyncSession): db Original query: update "Attributes" a diff --git a/app/ioc.py b/app/ioc.py index 9e4c1771d..88b3ac0af 100644 --- a/app/ioc.py +++ b/app/ioc.py @@ -96,9 +96,12 @@ async def get_kadmin_http( ) -> AsyncIterator[KadminHTTPClient]: """Get kadmin class, inherits from AbstractKadmin. - :param Settings settings: app settings - :param AsyncSessionMaker session_maker: session maker - :return AsyncIterator[AbstractKadmin]: kadmin with client + Args: + settings (Settings): app settings + session_maker (AsyncSessionMaker): session maker + + Returns: + AsyncIterator[AbstractKadmin]: kadmin with client :yield Iterator[AsyncIterator[AbstractKadmin]]: kadmin """ limits = httpx.Limits( @@ -121,9 +124,12 @@ async def get_kadmin( ) -> AbstractKadmin: """Get kadmin class, inherits from AbstractKadmin. - :param Settings settings: app settings - :param AsyncSessionMaker session_maker: session maker - :return AsyncIterator[AbstractKadmin]: kadmin with client + Args: + settings (Settings): app settings + session_maker (AsyncSessionMaker): session maker + + Returns: + AsyncIterator[AbstractKadmin]: kadmin with client :yield Iterator[AsyncIterator[AbstractKadmin]]: kadmin """ return kadmin_class(client) @@ -217,7 +223,9 @@ async def get_auth(self, session: AsyncSession) -> Creds | None: """Admin creds get. 
:param Annotated[AsyncSession, Depends session: session - :return MFA_HTTP_Creds: optional creds + + Returns: + MFA_HTTP_Creds: optional creds """ return await get_creds(session, "mfa_key", "mfa_secret") @@ -225,8 +233,11 @@ async def get_auth(self, session: AsyncSession) -> Creds | None: async def get_auth_ldap(self, session: AsyncSession) -> Creds | None: """Admin creds get. - :param AsyncSession session: db - :return MFA_LDAP_Creds: optional creds + Args: + session (AsyncSession): db + + Returns: + MFA_LDAP_Creds: optional creds """ return await get_creds(session, "mfa_key_ldap", "mfa_secret_ldap") @@ -260,9 +271,12 @@ async def get_http_mfa( ) -> MultifactorAPI | None: """Get api from DI. - :param httpx.AsyncClient client: httpx client - :param Creds credentials: creds - :return MultifactorAPI: mfa integration + Args: + client (httpx.AsyncClient): httpx client + credentials (Creds): creds + + Returns: + MultifactorAPI: mfa integration """ if not credentials or not credentials.key or not credentials.secret: return None @@ -282,9 +296,12 @@ async def get_ldap_mfa( ) -> LDAPMultiFactorAPI | None: """Get api from DI. - :param httpx.AsyncClient client: httpx client - :param Creds credentials: creds - :return MultifactorAPI: mfa integration + Args: + client (httpx.AsyncClient): httpx client + credentials (Creds): creds + + Returns: + MultifactorAPI: mfa integration """ if not credentials or not credentials.key or not credentials.secret: return None diff --git a/app/ldap_protocol/dependency.py b/app/ldap_protocol/dependency.py index 8f903bcd2..8f55b905b 100644 --- a/app/ldap_protocol/dependency.py +++ b/app/ldap_protocol/dependency.py @@ -15,9 +15,12 @@ async def resolve_deps[T: Callable](func: T, container: AsyncContainer) -> T: """Provide async dependencies. - :param T func: Awaitable - :param AsyncContainer container: IoC container - :return T: Awaitable + Args: + func (T): Awaitable + container (AsyncContainer): IoC container + + Returns: + T: Awaitable """ hints = get_type_hints(func) del hints["return"] diff --git a/app/ldap_protocol/kerberos/__init__.py b/app/ldap_protocol/kerberos/__init__.py index d20ac4faa..09c9d30ab 100644 --- a/app/ldap_protocol/kerberos/__init__.py +++ b/app/ldap_protocol/kerberos/__init__.py @@ -14,8 +14,11 @@ async def get_kerberos_class(session: AsyncSession) -> type[AbstractKadmin]: """Get kerberos server state. - :param AsyncSession session: db - :return type[KerberosMDAPIClient] | type[StubKadminMDADPIClient]: api + Args: + session (AsyncSession): db + + Returns: + type[KerberosMDAPIClient] | type[StubKadminMDADPIClient]: api """ if await get_krb_server_state(session) == KerberosState.READY: return KerberosMDAPIClient diff --git a/app/ldap_protocol/kerberos/base.py b/app/ldap_protocol/kerberos/base.py index baeaece4f..467cacdff 100644 --- a/app/ldap_protocol/kerberos/base.py +++ b/app/ldap_protocol/kerberos/base.py @@ -39,7 +39,8 @@ class AbstractKadmin(ABC): def __init__(self, client: httpx.AsyncClient) -> None: """Set client. - :param httpx.AsyncClient client: httpx + Args: + client (httpx.AsyncClient): httpx """ self.client = client @@ -209,8 +210,11 @@ async def rename_princ(self, name: str, new_name: str) -> None: ... async def get_status(self, wait_for_positive: bool = False) -> bool | None: """Get status of setup. 
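The resolve_deps hunk above builds a keyword map from a callable's type hints and pulls the matching instances out of the dishka container. A self-contained sketch of the same idea, with a plain dict standing in for the container (the classes and registry below are illustrative only):

from typing import Callable, get_type_hints


class Database: ...


class Settings: ...


# Stand-in for the IoC container: annotated type -> ready instance.
REGISTRY = {Database: Database(), Settings: Settings()}


def resolve_kwargs(func: Callable) -> dict:
    """Map parameter names to instances looked up by their annotated type."""
    hints = get_type_hints(func)
    hints.pop("return", None)
    return {name: REGISTRY[hint] for name, hint in hints.items()}


def handler(db: Database, settings: Settings) -> None: ...


kwargs = resolve_kwargs(handler)
assert isinstance(kwargs["db"], Database) and isinstance(kwargs["settings"], Settings)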
- :param bool wait_for_positive: wait for positive status - :return bool | None: status or None if max tries achieved + Args: + wait_for_positive (bool): wait for positive status + + Returns: + bool | None: status or None if max tries achieved """ response = await self.client.get("/setup/status") status = response.json() @@ -230,8 +234,9 @@ async def force_princ_pw_change(self, name: str) -> None: ... async def ldap_principal_setup(self, name: str, path: str) -> None: """LDAP principal setup. - :param str ldap_principal_name: ldap principal name - :param str ldap_keytab_path: ldap keytab path + Args: + ldap_principal_name (str): ldap principal name + ldap_keytab_path (str): ldap keytab path """ response = await self.client.get("/principal", params={"name": name}) if response.status_code == 200: diff --git a/app/ldap_protocol/kerberos/client.py b/app/ldap_protocol/kerberos/client.py index d4b3a5b04..28ab0bd71 100644 --- a/app/ldap_protocol/kerberos/client.py +++ b/app/ldap_protocol/kerberos/client.py @@ -87,8 +87,11 @@ async def rename_princ(self, name: str, new_name: str) -> None: async def ktadd(self, names: list[str]) -> httpx.Response: """Ktadd build request for stream and return response. - :param list[str] names: principals - :return httpx.Response: stream + Args: + names (list[str]): principals + + Returns: + httpx.Response: stream """ request = self.client.build_request( "POST", @@ -106,8 +109,11 @@ async def ktadd(self, names: list[str]) -> httpx.Response: async def lock_principal(self, name: str) -> None: """Lock princ. - :param str name: upn - :raises KRBAPIError: on error + Args: + name (str): upn + + Raises: + KRBAPIError: on error """ response = await self.client.post( "principal/lock", @@ -120,8 +126,11 @@ async def lock_principal(self, name: str) -> None: async def force_princ_pw_change(self, name: str) -> None: """Force mark password change for principal. - :param str name: pw - :raises KRBAPIError: err + Args: + name (str): pw + + Raises: + KRBAPIError: err """ response = await self.client.post( "principal/force_reset", diff --git a/app/ldap_protocol/kerberos/utils.py b/app/ldap_protocol/kerberos/utils.py index 588e85a0c..4b6f5681c 100644 --- a/app/ldap_protocol/kerberos/utils.py +++ b/app/ldap_protocol/kerberos/utils.py @@ -15,8 +15,11 @@ def logger_wraps(is_stub: bool = False) -> Callable: """Log kadmin calls. - :param bool is_stub: flag to change logs, defaults to False - :return Callable: any method + Args: + is_stub (bool): flag to change logs, defaults to False + + Returns: + Callable: any method """ def wrapper(func: Callable) -> Callable: @@ -91,8 +94,9 @@ async def get_krb_server_state(session: AsyncSession) -> "KerberosState": async def unlock_principal(name: str, session: AsyncSession) -> None: """Unlock principal. - :param str name: upn - :param AsyncSession session: db + Args: + name (str): upn + session (AsyncSession): db """ subquery = ( select(Directory.id) diff --git a/app/ldap_protocol/ldap_requests/add.py b/app/ldap_protocol/ldap_requests/add.py index 63d9be776..0e999fcea 100644 --- a/app/ldap_protocol/ldap_requests/add.py +++ b/app/ldap_protocol/ldap_requests/add.py @@ -383,9 +383,12 @@ def from_dict( ) -> "AddRequest": """Create AddRequest from dict. 
- :param str entry: entry + Args: + entry (str): entry :param dict[str, list[str]] attributes: dict of attrs - :return AddRequest: instance + + Returns: + AddRequest: instance """ return AddRequest( entry=entry, diff --git a/app/ldap_protocol/ldap_requests/base.py b/app/ldap_protocol/ldap_requests/base.py index 462e2960f..88dfcf99e 100644 --- a/app/ldap_protocol/ldap_requests/base.py +++ b/app/ldap_protocol/ldap_requests/base.py @@ -63,9 +63,12 @@ async def _handle_api( ) -> list[BaseResponse]: """Hanlde response with api user. - :param DBUser user: user from db - :param AsyncSession session: db session - :return list[BaseResponse]: list of handled responses + Args: + user (DBUser): user from db + session (AsyncSession): db session + + Returns: + list[BaseResponse]: list of handled responses """ handler = await resolve_deps(func=self.handle, container=container) ldap_session = await container.get(LDAPSession) diff --git a/app/ldap_protocol/ldap_requests/bind.py b/app/ldap_protocol/ldap_requests/bind.py index 4d12470e6..ec78a0983 100644 --- a/app/ldap_protocol/ldap_requests/bind.py +++ b/app/ldap_protocol/ldap_requests/bind.py @@ -111,10 +111,13 @@ async def check_mfa( ) -> bool: """Check mfa api. - :param User user: db user - :param LDAPSession ldap_session: ldap session - :param AsyncSession session: db session - :return bool: response + Args: + user (User): db user + ldap_session (LDAPSession): ldap session + session (AsyncSession): db session + + Returns: + bool: response """ if api is None: return False diff --git a/app/ldap_protocol/ldap_requests/bind_methods/base.py b/app/ldap_protocol/ldap_requests/bind_methods/base.py index 756cdf61f..b7e47f147 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/base.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/base.py @@ -59,11 +59,14 @@ def __str__(self) -> str: def get_bad_response(error_message: LDAPBindErrors) -> BindResponse: """Generate BindResponse object with an invalid credentials error. - :param LDAPBindErrors error_message: Error message to include in the - response - :return BindResponse: A response object with the result code set to - INVALID_CREDENTIALS, an empty matchedDN, and the - provided error message + Args: + error_message (LDAPBindErrors): Error message to include in the + response + + Returns: + BindResponse: A response object with the result code set to + INVALID_CREDENTIALS, an empty matchedDN, and the provided error + message """ return BindResponse( result_code=LDAPCodes.INVALID_CREDENTIALS, diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py index 40c18a530..372a06089 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py @@ -83,14 +83,17 @@ def is_valid(self, user: User | None) -> bool: # noqa: ARG002 """Check if GSSAPI token is valid. :param User | None user: indb user - :return bool: status + + Returns: + bool: status """ return True def is_anonymous(self) -> bool: """Check if auth is anonymous. - :return bool: status + Returns: + bool: status """ return False @@ -98,8 +101,11 @@ def is_anonymous(self) -> bool: def from_data(cls, data: list[ASN1Row]) -> "SaslGSSAPIAuthentication": """Get auth from data. 
- :param list[ASN1Row] data: data - :return SaslGSSAPIAuthentication + Args: + data (list[ASN1Row]): data + + Returns: + SaslGSSAPIAuthentication """ return cls( ticket=data[1].value if len(data) > 1 else b"", @@ -112,8 +118,9 @@ async def _init_security_context( ) -> None: """Init security context. - :param AsyncSession session: db session - :param Settings settings: settings + Args: + session (AsyncSession): db session + settings (Settings): settings """ base_dn_list = await get_base_directories(session) base_dn = base_dn_list[0].name @@ -140,8 +147,11 @@ def _handle_ticket( ) -> GSSAPIAuthStatus: """Handle the ticket and make gssapi step. - :param gssapi.SecurityContext server_ctx: GSSAPI security context - :return GSSAPIAuthStatus: status + Args: + server_ctx (gssapi.SecurityContext): GSSAPI security context + + Returns: + GSSAPIAuthStatus: status """ try: out_token = server_ctx.step(self.ticket) @@ -153,9 +163,12 @@ def _handle_ticket( def _validate_security_layer(self, client_layer: GSSAPISL) -> bool: """Validate security layer. - :param int client_layer: client security layer - :param Settings settings: settings - :return bool: validate result + Args: + client_layer (int): client security layer + settings (Settings): settings + + Returns: + bool: validate result """ supported = GSSAPISL.SUPPORTED_SECURITY_LAYERS return (client_layer & supported) == client_layer @@ -166,9 +179,12 @@ def _handle_final_client_message( ) -> GSSAPIAuthStatus: """Handle final client message. - :param gssapi.SecurityContext server_ctx: GSSAPI security context - :param Settings settings: settings - :return GSSAPIAuthStatus: status + Args: + server_ctx (gssapi.SecurityContext): GSSAPI security context + settings (Settings): settings + + Returns: + GSSAPIAuthStatus: status """ try: unwrap_message = server_ctx.unwrap(self.ticket) @@ -195,9 +211,12 @@ def _generate_final_message( ) -> bytes: """Generate final wrap message. - :param gssapi.SecurityContext server_ctx: gssapi context - :param Settings settings: settings - :return bytes: message + Args: + server_ctx (gssapi.SecurityContext): gssapi context + settings (Settings): settings + + Returns: + bytes: message """ max_size = settings.GSSAPI_MAX_OUTPUT_TOKEN_SIZE if GSSAPISL.SUPPORTED_SECURITY_LAYERS == GSSAPISL.NO_SECURITY: @@ -219,9 +238,10 @@ async def step( ) -> BindResponse | None: """GSSAPI step. - :param AsyncSession session: db session - :param LDAPSession ldap_session: ldap session - :param Settings settings: settings + Args: + session (AsyncSession): db session + ldap_session (LDAPSession): ldap session + settings (Settings): settings """ self._ldap_session = ldap_session @@ -265,8 +285,9 @@ async def get_user( # type: ignore ) -> User | None: """Get user. - :param gssapi.SecurityContext ctx: gssapi context - :param AsyncSession session: db session + Args: + ctx (gssapi.SecurityContext): gssapi context + session (AsyncSession): db session """ ctx = self._ldap_session.gssapi_security_context if not ctx: diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py index e8c42cd87..cd16c1c11 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py @@ -27,7 +27,9 @@ def is_valid(self, user: User | None) -> bool: """Check if pwd is valid for user. 
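The _validate_security_layer hunk above keeps the core check (client_layer & supported) == client_layer: every security-layer bit the client requests must be present in the server's supported mask. A small sketch of that subset test with stand-in flag values (the project's GSSAPISL enum is not reproduced here):

from enum import IntFlag


class SecurityLayer(IntFlag):
    # Illustrative bit values, not the project's GSSAPISL definition.
    NO_SECURITY = 1
    INTEGRITY_PROTECTION = 2
    CONFIDENTIALITY = 4


SUPPORTED = SecurityLayer.INTEGRITY_PROTECTION | SecurityLayer.CONFIDENTIALITY


def layer_is_valid(client_layer: SecurityLayer) -> bool:
    """Return True if every bit requested by the client is supported."""
    return (client_layer & SUPPORTED) == client_layer


assert layer_is_valid(SecurityLayer.CONFIDENTIALITY)
assert not layer_is_valid(SecurityLayer.NO_SECURITY | SecurityLayer.CONFIDENTIALITY)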
:param User | None user: indb user - :return bool: status + + Returns: + bool: status """ password = getattr(user, "password", None) if password is not None: @@ -40,7 +42,8 @@ def is_valid(self, user: User | None) -> bool: def is_anonymous(self) -> bool: """Check if auth is anonymous. - :return bool: status + Returns: + bool: status """ return False diff --git a/app/ldap_protocol/ldap_requests/bind_methods/simple.py b/app/ldap_protocol/ldap_requests/bind_methods/simple.py index 97cef77e6..44e684ca6 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/simple.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/simple.py @@ -24,7 +24,9 @@ def is_valid(self, user: User | None) -> bool: """Check if pwd is valid for user. :param User | None user: indb user - :return bool: status + + Returns: + bool: status """ password = getattr(user, "password", None) if password is not None: @@ -34,7 +36,8 @@ def is_valid(self, user: User | None) -> bool: def is_anonymous(self) -> bool: """Check if auth is anonymous. - :return bool: status + Returns: + bool: status """ return not self.password diff --git a/app/ldap_protocol/ldap_requests/extended.py b/app/ldap_protocol/ldap_requests/extended.py index bb445f0e3..7459ba7e4 100644 --- a/app/ldap_protocol/ldap_requests/extended.py +++ b/app/ldap_protocol/ldap_requests/extended.py @@ -315,8 +315,11 @@ async def handle( def from_data(cls, data: list[ASN1Row]) -> "ExtendedRequest": """Create extended request from asn.1 decoded string. - :param ASN1Row data: any data - :return ExtendedRequest: universal request + Args: + data (ASN1Row): any data + + Returns: + ExtendedRequest: universal request """ oid = data[0].value ext_request = EXTENDED_REQUEST_OID_MAP[oid] diff --git a/app/ldap_protocol/ldap_requests/search.py b/app/ldap_protocol/ldap_requests/search.py index d2f070d48..d7213a458 100644 --- a/app/ldap_protocol/ldap_requests/search.py +++ b/app/ldap_protocol/ldap_requests/search.py @@ -167,7 +167,8 @@ async def get_root_dse( ) -> defaultdict[str, list[str]]: """Get RootDSE. - :return defaultdict[str, list[str]]: queried attrs + Returns: + defaultdict[str, list[str]]: queried attrs """ data = defaultdict(list) domain_query = ( @@ -222,9 +223,12 @@ async def get_root_dse( def cast_filter(self) -> UnaryExpression | ColumnElement: """Convert asn1 row filter_ to sqlalchemy obj. - :param ASN1Row filter_: requested filter_ - :param AsyncSession session: sa session - :return UnaryExpression: condition + Args: + filter_ (ASN1Row): requested filter_ + session (AsyncSession): sa session + + Returns: + UnaryExpression: condition """ return cast_filter2sql(self.filter) @@ -254,8 +258,9 @@ async def get_result( ) -> AsyncGenerator[SearchResultEntry | SearchResultDone, None]: """Create response. - :param bool user_logged: is user in session - :param AsyncSession session: sa session + Args: + user_logged (bool): is user in session + session (AsyncSession): sa session :yield SearchResult: search result """ is_root_dse = self.scope == Scope.BASE_OBJECT and not self.base_object @@ -396,9 +401,12 @@ async def paginate_query( ) -> tuple[Select, int, int]: """Paginate query. 
- :param _type_ query: _description_ - :param _type_ session: _description_ - :return tuple[select, int, int]: query, pages_total, count + Args: + query (_type_): _description_ + session (_type_): _description_ + + Returns: + tuple[select, int, int]: query, pages_total, count """ if self.page_number is None: return query, 0, 0 diff --git a/app/ldap_protocol/ldap_schema/attribute_type_crud.py b/app/ldap_protocol/ldap_schema/attribute_type_crud.py index 27cacd990..4439ddfd5 100644 --- a/app/ldap_protocol/ldap_schema/attribute_type_crud.py +++ b/app/ldap_protocol/ldap_schema/attribute_type_crud.py @@ -52,9 +52,12 @@ async def get_attribute_types_paginator( ) -> PaginationResult: """Retrieve paginated attribute_types. - :param PaginationParams params: page_size and page_number. - :param AsyncSession session: Database session. - :return PaginationResult: Chunk of attribute_types and metadata. + Args: + params (PaginationParams): page_size and page_number. + session (AsyncSession): Database session. + + Returns: + PaginationResult: Chunk of attribute_types and metadata. """ return await PaginationResult[AttributeType].get( params=params, @@ -83,14 +86,17 @@ async def create_attribute_type( ) -> None: """Create a new Attribute Type. - :param str oid: OID. - :param str name: Name. - :param str syntax: Syntax. - :param bool single_value: Single value. - :param bool no_user_modification: User can't modify it. - :param bool is_system: Attribute Type is system. - :param AsyncSession session: Database session. - :return None. + Args: + oid (str): OID. + name (str): Name. + syntax (str): Syntax. + single_value (bool): Single value. + no_user_modification (bool): User can't modify it. + is_system (bool): Attribute Type is system. + session (AsyncSession): Database session. + + Returns: + None. """ attribute_type = AttributeType( oid=oid, @@ -110,9 +116,12 @@ async def get_attribute_type_by_name( ) -> AttributeType | None: """Get single Attribute Type by name. - :param str attribute_type_name: Attribute Type name. - :param AsyncSession session: Database session. - :return AttributeType | None: Attribute Type. + Args: + attribute_type_name (str): Attribute Type name. + session (AsyncSession): Database session. + + Returns: + AttributeType | None: Attribute Type. """ return await session.scalar( select(AttributeType) @@ -126,9 +135,12 @@ async def get_attribute_types_by_names( ) -> list[AttributeType]: """Get list of Attribute Types by names. - :param list[str] attribute_type_names: Attribute Type names. - :param AsyncSession session: Database session. - :return list[AttributeType]: List of Attribute Types. + Args: + attribute_type_names (list[str]): Attribute Type names. + session (AsyncSession): Database session. + + Returns: + list[AttributeType]: List of Attribute Types. """ if not attribute_type_names: return [] @@ -147,10 +159,14 @@ async def modify_attribute_type( ) -> None: """Modify Attribute Type. - :param AttributeType attribute_type: Attribute Type. - :param AttributeTypeUpdateSchema new_statement: Attribute Type Schema. - :param AsyncSession session: Database session. - :return None. + Args: + attribute_type (AttributeType): Attribute Type. + new_statement (AttributeTypeUpdateSchema): Attribute Type + Schema. + session (AsyncSession): Database session. + + Returns: + None. 
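The paginator calls above take a PaginationParams(page_number=..., page_size=...) pair and hand back one chunk of rows plus page metadata. The arithmetic behind such a paginator, as a stand-alone sketch (the dataclass below is hypothetical, not the project's PaginationResult):

from dataclasses import dataclass
from math import ceil


@dataclass
class Page:
    items: list
    page_number: int
    pages_total: int
    total_count: int


def paginate(items: list, page_number: int, page_size: int) -> Page:
    """Slice a sequence into one page and report page metadata."""
    total = len(items)
    pages_total = ceil(total / page_size) if page_size else 0
    start = (page_number - 1) * page_size
    return Page(items[start:start + page_size], page_number, pages_total, total)


page = paginate(list(range(95)), page_number=2, page_size=25)
assert (page.pages_total, page.total_count, len(page.items)) == (4, 95, 25)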
""" attribute_type.syntax = new_statement.syntax attribute_type.single_value = new_statement.single_value @@ -164,9 +180,12 @@ async def delete_attribute_types_by_names( ) -> None: """Delete not system Attribute Types by names. - :param list[str] attribute_type_names: List of Attribute Types OIDs. - :param AsyncSession session: Database session. - :return None: None. + Args: + attribute_type_names (list[str]): List of Attribute Types OIDs. + session (AsyncSession): Database session. + + Returns: + None: None. """ if not attribute_type_names: return None diff --git a/app/ldap_protocol/ldap_schema/object_class_crud.py b/app/ldap_protocol/ldap_schema/object_class_crud.py index cf550ba7c..7f4a8c29b 100644 --- a/app/ldap_protocol/ldap_schema/object_class_crud.py +++ b/app/ldap_protocol/ldap_schema/object_class_crud.py @@ -64,9 +64,12 @@ async def get_object_classes_paginator( ) -> PaginationResult: """Retrieve paginated object_classes. - :param PaginationParams params: page_size and page_number. - :param AsyncSession session: Database session. - :return PaginationResult: Chunk of object_classes and metadata. + Args: + params (PaginationParams): page_size and page_number. + session (AsyncSession): Database session. + + Returns: + PaginationResult: Chunk of object_classes and metadata. """ return await PaginationResult[ObjectClass].get( params=params, @@ -95,15 +98,18 @@ async def create_object_class( ) -> None: """Create a new Object Class. - :param str oid: OID. - :param str name: Name. + Args: + oid (str): OID. + name (str): Name. + kind (KindType): Kind. + is_system (bool): Object Class is system. + attribute_type_names_must (list[str]): Attribute Types must. + attribute_type_names_may (list[str]): Attribute Types may. + session (AsyncSession): Database session. :param str | None superior_name: Parent Object Class. - :param KindType kind: Kind. - :param bool is_system: Object Class is system. - :param list[str] attribute_type_names_must: Attribute Types must. - :param list[str] attribute_type_names_may: Attribute Types may. - :param AsyncSession session: Database session. - :return None. + + Returns: + None. """ if kind not in OBJECT_CLASS_KINDS_ALLOWED: raise ValueError(f"Object class kind is not valid: {kind}.") @@ -148,9 +154,12 @@ async def get_object_class_by_name( ) -> ObjectClass | None: """Get single Object Class by name. - :param str object_class_name: Object Class name. - :param AsyncSession session: Database session. - :return ObjectClass | None: Object Class. + Args: + object_class_name (str): Object Class name. + session (AsyncSession): Database session. + + Returns: + ObjectClass | None: Object Class. """ return await session.scalar( select(ObjectClass) @@ -164,9 +173,12 @@ async def get_object_classes_by_names( ) -> list[ObjectClass]: """Get list of Object Classes by names. - :param list[str] object_class_names: Object Classes names. - :param AsyncSession session: Database session. - :return list[ObjectClass]: List of Object Classes. + Args: + object_class_names (list[str]): Object Classes names. + session (AsyncSession): Database session. + + Returns: + list[ObjectClass]: List of Object Classes. """ query = await session.scalars( select(ObjectClass) @@ -186,10 +198,14 @@ async def modify_object_class( ) -> None: """Modify Object Class. - :param ObjectClass object_class: Object Class. - :param ObjectClassUpdateSchema new_statement: New statement of object class - :param AsyncSession session: Database session. - :return None. + Args: + object_class (ObjectClass): Object Class. 
+ new_statement (ObjectClassUpdateSchema): New statement of object + class + session (AsyncSession): Database session. + + Returns: + None. """ object_class.attribute_types_must.clear() object_class.attribute_types_must.extend( @@ -220,9 +236,12 @@ async def delete_object_classes_by_names( ) -> None: """Delete not system Object Classes by Names. - :param list[str] object_classes_names: Object classes names. - :param AsyncSession session: Database session. - :return None. + Args: + object_classes_names (list[str]): Object classes names. + session (AsyncSession): Database session. + + Returns: + None. """ await session.execute( delete(ObjectClass) diff --git a/app/ldap_protocol/messages.py b/app/ldap_protocol/messages.py index 78e2f6afa..2cc972d5f 100644 --- a/app/ldap_protocol/messages.py +++ b/app/ldap_protocol/messages.py @@ -118,10 +118,15 @@ def from_bytes(cls, source: bytes) -> "LDAPRequestMessage": def from_err(cls, source: bytes, err: Exception) -> LDAPResponseMessage: """Create error response message. - :param bytes source: source data - :param Exception err: any error - :raises ValueError: on invalid schema - :return LDAPResponseMessage: response with err code + Args: + source (bytes): source data + err (Exception): any error + + Raises: + ValueError: on invalid schema + + Returns: + LDAPResponseMessage: response with err code """ output = asn1todict(source) message_id = 0 diff --git a/app/ldap_protocol/multifactor.py b/app/ldap_protocol/multifactor.py index 155e8af15..081a70ee9 100644 --- a/app/ldap_protocol/multifactor.py +++ b/app/ldap_protocol/multifactor.py @@ -58,7 +58,8 @@ async def get_creds( ) -> Creds | None: """Get API creds. - :return tuple[str, str]: api key and secret + Returns: + tuple[str, str]: api key and secret """ query = ( select(CatalogueSetting) @@ -123,10 +124,12 @@ def __init__( ): """Set creds and web client. - :param str key: mfa key - :param str secret: mfa secret - :param httpx.AsyncClient client: client for making queries (activated) - :param Settings settings: app settings + Args: + key (str): mfa key + secret (str): mfa secret + client (httpx.AsyncClient): client for making queries + (activated) + settings (Settings): app settings """ self.client = client self.settings = settings @@ -149,13 +152,18 @@ async def ldap_validate_mfa( timeout is 60 seconds. "m" key-character is used to mark push request in multifactor API. - :param str username: un - :param str password: pwd - :param NetworkPolicy policy: policy - :raises MultifactorError: connect timeout - :raises MultifactorError: invalid json - :raises MultifactorError: Invalid status - :return bool: status + Args: + username (str): un + password (str): pwd + policy (NetworkPolicy): policy + + Raises: + MultifactorError: connect timeout + MultifactorError: invalid json + MultifactorError: Invalid status + + Returns: + bool: status """ passcode = password or "m" log_mfa.debug(f"LDAP MFA request: {username}, {password}") @@ -211,13 +219,18 @@ async def get_create_mfa( ) -> str: """Create mfa link. 
- :param str username: un - :param str callback_url: callback uri to send token - :param int uid: user id - :raises httpx.TimeoutException: on timeout - :raises self.MultifactorError: on invalid json, Key or error status - code - :return str: url to open in new page + Args: + username (str): un + callback_url (str): callback uri to send token + uid (int): user id + + Raises: + httpx.TimeoutException: on timeout + self.MultifactorError: on invalid json, Key or error status + code + + Returns: + str: url to open in new page """ data = { "identity": username, @@ -264,9 +277,14 @@ async def get_create_mfa( async def refresh_token(self, token: str) -> str: """Refresh mfa token. - :param str token: str jwt token - :raises self.MultifactorError: on api err - :return str: new token + Args: + token (str): str jwt token + + Raises: + self.MultifactorError: on api err + + Returns: + str: new token """ try: response = await self.client.post( diff --git a/app/ldap_protocol/policies/access_policy.py b/app/ldap_protocol/policies/access_policy.py index 7a1f8ec89..bf574bb92 100644 --- a/app/ldap_protocol/policies/access_policy.py +++ b/app/ldap_protocol/policies/access_policy.py @@ -27,8 +27,11 @@ async def get_policies(session: AsyncSession) -> list[AccessPolicy]: """Get policies. - :param AsyncSession session: db - :return list[AccessPolicy]: result + Args: + session (AsyncSession): db + + Returns: + list[AccessPolicy]: result """ query = select(AccessPolicy).options( selectinload(AccessPolicy.groups).selectinload(Group.directory), @@ -50,8 +53,9 @@ async def create_access_policy( ) -> None: """Get policies. - :param ENTRY_TYPE grant_dn: main dn - :param AsyncSession session: session + Args: + grant_dn (ENTRY_TYPE): main dn + session (AsyncSession): session """ path = get_search_path(grant_dn) dir_filter = get_path_filter( @@ -82,9 +86,12 @@ def mutate_ap[T: Select]( ) -> T: """Modify query with read rule filter, joins acess policies. - :param T query: select(Directory) - :param UserSchema user: user data - :return T: select(Directory).join(Directory.access_policies) + Args: + query (T): select(Directory) + user (UserSchema): user data + + Returns: + T: select(Directory).join(Directory.access_policies) """ whitelist = AccessPolicy.id.in_(user.access_policies_ids) diff --git a/app/ldap_protocol/policies/network_policy.py b/app/ldap_protocol/policies/network_policy.py index c39277b7e..addc2ebd9 100644 --- a/app/ldap_protocol/policies/network_policy.py +++ b/app/ldap_protocol/policies/network_policy.py @@ -22,11 +22,14 @@ def build_policy_query( ) -> Select: """Build a base query for network policies with optional group filtering. - :param IPv4Address ip: IP address to filter + Args: + ip (IPv4Address): IP address to filter :param Literal["is_http", "is_ldap", "is_kerberos"] protocol_field_name protocol: Protocol to filter :param list[int] | None user_group_ids: List of user group IDs, optional - :return: Select query + + Returns: + : Select query """ protocol_field = getattr(NetworkPolicy, protocol_field_name) query = ( @@ -62,10 +65,13 @@ async def check_mfa_group( ) -> bool: """Check if user is in a group with MFA policy. 
- :param NetworkPolicy policy: policy object - :param User user: user object - :param AsyncSession session: db session - :return bool: status + Args: + policy (NetworkPolicy): policy object + user (User): user object + session (AsyncSession): db session + + Returns: + bool: status """ return await session.scalar( select( @@ -84,9 +90,12 @@ async def get_user_network_policy( ) -> NetworkPolicy | None: """Get the highest priority network policy for user, ip and protocol. - :param User user: user object - :param AsyncSession session: db session - :return NetworkPolicy | None: a NetworkPolicy object + Args: + user (User): user object + session (AsyncSession): db session + + Returns: + NetworkPolicy | None: a NetworkPolicy object """ user_group_ids = [group.id for group in user.groups] @@ -102,10 +111,13 @@ async def is_user_group_valid( ) -> bool: """Validate user groups, is it including to policy. - :param User user: db user - :param NetworkPolicy policy: db policy - :param AsyncSession session: db - :return bool: status + Args: + user (User): db user + policy (NetworkPolicy): db policy + session (AsyncSession): db + + Returns: + bool: status """ if user is None or policy is None: return False diff --git a/app/ldap_protocol/policies/password_policy.py b/app/ldap_protocol/policies/password_policy.py index 82f6e5386..3797faf61 100644 --- a/app/ldap_protocol/policies/password_policy.py +++ b/app/ldap_protocol/policies/password_policy.py @@ -29,8 +29,9 @@ async def post_save_password_actions( ) -> None: """Post save actions for password update. - :param User user: user from db - :param AsyncSession session: db + Args: + user (User): user from db + session (AsyncSession): db """ await session.execute( # update bind reject attribute update(Attribute) @@ -81,8 +82,11 @@ def _validate_minimum_pwd_age(self) -> "PasswordPolicySchema": async def create_policy_settings(self, session: AsyncSession) -> Self: """Create policies settings. - :param AsyncSession session: db session - :return PasswordPolicySchema: password policy. + Args: + session (AsyncSession): db session + + Returns: + PasswordPolicySchema: password policy. """ existing_policy = await session.scalar(select(exists(PasswordPolicy))) if existing_policy: @@ -98,8 +102,11 @@ async def get_policy_settings( ) -> "PasswordPolicySchema": """Get policy settings. - :param AsyncSession session: db - :return PasswordPolicySchema: policy + Args: + session (AsyncSession): db + + Returns: + PasswordPolicySchema: policy """ policy = await session.scalar(select(PasswordPolicy)) if not policy: @@ -109,7 +116,8 @@ async def get_policy_settings( async def update_policy_settings(self, session: AsyncSession) -> None: """Update policy. - :param AsyncSession session: db + Args: + session (AsyncSession): db """ await session.execute( (update(PasswordPolicy).values(self.model_dump(mode="json"))), @@ -123,8 +131,11 @@ async def delete_policy_settings( ) -> "PasswordPolicySchema": """Reset (delete) default policy. - :param AsyncSession session: db - :return PasswordPolicySchema: schema policy + Args: + session (AsyncSession): db + + Returns: + PasswordPolicySchema: schema policy """ default_policy = cls() await default_policy.update_policy_settings(session) @@ -134,8 +145,11 @@ async def delete_policy_settings( def _count_password_exists_days(last_pwd_set: Attribute) -> int: """Get number of days, pwd exists. 
- :param Attribute last_pwd_set: pwdLastSet - :return int: days + Args: + last_pwd_set (Attribute): pwdLastSet + + Returns: + int: days """ tz = ZoneInfo("UTC") now = datetime.now(tz=tz) @@ -155,9 +169,12 @@ async def get_pwd_last_set( ) -> Attribute: """Get pwdLastSet. - :param AsyncSession session: db - :param int directory_id: id - :return Attribute: pwdLastSet + Args: + session (AsyncSession): db + directory_id (int): id + + Returns: + Attribute: pwdLastSet """ plset = await session.scalar( select(Attribute) @@ -181,10 +198,12 @@ async def get_pwd_last_set( def validate_min_age(self, last_pwd_set: Attribute) -> bool: """Validate min password change age. - :param Attribute last_pwd_set: last pwd set - :return bool: can change pwd - True - not valid, can not change - False - valid, can change + Args: + last_pwd_set (Attribute): last pwd set + + Returns: + bool: can change pwd True - not valid, can not change False + - valid, can change on minimum_password_age_days can always change. """ @@ -198,10 +217,12 @@ def validate_min_age(self, last_pwd_set: Attribute) -> bool: def validate_max_age(self, last_pwd_set: Attribute) -> bool: """Validate max password change age. - :param Attribute last_pwd_set: last pwd set - :return bool: is pwd expired - True - not valid, expired - False - valid, not expired + Args: + last_pwd_set (Attribute): last pwd set + + Returns: + bool: is pwd expired True - not valid, expired False - + valid, not expired on maximum_password_age_days always valid. """ @@ -219,10 +240,13 @@ async def validate_password_with_policy( ) -> list[str]: """Validate password with chosen policy. - :param str password: new raw password - :param User user: db user - :param AsyncSession session: db - :return bool: status + Args: + password (str): new raw password + user (User): db user + session (AsyncSession): db + + Returns: + bool: status """ errors = [] history: Iterable = [] diff --git a/app/ldap_protocol/server.py b/app/ldap_protocol/server.py index 48afb9dc1..eb79dba52 100644 --- a/app/ldap_protocol/server.py +++ b/app/ldap_protocol/server.py @@ -143,8 +143,11 @@ def _extract_proxy_protocol_address( ) -> tuple[IPv4Address | IPv6Address, bytes]: """Get ip from proxy protocol header. - :param bytes data: data - :return tuple: ip, data + Args: + data (bytes): data + + Returns: + tuple: ip, data """ peername = ":".join(map(str, writer.get_extra_info("peername"))) peer_addr = ip_address(peername.split(":")[0]) @@ -188,8 +191,11 @@ async def recieve( ) -> tuple[IPv4Address | IPv6Address, bytes] | bytes: """Read N packets by 1kB. - :param asyncio.StreamReader reader: reader - :return tuple: ip, data + Args: + reader (asyncio.StreamReader): reader + + Returns: + tuple: ip, data """ buffer = BytesIO() addr = None @@ -229,8 +235,11 @@ def _compute_ldap_message_size(data: bytes) -> int: source: https://github.com/cannatag/ldap3/blob/dev/ldap3/strategy/base.py#L455 - :param bytes data: body - :return int: actual size + Args: + data (bytes): body + + Returns: + int: actual size """ if len(data) > 2: if data[1] <= 127: # short @@ -255,12 +264,15 @@ async def _handle_request( ) -> None: """Create request object and send it to queue. 
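The password-policy hunks above document a slightly counter-intuitive convention: validate_min_age and validate_max_age return True when the password can NOT be changed yet, or HAS expired. A self-contained sketch of those checks with example policy values (not the project's defaults):

from datetime import datetime, timedelta, timezone

MINIMUM_PASSWORD_AGE_DAYS = 1   # example value
MAXIMUM_PASSWORD_AGE_DAYS = 90  # example value


def password_exists_days(last_set: datetime) -> int:
    """Whole days elapsed since the password was last set."""
    return (datetime.now(tz=timezone.utc) - last_set).days


def min_age_violated(last_set: datetime) -> bool:
    """True -> too soon to change the password again."""
    if MINIMUM_PASSWORD_AGE_DAYS == 0:
        return False
    return password_exists_days(last_set) < MINIMUM_PASSWORD_AGE_DAYS


def max_age_expired(last_set: datetime) -> bool:
    """True -> the password is older than the policy allows."""
    if MAXIMUM_PASSWORD_AGE_DAYS == 0:
        return False
    return password_exists_days(last_set) > MAXIMUM_PASSWORD_AGE_DAYS


recent = datetime.now(tz=timezone.utc) - timedelta(hours=3)
assert min_age_violated(recent) and not max_age_expired(recent)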
- :param bytes data: initial data - :param asyncio.StreamReader reader: reader - :param asyncio.StreamWriter writer: writer - :param AsyncContainer container: container - :raises ConnectionAbortedError: if client sends empty request (b'') - :raises RuntimeError: reraises on unexpected exc + Args: + data (bytes): initial data + reader (asyncio.StreamReader): reader + writer (asyncio.StreamWriter): writer + container (AsyncContainer): container + + Raises: + ConnectionAbortedError: if client sends empty request (b'') + RuntimeError: reraises on unexpected exc """ ldap_session: LDAPSession = await container.get(LDAPSession) while True: @@ -294,9 +306,12 @@ async def _unwrap_request( ) -> bytes: """Unwrap request with GSSAPI security layer if needed. - :param bytes data: request data - :param LDAPSession ldap_session: session - :return bytes: unwrapped data + Args: + data (bytes): request data + ldap_session (LDAPSession): session + + Returns: + bytes: unwrapped data """ if ldap_session.gssapi_security_layer in ( GSSAPISL.INTEGRITY_PROTECTION, @@ -387,10 +402,13 @@ async def _wrap_response( ) -> bytes: """Wrap response with GSSAPI security layer if needed. - :param bytes data: response data - :param LDAPSession ldap_session: session - :param int protocol_op: protocol operation - :return bytes: wrapped data + Args: + data (bytes): response data + ldap_session (LDAPSession): session + protocol_op (int): protocol operation + + Returns: + bytes: wrapped data """ if ( ldap_session.gssapi_authenticated diff --git a/app/ldap_protocol/session_storage.py b/app/ldap_protocol/session_storage.py index 227be6d29..f257b9e0e 100644 --- a/app/ldap_protocol/session_storage.py +++ b/app/ldap_protocol/session_storage.py @@ -32,25 +32,34 @@ class SessionStorage(ABC): async def get(self, key: str) -> dict: """Retrieve data associated with the given key from storage. - :param str key: The key to look up in the storage. - :return dict: The data associated with the key, - or an empty dictionary if the key is not found. + Args: + key (str): The key to look up in the storage. + + Returns: + dict: The data associated with the key, or an empty + dictionary if the key is not found. """ @abstractmethod async def _get_session_keys_by_uid(self, uid: int) -> set[str]: """Get session keys by user id. - :param int uid: user id - :return set[str]: session keys + Args: + uid (int): user id + + Returns: + set[str]: session keys """ @abstractmethod async def _get_session_keys_by_ip(self, ip: str) -> set[str]: """Get session keys by ip. - :param str ip: ip - :return set[str]: session keys + Args: + ip (str): ip + + Returns: + set[str]: session keys """ @abstractmethod @@ -61,9 +70,12 @@ async def get_user_sessions( ) -> dict: """Get sessions by user id. - :param int uid: user id + Args: + uid (int): user id :param ProtocolType | None protocol: protocol - :return dict: user sessions contents + + Returns: + dict: user sessions contents """ @abstractmethod @@ -74,25 +86,34 @@ async def get_ip_sessions( ) -> dict: """Get sessions data by ip. - :param str ip: ip + Args: + ip (str): ip :param ProtocolType | None protocol: protocol - :return dict: user sessions contents + + Returns: + dict: user sessions contents """ @abstractmethod async def clear_user_sessions(self, uid: int) -> None: """Clear user sessions. - :param int uid: user id - :return None: + Args: + uid (int): user id + + Returns: + None: """ @abstractmethod async def delete_user_session(self, session_id: str) -> None: """Delete user session. 
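_compute_ldap_message_size above (adapted from ldap3, as the linked source notes) reads the BER length octets of an incoming LDAP message to decide how many bytes the full message occupies. A sketch of that length decoding; the 1024-byte fallback is an illustrative default, not necessarily the server's actual behaviour:

def ldap_message_size(data: bytes) -> int:
    """Total size (header + value) of the first BER element, or a fallback."""
    if len(data) > 2:
        if data[1] <= 127:  # short form: one length byte
            return 2 + data[1]
        length_bytes = data[1] & 0x7F  # long form: next N bytes hold the length
        if len(data) >= 2 + length_bytes:
            value_len = int.from_bytes(data[2:2 + length_bytes], "big")
            return 2 + length_bytes + value_len
    return 1024  # not enough header yet: keep reading (illustrative default)


assert ldap_message_size(bytes([0x30, 0x05, 1, 2, 3, 4, 5])) == 7
assert ldap_message_size(bytes([0x30, 0x82, 0x01, 0x00]) + b"\x00" * 256) == 260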
- :param str session_id: session id - :return None: + Args: + session_id (str): session id + + Returns: + None: """ @staticmethod @@ -119,15 +140,19 @@ def _get_protocol(self, session_id: str) -> ProtocolType: def _generate_key(self) -> str: """Generate a new key for storing data in the storage. - :return str: A new key. + Returns: + str: A new key. """ return f"http:{token_hex(self.key_length)}" def _get_lock_key(self, session_id: str) -> str: """Get lock key. - :param str session_id: session id - :return str: lock key + Args: + session_id (str): session id + + Returns: + str: lock key """ return f"lock:{session_id}" @@ -141,10 +166,13 @@ async def create_session( ) -> str: """Create session. - :param int uid: user id - :param Settings settings: app settings + Args: + uid (int): user id + settings (Settings): app settings :param dict | None extra_data: data, defaults to None - :return str: session id + + Returns: + str: session id """ async def get_user_id( @@ -156,11 +184,14 @@ async def get_user_id( ) -> int: """Get user from storage. - :param Settings settings: app settings - :param str session_key: session key - :param str user_agent: user agent - :param str ip: ip address - :return int: user id + Args: + settings (Settings): app settings + session_key (str): session key + user_agent (str): user agent + ip (str): ip address + + Returns: + int: user id """ try: session_id, signature = session_key.split(".") @@ -210,8 +241,11 @@ def _generate_session_data( async def check_session(self, session_id: str) -> bool: """Check session. - :param str session_id: session id - :return bool: True if session exists + Args: + session_id (str): session id + + Returns: + bool: True if session exists """ @abstractmethod @@ -223,26 +257,33 @@ async def create_ldap_session( ) -> None: """Create ldap session. - :param int uid: user id - :param dict data: data, defaults to None + Args: + uid (int): user id + data (dict): data, defaults to None """ @abstractmethod async def check_rekey(self, session_id: str, rekey_interval: int) -> bool: """Check rekey. - :param str session_id: session id - :param int rekey_interval: rekey interval in seconds - :return bool: True if rekey is needed + Args: + session_id (str): session id + rekey_interval (int): rekey interval in seconds + + Returns: + bool: True if rekey is needed """ @abstractmethod async def rekey_session(self, session_id: str, settings: Settings) -> str: """Rekey session. - :param str session_id: session id - :param Settings settings: app settings - :return str: jwt token + Args: + session_id (str): session id + settings (Settings): app settings + + Returns: + str: jwt token """ @@ -299,10 +340,11 @@ class RedisSessionStorage(SessionStorage): def __init__(self, storage: Redis, key_length: int, key_ttl: int) -> None: """Initialize the storage. - :param Redis storage: - The Redis/DragonflyDB instance to use for storage. - :param int key_length: The length of the keys to generate. - :param int key_ttl: The time-to-live for keys in seconds. + Args: + storage (Redis): The Redis/DragonflyDB instance to use for + storage. + key_length (int): The length of the keys to generate. + key_ttl (int): The time-to-live for keys in seconds. """ self._storage = storage self.key_length = key_length @@ -311,9 +353,12 @@ def __init__(self, storage: Redis, key_length: int, key_ttl: int) -> None: async def _get_lock(self, name: str, blocking_timeout: int = 5) -> Lock: """Get lock. 
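The storage layer above names its keys by protocol: HTTP session ids are generated as http:<token>, per-session locks live under lock:<session id>, and per-user/per-IP index sets use prefixes such as ip:http:. A tiny sketch of that naming scheme (the key length is an example value):

from secrets import token_hex

KEY_LENGTH = 16  # example value


def generate_session_key() -> str:
    """New opaque HTTP session key."""
    return f"http:{token_hex(KEY_LENGTH)}"


def lock_key_for(session_id: str) -> str:
    """Key under which a short-lived lock for this session is stored."""
    return f"lock:{session_id}"


sid = generate_session_key()
assert sid.startswith("http:") and lock_key_for(sid).startswith("lock:http:")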
- :param str name: lock name - :param int blocking_timeout: blocking timeout, defaults to 5 - :return Lock: lock object + Args: + name (str): lock name + blocking_timeout (int): blocking timeout, defaults to 5 + + Returns: + Lock: lock object """ return self._storage.lock( name=self._get_lock_key(name), @@ -323,9 +368,12 @@ async def _get_lock(self, name: str, blocking_timeout: int = 5) -> Lock: async def get(self, key: str) -> dict: """Retrieve data associated with the given key from storage. - :param str key: The key to look up in the storage. - :return dict: The data associated with the key, - or an empty dictionary if the key is not found. + Args: + key (str): The key to look up in the storage. + + Returns: + dict: The data associated with the key, or an empty + dictionary if the key is not found. """ data = await self._storage.get(key) if data is None: @@ -335,14 +383,16 @@ async def get(self, key: str) -> dict: async def delete(self, keys: Iterable[str]) -> None: """Delete data associated with the given key from storage. - :param str key: The key to delete from the storage. + Args: + key (str): The key to delete from the storage. """ await self._storage.delete(*keys) async def _fetch_keys(self, key: str) -> set[str]: """Fetch keys. - :param str key: key + Args: + key (str): key """ encoded_keys = await self._storage.smembers(key) # type: ignore return {k.decode() for k in encoded_keys} @@ -358,9 +408,12 @@ async def _get_session_keys_by_ip( specific protocol is provided, only sessions for that protocol are returned. - :param str ip: ip + Args: + ip (str): ip :param ProtocolType | None protocol: protocol - :return set[str]: session keys + + Returns: + set[str]: session keys """ if protocol: return await self._fetch_keys( @@ -382,9 +435,12 @@ async def _get_session_keys_by_uid( specific protocol is provided, only sessions for that protocol are returned. - :param int uid: user id + Args: + uid (int): user id :param ProtocolType | None protocol: protocol - :return set[str]: session keys + + Returns: + set[str]: session keys """ if protocol: return await self._fetch_keys( @@ -416,9 +472,12 @@ async def _get_sessions(self, keys: set[str], id_value: str | int) -> dict: 4. Remove expired session keys from the sets that track user ID or IP sessions. - :param set[str] keys: session keys + Args: + keys (set[str]): session keys :param str | int id_value: user id or ip - :return dict: user sessions contents + + Returns: + dict: user sessions contents """ if not keys: return {} @@ -458,9 +517,12 @@ async def get_user_sessions( ) -> dict: """Get sessions by user id. - :param int uid: user id + Args: + uid (int): user id :param ProtocolType | None protocol: protocol - :return dict: user sessions contents + + Returns: + dict: user sessions contents """ keys = await self._get_session_keys_by_uid(uid, protocol) return await self._get_sessions(keys, uid) @@ -472,9 +534,12 @@ async def get_ip_sessions( ) -> dict: """Get sessions data by ip. - :param str ip: ip + Args: + ip (str): ip :param ProtocolType | None protocol: protocol - :return dict: user sessions contents + + Returns: + dict: user sessions contents """ keys = await self._get_session_keys_by_ip(ip, protocol) return await self._get_sessions(keys, ip) @@ -494,7 +559,8 @@ async def clear_user_sessions(self, uid: int) -> None: 5. Identify and remove session references stored under UID-based keys. 6. Delete all user session keys from storage. 
- :param int uid: user id + Args: + uid (int): user id """ keys = await self._get_session_keys_by_uid(uid) if not keys: @@ -544,7 +610,8 @@ async def delete_user_session(self, session_id: str) -> None: 9. Delete the session data from storage. 10. Release the lock. - :param str session_id: session id + Args: + session_id (str): session id """ try: data = await self.get(session_id) @@ -591,11 +658,12 @@ async def _add_session( Adds a session to the storage and updates the session tracking keys for both user ID and IP address. - :param str session_id: session id - :param dict data: session data - :param int uid: user id - :param str ip_session_key: ip session key - :param str sessions_key: sessions key + Args: + session_id (str): session id + data (dict): session data + uid (int): user id + ip_session_key (str): ip session key + sessions_key (str): sessions key :param int | None ttl: time to live, defaults to None """ zset_key = ( @@ -636,12 +704,15 @@ async def create_session( 4. If an IP address is provided in `extra_data`, also link the session to the IP-based session tracking key (`ip:http:`). - :param int uid: user id - :param dict data: data dict - :param str secret: secret key - :param int expires_minutes: exire time in minutes + Args: + uid (int): user id + data (dict): data dict + secret (str): secret key + expires_minutes (int): exire time in minutes :param Literal[refresh, access] grant_type: grant type flag - :return str: jwt token + + Returns: + str: jwt token """ session_id, signature, data = self._generate_session_data( uid=uid, @@ -689,9 +760,10 @@ async def create_ldap_session( 4. If an IP address is provided in `extra_data`, also link the session to the IP-based session tracking key (`ip:ldap:`). - :param int uid: user id - :param str key: session key - :param dict data: any data + Args: + uid (int): user id + key (str): session key + data (dict): any data """ data["issued"] = datetime.now(UTC).isoformat() ldap_sessions_key = self._get_user_session_key(uid, "ldap") @@ -711,9 +783,12 @@ async def create_ldap_session( async def check_rekey(self, session_id: str, rekey_interval: int) -> bool: """Check rekey. - :param str session_id: session id - :param int rekey_interval: rekey interval in seconds - :return bool: True if rekey is needed + Args: + session_id (str): session id + rekey_interval (int): rekey interval in seconds + + Returns: + bool: True if rekey is needed """ lock = await self._get_lock(session_id) @@ -745,9 +820,12 @@ async def _rekey_session(self, session_id: str, settings: Settings) -> str: - The IP-based session tracking key (`ip:http:`) 8. Delete the old session. - :param str session_id: session id - :param Settings settings: app settings - :return str: jwt token + Args: + session_id (str): session id + settings (Settings): app settings + + Returns: + str: jwt token """ data = await self.get(session_id) @@ -785,9 +863,12 @@ async def _rekey_session(self, session_id: str, settings: Settings) -> str: async def rekey_session(self, session_id: str, settings: Settings) -> str: """Rekey session. 
- :param str session_id: session id - :param Settings settings: app settings - :return str: jwt token + Args: + session_id (str): session id + settings (Settings): app settings + + Returns: + str: jwt token """ lock = await self._get_lock(session_id) diff --git a/app/ldap_protocol/user_account_control.py b/app/ldap_protocol/user_account_control.py index 6aab592a0..22954689b 100644 --- a/app/ldap_protocol/user_account_control.py +++ b/app/ldap_protocol/user_account_control.py @@ -72,8 +72,12 @@ class UserAccountControlFlag(IntFlag): def is_value_valid(cls, uac_value: str | int) -> bool: """Check all flags set in the userAccountControl value. - :param int uac_value: userAccountControl attribute value - :return: True if the value is valid (only known flags), False otherwise + Args: + uac_value (int): userAccountControl attribute value + + Returns: + : True if the value is valid (only known flags), False + otherwise """ if isinstance(uac_value, int): pass @@ -94,9 +98,12 @@ async def get_check_uac( ) -> Callable[[UserAccountControlFlag], bool]: """Get userAccountControl attribute and check binary flags in it. - :param AsyncSession session: SA async session - :param int directory_id: id - :return Callable: function to check given flag in current + Args: + session (AsyncSession): SA async session + directory_id (int): id + + Returns: + Callable: function to check given flag in current userAccountControl attribute """ query = ( @@ -112,8 +119,11 @@ async def get_check_uac( def is_flag_true(flag: UserAccountControlFlag) -> bool: """Check given flag in current userAccountControl attribute. - :param userAccountControlFlag flag: flag - :return bool: result + Args: + flag (userAccountControlFlag): flag + + Returns: + bool: result """ return bool(int(value) & flag) diff --git a/app/ldap_protocol/utils/cte.py b/app/ldap_protocol/utils/cte.py index 98b4e48a1..66912cd64 100644 --- a/app/ldap_protocol/utils/cte.py +++ b/app/ldap_protocol/utils/cte.py @@ -222,9 +222,12 @@ async def get_all_parent_group_directories( ) -> AsyncScalarResult | None: """Get all parent groups directory. - :param list[Group] groups: directory groups - :param AsyncSession session: session - :return set[Directory]: all groups and their parent group directories + Args: + groups (list[Group]): directory groups + session (AsyncSession): session + + Returns: + set[Directory]: all groups and their parent group directories """ dn_list = [group.directory.path_dn for group in groups] diff --git a/app/ldap_protocol/utils/helpers.py b/app/ldap_protocol/utils/helpers.py index 0b0553680..68fc70436 100644 --- a/app/ldap_protocol/utils/helpers.py +++ b/app/ldap_protocol/utils/helpers.py @@ -148,8 +148,12 @@ def validate_entry(entry: str) -> bool: cn=first,dc=example,dc=com -> valid cn=first,dc=example,dc=com -> valid - :param str entry: any str - :return bool: result + + Args: + entry (str): any str + + Returns: + bool: result """ return all( re.match(r"^[a-zA-Z\-]+$", part.split("=")[0]) @@ -187,9 +191,12 @@ def _get_domain(name: str) -> str: def create_integer_hash(text: str, size: int = 9) -> int: """Create integer hash from text. - :param str text: any string - :param int size: fixed size of hash, defaults to 15 - :return int: hash + Args: + text (str): any string + size (int): fixed size of hash, defaults to 15 + + Returns: + int: hash """ return int(hashlib.sha256(text.encode("utf-8")).hexdigest(), 16) % 10**size @@ -245,8 +252,11 @@ def string_to_sid(sid_string: str) -> bytes: - The identifier authority is packed as a 6-byte sequence. 
- Each sub-authority is packed as a 4-byte sequence. - :param sid_string: The string representation of the SID - :return bytes: The binary representation of the SID + Args: + sid_string: The string representation of the SID + + Returns: + bytes: The binary representation of the SID """ parts = sid_string.split("-") @@ -274,13 +284,15 @@ def create_object_sid( ) -> str: """Generate the objectSid attribute for an object. - :param domain: domain directory - :param int rid: relative identifier - :param bool reserved: A flag indicating whether the RID is reserved. - If `True`, the given RID is used directly. If - `False`, 1000 is added to the given RID to generate - the final RID - :return str: the complete objectSid as a string + Args: + domain: domain directory + rid (int): relative identifier + reserved (bool): A flag indicating whether the RID is reserved. + If `True`, the given RID is used directly. If `False`, 1000 + is added to the given RID to generate the final RID + + Returns: + str: the complete objectSid as a string """ return domain.object_sid + f"-{rid if reserved else 1000 + rid}" diff --git a/app/ldap_protocol/utils/queries.py b/app/ldap_protocol/utils/queries.py index 3ab2ed552..400969628 100644 --- a/app/ldap_protocol/utils/queries.py +++ b/app/ldap_protocol/utils/queries.py @@ -39,9 +39,12 @@ async def get_base_directories(session: AsyncSession) -> list[Directory]: async def get_user(session: AsyncSession, name: str) -> User | None: """Get user with username. - :param AsyncSession session: sqlalchemy session - :param str name: any name: dn, email or upn - :return User | None: user from db + Args: + session (AsyncSession): sqlalchemy session + name (str): any name: dn, email or upn + + Returns: + User | None: user from db """ policies = selectinload(User.groups).selectinload(Group.access_policies) @@ -101,10 +104,15 @@ async def get_groups(dn_list: list[str], session: AsyncSession) -> list[Group]: async def get_group(dn: str | ENTRY_TYPE, session: AsyncSession) -> Directory: """Get dir with group by dn. - :param str dn: Distinguished Name - :param AsyncSession session: SA session - :raises AttributeError: on invalid dn - :return Directory: dir with group + Args: + dn (str): Distinguished Name + session (AsyncSession): SA session + + Raises: + AttributeError: on invalid dn + + Returns: + Directory: dir with group """ for base_directory in await get_base_directories(session): if dn_is_base_directory(base_directory, dn): @@ -131,8 +139,12 @@ async def check_kerberos_group( """Check if user in kerberos group. :param User | None user: user (sa model) - :param AsyncSession session: db - :return bool: exists result + + Args: + session (AsyncSession): db + + Returns: + bool: exists result """ if user is None: return False @@ -167,8 +179,11 @@ async def set_last_logon_user( def get_search_path(dn: str) -> list[str]: """Get search path for dn. - :param str dn: any DN, dn syntax - :return list[str]: reversed list of dn values + Args: + dn (str): any DN, dn syntax + + Returns: + list[str]: reversed list of dn values """ search_path = [path.strip() for path in dn.lower().split(",")] search_path.reverse() @@ -182,9 +197,12 @@ def get_path_filter( ) -> ColumnElement: """Get filter condition for path equality. 
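Putting the two helpers above together: string_to_sid() packs the textual SID into its binary layout, and create_object_sid() appends the RID, offset by 1000 unless it is reserved. A compact sketch, assuming the usual Windows byte order for the authority and sub-authority fields:

# Sketch of the packing rules spelled out above; endianness is an assumption
# as far as this patch is concerned.
import struct


def string_to_sid(sid_string: str) -> bytes:
    parts = sid_string.split("-")            # ["S", "1", "5", "21", ...]
    revision = int(parts[1])
    authority = int(parts[2])
    sub_authorities = [int(p) for p in parts[3:]]
    packed = struct.pack("<BB", revision, len(sub_authorities))
    packed += authority.to_bytes(6, "big")   # 6-byte identifier authority
    for sub in sub_authorities:              # 4 bytes per sub-authority
        packed += struct.pack("<I", sub)
    return packed


def create_object_sid(domain_sid: str, rid: int, reserved: bool = False) -> str:
    return f"{domain_sid}-{rid if reserved else 1000 + rid}"


assert create_object_sid("S-1-5-21-1-2-3", 513, reserved=True).endswith("-513")
assert len(string_to_sid("S-1-5-21-1-2-3")) == 2 + 6 + 4 * 4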
- :param list[str] path: dn - :param Column field: path column, defaults to Directory.path - :return ColumnElement: filter (where) element + Args: + path (list[str]): dn + field (Column): path column, defaults to Directory.path + + Returns: + ColumnElement: filter (where) element """ return func.array_lowercase(column) == path @@ -224,9 +242,10 @@ async def create_group( cn=name,cn=groups,dc=domain,dc=com - :param str name: group name - :param int sid: objectSid - :param AsyncSession session: db + Args: + name (str): group name + sid (int): objectSid + session (AsyncSession): db """ base_dn_list = await get_base_directories(session) @@ -280,8 +299,9 @@ async def create_group( async def is_computer(directory_id: int, session: AsyncSession) -> bool: """Determine whether the entry is a computer. - :param AsyncSession session: db - :param int directory_id: id + Args: + session (AsyncSession): db + directory_id (int): id """ query = select( select(Attribute) @@ -302,9 +322,10 @@ async def add_lock_and_expire_attributes( ) -> None: """Add `nsAccountLock` and `shadowExpire` attributes to the directory. - :param AsyncSession session: db - :param Directory directory: directory - :param ZoneInfo tz: timezone info + Args: + session (AsyncSession): db + directory (Directory): directory + tz (ZoneInfo): timezone info """ now_with_tz = datetime.now(tz=tz) absolute_date = int(time.mktime(now_with_tz.timetuple()) / 86400) @@ -330,9 +351,12 @@ async def get_principal_directory( ) -> Directory | None: """Fetch the principal's directory by principal name. - :param AsyncSession session: db session - :param str principal_name: the principal name to search for - :return Directory | None: the principal's directory + Args: + session (AsyncSession): db session + principal_name (str): the principal name to search for + + Returns: + Directory | None: the principal's directory """ return await session.scalar( select(Directory) diff --git a/app/multidirectory.py b/app/multidirectory.py index 2b56d6f45..03f3a9361 100644 --- a/app/multidirectory.py +++ b/app/multidirectory.py @@ -55,9 +55,12 @@ async def proc_time_header_middleware( ) -> Response: """Set X-Process-Time header. - :param Request request: _description_ - :param Callable call_next: _description_ - :return Response: _description_ + Args: + request (Request): _description_ + call_next (Callable): _description_ + + Returns: + Response: _description_ """ start_time = time.perf_counter() response = await call_next(request) diff --git a/app/schedule.py b/app/schedule.py index cd013ecad..b4ce712c5 100644 --- a/app/schedule.py +++ b/app/schedule.py @@ -32,9 +32,10 @@ async def _schedule( ) -> None: """Run task periodically. - :param Awaitable task: any task - :param AsyncContainer container: container - :param float wait: time to wait after execution + Args: + task (Awaitable): any task + container (AsyncContainer): container + wait (float): time to wait after execution """ logger.info("Registered: {}", task.__name__) while True: diff --git a/app/security.py b/app/security.py index 8315c2953..20444349c 100644 --- a/app/security.py +++ b/app/security.py @@ -12,9 +12,12 @@ def verify_password(plain_password: str, hashed_password: str) -> bool: """Validate password. 
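The _schedule() helper above is essentially an infinite loop with a sleep between runs. A stripped-down, container-free version for illustration:

import asyncio
from collections.abc import Awaitable, Callable


async def _schedule(task: Callable[[], Awaitable[None]], wait: float) -> None:
    """Run the task forever, sleeping `wait` seconds between executions."""
    while True:
        await task()
        await asyncio.sleep(wait)


async def heartbeat() -> None:
    print("tick")


async def main() -> None:
    runner = asyncio.create_task(_schedule(heartbeat, wait=0.1))
    await asyncio.sleep(0.35)  # let it tick a few times, then stop
    runner.cancel()


asyncio.run(main())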
- :param str plain_password: raw password - :param str hashed_password: pwd hash from db - :return bool: is password valid + Args: + plain_password (str): raw password + hashed_password (str): pwd hash from db + + Returns: + bool: is password valid """ return pwd_context.verify(plain_password, hashed_password) @@ -22,7 +25,10 @@ def verify_password(plain_password: str, hashed_password: str) -> bool: def get_password_hash(password: str) -> str: """Hash password. - :param str password: raw pwd - :return str: hash + Args: + password (str): raw pwd + + Returns: + str: hash """ return pwd_context.hash(password, max_rounds=9) diff --git a/tests/conftest.py b/tests/conftest.py index 977c3381b..858e0f7b3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -448,7 +448,8 @@ async def unbound_http_client( ) -> AsyncIterator[httpx.AsyncClient]: """Get async client for fastapi tests. - :param FastAPI app: asgi app + Args: + app (FastAPI): asgi app :yield Iterator[AsyncIterator[httpx.AsyncClient]]: yield client """ async with httpx.AsyncClient( @@ -467,10 +468,13 @@ async def http_client( ) -> httpx.AsyncClient: """Authenticate and return client with cookies. - :param httpx.AsyncClient unbound_http_client: client w/o cookies - :param TestCreds creds: creds to authn - :param None setup_session: just a fixture call - :return httpx.AsyncClient: bound client with cookies + Args: + unbound_http_client (httpx.AsyncClient): client w/o cookies + creds (TestCreds): creds to authn + setup_session (None): just a fixture call + + Returns: + httpx.AsyncClient: bound client with cookies """ response = await unbound_http_client.post( "auth/", diff --git a/tests/test_api/test_auth/test_router.py b/tests/test_api/test_auth/test_router.py index 8ce3879d3..afaf9cd12 100644 --- a/tests/test_api/test_auth/test_router.py +++ b/tests/test_api/test_auth/test_router.py @@ -28,10 +28,11 @@ async def apply_user_account_control( ) -> dict[str, Any]: """Apply userAccountControl value and return response data. - :param AsyncClient http_client: client - :param str user_dn: distinguished name of the user - :param str user_account_control_value: new value to set for the - `userAccountControl` attribute. + Args: + http_client (AsyncClient): client + user_dn (str): distinguished name of the user + user_account_control_value (str): new value to set for the + `userAccountControl` attribute. """ response = await http_client.patch( "entry/update", diff --git a/tests/test_api/test_main/test_kadmin.py b/tests/test_api/test_main/test_kadmin.py index 89ffa4592..6cdfc2e03 100644 --- a/tests/test_api/test_main/test_kadmin.py +++ b/tests/test_api/test_main/test_kadmin.py @@ -138,8 +138,9 @@ async def test_setup_call( ) -> None: """Test setup args. - :param AsyncClient http_client: http cl - :param LDAPSession ldap_session: ldap + Args: + http_client (AsyncClient): http cl + ldap_session (LDAPSession): ldap """ response = await http_client.post( "/kerberos/setup", @@ -181,8 +182,9 @@ async def test_status_change( ) -> None: """Test setup args. - :param AsyncClient http_client: http cl - :param LDAPSession ldap_session: ldap + Args: + http_client (AsyncClient): http cl + ldap_session (LDAPSession): ldap """ response = await http_client.get("/kerberos/status") assert response.status_code == status.HTTP_200_OK @@ -209,8 +211,9 @@ async def test_ktadd( ) -> None: """Test ktadd. 
- :param AsyncClient http_client: http cl - :param LDAPSession ldap_session: ldap + Args: + http_client (AsyncClient): http cl + ldap_session (LDAPSession): ldap """ names = ["test1", "test2"] response = await http_client.post("/kerberos/ktadd", json=names) @@ -235,8 +238,9 @@ async def test_ktadd_404( ) -> None: """Test ktadd failure. - :param AsyncClient http_client: http cl - :param LDAPSession ldap_session: ldap + Args: + http_client (AsyncClient): http cl + ldap_session (LDAPSession): ldap """ kadmin.ktadd.side_effect = KRBAPIError() # type: ignore @@ -254,8 +258,9 @@ async def test_ldap_add( ) -> None: """Test add calls add_principal on user creation. - :param AsyncClient http_client: http - :param TestKadminClient kadmin: kadmin + Args: + http_client (AsyncClient): http + kadmin (TestKadminClient): kadmin """ san = "ktest" pw = "Password123" @@ -395,8 +400,9 @@ async def test_add_princ( ) -> None: """Test setup args. - :param AsyncClient http_client: http cl - :param LDAPSession ldap_session: ldap + Args: + http_client (AsyncClient): http cl + ldap_session (LDAPSession): ldap """ response = await http_client.post( "/kerberos/principal/add", @@ -418,8 +424,9 @@ async def test_rename_princ( ) -> None: """Test setup args. - :param AsyncClient http_client: http cl - :param LDAPSession ldap_session: ldap + Args: + http_client (AsyncClient): http cl + ldap_session (LDAPSession): ldap """ response = await http_client.patch( "/kerberos/principal/rename", @@ -441,8 +448,9 @@ async def test_change_princ( ) -> None: """Test setup args. - :param AsyncClient http_client: http cl - :param LDAPSession ldap_session: ldap + Args: + http_client (AsyncClient): http cl + ldap_session (LDAPSession): ldap """ response = await http_client.patch( "/kerberos/principal/reset", @@ -464,8 +472,9 @@ async def test_delete_princ( ) -> None: """Test setup args. - :param AsyncClient http_client: http cl - :param LDAPSession ldap_session: ldap + Args: + http_client (AsyncClient): http cl + ldap_session (LDAPSession): ldap """ response = await http_client.request( "delete", @@ -482,8 +491,9 @@ async def test_delete_princ( async def test_admin_incorrect_pw_setup(http_client: AsyncClient) -> None: """Test setup args. 
- :param AsyncClient http_client: http cl - :param LDAPSession ldap_session: ldap + Args: + http_client (AsyncClient): http cl + ldap_session (LDAPSession): ldap """ response = await http_client.get("/kerberos/status") assert response.status_code == status.HTTP_200_OK From e3660a61555b1f52f44825bc5bd5af6f398d1048 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Tue, 3 Jun 2025 18:59:18 +0300 Subject: [PATCH 03/25] refactor: docstrings task_508 --- .kerberos/config_server.py | 90 +++++---- app/alembic/env.py | 6 +- app/api/auth/oauth2.py | 2 +- app/api/auth/router.py | 30 ++- app/api/auth/schema.py | 8 + app/api/auth/utils.py | 6 +- app/api/exception_handlers.py | 7 +- app/api/ldap_schema/attribute_type_router.py | 28 +-- app/api/ldap_schema/object_class_router.py | 21 +- app/api/main/schema.py | 4 + app/api/network/schema.py | 15 +- app/api/shadow/router.py | 3 - app/config.py | 8 +- app/extra/dump_acme_certs.py | 3 + .../scripts/principal_block_user_sync.py | 4 +- app/extra/setup_dev.py | 2 +- app/ioc.py | 15 +- app/ldap_protocol/asn1parser.py | 30 ++- app/ldap_protocol/dialogue.py | 7 + app/ldap_protocol/dns.py | 72 ++++++- app/ldap_protocol/filter_interpreter.py | 80 +++++++- app/ldap_protocol/kerberos/utils.py | 9 +- app/ldap_protocol/ldap_requests/abandon.py | 7 +- app/ldap_protocol/ldap_requests/add.py | 11 +- app/ldap_protocol/ldap_requests/bind.py | 13 +- .../ldap_requests/bind_methods/base.py | 22 ++- .../ldap_requests/bind_methods/sasl_gssapi.py | 32 ++- .../ldap_requests/bind_methods/sasl_plain.py | 11 +- .../ldap_requests/bind_methods/simple.py | 7 +- app/ldap_protocol/ldap_requests/delete.py | 6 + app/ldap_protocol/ldap_requests/extended.py | 35 +++- app/ldap_protocol/ldap_requests/modify.py | 21 ++ app/ldap_protocol/ldap_requests/modify_dn.py | 7 +- app/ldap_protocol/ldap_requests/search.py | 30 ++- app/ldap_protocol/ldap_responses.py | 38 +++- .../ldap_schema/attribute_type_crud.py | 15 +- .../ldap_schema/object_class_crud.py | 15 +- app/ldap_protocol/messages.py | 18 +- app/ldap_protocol/multifactor.py | 20 +- app/ldap_protocol/policies/access_policy.py | 10 +- app/ldap_protocol/policies/network_policy.py | 15 +- app/ldap_protocol/policies/password_policy.py | 18 +- app/ldap_protocol/server.py | 34 +++- app/ldap_protocol/session_storage.py | 52 ++++- app/ldap_protocol/user_account_control.py | 11 +- app/ldap_protocol/utils/const.py | 10 + app/ldap_protocol/utils/cte.py | 33 ++-- app/ldap_protocol/utils/helpers.py | 62 ++++-- app/ldap_protocol/utils/pagination.py | 6 +- app/ldap_protocol/utils/queries.py | 24 ++- .../utils/raw_definition_parser.py | 24 +++ app/models.py | 182 +++++++++++++++--- app/multidirectory.py | 40 +++- app/schedule.py | 7 +- app/security.py | 9 +- pyproject.toml | 2 +- tests/conftest.py | 49 ++++- tests/test_api/test_main/test_kadmin.py | 6 + tests/test_api/test_shadow/conftest.py | 8 +- 59 files changed, 1045 insertions(+), 315 deletions(-) diff --git a/.kerberos/config_server.py b/.kerberos/config_server.py index 6c66fb493..f659eedd4 100644 --- a/.kerberos/config_server.py +++ b/.kerberos/config_server.py @@ -399,8 +399,9 @@ def write_configs( ) -> None: """Write two config files, strings are: hex bytes. - :param Annotated[str, Body krb5_config: krb5 hex bytes format config - :param Annotated[str, Body kdc_config: kdc hex bytes format config + Args: + krb5_config (str): krb5 hex bytes format config. + kdc_config (str): kdc hex bytes format config. 
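write_configs() above receives both files as hex strings and restores them with bytes.fromhex(). A tiny round trip showing the encoding a caller would perform:

# Hex round trip: the client hex-encodes the file body, the endpoint decodes
# it back before writing /etc/krb5.conf and the KDC config.
krb5_conf = "[libdefaults]\n    default_realm = EXAMPLE.COM\n"

wire_value = krb5_conf.encode().hex()   # what the caller would POST
restored = bytes.fromhex(wire_value)    # what write_configs() writes to disk

assert restored.decode() == krb5_conf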
""" with open("/etc/krb5.conf", "wb") as f: f.write(bytes.fromhex(krb5_config)) @@ -411,7 +412,11 @@ def write_configs( @setup_router.post("/stash", status_code=201) async def run_setup_stash(schema: ConfigSchema) -> None: - """Set up stash file.""" + """Set up stash file. + + Args: + schema (ConfigSchema): Configuration schema for stash setup. + """ proc = await asyncio.create_subprocess_exec( "kdb5_ldap_util", "-D", @@ -445,8 +450,11 @@ async def run_setup_stash(schema: ConfigSchema) -> None: async def run_setup_subtree(schema: ConfigSchema) -> None: """Set up subtree in ldap. - :param ConfigSchema schema: _description_ - :raises HTTPException: _description_ + Args: + schema (ConfigSchema): Configuration schema for subtree setup. + + Raises: + HTTPException: If setup fails. """ create_proc = await asyncio.create_subprocess_exec( "kdb5_ldap_util", @@ -500,9 +508,10 @@ async def add_princ( ) -> None: """Add principal. - :param Annotated[AbstractKRBManager, Depends kadmin: kadmin abstract - :param Annotated[str, Body name: principal name - :param Annotated[str, Body password: principal password + Args: + kadmin (AbstractKRBManager): Kadmin abstract manager. + name (str): Principal name. + password (str | None): Principal password. """ await kadmin.add_princ(name, password) @@ -512,11 +521,14 @@ async def get_princ( kadmin: Annotated[AbstractKRBManager, Depends(get_kadmin)], name: str, ) -> Principal: - """Add principal. + """Get principal. - :param Annotated[AbstractKRBManager, Depends kadmin: kadmin abstract - :param Annotated[str, Body name: principal name - :param Annotated[str, Body password: principal password + Args: + kadmin (AbstractKRBManager): Kadmin abstract manager. + name (str): Principal name. + + Returns: + Principal: Principal object. """ return await kadmin.get_princ(name) @@ -526,11 +538,11 @@ async def del_princ( kadmin: Annotated[AbstractKRBManager, Depends(get_kadmin)], name: str, ) -> None: - """Add principal. + """Delete principal. - :param Annotated[AbstractKRBManager, Depends kadmin: kadmin abstract - :param Annotated[str, Body name: principal name - :param Annotated[str, Body password: principal password + Args: + kadmin (AbstractKRBManager): Kadmin abstract manager. + name (str): Principal name. """ await kadmin.del_princ(name) @@ -541,11 +553,12 @@ async def change_princ_password( name: Annotated[str, Body()], password: Annotated[str, Body()], ) -> None: - """Change princ pw principal. + """Change principal password. - :param Annotated[AbstractKRBManager, Depends kadmin: kadmin abstract - :param Annotated[str, Body name: principal name - :param Annotated[str, Body password: principal password + Args: + kadmin (AbstractKRBManager): Kadmin abstract manager. + name (str): Principal name. + password (str): Principal password. """ await kadmin.change_password(name, password) @@ -560,11 +573,12 @@ async def create_or_update_princ_password( name: Annotated[str, Body()], password: Annotated[str, Body()], ) -> None: - """Change princ pw principal or create with new. + """Change principal password or create with new. - :param Annotated[AbstractKRBManager, Depends kadmin: kadmin abstract - :param Annotated[str, Body name: principal name - :param Annotated[str, Body password: principal password + Args: + kadmin (AbstractKRBManager): Kadmin abstract manager. + name (str): Principal name. + password (str): Principal password. """ await kadmin.create_or_update_princ_pw(name, password) @@ -581,9 +595,10 @@ async def rename_princ( ) -> None: """Rename principal. 
- :param Annotated[AbstractKRBManager, Depends kadmin: kadmin abstract - :param Annotated[str, Body name: principal name - :param Annotated[str, Body new_name: principal new name + Args: + kadmin (AbstractKRBManager): Kadmin abstract manager. + name (str): Principal name. + new_name (str): Principal new name. """ """""" await kadmin.rename_princ(name, new_name) @@ -596,9 +611,12 @@ async def ktadd( ) -> FileResponse: """Ktadd principal. - :param Annotated[AbstractKRBManager, Depends kadmin: kadmin abstract - :param Annotated[str, Body name: principal name - :param Annotated[str, Body password: principal password + Args: + kadmin (AbstractKRBManager): Kadmin abstract manager. + names (list[str]): List of principal names. + + Returns: + FileResponse: Keytab file response. """ filename = os.path.join(gettempdir(), str(uuid.uuid1())) await kadmin.ktadd(names, filename) @@ -616,8 +634,9 @@ async def lock_princ( ) -> None: """Lock principal. - :param Annotated[AbstractKRBManager, Depends kadmin: kadmin abstract - :param Annotated[str, Body name: principal name + Args: + kadmin (AbstractKRBManager): Kadmin abstract manager. + name (str): Principal name. """ await kadmin.lock_princ(name) @@ -627,10 +646,11 @@ async def force_pw_reset_principal( kadmin: Annotated[AbstractKRBManager, Depends(get_kadmin)], name: Annotated[str, Body(embed=True)], ) -> None: - """Mark princ as pw expired. + """Mark principal as password expired. - :param Annotated[AbstractKRBManager, Depends kadmin: kadmin abstract - :param Annotated[str, Body name: principal name + Args: + kadmin (AbstractKRBManager): Kadmin abstract manager. + name (str): Principal name. """ await kadmin.force_pw_principal(name) diff --git a/app/alembic/env.py b/app/alembic/env.py index c6ca0a787..85d4bcd36 100644 --- a/app/alembic/env.py +++ b/app/alembic/env.py @@ -22,7 +22,11 @@ def do_run_migrations(connection: AsyncConnection): - """Run sync migrations.""" + """Run sync migrations. + + Args: + connection: AsyncConnection: + """ context.configure( connection=connection, target_metadata=target_metadata, diff --git a/app/api/auth/oauth2.py b/app/api/auth/oauth2.py index c34345263..58b7c1fac 100644 --- a/app/api/auth/oauth2.py +++ b/app/api/auth/oauth2.py @@ -77,8 +77,8 @@ async def get_current_user( session_storage (FromDishka[SessionStorage]): session storage request (Request): request response (Response): response + ip (Annotated[IPv4Address | IPv6Address]): ip address user_agent (Annotated[str]): user agent - :param Annotated[IPv4Address | IPv6Address] ip: ip address Returns: UserSchema: user schema diff --git a/app/api/auth/router.py b/app/api/auth/router.py index a84e13068..37a5c1f78 100644 --- a/app/api/auth/router.py +++ b/app/api/auth/router.py @@ -67,15 +67,15 @@ async def login( - **password**: password \f - :param Annotated[OAuth2Form, Depends form: login form - Args: - session (FromDishka[AsyncSession]): db - settings (FromDishka[Settings]): app settings - mfa (FromDishka[MultifactorAPI]): mfa api wrapper - storage (FromDishka[SessionStorage]): session storage - response (Response): FastAPI response - :param Annotated[IPv4Address | IPv6Address, Depends ip: client ip + form (OAuth2Form): Login form with username and password. + session (FromDishka[AsyncSession]): Database session. + settings (FromDishka[Settings]): Application settings. + mfa (FromDishka[MultifactorAPI]): MFA API wrapper. + storage (FromDishka[SessionStorage]): Session storage. + response (Response): FastAPI response object. 
+ ip (IPv4Address | IPv6Address): Client IP address. + user_agent (str): Client user agent string. Raises: HTTPException: 401 if incorrect username or password @@ -85,9 +85,6 @@ async def login( HTTPException: 403 if ip is not provided HTTPException: 403 if user not part of network policy HTTPException: 426 if mfa required - - Returns: - None: None """ user = await authenticate_user(session, form.username, form.password) @@ -190,18 +187,15 @@ async def password_reset( \f Args: - session (FromDishka[AsyncSession]): db - kadmin (FromDishka[AbstractKadmin]): kadmin api - :param Annotated[str, Body identity: reset target user - :param Annotated[str, Body new_password: new password for user + identity (str): Reset target user identity. + new_password (str): New password for user. + session (FromDishka[AsyncSession]): Database session. + kadmin (FromDishka[AbstractKadmin]): Kadmin API instance. Raises: HTTPException: 404 if user not found HTTPException: 422 if password not valid HTTPException: 424 if kerberos password update failed - - Returns: - None: None """ user = await get_user(session, identity) diff --git a/app/api/auth/schema.py b/app/api/auth/schema.py index e8b2f3321..69cd70e2f 100644 --- a/app/api/auth/schema.py +++ b/app/api/auth/schema.py @@ -67,6 +67,12 @@ class SetupRequest(BaseModel): @field_validator("domain") def validate_domain(cls, v: str) -> str: # noqa + """Description. + + Args: + v: str: + + """ if re.match(_domain_re, v) is None: raise ValueError("Invalid domain value") return v.lower() @@ -82,6 +88,7 @@ class MFACreateRequest(BaseModel): @computed_field # type: ignore @property def key_name(self) -> str: + """Description.""" if self.is_ldap_scope: return "mfa_key_ldap" @@ -90,6 +97,7 @@ def key_name(self) -> str: @computed_field # type: ignore @property def secret_name(self) -> str: + """Description.""" if self.is_ldap_scope: return "mfa_secret_ldap" diff --git a/app/api/auth/utils.py b/app/api/auth/utils.py index dd3aeae02..66f13550e 100644 --- a/app/api/auth/utils.py +++ b/app/api/auth/utils.py @@ -19,7 +19,8 @@ def get_ip_from_request(request: Request) -> IPv4Address | IPv6Address: """Get IP address from request. Args: - request (Request): The incoming request object. + request(Request): The incoming request object. + request: Request: Returns: IPv4Address | None: The IP address or None. @@ -39,7 +40,8 @@ def get_user_agent_from_request(request: Request) -> str: """Get user agent from request. Args: - request (Request): The incoming request object. + request(Request): The incoming request object. + request: Request: Returns: str: The user agent header. diff --git a/app/api/exception_handlers.py b/app/api/exception_handlers.py index 764df6024..ec0c416b7 100644 --- a/app/api/exception_handlers.py +++ b/app/api/exception_handlers.py @@ -10,7 +10,12 @@ def handle_db_connect_error( request: Request, # noqa: ARG001 exc: Exception, ) -> NoReturn: - """Handle duplicate.""" + """Handle duplicate. + + Args: + request: Request: + # noqa: ARG001exc: Exception: + """ if "QueuePool limit of size" in str(exc): logger.critical("POOL EXCEEDED {}", exc) diff --git a/app/api/ldap_schema/attribute_type_router.py b/app/api/ldap_schema/attribute_type_router.py index caa587a2b..cb8045139 100644 --- a/app/api/ldap_schema/attribute_type_router.py +++ b/app/api/ldap_schema/attribute_type_router.py @@ -41,12 +41,8 @@ async def create_one_attribute_type( \f Args: - request_data (AttributeTypeSchema): Data for creating attribute - type. 
+ request_data (AttributeTypeSchema): Data for creating attribute type. session (FromDishka[AsyncSession]): Database session. - - Returns: - None. """ await create_attribute_type( oid=request_data.oid, @@ -73,7 +69,7 @@ async def get_one_attribute_type( \f Args: - attribute_type_name (str): name of the Attribute Type. + attribute_type_name (str): Name of the Attribute Type. session (FromDishka[AsyncSession]): Database session. Raises: @@ -111,9 +107,9 @@ async def get_list_attribute_types_with_pagination( \f Args: - page_number (int): number of page. + page_number (int): Number of page. session (FromDishka[AsyncSession]): Database session. - page_size (int): number of items per page. + page_size (int): Number of items per page. Returns: AttributeTypePaginationSchema: Paginator. @@ -150,18 +146,13 @@ async def modify_one_attribute_type( \f Args: - attribute_type_name (str): name of the attribute type for - modifying. + attribute_type_name (str): Name of the attribute type for modifying. request_data (AttributeTypeUpdateSchema): Changed data. session (FromDishka[AsyncSession]): Database session. Raises: HTTP_404_NOT_FOUND: If attribute type not found. - HTTP_400_BAD_REQUEST: If attribute type is system->cannot be - changed - - Returns: - None. + HTTP_400_BAD_REQUEST: If attribute type is system->cannot be changed """ attribute_type = await get_attribute_type_by_name( attribute_type_name, @@ -199,17 +190,12 @@ async def delete_bulk_attribute_types( """Delete attribute types by their names. \f - Args: - attribute_types_names (list[str]): List of attribute types - names. + attribute_types_names (list[str]): List of attribute types names. session (FromDishka[AsyncSession]): Database session. Raises: HTTP_400_BAD_REQUEST: If nothing to delete. - - Returns: - None: None """ if not attribute_types_names: raise HTTPException( diff --git a/app/api/ldap_schema/object_class_router.py b/app/api/ldap_schema/object_class_router.py index fd3359e45..e3c07c7df 100644 --- a/app/api/ldap_schema/object_class_router.py +++ b/app/api/ldap_schema/object_class_router.py @@ -39,12 +39,8 @@ async def create_one_object_class( \f Args: - request_data (ObjectClassSchema): Data for creating Object - Class. + request_data (ObjectClassSchema): Data for creating Object Class. session (FromDishka[AsyncSession]): Database session. - - Returns: - None. """ await create_object_class( oid=request_data.oid, @@ -72,7 +68,7 @@ async def get_one_object_class( \f Args: - object_class_name (str): name of the Object Class. + object_class_name (str): Name of the Object Class. session (FromDishka[AsyncSession]): Database session. Raises: @@ -110,9 +106,9 @@ async def get_list_object_classes_with_pagination( \f Args: - page_number (int): number of page. + page_number (int): Number of page. session (FromDishka[AsyncSession]): Database session. - page_size (int): number of items per page. + page_size (int): Number of items per page. Returns: ObjectClassPaginationSchema: Paginator. @@ -155,11 +151,7 @@ async def modify_one_object_class( Raises: HTTP_404_NOT_FOUND: If nothing to delete. - HTTP_400_BAD_REQUEST: If object class is system->cannot be - changed - - Returns: - None. + HTTP_400_BAD_REQUEST: If object class is system->cannot be changed """ object_class = await get_object_class_by_name(object_class_name, session) if not object_class: @@ -199,9 +191,6 @@ async def delete_bulk_object_classes( Raises: HTTP_400_BAD_REQUEST: If nothing to delete. 
-
-    Returns:
-        None: None
     """
     if not object_classes_names:
         raise HTTPException(
diff --git a/app/api/main/schema.py b/app/api/main/schema.py
index 2c8c53ab7..d910a59b1 100644
--- a/app/api/main/schema.py
+++ b/app/api/main/schema.py
@@ -50,6 +50,8 @@ class KerberosSetupRequest(BaseModel):
 
 
 class _PolicyFields:
+    """Common access policy flag fields."""
+
     name: str
     can_read: bool
     can_add: bool
@@ -59,6 +61,8 @@ class _PolicyFields:
 
 
 class _MaterialFields:
+    """Common id field."""
+
     id: int
 
 
diff --git a/app/api/network/schema.py b/app/api/network/schema.py
index 9fe7a35d9..cc2a5e8f8 100644
--- a/app/api/network/schema.py
+++ b/app/api/network/schema.py
@@ -54,6 +54,12 @@ def complete_netmasks(self) -> list[IPv4Address | IPv4Network]:
     @field_validator("groups")
     @classmethod
     def validate_group(cls, groups: list[str]) -> list[str]:
+        """Validate group entries.
+
+        Args:
+            groups (list[str]): group DNs to validate
+
+        """
         if not groups:
             return groups
         if all(validate_entry(group) for group in groups):
@@ -64,6 +70,11 @@ def validate_group(cls, groups: list[str]) -> list[str]:
     @field_validator("mfa_groups")
     @classmethod
     def validate_mfa_group(cls, mfa_groups: list[str]) -> list[str]:
+        """Validate MFA group entries.
+
+        Args:
+            mfa_groups (list[str]): MFA group DNs to validate
+        """
         if not mfa_groups:
             return mfa_groups
         if all(validate_entry(group) for group in mfa_groups):
@@ -80,10 +91,12 @@ def netmasks_serialize(
         """Serialize netmasks to list.
 
         Args:
-            netmasks (IPv4IntefaceListType): ip masks
+            netmasks (IPv4IntefaceListType): ip masks to
+                serialize into JSON-ready values
 
         Returns:
             list[str | dict]: ready to json serialized
+
         """
         values: list[str | dict] = []
 
diff --git a/app/api/shadow/router.py b/app/api/shadow/router.py
index f93ebec6f..932af49a5 100644
--- a/app/api/shadow/router.py
+++ b/app/api/shadow/router.py
@@ -97,9 +97,6 @@ async def sync_password(
     Raises:
         HTTPException: 404 if user not found
         HTTPException: 422 if password not valid
-
-    Returns:
-        None: None
     """
     user = await get_user(session, principal)
 
diff --git a/app/config.py b/app/config.py
index f47b62dfb..4a823a840 100644
--- a/app/config.py
+++ b/app/config.py
@@ -24,6 +24,7 @@
 
 
 def _get_vendor_version() -> str:
+    """Read the package version from pyproject.toml."""
     with open("/pyproject.toml", "rb") as f:
         return tomllib.load(f)["tool"]["poetry"]["version"]
 
@@ -118,7 +119,11 @@ def POSTGRES_URI(self) -> PostgresDsn:  # noqa
 
     @field_validator("TIMEZONE", mode="before")
     def create_tz(cls, tz: str) -> ZoneInfo:  # noqa: N805
-        """Get timezone from a string."""
+        """Get timezone from a string.
+
+        Args:
+            tz (str): timezone name, e.g. "UTC"
+        """
         try:
             value = ZoneInfo(tz)
         except ZoneInfoNotFoundError as err:
@@ -135,6 +140,7 @@ def MFA_API_URI(self) -> str:  # noqa: N802
 
         Returns:
             str: url
+
         """
         if self.MFA_API_SOURCE == "dev":
             return "https://api.multifactor.dev"
diff --git a/app/extra/dump_acme_certs.py b/app/extra/dump_acme_certs.py
index 9c9442b78..49f62e418 100644
--- a/app/extra/dump_acme_certs.py
+++ b/app/extra/dump_acme_certs.py
@@ -17,6 +17,9 @@ def dump_acme_cert(resolver: str = "md-resolver") -> None:
     acme file can be generated long enough to exit the script,
     try read until file contents is generated.
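The retry-read behaviour described in this docstring reduces to a small polling loop; the path, timeout and interval below are illustrative, not taken from the script:

import os
import time


def wait_for_file(path: str, timeout: float = 60.0, interval: float = 1.0) -> bytes:
    """Poll until the file exists and is non-empty, then return its bytes."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if os.path.exists(path) and os.path.getsize(path) > 0:
            with open(path, "rb") as f:
                return f.read()
        time.sleep(interval)
    raise TimeoutError(f"{path} was not generated in time")


# cert = wait_for_file("/certs/acme.json")  # hypothetical path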
+ + Args: + resolver: str: (Default value = "md-resolver") """ if os.path.exists("/certs/cert.pem") and os.path.exists( "/certs/privkey.pem" diff --git a/app/extra/scripts/principal_block_user_sync.py b/app/extra/scripts/principal_block_user_sync.py index 3aeccaec5..c77f4de24 100644 --- a/app/extra/scripts/principal_block_user_sync.py +++ b/app/extra/scripts/principal_block_user_sync.py @@ -92,9 +92,11 @@ def _find_krb_exp_attr(directory: Directory) -> Attribute | None: """Find krbprincipalexpiration attribute in directory. Args: - directory (Directory): the directory object + directory(Directory): the directory object + directory: Directory: Returns: + Atrribute | None: the attribute with the name Atrribute | None: the attribute with the name 'krbprincipalexpiration', or None if not found. """ diff --git a/app/extra/setup_dev.py b/app/extra/setup_dev.py index 2038cb789..c21f1cff6 100644 --- a/app/extra/setup_dev.py +++ b/app/extra/setup_dev.py @@ -9,7 +9,7 @@ CN=User 4 OU="2FA" CN=Service Accounts - CN=User 5 + CN=User 5 Copyright (c) 2024 MultiFactor License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE diff --git a/app/ioc.py b/app/ioc.py index 88b3ac0af..ad7185dbe 100644 --- a/app/ioc.py +++ b/app/ioc.py @@ -50,7 +50,11 @@ class MainProvider(Provider): @provide(scope=Scope.APP) def get_engine(self, settings: Settings) -> AsyncEngine: - """Get async engine.""" + """Get async engine. + + Args: + settings: Settings: + """ return create_async_engine( str(settings.POSTGRES_URI), pool_size=settings.INSTANCE_DB_POOL_SIZE, @@ -67,7 +71,11 @@ def get_session_factory( self, engine: AsyncEngine, ) -> async_sessionmaker[AsyncSession]: - """Create session factory.""" + """Create session factory. + + Args: + engine: AsyncEngine: + """ return async_sessionmaker(engine, expire_on_commit=False) @provide(scope=Scope.REQUEST) @@ -100,9 +108,8 @@ async def get_kadmin_http( settings (Settings): app settings session_maker (AsyncSessionMaker): session maker - Returns: + Yields: AsyncIterator[AbstractKadmin]: kadmin with client - :yield Iterator[AsyncIterator[AbstractKadmin]]: kadmin """ limits = httpx.Limits( max_connections=settings.KRB5_SERVER_MAX_CONN, diff --git a/app/ldap_protocol/asn1parser.py b/app/ldap_protocol/asn1parser.py index 06ce77c5d..a4a3b1da3 100644 --- a/app/ldap_protocol/asn1parser.py +++ b/app/ldap_protocol/asn1parser.py @@ -74,7 +74,12 @@ class ASN1Row[T: "ASN1Row | list[ASN1Row] | str | bytes | int | float"]: @classmethod def from_tag(cls, tag: Tag, value: T) -> "ASN1Row": - """Create row from tag.""" + """Create row from tag. + + Args: + tag: Tag: + value: T: + """ return cls(tag.cls, tag.nr, value) def _handle_extensible_match(self) -> str: @@ -145,6 +150,9 @@ def serialize(self, obj: "ASN1Row | T | None" = None) -> str: # noqa: C901 Recursively processes ASN.1 structures to construct a valid LDAP filter string based on LDAP operations such as AND, OR, and substring matches. + + Args: + obj: "ASN1Row | T | None": (Default value = None) """ if obj is None: obj = self @@ -229,6 +237,7 @@ def to_ldap_filter(self) -> str: The method recursively serializes ASN.1 rows into the LDAP filter format based on tag IDs and class IDs. + """ return self.serialize() @@ -237,7 +246,12 @@ def value_to_string( tag: Tag, value: str | bytes | int | bool, ) -> bytes | str | int: - """Convert value to string.""" + """Convert value to string. 
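For reference, the engine and session-factory providers above reduce to the following SQLAlchemy calls; the DSN and pool size here are placeholders rather than values from the real Settings object:

from sqlalchemy import text
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

engine = create_async_engine(
    "postgresql+asyncpg://user:pass@localhost/md",  # placeholder DSN
    pool_size=10,
)
session_factory = async_sessionmaker(engine, expire_on_commit=False)


async def ping() -> None:
    async with session_factory() as session:
        await session.execute(text("SELECT 1"))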
+ + Args: + tag: Tag: + value: str | bytes | int | bool: + """ if tag.nr == Numbers.Integer: with suppress(ValueError): return int(value) @@ -255,7 +269,11 @@ def value_to_string( def asn1todict(decoder: Decoder) -> list[ASN1Row]: - """Recursively collect ASN.1 data to list of ASNRows.""" + """Recursively collect ASN.1 data to list of ASNRows. + + Args: + decoder: Decoder: + """ out = [] while not decoder.eof(): tag = decoder.peek() @@ -277,7 +295,11 @@ def asn1todict(decoder: Decoder) -> list[ASN1Row]: def _validate_oid(oid: str) -> str: - """Validate ldap oid with regex.""" + """Validate ldap oid with regex. + + Args: + oid: str: + """ if not Encoder._re_oid.match(oid): raise ValueError("Invalid LDAPOID") return oid diff --git a/app/ldap_protocol/dialogue.py b/app/ldap_protocol/dialogue.py index fa2e5c818..cc7af6dca 100644 --- a/app/ldap_protocol/dialogue.py +++ b/app/ldap_protocol/dialogue.py @@ -103,6 +103,12 @@ def user(self) -> UserSchema | None: @user.setter def user(self, user: User) -> None: + """Description. + + Args: + user: User: + + """ raise NotImplementedError( "Cannot manually set user, use `set_user()` instead", ) @@ -162,6 +168,7 @@ def key(self) -> str: return f"ldap:{self.id}" def _bound_ip(self) -> bool: + """Description.""" return hasattr(self, "ip") async def bind_session(self) -> None: diff --git a/app/ldap_protocol/dns.py b/app/ldap_protocol/dns.py index 6b92e44e1..a917b9742 100644 --- a/app/ldap_protocol/dns.py +++ b/app/ldap_protocol/dns.py @@ -46,9 +46,24 @@ def logger_wraps(is_stub: bool = False) -> Callable: - """Log DNSManager calls.""" + """Log DNSManager calls. + + Args: + is_stub (bool): If True, marks the logger as a stub. Default is False. + + Returns: + Callable: Decorator for logging. + """ def wrapper(func: Callable) -> Callable: + """Decorator for logging function calls. + + Args: + func (Callable): Function to wrap. + + Returns: + Callable: Wrapped function. + """ name = func.__name__ bus_type = " stub " if is_stub else " " @@ -105,7 +120,13 @@ def __init__( dns_server_ip: str | None, tsig_key: str | None, ) -> None: - """Set settings.""" + """Set settings. + + Args: + zone_name (str | None): DNS zone name. + dns_server_ip (str | None): DNS server IP address. + tsig_key (str | None): TSIG key. + """ self.zone_name = zone_name self.domain = zone_name + "." if zone_name is not None else None self.dns_server_ip = dns_server_ip @@ -363,7 +384,14 @@ async def get_all_records(self) -> list[DNSRecords]: async def get_dns_state( session: AsyncSession, ) -> "DNSManagerState": - """Get or create DNS manager state.""" + """Get or create DNS manager state. + + Args: + session (AsyncSession): Database session. + + Returns: + DNSManagerState: Current DNS manager state. + """ state = await session.scalar( select(CatalogueSetting) .filter(CatalogueSetting.name == DNS_MANAGER_STATE_NAME) @@ -386,7 +414,12 @@ async def set_dns_manager_state( session: AsyncSession, state: DNSManagerState | str, ) -> None: - """Update DNS state.""" + """Update DNS state. + + Args: + session (AsyncSession): Database session. + state (DNSManagerState | str): New DNS manager state. + """ await session.execute( update(CatalogueSetting) .values({"value": state}) @@ -395,7 +428,17 @@ async def set_dns_manager_state( async def resolve_dns_server_ip(host: str) -> str: - """Get DNS server IP from Docker network.""" + """Get DNS server IP from Docker network. + + Args: + host (str): Hostname to resolve. + + Returns: + str: Resolved IP address. 
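The logger_wraps() factory documented above follows the standard decorator-with-arguments shape. A cut-down sketch using the stdlib logging module instead of loguru, covering only async callables:

import asyncio
import functools
import logging

logging.basicConfig(level=logging.INFO)


def logger_wraps(is_stub: bool = False):
    """Log entry and exit of the wrapped coroutine function."""
    bus_type = " stub " if is_stub else " "

    def wrapper(func):
        @functools.wraps(func)
        async def wrapped(*args, **kwargs):
            logging.info("Calling%s%s", bus_type, func.__name__)
            result = await func(*args, **kwargs)
            logging.info("Finished%s%s", bus_type, func.__name__)
            return result

        return wrapped

    return wrapper


@logger_wraps(is_stub=True)
async def create_record(name: str) -> None:
    pass


asyncio.run(create_record("example.com"))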
+ + Raises: + DNSConnectionError: If DNS server IP cannot be resolved. + """ async_resolver = AsyncResolver() dns_server_ip_resolve = await async_resolver.resolve(host) if dns_server_ip_resolve is None or dns_server_ip_resolve.rrset is None: @@ -407,7 +450,15 @@ async def get_dns_manager_settings( session: AsyncSession, resolve_coro: Awaitable[str], ) -> "DNSManagerSettings": - """Get DNS manager's settings.""" + """Get DNS manager's settings. + + Args: + session (AsyncSession): Database session. + resolve_coro (Awaitable[str]): Coroutine to resolve DNS server IP. + + Returns: + DNSManagerSettings: DNS manager settings. + """ settings_dict = {} for setting in await session.scalars( select(CatalogueSetting).filter( @@ -435,7 +486,14 @@ async def get_dns_manager_settings( async def get_dns_manager_class( session: AsyncSession, ) -> type[AbstractDNSManager]: - """Get DNS manager class.""" + """Get DNS manager class. + + Args: + session (AsyncSession): Database session. + + Returns: + type[AbstractDNSManager]: DNS manager class type. + """ if await get_dns_state(session) != DNSManagerState.NOT_CONFIGURED: return DNSManager return StubDNSManager diff --git a/app/ldap_protocol/filter_interpreter.py b/app/ldap_protocol/filter_interpreter.py index 07aefe734..1dba50202 100644 --- a/app/ldap_protocol/filter_interpreter.py +++ b/app/ldap_protocol/filter_interpreter.py @@ -29,6 +29,11 @@ def _get_substring(right: ASN1Row) -> str: # RFC 4511 + """Description. + + Args: + right: ASN1Row: + """ expr = right.value[0] value = expr.value if isinstance(value, bytes): @@ -44,6 +49,14 @@ def _from_filter( attr: str, right: ASN1Row, ) -> UnaryExpression: + """Description. + + Args: + model: type: + item: ASN1Row: + attr: str: + right: ASN1Row: + """ is_substring = item.tag_id == TagNumbers.SUBSTRING col = getattr(model, attr) @@ -60,7 +73,11 @@ def _from_filter( def _filter_memberof(dn: str) -> UnaryExpression: - """Retrieve query conditions with the memberOF attribute.""" + """Retrieve query conditions with the memberOF attribute. + + Args: + dn: str: + """ group_id_subquery = ( select(Group.id) .join(Group.directory) @@ -78,7 +95,11 @@ def _filter_memberof(dn: str) -> UnaryExpression: def _filter_member(dn: str) -> UnaryExpression: - """Retrieve query conditions with the member attribute.""" + """Retrieve query conditions with the member attribute. + + Args: + dn: str: + """ user_id_subquery = ( select(User.id) .join(User.directory) @@ -96,14 +117,22 @@ def _filter_member(dn: str) -> UnaryExpression: def _recursive_filter_memberof(dn: str) -> UnaryExpression: - """Retrieve query conditions with the memberOF attribute(recursive).""" + """Retrieve query conditions with the memberOF attribute(recursive). + + Args: + dn: str: + """ cte = find_members_recursive_cte(dn) return Directory.id.in_(select(cte.c.directory_id).offset(1)) # type: ignore def _get_filter_function(column: str) -> Callable[..., UnaryExpression]: - """Retrieve the appropriate filter function based on the attribute.""" + """Retrieve the appropriate filter function based on the attribute. + + Args: + column: str: + """ if len(column.split(":")) == 1: attribute = column oid = "" @@ -127,7 +156,13 @@ def _ldap_filter_by_attribute( attr: ASN1Row, search_value: ASN1Row, ) -> UnaryExpression: - """Retrieve query conditions based on the specified LDAP attribute.""" + """Retrieve query conditions based on the specified LDAP attribute. 
+ + Args: + oid: ASN1Row | None: + attr: ASN1Row: + search_value: ASN1Row: + """ if oid is None: attribute = attr.value.lower() else: @@ -139,6 +174,11 @@ def _ldap_filter_by_attribute( def _cast_item(item: ASN1Row) -> UnaryExpression | ColumnElement: + """Description. + + Args: + item: ASN1Row: + """ # present, for e.g. `attibuteName=*`, `(attibuteName)` if item.tag_id == 7: attr = item.value.lower().replace("objectcategory", "objectclass") @@ -182,7 +222,11 @@ def _cast_item(item: ASN1Row) -> UnaryExpression | ColumnElement: def cast_filter2sql(expr: ASN1Row) -> UnaryExpression | ColumnElement: - """Recursively cast Filter to SQLAlchemy conditions.""" + """Recursively cast Filter to SQLAlchemy conditions. + + Args: + expr: ASN1Row: + """ if expr.tag_id in range(3): conditions = [] for item in expr.value: @@ -202,6 +246,13 @@ def _from_str_filter( is_substring: bool, item: Filter, ) -> UnaryExpression: + """Description. + + Args: + model: type: + is_substring: bool: + item: Filter: + """ col = getattr(model, item.attr) if is_substring: @@ -212,12 +263,21 @@ def _from_str_filter( def _api_filter(item: Filter) -> UnaryExpression: - """Retrieve query conditions based on the specified LDAP attribute.""" + """Retrieve query conditions based on the specified LDAP attribute. + + Args: + item: Filter: + """ filter_func = _get_filter_function(item.attr) return filter_func(item.val) def _cast_filt_item(item: Filter) -> UnaryExpression | ColumnElement: + """Description. + + Args: + item: Filter: + """ if item.val == "*": if item.attr in User.search_fields: return not_(eq(getattr(User, item.attr), None)) @@ -247,7 +307,11 @@ def _cast_filt_item(item: Filter) -> UnaryExpression | ColumnElement: def cast_str_filter2sql(expr: Filter) -> UnaryExpression | ColumnElement: - """Cast ldap filter to sa query.""" + """Cast ldap filter to sa query. + + Args: + expr: Filter: + """ if expr.type == "group": conditions = [] for item in expr.filters: diff --git a/app/ldap_protocol/kerberos/utils.py b/app/ldap_protocol/kerberos/utils.py index 4b6f5681c..1ff5f2290 100644 --- a/app/ldap_protocol/kerberos/utils.py +++ b/app/ldap_protocol/kerberos/utils.py @@ -16,13 +16,20 @@ def logger_wraps(is_stub: bool = False) -> Callable: """Log kadmin calls. Args: - is_stub (bool): flag to change logs, defaults to False + is_stub(bool): flag to change logs, defaults to False + is_stub: bool: (Default value = False) Returns: Callable: any method """ def wrapper(func: Callable) -> Callable: + """Description. + + Args: + func: Callable: + + """ name = func.__name__ bus_type = " stub " if is_stub else " " diff --git a/app/ldap_protocol/ldap_requests/abandon.py b/app/ldap_protocol/ldap_requests/abandon.py index 9c616f106..f85927805 100644 --- a/app/ldap_protocol/ldap_requests/abandon.py +++ b/app/ldap_protocol/ldap_requests/abandon.py @@ -20,7 +20,12 @@ class AbandonRequest(BaseRequest): @classmethod def from_data(cls, data: dict[str, list[ASN1Row]]) -> "AbandonRequest": # noqa: ARG003 - """Create structure from ASN1Row dataclass list.""" + """Create structure from ASN1Row dataclass list. 
+ + Args: + data: dict[str: + list[ASN1Row]]: + """ return cls(message_id=1) async def handle(self) -> AsyncGenerator: diff --git a/app/ldap_protocol/ldap_requests/add.py b/app/ldap_protocol/ldap_requests/add.py index 0e999fcea..178f20128 100644 --- a/app/ldap_protocol/ldap_requests/add.py +++ b/app/ldap_protocol/ldap_requests/add.py @@ -71,15 +71,21 @@ class AddRequest(BaseRequest): @property def attr_names(self) -> dict[str, list[str | bytes]]: + """Description.""" return {attr.l_name: attr.vals for attr in self.attributes} @property def attributes_dict(self) -> dict[str, list[str | bytes]]: + """Description.""" return {attr.type: attr.vals for attr in self.attributes} @classmethod def from_data(cls, data: ASN1Row) -> "AddRequest": - """Deserialize.""" + """Deserialize. + + Args: + data: ASN1Row: + """ entry, attributes = data # type: ignore attributes = [ PartialAttribute( @@ -385,7 +391,8 @@ def from_dict( Args: entry (str): entry - :param dict[str, list[str]] attributes: dict of attrs + attributes: dict[str, list[str]]: + password: str | None: (Default value = None) Returns: AddRequest: instance diff --git a/app/ldap_protocol/ldap_requests/bind.py b/app/ldap_protocol/ldap_requests/bind.py index ec78a0983..50dbe6343 100644 --- a/app/ldap_protocol/ldap_requests/bind.py +++ b/app/ldap_protocol/ldap_requests/bind.py @@ -59,7 +59,11 @@ class BindRequest(BaseRequest): @classmethod def from_data(cls, data: list[ASN1Row]) -> "BindRequest": - """Get bind from data dict.""" + """Get bind from data dict. + + Args: + data: list[ASN1Row]: + """ auth = data[2].tag_id otpassword: str | None @@ -235,7 +239,12 @@ class UnbindRequest(BaseRequest): @classmethod def from_data(cls, data: dict[str, list[ASN1Row]]) -> "UnbindRequest": # noqa: ARG003 - """Unbind request has no body.""" + """Unbind request has no body. + + Args: + data: dict[str: + list[ASN1Row]]: + """ return cls() async def handle( diff --git a/app/ldap_protocol/ldap_requests/bind_methods/base.py b/app/ldap_protocol/ldap_requests/bind_methods/base.py index b7e47f147..ba7093c9a 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/base.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/base.py @@ -60,10 +60,14 @@ def get_bad_response(error_message: LDAPBindErrors) -> BindResponse: """Generate BindResponse object with an invalid credentials error. Args: - error_message (LDAPBindErrors): Error message to include in the - response + error_message(LDAPBindErrors): Error message to include in the + response + error_message: LDAPBindErrors: Returns: + BindResponse: A response object with the result code set to + BindResponse: A response object with the result code set to + INVALID_CREDENTIALS, an empty matchedDN, and the provided error BindResponse: A response object with the result code set to INVALID_CREDENTIALS, an empty matchedDN, and the provided error message @@ -88,11 +92,15 @@ def METHOD_ID(self) -> int: # noqa: N802 @abstractmethod def is_valid(self, user: User) -> bool: - """Validate state.""" + """Validate state. + + Args: + user: User: + """ @abstractmethod def is_anonymous(self) -> bool: - """Return true if anonymous.""" + """Description.""" @abstractmethod async def get_user(self, session: AsyncSession, username: str) -> User: @@ -108,4 +116,8 @@ class SaslAuthentication(AbstractLDAPAuth): @classmethod @abstractmethod def from_data(cls, data: list[ASN1Row]) -> "SaslAuthentication": - """Get auth from data.""" + """Get auth from data. 
+ + Args: + data: list[ASN1Row]: + """ diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py index 372a06089..ea01dfb4c 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py @@ -82,10 +82,13 @@ class SaslGSSAPIAuthentication(SaslAuthentication): def is_valid(self, user: User | None) -> bool: # noqa: ARG002 """Check if GSSAPI token is valid. - :param User | None user: indb user + Args: + User: None user: indb user Returns: bool: status + user: User | None: + """ return True @@ -94,6 +97,7 @@ def is_anonymous(self) -> bool: Returns: bool: status + """ return False @@ -102,10 +106,11 @@ def from_data(cls, data: list[ASN1Row]) -> "SaslGSSAPIAuthentication": """Get auth from data. Args: - data (list[ASN1Row]): data + data(list[ASN1Row]): data + data: list[ASN1Row]: Returns: - SaslGSSAPIAuthentication + : SaslGSSAPIAuthentication """ return cls( ticket=data[1].value if len(data) > 1 else b"", @@ -148,10 +153,12 @@ def _handle_ticket( """Handle the ticket and make gssapi step. Args: - server_ctx (gssapi.SecurityContext): GSSAPI security context + server_ctx(gssapi.SecurityContext): GSSAPI security context + server_ctx: gssapi.SecurityContext: Returns: GSSAPIAuthStatus: status + """ try: out_token = server_ctx.step(self.ticket) @@ -164,11 +171,13 @@ def _validate_security_layer(self, client_layer: GSSAPISL) -> bool: """Validate security layer. Args: - client_layer (int): client security layer - settings (Settings): settings + client_layer(int): client security layer + settings(Settings): settings + client_layer: GSSAPISL: Returns: bool: validate result + """ supported = GSSAPISL.SUPPORTED_SECURITY_LAYERS return (client_layer & supported) == client_layer @@ -180,8 +189,9 @@ def _handle_final_client_message( """Handle final client message. Args: - server_ctx (gssapi.SecurityContext): GSSAPI security context - settings (Settings): settings + server_ctx(gssapi.SecurityContext): GSSAPI security context + settings(Settings): settings + server_ctx: gssapi.SecurityContext: Returns: GSSAPIAuthStatus: status @@ -212,8 +222,10 @@ def _generate_final_message( """Generate final wrap message. Args: - server_ctx (gssapi.SecurityContext): gssapi context - settings (Settings): settings + server_ctx(gssapi.SecurityContext): gssapi context + settings(Settings): settings + server_ctx: gssapi.SecurityContext: + settings: Settings: Returns: bytes: message diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py index cd16c1c11..6e128f7a9 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py @@ -26,10 +26,12 @@ class SaslPLAINAuthentication(SaslAuthentication): def is_valid(self, user: User | None) -> bool: """Check if pwd is valid for user. - :param User | None user: indb user + Args: + User: None user: indb user Returns: bool: status + """ password = getattr(user, "password", None) if password is not None: @@ -44,12 +46,17 @@ def is_anonymous(self) -> bool: Returns: bool: status + """ return False @classmethod def from_data(cls, data: list[ASN1Row]) -> "SaslPLAINAuthentication": - """Get auth from data.""" + """Get auth from data. 
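The security-layer check in _validate_security_layer() is a plain subset test on bit flags. The sketch below reproduces it; the concrete GSSAPISL values follow RFC 4752 and are an assumption, since the enum itself is not part of this hunk:

from enum import IntFlag


class GSSAPISL(IntFlag):
    # Flag values assumed from RFC 4752, not taken from the repository.
    NO_SECURITY = 0x01
    INTEGRITY = 0x02
    CONFIDENTIALITY = 0x04
    SUPPORTED_SECURITY_LAYERS = NO_SECURITY | CONFIDENTIALITY


def is_supported(client_layer: GSSAPISL) -> bool:
    supported = GSSAPISL.SUPPORTED_SECURITY_LAYERS
    return (client_layer & supported) == client_layer


assert is_supported(GSSAPISL.CONFIDENTIALITY)
assert not is_supported(GSSAPISL.INTEGRITY)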
+ + Args: + data: list[ASN1Row]: + """ _, username, password = data[1].value.split("\\x00") return cls( credentials=data[1].value, diff --git a/app/ldap_protocol/ldap_requests/bind_methods/simple.py b/app/ldap_protocol/ldap_requests/bind_methods/simple.py index 44e684ca6..8d621021d 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/simple.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/simple.py @@ -23,10 +23,12 @@ class SimpleAuthentication(AbstractLDAPAuth): def is_valid(self, user: User | None) -> bool: """Check if pwd is valid for user. - :param User | None user: indb user + Args: + User: None user: indb user Returns: - bool: status + bool: status + """ password = getattr(user, "password", None) if password is not None: @@ -38,6 +40,7 @@ def is_anonymous(self) -> bool: Returns: bool: status + """ return not self.password diff --git a/app/ldap_protocol/ldap_requests/delete.py b/app/ldap_protocol/ldap_requests/delete.py index c6c0fc904..ccf3dc7c1 100644 --- a/app/ldap_protocol/ldap_requests/delete.py +++ b/app/ldap_protocol/ldap_requests/delete.py @@ -44,6 +44,12 @@ class DeleteRequest(BaseRequest): @classmethod def from_data(cls, data: ASN1Row) -> "DeleteRequest": + """Description. + + Args: + data: ASN1Row: + + """ return cls(entry=data) async def handle( diff --git a/app/ldap_protocol/ldap_requests/extended.py b/app/ldap_protocol/ldap_requests/extended.py index 7459ba7e4..52d444e4d 100644 --- a/app/ldap_protocol/ldap_requests/extended.py +++ b/app/ldap_protocol/ldap_requests/extended.py @@ -41,7 +41,11 @@ class BaseExtendedValue(ABC, BaseModel): @classmethod @abstractmethod def from_data(cls, data: ASN1Row) -> "BaseExtendedValue": - """Create model from data, decoded from responseValue bytes.""" + """Create model from data, decoded from responseValue bytes. + + Args: + data: ASN1Row: + """ @abstractmethod async def handle( @@ -55,6 +59,12 @@ async def handle( @staticmethod def _decode_value(data: ASN1Row) -> ASN1Row: + """Description. + + Args: + data: ASN1Row: + + """ dec = Decoder() dec.start(data[1].value) # type: ignore output = asn1todict(dec) @@ -94,7 +104,11 @@ class WhoAmIRequestValue(BaseExtendedValue): @classmethod def from_data(cls, data: ASN1Row) -> "WhoAmIRequestValue": # noqa: ARG003 - """Create model from data, WhoAmIRequestValue data is empty.""" + """Create model from data, WhoAmIRequestValue data is empty. + + Args: + data: ASN1Row: + """ return cls() async def handle( @@ -142,7 +156,11 @@ async def handle( @classmethod def from_data(cls, data: ASN1Row) -> "StartTLSRequestValue": # noqa: ARG003 - """Create model from data, decoded from responseValue bytes.""" + """Create model from data, decoded from responseValue bytes. + + Args: + data: ASN1Row: + """ return cls() @@ -156,7 +174,7 @@ class PasswdModifyResponse(BaseExtendedResponseValue): gen_passwd: str = "" def get_value(self) -> str | None: - """Return gen password.""" + """Description.""" return self.gen_passwd @@ -246,7 +264,11 @@ async def handle( @classmethod def from_data(cls, data: ASN1Row) -> "PasswdModifyRequestValue": - """Create model from data, decoded from responseValue bytes.""" + """Create model from data, decoded from responseValue bytes. + + Args: + data: ASN1Row: + """ d: list = cls._decode_value(data) # type: ignore if len(d) == 3: return cls( @@ -316,7 +338,8 @@ def from_data(cls, data: list[ASN1Row]) -> "ExtendedRequest": """Create extended request from asn.1 decoded string. 
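
# Illustrative aside: the from_data/_decode_value helpers above all walk BER payloads.
# A rough standalone sketch with the `asn1` package (assumed to be the Encoder/Decoder
# family used here); the field layout is an arbitrary two-string example, not the exact
# encoding of any specific extended operation.
import asn1

encoder = asn1.Encoder()
encoder.start()
encoder.enter(asn1.Numbers.Sequence)
encoder.write("user@md.test", asn1.Numbers.OctetString)   # first example field
encoder.write("NewP@ssw0rd", asn1.Numbers.OctetString)    # second example field
encoder.leave()
raw = encoder.output()

decoder = asn1.Decoder()
decoder.start(raw)
decoder.enter()                     # step into the outer SEQUENCE
_tag, first_value = decoder.read()
_tag, second_value = decoder.read()
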
Args: - data (ASN1Row): any data + data(ASN1Row): any data + data: list[ASN1Row]: Returns: ExtendedRequest: universal request diff --git a/app/ldap_protocol/ldap_requests/modify.py b/app/ldap_protocol/ldap_requests/modify.py index bbfcfb4bf..54d8719a5 100644 --- a/app/ldap_protocol/ldap_requests/modify.py +++ b/app/ldap_protocol/ldap_requests/modify.py @@ -105,6 +105,12 @@ class ModifyRequest(BaseRequest): @classmethod def from_data(cls, data: list[ASN1Row]) -> "ModifyRequest": + """Description. + + Args: + data: list[ASN1Row]: + + """ entry, proto_changes = data changes = [] @@ -230,6 +236,12 @@ async def handle( yield ModifyResponse(result_code=LDAPCodes.SUCCESS) def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: + """Description. + + Args: + err: BaseException: + + """ match err: case ValueError(): logger.error(f"Invalid value: {err}") @@ -251,6 +263,7 @@ def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: raise err def _get_dir_query(self) -> Select: + """Description.""" return ( select(Directory) .join(Directory.attributes) @@ -267,6 +280,14 @@ def _check_password_change_requested( directory: Directory, user_dir_id: int, ) -> bool: + """Description. + + Args: + names: set[str]: + directory: Directory: + user_dir_id: int: + + """ return ( ("userpassword" in names or "unicodepwd" in names) and len(names) == 1 diff --git a/app/ldap_protocol/ldap_requests/modify_dn.py b/app/ldap_protocol/ldap_requests/modify_dn.py index 54c648664..aa96f27e3 100644 --- a/app/ldap_protocol/ldap_requests/modify_dn.py +++ b/app/ldap_protocol/ldap_requests/modify_dn.py @@ -65,6 +65,7 @@ class ModifyDNRequest(BaseRequest): deleteoldrdn=true new_superior='ou=users,dc=multifactor,dc=dev' + Args: >>> cn = main2, ou = users, dc = multifactor, dc = dev """ @@ -77,7 +78,11 @@ class ModifyDNRequest(BaseRequest): @classmethod def from_data(cls, data: list[ASN1Row]) -> "ModifyDNRequest": - """Create structure from ASN1Row dataclass list.""" + """Create structure from ASN1Row dataclass list. + + Args: + data: list[ASN1Row]: + """ return cls( entry=data[0].value, newrdn=data[1].value, diff --git a/app/ldap_protocol/ldap_requests/search.py b/app/ldap_protocol/ldap_requests/search.py index d7213a458..7699cd34e 100644 --- a/app/ldap_protocol/ldap_requests/search.py +++ b/app/ldap_protocol/ldap_requests/search.py @@ -99,7 +99,12 @@ class Config: @field_serializer("filter") def serialize_filter(self, val: ASN1Row | None, _info: Any) -> str | None: # noqa: ANN401 - """Serialize filter field.""" + """Serialize filter field. + + Args: + val: ASN1Row | None: + _info: Any: + """ return val.to_ldap_filter() if isinstance(val, ASN1Row) else None @classmethod @@ -107,6 +112,13 @@ def from_data( cls, data: dict[str, list[ASN1Row]], ) -> "SearchRequest": + """Description. + + Args: + data: dict[str: + list[ASN1Row]]: + + """ ( base_object, scope, @@ -131,6 +143,7 @@ def from_data( @cached_property def requested_attrs(self) -> list[str]: + """Description.""" return [attr.lower() for attr in self.attributes] async def _get_subschema(self, session: AsyncSession) -> SearchResultEntry: @@ -224,8 +237,8 @@ def cast_filter(self) -> UnaryExpression | ColumnElement: """Convert asn1 row filter_ to sqlalchemy obj. 
Args: - filter_ (ASN1Row): requested filter_ - session (AsyncSession): sa session + filter_(ASN1Row): requested filter_ + session(AsyncSession): sa session Returns: UnaryExpression: condition @@ -308,18 +321,22 @@ async def get_result( @cached_property def member_of(self) -> bool: + """Description.""" return "memberof" in self.requested_attrs or self.all_attrs @cached_property def member(self) -> bool: + """Description.""" return "member" in self.requested_attrs or self.all_attrs @cached_property def token_groups(self) -> bool: + """Description.""" return "tokengroups" in self.requested_attrs @cached_property def all_attrs(self) -> bool: + """Description.""" return "*" in self.requested_attrs or not self.requested_attrs def build_query( @@ -327,7 +344,12 @@ def build_query( base_directories: list[Directory], user: UserSchema, ) -> Select: - """Build tree query.""" + """Build tree query. + + Args: + base_directories: list[Directory]: + user: UserSchema: + """ query = ( select(Directory) .join(User, isouter=True) diff --git a/app/ldap_protocol/ldap_responses.py b/app/ldap_protocol/ldap_responses.py index 5f1302c5e..6f67272e7 100644 --- a/app/ldap_protocol/ldap_responses.py +++ b/app/ldap_protocol/ldap_responses.py @@ -45,12 +45,17 @@ class BaseEncoder(BaseModel): """Class with encoder methods.""" def _get_asn1_fields(self) -> dict: + """Description.""" fields = self.model_dump() fields.pop("PROTOCOL_OP", None) return fields def to_asn1(self, enc: Encoder) -> None: - """Serialize flat structure to bytes, write to encoder buffer.""" + """Serialize flat structure to bytes, write to encoder buffer. + + Args: + enc: Encoder: + """ for value in self._get_asn1_fields().values(): enc.write(value, type_map[type(value)]) @@ -76,7 +81,11 @@ class BindResponse(LDAPResult, BaseResponse): server_sasl_creds: bytes | None = Field(None, alias="serverSaslCreds") def to_asn1(self, enc: Encoder) -> None: - """Serialize flat structure to bytes, write to encoder buffer.""" + """Serialize flat structure to bytes, write to encoder buffer. + + Args: + enc: Encoder: + """ enc.write(self.result_code, type_map[type(self.result_code)]) enc.write(self.matched_dn, type_map[type(self.matched_dn)]) enc.write(self.error_message, type_map[type(self.error_message)]) @@ -103,11 +112,23 @@ def l_name(self) -> str: @field_validator("type", mode="before") @classmethod def validate_type(cls, v: str | bytes | int) -> str: + """Description. + + Args: + v: str | bytes | int: + + """ return str(v) @field_validator("vals", mode="before") @classmethod def validate_vals(cls, vals: list[str | int | bytes]) -> list[str | bytes]: + """Description. + + Args: + vals: list[str | int | bytes]: + + """ return [v if isinstance(v, bytes) else str(v) for v in vals] class Config: @@ -139,7 +160,11 @@ class SearchResultEntry(BaseResponse): partial_attributes: list[PartialAttribute] def to_asn1(self, enc: Encoder) -> None: - """Serialize search response structure to asn1 buffer.""" + """Serialize search response structure to asn1 buffer. 
+ + Args: + enc: Encoder: + """ enc.write(self.object_name, Numbers.OctetString) enc.enter(Numbers.Sequence) @@ -165,6 +190,7 @@ class SearchResultDone(LDAPResult, BaseResponse): total_objects: int = 0 def _get_asn1_fields(self) -> dict: + """Description.""" fields = super()._get_asn1_fields() fields.pop("total_pages") fields.pop("total_objects") @@ -236,7 +262,11 @@ class ExtendedResponse(LDAPResult, BaseResponse): response_value: SerializeAsAny[BaseExtendedResponseValue] | None def to_asn1(self, enc: Encoder) -> None: - """Serialize flat structure to bytes, write to encoder buffer.""" + """Serialize flat structure to bytes, write to encoder buffer. + + Args: + enc: Encoder: + """ enc.write(self.result_code, type_map[type(self.result_code)]) enc.write(self.matched_dn, type_map[type(self.matched_dn)]) enc.write(self.error_message, type_map[type(self.error_message)]) diff --git a/app/ldap_protocol/ldap_schema/attribute_type_crud.py b/app/ldap_protocol/ldap_schema/attribute_type_crud.py index 4439ddfd5..e6454fb16 100644 --- a/app/ldap_protocol/ldap_schema/attribute_type_crud.py +++ b/app/ldap_protocol/ldap_schema/attribute_type_crud.py @@ -29,7 +29,11 @@ class AttributeTypeSchema(BaseSchemaModel): @classmethod def from_db(cls, attribute_type: AttributeType) -> "AttributeTypeSchema": - """Create an instance from database.""" + """Create an instance from database. + + Args: + attribute_type: AttributeType: + """ return cls( oid=attribute_type.oid, name=attribute_type.name, @@ -94,9 +98,6 @@ async def create_attribute_type( no_user_modification (bool): User can't modify it. is_system (bool): Attribute Type is system. session (AsyncSession): Database session. - - Returns: - None. """ attribute_type = AttributeType( oid=oid, @@ -164,9 +165,6 @@ async def modify_attribute_type( new_statement (AttributeTypeUpdateSchema): Attribute Type Schema. session (AsyncSession): Database session. - - Returns: - None. """ attribute_type.syntax = new_statement.syntax attribute_type.single_value = new_statement.single_value @@ -183,9 +181,6 @@ async def delete_attribute_types_by_names( Args: attribute_type_names (list[str]): List of Attribute Types OIDs. session (AsyncSession): Database session. - - Returns: - None: None. """ if not attribute_type_names: return None diff --git a/app/ldap_protocol/ldap_schema/object_class_crud.py b/app/ldap_protocol/ldap_schema/object_class_crud.py index 7f4a8c29b..f71368d15 100644 --- a/app/ldap_protocol/ldap_schema/object_class_crud.py +++ b/app/ldap_protocol/ldap_schema/object_class_crud.py @@ -40,7 +40,11 @@ class ObjectClassSchema(BaseSchemaModel): @classmethod def from_db(cls, object_class: ObjectClass) -> "ObjectClassSchema": - """Create an instance from database.""" + """Create an instance from database. + + Args: + object_class: ObjectClass: + """ return cls( oid=object_class.oid, name=object_class.name, @@ -107,9 +111,6 @@ async def create_object_class( attribute_type_names_may (list[str]): Attribute Types may. session (AsyncSession): Database session. :param str | None superior_name: Parent Object Class. - - Returns: - None. """ if kind not in OBJECT_CLASS_KINDS_ALLOWED: raise ValueError(f"Object class kind is not valid: {kind}.") @@ -203,9 +204,6 @@ async def modify_object_class( new_statement (ObjectClassUpdateSchema): New statement of object class session (AsyncSession): Database session. - - Returns: - None. 
""" object_class.attribute_types_must.clear() object_class.attribute_types_must.extend( @@ -239,9 +237,6 @@ async def delete_object_classes_by_names( Args: object_classes_names (list[str]): Object classes names. session (AsyncSession): Database session. - - Returns: - None. """ await session.execute( delete(ObjectClass) diff --git a/app/ldap_protocol/messages.py b/app/ldap_protocol/messages.py index 2cc972d5f..8b46c9e8f 100644 --- a/app/ldap_protocol/messages.py +++ b/app/ldap_protocol/messages.py @@ -76,7 +76,11 @@ class LDAPRequestMessage(LDAPMessage): @classmethod def from_bytes(cls, source: bytes) -> "LDAPRequestMessage": - """Create message from bytes.""" + """Create message from bytes. + + Args: + source: bytes: + """ dec = Decoder() dec.start(source) output = asn1todict(dec) @@ -119,14 +123,16 @@ def from_err(cls, source: bytes, err: Exception) -> LDAPResponseMessage: """Create error response message. Args: - source (bytes): source data - err (Exception): any error - - Raises: - ValueError: on invalid schema + source(bytes): source data + err(Exception): any error + source: bytes: + err: Exception: Returns: LDAPResponseMessage: response with err code + + Raises: + ValueError: on invalid schema """ output = asn1todict(source) message_id = 0 diff --git a/app/ldap_protocol/multifactor.py b/app/ldap_protocol/multifactor.py index 081a70ee9..4d492068c 100644 --- a/app/ldap_protocol/multifactor.py +++ b/app/ldap_protocol/multifactor.py @@ -85,23 +85,14 @@ class MultifactorAPI: Methods: - `__init__(key, secret, client, settings)`: Initializes the object with - the required credentials and bound HTTP client from di. + the required credentials and bound HTTP client from di. - `ldap_validate_mfa(username, password)`: Validates MFA for a user. If the - password is not provided, sends a push notification and waits for user - approval with a timeout of 60 seconds. + password is not provided, sends a push notification and waits for user + approval with a timeout of 60 seconds. - `get_create_mfa(username)`: Retrieves or creates an MFA token for the - specified user. + specified user. - `refresh_token()`: Refreshes the authentication token using the refresh - endpoint. - - Attributes: - - `MultifactorError`: Exception class for MFA-related errors. - - `AUTH_URL_USERS`: Endpoint URL for user authentication requests. - - `AUTH_URL_ADMIN`: Endpoint URL for admin authentication requests. - - `REFRESH_URL`: Endpoint URL for token refresh. - - `client`: Asynchronous HTTP client for making requests. - - `settings`: Configuration settings for the MFA service. - + endpoint. """ MultifactorError = _MultifactorError @@ -137,6 +128,7 @@ def __init__( @staticmethod def _generate_trace_id_header() -> dict[str, str]: + """Description.""" return {"mf-trace-id": f"md:{uuid.uuid4()}"} @log_mfa.catch(reraise=True) diff --git a/app/ldap_protocol/policies/access_policy.py b/app/ldap_protocol/policies/access_policy.py index bf574bb92..26a1032b5 100644 --- a/app/ldap_protocol/policies/access_policy.py +++ b/app/ldap_protocol/policies/access_policy.py @@ -87,8 +87,14 @@ def mutate_ap[T: Select]( """Modify query with read rule filter, joins acess policies. 
Args: - query (T): select(Directory) - user (UserSchema): user data + query(T): select(Directory) + user(UserSchema): user data + query: T: + user: UserSchema: + action: Literal["add": + "read": + "modify": + "del"]: (Default value = "read") Returns: T: select(Directory).join(Directory.access_policies) diff --git a/app/ldap_protocol/policies/network_policy.py b/app/ldap_protocol/policies/network_policy.py index addc2ebd9..2f48bd5df 100644 --- a/app/ldap_protocol/policies/network_policy.py +++ b/app/ldap_protocol/policies/network_policy.py @@ -24,12 +24,19 @@ def build_policy_query( Args: ip (IPv4Address): IP address to filter - :param Literal["is_http", "is_ldap", "is_kerberos"] protocol_field_name - protocol: Protocol to filter - :param list[int] | None user_group_ids: List of user group IDs, optional + + Args: + Literal: is_http", "is_ldap", "is_kerberos"] protocol_field_name + protocol: Protocol to filter + list: int] | None user_group_ids: List of user group IDs, optional Returns: - : Select query + : Select query + ip: IPv4Address | IPv6Address: + protocol_field_name: Literal["is_http": + "is_ldap": + "is_kerberos"]: + user_group_ids: list[int] | None: (Default value = None) """ protocol_field = getattr(NetworkPolicy, protocol_field_name) query = ( diff --git a/app/ldap_protocol/policies/password_policy.py b/app/ldap_protocol/policies/password_policy.py index 3797faf61..590cc88a0 100644 --- a/app/ldap_protocol/policies/password_policy.py +++ b/app/ldap_protocol/policies/password_policy.py @@ -72,6 +72,7 @@ class PasswordPolicySchema(BaseModel): @model_validator(mode="after") def _validate_minimum_pwd_age(self) -> "PasswordPolicySchema": + """Description.""" if self.minimum_password_age_days > self.maximum_password_age_days: raise ValueError( "Minimum password age days must be " @@ -146,7 +147,8 @@ def _count_password_exists_days(last_pwd_set: Attribute) -> int: """Get number of days, pwd exists. Args: - last_pwd_set (Attribute): pwdLastSet + last_pwd_set(Attribute): pwdLastSet + last_pwd_set: Attribute: Returns: int: days @@ -199,12 +201,15 @@ def validate_min_age(self, last_pwd_set: Attribute) -> bool: """Validate min password change age. Args: - last_pwd_set (Attribute): last pwd set + last_pwd_set(Attribute): last pwd set + last_pwd_set: Attribute: Returns: + bool: can change pwd True - not valid, can not change False + bool: can change pwd True - not valid, can not change False + - valid, can change bool: can change pwd True - not valid, can not change False - valid, can change - on minimum_password_age_days can always change. """ if self.minimum_password_age_days == 0: @@ -218,12 +223,15 @@ def validate_max_age(self, last_pwd_set: Attribute) -> bool: """Validate max password change age. Args: - last_pwd_set (Attribute): last pwd set + last_pwd_set(Attribute): last pwd set + last_pwd_set: Attribute: Returns: + bool: is pwd expired True - not valid, expired False - + bool: is pwd expired True - not valid, expired False - + valid, not expired bool: is pwd expired True - not valid, expired False - valid, not expired - on maximum_password_age_days always valid. """ if self.maximum_password_age_days == 0: diff --git a/app/ldap_protocol/server.py b/app/ldap_protocol/server.py index eb79dba52..167847ab9 100644 --- a/app/ldap_protocol/server.py +++ b/app/ldap_protocol/server.py @@ -144,7 +144,9 @@ def _extract_proxy_protocol_address( """Get ip from proxy protocol header. 
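
# Illustrative aside: build_policy_query above filters network policies by the client
# address and protocol inside SQL. A plain-Python sketch of the same kind of membership
# test (names invented here, not taken from the module):
from ipaddress import IPv4Address, IPv6Address, ip_network

def ip_in_networks(ip: IPv4Address | IPv6Address, netmasks: list[str]) -> bool:
    """Return True if the address falls inside any of the given CIDR ranges."""
    return any(ip in ip_network(mask, strict=False) for mask in netmasks)

# ip_in_networks(IPv4Address("10.0.0.15"), ["10.0.0.0/24", "192.168.1.0/24"]) -> True
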
Args: - data (bytes): data + data(bytes): data + data: bytes: + writer: asyncio.StreamWriter: Returns: tuple: ip, data @@ -236,7 +238,8 @@ def _compute_ldap_message_size(data: bytes) -> int: https://github.com/cannatag/ldap3/blob/dev/ldap3/strategy/base.py#L455 Args: - data (bytes): body + data(bytes): body + data: bytes: Returns: int: actual size @@ -341,6 +344,13 @@ async def _unwrap_request( @staticmethod def _req_log_full(addr: str, msg: LDAPRequestMessage) -> None: + """Description. + + Args: + addr: str: + msg: LDAPRequestMessage: + + """ log.debug( f"\nFrom: {addr!r}\n{msg.name}[{msg.message_id}]: " f"{msg.model_dump_json()}\n", @@ -348,6 +358,13 @@ def _req_log_full(addr: str, msg: LDAPRequestMessage) -> None: @staticmethod def _resp_log_full(addr: str, msg: LDAPResponseMessage) -> None: + """Description. + + Args: + addr: str: + msg: LDAPResponseMessage: + + """ log.debug( f"\nTo: {addr!r}\n{msg.name}[{msg.message_id}]: " f"{msg.model_dump_json()}"[:3000], @@ -355,6 +372,13 @@ def _resp_log_full(addr: str, msg: LDAPResponseMessage) -> None: @staticmethod def _log_short(addr: str, msg: LDAPMessage) -> None: + """Description. + + Args: + addr: str: + msg: LDAPMessage: + + """ log.info(f"\n{addr!r}: {msg.name}[{msg.message_id}]\n") async def _handle_single_response( @@ -467,6 +491,12 @@ async def _run_server(server: asyncio.base_events.Server) -> None: @staticmethod def log_addrs(server: asyncio.base_events.Server) -> None: + """Description. + + Args: + server: asyncio.base_events.Server: + + """ addrs = ", ".join(str(sock.getsockname()) for sock in server.sockets) log.info(f"Server on {addrs}") diff --git a/app/ldap_protocol/session_storage.py b/app/ldap_protocol/session_storage.py index f257b9e0e..a292485b5 100644 --- a/app/ldap_protocol/session_storage.py +++ b/app/ldap_protocol/session_storage.py @@ -100,9 +100,6 @@ async def clear_user_sessions(self, uid: int) -> None: Args: uid (int): user id - - Returns: - None: """ @abstractmethod @@ -111,13 +108,17 @@ async def delete_user_session(self, session_id: str) -> None: Args: session_id (str): session id - - Returns: - None: """ @staticmethod def _sign(session_id: str, settings: Settings) -> str: + """Description. + + Args: + session_id: str: + settings: Settings: + + """ return hmac.new( settings.SECRET_KEY.encode(), session_id.encode(), @@ -125,16 +126,40 @@ def _sign(session_id: str, settings: Settings) -> str: ).hexdigest() def get_user_agent_hash(self, user_agent: str) -> str: - """Get user agent hash.""" + """Get user agent hash. + + Args: + user_agent: str: + """ return hashlib.blake2b(user_agent.encode(), digest_size=6).hexdigest() def _get_ip_session_key(self, ip: str, protocol: ProtocolType) -> str: + """Description. + + Args: + ip: str: + protocol: ProtocolType: + + """ return f"ip:{protocol}:{ip}" def _get_user_session_key(self, uid: int, protocol: ProtocolType) -> str: + """Description. + + Args: + uid: int: + protocol: ProtocolType: + + """ return f"keys:{protocol}:{uid}" def _get_protocol(self, session_id: str) -> ProtocolType: + """Description. + + Args: + session_id: str: + + """ return "http" if session_id.startswith("http:") else "ldap" def _generate_key(self) -> str: @@ -142,6 +167,7 @@ def _generate_key(self) -> str: Returns: str: A new key. + """ return f"http:{token_hex(self.key_length)}" @@ -149,7 +175,8 @@ def _get_lock_key(self, session_id: str) -> str: """Get lock key. 
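
# Illustrative aside: _compute_ldap_message_size above relies on standard BER
# definite-length rules. Restated as a self-contained sketch (function and variable
# names are invented here, and incomplete input is reported as 0 rather than whatever
# sentinel the server uses):
def ber_total_size(data: bytes) -> int:
    """Total byte length of the BER TLV starting at data[0], or 0 if not yet known."""
    if len(data) < 2:
        return 0
    length_octet = data[1]
    if length_octet < 0x80:          # short form: the octet itself is the length
        return 2 + length_octet
    n = length_octet & 0x7F          # long form: the next n octets hold the length
    if len(data) < 2 + n:
        return 0
    return 2 + n + int.from_bytes(data[2:2 + n], "big")
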
Args: - session_id (str): session id + session_id(str): session id + session_id: str: Returns: str: lock key @@ -226,7 +253,14 @@ def _generate_session_data( settings: Settings, extra_data: dict | None, ) -> tuple[str, str, dict]: - """Set data.""" + """Set data. + + Args: + self: Self: + uid: int: + settings: Settings: + extra_data: dict | None: + """ if extra_data is None: extra_data = {} diff --git a/app/ldap_protocol/user_account_control.py b/app/ldap_protocol/user_account_control.py index 22954689b..486654ac2 100644 --- a/app/ldap_protocol/user_account_control.py +++ b/app/ldap_protocol/user_account_control.py @@ -73,11 +73,11 @@ def is_value_valid(cls, uac_value: str | int) -> bool: """Check all flags set in the userAccountControl value. Args: - uac_value (int): userAccountControl attribute value + uac_value(int): userAccountControl attribute value + uac_value: str | int: Returns: - : True if the value is valid (only known flags), False - otherwise + bool: True if all flags are set correctly, False otherwise """ if isinstance(uac_value, int): pass @@ -120,10 +120,11 @@ def is_flag_true(flag: UserAccountControlFlag) -> bool: """Check given flag in current userAccountControl attribute. Args: - flag (userAccountControlFlag): flag + flag(userAccountControlFlag): flag + flag: UserAccountControlFlag: Returns: - bool: result + bool: True if flag is set, False otherwise """ return bool(int(value) & flag) diff --git a/app/ldap_protocol/utils/const.py b/app/ldap_protocol/utils/const.py index d882da7cb..00ecb5239 100644 --- a/app/ldap_protocol/utils/const.py +++ b/app/ldap_protocol/utils/const.py @@ -13,6 +13,11 @@ def _type_validate_entry(entry: str) -> str: + """Description. + + Args: + entry: str: + """ if validate_entry(entry): return entry raise ValueError(f"Invalid entry name {entry}") @@ -24,6 +29,11 @@ def _type_validate_entry(entry: str) -> str: def _type_validate_email(email: str) -> str: + """Description. + + Args: + email: str: + """ if EMAIL_RE.fullmatch(email): return email raise ValueError(f"Invalid entry name {email}") diff --git a/app/ldap_protocol/utils/cte.py b/app/ldap_protocol/utils/cte.py index 66912cd64..c5c19b395 100644 --- a/app/ldap_protocol/utils/cte.py +++ b/app/ldap_protocol/utils/cte.py @@ -26,26 +26,26 @@ def find_members_recursive_cte(dn: str) -> CTE: ------------------ 1. **Base Query (Initial Part of the CTE)**: - The function begins by defining the initial part of the CTE, named - `directory_hierarchy`. This query selects the `directory_id` and - `group_id` from the `Directory` and `Groups` tables, filtering based - on the distinguished name (DN) provided by the `dn` argument. + !The function begins by defining the initial part of the CTE, named + !`directory_hierarchy`. This query selects the `directory_id` and + !`group_id` from the `Directory` and `Groups` tables, filtering based + !on the distinguished name (DN) provided by the `dn` argument. 2. **Recursive Part of the CTE**: - The second part of the CTE is recursive. It joins the results of - `directory_hierarchy` with the `DirectoryMemberships` table to find - all groups that are members of other groups, iterating through - all nested memberships. + !The second part of the CTE is recursive. It joins the results of + !`directory_hierarchy` with the `DirectoryMemberships` table to find + !all groups that are members of other groups, iterating through + !all nested memberships. 3. 
**Combining Results**: - The CTE combines the initial and recursive parts using `union_all` - effectively creating a recursive query that gathers all directorie - and their associated groups, both directly and indirectly related. + !The CTE combines the initial and recursive parts using `union_all` + !effectively creating a recursive query that gathers all directorie + !and their associated groups, both directly and indirectly related. 4. **Final Query**: - The final query applies the method (typically a comparison operation - to the results of the CTE, returning the desired condition for furthe - use in the main query. + !The final query applies the method (typically a comparison operation + !to the results of the CTE, returning the desired condition for furthe + !use in the main query. The query translates to the following SQL: @@ -76,6 +76,8 @@ def find_members_recursive_cte(dn: str) -> CTE: In the case of a recursive search through the specified group1, the search result will be as follows: user1, user2, group2, user3, group3, user4. + Args: + dn: str: """ directory_hierarchy = ( select(Directory.id.label("directory_id"), Group.id.label("group_id")) @@ -135,6 +137,8 @@ def find_root_group_recursive_cte(dn_list: list) -> CTE: result will be as follows: group1, group2, group3, user4. + Args: + dn_list: list: """ directory_hierarchy = ( select( @@ -176,7 +180,6 @@ async def get_members_root_group( In the case of a recursive search through the specified user4, the search result will be as follows: group1, user1, user2, group2, user3, group3, user4. - """ cte = find_root_group_recursive_cte([dn]) result = await session.scalars(select(cte.c.directory_id)) diff --git a/app/ldap_protocol/utils/helpers.py b/app/ldap_protocol/utils/helpers.py index 68fc70436..5065cdc37 100644 --- a/app/ldap_protocol/utils/helpers.py +++ b/app/ldap_protocol/utils/helpers.py @@ -150,7 +150,8 @@ def validate_entry(entry: str) -> bool: cn=first,dc=example,dc=com -> valid Args: - entry (str): any str + entry(str): any str + entry: str: Returns: bool: result @@ -163,22 +164,40 @@ def validate_entry(entry: str) -> bool: def is_dn_in_base_directory(base_directory: Directory, entry: str) -> bool: - """Check if an entry in a base dn.""" + """Check if an entry in a base dn. + + Args: + base_directory: Directory: + entry: str: + """ return entry.lower().endswith(base_directory.path_dn.lower()) def dn_is_base_directory(base_directory: Directory, entry: str) -> bool: - """Check if an entry is a base dn.""" + """Check if an entry is a base dn. + + Args: + base_directory: Directory: + entry: str: + """ return base_directory.path_dn.lower() == entry.lower() def get_generalized_now(tz: ZoneInfo) -> str: - """Get generalized time (formated) with tz.""" + """Get generalized time (formated) with tz. + + Args: + tz: ZoneInfo: + """ return datetime.now(tz).strftime("%Y%m%d%H%M%S.%f%z") def _get_domain(name: str) -> str: - """Get domain from name.""" + """Get domain from name. + + Args: + name: str: + """ return ".".join( [ item[3:].lower() @@ -192,8 +211,10 @@ def create_integer_hash(text: str, size: int = 9) -> int: """Create integer hash from text. 
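
# Illustrative aside: the recursive CTE described above ultimately computes the same set
# as a breadth-first walk over nested group membership. A toy in-memory version (dict of
# group name -> member names, all names invented for the example):
def nested_members(groups: dict[str, list[str]], root: str) -> list[str]:
    """Collect direct and nested members of `root`, descending into member groups."""
    seen: set[str] = set()
    queue, members = [root], []
    while queue:
        current = queue.pop(0)
        for member in groups.get(current, []):
            if member not in seen:
                seen.add(member)
                members.append(member)
                queue.append(member)   # plain users map to no members, so this is harmless
    return members

# nested_members({"group1": ["user1", "group2"], "group2": ["user2"]}, "group1")
# -> ["user1", "group2", "user2"]
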
Args: - text (str): any string - size (int): fixed size of hash, defaults to 15 + text(str): any string + size(int): fixed size of hash, defaults to 15 + text: str: + size: int: (Default value = 9) Returns: int: hash @@ -202,7 +223,11 @@ def create_integer_hash(text: str, size: int = 9) -> int: def get_windows_timestamp(value: datetime) -> int: - """Get the Windows timestamp from the value.""" + """Get the Windows timestamp from the value. + + Args: + value: datetime: + """ return (int(value.timestamp()) + 11644473600) * 10000000 @@ -214,6 +239,9 @@ def dt_to_ft(dt: datetime) -> int: """Convert a datetime to a Windows filetime. If the object is time zone-naive, it is forced to UTC before conversion. + + Args: + dt: datetime: """ if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) != 0: dt = dt.astimezone(ZoneInfo("UTC")) @@ -228,6 +256,9 @@ def ft_to_dt(filetime: int) -> datetime: The new datetime object is timezone-naive but is equivalent to tzinfo=utc. 1) Get seconds and remainder in terms of Unix epoch 2) Convert to datetime object, with remainder as microseconds. + + Args: + filetime: int: """ s, ns100 = divmod(filetime - _EPOCH_AS_FILETIME, _HUNDREDS_OF_NS) return datetime.fromtimestamp(s, tz=ZoneInfo("UTC")).replace( @@ -254,6 +285,7 @@ def string_to_sid(sid_string: str) -> bytes: Args: sid_string: The string representation of the SID + sid_string: str: Returns: bytes: The binary representation of the SID @@ -286,10 +318,13 @@ def create_object_sid( Args: domain: domain directory - rid (int): relative identifier - reserved (bool): A flag indicating whether the RID is reserved. - If `True`, the given RID is used directly. If `False`, 1000 - is added to the given RID to generate the final RID + rid(int): relative identifier + reserved(bool): A flag indicating whether the RID is reserved. + If `True`, the given RID is used directly. If `False`, 1000 + is added to the given RID to generate the final RID + domain: Directory: + rid: int: + reserved: bool: (Default value = False) Returns: str: the complete objectSid as a string @@ -311,6 +346,9 @@ def create_user_name(directory_id: int) -> str: """Create username by directory id. NOTE: keycloak + + Args: + directory_id: int: """ return blake2b(str(directory_id).encode(), digest_size=8).hexdigest() diff --git a/app/ldap_protocol/utils/pagination.py b/app/ldap_protocol/utils/pagination.py index 0402b6a3f..6b1f0bec1 100644 --- a/app/ldap_protocol/utils/pagination.py +++ b/app/ldap_protocol/utils/pagination.py @@ -69,7 +69,11 @@ class BaseSchemaModel[S: Base](BaseModel): @classmethod @abstractmethod def from_db(cls, sqla_instance: S) -> "BaseSchemaModel[S]": - """Create an instance of Schema from instance of SQLA model.""" + """Create an instance of Schema from instance of SQLA model. + + Args: + sqla_instance: S: + """ @dataclass diff --git a/app/ldap_protocol/utils/queries.py b/app/ldap_protocol/utils/queries.py index 400969628..dfc789a7a 100644 --- a/app/ldap_protocol/utils/queries.py +++ b/app/ldap_protocol/utils/queries.py @@ -180,7 +180,8 @@ def get_search_path(dn: str) -> list[str]: """Get search path for dn. Args: - dn (str): any DN, dn syntax + dn(str): any DN, dn syntax + dn: str: Returns: list[str]: reversed list of dn values @@ -198,8 +199,11 @@ def get_path_filter( """Get filter condition for path equality. 
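
# Illustrative aside: a compact round trip for the Windows FILETIME conversions above
# (get_windows_timestamp / dt_to_ft / ft_to_dt). The constants are the standard
# 1601-01-01 epoch offset and 100 ns tick size, restated here under local names rather
# than imported from the module:
from datetime import datetime, timezone

EPOCH_AS_FILETIME = 116_444_736_000_000_000   # 1970-01-01 expressed as a FILETIME
HUNDREDS_OF_NS = 10_000_000                   # 100 ns ticks per second

def to_filetime(dt: datetime) -> int:
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)   # treat naive values as UTC for the sketch
    return EPOCH_AS_FILETIME + int(dt.timestamp() * HUNDREDS_OF_NS)

def from_filetime(ft: int) -> datetime:
    seconds, ticks = divmod(ft - EPOCH_AS_FILETIME, HUNDREDS_OF_NS)
    return datetime.fromtimestamp(seconds, tz=timezone.utc).replace(microsecond=ticks // 10)

# from_filetime(to_filetime(datetime(2024, 1, 1, tzinfo=timezone.utc)))
# -> datetime(2024, 1, 1, 0, 0, tzinfo=timezone.utc)
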
Args: - path (list[str]): dn - field (Column): path column, defaults to Directory.path + path(list[str]): dn + field(Column): path column, defaults to Directory.path + path: list[str]: + *: + column: ColumnElement | Column | InstrumentedAttribute: (Default value = Directory.path) Returns: ColumnElement: filter (where) element @@ -212,7 +216,13 @@ def get_filter_from_path( *, column: Column | InstrumentedAttribute = Directory.path, ) -> ColumnElement: - """Get filter condition for path equality from dn.""" + """Get filter condition for path equality from dn. + + Args: + dn: str: + *: + column: Column | InstrumentedAttribute: (Default value = Directory.path) + """ return get_path_filter(get_search_path(dn), column=column) @@ -228,7 +238,11 @@ async def get_dn_by_id(id_: int, session: AsyncSession) -> str: def get_domain_object_class(domain: Directory) -> Iterator[Attribute]: - """Get default domain attrs.""" + """Get default domain attrs. + + Args: + domain: Directory: + """ for value in ["domain", "top", "domainDNS"]: yield Attribute(name="objectClass", value=value, directory=domain) diff --git a/app/ldap_protocol/utils/raw_definition_parser.py b/app/ldap_protocol/utils/raw_definition_parser.py index 465f4a25e..5a80c380b 100644 --- a/app/ldap_protocol/utils/raw_definition_parser.py +++ b/app/ldap_protocol/utils/raw_definition_parser.py @@ -16,6 +16,12 @@ class RawDefinitionParser: @staticmethod def _list_to_string(data: list[str]) -> str | None: + """Description. + + Args: + data: list[str]: + + """ if not data: return None if len(data) == 1: @@ -24,11 +30,23 @@ def _list_to_string(data: list[str]) -> str | None: @staticmethod def _get_attribute_type_info(raw_definition: str) -> AttributeTypeInfo: + """Description. + + Args: + raw_definition: str: + + """ tmp = AttributeTypeInfo.from_definition(definitions=[raw_definition]) return next(iter(tmp.values())) @staticmethod def get_object_class_info(raw_definition: str) -> ObjectClassInfo: + """Description. + + Args: + raw_definition: str: + + """ tmp = ObjectClassInfo.from_definition(definitions=[raw_definition]) return next(iter(tmp.values())) @@ -47,6 +65,12 @@ async def _get_attribute_types_by_names( def create_attribute_type_by_raw( raw_definition: str, ) -> AttributeType: + """Description. + + Args: + raw_definition: str: + + """ attribute_type_info = RawDefinitionParser._get_attribute_type_info( raw_definition=raw_definition ) diff --git a/app/models.py b/app/models.py index 468227219..c9c43e5eb 100644 --- a/app/models.py +++ b/app/models.py @@ -66,7 +66,16 @@ def compile_create_uc( compiler: DDLCompiler, **kw: dict, ) -> str: - """Add NULLS NOT DISTINCT if its in args.""" + """Add NULLS NOT DISTINCT if its in args. + + Args: + create (DDLElement): The DDL element to compile. + compiler (DDLCompiler): The DDL compiler instance. + **kw (dict): Additional keyword arguments. + + Returns: + str: Compiled unique constraint statement. + """ stmt = compiler.visit_unique_constraint(create, **kw) postgresql_opts = create.dialect_options["postgresql"] # type: ignore @@ -232,6 +241,11 @@ class Directory(Base): @property def attributes_dict(self) -> defaultdict[str, list[str]]: + """Description. + + Returns: + defaultdict[str, list[str]]: Dictionary of attribute names to their values. 
+ """ attributes = defaultdict(list) for attribute in self.attributes: attributes[attribute.name].extend(attribute.values) @@ -295,29 +309,52 @@ def attributes_dict(self) -> defaultdict[str, list[str]]: } def get_dn_prefix(self) -> DistinguishedNamePrefix: - """Get distinguished name prefix.""" + """Get distinguished name prefix. + + Returns: + DistinguishedNamePrefix: Prefix for distinguished name. + """ return { "organizationalUnit": "ou", "domain": "dc", }.get(self.object_class, "cn") # type: ignore def get_dn(self, dn: str = "cn") -> str: - """Get distinguished name.""" + """Get distinguished name. + + Args: + dn (str): Distinguished name prefix (default: "cn"). + + Returns: + str: Distinguished name. + """ return f"{dn}={self.name}" @property def is_domain(self) -> bool: - """Is directory domain.""" + """Is directory domain. + + Returns: + bool: True if directory is domain, otherwise False. + """ return not self.parent_id and self.object_class == "domain" @property def host_principal(self) -> str: - """Principal computer name.""" + """Principal computer name. + + Returns: + str: Host principal name. + """ return f"host/{self.name}" @property def path_dn(self) -> str: - """Get DN from path.""" + """Get DN from path. + + Returns: + str: Distinguished name from path. + """ return ",".join(reversed(self.path)) def create_path( @@ -325,18 +362,31 @@ def create_path( parent: Directory | None = None, dn: str = "cn", ) -> None: - """Create path from a new directory.""" + """Create path from a new directory. + + Args: + parent (Directory | None): Parent directory (default: None). + dn (str): Distinguished name prefix (default: "cn"). + """ pre_path: list[str] = parent.path if parent else [] self.path = [*pre_path, self.get_dn(dn)] self.depth = len(self.path) self.rdname = dn def __str__(self) -> str: - """Dir name.""" + """Dir name. + + Returns: + str: Directory name. + """ return f"Directory({self.name})" def __repr__(self) -> str: - """Dir id and name.""" + """Dir id and name. + + Returns: + str: Directory id and name. + """ return f"Directory({self.id}:{self.name})" @@ -427,19 +477,35 @@ class User(Base): ) def get_upn_prefix(self) -> str: - """Get userPrincipalName prefix.""" + """Get userPrincipalName prefix. + + Returns: + str: Prefix of userPrincipalName. + """ return self.user_principal_name.split("@")[0] def __str__(self) -> str: - """User show.""" + """User show. + + Returns: + str: User string representation. + """ return f"User({self.sam_accout_name})" def __repr__(self) -> str: - """User map with dir id.""" + """User map with dir id. + + Returns: + str: User id and directory id. + """ return f"User({self.directory_id}:{self.sam_accout_name})" def is_expired(self) -> bool: - """Check AccountExpires.""" + """Check AccountExpires. + + Returns: + bool: True if account is expired, otherwise False. + """ if self.account_exp is None: return False @@ -526,11 +592,19 @@ class Group(Base): ) def __str__(self) -> str: - """Group id.""" + """Group id. + + Returns: + str: Group id. + """ return f"Group({self.id})" def __repr__(self) -> str: - """Group id and dir id.""" + """Group id and dir id. + + Returns: + str: Group id and directory id. + """ return f"Group({self.id}:{self.directory_id})" @@ -565,7 +639,11 @@ class Attribute(Base): @property def _decoded_value(self) -> str | None: - """Get attribute value.""" + """Get attribute value. + + Returns: + str | None: Decoded attribute value. 
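
# Illustrative aside: create_path and path_dn above compose a DN from stored path
# segments. A toy version with plain lists (values invented for the example):
parent_path = ["dc=test", "dc=md", "ou=users"]
child_path = [*parent_path, "cn=user1"]     # create_path: parent path plus own RDN
dn = ",".join(reversed(child_path))         # path_dn
# dn == "cn=user1,ou=users,dc=md,dc=test"
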
+ """ if self.value: return self.value if self.bvalue: @@ -574,15 +652,27 @@ def _decoded_value(self) -> str | None: @property def values(self) -> list[str]: - """Get attribute value by list.""" + """Get attribute value by list. + + Returns: + list[str]: List of attribute values. + """ return [self._decoded_value] if self._decoded_value else [] def __str__(self) -> str: - """Attribute name and value.""" + """Attribute name and value. + + Returns: + str: Attribute name and value. + """ return f"Attribute({self.name}:{self._decoded_value})" def __repr__(self) -> str: - """Attribute name and value.""" + """Attribute name and value. + + Returns: + str: Attribute name and value. + """ return f"Attribute({self.name}:{self._decoded_value})" @@ -605,7 +695,14 @@ class AttributeType(Base): is_system: Mapped[bool] # NOTE: it's not equal `NO-USER-MODIFICATION` def get_raw_definition(self) -> str: - """Format SQLAlchemy Attribute Type object to LDAP definition.""" + """Format SQLAlchemy Attribute Type object to LDAP definition. + + Returns: + str: LDAP definition string. + + Raises: + ValueError: If required fields are missing. + """ if not self.oid or not self.name or not self.syntax: err_msg = f"{self}: Fields 'oid', 'name', and 'syntax' are required for LDAP definition." # noqa: E501 raise ValueError(err_msg) @@ -625,11 +722,19 @@ def get_raw_definition(self) -> str: return " ".join(chunks) def __str__(self) -> str: - """AttributeType name.""" + """AttributeType name. + + Returns: + str: AttributeType name. + """ return f"AttributeType({self.name})" def __repr__(self) -> str: - """AttributeType oid and name.""" + """AttributeType oid and name. + + Returns: + str: AttributeType oid and name. + """ return f"AttributeType({self.oid}:{self.name})" @@ -726,7 +831,14 @@ class ObjectClass(Base): ) def get_raw_definition(self) -> str: - """Format SQLAlchemy Object Class object to LDAP definition.""" + """Format SQLAlchemy Object Class object to LDAP definition. + + Returns: + str: LDAP definition string. + + Raises: + ValueError: If required fields are missing. + """ if not self.oid or not self.name or not self.kind: err_msg = f"{self}: Fields 'oid', 'name', and 'kind' are required for LDAP definition." # noqa: E501 raise ValueError(err_msg) @@ -751,20 +863,36 @@ def get_raw_definition(self) -> str: @property def attribute_type_names_must(self) -> list[str]: - """Display attribute types must.""" + """Display attribute types must. + + Returns: + list[str]: List of must attribute type names. + """ return [attr.name for attr in self.attribute_types_must] @property def attribute_type_names_may(self) -> list[str]: - """Display attribute types may.""" + """Display attribute types may. + + Returns: + list[str]: List of may attribute type names. + """ return [attr.name for attr in self.attribute_types_may] def __str__(self) -> str: - """ObjectClass name.""" + """ObjectClass name. + + Returns: + str: ObjectClass name. + """ return f"ObjectClass({self.name})" def __repr__(self) -> str: - """ObjectClass oid and name.""" + """ObjectClass oid and name. + + Returns: + str: ObjectClass oid and name. + """ return f"ObjectClass({self.oid}:{self.name})" diff --git a/app/multidirectory.py b/app/multidirectory.py index 03f3a9361..df451b093 100644 --- a/app/multidirectory.py +++ b/app/multidirectory.py @@ -60,7 +60,7 @@ async def proc_time_header_middleware( call_next (Callable): _description_ Returns: - Response: _description_ + Response: Response object with X-Process-Time header. 
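
# Illustrative aside: get_raw_definition above renders schema rows back into RFC 4512
# style text such as "( 2.5.4.3 NAME 'cn' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )".
# A hand-rolled approximation (field order and quoting follow the RFC examples, not
# necessarily the model's exact output):
def attribute_type_definition(
    oid: str,
    name: str,
    syntax: str,
    single_value: bool,
    no_user_modification: bool,
) -> str:
    chunks = [f"( {oid}", f"NAME '{name}'", f"SYNTAX {syntax}"]
    if single_value:
        chunks.append("SINGLE-VALUE")
    if no_user_modification:
        chunks.append("NO-USER-MODIFICATION")
    chunks.append(")")
    return " ".join(chunks)
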
""" start_time = time.perf_counter() response = await call_next(request) @@ -71,12 +71,24 @@ async def proc_time_header_middleware( @asynccontextmanager async def _lifespan(app: FastAPI) -> AsyncIterator[None]: + """Lifespan context manager. + + Args: + app (FastAPI): FastAPI application. + """ yield await app.state.dishka_container.close() def _create_basic_app(settings: Settings) -> FastAPI: - """Create basic FastAPI app with dependencies overrides.""" + """Create basic FastAPI app with dependencies overrides. + + Args: + settings: Settings: + + Returns: + FastAPI: Configured FastAPI application. + """ app = FastAPI( name="MultiDirectory", title="MultiDirectory", @@ -114,7 +126,14 @@ def _create_basic_app(settings: Settings) -> FastAPI: def _create_shadow_app(settings: Settings) -> FastAPI: - """Create shadow FastAPI app for shadow.""" + """Create shadow FastAPI app for shadow. + + Args: + settings: Settings: + + Returns: + FastAPI: Configured FastAPI application for shadow API. + """ app = FastAPI( name="Shadow API", title="Internal API", @@ -130,7 +149,13 @@ def create_prod_app( factory: Callable[[Settings], FastAPI] = _create_basic_app, settings: Settings | None = None, ) -> FastAPI: - """Create production app with container.""" + """Create production app with container. + + Args: + factory: Callable[[Settings]: + FastAPI]: (Default value = _create_basic_app) + settings: Settings | None: (Default value = None) + """ settings = settings or Settings.from_os() app = factory(settings) container = make_async_container( @@ -149,7 +174,11 @@ def create_prod_app( def ldap(settings: Settings) -> None: - """Run server.""" + """Run server. + + Args: + settings: Settings: + """ async def _servers(settings: Settings) -> None: servers = [] @@ -169,6 +198,7 @@ async def _servers(settings: Settings) -> None: await asyncio.gather(*servers) def _run() -> None: + """Description.""" uvloop.run(_servers(settings), debug=settings.DEBUG) try: diff --git a/app/schedule.py b/app/schedule.py index b4ce712c5..363237ec1 100644 --- a/app/schedule.py +++ b/app/schedule.py @@ -51,7 +51,11 @@ async def _schedule( def scheduler(settings: Settings) -> None: - """Sript entrypoint.""" + """Sript entrypoint. + + Args: + settings: Settings: + """ async def runner(settings: Settings) -> None: container = make_async_container( @@ -64,6 +68,7 @@ async def runner(settings: Settings) -> None: tg.create_task(_schedule(task, timeout, container)) def _run() -> None: + """Run the scheduler.""" uvloop.run(runner(settings)) try: diff --git a/app/security.py b/app/security.py index 20444349c..24a45d05e 100644 --- a/app/security.py +++ b/app/security.py @@ -13,8 +13,10 @@ def verify_password(plain_password: str, hashed_password: str) -> bool: """Validate password. Args: - plain_password (str): raw password - hashed_password (str): pwd hash from db + plain_password(str): raw password + hashed_password(str): pwd hash from db + plain_password: str: + hashed_password: str: Returns: bool: is password valid @@ -26,7 +28,8 @@ def get_password_hash(password: str) -> str: """Hash password. Args: - password (str): raw pwd + password(str): raw pwd + password: str: Returns: str: hash diff --git a/pyproject.toml b/pyproject.toml index a98522526..73cf925f8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -174,7 +174,7 @@ ignore-var-parameters = true ignore-variadic-names = true [tool.ruff.lint.per-file-ignores] -"tests/*.py" = ["S101"] # Ignore `Flake8-bandit S101` rule for the `tests/` directory. 
+"tests/*.py" = ["S101", "D104", "DOC501", "D417", "DOC201", "DOC402"] # Ignore rules for the `tests/` directory. "alembic/*.py" = ["I001"] # Ignore `Flake8-isort IO01` rule for the `alembic/` directory. It works incorrect in CI ruff test. [tool.ruff.lint.mccabe] diff --git a/tests/conftest.py b/tests/conftest.py index 858e0f7b3..dbcce4a8d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -146,7 +146,11 @@ async def resolve() -> str: @provide(scope=Scope.RUNTIME, provides=AsyncEngine) def get_engine(self, settings: Settings) -> AsyncEngine: - """Get async engine.""" + """Get async engine. + + Args: + settings: Settings: + """ return create_async_engine(str(settings.POSTGRES_URI), pool_size=10) @provide(scope=Scope.APP, provides=async_sessionmaker[AsyncSession]) @@ -154,7 +158,11 @@ def get_session_factory( self, engine: AsyncEngine, ) -> async_sessionmaker[AsyncSession]: - """Create session factory.""" + """Create session factory. + + Args: + engine: AsyncEngine: + """ return async_sessionmaker( engine, expire_on_commit=False, @@ -320,10 +328,22 @@ async def _migrations( config.attributes["app_settings"] = settings def upgrade(conn: AsyncConnection) -> None: + """Description. + + Args: + conn: AsyncConnection: + + """ config.attributes["connection"] = conn command.upgrade(config, "head") def downgrade(conn: AsyncConnection) -> None: + """Description. + + Args: + conn: AsyncConnection: + + """ config.attributes["connection"] = conn command.downgrade(config, "base") @@ -413,7 +433,12 @@ def _server( event_loop: asyncio.BaseEventLoop, handler: PoolClientHandler, ) -> Generator: - """Run server in background.""" + """Run server in background. + + Args: + event_loop: asyncio.BaseEventLoop: + handler: PoolClientHandler: + """ task = asyncio.ensure_future(handler.start(), loop=event_loop) event_loop.run_until_complete(asyncio.sleep(0.1)) yield @@ -423,7 +448,11 @@ def _server( @pytest.fixture def ldap_client(settings: Settings) -> ldap3.Connection: - """Get ldap clinet without a creds.""" + """Get ldap clinet without a creds. + + Args: + settings: Settings: + """ return ldap3.Connection( ldap3.Server(str(settings.HOST), settings.PORT, get_info="ALL") ) @@ -489,7 +518,11 @@ async def http_client( @pytest.fixture def creds(user: dict) -> TestCreds: - """Get creds from test data.""" + """Get creds from test data. + + Args: + user: dict: + """ return TestCreds(user["sam_accout_name"], user["password"]) @@ -501,7 +534,11 @@ def user() -> dict: @pytest.fixture def _force_override_tls(settings: Settings) -> Iterator: - """Override tls status for tests.""" + """Override tls status for tests. + + Args: + settings: Settings: + """ current_status = settings.USE_CORE_TLS settings.USE_CORE_TLS = True yield diff --git a/tests/test_api/test_main/test_kadmin.py b/tests/test_api/test_main/test_kadmin.py index 6cdfc2e03..15cfa1e60 100644 --- a/tests/test_api/test_main/test_kadmin.py +++ b/tests/test_api/test_main/test_kadmin.py @@ -22,6 +22,12 @@ def _create_test_user_data( name: str, pw: str, ) -> dict[str, str | list[dict[str, str | list[str]]]]: + """Description. + + Args: + name: str: + pw: str: + """ return { "entry": "cn=ktest,dc=md,dc=test", "password": pw, diff --git a/tests/test_api/test_shadow/conftest.py b/tests/test_api/test_shadow/conftest.py index ebae802b6..b0c641cb5 100644 --- a/tests/test_api/test_shadow/conftest.py +++ b/tests/test_api/test_shadow/conftest.py @@ -15,13 +15,7 @@ class ProxyRequestModel(BaseModel): - """Model for the proxy request. 
- - Attributes: - principal: Unique user identifier - ip: IP address from which the request is made - - """ + """Model for the proxy request.""" principal: str ip: str From 23c6d5ff8d62ab653ac23ea81fe2ea056ff6e2d6 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Wed, 4 Jun 2025 13:54:56 +0300 Subject: [PATCH 04/25] refactor: docstrings convert to google task_508 --- app/api/auth/router.py | 30 ++++- app/api/auth/router_mfa.py | 41 +++---- app/api/auth/router_pwd_policy.py | 38 ++++++- app/api/auth/schema.py | 5 + app/api/auth/session_router.py | 32 +++++- app/api/auth/utils.py | 11 +- app/api/exception_handlers.py | 19 +++- app/api/ldap_schema/attribute_type_router.py | 52 ++++----- app/api/ldap_schema/object_class_router.py | 61 ++++------ app/api/main/ap_router.py | 5 +- app/api/main/dns_router.py | 51 ++++++++- app/api/main/krb5_router.py | 107 ++++++++++-------- app/api/main/router.py | 61 +++++++++- app/api/main/schema.py | 16 ++- app/api/main/utils.py | 10 +- app/api/network/router.py | 10 +- app/api/shadow/router.py | 15 ++- app/extra/dump_acme_certs.py | 5 +- app/extra/scripts/uac_sync.py | 4 +- app/ldap_protocol/asn1parser.py | 2 +- app/ldap_protocol/kerberos/__init__.py | 6 + app/ldap_protocol/kerberos/utils.py | 13 ++- app/ldap_protocol/ldap_requests/abandon.py | 3 + app/ldap_protocol/ldap_requests/base.py | 14 ++- .../ldap_requests/bind_methods/simple.py | 18 ++- app/ldap_protocol/ldap_requests/delete.py | 16 ++- app/ldap_protocol/ldap_requests/modify_dn.py | 16 ++- .../ldap_schema/attribute_type_crud.py | 3 + app/ldap_protocol/policies/access_policy.py | 8 +- app/ldap_protocol/policies/network_policy.py | 18 +-- app/ldap_protocol/session_storage.py | 77 ++++++++++--- app/ldap_protocol/utils/helpers.py | 4 +- app/ldap_protocol/utils/pagination.py | 15 ++- app/ldap_protocol/utils/queries.py | 9 +- .../utils/raw_definition_parser.py | 29 ++++- app/models.py | 3 +- app/multidirectory.py | 7 +- 37 files changed, 595 insertions(+), 239 deletions(-) diff --git a/app/api/auth/router.py b/app/api/auth/router.py index 37a5c1f78..e3113c772 100644 --- a/app/api/auth/router.py +++ b/app/api/auth/router.py @@ -63,7 +63,7 @@ async def login( """Create session to cookies and storage. - **username**: username formats: - `DN`, `userPrincipalName`, `saMAccountName` + `DN`, `userPrincipalName`, `saMAccountName` - **password**: password \f @@ -153,7 +153,14 @@ async def login( async def users_me( user: Annotated[UserSchema, Depends(get_current_user)], ) -> UserSchema: - """Get current logged in user data.""" + """Get current logged in user data. + + Args: + user (UserSchema): Current user schema from dependency. + + Returns: + UserSchema: Current user data. + """ return user @@ -185,7 +192,6 @@ async def password_reset( `userPrincipalName`, `saMAccountName` or `DN` - **new_password**: password to set \f - Args: identity (str): Reset target user identity. new_password (str): New password for user. @@ -233,6 +239,12 @@ async def check_setup(session: FromDishka[AsyncSession]) -> bool: """Check if initial setup needed. True if setup already complete, False if setup is needed. + + Args: + session (FromDishka[AsyncSession]): Database session. + + Returns: + bool: True if setup is complete, False if setup is needed. """ query = select(exists(Directory).where(Directory.parent_id.is_(None))) retval = await session.scalars(query) @@ -248,7 +260,17 @@ async def first_setup( request: SetupRequest, session: FromDishka[AsyncSession], ) -> None: - """Perform initial setup.""" + """Perform initial setup. 
+ + Args: + request (SetupRequest): Setup request containing domain and user data. + session (FromDishka[AsyncSession]): Database session. + + Raises: + HTTPException: 422 if password policy validation fails + HTTPException: 423 if setup already performed + HTTPException: 424 if integrity error occurs during setup. + """ setup_already_performed = await session.scalar( select(Directory) .filter(Directory.parent_id.is_(None)) diff --git a/app/api/auth/router_mfa.py b/app/api/auth/router_mfa.py index da1b577b7..eb7efbaf9 100644 --- a/app/api/auth/router_mfa.py +++ b/app/api/auth/router_mfa.py @@ -64,7 +64,6 @@ async def setup_mfa( """Set mfa credentials, rewrites if exists. \f - Args: mfa (MFACreateRequest): MuliFactor credentials session (FromDishka[AsyncSession]): db @@ -121,6 +120,9 @@ async def get_mfa( """Get MFA creds. \f + Args: + mfa_creds (FromDishka[MFA_HTTP_Creds]): creds for http app. + mfa_creds_ldap (FromDishka[MFA_LDAP_Creds]): creds for ldap app. Returns: MFAGetResponse: response. @@ -155,15 +157,14 @@ async def callback_mfa( Callback endpoint for MFA. \f - Args: - session (FromDishka[AsyncSession]): db - storage (FromDishka[SessionStorage]): session storage - settings (FromDishka[Settings]): app settings - mfa_creds (FromDishka[MFA_HTTP_Creds]): creds for multifactor - (http app) - :param Annotated[IPv4Address | IPv6Address, Depends ip: client ip - :param Annotated[str, Form access_token: token from multifactor callback + access_token (str): Token from multifactor callback. + session (FromDishka[AsyncSession]): db session. + storage (FromDishka[SessionStorage]): session storage. + settings (FromDishka[Settings]): app settings. + mfa_creds (FromDishka[MFA_HTTP_Creds]): creds for http app. + ip (IPv4Address | IPv6Address): Client IP address. + user_agent (str): Client user agent string. Raises: HTTPException: if mfa not set up @@ -218,16 +219,17 @@ async def two_factor_protocol( """Initiate two factor protocol with app. \f - :param Annotated[OAuth2Form, Depends form: password form - Args: - request (Request): FastAPI request - session (FromDishka[AsyncSession]): db - api (FromDishka[MultifactorAPI]): wrapper for MFA DAO - settings (FromDishka[Settings]): app settings - storage (FromDishka[SessionStorage]): redis storage - response (Response): FastAPI response - :param Annotated[IPv4Address | IPv6Address, Depends ip: client ip + form (Annotated[OAuth2Form, Depends]): Password form containing\ + username and password. + request (Request): FastAPI request. + session (FromDishka[AsyncSession]): Database session. + api (FromDishka[MultifactorAPI]): Wrapper for MFA DAO. + settings (FromDishka[Settings]): App settings. + storage (FromDishka[SessionStorage]): Redis storage. + response (Response): FastAPI response. + ip (Annotated[IPv4Address | IPv6Address, Depends]): Client IP address. + user_agent (Annotated[str, Depends]): Client user agent string. Raises: HTTPException: Missing API credentials @@ -236,8 +238,7 @@ async def two_factor_protocol( HTTPException: Multifactor error Returns: - MFAChallengeResponse: {'status': 'pending', 'message': - https://example.com}. + MFAChallengeResponse: Response containing status and message. 
""" if not api: raise HTTPException( diff --git a/app/api/auth/router_pwd_policy.py b/app/api/auth/router_pwd_policy.py index 3fcbc70c6..aecf8a2d2 100644 --- a/app/api/auth/router_pwd_policy.py +++ b/app/api/auth/router_pwd_policy.py @@ -25,7 +25,15 @@ async def create_policy( policy: PasswordPolicySchema, session: FromDishka[AsyncSession], ) -> PasswordPolicySchema: - """Create current policy setting.""" + """Create current policy setting. + + Args: + policy (PasswordPolicySchema): Password policy schema to create. + session (AsyncSession): Database session. + + Returns: + PasswordPolicySchema: Created password policy schema. + """ return await policy.create_policy_settings(session) @@ -33,7 +41,14 @@ async def create_policy( async def get_policy( session: FromDishka[AsyncSession], ) -> PasswordPolicySchema: - """Get current policy setting.""" + """Get current policy setting. + + Args: + session (AsyncSession): Database session. + + Returns: + PasswordPolicySchema: Current password policy schema. + """ return await PasswordPolicySchema.get_policy_settings(session) @@ -42,7 +57,15 @@ async def update_policy( policy: PasswordPolicySchema, session: FromDishka[AsyncSession], ) -> PasswordPolicySchema: - """Update current policy setting.""" + """Update current policy setting. + + Args: + policy (PasswordPolicySchema): Password policy schema to update. + session (AsyncSession): Database session. + + Returns: + PasswordPolicySchema: Updated password policy schema. + """ await policy.update_policy_settings(session) return policy @@ -51,5 +74,12 @@ async def update_policy( async def reset_policy( session: FromDishka[AsyncSession], ) -> PasswordPolicySchema: - """Reset current policy setting.""" + """Reset current policy setting. + + Args: + session (AsyncSession): Database session. + + Returns: + PasswordPolicySchema: Reset password policy schema. + """ return await PasswordPolicySchema.delete_policy_settings(session) diff --git a/app/api/auth/schema.py b/app/api/auth/schema.py index 69cd70e2f..fe51b727c 100644 --- a/app/api/auth/schema.py +++ b/app/api/auth/schema.py @@ -72,6 +72,11 @@ def validate_domain(cls, v: str) -> str: # noqa Args: v: str: + Raises: + ValueError: If the domain is invalid. + + Returns: + str: Validated domain string. """ if re.match(_domain_re, v) is None: raise ValueError("Invalid domain value") diff --git a/app/api/auth/session_router.py b/app/api/auth/session_router.py index f6c0d4403..07c999522 100644 --- a/app/api/auth/session_router.py +++ b/app/api/auth/session_router.py @@ -26,7 +26,19 @@ async def get_user_session( storage: FromDishka[SessionStorage], session: FromDishka[AsyncSession], ) -> dict[str, SessionContentSchema]: - """Get user (upn, san or dn) data.""" + """Get user session data by UPN, SAN, or DN. + + Args: + upn (str): User principal name, SAN, or DN. + storage (SessionStorage): Session storage dependency. + session (AsyncSession): Database session. + + Returns: + dict[str, SessionContentSchema]: Dictionary of session data for user. + + Raises: + HTTPException: If user is not found. + """ user = await get_user(session, upn) if not user: raise HTTPException(status.HTTP_404_NOT_FOUND, "User not found.") @@ -39,7 +51,16 @@ async def delete_user_sessions( storage: FromDishka[SessionStorage], session: FromDishka[AsyncSession], ) -> None: - """Delete user (upn, san or dn) data.""" + """Delete all sessions for a user by UPN, SAN, or DN. + + Args: + upn (str): User principal name, SAN, or DN. + storage (SessionStorage): Session storage dependency. 
+ session (AsyncSession): Database session. + + Raises: + HTTPException: If user is not found. + """ user = await get_user(session, upn) if not user: raise HTTPException(status.HTTP_404_NOT_FOUND, "User not found.") @@ -54,5 +75,10 @@ async def delete_session( session_id: str, storage: FromDishka[SessionStorage], ) -> None: - """Delete current logged in user data.""" + """Delete a specific user session by session ID. + + Args: + session_id (str): Session identifier. + storage (SessionStorage): Session storage dependency. + """ await storage.delete_user_session(session_id) diff --git a/app/api/auth/utils.py b/app/api/auth/utils.py index 66f13550e..d08acff46 100644 --- a/app/api/auth/utils.py +++ b/app/api/auth/utils.py @@ -19,11 +19,13 @@ def get_ip_from_request(request: Request) -> IPv4Address | IPv6Address: """Get IP address from request. Args: - request(Request): The incoming request object. - request: Request: + request (Request): The incoming request object. + + Raises: + HTTPException: If the request client is None. Returns: - IPv4Address | None: The IP address or None. + IPv4Address | IPv6Address: The IP address or None. """ forwarded_for = request.headers.get("X-Forwarded-For") if forwarded_for: @@ -69,6 +71,9 @@ async def create_and_set_session_key( session (AsyncSession): db session settings (Settings): app settings response (Response): fastapi response object + storage (SessionStorage): session storage backend + ip (IPv4Address | IPv6Address): IP address of the client + user_agent (str): user agent string of the client """ await set_last_logon_user(user, session, settings.TIMEZONE) diff --git a/app/api/exception_handlers.py b/app/api/exception_handlers.py index ec0c416b7..80d21e98b 100644 --- a/app/api/exception_handlers.py +++ b/app/api/exception_handlers.py @@ -10,11 +10,14 @@ def handle_db_connect_error( request: Request, # noqa: ARG001 exc: Exception, ) -> NoReturn: - """Handle duplicate. + """Handle database connection errors. Args: - request: Request: - # noqa: ARG001exc: Exception: + request (Request): FastAPI request object. + exc (Exception): Exception instance. + + Raises: + HTTPException: If connection pool is exceeded or backend error occurs. """ if "QueuePool limit of size" in str(exc): logger.critical("POOL EXCEEDED {}", exc) @@ -33,6 +36,14 @@ async def handle_dns_error( request: Request, # noqa: ARG001 exc: Exception, ) -> NoReturn: - """Handle EmptyLabel exception.""" + """Handle DNS-related errors. + + Args: + request (Request): FastAPI request object. + exc (Exception): Exception instance. + + Raises: + HTTPException: Always raised for DNS errors. + """ logger.critical("DNS manager error: {}", exc) raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) diff --git a/app/api/ldap_schema/attribute_type_router.py b/app/api/ldap_schema/attribute_type_router.py index cb8045139..0650c9aae 100644 --- a/app/api/ldap_schema/attribute_type_router.py +++ b/app/api/ldap_schema/attribute_type_router.py @@ -38,11 +38,9 @@ async def create_one_attribute_type( ) -> None: """Create a new attribute type. - \f - Args: request_data (AttributeTypeSchema): Data for creating attribute type. - session (FromDishka[AsyncSession]): Database session. + session (AsyncSession): Database session. """ await create_attribute_type( oid=request_data.oid, @@ -64,19 +62,17 @@ async def get_one_attribute_type( attribute_type_name: str, session: FromDishka[AsyncSession], ) -> AttributeTypeSchema: - """Retrieve a one attribute types. - - \f + """Retrieve a single attribute type by name. 
Args: - attribute_type_name (str): Name of the Attribute Type. - session (FromDishka[AsyncSession]): Database session. - - Raises: - HTTP_404_NOT_FOUND: If Attribute Type not found. + attribute_type_name (str): Name of the attribute type. + session (AsyncSession): Database session. Returns: - AttributeTypeSchema: One Attribute Type Schemas. + AttributeTypeSchema: Attribute type schema. + + Raises: + HTTPException: If attribute type not found. """ attribute_type = await get_attribute_type_by_name( attribute_type_name, @@ -102,17 +98,15 @@ async def get_list_attribute_types_with_pagination( session: FromDishka[AsyncSession], page_size: int = 50, ) -> AttributeTypePaginationSchema: - """Retrieve a list of all attribute types with paginate. - - \f + """Retrieve a paginated list of attribute types. Args: - page_number (int): Number of page. - session (FromDishka[AsyncSession]): Database session. - page_size (int): Number of items per page. + page_number (int): Page number. + session (AsyncSession): Database session. + page_size (int, optional): Number of items per page. Defaults to 50. Returns: - AttributeTypePaginationSchema: Paginator. + AttributeTypePaginationSchema: Paginated attribute types. """ params = PaginationParams( page_number=page_number, @@ -141,18 +135,15 @@ async def modify_one_attribute_type( request_data: AttributeTypeUpdateSchema, session: FromDishka[AsyncSession], ) -> None: - """Modify an Attribute Type. - - \f + """Modify an attribute type. Args: - attribute_type_name (str): Name of the attribute type for modifying. - request_data (AttributeTypeUpdateSchema): Changed data. - session (FromDishka[AsyncSession]): Database session. + attribute_type_name (str): Name of the attribute type to modify. + request_data (AttributeTypeUpdateSchema): Data to update. + session (AsyncSession): Database session. Raises: - HTTP_404_NOT_FOUND: If attribute type not found. - HTTP_400_BAD_REQUEST: If attribute type is system->cannot be changed + HTTPException: If attribute type not found or is a system attribute. """ attribute_type = await get_attribute_type_by_name( attribute_type_name, @@ -189,13 +180,12 @@ async def delete_bulk_attribute_types( ) -> None: """Delete attribute types by their names. - \f Args: - attribute_types_names (list[str]): List of attribute types names. - session (FromDishka[AsyncSession]): Database session. + attribute_types_names (list[str]): List of attribute type names. + session (AsyncSession): Database session. Raises: - HTTP_400_BAD_REQUEST: If nothing to delete. + HTTPException: If no attribute type names are provided. """ if not attribute_types_names: raise HTTPException( diff --git a/app/api/ldap_schema/object_class_router.py b/app/api/ldap_schema/object_class_router.py index e3c07c7df..85a2157d6 100644 --- a/app/api/ldap_schema/object_class_router.py +++ b/app/api/ldap_schema/object_class_router.py @@ -1,4 +1,4 @@ -"""Attribute Type management routers. +"""Object Class management routers. Copyright (c) 2024 MultiFactor License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE @@ -34,13 +34,11 @@ async def create_one_object_class( request_data: ObjectClassSchema, session: FromDishka[AsyncSession], ) -> None: - """Create a new Object Class. - - \f + """Create a new object class. Args: - request_data (ObjectClassSchema): Data for creating Object Class. - session (FromDishka[AsyncSession]): Database session. + request_data (ObjectClassSchema): Data for creating object class. + session (AsyncSession): Database session. 
""" await create_object_class( oid=request_data.oid, @@ -63,19 +61,17 @@ async def get_one_object_class( object_class_name: str, session: FromDishka[AsyncSession], ) -> ObjectClassSchema: - """Retrieve a one object class. - - \f + """Retrieve a single object class by name. Args: - object_class_name (str): Name of the Object Class. - session (FromDishka[AsyncSession]): Database session. - - Raises: - HTTP_404_NOT_FOUND: If Object Class not found. + object_class_name (str): Name of the object class. + session (AsyncSession): Database session. Returns: - ObjectClassSchema: One Object Class Schemas. + ObjectClassSchema: Object class schema. + + Raises: + HTTPException: If object class not found. """ object_class = await get_object_class_by_name( object_class_name, @@ -101,17 +97,15 @@ async def get_list_object_classes_with_pagination( session: FromDishka[AsyncSession], page_size: int = 25, ) -> ObjectClassPaginationSchema: - """Retrieve a list of all object classes with paginate. - - \f + """Retrieve a paginated list of object classes. Args: - page_number (int): Number of page. - session (FromDishka[AsyncSession]): Database session. - page_size (int): Number of items per page. + page_number (int): Page number. + session (AsyncSession): Database session. + page_size (int, optional): Number of items per page. Defaults to 25. Returns: - ObjectClassPaginationSchema: Paginator. + ObjectClassPaginationSchema: Paginated object classes. """ params = PaginationParams( page_number=page_number, @@ -140,18 +134,15 @@ async def modify_one_object_class( request_data: ObjectClassUpdateSchema, session: FromDishka[AsyncSession], ) -> None: - """Modify an Object Class. - - \f + """Modify an object class. Args: - object_class_name (str): Name of the Object Class for modifying. - request_data (ObjectClassUpdateSchema): Changed data. - session (FromDishka[AsyncSession]): Database session. + object_class_name (str): Name of the object class to modify. + request_data (ObjectClassUpdateSchema): Data to update. + session (AsyncSession): Database session. Raises: - HTTP_404_NOT_FOUND: If nothing to delete. - HTTP_400_BAD_REQUEST: If object class is system->cannot be changed + HTTPException: If object class not found or is a system object class. """ object_class = await get_object_class_by_name(object_class_name, session) if not object_class: @@ -181,16 +172,14 @@ async def delete_bulk_object_classes( object_classes_names: Annotated[list[str], Body(embed=True)], session: FromDishka[AsyncSession], ) -> None: - """Delete Object Classes by their names. - - \f + """Delete object classes by their names. Args: - object_classes_names (list[str]): List of Object Classes names. - session (FromDishka[AsyncSession]): Database session. + object_classes_names (list[str]): List of object class names. + session (AsyncSession): Database session. Raises: - HTTP_400_BAD_REQUEST: If nothing to delete. + HTTPException: If no object class names are provided. """ if not object_classes_names: raise HTTPException( diff --git a/app/api/main/ap_router.py b/app/api/main/ap_router.py index 2045b50bf..02d1458f0 100644 --- a/app/api/main/ap_router.py +++ b/app/api/main/ap_router.py @@ -27,10 +27,11 @@ async def get_access_policies( """Get APs. \f - Args: - policy (AccessPolicySchema): ap session (FromDishka[AsyncSession]): db. + + Returns: + list[MaterialAccessPolicySchema]: list of access policies. 
""" return [ MaterialAccessPolicySchema( diff --git a/app/api/main/dns_router.py b/app/api/main/dns_router.py index 2e664351a..34888a01c 100644 --- a/app/api/main/dns_router.py +++ b/app/api/main/dns_router.py @@ -41,7 +41,12 @@ async def create_record( data: DNSServiceRecordCreateRequest, dns_manager: FromDishka[AbstractDNSManager], ) -> None: - """Create DNS record with given params.""" + """Create a DNS record with the given parameters. + + Args: + data (DNSServiceRecordCreateRequest): DNS record creation request data. + dns_manager (AbstractDNSManager): DNS manager dependency. + """ await dns_manager.create_record( data.record_name, data.record_value, @@ -55,7 +60,12 @@ async def delete_single_record( data: DNSServiceRecordDeleteRequest, dns_manager: FromDishka[AbstractDNSManager], ) -> None: - """Delete DNS record with given params.""" + """Delete a DNS record with the given parameters. + + Args: + data (DNSServiceRecordDeleteRequest): DNS record deletion request data. + dns_manager (AbstractDNSManager): DNS manager dependency. + """ await dns_manager.delete_record( data.record_name, data.record_value, @@ -68,7 +78,12 @@ async def update_record( data: DNSServiceRecordUpdateRequest, dns_manager: FromDishka[AbstractDNSManager], ) -> None: - """Update DNS record with given params.""" + """Update a DNS record with the given parameters. + + Args: + data (DNSServiceRecordUpdateRequest): DNS record update request data. + dns_manager (AbstractDNSManager): DNS manager dependency. + """ await dns_manager.update_record( data.record_name, data.record_value, @@ -81,7 +96,14 @@ async def update_record( async def get_all_records( dns_manager: FromDishka[AbstractDNSManager], ) -> list[DNSRecords]: - """Get all DNS records of current zone.""" + """Get all DNS records of the current zone. + + Args: + dns_manager (AbstractDNSManager): DNS manager dependency. + + Returns: + list[DNSRecords]: List of DNS records. + """ return await dns_manager.get_all_records() @@ -90,7 +112,15 @@ async def get_dns_status( session: FromDishka[AsyncSession], dns_settings: FromDishka[DNSManagerSettings], ) -> dict[str, str | None]: - """Get DNS service status.""" + """Get DNS service status. + + Args: + session (AsyncSession): Database session. + dns_settings (DNSManagerSettings): DNS manager settings. + + Returns: + dict[str, str | None]: DNS status, zone name, and DNS server IP. + """ state = await get_dns_state(session) return { "dns_status": state, @@ -108,7 +138,16 @@ async def setup_dns( ) -> None: """Set up DNS service. - Create zone file, get TSIG key, reload DNS server if selfhosted. + Creates zone file, gets TSIG key, reloads DNS server if self-hosted. + + Args: + data (DNSServiceSetupRequest): DNS setup request data. + dns_manager (AbstractDNSManager): DNS manager dependency. + session (AsyncSession): Database session. + settings (Settings): Application settings. + + Raises: + HTTPException: If DNS setup fails. """ zone_file = None conf_part = None diff --git a/app/api/main/krb5_router.py b/app/api/main/krb5_router.py index d877e917b..51959bcdb 100644 --- a/app/api/main/krb5_router.py +++ b/app/api/main/krb5_router.py @@ -64,14 +64,17 @@ async def setup_krb_catalogue( ldap_session: Annotated[LDAPSession, Depends(get_ldap_session)], kadmin: FromDishka[AbstractKadmin], ) -> None: - """Generate tree for kdc/kadmin. + """Generate tree for KDC/Kadmin. 
- :param Annotated[AsyncSession, Depends session: db - :param Annotated[EmailStr, Body mail: krbadmin email - :param Annotated[SecretStr, Body krbadmin_password: pw + Args: + session (AsyncSession): Database session. + mail (EmailStr): Kerberos admin email. + krbadmin_password (SecretStr): Kerberos admin password. + ldap_session (LDAPSession): LDAP session. + kadmin (AbstractKadmin): Kadmin manager. Raises: - HTTPException: on conflict + HTTPException: On conflict or failed creation. """ base_dn_list = await get_base_directories(session) base_dn = base_dn_list[0].path_dn @@ -156,20 +159,26 @@ async def setup_kdc( settings: FromDishka[Settings], kadmin: FromDishka[AbstractKadmin], request: Request, -) -> None: +) -> Response: """Set up KDC server. - Create data structure in catalogue, generate config files, trigger commands + Creates data structure in catalogue, generates config files, + and triggers commands. - - **mail**: krbadmin mail - - **password**: krbadmin password + Args: + data (KerberosSetupRequest): Kerberos setup request data. + user (UserSchema): Current user. + session (AsyncSession): Database session. + settings (Settings): Application settings. + kadmin (AbstractKadmin): Kadmin manager. + request (Request): FastAPI request. - \f - :param Annotated[EmailStr, Body mail: json, defaults to 'admin')] - :param Annotated[str, Body password: json, defaults to 'password')] - :param Annotated[AsyncSession, Depends session: db - :param Annotated[LDAPSession, Depends ldap_session: ldap session - """ + Returns: + Response: Background task response. + + Raises: + HTTPException: On authentication or KDC setup failure. + """ # noqa: DOC501 base_dn_list = await get_base_directories(session) base_dn = base_dn_list[0].path_dn domain: str = base_dn_list[0].name @@ -256,7 +265,7 @@ async def setup_kdc( data.admin_password.get_secret_value(), ) - return Response(background=task) # type: ignore + return Response(background=task) finally: await session.commit() @@ -275,10 +284,15 @@ async def ktadd( ) -> StreamingResponse: """Create keytab from kadmin server. - :param Annotated[LDAPSession, Depends ldap_session: ldap + Args: + kadmin (AbstractKadmin): Kadmin manager. + names (list[str]): List of principal names. Returns: - bytes: file + StreamingResponse: Keytab file as a streaming response. + + Raises: + HTTPException: If principal not found. """ try: response = await kadmin.ktadd(names) @@ -298,13 +312,17 @@ async def get_krb_status( session: FromDishka[AsyncSession], kadmin: FromDishka[AbstractKadmin], ) -> KerberosState: - """Get server status. + """Get Kerberos server status. - :param Annotated[AsyncSession, Depends session: db - :param Annotated[LDAPSession, Depends ldap_session: ldap + Args: + session (AsyncSession): Database session. + kadmin (AbstractKadmin): Kadmin manager. Returns: - KerberosState: state + KerberosState: Current Kerberos server state. + + Raises: + HTTPException: If unable to get server status. """ db_state = await get_krb_server_state(session) try: @@ -324,14 +342,15 @@ async def add_principal( instance: Annotated[LIMITED_STR, Body()], kadmin: FromDishka[AbstractKadmin], ) -> None: - """Create principal in kerberos with given name. + """Create principal in Kerberos with given name. - \f - :param Annotated[str, Body principal_name: upn - :param Annotated[LDAPSession, Depends ldap_session: ldap + Args: + primary (str): Principal primary name. + instance (str): Principal instance. + kadmin (AbstractKadmin): Kadmin manager. Raises: - HTTPException: on failed kamin request. 
+ HTTPException: On failed kadmin request. """ try: await kadmin.add_principal(f"{primary}/{instance}", None) @@ -348,15 +367,15 @@ async def rename_principal( principal_new_name: Annotated[LIMITED_STR, Body()], kadmin: FromDishka[AbstractKadmin], ) -> None: - """Rename principal in kerberos with given name. + """Rename principal in Kerberos. - \f - :param Annotated[str, Body principal_name: upn - :param Annotated[LIMITED_STR, Body principal_new_name: _description_ - :param Annotated[LDAPSession, Depends ldap_session: ldap + Args: + principal_name (str): Current principal name. + principal_new_name (str): New principal name. + kadmin (AbstractKadmin): Kadmin manager. Raises: - HTTPException: on failed kamin request. + HTTPException: On failed kadmin request. """ try: await kadmin.rename_princ(principal_name, principal_new_name) @@ -373,15 +392,15 @@ async def reset_principal_pw( new_password: Annotated[LIMITED_STR, Body()], kadmin: FromDishka[AbstractKadmin], ) -> None: - """Reset principal password in kerberos with given name. + """Reset principal password in Kerberos. - \f - :param Annotated[str, Body principal_name: upn - :param Annotated[LIMITED_STR, Body new_password: _description_ - :param Annotated[LDAPSession, Depends ldap_session: ldap + Args: + principal_name (str): Principal name. + new_password (str): New password. + kadmin (AbstractKadmin): Kadmin manager. Raises: - HTTPException: on failed kamin request. + HTTPException: On failed kadmin request. """ try: await kadmin.change_principal_password(principal_name, new_password) @@ -397,16 +416,14 @@ async def delete_principal( principal_name: Annotated[LIMITED_STR, Body(embed=True)], kadmin: FromDishka[AbstractKadmin], ) -> None: - """Delete principal in kerberos with given name. - - \f - :param Annotated[str, Body principal_name: upn + """Delete principal in Kerberos. Args: - kadmin (FromDishka[AbstractKadmin]): _description_ + principal_name (str): Principal name. + kadmin (AbstractKadmin): Kadmin manager. Raises: - HTTPException: on failed kamin request + HTTPException: On failed kadmin request. """ try: await kadmin.del_principal(principal_name) diff --git a/app/api/main/router.py b/app/api/main/router.py index 776839490..f3b796786 100644 --- a/app/api/main/router.py +++ b/app/api/main/router.py @@ -32,7 +32,15 @@ async def search( request: SearchRequest, req: Request, ) -> SearchResponse: - """LDAP SEARCH entry request.""" + """Handle LDAP SEARCH entry request. + + Args: + request (SearchRequest): object containing search parameters. + req (Request): object for accessing application state. + + Returns: + SearchResponse: Response containing search results and metadata. + """ responses = await request.handle_api(req.state.dishka_container) metadata: SearchResultDone = responses.pop(-1) # type: ignore @@ -51,7 +59,15 @@ async def add( request: AddRequest, req: Request, ) -> LDAPResult: - """LDAP ADD entry request.""" + """Handle LDAP ADD entry request. + + Args: + request (AddRequest): object containing entry data to add. + req (Request): object for accessing application state. + + Returns: + LDAPResult: Result of the add operation. + """ return await request.handle_api(req.state.dishka_container) @@ -60,7 +76,15 @@ async def modify( request: ModifyRequest, req: Request, ) -> LDAPResult: - """LDAP MODIFY entry request.""" + """Handle LDAP MODIFY entry request. + + Args: + request (ModifyRequest): object containing modification data. + req (Request): object for accessing application state. 
+ + Returns: + LDAPResult: Result of the modify operation. + """ return await request.handle_api(req.state.dishka_container) @@ -69,7 +93,16 @@ async def modify_many( requests: list[ModifyRequest], req: Request, ) -> list[LDAPResult]: - """Bulk LDAP MODIFY entry request.""" + """Handle bulk LDAP MODIFY entry requests. + + Args: + requests (list[ModifyRequest]): List of ModifyRequest objects\ + containing modification data. + req (Request): object for accessing application state. + + Returns: + list[LDAPResult]: List of results for each modify operation. + """ results = [] for request in requests: results.append(await request.handle_api(req.state.dishka_container)) @@ -81,7 +114,15 @@ async def modify_dn( request: ModifyDNRequest, req: Request, ) -> LDAPResult: - """LDAP MODIFY entry DN request.""" + """Handle LDAP MODIFY entry DN request. + + Args: + request (ModifyDNRequest): object containing DN modification data. + req (Request): object for accessing application state. + + Returns: + LDAPResult: Result of the DN modify operation. + """ return await request.handle_api(req.state.dishka_container) @@ -90,5 +131,13 @@ async def delete( request: DeleteRequest, req: Request, ) -> LDAPResult: - """LDAP DELETE entry request.""" + """Handle LDAP DELETE entry request. + + Args: + request (DeleteRequest): object containing entry to delete. + req (Request): object for accessing application state. + + Returns: + LDAPResult: Result of the delete operation. + """ return await request.handle_api(req.state.dishka_container) diff --git a/app/api/main/schema.py b/app/api/main/schema.py index d910a59b1..3d9bd6725 100644 --- a/app/api/main/schema.py +++ b/app/api/main/schema.py @@ -22,7 +22,11 @@ class SearchRequest(LDAPSearchRequest): filter: str = Field(..., examples=["(objectClass=*)"]) # type: ignore def cast_filter(self) -> UnaryExpression | ColumnElement: - """Cast str filter to sa sql.""" + """Cast str filter to sa sql. + + Returns: + UnaryExpression | ColumnElement: SQL expression for the filter. + """ filter_ = self.filter.lower().replace("objectcategory", "objectclass") return cast_str_filter2sql(Filter.parse(filter_).simplify()) @@ -31,7 +35,15 @@ async def handle_api( # type: ignore self, container: AsyncContainer, ) -> list[SearchResultEntry | SearchResultDone]: - """Get all responses.""" + """Get all responses. + + Args: + container (AsyncContainer): Async container with dependencies. + + Returns: + list[SearchResultEntry | SearchResultDone]: List of LDAP search\ + result entries or done responses. + """ return await self._handle_api(container) # type: ignore diff --git a/app/api/main/utils.py b/app/api/main/utils.py index 0f98ad933..8fcc9f101 100644 --- a/app/api/main/utils.py +++ b/app/api/main/utils.py @@ -15,6 +15,14 @@ async def get_ldap_session( ldap_session: FromDishka[LDAPSession], user: Annotated[UserSchema, Depends(get_current_user)], ) -> LDAPSession: - """Create LDAP session.""" + """Create LDAP session. + + Args: + ldap_session (FromDishka[LDAPSession]): LDAP session. + user (UserSchema): Current user. + + Returns: + LDAPSession: LDAP session with user set. + """ await ldap_session.set_user(user) return ldap_session diff --git a/app/api/network/router.py b/app/api/network/router.py index 08492213b..f02555aa6 100644 --- a/app/api/network/router.py +++ b/app/api/network/router.py @@ -44,7 +44,6 @@ async def add_network_policy( """Add policy. \f - Args: policy (Policy): policy to add @@ -116,9 +115,12 @@ async def get_list_network_policies( """Get network. 
\f + Raises: + HTTPException: 404 if no policies found + HTTPException: 422 if no policies found in database. Returns: - list[PolicyResponse]: all policies + list[PolicyResponse]: List of policies with their details. """ groups = selectinload(NetworkPolicy.groups).selectinload(Group.directory) mfa_groups = selectinload(NetworkPolicy.mfa_groups).selectinload( @@ -165,7 +167,6 @@ async def delete_network_policy( """Delete policy. \f - Args: policy_id (int): id user (User): requires login @@ -213,7 +214,6 @@ async def switch_network_policy( - **policy_id**: int, policy to switch \f - Args: policy_id (int): id user (User): requires login @@ -247,7 +247,6 @@ async def update_network_policy( """Update network policy. \f - Args: policy (PolicyUpdate): update request @@ -337,7 +336,6 @@ async def swap_network_policy( - **first_policy_id**: policy to swap - **second_policy_id**: policy to swap \f - Args: first_policy_id (int): policy to swap second_policy_id (int): policy to swap diff --git a/app/api/shadow/router.py b/app/api/shadow/router.py index 932af49a5..1559e83fe 100644 --- a/app/api/shadow/router.py +++ b/app/api/shadow/router.py @@ -33,7 +33,19 @@ async def proxy_request( mfa: FromDishka[LDAPMultiFactorAPI], session: FromDishka[AsyncSession], ) -> None: - """Proxy request to mfa.""" + """Proxy request to mfa. + + Args: + principal (str): user principal name + ip (IPv4Address): user ip address + mfa (FromDishka[LDAPMultiFactorAPI]): mfa api + session (FromDishka[AsyncSession]): db session + + Raises: + HTTPException: 401 if mfa is required but not passed or failed + HTTPException: 403 if user is not allowed to use kerberos + HTTPException: 422 if user not found + """ user = await get_user(session, principal) if not user: @@ -87,7 +99,6 @@ async def sync_password( - **principal**: user upn - **new_password**: password to set \f - Args: session (FromDishka[AsyncSession]): db kadmin (FromDishka[AbstractKadmin]): kadmin api diff --git a/app/extra/dump_acme_certs.py b/app/extra/dump_acme_certs.py index 49f62e418..16e2b8d52 100644 --- a/app/extra/dump_acme_certs.py +++ b/app/extra/dump_acme_certs.py @@ -19,7 +19,10 @@ def dump_acme_cert(resolver: str = "md-resolver") -> None: try read until file contents is generated. Args: - resolver: str: (Default value = "md-resolver") + resolver: str: (Default value = "md-resolver") + + Raises: + SystemExit: If there is an error loading the TLS certificate. """ if os.path.exists("/certs/cert.pem") and os.path.exists( "/certs/privkey.pem" diff --git a/app/extra/scripts/uac_sync.py b/app/extra/scripts/uac_sync.py index 2b911431c..9936c4137 100644 --- a/app/extra/scripts/uac_sync.py +++ b/app/extra/scripts/uac_sync.py @@ -23,7 +23,9 @@ async def disable_accounts( """Update userAccountControl attr. Args: - session (AsyncSession): db + session (AsyncSession): Database session. + kadmin (AbstractKadmin): Kadmin interface for locking principals. + settings (Settings): Application settings. Original query: update "Attributes" a diff --git a/app/ldap_protocol/asn1parser.py b/app/ldap_protocol/asn1parser.py index a4a3b1da3..91327c564 100644 --- a/app/ldap_protocol/asn1parser.py +++ b/app/ldap_protocol/asn1parser.py @@ -152,7 +152,7 @@ def serialize(self, obj: "ASN1Row | T | None" = None) -> str: # noqa: C901 substring matches. 
Args: - obj: "ASN1Row | T | None": (Default value = None) + obj: "ASN1Row | T | None": (Default value = None) """ if obj is None: obj = self diff --git a/app/ldap_protocol/kerberos/__init__.py b/app/ldap_protocol/kerberos/__init__.py index 09c9d30ab..e3783cbd3 100644 --- a/app/ldap_protocol/kerberos/__init__.py +++ b/app/ldap_protocol/kerberos/__init__.py @@ -1,3 +1,9 @@ +"""Kerberos API module. + +Copyright (c) 2024 MultiFactor +License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE +""" + from sqlalchemy.ext.asyncio import AsyncSession from .base import ( diff --git a/app/ldap_protocol/kerberos/utils.py b/app/ldap_protocol/kerberos/utils.py index 1ff5f2290..9c64592e0 100644 --- a/app/ldap_protocol/kerberos/utils.py +++ b/app/ldap_protocol/kerberos/utils.py @@ -17,7 +17,7 @@ def logger_wraps(is_stub: bool = False) -> Callable: Args: is_stub(bool): flag to change logs, defaults to False - is_stub: bool: (Default value = False) + is_stub: bool: (Default value = False) Returns: Callable: any method @@ -29,6 +29,8 @@ def wrapper(func: Callable) -> Callable: Args: func: Callable: + Returns: + Callable: wrapped function """ name = func.__name__ bus_type = " stub " if is_stub else " " @@ -87,7 +89,14 @@ async def set_state(session: AsyncSession, state: "KerberosState") -> None: async def get_krb_server_state(session: AsyncSession) -> "KerberosState": - """Get kerberos server state.""" + """Get kerberos server state. + + Args: + session (AsyncSession): db session + + Returns: + KerberosState: The current kerberos server state. + """ state = await session.scalar( select(CatalogueSetting) .filter(CatalogueSetting.name == KERBEROS_STATE_NAME) diff --git a/app/ldap_protocol/ldap_requests/abandon.py b/app/ldap_protocol/ldap_requests/abandon.py index f85927805..8f9c69fd3 100644 --- a/app/ldap_protocol/ldap_requests/abandon.py +++ b/app/ldap_protocol/ldap_requests/abandon.py @@ -25,6 +25,9 @@ def from_data(cls, data: dict[str, list[ASN1Row]]) -> "AbandonRequest": # noqa: Args: data: dict[str: list[ASN1Row]]: + + Returns: + AbandonRequest: Instance of AbandonRequest. """ return cls(message_id=1) diff --git a/app/ldap_protocol/ldap_requests/base.py b/app/ldap_protocol/ldap_requests/base.py index 88dfcf99e..0d801d4b0 100644 --- a/app/ldap_protocol/ldap_requests/base.py +++ b/app/ldap_protocol/ldap_requests/base.py @@ -61,11 +61,10 @@ async def _handle_api( self, container: AsyncContainer, ) -> list[BaseResponse]: - """Hanlde response with api user. + """Handle response with api user. Args: - user (DBUser): user from db - session (AsyncSession): db session + container (AsyncContainer): Dependency injection container. Returns: list[BaseResponse]: list of handled responses @@ -97,5 +96,12 @@ async def _handle_api( return responses async def handle_api(self, container: AsyncContainer) -> LDAPResult: - """Get single response.""" + """Get single response. + + Args: + container (AsyncContainer): Dependency injection container. + + Returns: + LDAPResult: The first response from the handled API responses. + """ return (await self._handle_api(container))[0] # type: ignore diff --git a/app/ldap_protocol/ldap_requests/bind_methods/simple.py b/app/ldap_protocol/ldap_requests/bind_methods/simple.py index 8d621021d..b7362b2d3 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/simple.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/simple.py @@ -24,11 +24,10 @@ def is_valid(self, user: User | None) -> bool: """Check if pwd is valid for user. 
Args: - User: None user: indb user + user (User | None): User object or None. Returns: - bool: status - + bool: status """ password = getattr(user, "password", None) if password is not None: @@ -39,11 +38,18 @@ def is_anonymous(self) -> bool: """Check if auth is anonymous. Returns: - bool: status - + bool: True if password is empty, False otherwise. """ return not self.password async def get_user(self, session: AsyncSession, username: str) -> User: - """Get user.""" + """Get user. + + Args: + session (AsyncSession): Database session. + username (str): Username to search for. + + Returns: + User: User object if found, raises exception otherwise. + """ return await get_user(session, username) # type: ignore diff --git a/app/ldap_protocol/ldap_requests/delete.py b/app/ldap_protocol/ldap_requests/delete.py index ccf3dc7c1..501f1710b 100644 --- a/app/ldap_protocol/ldap_requests/delete.py +++ b/app/ldap_protocol/ldap_requests/delete.py @@ -47,8 +47,10 @@ def from_data(cls, data: ASN1Row) -> "DeleteRequest": """Description. Args: - data: ASN1Row: + data (ASN1Row): ASN1Row containing the entry to delete. + Returns: + DeleteRequest: Instance of DeleteRequest with the entry set. """ return cls(entry=data) @@ -59,7 +61,17 @@ async def handle( kadmin: AbstractKadmin, session_storage: SessionStorage, ) -> AsyncGenerator[DeleteResponse, None]: - """Delete request handler.""" + """Delete request handler. + + Args: + session (AsyncSession): The database session. + ldap_session (LDAPSession): The LDAP session. + kadmin (AbstractKadmin): The Kerberos administration interface. + session_storage (SessionStorage): Session storage for user sessions + + Yields: + DeleteResponse: The response to the delete request. + """ if not ldap_session.user: yield DeleteResponse(**INVALID_ACCESS_RESPONSE) return diff --git a/app/ldap_protocol/ldap_requests/modify_dn.py b/app/ldap_protocol/ldap_requests/modify_dn.py index aa96f27e3..06990f24d 100644 --- a/app/ldap_protocol/ldap_requests/modify_dn.py +++ b/app/ldap_protocol/ldap_requests/modify_dn.py @@ -81,7 +81,11 @@ def from_data(cls, data: list[ASN1Row]) -> "ModifyDNRequest": """Create structure from ASN1Row dataclass list. Args: - data: list[ASN1Row]: + data (list[ASN1Row]): List of ASN1Row objects containing\ + the request data. + + Returns: + ModifyDNRequest: Instance of ModifyDNRequest with parsed data. """ return cls( entry=data[0].value, @@ -95,7 +99,15 @@ async def handle( ldap_session: LDAPSession, session: AsyncSession, ) -> AsyncGenerator[ModifyDNResponse, None]: - """Handle message with current user.""" + """Handle message with current user. + + Args: + ldap_session (LDAPSession): Current LDAP session. + session (AsyncSession): Database session. + + Yields: + ModifyDNResponse: Response to the Modify DN request. + """ if not ldap_session.user: yield ModifyDNResponse(**INVALID_ACCESS_RESPONSE) return diff --git a/app/ldap_protocol/ldap_schema/attribute_type_crud.py b/app/ldap_protocol/ldap_schema/attribute_type_crud.py index e6454fb16..51b27bf52 100644 --- a/app/ldap_protocol/ldap_schema/attribute_type_crud.py +++ b/app/ldap_protocol/ldap_schema/attribute_type_crud.py @@ -33,6 +33,9 @@ def from_db(cls, attribute_type: AttributeType) -> "AttributeTypeSchema": Args: attribute_type: AttributeType: + + Returns: + AttributeTypeSchema: Instance of AttributeTypeSchema. 
""" return cls( oid=attribute_type.oid, diff --git a/app/ldap_protocol/policies/access_policy.py b/app/ldap_protocol/policies/access_policy.py index 26a1032b5..0bc03c81f 100644 --- a/app/ldap_protocol/policies/access_policy.py +++ b/app/ldap_protocol/policies/access_policy.py @@ -54,7 +54,13 @@ async def create_access_policy( """Get policies. Args: + name (str): access policy name + can_read (bool): can read + can_add (bool): can add + can_modify (bool): can modify + can_delete (bool): can delete grant_dn (ENTRY_TYPE): main dn + groups (list[ENTRY_TYPE]): list of groups session (AsyncSession): session """ path = get_search_path(grant_dn) @@ -94,7 +100,7 @@ def mutate_ap[T: Select]( action: Literal["add": "read": "modify": - "del"]: (Default value = "read") + "del"]: (Default value = "read") Returns: T: select(Directory).join(Directory.access_policies) diff --git a/app/ldap_protocol/policies/network_policy.py b/app/ldap_protocol/policies/network_policy.py index 2f48bd5df..ed935a681 100644 --- a/app/ldap_protocol/policies/network_policy.py +++ b/app/ldap_protocol/policies/network_policy.py @@ -23,20 +23,13 @@ def build_policy_query( """Build a base query for network policies with optional group filtering. Args: - ip (IPv4Address): IP address to filter - - Args: - Literal: is_http", "is_ldap", "is_kerberos"] protocol_field_name - protocol: Protocol to filter - list: int] | None user_group_ids: List of user group IDs, optional + ip (IPv4Address | IPv6Address): IP address to filter + protocol_field_name (Literal["is_http", "is_ldap", "is_kerberos"]):\ + Protocol to filter + user_group_ids (list[int] | None): List of user group IDs, optional Returns: - : Select query - ip: IPv4Address | IPv6Address: - protocol_field_name: Literal["is_http": - "is_ldap": - "is_kerberos"]: - user_group_ids: list[int] | None: (Default value = None) + Select: SQLAlchemy query """ protocol_field = getattr(NetworkPolicy, protocol_field_name) query = ( @@ -98,6 +91,7 @@ async def get_user_network_policy( """Get the highest priority network policy for user, ip and protocol. Args: + ip (IPv4Address | IPv6Address): IP address to filter user (User): user object session (AsyncSession): db session diff --git a/app/ldap_protocol/session_storage.py b/app/ldap_protocol/session_storage.py index a292485b5..7db56bff7 100644 --- a/app/ldap_protocol/session_storage.py +++ b/app/ldap_protocol/session_storage.py @@ -72,7 +72,9 @@ async def get_user_sessions( Args: uid (int): user id - :param ProtocolType | None protocol: protocol + protocol (ProtocolType | None): The protocol type to filter\ + sessions by (e.g., "http" or "ldap"). If None,\ + sessions for all protocols are returned. Returns: dict: user sessions contents @@ -88,7 +90,9 @@ async def get_ip_sessions( Args: ip (str): ip - :param ProtocolType | None protocol: protocol + protocol (ProtocolType | None): The protocol type to filter\ + sessions by (e.g., "http" or "ldap"). If None,\ + sessions for all protocols are returned. Returns: dict: user sessions contents @@ -118,6 +122,9 @@ def _sign(session_id: str, settings: Settings) -> str: session_id: str: settings: Settings: + Returns: + str: The HMAC signature for the session_id using provided settings. + """ return hmac.new( settings.SECRET_KEY.encode(), @@ -130,6 +137,9 @@ def get_user_agent_hash(self, user_agent: str) -> str: Args: user_agent: str: + + Returns: + str: The hash of the user agent. 
""" return hashlib.blake2b(user_agent.encode(), digest_size=6).hexdigest() @@ -140,6 +150,8 @@ def _get_ip_session_key(self, ip: str, protocol: ProtocolType) -> str: ip: str: protocol: ProtocolType: + Returns: + str: The session key for the given IP and protocol. """ return f"ip:{protocol}:{ip}" @@ -150,6 +162,8 @@ def _get_user_session_key(self, uid: int, protocol: ProtocolType) -> str: uid: int: protocol: ProtocolType: + Returns: + str: The session key for the given user and protocol. """ return f"keys:{protocol}:{uid}" @@ -159,6 +173,8 @@ def _get_protocol(self, session_id: str) -> ProtocolType: Args: session_id: str: + Returns: + ProtocolType: Protocol type ("http" or "ldap") for given session_id """ return "http" if session_id.startswith("http:") else "ldap" @@ -196,7 +212,8 @@ async def create_session( Args: uid (int): user id settings (Settings): app settings - :param dict | None extra_data: data, defaults to None + extra_data (dict | None): Additional data to include\ + in the session, defaults to None. Returns: str: session id @@ -218,7 +235,10 @@ async def get_user_id( ip (str): ip address Returns: - int: user id + int: user id. + + Raises: + KeyError: key error. """ try: session_id, signature = session_key.split(".") @@ -260,6 +280,10 @@ def _generate_session_data( uid: int: settings: Settings: extra_data: dict | None: + + Returns: + tuple[str, str, dict]: A tuple containing the session_id,\ + signature, and session data dictionary. """ if extra_data is None: extra_data = {} @@ -408,6 +432,9 @@ async def get(self, key: str) -> dict: Returns: dict: The data associated with the key, or an empty dictionary if the key is not found. + + Raises: + KeyError: If the key is not found in the storage. """ data = await self._storage.get(key) if data is None: @@ -418,7 +445,7 @@ async def delete(self, keys: Iterable[str]) -> None: """Delete data associated with the given key from storage. Args: - key (str): The key to delete from the storage. + keys (Iterable[str]): The keys to delete from the storage. """ await self._storage.delete(*keys) @@ -427,6 +454,9 @@ async def _fetch_keys(self, key: str) -> set[str]: Args: key (str): key + + Returns: + set[str]: A set of decoded keys from the storage. """ encoded_keys = await self._storage.smembers(key) # type: ignore return {k.decode() for k in encoded_keys} @@ -444,7 +474,7 @@ async def _get_session_keys_by_ip( Args: ip (str): ip - :param ProtocolType | None protocol: protocol + protocol (ProtocolType | None): protocol Returns: set[str]: session keys @@ -471,7 +501,7 @@ async def _get_session_keys_by_uid( Args: uid (int): user id - :param ProtocolType | None protocol: protocol + protocol (ProtocolType | None): protocol Returns: set[str]: session keys @@ -508,7 +538,7 @@ async def _get_sessions(self, keys: set[str], id_value: str | int) -> dict: Args: keys (set[str]): session keys - :param str | int id_value: user id or ip + id_value (str | int): user id or ip Returns: dict: user sessions contents @@ -553,7 +583,7 @@ async def get_user_sessions( Args: uid (int): user id - :param ProtocolType | None protocol: protocol + protocol (ProtocolType | None): protocol Returns: dict: user sessions contents @@ -570,7 +600,7 @@ async def get_ip_sessions( Args: ip (str): ip - :param ProtocolType | None protocol: protocol + protocol (ProtocolType | None): protocol Returns: dict: user sessions contents @@ -646,6 +676,9 @@ async def delete_user_session(self, session_id: str) -> None: Args: session_id (str): session id + + Raises: + KeyError: key error. 
""" try: data = await self.get(session_id) @@ -698,7 +731,7 @@ async def _add_session( uid (int): user id ip_session_key (str): ip session key sessions_key (str): sessions key - :param int | None ttl: time to live, defaults to None + ttl (int | None): time to live, defaults to None """ zset_key = ( self.ZSET_HTTP_SESSIONS @@ -740,10 +773,9 @@ async def create_session( Args: uid (int): user id - data (dict): data dict - secret (str): secret key - expires_minutes (int): exire time in minutes - :param Literal[refresh, access] grant_type: grant type flag + settings (Settings): settings + *, + extra_data (dict): extra data Returns: str: jwt token @@ -771,7 +803,14 @@ async def create_session( return f"{session_id}.{signature}" async def check_session(self, session_id: str) -> bool: - """Check session.""" + """Check session. + + Args: + session_id (str): session id + + Returns: + bool: True if exists. + """ return await self._storage.exists(session_id) async def create_ldap_session( @@ -796,7 +835,8 @@ async def create_ldap_session( Args: uid (int): user id - key (str): session key + key (str): The session key to use for storing the LDAP session. + This is the unique identifier for the LDAP session in storage. data (dict): any data """ data["issued"] = datetime.now(UTC).isoformat() @@ -860,6 +900,9 @@ async def _rekey_session(self, session_id: str, settings: Settings) -> str: Returns: str: jwt token + + Raises: + KeyError: key error. """ data = await self.get(session_id) diff --git a/app/ldap_protocol/utils/helpers.py b/app/ldap_protocol/utils/helpers.py index 5065cdc37..309ff11f6 100644 --- a/app/ldap_protocol/utils/helpers.py +++ b/app/ldap_protocol/utils/helpers.py @@ -214,7 +214,7 @@ def create_integer_hash(text: str, size: int = 9) -> int: text(str): any string size(int): fixed size of hash, defaults to 15 text: str: - size: int: (Default value = 9) + size: int: (Default value = 9) Returns: int: hash @@ -324,7 +324,7 @@ def create_object_sid( is added to the given RID to generate the final RID domain: Directory: rid: int: - reserved: bool: (Default value = False) + reserved: bool: (Default value = False) Returns: str: the complete objectSid as a string diff --git a/app/ldap_protocol/utils/pagination.py b/app/ldap_protocol/utils/pagination.py index 6b1f0bec1..14117403b 100644 --- a/app/ldap_protocol/utils/pagination.py +++ b/app/ldap_protocol/utils/pagination.py @@ -94,7 +94,20 @@ async def get( sqla_model: type[S], session: AsyncSession, ) -> "PaginationResult[S]": - """Get paginator.""" + """Get paginator. + + Args: + query (Select[tuple[S]]): SQLAlchemy query to execute. + params (PaginationParams): Pagination parameters. + sqla_model (type[S]): SQLAlchemy model class to paginate. + session (AsyncSession): SQLAlchemy async session. + + Raises: + ValueError: If the query does not have an order_by clause. + + Returns: + PaginationResult[S]: Paginator with metadata and items. + """ if query._order_by_clause is None or len(query._order_by_clause) == 0: raise ValueError("Select query must have an order_by clause.") diff --git a/app/ldap_protocol/utils/queries.py b/app/ldap_protocol/utils/queries.py index dfc789a7a..35406bfb6 100644 --- a/app/ldap_protocol/utils/queries.py +++ b/app/ldap_protocol/utils/queries.py @@ -138,9 +138,8 @@ async def check_kerberos_group( ) -> bool: """Check if user in kerberos group. 
- :param User | None user: user (sa model) - Args: + user (User | None): user (sa model) session (AsyncSession): db Returns: @@ -203,7 +202,8 @@ def get_path_filter( field(Column): path column, defaults to Directory.path path: list[str]: *: - column: ColumnElement | Column | InstrumentedAttribute: (Default value = Directory.path) + column: ColumnElement | Column | InstrumentedAttribute:\ + (Default value = Directory.path) Returns: ColumnElement: filter (where) element @@ -221,7 +221,8 @@ def get_filter_from_path( Args: dn: str: *: - column: Column | InstrumentedAttribute: (Default value = Directory.path) + column: Column | InstrumentedAttribute: (Default value =\ + Directory.path) """ return get_path_filter(get_search_path(dn), column=column) diff --git a/app/ldap_protocol/utils/raw_definition_parser.py b/app/ldap_protocol/utils/raw_definition_parser.py index 5a80c380b..eb009b4e9 100644 --- a/app/ldap_protocol/utils/raw_definition_parser.py +++ b/app/ldap_protocol/utils/raw_definition_parser.py @@ -19,8 +19,13 @@ def _list_to_string(data: list[str]) -> str | None: """Description. Args: - data: list[str]: + data (list[str]): list of strings + Raises: + ValueError: if list has more than one element + + Returns: + str | None: single string if list has one element """ if not data: return None @@ -33,8 +38,10 @@ def _get_attribute_type_info(raw_definition: str) -> AttributeTypeInfo: """Description. Args: - raw_definition: str: + raw_definition (str): raw definition of attribute type + Returns: + AttributeTypeInfo: parsed attribute type info """ tmp = AttributeTypeInfo.from_definition(definitions=[raw_definition]) return next(iter(tmp.values())) @@ -44,8 +51,10 @@ def get_object_class_info(raw_definition: str) -> ObjectClassInfo: """Description. Args: - raw_definition: str: + raw_definition (str): raw definition of object class + Returns: + ObjectClassInfo: parsed object class info """ tmp = ObjectClassInfo.from_definition(definitions=[raw_definition]) return next(iter(tmp.values())) @@ -68,8 +77,10 @@ def create_attribute_type_by_raw( """Description. Args: - raw_definition: str: + raw_definition (str): raw definition of attribute type + Returns: + AttributeType: created attribute type instance """ attribute_type_info = RawDefinitionParser._get_attribute_type_info( raw_definition=raw_definition @@ -102,7 +113,15 @@ async def create_object_class_by_info( session: AsyncSession, object_class_info: ObjectClassInfo, ) -> ObjectClass: - """Create Object Class by ObjectClassInfo.""" + """Create Object Class by ObjectClassInfo. + + Args: + session (AsyncSession): db session + object_class_info (ObjectClassInfo): object class info + + Returns: + ObjectClass: object class instance + """ superior_name = RawDefinitionParser._list_to_string( object_class_info.superior ) diff --git a/app/models.py b/app/models.py index c9c43e5eb..84b462433 100644 --- a/app/models.py +++ b/app/models.py @@ -244,7 +244,8 @@ def attributes_dict(self) -> defaultdict[str, list[str]]: """Description. Returns: - defaultdict[str, list[str]]: Dictionary of attribute names to their values. + defaultdict[str, list[str]]: Dictionary of attribute names\ + to their values. 
""" attributes = defaultdict(list) for attribute in self.attributes: diff --git a/app/multidirectory.py b/app/multidirectory.py index df451b093..2174c989a 100644 --- a/app/multidirectory.py +++ b/app/multidirectory.py @@ -153,8 +153,11 @@ def create_prod_app( Args: factory: Callable[[Settings]: - FastAPI]: (Default value = _create_basic_app) - settings: Settings | None: (Default value = None) + FastAPI]: (Default value = _create_basic_app) + settings: Settings | None: (Default value = None) + + Returns: + FastAPI: application. """ settings = settings or Settings.from_os() app = factory(settings) From b7e60526f4189049f010100af2217d408c944406 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Wed, 4 Jun 2025 19:17:05 +0300 Subject: [PATCH 05/25] refactor: docstings --- .github/workflows/checks.yml | 2 +- Makefile | 6 +- app/api/shadow/router.py | 5 +- app/extra/dump_acme_certs.py | 2 +- app/ioc.py | 57 ++++++++++++---- app/ldap_protocol/dialogue.py | 6 +- app/ldap_protocol/dns.py | 48 ++++++++----- app/ldap_protocol/kerberos/base.py | 68 +++++++++++++++---- app/ldap_protocol/kerberos/stub.py | 18 ++--- app/ldap_protocol/kerberos/utils.py | 3 +- app/ldap_protocol/ldap_requests/add.py | 18 +++-- app/ldap_protocol/ldap_requests/bind.py | 54 ++++++++++++--- .../ldap_requests/bind_methods/sasl_gssapi.py | 12 ++-- .../ldap_requests/bind_methods/sasl_plain.py | 18 +++-- app/ldap_protocol/ldap_requests/extended.py | 6 +- app/ldap_protocol/ldap_requests/modify.py | 14 ++-- app/ldap_protocol/ldap_requests/search.py | 13 +++- .../ldap_schema/object_class_crud.py | 10 ++- app/ldap_protocol/messages.py | 6 +- app/ldap_protocol/multifactor.py | 38 ++++++----- app/ldap_protocol/server.py | 4 +- app/ldap_protocol/session_storage.py | 1 + app/ldap_protocol/utils/queries.py | 39 +++++------ tests/test_api/test_shadow/test_router.py | 14 ++-- tests/test_ldap/test_util/test_search.py | 20 +++--- 25 files changed, 325 insertions(+), 157 deletions(-) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index e49355fd8..f796e9727 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -46,7 +46,7 @@ jobs: - name: Run linters env: NEW_TAG: linter - run: docker run $NEW_TAG ruff format --check + run: docker run $NEW_TAG ruff format --check --preview mypy: runs-on: ubuntu-latest diff --git a/Makefile b/Makefile index a986f4d8e..7da457e64 100644 --- a/Makefile +++ b/Makefile @@ -3,9 +3,9 @@ help: ## show help message @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[$$()% a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) before_pr: - ruff format . - ruff check . --preview --fix --unsafe-fixes - ruff format . + ruff format . --preview + ruff check . --fix --unsafe-fixes --select=DOC501 --preview + ruff format . --preview mypy . 
build: ## build app and manually generate self-signed cert diff --git a/app/api/shadow/router.py b/app/api/shadow/router.py index 1559e83fe..495a4b00b 100644 --- a/app/api/shadow/router.py +++ b/app/api/shadow/router.py @@ -100,10 +100,9 @@ async def sync_password( - **new_password**: password to set \f Args: + principal Annotated[str, Body]: reset target user + new_password Annotated[str, Body]: new password for user session (FromDishka[AsyncSession]): db - kadmin (FromDishka[AbstractKadmin]): kadmin api - :param Annotated[str, Body principal: reset target user - :param Annotated[str, Body new_password: new password for user Raises: HTTPException: 404 if user not found diff --git a/app/extra/dump_acme_certs.py b/app/extra/dump_acme_certs.py index 16e2b8d52..d6c23cff8 100644 --- a/app/extra/dump_acme_certs.py +++ b/app/extra/dump_acme_certs.py @@ -19,7 +19,7 @@ def dump_acme_cert(resolver: str = "md-resolver") -> None: try read until file contents is generated. Args: - resolver: str: (Default value = "md-resolver") + resolver (str): (Default value = "md-resolver") Raises: SystemExit: If there is an error loading the TLS certificate. diff --git a/app/ioc.py b/app/ioc.py index ad7185dbe..243f8f610 100644 --- a/app/ioc.py +++ b/app/ioc.py @@ -53,7 +53,10 @@ def get_engine(self, settings: Settings) -> AsyncEngine: """Get async engine. Args: - settings: Settings: + settings (Settings): settings. + + Returns: + AsyncEngine: """ return create_async_engine( str(settings.POSTGRES_URI), @@ -74,7 +77,10 @@ def get_session_factory( """Create session factory. Args: - engine: AsyncEngine: + engine (AsyncEngine): Async Engine. + + Returns: + async_sessionmaker[AsyncSession]: """ return async_sessionmaker(engine, expire_on_commit=False) @@ -83,7 +89,11 @@ async def create_session( self, async_session: async_sessionmaker[AsyncSession], ) -> AsyncIterator[AsyncSession]: - """Create session for request.""" + """Create session for request. + + Yields: + AsyncIterator[AsyncSession] + """ async with async_session() as session: yield session await session.commit() @@ -132,12 +142,11 @@ async def get_kadmin( """Get kadmin class, inherits from AbstractKadmin. Args: - settings (Settings): app settings - session_maker (AsyncSessionMaker): session maker + client (KadminHTTPClient): app settings + kadmin_class (type[AbstractKadmin]): session maker Returns: - AsyncIterator[AbstractKadmin]: kadmin with client - :yield Iterator[AsyncIterator[AbstractKadmin]]: kadmin + AbstractKadmin: kadmin with client """ return kadmin_class(client) @@ -167,7 +176,11 @@ async def get_dns_mngr( settings: DNSManagerSettings, dns_manager_class: type[AbstractDNSManager], ) -> AsyncIterator[AbstractDNSManager]: - """Get DNSManager class.""" + """Get DNSManager class. + + Yields: + AsyncIterator[AbstractDNSManager] + """ yield dns_manager_class(settings=settings) @provide(scope=Scope.APP) @@ -175,7 +188,14 @@ async def get_redis_for_sessions( self, settings: Settings, ) -> AsyncIterator[SessionStorageClient]: - """Get redis connection.""" + """Get redis connection. + + Yields: + AsyncIterator[SessionStorageClient] + + Raises: + SystemError: Redis is not available + """ client = redis.Redis.from_url(str(settings.SESSION_STORAGE_URL)) if not await client.ping(): @@ -229,7 +249,8 @@ class MFACredsProvider(Provider): async def get_auth(self, session: AsyncSession) -> Creds | None: """Admin creds get. 
- :param Annotated[AsyncSession, Depends session: session + Args: + session (AsyncSession): async session Returns: MFA_HTTP_Creds: optional creds @@ -259,7 +280,11 @@ async def get_client( self, settings: Settings, ) -> AsyncIterator[MFAHTTPClient]: - """Get async client for DI.""" + """Get async client for DI. + + Yields: + AsyncIterator[MFAHTTPClient] + """ async with httpx.AsyncClient( timeout=settings.MFA_CONNECT_TIMEOUT_SECONDS, limits=httpx.Limits( @@ -279,8 +304,9 @@ async def get_http_mfa( """Get api from DI. Args: - client (httpx.AsyncClient): httpx client - credentials (Creds): creds + credentials (MFA_HTTP_Creds): http creds + client (MFAHTTPClient): https client + settings (Settings): settings Returns: MultifactorAPI: mfa integration @@ -304,8 +330,9 @@ async def get_ldap_mfa( """Get api from DI. Args: - client (httpx.AsyncClient): httpx client - credentials (Creds): creds + credentials (MFA_LDAP_Creds): ldap creds + client (MFAHTTPClient): https client + settings (Settings): settings Returns: MultifactorAPI: mfa integration diff --git a/app/ldap_protocol/dialogue.py b/app/ldap_protocol/dialogue.py index cc7af6dca..53f9f5b92 100644 --- a/app/ldap_protocol/dialogue.py +++ b/app/ldap_protocol/dialogue.py @@ -136,7 +136,11 @@ async def get_user(self) -> UserSchema | None: @asynccontextmanager async def lock(self) -> AsyncIterator[UserSchema | None]: - """Lock session, user cannot be deleted or get while lock is set.""" + """Lock session, user cannot be deleted or get while lock is set. + + Yields: + AsyncIterator[UserSchema | None] + """ async with self._lock: yield self._user diff --git a/app/ldap_protocol/dns.py b/app/ldap_protocol/dns.py index a917b9742..2d504956e 100644 --- a/app/ldap_protocol/dns.py +++ b/app/ldap_protocol/dns.py @@ -205,15 +205,13 @@ async def setup( .where(CatalogueSetting.name == name), ) else: - session.add_all( - [ - CatalogueSetting(name=name, value=value) - for name, value in new_settings.items() - ] - ) + session.add_all([ + CatalogueSetting(name=name, value=value) + for name, value in new_settings.items() + ]) @abstractmethod - async def create_record( + async def create_record( # noqa: D102 self, hostname: str, ip: str, @@ -222,7 +220,7 @@ async def create_record( ) -> None: ... @abstractmethod - async def update_record( + async def update_record( # noqa: D102 self, hostname: str, ip: str | None, @@ -231,7 +229,7 @@ async def update_record( ) -> None: ... @abstractmethod - async def delete_record( + async def delete_record( # noqa: D102 self, hostname: str, ip: str, @@ -239,14 +237,21 @@ async def delete_record( ) -> None: ... @abstractmethod - async def get_all_records(self) -> list[DNSRecords]: ... + async def get_all_records(self) -> list[DNSRecords]: ... # noqa: D102 class DNSManager(AbstractDNSManager): """DNS server manager.""" async def _send(self, action: Message) -> None: - """Send request to DNS server.""" + """Send request to DNS server. + + Args: + action (Message): DNS message + + Raises: + DNSConnectionError: + """ if self._dns_settings.tsig_key is not None: action.use_tsig( keyring=TsigKey("zone.", self._dns_settings.tsig_key), @@ -274,7 +279,14 @@ async def create_record( @logger_wraps() async def get_all_records(self) -> list[DNSRecords]: - """Get all DNS records.""" + """Get all DNS records. 
+ + Returns: + list[DNSRecords] + + Raises: + DNSConnectionError: cant connect + """ if ( self._dns_settings.dns_server_ip is None or self._dns_settings.zone_name is None @@ -350,7 +362,7 @@ class StubDNSManager(AbstractDNSManager): """Stub client.""" @logger_wraps(is_stub=True) - async def create_record( + async def create_record( # noqa: D102 self, hostname: str, ip: str, @@ -359,7 +371,7 @@ async def create_record( ) -> None: ... @logger_wraps(is_stub=True) - async def update_record( + async def update_record( # noqa: D102 self, hostname: str, ip: str, @@ -368,7 +380,7 @@ async def update_record( ) -> None: ... @logger_wraps(is_stub=True) - async def delete_record( + async def delete_record( # noqa: D102 self, hostname: str, ip: str, @@ -377,7 +389,11 @@ async def delete_record( @logger_wraps(is_stub=True) async def get_all_records(self) -> list[DNSRecords]: - """Stub DNS manager get all records.""" + """Stub DNS manager get all records. + + Returns: + list[DNSRecords] + """ return [] diff --git a/app/ldap_protocol/kerberos/base.py b/app/ldap_protocol/kerberos/base.py index 467cacdff..fb2325c0d 100644 --- a/app/ldap_protocol/kerberos/base.py +++ b/app/ldap_protocol/kerberos/base.py @@ -49,7 +49,15 @@ async def setup_configs( krb5_config: str, kdc_config: str, ) -> None: - """Request Setup.""" + """Request Setup. + + Args: + krb5_config (str): config + kdc_config (str): config + + Raises: + KRBAPIError: not correct + """ log.info("Setting up configs") response = await self.client.post( "/setup/configs", @@ -72,7 +80,20 @@ async def setup_stash( admin_password: str, stash_password: str, ) -> None: - """Set up stash.""" + """Set up stash. + + Args: + domain (str): domain + admin_dn (str): admin_dn + services_dn (str): services_dn + krbadmin_dn (str): krbadmin_dn + krbadmin_password (str): krbadmin_password + admin_password (str): admin_password + stash_password (str): stash_password + + Raises: + KRBAPIError: not correct + """ log.info("Setting up stash") response = await self.client.post( "/setup/stash", @@ -100,7 +121,20 @@ async def setup_subtree( admin_password: str, stash_password: str, ) -> None: - """Set up subtree.""" + """Set up subtree. + + Args: + domain (str): domain + admin_dn (str): admin_dn + services_dn (str): services_dn + krbadmin_dn (str): krbadmin_dn + krbadmin_password (str): krbadmin_password + admin_password (str): admin_password + stash_password (str): stash_password. + + Raises: + KRBAPIError: not correct + """ log.info("Setting up subtree") response = await self.client.post( "/setup/subtree", @@ -165,7 +199,7 @@ async def setup( ) @abstractmethod - async def add_principal( + async def add_principal( # noqa: D102 self, name: str, password: str | None, @@ -173,27 +207,27 @@ async def add_principal( ) -> None: ... @abstractmethod - async def get_principal(self, name: str) -> dict: ... + async def get_principal(self, name: str) -> dict: ... # noqa: D102 @abstractmethod - async def del_principal(self, name: str) -> None: ... + async def del_principal(self, name: str) -> None: ... # noqa: D102 @abstractmethod - async def change_principal_password( + async def change_principal_password( # noqa: D102 self, name: str, password: str, ) -> None: ... @abstractmethod - async def create_or_update_principal_pw( + async def create_or_update_principal_pw( # noqa: D102 self, name: str, password: str, ) -> None: ... @abstractmethod - async def rename_princ(self, name: str, new_name: str) -> None: ... + async def rename_princ(self, name: str, new_name: str) -> None: ... 
# noqa: D102 @backoff.on_exception( backoff.constant, @@ -211,10 +245,14 @@ async def get_status(self, wait_for_positive: bool = False) -> bool | None: """Get status of setup. Args: - wait_for_positive (bool): wait for positive status + wait_for_positive (bool): wait for positive status\ + (Default value = False) Returns: bool | None: status or None if max tries achieved + + Raises: + ValueError: not status """ response = await self.client.get("/setup/status") status = response.json() @@ -223,20 +261,20 @@ async def get_status(self, wait_for_positive: bool = False) -> bool | None: return status @abstractmethod - async def ktadd(self, names: list[str]) -> httpx.Response: ... + async def ktadd(self, names: list[str]) -> httpx.Response: ... # noqa: D102 @abstractmethod - async def lock_principal(self, name: str) -> None: ... + async def lock_principal(self, name: str) -> None: ... # noqa: D102 @abstractmethod - async def force_princ_pw_change(self, name: str) -> None: ... + async def force_princ_pw_change(self, name: str) -> None: ... # noqa: D102 async def ldap_principal_setup(self, name: str, path: str) -> None: """LDAP principal setup. Args: - ldap_principal_name (str): ldap principal name - ldap_keytab_path (str): ldap keytab path + name (str): ldap principal name + path (str): ldap keytab path """ response = await self.client.get("/principal", params={"name": name}) if response.status_code == 200: diff --git a/app/ldap_protocol/kerberos/stub.py b/app/ldap_protocol/kerberos/stub.py index ac8ab4240..1c093927f 100644 --- a/app/ldap_protocol/kerberos/stub.py +++ b/app/ldap_protocol/kerberos/stub.py @@ -15,7 +15,7 @@ async def setup(self, *args, **kwargs) -> None: # type: ignore await super().setup(*args, **kwargs) @logger_wraps(is_stub=True) - async def add_principal( + async def add_principal( # noqa: D102 self, name: str, password: str | None, @@ -23,34 +23,34 @@ async def add_principal( ) -> None: ... @logger_wraps(is_stub=True) - async def get_principal(self, name: str) -> None: ... + async def get_principal(self, name: str) -> None: ... # noqa: D102 @logger_wraps(is_stub=True) - async def del_principal(self, name: str) -> None: ... + async def del_principal(self, name: str) -> None: ... # noqa: D102 @logger_wraps(is_stub=True) - async def change_principal_password( + async def change_principal_password( # noqa: D102 self, name: str, password: str, ) -> None: ... @logger_wraps(is_stub=True) - async def create_or_update_principal_pw( + async def create_or_update_principal_pw( # noqa: D102 self, name: str, password: str, ) -> None: ... @logger_wraps(is_stub=True) - async def rename_princ(self, name: str, new_name: str) -> None: ... + async def rename_princ(self, name: str, new_name: str) -> None: ... # noqa: D102 @logger_wraps(is_stub=True) - async def ktadd(self, names: list[str]) -> NoReturn: # noqa: ARG002 + async def ktadd(self, names: list[str]) -> NoReturn: # noqa: ARG002 D102 raise KRBAPIError @logger_wraps(is_stub=True) - async def lock_principal(self, name: str) -> None: ... + async def lock_principal(self, name: str) -> None: ... # noqa: D102 @logger_wraps(is_stub=True) - async def force_princ_pw_change(self, name: str) -> None: ... + async def force_princ_pw_change(self, name: str) -> None: ... 
# noqa: D102 diff --git a/app/ldap_protocol/kerberos/utils.py b/app/ldap_protocol/kerberos/utils.py index 9c64592e0..50e93b397 100644 --- a/app/ldap_protocol/kerberos/utils.py +++ b/app/ldap_protocol/kerberos/utils.py @@ -16,8 +16,7 @@ def logger_wraps(is_stub: bool = False) -> Callable: """Log kadmin calls. Args: - is_stub(bool): flag to change logs, defaults to False - is_stub: bool: (Default value = False) + is_stub (bool): flag to change logs (Default value = False) Returns: Callable: any method diff --git a/app/ldap_protocol/ldap_requests/add.py b/app/ldap_protocol/ldap_requests/add.py index 178f20128..c7222ebf6 100644 --- a/app/ldap_protocol/ldap_requests/add.py +++ b/app/ldap_protocol/ldap_requests/add.py @@ -84,7 +84,10 @@ def from_data(cls, data: ASN1Row) -> "AddRequest": """Deserialize. Args: - data: ASN1Row: + data (ASN1Row): data + + Returns: + AddRequest """ entry, attributes = data # type: ignore attributes = [ @@ -102,7 +105,14 @@ async def handle( # noqa: C901 ldap_session: LDAPSession, kadmin: AbstractKadmin, ) -> AsyncGenerator[AddResponse, None]: - """Add request handler.""" + """Add request handler. + + Yields: + AsyncGenerator[AddResponse, None] + + Raises: + TypeError: + """ if not ldap_session.user: yield AddResponse(**INVALID_ACCESS_RESPONSE) return @@ -391,8 +401,8 @@ def from_dict( Args: entry (str): entry - attributes: dict[str, list[str]]: - password: str | None: (Default value = None) + attributes (dict[str, list[str]]): attributes + password (str | None): (Default value = None) Returns: AddRequest: instance diff --git a/app/ldap_protocol/ldap_requests/bind.py b/app/ldap_protocol/ldap_requests/bind.py index 50dbe6343..4940af155 100644 --- a/app/ldap_protocol/ldap_requests/bind.py +++ b/app/ldap_protocol/ldap_requests/bind.py @@ -62,7 +62,13 @@ def from_data(cls, data: list[ASN1Row]) -> "BindRequest": """Get bind from data dict. Args: - data: list[ASN1Row]: + data (list[ASN1Row]): data + + Returns: + BindRequest + + Raises: + ValueError: Auth version not supported """ auth = data[2].tag_id @@ -103,7 +109,16 @@ async def is_user_group_valid( ldap_session: LDAPSession, session: AsyncSession, ) -> bool: - """Test compability.""" + """Test compability. + + Args: + user (User): db user + ldap_session (LDAPSession): ldap session + session (AsyncSession): async session + + Returns: + bool + """ return await is_user_group_valid(user, ldap_session.policy, session) @staticmethod @@ -116,9 +131,10 @@ async def check_mfa( """Check mfa api. Args: - user (User): db user - ldap_session (LDAPSession): ldap session - session (AsyncSession): db session + api (MultifactorAPI | None): MultiFactor API + identity (str): username + otp (str | None): password + policy (NetworkPolicy): network policy Returns: bool: response @@ -143,7 +159,18 @@ async def handle( settings: Settings, mfa: LDAPMultiFactorAPI, ) -> AsyncGenerator[BindResponse, None]: - """Handle bind request, check user and password.""" + """Handle bind request, check user and password. + + Args: + session (AsyncSession): async session + ldap_session (LDAPSession): ldap session + kadmin (AbstractKadmin): kadmin user + settings (Settings): settings + mfa (LDAPMultiFactorAPI): api + + Yields: + AsyncGenerator[BindResponse, None] + """ if not self.name and self.authentication_choice.is_anonymous(): yield BindResponse(result_code=LDAPCodes.SUCCESS) return @@ -242,8 +269,10 @@ def from_data(cls, data: dict[str, list[ASN1Row]]) -> "UnbindRequest": # noqa: """Unbind request has no body. 
Args: - data: dict[str: - list[ASN1Row]]: + data (dict[str, list[ASN1Row]]): data + + Returns: + UnbindRequest """ return cls() @@ -251,7 +280,14 @@ async def handle( self, ldap_session: LDAPSession, ) -> AsyncGenerator[BaseResponse, None]: - """Handle unbind request, no need to send response.""" + """Handle unbind request, no need to send response. + + Args: + ldap_session (LDAPSession): ldap session + + Yields: + AsyncGenerator[BaseResponse, None] + """ await ldap_session.delete_user() return # declare empty async generator and exit yield # type: ignore diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py index ea01dfb4c..7c3951e4b 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py @@ -83,12 +83,10 @@ def is_valid(self, user: User | None) -> bool: # noqa: ARG002 """Check if GSSAPI token is valid. Args: - User: None user: indb user + user (User | None): indb user Returns: bool: status - user: User | None: - """ return True @@ -254,6 +252,9 @@ async def step( session (AsyncSession): db session ldap_session (LDAPSession): ldap session settings (Settings): settings + + Returns: + BindResponse | None """ self._ldap_session = ldap_session @@ -298,8 +299,11 @@ async def get_user( # type: ignore """Get user. Args: - ctx (gssapi.SecurityContext): gssapi context session (AsyncSession): db session + username (str): user name + + Returns: + User | None """ ctx = self._ldap_session.gssapi_security_context if not ctx: diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py index 6e128f7a9..db0ddd8d9 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py @@ -27,11 +27,10 @@ def is_valid(self, user: User | None) -> bool: """Check if pwd is valid for user. Args: - User: None user: indb user + user (User): in db user Returns: bool: status - """ password = getattr(user, "password", None) if password is not None: @@ -55,7 +54,10 @@ def from_data(cls, data: list[ASN1Row]) -> "SaslPLAINAuthentication": """Get auth from data. Args: - data: list[ASN1Row]: + data (list[ASN1Row]): data + + Returns: + SaslPLAINAuthentication """ _, username, password = data[1].value.split("\\x00") return cls( @@ -65,5 +67,13 @@ def from_data(cls, data: list[ASN1Row]) -> "SaslPLAINAuthentication": ) async def get_user(self, session: AsyncSession, _: str) -> User: - """Get user.""" + """Get user. + + Args: + session (AsyncSession): async db session + _ (str): unused arg + + Returns: + User: user + """ return await get_user(session, self.username) # type: ignore diff --git a/app/ldap_protocol/ldap_requests/extended.py b/app/ldap_protocol/ldap_requests/extended.py index 52d444e4d..57dfb4eab 100644 --- a/app/ldap_protocol/ldap_requests/extended.py +++ b/app/ldap_protocol/ldap_requests/extended.py @@ -311,7 +311,11 @@ async def handle( kadmin: AbstractKadmin, settings: Settings, ) -> AsyncGenerator[ExtendedResponse, None]: - """Call proxy handler.""" + """Call proxy handler. 
+ + Yields: + AsyncGenerator[ExtendedResponse, None]: + """ try: response = await self.request_value.handle( ldap_session, diff --git a/app/ldap_protocol/ldap_requests/modify.py b/app/ldap_protocol/ldap_requests/modify.py index 54d8719a5..ceea2846d 100644 --- a/app/ldap_protocol/ldap_requests/modify.py +++ b/app/ldap_protocol/ldap_requests/modify.py @@ -158,7 +158,11 @@ async def handle( kadmin: AbstractKadmin, settings: Settings, ) -> AsyncGenerator[ModifyResponse, None]: - """Change request handler.""" + """Change request handler. + + Yields: + AsyncGenerator[ModifyResponse, None] + """ if not ldap_session.user: yield ModifyResponse( result_code=LDAPCodes.INSUFFICIENT_ACCESS_RIGHTS, @@ -283,10 +287,12 @@ def _check_password_change_requested( """Description. Args: - names: set[str]: - directory: Directory: - user_dir_id: int: + names (set[str]): attr names + directory (Directory): directory + user_dir_id (int): user id + Returns: + bool: """ return ( ("userpassword" in names or "unicodepwd" in names) diff --git a/app/ldap_protocol/ldap_requests/search.py b/app/ldap_protocol/ldap_requests/search.py index 7699cd34e..7db2d0f92 100644 --- a/app/ldap_protocol/ldap_requests/search.py +++ b/app/ldap_protocol/ldap_requests/search.py @@ -258,6 +258,10 @@ async def handle( Provides following responses: Entry -> Reference (optional) -> Done + + Yields: + AsyncGenerator[SearchResultDone | SearchResultReference |\ + SearchResultEntry, None] """ async with ldap_session.lock() as user: async for response in self.get_result(user, session, settings): @@ -272,9 +276,12 @@ async def get_result( """Create response. Args: - user_logged (bool): is user in session - session (AsyncSession): sa session - :yield SearchResult: search result + user (UserSchema | None): schema of user + session (AsyncSession): async session. + settings (Settings): settings. + + Yields: + AsyncGenerator[SearchResultEntry | SearchResultDone, None]: """ is_root_dse = self.scope == Scope.BASE_OBJECT and not self.base_object is_schema = self.base_object.lower() == "cn=schema" diff --git a/app/ldap_protocol/ldap_schema/object_class_crud.py b/app/ldap_protocol/ldap_schema/object_class_crud.py index f71368d15..f1125e2f0 100644 --- a/app/ldap_protocol/ldap_schema/object_class_crud.py +++ b/app/ldap_protocol/ldap_schema/object_class_crud.py @@ -43,7 +43,10 @@ def from_db(cls, object_class: ObjectClass) -> "ObjectClassSchema": """Create an instance from database. Args: - object_class: ObjectClass: + object_class (ObjectClass): source + + Returns: + ObjectClassSchema: instance of ObjectClassSchema. """ return cls( oid=object_class.oid, @@ -105,12 +108,15 @@ async def create_object_class( Args: oid (str): OID. name (str): Name. + superior_name (str | None): Parent Object Class. kind (KindType): Kind. is_system (bool): Object Class is system. attribute_type_names_must (list[str]): Attribute Types must. attribute_type_names_may (list[str]): Attribute Types may. session (AsyncSession): Database session. - :param str | None superior_name: Parent Object Class. 
+ + Raises: + ValueError: kind is not valid """ if kind not in OBJECT_CLASS_KINDS_ALLOWED: raise ValueError(f"Object class kind is not valid: {kind}.") diff --git a/app/ldap_protocol/messages.py b/app/ldap_protocol/messages.py index 8b46c9e8f..bcf935070 100644 --- a/app/ldap_protocol/messages.py +++ b/app/ldap_protocol/messages.py @@ -130,9 +130,6 @@ def from_err(cls, source: bytes, err: Exception) -> LDAPResponseMessage: Returns: LDAPResponseMessage: response with err code - - Raises: - ValueError: on invalid schema """ output = asn1todict(source) message_id = 0 @@ -164,7 +161,8 @@ async def create_response( ) -> AsyncGenerator[LDAPResponseMessage, None]: """Call unique context handler. - :yield LDAPResponseMessage: create response for context. + Yields: + LDAPResponseMessage: create response for context. """ async for response in handler(): yield LDAPResponseMessage( diff --git a/app/ldap_protocol/multifactor.py b/app/ldap_protocol/multifactor.py index 4d492068c..1744c825d 100644 --- a/app/ldap_protocol/multifactor.py +++ b/app/ldap_protocol/multifactor.py @@ -128,7 +128,11 @@ def __init__( @staticmethod def _generate_trace_id_header() -> dict[str, str]: - """Description.""" + """Generate trace id header. + + Returns: + dict[str, str] + """ return {"mf-trace-id": f"md:{uuid.uuid4()}"} @log_mfa.catch(reraise=True) @@ -149,14 +153,14 @@ async def ldap_validate_mfa( password (str): pwd policy (NetworkPolicy): policy - Raises: - MultifactorError: connect timeout - MultifactorError: invalid json - MultifactorError: Invalid status - Returns: bool: status - """ + + Raises: + ConnectTimeout: API Timeout + MFAMissconfiguredError: API Key or Secret is invalid + MultifactorError: status error + """ # noqa: DOC502 passcode = password or "m" log_mfa.debug(f"LDAP MFA request: {username}, {password}") try: @@ -216,14 +220,14 @@ async def get_create_mfa( callback_url (str): callback uri to send token uid (int): user id - Raises: - httpx.TimeoutException: on timeout - self.MultifactorError: on invalid json, Key or error status - code - Returns: str: url to open in new page - """ + + Raises: + MFAConnectError: API Timeout + MFAMissconfiguredError: API Key or Secret is invalid + MultifactorError: Incorrect resource + """ # noqa: DOC502 data = { "identity": username, "claims": { @@ -272,12 +276,12 @@ async def refresh_token(self, token: str) -> str: Args: token (str): str jwt token - Raises: - self.MultifactorError: on api err - Returns: str: new token - """ + + Raises: + MultifactorError: on api err + """ # noqa: DOC502 try: response = await self.client.post( self.settings.MFA_API_URI + self.REFRESH_URL, diff --git a/app/ldap_protocol/server.py b/app/ldap_protocol/server.py index 167847ab9..8b7052e04 100644 --- a/app/ldap_protocol/server.py +++ b/app/ldap_protocol/server.py @@ -195,9 +195,11 @@ async def recieve( Args: reader (asyncio.StreamReader): reader + writer (asyncio.StreamWriter): writer + return_addr (bool): address (Default value = "read") Returns: - tuple: ip, data + tuple[IPv4Address | IPv6Address, bytes] | bytes: """ buffer = BytesIO() addr = None diff --git a/app/ldap_protocol/session_storage.py b/app/ldap_protocol/session_storage.py index 7db56bff7..67d4770f0 100644 --- a/app/ldap_protocol/session_storage.py +++ b/app/ldap_protocol/session_storage.py @@ -317,6 +317,7 @@ async def create_ldap_session( Args: uid (int): user id + key (str): key data (dict): data, defaults to None """ diff --git a/app/ldap_protocol/utils/queries.py b/app/ldap_protocol/utils/queries.py index 35406bfb6..c8ecab3b9 
100644 --- a/app/ldap_protocol/utils/queries.py +++ b/app/ldap_protocol/utils/queries.py @@ -105,14 +105,14 @@ async def get_group(dn: str | ENTRY_TYPE, session: AsyncSession) -> Directory: """Get dir with group by dn. Args: - dn (str): Distinguished Name + dn (str| ENTRY_TYPE): Distinguished Name session (AsyncSession): SA session - Raises: - AttributeError: on invalid dn - Returns: Directory: dir with group + + Raises: + ValueError: Cannot set memberOf with base dn or group not found """ for base_directory in await get_base_directories(session): if dn_is_base_directory(base_directory, dn): @@ -242,7 +242,10 @@ def get_domain_object_class(domain: Directory) -> Iterator[Attribute]: """Get default domain attrs. Args: - domain: Directory: + domain (Directory): instance of Directory + + Yields: + Iterator[Attribute] """ for value in ["domain", "top", "domainDNS"]: yield Attribute(name="objectClass", value=value, directory=domain) @@ -344,20 +347,18 @@ async def add_lock_and_expire_attributes( """ now_with_tz = datetime.now(tz=tz) absolute_date = int(time.mktime(now_with_tz.timetuple()) / 86400) - session.add_all( - [ - Attribute( - name="nsAccountLock", - value="true", - directory=directory, - ), - Attribute( - name="shadowExpire", - value=str(absolute_date), - directory=directory, - ), - ] - ) + session.add_all([ + Attribute( + name="nsAccountLock", + value="true", + directory=directory, + ), + Attribute( + name="shadowExpire", + value=str(absolute_date), + directory=directory, + ), + ]) async def get_principal_directory( diff --git a/tests/test_api/test_shadow/test_router.py b/tests/test_api/test_shadow/test_router.py index 5e4dddafe..83ba6230a 100644 --- a/tests/test_api/test_shadow/test_router.py +++ b/tests/test_api/test_shadow/test_router.py @@ -92,10 +92,9 @@ async def test_shadow_api_whitelist_without_user_group( ) -> None: """Test shadow api whitelist without user group.""" await session.execute( - update(NetworkPolicy).values( - {NetworkPolicy.mfa_status: MFAFlags.WHITELIST} - ), - ) + update(NetworkPolicy) + .values({NetworkPolicy.mfa_status: MFAFlags.WHITELIST}), + ) # fmt: skip response = await http_client.post( "/shadow/mfa/push", @@ -114,10 +113,9 @@ async def test_shadow_api_enable_mfa( ) -> None: """Test shadow api enable mfa.""" await session.execute( - update(NetworkPolicy).values( - {NetworkPolicy.mfa_status: MFAFlags.ENABLED} - ), - ) + update(NetworkPolicy) + .values({NetworkPolicy.mfa_status: MFAFlags.ENABLED}), + ) # fmt: skip response = await http_client.post( "/shadow/mfa/push", diff --git a/tests/test_ldap/test_util/test_search.py b/tests/test_ldap/test_util/test_search.py index f16b4f6d2..cb5e029cd 100644 --- a/tests/test_ldap/test_util/test_search.py +++ b/tests/test_ldap/test_util/test_search.py @@ -339,14 +339,12 @@ async def test_ldap_search_access_control_denied( dn_list = [d for d in data if d.startswith("dn:")] assert result == 0 - assert sorted(dn_list) == sorted( - [ - "dn: dc=md,dc=test", - "dn: ou=users,dc=md,dc=test", - "dn: cn=groups,dc=md,dc=test", - "dn: cn=domain admins,cn=groups,dc=md,dc=test", - "dn: cn=developers,cn=groups,dc=md,dc=test", - "dn: cn=domain users,cn=groups,dc=md,dc=test", - "dn: cn=user_non_admin,ou=users,dc=md,dc=test", - ] - ) + assert sorted(dn_list) == sorted([ + "dn: dc=md,dc=test", + "dn: ou=users,dc=md,dc=test", + "dn: cn=groups,dc=md,dc=test", + "dn: cn=domain admins,cn=groups,dc=md,dc=test", + "dn: cn=developers,cn=groups,dc=md,dc=test", + "dn: cn=domain users,cn=groups,dc=md,dc=test", + "dn: 
cn=user_non_admin,ou=users,dc=md,dc=test", + ]) From 84c0f62356672f99d90d702489af0708455acf22 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Thu, 5 Jun 2025 13:26:51 +0300 Subject: [PATCH 06/25] refactor: docstrings task_508 --- .darglint2 | 2 + .kerberos/config_server.py | 138 ++++++++++++------ Makefile | 2 +- app/api/network/schema.py | 24 ++- app/config.py | 32 +++- app/ldap_protocol/asn1parser.py | 56 +++++-- app/ldap_protocol/dialogue.py | 10 +- app/ldap_protocol/filter_interpreter.py | 8 +- app/ldap_protocol/kerberos/client.py | 67 ++++++++- app/ldap_protocol/ldap_requests/extended.py | 68 +++++++-- app/ldap_protocol/messages.py | 14 +- app/ldap_protocol/policies/password_policy.py | 14 +- app/ldap_protocol/server.py | 24 ++- app/ldap_protocol/utils/const.py | 18 ++- app/ldap_protocol/utils/cte.py | 26 +++- app/multidirectory.py | 3 +- pyproject.toml | 9 +- 17 files changed, 415 insertions(+), 100 deletions(-) create mode 100644 .darglint2 diff --git a/.darglint2 b/.darglint2 new file mode 100644 index 000000000..0df78f458 --- /dev/null +++ b/.darglint2 @@ -0,0 +1,2 @@ +[darglint2] +docstring_style=google diff --git a/.kerberos/config_server.py b/.kerberos/config_server.py index f659eedd4..6133d8287 100644 --- a/.kerberos/config_server.py +++ b/.kerberos/config_server.py @@ -99,24 +99,29 @@ async def add_princ( ) -> None: """Create principal. - :param str name: principal - :param str | None password: if empty - uses randkey. + Args: + name (str): principal name + password (str | None): password, if empty - uses randkey. """ @abstractmethod async def get_princ(self, name: str) -> Principal | None: """Get principal. - :param str name: principal - :return: kadmin.Principal: Principal + Args: + name (str): principal name + + Returns: + Principal | None: """ @abstractmethod async def change_password(self, name: str, new_password: str) -> None: """Chanage principal's password. - :param str name: principal - :param str new_password: ... + Args: + name (str): principal name + new_password (str): password """ @abstractmethod @@ -127,45 +132,51 @@ async def create_or_update_princ_pw( ) -> None: """Create new principal or update password. - :param str name: principal - :param _type_ new_password: pw + Args: + name (str): principal name + new_password (str): password """ @abstractmethod async def del_princ(self, name: str) -> None: """Delete principal by name. - :param str name: principal + Args: + name (str): principal name """ @abstractmethod async def rename_princ(self, name: str, new_name: str) -> None: """Rename principal. - :param str name: original name - :param str new_name: new name + Args: + name (str): principal name + new_name (str): new principal name """ @abstractmethod async def ktadd(self, names: list[str], fn: str) -> None: """Create or write to keytab. - :param str name: principal - :param str fn: filename + Args: + names (list[str]): principal names + fn (str): file name """ @abstractmethod async def lock_princ(self, name: str, **dbargs) -> None: """Lock principal. - :param str name: principal + Args: + name (str): principal name """ @abstractmethod async def force_pw_principal(self, name: str, **dbargs) -> None: - """Lock principal. + """Force password principal. 
- :param str name: principal + Args: + name (str): principal name """ @@ -179,7 +190,11 @@ def __init__(self, loop: asyncio.AbstractEventLoop | None = None) -> None: self.loop = loop or asyncio.get_running_loop() async def connect(self) -> Self: - """Create threadpool for kadmin client.""" + """Create threadpool for kadmin client. + + Returns: + KAdminLocalManager: + """ self.pool = ThreadPoolExecutor(max_workers=500).__enter__() self.client = await asyncio.wait_for(self._init_client(), 40) return self @@ -203,7 +218,11 @@ async def __aexit__( await self.disconnect() async def _init_client(self) -> KAdminProtocol: - """Init kadmin local connection.""" + """Init kadmin local connection. + + Returns: + KAdminProtocol: + """ return await self.loop.run_in_executor(self.pool, kadmv.local) async def add_princ( @@ -214,8 +233,9 @@ async def add_princ( ) -> None: """Create principal. - :param str name: principal - :param str | None password: if empty - uses randkey. + Args: + name (str): principal name + password (str): password, if empty - uses randkey. """ await self.loop.run_in_executor( self.pool, @@ -247,8 +267,11 @@ async def _get_raw_principal(self, name: str) -> PrincipalProtocol: async def get_princ(self, name: str) -> Principal: """Get principal. - :param str name: principal - :return: kadmin.Principal: Principal + Args: + name (str): principal name + + Returns: + Principal: Principal kadmin object """ principal = await self._get_raw_principal(name) return Principal.model_validate(principal, from_attributes=True) @@ -256,8 +279,9 @@ async def get_princ(self, name: str) -> Principal: async def change_password(self, name: str, new_password: str) -> None: """Chanage principal's password. - :param str name: principal - :param str new_password: ... + Args: + name (str): principal name + new_password (str): password """ princ = await self._get_raw_principal(name) await self.loop.run_in_executor( @@ -271,10 +295,11 @@ async def create_or_update_princ_pw( name: str, new_password: str, ) -> None: - """Create new principal or update password. + """Create new or update password principal. - :param str name: principal - :param _type_ new_password: ... + Args: + name (str): principal name + new_password (str): password """ try: await self.change_password(name, new_password) @@ -284,15 +309,17 @@ async def create_or_update_princ_pw( async def del_princ(self, name: str) -> None: """Delete principal by name. - :param str name: principal + Args: + name (str): principal name """ await self.loop.run_in_executor(self.pool, self.client.delprinc, name) async def rename_princ(self, name: str, new_name: str) -> None: """Rename principal. - :param str name: original name - :param str new_name: new name + Args: + name (str): Principal name. + new_name (str): Principal new name. """ await self.loop.run_in_executor( self.pool, @@ -304,9 +331,12 @@ async def rename_princ(self, name: str, new_name: str) -> None: async def ktadd(self, names: list[str], fn: str) -> None: """Create or write to keytab. - :param str name: principal - :param str fn: filename - :raises self.PrincipalNotFoundError: on not found princ + Args: + names (list[str]): principal names + fn (str): file name + + Raises: + PrincipalNotFoundError: Principal not found """ principals = [await self._get_raw_principal(name) for name in names] if not all(principals): @@ -318,16 +348,18 @@ async def ktadd(self, names: list[str], fn: str) -> None: async def lock_princ(self, name: str, **dbargs) -> None: """Lock princ. 
- :param str name: upn + Args: + name (str): principal names """ princ = await self._get_raw_principal(name) princ.expire = "Now" await self.loop.run_in_executor(self.pool, princ.commit) async def force_pw_principal(self, name: str, **dbargs) -> None: - """Lock princ. + """Force password principal. - :param str name: upn + Args: + name (str): principal names """ princ = await self._get_raw_principal(name) princ.pwexpire = "Now" @@ -365,7 +397,11 @@ def get_kadmin() -> KAdminLocalManager: def handle_db_error(request: Request, exc: BaseException): # noqa: ARG001 - """Handle duplicate.""" + """Handle duplicate. + + Raises: + HTTPException: Database Error + """ raise HTTPException( status.HTTP_424_FAILED_DEPENDENCY, detail="Database Error", @@ -373,7 +409,11 @@ def handle_db_error(request: Request, exc: BaseException): # noqa: ARG001 def handle_duplicate(request: Request, exc: BaseException): # noqa: ARG001 - """Handle duplicate.""" + """Handle duplicate. + + Raises: + HTTPException: Principal already exists + """ raise HTTPException( status.HTTP_409_CONFLICT, detail="Principal already exists", @@ -381,7 +421,11 @@ def handle_duplicate(request: Request, exc: BaseException): # noqa: ARG001 def handle_not_found(request: Request, exc: BaseException): # noqa: ARG001 - """Handle duplicate.""" + """Handle duplicate. + + Raises: + HTTPException: Principal does not exist + """ raise HTTPException( status.HTTP_404_NOT_FOUND, detail="Principal does not exist", @@ -416,6 +460,9 @@ async def run_setup_stash(schema: ConfigSchema) -> None: Args: schema (ConfigSchema): Configuration schema for stash setup. + + Raises: + HTTPException: Failed stash """ proc = await asyncio.create_subprocess_exec( "kdb5_ldap_util", @@ -600,7 +647,6 @@ async def rename_princ( name (str): Principal name. new_name (str): Principal new name. """ - """""" await kadmin.rename_princ(name, new_name) @@ -661,6 +707,12 @@ def get_status(request: Request) -> bool: true - is ready false - not set + + Args: + request (Request): http request + + Returns: + bool """ kadmind = getattr(request.app.state, "kadmind", None) @@ -668,7 +720,11 @@ def get_status(request: Request) -> bool: def create_app() -> FastAPI: - """Create FastAPI app.""" + """Create FastAPI app. + + Returns: + FastAPI: web app + """ app = FastAPI( name="KadminMultiDirectory", title="KadminMultiDirectory", diff --git a/Makefile b/Makefile index 7da457e64..c8851e65e 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,7 @@ help: ## show help message before_pr: ruff format . --preview - ruff check . --fix --unsafe-fixes --select=DOC501 --preview + ruff check . --preview --fix --unsafe-fixes ruff format . --preview mypy . diff --git a/app/api/network/schema.py b/app/api/network/schema.py index cc2a5e8f8..e2fb03d2e 100644 --- a/app/api/network/schema.py +++ b/app/api/network/schema.py @@ -57,8 +57,13 @@ def validate_group(cls, groups: list[str]) -> list[str]: """Description. Args: - groups: list[str]: + groups (list[str]): groups names + Returns: + list[str]: + + Raises: + ValueError: Invalid DN """ if not groups: return groups @@ -73,7 +78,13 @@ def validate_mfa_group(cls, mfa_groups: list[str]) -> list[str]: """Syka. 
Args: - mfa_groups: list[str]: + mfa_groups (list[str]): mfa groups names + + Returns: + list[str]: + + Raises: + ValueError: Invalid DN """ if not mfa_groups: return mfa_groups @@ -175,7 +186,14 @@ class PolicyUpdate(BaseModel, NetmasksMixin): @model_validator(mode="after") def check_passwords_match(self) -> Self: - """Validate if all fields are empty.""" + """Validate if all fields are empty. + + Returns: + PolicyUpdate: + + Raises: + ValueError: Name, netmasks and group cannot be empty + """ if not self.name and not self.netmasks and not self.groups: raise ValueError("Name, netmasks and group cannot be empty") diff --git a/app/config.py b/app/config.py index 4a823a840..178803f9c 100644 --- a/app/config.py +++ b/app/config.py @@ -24,7 +24,11 @@ def _get_vendor_version() -> str: - """Description.""" + """Description. + + Returns: + str: + """ with open("/pyproject.toml", "rb") as f: return tomllib.load(f)["tool"]["poetry"]["version"] @@ -122,7 +126,13 @@ def create_tz(cls, tz: str) -> ZoneInfo: # noqa: N805 """Get timezone from a string. Args: - tz: str: + tz (str): timezone + + Returns: + ZoneInfo: + + Raises: + ValueError: timezone info not found """ try: value = ZoneInfo(tz) @@ -147,7 +157,11 @@ def MFA_API_URI(self) -> str: # noqa: N802 return "https://api.multifactor.ru" def get_copy_4_tls(self) -> "Settings": - """Create a copy for TLS bind.""" + """Create a copy for TLS bind. + + Returns: + Settings: + """ from copy import copy tls_settings = copy(self) @@ -156,10 +170,18 @@ def get_copy_4_tls(self) -> "Settings": return tls_settings def check_certs_exist(self) -> bool: - """Check if certs exist.""" + """Check if certs exist. + + Returns: + bool + """ return os.path.exists(self.SSL_CERT) and os.path.exists(self.SSL_KEY) @classmethod def from_os(cls) -> "Settings": - """Get cls from environ.""" + """Get cls from environ. + + Returns: + Settings: + """ return Settings(**os.environ) diff --git a/app/ldap_protocol/asn1parser.py b/app/ldap_protocol/asn1parser.py index 91327c564..71b8f8e55 100644 --- a/app/ldap_protocol/asn1parser.py +++ b/app/ldap_protocol/asn1parser.py @@ -77,13 +77,23 @@ def from_tag(cls, tag: Tag, value: T) -> "ASN1Row": """Create row from tag. Args: - tag: Tag: - value: T: + tag (Tag): instance of Tag + value (T): any value + + Returns: + ASN1Row """ return cls(tag.cls, tag.nr, value) def _handle_extensible_match(self) -> str: - """Handle extensible match filters.""" + """Handle extensible match filters. + + Returns: + str: match + + Raises: + TypeError: + """ oid = attribute = value = None dn_attributes = False @@ -126,7 +136,14 @@ def _handle_extensible_match(self) -> str: return f"({match})" def _handle_substring(self) -> str: - """Process and format substring operations for LDAP.""" + """Process and format substring operations for LDAP. + + Returns: + str: + + Raises: + ValueError: + """ value = ( self.value.decode(errors="replace") if isinstance(self.value, bytes) @@ -152,7 +169,14 @@ def serialize(self, obj: "ASN1Row | T | None" = None) -> str: # noqa: C901 substring matches. Args: - obj: "ASN1Row | T | None": (Default value = None) + obj ("ASN1Row | T | None"): (Default value = None) + + Returns: + str: + + Raises: + ValueError: + TypeError: """ if obj is None: obj = self @@ -238,6 +262,8 @@ def to_ldap_filter(self) -> str: The method recursively serializes ASN.1 rows into the LDAP filter format based on tag IDs and class IDs. + Returns: + str: """ return self.serialize() @@ -249,8 +275,11 @@ def value_to_string( """Convert value to string. 
Args: - tag: Tag: - value: str | bytes | int | bool: + tag (Tag): instance of Tag + value (str | bytes | int | bool): value + + Returns: + bytes | str | int: """ if tag.nr == Numbers.Integer: with suppress(ValueError): @@ -272,7 +301,10 @@ def asn1todict(decoder: Decoder) -> list[ASN1Row]: """Recursively collect ASN.1 data to list of ASNRows. Args: - decoder: Decoder: + decoder (Decoder): instance of Decoder + + Returns: + list[ASN1Row]: """ out = [] while not decoder.eof(): @@ -298,7 +330,13 @@ def _validate_oid(oid: str) -> str: """Validate ldap oid with regex. Args: - oid: str: + oid (str): oid + + Returns: + str: + + Raises: + ValueError: Invalid LDAPOID """ if not Encoder._re_oid.match(oid): raise ValueError("Invalid LDAPOID") diff --git a/app/ldap_protocol/dialogue.py b/app/ldap_protocol/dialogue.py index 53f9f5b92..2686ad8cc 100644 --- a/app/ldap_protocol/dialogue.py +++ b/app/ldap_protocol/dialogue.py @@ -157,7 +157,11 @@ async def validate_conn( ip: IPv4Address | IPv6Address, session: AsyncSession, ) -> None: - """Validate network policies.""" + """Validate network policies. + + Raises: + PermissionError: + """ policy = await self._get_policy(ip, session) # type: ignore if policy is not None: self.policy = policy @@ -197,6 +201,10 @@ async def ensure_session_exists(self) -> NoReturn: """Ensure session exists in storage. Does nothing if anonymous, wait 30s and if user bound, check it. + + Raises: + AttributeError: Storage is not set + ConnectionAbortedError: Session missing in storage """ if self.storage is None: raise AttributeError("Storage is not set") diff --git a/app/ldap_protocol/filter_interpreter.py b/app/ldap_protocol/filter_interpreter.py index 1dba50202..f40910095 100644 --- a/app/ldap_protocol/filter_interpreter.py +++ b/app/ldap_protocol/filter_interpreter.py @@ -131,7 +131,13 @@ def _get_filter_function(column: str) -> Callable[..., UnaryExpression]: """Retrieve the appropriate filter function based on the attribute. Args: - column: str: + column (str): column name + + Returns: + Callable[..., UnaryExpression]: + + Raises: + ValueError: Incorrect attribute specified """ if len(column.split(":")) == 1: attribute = column diff --git a/app/ldap_protocol/kerberos/client.py b/app/ldap_protocol/kerberos/client.py index 28ab0bd71..754094dda 100644 --- a/app/ldap_protocol/kerberos/client.py +++ b/app/ldap_protocol/kerberos/client.py @@ -20,7 +20,16 @@ async def add_principal( password: str | None, timeout: int = 1, ) -> None: - """Add request.""" + """Add principal. + + Args: + name (str): principal name + password (str | None): password + timeout (int): timeout + + Raises: + KRBAPIError: + """ response = await self.client.post( "principal", json={"name": name, "password": password}, @@ -32,7 +41,17 @@ async def add_principal( @logger_wraps() async def get_principal(self, name: str) -> dict: - """Get request.""" + """Get principal. + + Args: + name (str): principal name + + Returns: + dict + + Raises: + KRBAPIError: + """ response = await self.client.get("principal", params={"name": name}) if response.status_code != 200: raise KRBAPIError(response.text) @@ -41,7 +60,14 @@ async def get_principal(self, name: str) -> dict: @logger_wraps() async def del_principal(self, name: str) -> None: - """Delete principal.""" + """Delete principal. 
+ + Args: + name (str): principal name + + Raises: + KRBAPIError: + """ response = await self.client.delete("principal", params={"name": name}) if response.status_code != 200: raise KRBAPIError(response.text) @@ -52,7 +78,15 @@ async def change_principal_password( name: str, password: str, ) -> None: - """Change password request.""" + """Change principal password. + + Args: + name (str): principal name + password: password + + Raises: + KRBAPIError: + """ response = await self.client.patch( "principal", json={"name": name, "password": password}, @@ -66,7 +100,15 @@ async def create_or_update_principal_pw( name: str, password: str, ) -> None: - """Change password request.""" + """Create or update principal password. + + Args: + name (str): principal name + password: password. + + Raises: + KRBAPIError: + """ response = await self.client.post( "/principal/create_or_update", json={"name": name, "password": password}, @@ -76,7 +118,15 @@ async def create_or_update_principal_pw( @logger_wraps() async def rename_princ(self, name: str, new_name: str) -> None: - """Rename request.""" + """Rename principal. + + Args: + name (str): current principal name + new_name: (str): new principal name + + Raises: + KRBAPIError: + """ response = await self.client.put( "principal", json={"name": name, "new_name": new_name}, @@ -88,10 +138,13 @@ async def ktadd(self, names: list[str]) -> httpx.Response: """Ktadd build request for stream and return response. Args: - names (list[str]): principals + names (list[str]): principal names Returns: httpx.Response: stream + + Raises: + KRBAPIError: principal not found """ request = self.client.build_request( "POST", diff --git a/app/ldap_protocol/ldap_requests/extended.py b/app/ldap_protocol/ldap_requests/extended.py index 57dfb4eab..e753a6cc7 100644 --- a/app/ldap_protocol/ldap_requests/extended.py +++ b/app/ldap_protocol/ldap_requests/extended.py @@ -62,7 +62,10 @@ def _decode_value(data: ASN1Row) -> ASN1Row: """Description. Args: - data: ASN1Row: + data: ASN1Row + + Returns: + ASN1Row """ dec = Decoder() @@ -89,7 +92,11 @@ class WhoAmIResponse(BaseExtendedResponseValue): authz_id: str def get_value(self) -> str | None: - """Get authz id.""" + """Get authz id. + + Returns: + str | None + """ return self.authz_id @@ -107,7 +114,10 @@ def from_data(cls, data: ASN1Row) -> "WhoAmIRequestValue": # noqa: ARG003 """Create model from data, WhoAmIRequestValue data is empty. Args: - data: ASN1Row: + data: ASN1Row + + Returns: + WhoAmIRequestValue """ return cls() @@ -132,7 +142,11 @@ class StartTLSResponse(BaseExtendedResponseValue): """Start tls response.""" def get_value(self) -> str | None: - """Get response value.""" + """Get response value. + + Returns: + str | None + """ return "" @@ -148,7 +162,20 @@ async def handle( kadmin: AbstractKadmin, # noqa: ARG002 settings: Settings, ) -> StartTLSResponse: - """Update password of current or selected user.""" + """Update password of current or selected user. + + Args: + ldap_session: LDAPSession + session: AsyncSession + kadmin: AbstractKadmin + settings: Settings + + Returns: + StartTLSResponse + + Raises: + PermissionError: No TLS + """ if settings.USE_CORE_TLS: return StartTLSResponse() @@ -159,7 +186,10 @@ def from_data(cls, data: ASN1Row) -> "StartTLSRequestValue": # noqa: ARG003 """Create model from data, decoded from responseValue bytes. 
Args: - data: ASN1Row: + data: ASN1Row + + Returns: + StartTLSRequestValue """ return cls() @@ -174,7 +204,11 @@ class PasswdModifyResponse(BaseExtendedResponseValue): gen_passwd: str = "" def get_value(self) -> str | None: - """Description.""" + """Description. + + Returns: + str | None + """ return self.gen_passwd @@ -206,7 +240,20 @@ async def handle( kadmin: AbstractKadmin, settings: Settings, ) -> PasswdModifyResponse: - """Update password of current or selected user.""" + """Update password of current or selected user. + + Args: + ldap_session: LDAPSession + session: AsyncSession + kadmin: AbstractKadmin + settings: Settings + + Returns: + PasswdModifyResponse + + Raises: + PermissionError: + """ if not settings.USE_CORE_TLS: raise PermissionError("TLS required") @@ -267,7 +314,10 @@ def from_data(cls, data: ASN1Row) -> "PasswdModifyRequestValue": """Create model from data, decoded from responseValue bytes. Args: - data: ASN1Row: + data: ASN1Row + + Returns: + PasswdModifyRequestValue """ d: list = cls._decode_value(data) # type: ignore if len(d) == 3: diff --git a/app/ldap_protocol/messages.py b/app/ldap_protocol/messages.py index bcf935070..f6f76c6d2 100644 --- a/app/ldap_protocol/messages.py +++ b/app/ldap_protocol/messages.py @@ -46,7 +46,11 @@ class LDAPResponseMessage(LDAPMessage): context: SerializeAsAny[BaseResponse] def encode(self) -> bytes: - """Encode message to asn1.""" + """Encode message to asn1. + + Returns: + bytes + """ enc = Encoder() enc.start() enc.enter(Numbers.Sequence) @@ -79,7 +83,13 @@ def from_bytes(cls, source: bytes) -> "LDAPRequestMessage": """Create message from bytes. Args: - source: bytes: + source: bytes + + Returns: + LDAPRequestMessage + + Raises: + ValueError: incorrect schema """ dec = Decoder() dec.start(source) diff --git a/app/ldap_protocol/policies/password_policy.py b/app/ldap_protocol/policies/password_policy.py index 590cc88a0..b69150f75 100644 --- a/app/ldap_protocol/policies/password_policy.py +++ b/app/ldap_protocol/policies/password_policy.py @@ -72,7 +72,14 @@ class PasswordPolicySchema(BaseModel): @model_validator(mode="after") def _validate_minimum_pwd_age(self) -> "PasswordPolicySchema": - """Description.""" + """Description. + + Returns: + self + + Raises: + ValueError: not valid + """ if self.minimum_password_age_days > self.maximum_password_age_days: raise ValueError( "Minimum password age days must be " @@ -84,10 +91,13 @@ async def create_policy_settings(self, session: AsyncSession) -> Self: """Create policies settings. Args: - session (AsyncSession): db session + session: db session Returns: PasswordPolicySchema: password policy. + + Raises: + PermissionError: Policy already exists. """ existing_policy = await session.scalar(select(exists(PasswordPolicy))) if existing_policy: diff --git a/app/ldap_protocol/server.py b/app/ldap_protocol/server.py index 8b7052e04..5b3ced655 100644 --- a/app/ldap_protocol/server.py +++ b/app/ldap_protocol/server.py @@ -124,7 +124,11 @@ async def __call__( await writer.wait_closed() def _load_ssl_context(self) -> None: - """Load SSL context for LDAPS.""" + """Load SSL context for LDAPS. + + Raises: + SystemExit: + """ if self.settings.USE_CORE_TLS and self.settings.LDAP_LOAD_SSL_CERT: if not self.settings.check_certs_exist(): log.critical("Certs not found, exiting...") @@ -144,12 +148,14 @@ def _extract_proxy_protocol_address( """Get ip from proxy protocol header. 
Args: - data(bytes): data - data: bytes: - writer: asyncio.StreamWriter: + data (bytes): data + writer (asyncio.StreamWriter): writer Returns: tuple: ip, data + + Raises: + ValueError: Invalid source address """ peername = ":".join(map(str, writer.get_extra_info("peername"))) peer_addr = ip_address(peername.split(":")[0]) @@ -317,6 +323,10 @@ async def _unwrap_request( Returns: bytes: unwrapped data + + Raises: + ConnectionAbortedError: SASL buffer length mismatch or\ + GSSAPI security context not found """ if ldap_session.gssapi_security_layer in ( GSSAPISL.INTEGRITY_PROTECTION, @@ -388,7 +398,11 @@ async def _handle_single_response( writer: asyncio.StreamWriter, container: AsyncContainer, ) -> None: - """Get message from queue and handle it.""" + """Get message from queue and handle it. + + Raises: + RuntimeError: any error + """ ldap_session: LDAPSession = await container.get(LDAPSession) addr = str(ldap_session.ip) diff --git a/app/ldap_protocol/utils/const.py b/app/ldap_protocol/utils/const.py index 00ecb5239..6c7e7f745 100644 --- a/app/ldap_protocol/utils/const.py +++ b/app/ldap_protocol/utils/const.py @@ -16,7 +16,13 @@ def _type_validate_entry(entry: str) -> str: """Description. Args: - entry: str: + entry (str): entry name + + Returns: + str: entry name + + Raises: + ValueError: Invalid entry name """ if validate_entry(entry): return entry @@ -32,11 +38,17 @@ def _type_validate_email(email: str) -> str: """Description. Args: - email: str: + email (str): email address + + Returns: + str: email address + + Raises: + ValueError: Invalid email """ if EMAIL_RE.fullmatch(email): return email - raise ValueError(f"Invalid entry name {email}") + raise ValueError(f"Invalid email {email}") ENTRY_TYPE = Annotated[str, AfterValidator(_type_validate_entry)] diff --git a/app/ldap_protocol/utils/cte.py b/app/ldap_protocol/utils/cte.py index c5c19b395..628956142 100644 --- a/app/ldap_protocol/utils/cte.py +++ b/app/ldap_protocol/utils/cte.py @@ -77,7 +77,10 @@ def find_members_recursive_cte(dn: str) -> CTE: result will be as follows: user1, user2, group2, user3, group3, user4. Args: - dn: str: + dn (str): domain name + + Returns: + CTE: Common Table Expression """ directory_hierarchy = ( select(Directory.id.label("directory_id"), Group.id.label("group_id")) @@ -104,7 +107,7 @@ def find_members_recursive_cte(dn: str) -> CTE: return directory_hierarchy.union_all(recursive_part) -def find_root_group_recursive_cte(dn_list: list) -> CTE: +def find_root_group_recursive_cte(dn_list: list[str]) -> CTE: """Create CTE to filter directory root group. The query translates to the following SQL: @@ -138,7 +141,10 @@ def find_root_group_recursive_cte(dn_list: list) -> CTE: user4. Args: - dn_list: list: + dn_list (list[str]): domain names + + Returns: + CTE: Common Table Expression """ directory_hierarchy = ( select( @@ -180,6 +186,16 @@ async def get_members_root_group( In the case of a recursive search through the specified user4, the search result will be as follows: group1, user1, user2, group2, user3, group3, user4. 
+ + Args: + dn (str): domain name + session (AsyncSession): async session + + Returns: + list[Directory]: list of directories + + Raises: + RuntimeError: not found directory """ cte = find_root_group_recursive_cte([dn]) result = await session.scalars(select(cte.c.directory_id)) @@ -209,7 +225,7 @@ async def get_members_root_group( select(Directory) .where( or_( - *[Directory.id == dir_id for dir_id in dir_ids], + *[Directory.id == dir_id for dir_id in dir_ids] ) ) ) # fmt: skip @@ -230,7 +246,7 @@ async def get_all_parent_group_directories( session (AsyncSession): session Returns: - set[Directory]: all groups and their parent group directories + AsyncScalarResult | None: all groups and their parent group directories """ dn_list = [group.directory.path_dn for group in groups] diff --git a/app/multidirectory.py b/app/multidirectory.py index 2174c989a..f77360743 100644 --- a/app/multidirectory.py +++ b/app/multidirectory.py @@ -152,8 +152,7 @@ def create_prod_app( """Create production app with container. Args: - factory: Callable[[Settings]: - FastAPI]: (Default value = _create_basic_app) + factory (Callable[[Settings], FastAPI]): _create_basic_app settings: Settings | None: (Default value = None) Returns: diff --git a/pyproject.toml b/pyproject.toml index 73cf925f8..1a51b8896 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -98,14 +98,13 @@ output-format = "grouped" unsafe-fixes = true [tool.ruff.format] +preview = true docstring-code-format = true docstring-code-line-length = 79 line-ending = "lf" skip-magic-trailing-comma = false # default: false [tool.ruff.lint] -extend-select = [] - select = [ "F", # Pyflakes. Must have "E", # pycodestyle (Error), check tool.ruff.lint.pycodestyle. Must have @@ -115,6 +114,7 @@ select = [ "N", # pep8-naming "A", # flake8 builtin-attribute-shadowing "D", # pydocstyle, check tool.ruff.lint.pydocstyle + "DOC", # pydoclint TODO uncomment, ruff fix and fix error "UP", # pyupgrade, check tool.ruff.lint.pyupgrade. Must have "ANN", # flake8-annotations, check tool.ruff.lint.flake8-annotations "ASYNC", # flake8-async @@ -136,9 +136,9 @@ select = [ "ERA", # eradicate # "PGH", # pygrep-hooks TODO does we need it? uncomment, ruff fix and fix error # "PL", # Pylint TODO uncomment, ruff fix and fix error - "DOC", # pydoclint TODO uncomment, ruff fix and fix error "RUF", # Ruff-specific rules "FAST", # FastAPI checks + # "FURB", # Refurb ] # Gradually remove all values marked 'TODO' and fix errors. @@ -159,6 +159,7 @@ ignore = [ "TC003", # this is necessary. "B905", # this is necessary. 
get-attr-with-constant "RUF029", + "DOC201", # TODO delete it and fix ] fixable = ["ALL"] @@ -167,7 +168,7 @@ unfixable = [ ] [tool.ruff.lint.pydocstyle] -convention = "google" +convention = "google" # Google Python Style Guide - Docstrings: https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings ignore-var-parameters = true [tool.ruff.lint.flake8-unused-arguments] From dc3b99688902d26659bcef3a89d983add461f99b Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Thu, 5 Jun 2025 15:22:07 +0300 Subject: [PATCH 07/25] refactor: fix format ":param" task_508 --- app/alembic/env.py | 2 +- app/api/auth/schema.py | 10 ++-- app/api/auth/utils.py | 3 +- app/config.py | 2 +- .../scripts/principal_block_user_sync.py | 3 +- app/ldap_protocol/dialogue.py | 2 +- app/ldap_protocol/filter_interpreter.py | 40 ++++++---------- app/ldap_protocol/kerberos/utils.py | 2 +- .../ldap_requests/bind_methods/base.py | 8 +--- .../ldap_requests/bind_methods/sasl_gssapi.py | 10 ++-- app/ldap_protocol/ldap_requests/modify.py | 3 +- app/ldap_protocol/ldap_requests/search.py | 8 ++-- app/ldap_protocol/ldap_responses.py | 8 ++-- .../ldap_schema/attribute_type_crud.py | 4 +- app/ldap_protocol/messages.py | 6 +-- app/ldap_protocol/policies/access_policy.py | 12 ++--- app/ldap_protocol/policies/password_policy.py | 23 +++------ app/ldap_protocol/server.py | 22 ++++----- app/ldap_protocol/session_storage.py | 28 +++++------ app/ldap_protocol/user_account_control.py | 3 +- app/ldap_protocol/utils/helpers.py | 47 ++++++++----------- app/ldap_protocol/utils/pagination.py | 2 +- app/ldap_protocol/utils/queries.py | 7 ++- app/multidirectory.py | 6 +-- app/schedule.py | 2 +- app/security.py | 9 ++-- tests/conftest.py | 18 +++---- tests/test_api/test_main/test_kadmin.py | 4 +- 28 files changed, 120 insertions(+), 174 deletions(-) diff --git a/app/alembic/env.py b/app/alembic/env.py index 85d4bcd36..72afc84cf 100644 --- a/app/alembic/env.py +++ b/app/alembic/env.py @@ -25,7 +25,7 @@ def do_run_migrations(connection: AsyncConnection): """Run sync migrations. Args: - connection: AsyncConnection: + connection (AsyncConnection): async connection """ context.configure( connection=connection, diff --git a/app/api/auth/schema.py b/app/api/auth/schema.py index fe51b727c..e4b7a7666 100644 --- a/app/api/auth/schema.py +++ b/app/api/auth/schema.py @@ -66,17 +66,17 @@ class SetupRequest(BaseModel): password: str @field_validator("domain") - def validate_domain(cls, v: str) -> str: # noqa + def validate_domain(cls, v: str) -> str: # noqa FIXME why noqa? """Description. Args: - v: str: - - Raises: - ValueError: If the domain is invalid. + v (str): value Returns: str: Validated domain string. + + Raises: + ValueError: If the domain is invalid. """ if re.match(_domain_re, v) is None: raise ValueError("Invalid domain value") diff --git a/app/api/auth/utils.py b/app/api/auth/utils.py index d08acff46..a26b1672a 100644 --- a/app/api/auth/utils.py +++ b/app/api/auth/utils.py @@ -42,8 +42,7 @@ def get_user_agent_from_request(request: Request) -> str: """Get user agent from request. Args: - request(Request): The incoming request object. - request: Request: + request (Request): The incoming request object. Returns: str: The user agent header. 
diff --git a/app/config.py b/app/config.py index 178803f9c..14a9df4f8 100644 --- a/app/config.py +++ b/app/config.py @@ -34,7 +34,7 @@ def _get_vendor_version() -> str: class Settings(BaseModel): - """Settigns with database dsn.""" + """Settings with database dsn.""" DOMAIN: str diff --git a/app/extra/scripts/principal_block_user_sync.py b/app/extra/scripts/principal_block_user_sync.py index c77f4de24..a6e3f41ea 100644 --- a/app/extra/scripts/principal_block_user_sync.py +++ b/app/extra/scripts/principal_block_user_sync.py @@ -92,8 +92,7 @@ def _find_krb_exp_attr(directory: Directory) -> Attribute | None: """Find krbprincipalexpiration attribute in directory. Args: - directory(Directory): the directory object - directory: Directory: + directory (Directory): the directory object Returns: Atrribute | None: the attribute with the name diff --git a/app/ldap_protocol/dialogue.py b/app/ldap_protocol/dialogue.py index 2686ad8cc..4f6a5edb1 100644 --- a/app/ldap_protocol/dialogue.py +++ b/app/ldap_protocol/dialogue.py @@ -106,7 +106,7 @@ def user(self, user: User) -> None: """Description. Args: - user: User: + user (User): instance of User """ raise NotImplementedError( diff --git a/app/ldap_protocol/filter_interpreter.py b/app/ldap_protocol/filter_interpreter.py index f40910095..4296fe3e4 100644 --- a/app/ldap_protocol/filter_interpreter.py +++ b/app/ldap_protocol/filter_interpreter.py @@ -52,10 +52,13 @@ def _from_filter( """Description. Args: - model: type: - item: ASN1Row: - attr: str: - right: ASN1Row: + model (type): Any Model + item (ASN1Row): Row with metadata + attr (str): Attribute name + right (ASN1Row): Row with metadata + + Returns: + UnaryExpression """ is_substring = item.tag_id == TagNumbers.SUBSTRING col = getattr(model, attr) @@ -76,7 +79,7 @@ def _filter_memberof(dn: str) -> UnaryExpression: """Retrieve query conditions with the memberOF attribute. Args: - dn: str: + dn (str): any DN, dn syntax """ group_id_subquery = ( select(Group.id) @@ -98,7 +101,7 @@ def _filter_member(dn: str) -> UnaryExpression: """Retrieve query conditions with the member attribute. Args: - dn: str: + dn (str): any DN, dn syntax """ user_id_subquery = ( select(User.id) @@ -120,7 +123,7 @@ def _recursive_filter_memberof(dn: str) -> UnaryExpression: """Retrieve query conditions with the memberOF attribute(recursive). Args: - dn: str: + dn (str): any DN, dn syntax """ cte = find_members_recursive_cte(dn) @@ -252,13 +255,6 @@ def _from_str_filter( is_substring: bool, item: Filter, ) -> UnaryExpression: - """Description. - - Args: - model: type: - is_substring: bool: - item: Filter: - """ col = getattr(model, item.attr) if is_substring: @@ -269,21 +265,12 @@ def _from_str_filter( def _api_filter(item: Filter) -> UnaryExpression: - """Retrieve query conditions based on the specified LDAP attribute. - - Args: - item: Filter: - """ + """Retrieve query conditions based on the specified LDAP attribute.""" filter_func = _get_filter_function(item.attr) return filter_func(item.val) def _cast_filt_item(item: Filter) -> UnaryExpression | ColumnElement: - """Description. - - Args: - item: Filter: - """ if item.val == "*": if item.attr in User.search_fields: return not_(eq(getattr(User, item.attr), None)) @@ -316,7 +303,10 @@ def cast_str_filter2sql(expr: Filter) -> UnaryExpression | ColumnElement: """Cast ldap filter to sa query. 
Args: - expr: Filter: + expr (Filter): LDAP Base filter + + Returns: + UnaryExpression | ColumnElement: """ if expr.type == "group": conditions = [] diff --git a/app/ldap_protocol/kerberos/utils.py b/app/ldap_protocol/kerberos/utils.py index 50e93b397..1e43414fa 100644 --- a/app/ldap_protocol/kerberos/utils.py +++ b/app/ldap_protocol/kerberos/utils.py @@ -26,7 +26,7 @@ def wrapper(func: Callable) -> Callable: """Description. Args: - func: Callable: + func (Callable): any function Returns: Callable: wrapped function diff --git a/app/ldap_protocol/ldap_requests/bind_methods/base.py b/app/ldap_protocol/ldap_requests/bind_methods/base.py index ba7093c9a..3227c892b 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/base.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/base.py @@ -60,14 +60,10 @@ def get_bad_response(error_message: LDAPBindErrors) -> BindResponse: """Generate BindResponse object with an invalid credentials error. Args: - error_message(LDAPBindErrors): Error message to include in the + error_message (LDAPBindErrors): Error message to include in the response - error_message: LDAPBindErrors: Returns: - BindResponse: A response object with the result code set to - BindResponse: A response object with the result code set to - INVALID_CREDENTIALS, an empty matchedDN, and the provided error BindResponse: A response object with the result code set to INVALID_CREDENTIALS, an empty matchedDN, and the provided error message @@ -95,7 +91,7 @@ def is_valid(self, user: User) -> bool: """Validate state. Args: - user: User: + user (User): User directory """ @abstractmethod diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py index 7c3951e4b..a92891314 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py @@ -169,9 +169,7 @@ def _validate_security_layer(self, client_layer: GSSAPISL) -> bool: """Validate security layer. Args: - client_layer(int): client security layer - settings(Settings): settings - client_layer: GSSAPISL: + client_layer (int): client security layer Returns: bool: validate result @@ -220,10 +218,8 @@ def _generate_final_message( """Generate final wrap message. Args: - server_ctx(gssapi.SecurityContext): gssapi context - settings(Settings): settings - server_ctx: gssapi.SecurityContext: - settings: Settings: + server_ctx (gssapi.SecurityContext): gssapi context + settings (Settings): settings Returns: bytes: message diff --git a/app/ldap_protocol/ldap_requests/modify.py b/app/ldap_protocol/ldap_requests/modify.py index ceea2846d..2ec5365ff 100644 --- a/app/ldap_protocol/ldap_requests/modify.py +++ b/app/ldap_protocol/ldap_requests/modify.py @@ -243,8 +243,7 @@ def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: """Description. Args: - err: BaseException: - + err (BaseException): error """ match err: case ValueError(): diff --git a/app/ldap_protocol/ldap_requests/search.py b/app/ldap_protocol/ldap_requests/search.py index 7db2d0f92..34a15aa4d 100644 --- a/app/ldap_protocol/ldap_requests/search.py +++ b/app/ldap_protocol/ldap_requests/search.py @@ -102,8 +102,8 @@ def serialize_filter(self, val: ASN1Row | None, _info: Any) -> str | None: # no """Serialize filter field. 
Args: - val: ASN1Row | None: - _info: Any: + val (ASN1Row | None): instance of ASN1Row + _info (Any): not used """ return val.to_ldap_filter() if isinstance(val, ASN1Row) else None @@ -354,8 +354,8 @@ def build_query( """Build tree query. Args: - base_directories: list[Directory]: - user: UserSchema: + base_directories (list[Directory]): instances of Directory + user (UserSchema): serialized user """ query = ( select(Directory) diff --git a/app/ldap_protocol/ldap_responses.py b/app/ldap_protocol/ldap_responses.py index 6f67272e7..7c2cdc6a1 100644 --- a/app/ldap_protocol/ldap_responses.py +++ b/app/ldap_protocol/ldap_responses.py @@ -54,7 +54,7 @@ def to_asn1(self, enc: Encoder) -> None: """Serialize flat structure to bytes, write to encoder buffer. Args: - enc: Encoder: + enc (Encoder): encoder """ for value in self._get_asn1_fields().values(): enc.write(value, type_map[type(value)]) @@ -84,7 +84,7 @@ def to_asn1(self, enc: Encoder) -> None: """Serialize flat structure to bytes, write to encoder buffer. Args: - enc: Encoder: + enc (Encoder): encoder """ enc.write(self.result_code, type_map[type(self.result_code)]) enc.write(self.matched_dn, type_map[type(self.matched_dn)]) @@ -163,7 +163,7 @@ def to_asn1(self, enc: Encoder) -> None: """Serialize search response structure to asn1 buffer. Args: - enc: Encoder: + enc (Encoder): encoder """ enc.write(self.object_name, Numbers.OctetString) enc.enter(Numbers.Sequence) @@ -265,7 +265,7 @@ def to_asn1(self, enc: Encoder) -> None: """Serialize flat structure to bytes, write to encoder buffer. Args: - enc: Encoder: + enc (Encoder): encoder """ enc.write(self.result_code, type_map[type(self.result_code)]) enc.write(self.matched_dn, type_map[type(self.matched_dn)]) diff --git a/app/ldap_protocol/ldap_schema/attribute_type_crud.py b/app/ldap_protocol/ldap_schema/attribute_type_crud.py index 51b27bf52..8ec74bfc7 100644 --- a/app/ldap_protocol/ldap_schema/attribute_type_crud.py +++ b/app/ldap_protocol/ldap_schema/attribute_type_crud.py @@ -32,10 +32,10 @@ def from_db(cls, attribute_type: AttributeType) -> "AttributeTypeSchema": """Create an instance from database. Args: - attribute_type: AttributeType: + attribute_type (AttributeType): instance of AttributeType Returns: - AttributeTypeSchema: Instance of AttributeTypeSchema. + AttributeTypeSchema: serialized AttributeType. """ return cls( oid=attribute_type.oid, diff --git a/app/ldap_protocol/messages.py b/app/ldap_protocol/messages.py index f6f76c6d2..ef1a003e7 100644 --- a/app/ldap_protocol/messages.py +++ b/app/ldap_protocol/messages.py @@ -133,10 +133,8 @@ def from_err(cls, source: bytes, err: Exception) -> LDAPResponseMessage: """Create error response message. Args: - source(bytes): source data - err(Exception): any error - source: bytes: - err: Exception: + source (bytes): source data + err (Exception): any error Returns: LDAPResponseMessage: response with err code diff --git a/app/ldap_protocol/policies/access_policy.py b/app/ldap_protocol/policies/access_policy.py index 0bc03c81f..1a87eeab3 100644 --- a/app/ldap_protocol/policies/access_policy.py +++ b/app/ldap_protocol/policies/access_policy.py @@ -93,14 +93,10 @@ def mutate_ap[T: Select]( """Modify query with read rule filter, joins acess policies. 
Args: - query(T): select(Directory) - user(UserSchema): user data - query: T: - user: UserSchema: - action: Literal["add": - "read": - "modify": - "del"]: (Default value = "read") + query (T): select(Directory) + user (UserSchema): serialized user + action (Literal["add", "read", "modify", "del"]): + (Default value = "read") Returns: T: select(Directory).join(Directory.access_policies) diff --git a/app/ldap_protocol/policies/password_policy.py b/app/ldap_protocol/policies/password_policy.py index b69150f75..73fb3a342 100644 --- a/app/ldap_protocol/policies/password_policy.py +++ b/app/ldap_protocol/policies/password_policy.py @@ -157,11 +157,10 @@ def _count_password_exists_days(last_pwd_set: Attribute) -> int: """Get number of days, pwd exists. Args: - last_pwd_set(Attribute): pwdLastSet - last_pwd_set: Attribute: + last_pwd_set (Attribute): pwdLastSet Returns: - int: days + int: count of days """ tz = ZoneInfo("UTC") now = datetime.now(tz=tz) @@ -211,16 +210,11 @@ def validate_min_age(self, last_pwd_set: Attribute) -> bool: """Validate min password change age. Args: - last_pwd_set(Attribute): last pwd set - last_pwd_set: Attribute: + last_pwd_set (Attribute): last pwd set Returns: bool: can change pwd True - not valid, can not change False - bool: can change pwd True - not valid, can not change False - - valid, can change - bool: can change pwd True - not valid, can not change False - - valid, can change - on minimum_password_age_days can always change. + - valid, can change on minimum_password_age_days can always change. """ if self.minimum_password_age_days == 0: return False @@ -233,16 +227,11 @@ def validate_max_age(self, last_pwd_set: Attribute) -> bool: """Validate max password change age. Args: - last_pwd_set(Attribute): last pwd set - last_pwd_set: Attribute: + last_pwd_set (Attribute): last pwd set Returns: bool: is pwd expired True - not valid, expired False - - bool: is pwd expired True - not valid, expired False - - valid, not expired - bool: is pwd expired True - not valid, expired False - - valid, not expired - on maximum_password_age_days always valid. + valid, not expired on maximum_password_age_days always valid. """ if self.maximum_password_age_days == 0: return False diff --git a/app/ldap_protocol/server.py b/app/ldap_protocol/server.py index 5b3ced655..a77dd8043 100644 --- a/app/ldap_protocol/server.py +++ b/app/ldap_protocol/server.py @@ -246,8 +246,7 @@ def _compute_ldap_message_size(data: bytes) -> int: https://github.com/cannatag/ldap3/blob/dev/ldap3/strategy/base.py#L455 Args: - data(bytes): body - data: bytes: + data (bytes): body Returns: int: actual size @@ -356,11 +355,11 @@ async def _unwrap_request( @staticmethod def _req_log_full(addr: str, msg: LDAPRequestMessage) -> None: - """Description. + """Request full log. Args: - addr: str: - msg: LDAPRequestMessage: + addr (str): address + msg (LDAPRequestMessage): message """ log.debug( @@ -370,12 +369,11 @@ def _req_log_full(addr: str, msg: LDAPRequestMessage) -> None: @staticmethod def _resp_log_full(addr: str, msg: LDAPResponseMessage) -> None: - """Description. + """Response full log. Args: - addr: str: - msg: LDAPResponseMessage: - + addr (str): address + msg (LDAPResponseMessage): message """ log.debug( f"\nTo: {addr!r}\n{msg.name}[{msg.message_id}]: " @@ -384,11 +382,11 @@ def _resp_log_full(addr: str, msg: LDAPResponseMessage) -> None: @staticmethod def _log_short(addr: str, msg: LDAPMessage) -> None: - """Description. + """Short log. 
Args: - addr: str: - msg: LDAPMessage: + addr (str): address + msg (LDAPMessage): message """ log.info(f"\n{addr!r}: {msg.name}[{msg.message_id}]\n") diff --git a/app/ldap_protocol/session_storage.py b/app/ldap_protocol/session_storage.py index 67d4770f0..dbe075511 100644 --- a/app/ldap_protocol/session_storage.py +++ b/app/ldap_protocol/session_storage.py @@ -119,8 +119,8 @@ def _sign(session_id: str, settings: Settings) -> str: """Description. Args: - session_id: str: - settings: Settings: + session_id (str): Session id + settings (Settings): Settings with database dsn. Returns: str: The HMAC signature for the session_id using provided settings. @@ -136,7 +136,7 @@ def get_user_agent_hash(self, user_agent: str) -> str: """Get user agent hash. Args: - user_agent: str: + user_agent (str): user agent Returns: str: The hash of the user agent. @@ -147,8 +147,8 @@ def _get_ip_session_key(self, ip: str, protocol: ProtocolType) -> str: """Description. Args: - ip: str: - protocol: ProtocolType: + ip (str): IP + protocol (ProtocolType): Type of Protocol Returns: str: The session key for the given IP and protocol. @@ -159,8 +159,8 @@ def _get_user_session_key(self, uid: int, protocol: ProtocolType) -> str: """Description. Args: - uid: int: - protocol: ProtocolType: + uid (int): uid + protocol (ProtocolType): Type of Protocol Returns: str: The session key for the given user and protocol. @@ -171,10 +171,10 @@ def _get_protocol(self, session_id: str) -> ProtocolType: """Description. Args: - session_id: str: + session_id (str): Session id Returns: - ProtocolType: Protocol type ("http" or "ldap") for given session_id + ProtocolType: Protocol type for given session_id """ return "http" if session_id.startswith("http:") else "ldap" @@ -191,8 +191,7 @@ def _get_lock_key(self, session_id: str) -> str: """Get lock key. Args: - session_id(str): session id - session_id: str: + session_id (str): session id Returns: str: lock key @@ -276,10 +275,9 @@ def _generate_session_data( """Set data. Args: - self: Self: - uid: int: - settings: Settings: - extra_data: dict | None: + uid (int): uid + settings (Settings): Settings with database dsn. + extra_data (dict | None): additional data Returns: tuple[str, str, dict]: A tuple containing the session_id,\ diff --git a/app/ldap_protocol/user_account_control.py b/app/ldap_protocol/user_account_control.py index 486654ac2..1d00d69c4 100644 --- a/app/ldap_protocol/user_account_control.py +++ b/app/ldap_protocol/user_account_control.py @@ -120,8 +120,7 @@ def is_flag_true(flag: UserAccountControlFlag) -> bool: """Check given flag in current userAccountControl attribute. Args: - flag(userAccountControlFlag): flag - flag: UserAccountControlFlag: + flag (userAccountControlFlag): flag Returns: bool: True if flag is set, False otherwise diff --git a/app/ldap_protocol/utils/helpers.py b/app/ldap_protocol/utils/helpers.py index 309ff11f6..3f44d65f7 100644 --- a/app/ldap_protocol/utils/helpers.py +++ b/app/ldap_protocol/utils/helpers.py @@ -150,11 +150,10 @@ def validate_entry(entry: str) -> bool: cn=first,dc=example,dc=com -> valid Args: - entry(str): any str - entry: str: + entry (str): entry path Returns: - bool: result + bool: entry path is correct """ return all( re.match(r"^[a-zA-Z\-]+$", part.split("=")[0]) @@ -167,8 +166,8 @@ def is_dn_in_base_directory(base_directory: Directory, entry: str) -> bool: """Check if an entry in a base dn. 
Args: - base_directory: Directory: - entry: str: + base_directory (Directory): instance of Directory + entry (str): entry path """ return entry.lower().endswith(base_directory.path_dn.lower()) @@ -177,8 +176,8 @@ def dn_is_base_directory(base_directory: Directory, entry: str) -> bool: """Check if an entry is a base dn. Args: - base_directory: Directory: - entry: str: + base_directory (Directory): base Directory instance + entry (str): entry path """ return base_directory.path_dn.lower() == entry.lower() @@ -187,7 +186,7 @@ def get_generalized_now(tz: ZoneInfo) -> str: """Get generalized time (formated) with tz. Args: - tz: ZoneInfo: + tz (ZoneInfo): timezone """ return datetime.now(tz).strftime("%Y%m%d%H%M%S.%f%z") @@ -196,7 +195,7 @@ def _get_domain(name: str) -> str: """Get domain from name. Args: - name: str: + name (str): directory path """ return ".".join( [ @@ -211,10 +210,8 @@ def create_integer_hash(text: str, size: int = 9) -> int: """Create integer hash from text. Args: - text(str): any string - size(int): fixed size of hash, defaults to 15 - text: str: - size: int: (Default value = 9) + text (str): any string + size (int): fixed size of hash, defaults to 15 Returns: int: hash @@ -226,7 +223,7 @@ def get_windows_timestamp(value: datetime) -> int: """Get the Windows timestamp from the value. Args: - value: datetime: + value (datetime): date and time """ return (int(value.timestamp()) + 11644473600) * 10000000 @@ -241,7 +238,7 @@ def dt_to_ft(dt: datetime) -> int: If the object is time zone-naive, it is forced to UTC before conversion. Args: - dt: datetime: + dt (datetime): date and time """ if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) != 0: dt = dt.astimezone(ZoneInfo("UTC")) @@ -258,7 +255,7 @@ def ft_to_dt(filetime: int) -> datetime: 2) Convert to datetime object, with remainder as microseconds. Args: - filetime: int: + filetime (int): Windows file time number """ s, ns100 = divmod(filetime - _EPOCH_AS_FILETIME, _HUNDREDS_OF_NS) return datetime.fromtimestamp(s, tz=ZoneInfo("UTC")).replace( @@ -284,8 +281,7 @@ def string_to_sid(sid_string: str) -> bytes: - Each sub-authority is packed as a 4-byte sequence. Args: - sid_string: The string representation of the SID - sid_string: str: + sid_string (str): The string representation of the SID Returns: bytes: The binary representation of the SID @@ -317,14 +313,11 @@ def create_object_sid( """Generate the objectSid attribute for an object. Args: - domain: domain directory - rid(int): relative identifier - reserved(bool): A flag indicating whether the RID is reserved. - If `True`, the given RID is used directly. If `False`, 1000 - is added to the given RID to generate the final RID - domain: Directory: - rid: int: - reserved: bool: (Default value = False) + domain (Directory): domain directory + rid (int): relative identifier + reserved (bool): A flag indicating whether the RID is reserved. + If `True`, the given RID is used directly. 
If `False`, 1000 + is added to the given RID to generate the final RID Returns: str: the complete objectSid as a string @@ -348,7 +341,7 @@ def create_user_name(directory_id: int) -> str: NOTE: keycloak Args: - directory_id: int: + directory_id (int): Directory's id """ return blake2b(str(directory_id).encode(), digest_size=8).hexdigest() diff --git a/app/ldap_protocol/utils/pagination.py b/app/ldap_protocol/utils/pagination.py index 14117403b..54b2d916c 100644 --- a/app/ldap_protocol/utils/pagination.py +++ b/app/ldap_protocol/utils/pagination.py @@ -72,7 +72,7 @@ def from_db(cls, sqla_instance: S) -> "BaseSchemaModel[S]": """Create an instance of Schema from instance of SQLA model. Args: - sqla_instance: S: + sqla_instance (S): instance of SQLAlchemy Model """ diff --git a/app/ldap_protocol/utils/queries.py b/app/ldap_protocol/utils/queries.py index c8ecab3b9..27b155a92 100644 --- a/app/ldap_protocol/utils/queries.py +++ b/app/ldap_protocol/utils/queries.py @@ -179,8 +179,7 @@ def get_search_path(dn: str) -> list[str]: """Get search path for dn. Args: - dn(str): any DN, dn syntax - dn: str: + dn (str): any DN, dn syntax Returns: list[str]: reversed list of dn values @@ -219,9 +218,9 @@ def get_filter_from_path( """Get filter condition for path equality from dn. Args: - dn: str: + dn (str): any DN, dn syntax *: - column: Column | InstrumentedAttribute: (Default value =\ + column (Column | InstrumentedAttribute): (Default value =\ Directory.path) """ return get_path_filter(get_search_path(dn), column=column) diff --git a/app/multidirectory.py b/app/multidirectory.py index f77360743..db5ef2ba8 100644 --- a/app/multidirectory.py +++ b/app/multidirectory.py @@ -84,7 +84,7 @@ def _create_basic_app(settings: Settings) -> FastAPI: """Create basic FastAPI app with dependencies overrides. Args: - settings: Settings: + settings (Settings): Settings with database dsn. Returns: FastAPI: Configured FastAPI application. @@ -129,7 +129,7 @@ def _create_shadow_app(settings: Settings) -> FastAPI: """Create shadow FastAPI app for shadow. Args: - settings: Settings: + settings (Settings): Settings with database dsn. Returns: FastAPI: Configured FastAPI application for shadow API. @@ -179,7 +179,7 @@ def ldap(settings: Settings) -> None: """Run server. Args: - settings: Settings: + settings (Settings): Settings with database dsn. """ async def _servers(settings: Settings) -> None: diff --git a/app/schedule.py b/app/schedule.py index 363237ec1..c6c336fc0 100644 --- a/app/schedule.py +++ b/app/schedule.py @@ -54,7 +54,7 @@ def scheduler(settings: Settings) -> None: """Sript entrypoint. Args: - settings: Settings: + settings (Settings): Settings with database dsn. """ async def runner(settings: Settings) -> None: diff --git a/app/security.py b/app/security.py index 24a45d05e..20444349c 100644 --- a/app/security.py +++ b/app/security.py @@ -13,10 +13,8 @@ def verify_password(plain_password: str, hashed_password: str) -> bool: """Validate password. Args: - plain_password(str): raw password - hashed_password(str): pwd hash from db - plain_password: str: - hashed_password: str: + plain_password (str): raw password + hashed_password (str): pwd hash from db Returns: bool: is password valid @@ -28,8 +26,7 @@ def get_password_hash(password: str) -> str: """Hash password. 
Args: - password(str): raw pwd - password: str: + password (str): raw pwd Returns: str: hash diff --git a/tests/conftest.py b/tests/conftest.py index dbcce4a8d..65ee8f7df 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -149,7 +149,7 @@ def get_engine(self, settings: Settings) -> AsyncEngine: """Get async engine. Args: - settings: Settings: + settings (Settings): Settings with database dsn. """ return create_async_engine(str(settings.POSTGRES_URI), pool_size=10) @@ -161,7 +161,7 @@ def get_session_factory( """Create session factory. Args: - engine: AsyncEngine: + engine (AsyncEngine): async engine """ return async_sessionmaker( engine, @@ -331,7 +331,7 @@ def upgrade(conn: AsyncConnection) -> None: """Description. Args: - conn: AsyncConnection: + conn (AsyncConnection): connection """ config.attributes["connection"] = conn @@ -341,7 +341,7 @@ def downgrade(conn: AsyncConnection) -> None: """Description. Args: - conn: AsyncConnection: + conn (AsyncConnection): connection """ config.attributes["connection"] = conn @@ -436,8 +436,8 @@ def _server( """Run server in background. Args: - event_loop: asyncio.BaseEventLoop: - handler: PoolClientHandler: + event_loop (asyncio.BaseEventLoop): events loop + handler (PoolClientHandler): handler """ task = asyncio.ensure_future(handler.start(), loop=event_loop) event_loop.run_until_complete(asyncio.sleep(0.1)) @@ -451,7 +451,7 @@ def ldap_client(settings: Settings) -> ldap3.Connection: """Get ldap clinet without a creds. Args: - settings: Settings: + settings (Settings): Settings with database dsn. """ return ldap3.Connection( ldap3.Server(str(settings.HOST), settings.PORT, get_info="ALL") @@ -521,7 +521,7 @@ def creds(user: dict) -> TestCreds: """Get creds from test data. Args: - user: dict: + user (dict): user data """ return TestCreds(user["sam_accout_name"], user["password"]) @@ -537,7 +537,7 @@ def _force_override_tls(settings: Settings) -> Iterator: """Override tls status for tests. Args: - settings: Settings: + settings (Settings): Settings with database dsn. """ current_status = settings.USE_CORE_TLS settings.USE_CORE_TLS = True diff --git a/tests/test_api/test_main/test_kadmin.py b/tests/test_api/test_main/test_kadmin.py index 15cfa1e60..482b2f5a5 100644 --- a/tests/test_api/test_main/test_kadmin.py +++ b/tests/test_api/test_main/test_kadmin.py @@ -25,8 +25,8 @@ def _create_test_user_data( """Description. 
Args: - name: str: - pw: str: + name (str): user name + pw (str): user password """ return { "entry": "cn=ktest,dc=md,dc=test", From 39bf381463707b334f065ba8326d965f4062058c Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Thu, 5 Jun 2025 17:26:03 +0300 Subject: [PATCH 08/25] refactor: fix all docstrings by darglint2 short-strictness task_508 --- .darglint2 | 3 ++ app/api/auth/oauth2.py | 5 ++- app/api/ldap_schema/attribute_type_router.py | 4 +- app/api/ldap_schema/object_class_router.py | 4 +- app/api/network/router.py | 24 ++++++------ app/api/shadow/router.py | 6 +-- app/extra/setup_dev.py | 13 ++++++- app/ioc.py | 16 +++++++- app/ldap_protocol/asn1parser.py | 14 +++---- app/ldap_protocol/dialogue.py | 8 +++- app/ldap_protocol/dns.py | 2 +- app/ldap_protocol/filter_interpreter.py | 15 ++++++++ app/ldap_protocol/kerberos/client.py | 22 +++++------ app/ldap_protocol/ldap_requests/abandon.py | 3 +- app/ldap_protocol/ldap_requests/add.py | 7 +++- .../ldap_requests/bind_methods/base.py | 10 ++--- .../ldap_requests/bind_methods/sasl_gssapi.py | 10 ++--- .../ldap_requests/bind_methods/sasl_plain.py | 2 +- app/ldap_protocol/ldap_requests/extended.py | 8 +++- app/ldap_protocol/ldap_requests/modify.py | 14 ++++++- app/ldap_protocol/ldap_requests/search.py | 37 ++++++++++++------- app/ldap_protocol/ldap_responses.py | 8 +++- .../ldap_schema/attribute_type_crud.py | 3 ++ app/ldap_protocol/messages.py | 4 ++ app/ldap_protocol/multifactor.py | 8 +++- app/ldap_protocol/policies/password_policy.py | 5 +-- app/ldap_protocol/server.py | 6 ++- app/ldap_protocol/session_storage.py | 3 +- app/ldap_protocol/user_account_control.py | 2 +- app/ldap_protocol/utils/helpers.py | 26 ++++++++++++- app/ldap_protocol/utils/queries.py | 17 ++++++--- app/multidirectory.py | 5 ++- app/schedule.py | 4 +- pyproject.toml | 1 + 34 files changed, 224 insertions(+), 95 deletions(-) diff --git a/.darglint2 b/.darglint2 index 0df78f458..dd71638e0 100644 --- a/.darglint2 +++ b/.darglint2 @@ -1,2 +1,5 @@ [darglint2] docstring_style=google + +# https://akaihola.github.io/darglint2/master/readme.html#strictness-configuration +strictness=long diff --git a/app/api/auth/oauth2.py b/app/api/auth/oauth2.py index 58b7c1fac..3a60ab3eb 100644 --- a/app/api/auth/oauth2.py +++ b/app/api/auth/oauth2.py @@ -82,7 +82,10 @@ async def get_current_user( Returns: UserSchema: user schema - """ + + Raises: + _CREDENTIALS_EXCEPTION: creds not valid + """ # noqa: DOC502 session_key = request.cookies.get("id", "") try: user_id = await session_storage.get_user_id( diff --git a/app/api/ldap_schema/attribute_type_router.py b/app/api/ldap_schema/attribute_type_router.py index 0650c9aae..0e3af5630 100644 --- a/app/api/ldap_schema/attribute_type_router.py +++ b/app/api/ldap_schema/attribute_type_router.py @@ -102,8 +102,8 @@ async def get_list_attribute_types_with_pagination( Args: page_number (int): Page number. - session (AsyncSession): Database session. - page_size (int, optional): Number of items per page. Defaults to 50. + session (FromDishka[AsyncSession]): Database session. + page_size (int): Number of items per page (Default value = 50) Returns: AttributeTypePaginationSchema: Paginated attribute types. diff --git a/app/api/ldap_schema/object_class_router.py b/app/api/ldap_schema/object_class_router.py index 85a2157d6..ad33f89f4 100644 --- a/app/api/ldap_schema/object_class_router.py +++ b/app/api/ldap_schema/object_class_router.py @@ -101,8 +101,8 @@ async def get_list_object_classes_with_pagination( Args: page_number (int): Page number. 
- session (AsyncSession): Database session. - page_size (int, optional): Number of items per page. Defaults to 25. + session (FromDishka[AsyncSession]): Database session. + page_size (int): Number of items per page. Defaults to 25. Returns: ObjectClassPaginationSchema: Paginated object classes. diff --git a/app/api/network/router.py b/app/api/network/router.py index f02555aa6..dee5e8d60 100644 --- a/app/api/network/router.py +++ b/app/api/network/router.py @@ -46,6 +46,7 @@ async def add_network_policy( \f Args: policy (Policy): policy to add + session (AsyncSession): Database session Raises: HTTPException: 422 invalid group DN @@ -115,9 +116,8 @@ async def get_list_network_policies( """Get network. \f - Raises: - HTTPException: 404 if no policies found - HTTPException: 422 if no policies found in database. + Args: + session (AsyncSession): Database session Returns: list[PolicyResponse]: List of policies with their details. @@ -169,7 +169,8 @@ async def delete_network_policy( \f Args: policy_id (int): id - user (User): requires login + request (Request): http request + session (AsyncSession): Database session Raises: HTTPException: 404 @@ -216,7 +217,7 @@ async def switch_network_policy( \f Args: policy_id (int): id - user (User): requires login + session (FromDishka[AsyncSession]): async db session Raises: HTTPException: 404 @@ -248,15 +249,16 @@ async def update_network_policy( \f Args: - policy (PolicyUpdate): update request + request (PolicyUpdate): update request + session (FromDishka[AsyncSession]): async db session + + Returns: + PolicyResponse: Policy from database Raises: HTTPException: 404 policy not found HTTPException: 422 Invalid group DN HTTPException: 422 Entry already exists - - Returns: - PolicyResponse: Policy from database """ selected_policy = await session.get( NetworkPolicy, @@ -337,8 +339,8 @@ async def swap_network_policy( - **second_policy_id**: policy to swap \f Args: - first_policy_id (int): policy to swap - second_policy_id (int): policy to swap + swap (SwapRequest): http request + session (FromDishka[AsyncSession]): async db session Raises: HTTPException: 404 diff --git a/app/api/shadow/router.py b/app/api/shadow/router.py index 495a4b00b..caf2d6c95 100644 --- a/app/api/shadow/router.py +++ b/app/api/shadow/router.py @@ -100,13 +100,13 @@ async def sync_password( - **new_password**: password to set \f Args: - principal Annotated[str, Body]: reset target user - new_password Annotated[str, Body]: new password for user + principal (Annotated[str, Body]): user principal name + new_password (Annotated[str, Body]): new password for user session (FromDishka[AsyncSession]): db Raises: - HTTPException: 404 if user not found HTTPException: 422 if password not valid + HTTPException: 404 if user not found """ user = await get_user(session, principal) diff --git a/app/extra/setup_dev.py b/app/extra/setup_dev.py index c21f1cff6..1b97d0b0e 100644 --- a/app/extra/setup_dev.py +++ b/app/extra/setup_dev.py @@ -150,7 +150,16 @@ async def setup_enviroment( data: list, dn: str = "multifactor.dev", ) -> None: - """Create directories and users for enviroment.""" + """Create directories and users for enviroment. 
+ + Args: + session (AsyncSession): Database async session + data (list): data + dn (str): domain name (Default value = 'multifactor.dev') + + Raises: + Exception: Failed to setup environment + """ cat_result = await session.execute(select(Directory)) if cat_result.scalar_one_or_none(): logger.warning("dev data already set up") @@ -207,4 +216,4 @@ async def setup_enviroment( import traceback logger.error(traceback.format_exc()) - raise + raise Exception("Failed to setup environment") diff --git a/app/ioc.py b/app/ioc.py index 243f8f610..afa77c907 100644 --- a/app/ioc.py +++ b/app/ioc.py @@ -91,6 +91,9 @@ async def create_session( ) -> AsyncIterator[AsyncSession]: """Create session for request. + Args: + async_session (async_sessionmaker[AsyncSession]): async db session + Yields: AsyncIterator[AsyncSession] """ @@ -116,7 +119,6 @@ async def get_kadmin_http( Args: settings (Settings): app settings - session_maker (AsyncSessionMaker): session maker Yields: AsyncIterator[AbstractKadmin]: kadmin with client @@ -178,6 +180,10 @@ async def get_dns_mngr( ) -> AsyncIterator[AbstractDNSManager]: """Get DNSManager class. + Args: + settings (DNSManagerSettings): DNS Manager settings + dns_manager_class (type[AbstractDNSManager]): manager class + Yields: AsyncIterator[AbstractDNSManager] """ @@ -190,6 +196,9 @@ async def get_redis_for_sessions( ) -> AsyncIterator[SessionStorageClient]: """Get redis connection. + Args: + settings: Settings with database dsn. + Yields: AsyncIterator[SessionStorageClient] @@ -282,8 +291,11 @@ async def get_client( ) -> AsyncIterator[MFAHTTPClient]: """Get async client for DI. + Args: + settings: Settings with database dsn. + Yields: - AsyncIterator[MFAHTTPClient] + AsyncIterator[MFAHTTPClient]. """ async with httpx.AsyncClient( timeout=settings.MFA_CONNECT_TIMEOUT_SECONDS, diff --git a/app/ldap_protocol/asn1parser.py b/app/ldap_protocol/asn1parser.py index 71b8f8e55..59cdd8437 100644 --- a/app/ldap_protocol/asn1parser.py +++ b/app/ldap_protocol/asn1parser.py @@ -92,7 +92,7 @@ def _handle_extensible_match(self) -> str: str: match Raises: - TypeError: + TypeError: If value isnt a list """ oid = attribute = value = None dn_attributes = False @@ -139,10 +139,10 @@ def _handle_substring(self) -> str: """Process and format substring operations for LDAP. Returns: - str: + str: substring Raises: - ValueError: + ValueError: Invalid tag_id """ value = ( self.value.decode(errors="replace") @@ -169,14 +169,14 @@ def serialize(self, obj: "ASN1Row | T | None" = None) -> str: # noqa: C901 substring matches. Args: - obj ("ASN1Row | T | None"): (Default value = None) + obj (ASN1Row | T | None): (Default value = None) Returns: - str: + str: result string Raises: - ValueError: - TypeError: + ValueError: Invalid tag_id + TypeError: cant serialize """ if obj is None: obj = self diff --git a/app/ldap_protocol/dialogue.py b/app/ldap_protocol/dialogue.py index 4f6a5edb1..33f5dc1e4 100644 --- a/app/ldap_protocol/dialogue.py +++ b/app/ldap_protocol/dialogue.py @@ -108,6 +108,8 @@ def user(self, user: User) -> None: Args: user (User): instance of User + Raises: + NotImplementedError: Cannot manually set user """ raise NotImplementedError( "Cannot manually set user, use `set_user()` instead", @@ -159,8 +161,12 @@ async def validate_conn( ) -> None: """Validate network policies. 
+ Args: + ip (IPv4Address | IPv6Address): IP + session (AsyncSession): async session + Raises: - PermissionError: + PermissionError: NetworkPolicy is None """ policy = await self._get_policy(ip, session) # type: ignore if policy is not None: diff --git a/app/ldap_protocol/dns.py b/app/ldap_protocol/dns.py index 2d504956e..2d2549360 100644 --- a/app/ldap_protocol/dns.py +++ b/app/ldap_protocol/dns.py @@ -250,7 +250,7 @@ async def _send(self, action: Message) -> None: action (Message): DNS message Raises: - DNSConnectionError: + DNSConnectionError: DNS server IP is None """ if self._dns_settings.tsig_key is not None: action.use_tsig( diff --git a/app/ldap_protocol/filter_interpreter.py b/app/ldap_protocol/filter_interpreter.py index 4296fe3e4..46f4a5963 100644 --- a/app/ldap_protocol/filter_interpreter.py +++ b/app/ldap_protocol/filter_interpreter.py @@ -80,6 +80,9 @@ def _filter_memberof(dn: str) -> UnaryExpression: Args: dn (str): any DN, dn syntax + + Returns: + UnaryExpression """ group_id_subquery = ( select(Group.id) @@ -102,6 +105,9 @@ def _filter_member(dn: str) -> UnaryExpression: Args: dn (str): any DN, dn syntax + + Returns: + UnaryExpression """ user_id_subquery = ( select(User.id) @@ -124,6 +130,9 @@ def _recursive_filter_memberof(dn: str) -> UnaryExpression: Args: dn (str): any DN, dn syntax + + Returns: + UnaryExpression """ cte = find_members_recursive_cte(dn) @@ -171,6 +180,9 @@ def _ldap_filter_by_attribute( oid: ASN1Row | None: attr: ASN1Row: search_value: ASN1Row: + + Returns: + UnaryExpression """ if oid is None: attribute = attr.value.lower() @@ -235,6 +247,9 @@ def cast_filter2sql(expr: ASN1Row) -> UnaryExpression | ColumnElement: Args: expr: ASN1Row: + + Returns: + UnaryExpression | ColumnElement """ if expr.tag_id in range(3): conditions = [] diff --git a/app/ldap_protocol/kerberos/client.py b/app/ldap_protocol/kerberos/client.py index 754094dda..15bbfcd44 100644 --- a/app/ldap_protocol/kerberos/client.py +++ b/app/ldap_protocol/kerberos/client.py @@ -28,7 +28,7 @@ async def add_principal( timeout (int): timeout Raises: - KRBAPIError: + KRBAPIError: API error """ response = await self.client.post( "principal", @@ -50,7 +50,7 @@ async def get_principal(self, name: str) -> dict: dict Raises: - KRBAPIError: + KRBAPIError: API error """ response = await self.client.get("principal", params={"name": name}) if response.status_code != 200: @@ -66,7 +66,7 @@ async def del_principal(self, name: str) -> None: name (str): principal name Raises: - KRBAPIError: + KRBAPIError: API error """ response = await self.client.delete("principal", params={"name": name}) if response.status_code != 200: @@ -85,7 +85,7 @@ async def change_principal_password( password: password Raises: - KRBAPIError: + KRBAPIError: API error """ response = await self.client.patch( "principal", @@ -107,7 +107,7 @@ async def create_or_update_principal_pw( password: password. Raises: - KRBAPIError: + KRBAPIError: API error """ response = await self.client.post( "/principal/create_or_update", @@ -125,7 +125,7 @@ async def rename_princ(self, name: str, new_name: str) -> None: new_name: (str): new principal name Raises: - KRBAPIError: + KRBAPIError: API error """ response = await self.client.put( "principal", @@ -160,13 +160,13 @@ async def ktadd(self, names: list[str]) -> httpx.Response: @logger_wraps() async def lock_principal(self, name: str) -> None: - """Lock princ. + """Lock principal. 
Args: - name (str): upn + name (str): user principal name Raises: - KRBAPIError: on error + KRBAPIError: API error """ response = await self.client.post( "principal/lock", @@ -180,10 +180,10 @@ async def force_princ_pw_change(self, name: str) -> None: """Force mark password change for principal. Args: - name (str): pw + name (str): user principal name Raises: - KRBAPIError: err + KRBAPIError: API error """ response = await self.client.post( "principal/force_reset", diff --git a/app/ldap_protocol/ldap_requests/abandon.py b/app/ldap_protocol/ldap_requests/abandon.py index 8f9c69fd3..dcf2a60f3 100644 --- a/app/ldap_protocol/ldap_requests/abandon.py +++ b/app/ldap_protocol/ldap_requests/abandon.py @@ -23,8 +23,7 @@ def from_data(cls, data: dict[str, list[ASN1Row]]) -> "AbandonRequest": # noqa: """Create structure from ASN1Row dataclass list. Args: - data: dict[str: - list[ASN1Row]]: + data (dict[str, list[ASN1Row]]): data Returns: AbandonRequest: Instance of AbandonRequest. diff --git a/app/ldap_protocol/ldap_requests/add.py b/app/ldap_protocol/ldap_requests/add.py index c7222ebf6..a32c93e2a 100644 --- a/app/ldap_protocol/ldap_requests/add.py +++ b/app/ldap_protocol/ldap_requests/add.py @@ -107,11 +107,16 @@ async def handle( # noqa: C901 ) -> AsyncGenerator[AddResponse, None]: """Add request handler. + Args: + session (AsyncSession): Async DB session + ldap_session (LDAPSession): LDAP session + kadmin (AbstractKadmin): Abstract Kerberos Admin + Yields: AsyncGenerator[AddResponse, None] Raises: - TypeError: + TypeError: not valid attribute type """ if not ldap_session.user: yield AddResponse(**INVALID_ACCESS_RESPONSE) diff --git a/app/ldap_protocol/ldap_requests/bind_methods/base.py b/app/ldap_protocol/ldap_requests/bind_methods/base.py index 3227c892b..97b4f3843 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/base.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/base.py @@ -60,13 +60,13 @@ def get_bad_response(error_message: LDAPBindErrors) -> BindResponse: """Generate BindResponse object with an invalid credentials error. Args: - error_message (LDAPBindErrors): Error message to include in the - response + error_message (LDAPBindErrors): Error message to include in the\ + response Returns: - BindResponse: A response object with the result code set to - INVALID_CREDENTIALS, an empty matchedDN, and the provided error - message + BindResponse: A response object with the result code set to\ + INVALID_CREDENTIALS, an empty matchedDN, and the provided error\ + message """ return BindResponse( result_code=LDAPCodes.INVALID_CREDENTIALS, diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py index a92891314..34723c4f7 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py @@ -83,7 +83,7 @@ def is_valid(self, user: User | None) -> bool: # noqa: ARG002 """Check if GSSAPI token is valid. Args: - user (User | None): indb user + user (User | None): indb user Returns: bool: status @@ -108,7 +108,7 @@ def from_data(cls, data: list[ASN1Row]) -> "SaslGSSAPIAuthentication": data: list[ASN1Row]: Returns: - : SaslGSSAPIAuthentication + SaslGSSAPIAuthentication """ return cls( ticket=data[1].value if len(data) > 1 else b"", @@ -169,7 +169,7 @@ def _validate_security_layer(self, client_layer: GSSAPISL) -> bool: """Validate security layer. 
Args: - client_layer (int): client security layer + client_layer (GSSAPISL): client security layer Returns: bool: validate result @@ -185,9 +185,7 @@ def _handle_final_client_message( """Handle final client message. Args: - server_ctx(gssapi.SecurityContext): GSSAPI security context - settings(Settings): settings - server_ctx: gssapi.SecurityContext: + server_ctx (gssapi.SecurityContext): GSSAPI security context Returns: GSSAPIAuthStatus: status diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py index db0ddd8d9..f63880b56 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py @@ -27,7 +27,7 @@ def is_valid(self, user: User | None) -> bool: """Check if pwd is valid for user. Args: - user (User): in db user + user (User | None): in db user Returns: bool: status diff --git a/app/ldap_protocol/ldap_requests/extended.py b/app/ldap_protocol/ldap_requests/extended.py index e753a6cc7..5b757e683 100644 --- a/app/ldap_protocol/ldap_requests/extended.py +++ b/app/ldap_protocol/ldap_requests/extended.py @@ -252,7 +252,7 @@ async def handle( PasswdModifyResponse Raises: - PermissionError: + PermissionError: user not authorized """ if not settings.USE_CORE_TLS: raise PermissionError("TLS required") @@ -363,6 +363,12 @@ async def handle( ) -> AsyncGenerator[ExtendedResponse, None]: """Call proxy handler. + Args: + ldap_session (LDAPSession): LDAP session + session (AsyncSession): Async db session + kadmin (AbstractKadmin): Stub client for non set up dirs. + settings (Settings): Settings with database dsn + Yields: AsyncGenerator[ExtendedResponse, None]: """ diff --git a/app/ldap_protocol/ldap_requests/modify.py b/app/ldap_protocol/ldap_requests/modify.py index 2ec5365ff..611dcace7 100644 --- a/app/ldap_protocol/ldap_requests/modify.py +++ b/app/ldap_protocol/ldap_requests/modify.py @@ -110,6 +110,8 @@ def from_data(cls, data: list[ASN1Row]) -> "ModifyRequest": Args: data: list[ASN1Row]: + Returns: + ModifyRequest """ entry, proto_changes = data @@ -160,6 +162,13 @@ async def handle( ) -> AsyncGenerator[ModifyResponse, None]: """Change request handler. + Args: + ldap_session (LDAPSession): LDAP session + session (AsyncSession): Database session + session_storage (SessionStorage): Session storage + kadmin (AbstractKadmin): Kadmin + settings (Settings): Settings + Yields: AsyncGenerator[ModifyResponse, None] """ @@ -244,7 +253,10 @@ def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: Args: err (BaseException): error - """ + + Returns: + tuple[LDAPCodes, str] + """ # noqa: DAR401 match err: case ValueError(): logger.error(f"Invalid value: {err}") diff --git a/app/ldap_protocol/ldap_requests/search.py b/app/ldap_protocol/ldap_requests/search.py index 34a15aa4d..a4cf2408d 100644 --- a/app/ldap_protocol/ldap_requests/search.py +++ b/app/ldap_protocol/ldap_requests/search.py @@ -104,20 +104,21 @@ def serialize_filter(self, val: ASN1Row | None, _info: Any) -> str | None: # no Args: val (ASN1Row | None): instance of ASN1Row _info (Any): not used + + Returns: + str | None """ return val.to_ldap_filter() if isinstance(val, ASN1Row) else None @classmethod - def from_data( - cls, - data: dict[str, list[ASN1Row]], - ) -> "SearchRequest": + def from_data(cls, data: dict[str, list[ASN1Row]]) -> "SearchRequest": """Description. 
Args: - data: dict[str: - list[ASN1Row]]: + data (dict[str, list[ASN1Row]]): data + Returns: + SearchRequest: LDAP search request """ ( base_object, @@ -180,6 +181,10 @@ async def get_root_dse( ) -> defaultdict[str, list[str]]: """Get RootDSE. + Args: + session (AsyncSession): Database session + settings (Settings): Settings + Returns: defaultdict[str, list[str]]: queried attrs """ @@ -236,12 +241,8 @@ async def get_root_dse( def cast_filter(self) -> UnaryExpression | ColumnElement: """Convert asn1 row filter_ to sqlalchemy obj. - Args: - filter_(ASN1Row): requested filter_ - session(AsyncSession): sa session - Returns: - UnaryExpression: condition + UnaryExpression | ColumnElement """ return cast_filter2sql(self.filter) @@ -259,6 +260,11 @@ async def handle( Provides following responses: Entry -> Reference (optional) -> Done + Args: + session (AsyncSession): Database session + ldap_session (LDAPSession): LDAP session + settings (Settings): Settings + Yields: AsyncGenerator[SearchResultDone | SearchResultReference |\ SearchResultEntry, None] @@ -356,6 +362,9 @@ def build_query( Args: base_directories (list[Directory]): instances of Directory user (UserSchema): serialized user + + Returns: + Select """ query = ( select(Directory) @@ -431,11 +440,11 @@ async def paginate_query( """Paginate query. Args: - query (_type_): _description_ - session (_type_): _description_ + query (Select): SQLAlchemy select query + session (AsyncSession): async session Returns: - tuple[select, int, int]: query, pages_total, count + tuple[Select, int, int]: select query, pages_total, count """ if self.page_number is None: return query, 0, 0 diff --git a/app/ldap_protocol/ldap_responses.py b/app/ldap_protocol/ldap_responses.py index 7c2cdc6a1..d8c7627ea 100644 --- a/app/ldap_protocol/ldap_responses.py +++ b/app/ldap_protocol/ldap_responses.py @@ -115,8 +115,10 @@ def validate_type(cls, v: str | bytes | int) -> str: """Description. Args: - v: str | bytes | int: + v (str | bytes | int): value + Returns: + str: value """ return str(v) @@ -126,8 +128,10 @@ def validate_vals(cls, vals: list[str | int | bytes]) -> list[str | bytes]: """Description. Args: - vals: list[str | int | bytes]: + vals (list[str | int | bytes]): values + Returns: + list[str | bytes]: values """ return [v if isinstance(v, bytes) else str(v) for v in vals] diff --git a/app/ldap_protocol/ldap_schema/attribute_type_crud.py b/app/ldap_protocol/ldap_schema/attribute_type_crud.py index 8ec74bfc7..83d72793d 100644 --- a/app/ldap_protocol/ldap_schema/attribute_type_crud.py +++ b/app/ldap_protocol/ldap_schema/attribute_type_crud.py @@ -184,6 +184,9 @@ async def delete_attribute_types_by_names( Args: attribute_type_names (list[str]): List of Attribute Types OIDs. session (AsyncSession): Database session. + + Returns: + None """ if not attribute_type_names: return None diff --git a/app/ldap_protocol/messages.py b/app/ldap_protocol/messages.py index ef1a003e7..1b884ebb6 100644 --- a/app/ldap_protocol/messages.py +++ b/app/ldap_protocol/messages.py @@ -169,6 +169,10 @@ async def create_response( ) -> AsyncGenerator[LDAPResponseMessage, None]: """Call unique context handler. + Args: + handler (Callable[..., AsyncGenerator[BaseResponse, None]]):\ + handler + Yields: LDAPResponseMessage: create response for context. 
""" diff --git a/app/ldap_protocol/multifactor.py b/app/ldap_protocol/multifactor.py index 1744c825d..25d2a4e4d 100644 --- a/app/ldap_protocol/multifactor.py +++ b/app/ldap_protocol/multifactor.py @@ -58,6 +58,11 @@ async def get_creds( ) -> Creds | None: """Get API creds. + Args: + session (AsyncSession): session + key_name (str): key name + secret_name (str): secret name + Returns: tuple[str, str]: api key and secret """ @@ -151,13 +156,12 @@ async def ldap_validate_mfa( Args: username (str): un password (str): pwd - policy (NetworkPolicy): policy Returns: bool: status Raises: - ConnectTimeout: API Timeout + MFAConnectError: API Timeout MFAMissconfiguredError: API Key or Secret is invalid MultifactorError: status error """ # noqa: DOC502 diff --git a/app/ldap_protocol/policies/password_policy.py b/app/ldap_protocol/policies/password_policy.py index 73fb3a342..32a33ca12 100644 --- a/app/ldap_protocol/policies/password_policy.py +++ b/app/ldap_protocol/policies/password_policy.py @@ -91,10 +91,10 @@ async def create_policy_settings(self, session: AsyncSession) -> Self: """Create policies settings. Args: - session: db session + session (AsyncSession): db session Returns: - PasswordPolicySchema: password policy. + Self: Serialized password policy. Raises: PermissionError: Policy already exists. @@ -250,7 +250,6 @@ async def validate_password_with_policy( Args: password (str): new raw password user (User): db user - session (AsyncSession): db Returns: bool: status diff --git a/app/ldap_protocol/server.py b/app/ldap_protocol/server.py index a77dd8043..8d103acf8 100644 --- a/app/ldap_protocol/server.py +++ b/app/ldap_protocol/server.py @@ -127,7 +127,7 @@ def _load_ssl_context(self) -> None: """Load SSL context for LDAPS. Raises: - SystemExit: + SystemExit: Certs not found """ if self.settings.USE_CORE_TLS and self.settings.LDAP_LOAD_SSL_CERT: if not self.settings.check_certs_exist(): @@ -398,6 +398,10 @@ async def _handle_single_response( ) -> None: """Get message from queue and handle it. + Args: + writer (asyncio.StreamWriter): writer + container (AsyncContainer): container + Raises: RuntimeError: any error """ diff --git a/app/ldap_protocol/session_storage.py b/app/ldap_protocol/session_storage.py index dbe075511..0ecce67a5 100644 --- a/app/ldap_protocol/session_storage.py +++ b/app/ldap_protocol/session_storage.py @@ -768,12 +768,11 @@ async def create_session( 3. Link the session to the user's session tracking key (`keys:http:`). 4. If an IP address is provided in `extra_data`, also link the session - to the IP-based session tracking key (`ip:http:`). + to the IP-based session tracking key (`ip:http:`). Args: uid (int): user id settings (Settings): settings - *, extra_data (dict): extra data Returns: diff --git a/app/ldap_protocol/user_account_control.py b/app/ldap_protocol/user_account_control.py index 1d00d69c4..23616119d 100644 --- a/app/ldap_protocol/user_account_control.py +++ b/app/ldap_protocol/user_account_control.py @@ -120,7 +120,7 @@ def is_flag_true(flag: UserAccountControlFlag) -> bool: """Check given flag in current userAccountControl attribute. 
Args: - flag (userAccountControlFlag): flag + flag (UserAccountControlFlag): flag Returns: bool: True if flag is set, False otherwise diff --git a/app/ldap_protocol/utils/helpers.py b/app/ldap_protocol/utils/helpers.py index 3f44d65f7..05bfc14ab 100644 --- a/app/ldap_protocol/utils/helpers.py +++ b/app/ldap_protocol/utils/helpers.py @@ -168,6 +168,9 @@ def is_dn_in_base_directory(base_directory: Directory, entry: str) -> bool: Args: base_directory (Directory): instance of Directory entry (str): entry path + + Returns: + bool: True if the entry is in the base directory, False otherwise """ return entry.lower().endswith(base_directory.path_dn.lower()) @@ -178,6 +181,9 @@ def dn_is_base_directory(base_directory: Directory, entry: str) -> bool: Args: base_directory (Directory): base Directory instance entry (str): entry path + + Returns: + bool: True if the entry is a base dn, False otherwise """ return base_directory.path_dn.lower() == entry.lower() @@ -187,6 +193,9 @@ def get_generalized_now(tz: ZoneInfo) -> str: Args: tz (ZoneInfo): timezone + + Returns: + str: generalized time """ return datetime.now(tz).strftime("%Y%m%d%H%M%S.%f%z") @@ -196,6 +205,9 @@ def _get_domain(name: str) -> str: Args: name (str): directory path + + Returns: + str: domain """ return ".".join( [ @@ -211,7 +223,7 @@ def create_integer_hash(text: str, size: int = 9) -> int: Args: text (str): any string - size (int): fixed size of hash, defaults to 15 + size (int): fixed size of hash, defaults to 9 Returns: int: hash @@ -224,6 +236,9 @@ def get_windows_timestamp(value: datetime) -> int: Args: value (datetime): date and time + + Returns: + int: Windows timestamp """ return (int(value.timestamp()) + 11644473600) * 10000000 @@ -239,6 +254,9 @@ def dt_to_ft(dt: datetime) -> int: Args: dt (datetime): date and time + + Returns: + int: Windows filetime """ if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) != 0: dt = dt.astimezone(ZoneInfo("UTC")) @@ -256,6 +274,9 @@ def ft_to_dt(filetime: int) -> datetime: Args: filetime (int): Windows file time number + + Returns: + datetime: Python datetime """ s, ns100 = divmod(filetime - _EPOCH_AS_FILETIME, _HUNDREDS_OF_NS) return datetime.fromtimestamp(s, tz=ZoneInfo("UTC")).replace( @@ -342,6 +363,9 @@ def create_user_name(directory_id: int) -> str: Args: directory_id (int): Directory's id + + Returns: + str: username """ return blake2b(str(directory_id).encode(), digest_size=8).hexdigest() diff --git a/app/ldap_protocol/utils/queries.py b/app/ldap_protocol/utils/queries.py index 27b155a92..fb302c908 100644 --- a/app/ldap_protocol/utils/queries.py +++ b/app/ldap_protocol/utils/queries.py @@ -197,11 +197,8 @@ def get_path_filter( """Get filter condition for path equality. 
Args: - path(list[str]): dn - field(Column): path column, defaults to Directory.path - path: list[str]: - *: - column: ColumnElement | Column | InstrumentedAttribute:\ + path (list[str]): domain name + column (ColumnElement | Column | InstrumentedAttribute):\ (Default value = Directory.path) Returns: @@ -219,9 +216,11 @@ def get_filter_from_path( Args: dn (str): any DN, dn syntax - *: column (Column | InstrumentedAttribute): (Default value =\ Directory.path) + + Returns: + ColumnElement: filter (where) element """ return get_path_filter(get_search_path(dn), column=column) @@ -263,6 +262,9 @@ async def create_group( name (str): group name sid (int): objectSid session (AsyncSession): db + + Returns: + tuple[Directory, Group] """ base_dn_list = await get_base_directories(session) @@ -319,6 +321,9 @@ async def is_computer(directory_id: int, session: AsyncSession) -> bool: Args: session (AsyncSession): db directory_id (int): id + + Returns: + bool: True if the entry is a computer, False otherwise """ query = select( select(Attribute) diff --git a/app/multidirectory.py b/app/multidirectory.py index db5ef2ba8..a1e2f22ba 100644 --- a/app/multidirectory.py +++ b/app/multidirectory.py @@ -75,6 +75,9 @@ async def _lifespan(app: FastAPI) -> AsyncIterator[None]: Args: app (FastAPI): FastAPI application. + + Yields: + AsyncIterator: async iterator """ yield await app.state.dishka_container.close() @@ -153,7 +156,7 @@ def create_prod_app( Args: factory (Callable[[Settings], FastAPI]): _create_basic_app - settings: Settings | None: (Default value = None) + settings (Settings | None): (Default value = None) Returns: FastAPI: application. diff --git a/app/schedule.py b/app/schedule.py index c6c336fc0..0e029e396 100644 --- a/app/schedule.py +++ b/app/schedule.py @@ -33,9 +33,9 @@ async def _schedule( """Run task periodically. Args: - task (Awaitable): any task - container (AsyncContainer): container + task (TaskType): callable coroutine wait (float): time to wait after execution + container (AsyncContainer): container """ logger.info("Registered: {}", task.__name__) while True: diff --git a/pyproject.toml b/pyproject.toml index 1a51b8896..31308c286 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -113,6 +113,7 @@ select = [ "I", # isort, check tool.ruff.lint.isort. Must have "N", # pep8-naming "A", # flake8 builtin-attribute-shadowing + # DOC and D. used darglint2 "D", # pydocstyle, check tool.ruff.lint.pydocstyle "DOC", # pydoclint TODO uncomment, ruff fix and fix error "UP", # pyupgrade, check tool.ruff.lint.pyupgrade. 
Must have From 52d2969e456f4613dcd7f153430574b3376ce635 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Thu, 5 Jun 2025 17:31:04 +0300 Subject: [PATCH 09/25] refactor: fix all docstrings by darglint2 short and long strictness task_508 --- .darglint2 | 4 +++- app/ldap_protocol/filter_interpreter.py | 10 ++++++++-- app/ldap_protocol/kerberos/utils.py | 4 ++++ app/ldap_protocol/server.py | 4 ++++ app/ldap_protocol/utils/queries.py | 7 +++++++ pyproject.toml | 2 +- 6 files changed, 27 insertions(+), 4 deletions(-) diff --git a/.darglint2 b/.darglint2 index dd71638e0..d0599bfe9 100644 --- a/.darglint2 +++ b/.darglint2 @@ -2,4 +2,6 @@ docstring_style=google # https://akaihola.github.io/darglint2/master/readme.html#strictness-configuration -strictness=long +strictness=full + +ignore=DAR401, diff --git a/app/ldap_protocol/filter_interpreter.py b/app/ldap_protocol/filter_interpreter.py index 46f4a5963..1ef0cdea3 100644 --- a/app/ldap_protocol/filter_interpreter.py +++ b/app/ldap_protocol/filter_interpreter.py @@ -32,7 +32,10 @@ def _get_substring(right: ASN1Row) -> str: # RFC 4511 """Description. Args: - right: ASN1Row: + right (ASN1Row): Row with metadata + + Returns: + str: substring """ expr = right.value[0] value = expr.value @@ -198,7 +201,10 @@ def _cast_item(item: ASN1Row) -> UnaryExpression | ColumnElement: """Description. Args: - item: ASN1Row: + item (ASN1Row): Row with metadata + + Returns: + UnaryExpression | ColumnElement """ # present, for e.g. `attibuteName=*`, `(attibuteName)` if item.tag_id == 7: diff --git a/app/ldap_protocol/kerberos/utils.py b/app/ldap_protocol/kerberos/utils.py index 1e43414fa..d1b9dc579 100644 --- a/app/ldap_protocol/kerberos/utils.py +++ b/app/ldap_protocol/kerberos/utils.py @@ -69,6 +69,10 @@ async def set_state(session: AsyncSession, state: "KerberosState") -> None: This function updates the server state in the database by either adding a new entry, updating an existing entry, or deleting and re-adding the entry if there are multiple entries found. + + Args: + session (AsyncSession): Database session + state (KerberosState): Kerberos server state """ results = await session.execute( select(CatalogueSetting) diff --git a/app/ldap_protocol/server.py b/app/ldap_protocol/server.py index 8d103acf8..2658178ab 100644 --- a/app/ldap_protocol/server.py +++ b/app/ldap_protocol/server.py @@ -483,6 +483,10 @@ async def _handle_responses( Spawns (default 5) workers, then every task awaits for queue object, cycle locks until pool completes at least 1 task. + + Args: + writer (asyncio.StreamWriter): writer + container (AsyncContainer): container """ tasks = [ self._handle_single_response(writer, container) diff --git a/app/ldap_protocol/utils/queries.py b/app/ldap_protocol/utils/queries.py index fb302c908..0d9da4e69 100644 --- a/app/ldap_protocol/utils/queries.py +++ b/app/ldap_protocol/utils/queries.py @@ -230,6 +230,13 @@ async def get_dn_by_id(id_: int, session: AsyncSession) -> str: >>> await get_dn_by_id(0, session) >>> "cn=groups,dc=example,dc=com" + + Args: + id_ (int): id + session (AsyncSession): Database session + + Returns: + str: domain name """ query = select(Directory).filter(Directory.id == id_) retval = (await session.scalars(query)).one() diff --git a/pyproject.toml b/pyproject.toml index 31308c286..ef9425e66 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -113,7 +113,7 @@ select = [ "I", # isort, check tool.ruff.lint.isort. Must have "N", # pep8-naming "A", # flake8 builtin-attribute-shadowing - # DOC and D. 
used darglint2 + # DOC and D not enough. Use darglint2. "D", # pydocstyle, check tool.ruff.lint.pydocstyle "DOC", # pydoclint TODO uncomment, ruff fix and fix error "UP", # pyupgrade, check tool.ruff.lint.pyupgrade. Must have From 593603b069aa58c14d1873217ecf0d8a8d97683b Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Thu, 5 Jun 2025 18:09:10 +0300 Subject: [PATCH 10/25] refactor: docstrings darglint finally task_508 --- .darglint2 | 2 +- app/alembic/env.py | 6 +- app/api/auth/router.py | 8 ++- app/api/auth/router_mfa.py | 7 ++- app/api/auth/schema.py | 21 +++++-- app/api/main/schema.py | 4 +- app/api/network/schema.py | 15 +++-- app/config.py | 11 ++-- .../scripts/principal_block_user_sync.py | 12 ++-- app/extra/scripts/update_krb5_config.py | 8 ++- app/extra/setup_dev.py | 9 ++- app/ioc.py | 53 +++++++++++++++-- app/ldap_protocol/dialogue.py | 57 +++++++++++++++---- app/ldap_protocol/dns.py | 44 ++++++++++++-- app/ldap_protocol/filter_interpreter.py | 15 +++-- app/ldap_protocol/kerberos/base.py | 15 ++++- app/ldap_protocol/kerberos/stub.py | 7 ++- app/ldap_protocol/kerberos/utils.py | 2 +- app/ldap_protocol/ldap_requests/abandon.py | 6 +- app/ldap_protocol/ldap_requests/add.py | 12 +++- .../ldap_requests/bind_methods/base.py | 24 ++++++-- .../ldap_requests/bind_methods/sasl_gssapi.py | 3 - .../ldap_requests/bind_methods/sasl_plain.py | 1 - app/ldap_protocol/ldap_requests/delete.py | 2 +- app/ldap_protocol/ldap_requests/extended.py | 29 ++++++++-- app/ldap_protocol/ldap_requests/modify.py | 35 ++++++++---- app/ldap_protocol/ldap_requests/search.py | 42 +++++++++++--- app/ldap_protocol/ldap_responses.py | 34 ++++++++--- .../ldap_schema/attribute_type_crud.py | 5 +- app/ldap_protocol/messages.py | 6 +- app/ldap_protocol/policies/password_policy.py | 2 +- app/ldap_protocol/server.py | 33 ++++++++--- app/ldap_protocol/session_storage.py | 10 ++-- app/ldap_protocol/utils/const.py | 4 +- app/ldap_protocol/utils/helpers.py | 12 +++- app/ldap_protocol/utils/pagination.py | 3 + app/ldap_protocol/utils/queries.py | 37 ++++++++++-- .../utils/raw_definition_parser.py | 8 +-- app/models.py | 2 +- app/multidirectory.py | 2 +- tests/conftest.py | 6 +- tests/test_api/test_main/test_kadmin.py | 2 +- 42 files changed, 480 insertions(+), 136 deletions(-) diff --git a/.darglint2 b/.darglint2 index d0599bfe9..8c68e3f19 100644 --- a/.darglint2 +++ b/.darglint2 @@ -4,4 +4,4 @@ docstring_style=google # https://akaihola.github.io/darglint2/master/readme.html#strictness-configuration strictness=full -ignore=DAR401, +# ignore=DAR401, diff --git a/app/alembic/env.py b/app/alembic/env.py index 72afc84cf..07811cd68 100644 --- a/app/alembic/env.py +++ b/app/alembic/env.py @@ -39,7 +39,11 @@ def do_run_migrations(connection: AsyncConnection): async def run_async_migrations(settings: Settings): - """Run async migrations.""" + """Run async migrations. + + Args: + settings (Settings): Settings + """ engine = create_async_engine(str(settings.POSTGRES_URI)) async with engine.connect() as connection: diff --git a/app/api/auth/router.py b/app/api/auth/router.py index e3113c772..4652bc89a 100644 --- a/app/api/auth/router.py +++ b/app/api/auth/router.py @@ -170,7 +170,13 @@ async def logout( storage: FromDishka[SessionStorage], user: Annotated[UserSchema, Depends(get_current_user)], ) -> None: - """Delete token cookies.""" + """Delete token cookies. + + Args: + response (Response): FastAPI response object. + storage (FromDishka[SessionStorage]): Session storage. + user (UserSchema): Current user schema from dependency. 
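With darglint2 now at strictness=full and no ignored codes, every Google-style docstring section is cross-checked against the signature: each parameter needs an Args entry, a non-None return needs Returns, generators need Yields, and raised exceptions need Raises. A minimal sketch of a function that passes darglint2 -v 2 under that setting (a hypothetical example, not code from this repository):

def clamp_ttl(ttl: int, maximum: int = 3600) -> int:
    """Clamp a DNS record TTL to an allowed maximum.

    Args:
        ttl (int): requested TTL in seconds
        maximum (int): upper bound, defaults to 3600

    Returns:
        int: the clamped TTL

    Raises:
        ValueError: if ttl is negative
    """
    if ttl < 0:
        raise ValueError("ttl must be non-negative")
    return min(ttl, maximum)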
+ """ response.delete_cookie("id", httponly=True) await storage.delete_user_session(user.session_id) diff --git a/app/api/auth/router_mfa.py b/app/api/auth/router_mfa.py index eb7efbaf9..71efdcf6e 100644 --- a/app/api/auth/router_mfa.py +++ b/app/api/auth/router_mfa.py @@ -99,7 +99,12 @@ async def remove_mfa( session: FromDishka[AsyncSession], scope: Literal["ldap", "http"], ) -> None: - """Remove mfa credentials.""" + """Remove mfa credentials. + + Args: + session (FromDishka[AsyncSession]): Database session. + scope (Literal["ldap", "http"]): Scope of the credentials. + """ if scope == "http": keys = ["mfa_key", "mfa_secret"] else: diff --git a/app/api/auth/schema.py b/app/api/auth/schema.py index e4b7a7666..e62e483cb 100644 --- a/app/api/auth/schema.py +++ b/app/api/auth/schema.py @@ -42,7 +42,12 @@ def __init__( username: str = Form(), password: str = Form(), ): - """Initialize form.""" + """Initialize form. + + Args: + username (str): username + password (str): password + """ self.username = username self.password = password @@ -67,7 +72,7 @@ class SetupRequest(BaseModel): @field_validator("domain") def validate_domain(cls, v: str) -> str: # noqa FIXME why noqa? - """Description. + """Validate domain. Args: v (str): value @@ -93,7 +98,11 @@ class MFACreateRequest(BaseModel): @computed_field # type: ignore @property def key_name(self) -> str: - """Description.""" + """Get key name. + + Returns: + str: key name + """ if self.is_ldap_scope: return "mfa_key_ldap" @@ -102,7 +111,11 @@ def key_name(self) -> str: @computed_field # type: ignore @property def secret_name(self) -> str: - """Description.""" + """Get secret name. + + Returns: + str: secret name + """ if self.is_ldap_scope: return "mfa_secret_ldap" diff --git a/app/api/main/schema.py b/app/api/main/schema.py index 3d9bd6725..7cb90af82 100644 --- a/app/api/main/schema.py +++ b/app/api/main/schema.py @@ -62,7 +62,7 @@ class KerberosSetupRequest(BaseModel): class _PolicyFields: - """Description.""" + """Policy fields.""" name: str can_read: bool @@ -73,7 +73,7 @@ class _PolicyFields: class _MaterialFields: - """Description.""" + """Material fields.""" id: int diff --git a/app/api/network/schema.py b/app/api/network/schema.py index e2fb03d2e..3de74d32a 100644 --- a/app/api/network/schema.py +++ b/app/api/network/schema.py @@ -40,7 +40,11 @@ class NetmasksMixin: @computed_field # type: ignore @property def complete_netmasks(self) -> list[IPv4Address | IPv4Network]: - """Validate range or return networks range.""" + """Validate range or return networks range. + + Returns: + list[IPv4Address | IPv4Network]: complete netmasks + """ values = [] for item in self.netmasks: if isinstance(item, IPRange): @@ -54,13 +58,13 @@ def complete_netmasks(self) -> list[IPv4Address | IPv4Network]: @field_validator("groups") @classmethod def validate_group(cls, groups: list[str]) -> list[str]: - """Description. + """Validate groups. Args: groups (list[str]): groups names Returns: - list[str]: + list[str]: groups Raises: ValueError: Invalid DN @@ -75,13 +79,13 @@ def validate_group(cls, groups: list[str]) -> list[str]: @field_validator("mfa_groups") @classmethod def validate_mfa_group(cls, mfa_groups: list[str]) -> list[str]: - """Syka. + """Validate mfa groups. 
Args: mfa_groups (list[str]): mfa groups names Returns: - list[str]: + list[str]: mfa groups Raises: ValueError: Invalid DN @@ -107,7 +111,6 @@ def netmasks_serialize( Returns: list[str | dict]: ready to json serialized - """ values: list[str | dict] = [] diff --git a/app/config.py b/app/config.py index 14a9df4f8..861119d79 100644 --- a/app/config.py +++ b/app/config.py @@ -24,10 +24,10 @@ def _get_vendor_version() -> str: - """Description. + """Get vendor version. Returns: - str: + str: vendor version """ with open("/pyproject.toml", "rb") as f: return tomllib.load(f)["tool"]["poetry"]["version"] @@ -74,7 +74,11 @@ class Settings(BaseModel): @computed_field # type: ignore @cached_property def POSTGRES_URI(self) -> PostgresDsn: # noqa - """Build postgres DSN.""" + """Build postgres DSN. + + Returns: + PostgresDsn: postgres DSN + """ return PostgresDsn( f"{self.POSTGRES_SCHEMA}://" f"{self.POSTGRES_USER}:" @@ -150,7 +154,6 @@ def MFA_API_URI(self) -> str: # noqa: N802 Returns: str: url - """ if self.MFA_API_SOURCE == "dev": return "https://api.multifactor.dev" diff --git a/app/extra/scripts/principal_block_user_sync.py b/app/extra/scripts/principal_block_user_sync.py index a6e3f41ea..1e43652cb 100644 --- a/app/extra/scripts/principal_block_user_sync.py +++ b/app/extra/scripts/principal_block_user_sync.py @@ -26,7 +26,12 @@ async def principal_block_sync( session: AsyncSession, settings: Settings, ) -> None: - """Synchronize principal and user account blocking.""" + """Synchronize principal and user account blocking. + + Args: + session (AsyncSession): Database session. + settings (Settings): Settings. + """ for user in await session.scalars(select(User)): uac_check = await get_check_uac(session, user.directory_id) if uac_check(UserAccountControlFlag.ACCOUNTDISABLE): @@ -95,9 +100,8 @@ def _find_krb_exp_attr(directory: Directory) -> Attribute | None: directory (Directory): the directory object Returns: - Atrribute | None: the attribute with the name - Atrribute | None: the attribute with the name - 'krbprincipalexpiration', or None if not found. + Attribute | None: the attribute with the name + 'krbprincipalexpiration', or None if not found. """ for attr in directory.attributes: if attr.name == "krbprincipalexpiration": diff --git a/app/extra/scripts/update_krb5_config.py b/app/extra/scripts/update_krb5_config.py index 45c4f570d..85db92691 100644 --- a/app/extra/scripts/update_krb5_config.py +++ b/app/extra/scripts/update_krb5_config.py @@ -17,7 +17,13 @@ async def update_krb5_config( session: AsyncSession, settings: Settings, ) -> None: - """Update kerberos config.""" + """Update kerberos config. + + Args: + kadmin (AbstractKadmin): Kerberos client. + session (AsyncSession): Database session. + settings (Settings): Settings. + """ if not (await kadmin.get_status(wait_for_positive=True)): logger.error("kadmin_api is not running") return diff --git a/app/extra/setup_dev.py b/app/extra/setup_dev.py index 1b97d0b0e..a63c47557 100644 --- a/app/extra/setup_dev.py +++ b/app/extra/setup_dev.py @@ -53,7 +53,14 @@ async def _create_dir( domain: Directory, parent: Directory | None = None, ) -> None: - """Create data recursively.""" + """Create data recursively. 
+ + Args: + data (dict): data + session (AsyncSession): Database session + domain (Directory): domain + parent (Directory | None): parent + """ dir_ = Directory( object_class=data["object_class"], name=data["name"], diff --git a/app/ioc.py b/app/ioc.py index afa77c907..24db3ccee 100644 --- a/app/ioc.py +++ b/app/ioc.py @@ -106,7 +106,14 @@ async def get_krb_class( self, session_maker: async_sessionmaker[AsyncSession], ) -> type[AbstractKadmin]: - """Get kerberos type.""" + """Get kerberos type. + + Args: + session_maker (async_sessionmaker[AsyncSession]): session maker + + Returns: + type[AbstractKadmin]: kerberos class + """ async with session_maker() as session: return await get_kerberos_class(session) @@ -157,7 +164,14 @@ async def get_dns_mngr_class( self, session_maker: async_sessionmaker[AsyncSession], ) -> type[AbstractDNSManager]: - """Get DNS manager type.""" + """Get DNS manager type. + + Args: + session_maker (async_sessionmaker[AsyncSession]): session maker + + Returns: + type[AbstractDNSManager]: DNS manager class + """ async with session_maker() as session: return await get_dns_manager_class(session) @@ -167,7 +181,15 @@ async def get_dns_mngr_settings( session_maker: async_sessionmaker[AsyncSession], settings: Settings, ) -> DNSManagerSettings: - """Get DNS manager's settings.""" + """Get DNS manager's settings. + + Args: + session_maker (async_sessionmaker[AsyncSession]): session maker + settings (Settings): app settings + + Returns: + DNSManagerSettings: DNS manager settings + """ resolve_coro = resolve_dns_server_ip(settings.DNS_BIND_HOST) async with session_maker() as session: return await get_dns_manager_settings(session, resolve_coro) @@ -219,7 +241,15 @@ async def get_session_storage( client: SessionStorageClient, settings: Settings, ) -> SessionStorage: - """Get session storage.""" + """Get session storage. + + Args: + client (SessionStorageClient): session storage client + settings (Settings): app settings + + Returns: + SessionStorage: session storage + """ return RedisSessionStorage( client, settings.SESSION_KEY_LENGTH, @@ -234,7 +264,11 @@ class HTTPProvider(Provider): @provide(provides=LDAPSession) async def get_session(self) -> LDAPSession: - """Create ldap session.""" + """Create ldap session. + + Returns: + LDAPSession: ldap session + """ return LDAPSession() @@ -245,7 +279,14 @@ class LDAPServerProvider(Provider): @provide(scope=Scope.SESSION, provides=LDAPSession) async def get_session(self, storage: SessionStorage) -> LDAPSession: - """Create ldap session.""" + """Create ldap session. + + Args: + storage (SessionStorage): session storage + + Returns: + LDAPSession: ldap session + """ return LDAPSession(storage=storage) diff --git a/app/ldap_protocol/dialogue.py b/app/ldap_protocol/dialogue.py index 33f5dc1e4..9fd42d0f8 100644 --- a/app/ldap_protocol/dialogue.py +++ b/app/ldap_protocol/dialogue.py @@ -50,7 +50,15 @@ async def from_db( user: User, session_id: str, ) -> UserSchema: - """Create model from db model.""" + """Create model from db model. + + Args: + user (User): instance of User + session_id (str): session id + + Returns: + UserSchema: instance of UserSchema + """ return cls( id=user.id, session_id=session_id.split(".")[0], @@ -85,7 +93,12 @@ def __init__( user: UserSchema | None = None, storage: SessionStorage | None = None, ) -> None: - """Set lock.""" + """Set lock. 
+ + Args: + user (UserSchema | None): instance of UserSchema + storage (SessionStorage | None): instance of SessionStorage + """ self._lock = asyncio.Lock() self._user: UserSchema | None = user self.queue: asyncio.Queue[LDAPRequestMessage] = asyncio.Queue() @@ -93,17 +106,25 @@ def __init__( self.storage = storage def __str__(self) -> str: - """Session with id.""" + """Session with id. + + Returns: + str: session with id + """ return f"LDAPSession({self.id})" @property def user(self) -> UserSchema | None: - """User getter, not implemented.""" + """User getter, not implemented. + + Returns: + UserSchema | None: instance of UserSchema + """ return self._user @user.setter def user(self, user: User) -> None: - """Description. + """User setter. Args: user (User): instance of User @@ -116,7 +137,11 @@ def user(self, user: User) -> None: ) async def set_user(self, user: User | UserSchema) -> None: - """Bind user to session concurrently save.""" + """Bind user to session concurrently save. + + Args: + user (User | UserSchema): instance of User or UserSchema + """ async with self._lock: if isinstance(user, User): self._user = await UserSchema.from_db(user, self.key) @@ -132,7 +157,11 @@ async def delete_user(self) -> None: self._user = None async def get_user(self) -> UserSchema | None: - """Get user from session concurrently save.""" + """Get user from session concurrently save. + + Returns: + UserSchema | None: instance of UserSchema + """ async with self._lock: return self._user @@ -141,7 +170,7 @@ async def lock(self) -> AsyncIterator[UserSchema | None]: """Lock session, user cannot be deleted or get while lock is set. Yields: - AsyncIterator[UserSchema | None] + AsyncIterator[UserSchema | None]: instance of UserSchema """ async with self._lock: yield self._user @@ -178,11 +207,19 @@ async def validate_conn( @property def key(self) -> str: - """Get key.""" + """Get key. + + Returns: + str: key + """ return f"ldap:{self.id}" def _bound_ip(self) -> bool: - """Description.""" + """Check if ip is bound. + + Returns: + bool: True if ip is bound, False otherwise + """ return hasattr(self, "ip") async def bind_session(self) -> None: diff --git a/app/ldap_protocol/dns.py b/app/ldap_protocol/dns.py index 2d2549360..506f02323 100644 --- a/app/ldap_protocol/dns.py +++ b/app/ldap_protocol/dns.py @@ -162,7 +162,11 @@ class AbstractDNSManager(ABC): """Abstract DNS manager class.""" def __init__(self, settings: DNSManagerSettings) -> None: - """Set up DNS manager.""" + """Set up DNS manager. + + Args: + settings (DNSManagerSettings): DNS manager settings + """ self._dns_settings = settings @logger_wraps() @@ -176,7 +180,17 @@ async def setup( tsig_key: str | None, named_conf_local_part: str | None, ) -> None: - """Set up DNS server and DNS manager.""" + """Set up DNS server and DNS manager. + + Args: + session (AsyncSession): Database session + settings (Settings): Settings + domain (str): Domain name + dns_ip_address (str | None): DNS server IP address + zone_file (str | None): Zone file + tsig_key (str | None): TSIG key + named_conf_local_part (str | None): Named conf local part + """ if zone_file is not None and named_conf_local_part is not None: with open(settings.DNS_ZONE_FILE, "w") as f: f.write(zone_file) @@ -271,7 +285,14 @@ async def create_record( record_type: str, ttl: int | None, ) -> None: - """Create DNS record.""" + """Create DNS record. 
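The DNS manager's create/update/delete record methods in this hunk build an Update action on the zone and add, replace, or delete entries on it; dnspython's dynamic-update API (RFC 2136) follows the same shape. A hedged sketch with plain dnspython, where the zone name and server address are placeholders:

import dns.query
import dns.update

update = dns.update.Update("example.test")       # zone to modify
update.add("host1", 300, "A", "192.0.2.10")      # add an A record with a 300s TTL
response = dns.query.tcp(update, "192.0.2.1")    # send to the authoritative server
print(response.rcode())                          # 0 (NOERROR) on success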
+ + Args: + hostname (str): Hostname + ip (str): IP address + record_type (str): Record type + ttl (int | None): TTL + """ action = Update(self._dns_settings.zone_name) action.add(hostname, ttl, record_type, ip) @@ -338,7 +359,14 @@ async def update_record( record_type: str, ttl: int | None, ) -> None: - """Update DNS record.""" + """Update DNS record. + + Args: + hostname (str): Hostname + ip (str | None): IP address + record_type (str): Record type + ttl (int | None): TTL + """ action = Update(self._dns_settings.zone_name) action.replace(hostname, ttl, record_type, ip) @@ -351,7 +379,13 @@ async def delete_record( ip: str, record_type: str, ) -> None: - """Delete DNS record.""" + """Delete DNS record. + + Args: + hostname (str): Hostname + ip (str): IP address + record_type (str): Record type + """ action = Update(self._dns_settings.zone_name) action.delete(hostname, record_type, ip) diff --git a/app/ldap_protocol/filter_interpreter.py b/app/ldap_protocol/filter_interpreter.py index 1ef0cdea3..6ad386bb1 100644 --- a/app/ldap_protocol/filter_interpreter.py +++ b/app/ldap_protocol/filter_interpreter.py @@ -29,7 +29,7 @@ def _get_substring(right: ASN1Row) -> str: # RFC 4511 - """Description. + """Get substring. Args: right (ASN1Row): Row with metadata @@ -52,7 +52,7 @@ def _from_filter( attr: str, right: ASN1Row, ) -> UnaryExpression: - """Description. + """Get filter from item. Args: model (type): Any Model @@ -198,7 +198,7 @@ def _ldap_filter_by_attribute( def _cast_item(item: ASN1Row) -> UnaryExpression | ColumnElement: - """Description. + """Cast item to sqlalchemy condition. Args: item (ASN1Row): Row with metadata @@ -286,7 +286,14 @@ def _from_str_filter( def _api_filter(item: Filter) -> UnaryExpression: - """Retrieve query conditions based on the specified LDAP attribute.""" + """Retrieve query conditions based on the specified LDAP attribute. + + Args: + item (Filter): LDAP filter + + Returns: + UnaryExpression + """ filter_func = _get_filter_function(item.attr) return filter_func(item.val) diff --git a/app/ldap_protocol/kerberos/base.py b/app/ldap_protocol/kerberos/base.py index fb2325c0d..561ab9417 100644 --- a/app/ldap_protocol/kerberos/base.py +++ b/app/ldap_protocol/kerberos/base.py @@ -170,7 +170,20 @@ async def setup( kdc_config: str, ldap_keytab_path: str, ) -> None: - """Request Setup.""" + """Request Setup. + + Args: + domain (str): domain + admin_dn (str): admin_dn + services_dn (str): services_dn + krbadmin_dn (str): krbadmin_dn + krbadmin_password (str): krbadmin_password + admin_password (str): admin_password + stash_password (str): stash_password + krb5_config (str): krb5_config + kdc_config (str): kdc_config + ldap_keytab_path (str): ldap keytab path + """ await self.setup_configs(krb5_config, kdc_config) await self.setup_stash( domain, diff --git a/app/ldap_protocol/kerberos/stub.py b/app/ldap_protocol/kerberos/stub.py index 1c093927f..5a01b82d4 100644 --- a/app/ldap_protocol/kerberos/stub.py +++ b/app/ldap_protocol/kerberos/stub.py @@ -11,7 +11,12 @@ class StubKadminMDADPIClient(AbstractKadmin): @logger_wraps() async def setup(self, *args, **kwargs) -> None: # type: ignore - """Call setup.""" + """Call setup. 
+ + Args: + *args: arguments + **kwargs: keyword arguments + """ await super().setup(*args, **kwargs) @logger_wraps(is_stub=True) diff --git a/app/ldap_protocol/kerberos/utils.py b/app/ldap_protocol/kerberos/utils.py index d1b9dc579..26f2cd0ff 100644 --- a/app/ldap_protocol/kerberos/utils.py +++ b/app/ldap_protocol/kerberos/utils.py @@ -23,7 +23,7 @@ def logger_wraps(is_stub: bool = False) -> Callable: """ def wrapper(func: Callable) -> Callable: - """Description. + """Wrap kadmin calls. Args: func (Callable): any function diff --git a/app/ldap_protocol/ldap_requests/abandon.py b/app/ldap_protocol/ldap_requests/abandon.py index dcf2a60f3..6d0dc65f4 100644 --- a/app/ldap_protocol/ldap_requests/abandon.py +++ b/app/ldap_protocol/ldap_requests/abandon.py @@ -31,7 +31,11 @@ def from_data(cls, data: dict[str, list[ASN1Row]]) -> "AbandonRequest": # noqa: return cls(message_id=1) async def handle(self) -> AsyncGenerator: - """Handle message with current user.""" + """Handle message with current user. + + Returns: + AsyncGenerator: Async generator. + """ await asyncio.sleep(0) return yield # type: ignore diff --git a/app/ldap_protocol/ldap_requests/add.py b/app/ldap_protocol/ldap_requests/add.py index a32c93e2a..1ebd433f4 100644 --- a/app/ldap_protocol/ldap_requests/add.py +++ b/app/ldap_protocol/ldap_requests/add.py @@ -71,12 +71,20 @@ class AddRequest(BaseRequest): @property def attr_names(self) -> dict[str, list[str | bytes]]: - """Description.""" + """Get attribute names. + + Returns: + dict[str, list[str | bytes]]: attribute names + """ return {attr.l_name: attr.vals for attr in self.attributes} @property def attributes_dict(self) -> dict[str, list[str | bytes]]: - """Description.""" + """Get attributes dictionary. + + Returns: + dict[str, list[str | bytes]]: attributes dictionary + """ return {attr.type: attr.vals for attr in self.attributes} @classmethod diff --git a/app/ldap_protocol/ldap_requests/bind_methods/base.py b/app/ldap_protocol/ldap_requests/bind_methods/base.py index 97b4f3843..e47c74c34 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/base.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/base.py @@ -48,7 +48,11 @@ class LDAPBindErrors(StrEnum): ACCOUNT_LOCKED_OUT = "775" def __str__(self) -> str: - """Return the error message as a string.""" + """Return the error message as a string. + + Returns: + str: Error message + """ return ( "80090308: LdapErr: DSID-0C09030B, " "comment: AcceptSecurityContext error, " @@ -96,11 +100,20 @@ def is_valid(self, user: User) -> bool: @abstractmethod def is_anonymous(self) -> bool: - """Description.""" + """Check if anonymous. + + Returns: + bool: True if anonymous, False otherwise + """ @abstractmethod async def get_user(self, session: AsyncSession, username: str) -> User: - """Get user.""" + """Get user. + + Args: + session (AsyncSession): sqlalchemy session + username (str): username + """ class SaslAuthentication(AbstractLDAPAuth): @@ -115,5 +128,8 @@ def from_data(cls, data: list[ASN1Row]) -> "SaslAuthentication": """Get auth from data. 
Args: - data: list[ASN1Row]: + data (list[ASN1Row]): data + + Returns: + SaslAuthentication: sasl authentication """ diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py index 34723c4f7..dfb0ac373 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py @@ -95,7 +95,6 @@ def is_anonymous(self) -> bool: Returns: bool: status - """ return False @@ -156,7 +155,6 @@ def _handle_ticket( Returns: GSSAPIAuthStatus: status - """ try: out_token = server_ctx.step(self.ticket) @@ -173,7 +171,6 @@ def _validate_security_layer(self, client_layer: GSSAPISL) -> bool: Returns: bool: validate result - """ supported = GSSAPISL.SUPPORTED_SECURITY_LAYERS return (client_layer & supported) == client_layer diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py index f63880b56..4bffa4e51 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py @@ -45,7 +45,6 @@ def is_anonymous(self) -> bool: Returns: bool: status - """ return False diff --git a/app/ldap_protocol/ldap_requests/delete.py b/app/ldap_protocol/ldap_requests/delete.py index 501f1710b..0d3696901 100644 --- a/app/ldap_protocol/ldap_requests/delete.py +++ b/app/ldap_protocol/ldap_requests/delete.py @@ -44,7 +44,7 @@ class DeleteRequest(BaseRequest): @classmethod def from_data(cls, data: ASN1Row) -> "DeleteRequest": - """Description. + """Get delete request from data. Args: data (ASN1Row): ASN1Row containing the entry to delete. diff --git a/app/ldap_protocol/ldap_requests/extended.py b/app/ldap_protocol/ldap_requests/extended.py index 5b757e683..ae9e2bc3d 100644 --- a/app/ldap_protocol/ldap_requests/extended.py +++ b/app/ldap_protocol/ldap_requests/extended.py @@ -55,18 +55,27 @@ async def handle( kadmin: AbstractKadmin, settings: Settings, ) -> BaseExtendedResponseValue: - """Generate specific extended resoponse.""" + """Generate specific extended resoponse. + + Args: + ldap_session (LDAPSession): LDAP session + session (AsyncSession): Database session + kadmin (AbstractKadmin): Kerberos client + settings (Settings): Settings + + Returns: + BaseExtendedResponseValue + """ @staticmethod def _decode_value(data: ASN1Row) -> ASN1Row: - """Description. + """Decode value. Args: data: ASN1Row Returns: ASN1Row - """ dec = Decoder() dec.start(data[1].value) # type: ignore @@ -128,7 +137,17 @@ async def handle( kadmin: AbstractKadmin, # noqa: ARG002 settings: Settings, # noqa: ARG002 ) -> "WhoAmIResponse": - """Return user from session.""" + """Return user from session. + + Args: + ldap_session (LDAPSession): LDAP session + _ (AsyncSession): Database session + kadmin (AbstractKadmin): Kerberos client + settings (Settings): Settings + + Returns: + WhoAmIResponse + """ un = ( f"u:{ldap_session.user.user_principal_name}" if ldap_session.user @@ -204,7 +223,7 @@ class PasswdModifyResponse(BaseExtendedResponseValue): gen_passwd: str = "" def get_value(self) -> str | None: - """Description. + """Get response value. 
Returns: str | None diff --git a/app/ldap_protocol/ldap_requests/modify.py b/app/ldap_protocol/ldap_requests/modify.py index 611dcace7..d46aeba31 100644 --- a/app/ldap_protocol/ldap_requests/modify.py +++ b/app/ldap_protocol/ldap_requests/modify.py @@ -67,7 +67,11 @@ class Changes(BaseModel): modification: PartialAttribute def get_name(self) -> str: - """Get mod name.""" + """Get mod name. + + Returns: + str: mod name + """ return self.modification.type.lower() @@ -105,13 +109,13 @@ class ModifyRequest(BaseRequest): @classmethod def from_data(cls, data: list[ASN1Row]) -> "ModifyRequest": - """Description. + """Get modify request from data. Args: - data: list[ASN1Row]: + data (list[ASN1Row]): data Returns: - ModifyRequest + ModifyRequest: modify request """ entry, proto_changes = data @@ -136,7 +140,12 @@ async def _update_password_expiration( change: Changes, session: AsyncSession, ) -> None: - """Update password expiration if policy allows.""" + """Update password expiration if policy allows. + + Args: + change (Changes): Change + session (AsyncSession): Database session + """ if not ( change.modification.type == "krbpasswordexpiration" and change.modification.vals[0] == "19700101000000Z" @@ -249,14 +258,14 @@ async def handle( yield ModifyResponse(result_code=LDAPCodes.SUCCESS) def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: - """Description. + """Match bad response. Args: err (BaseException): error Returns: - tuple[LDAPCodes, str] - """ # noqa: DAR401 + tuple[LDAPCodes, str]: result code and message + """ match err: case ValueError(): logger.error(f"Invalid value: {err}") @@ -278,7 +287,11 @@ def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: raise err def _get_dir_query(self) -> Select: - """Description.""" + """Get directory query. + + Returns: + Select: directory query + """ return ( select(Directory) .join(Directory.attributes) @@ -295,7 +308,7 @@ def _check_password_change_requested( directory: Directory, user_dir_id: int, ) -> bool: - """Description. + """Check if password change is requested. Args: names (set[str]): attr names @@ -303,7 +316,7 @@ def _check_password_change_requested( user_dir_id (int): user id Returns: - bool: + bool: True if password change is requested, False otherwise """ return ( ("userpassword" in names or "unicodepwd" in names) diff --git a/app/ldap_protocol/ldap_requests/search.py b/app/ldap_protocol/ldap_requests/search.py index a4cf2408d..5864ed826 100644 --- a/app/ldap_protocol/ldap_requests/search.py +++ b/app/ldap_protocol/ldap_requests/search.py @@ -112,7 +112,7 @@ def serialize_filter(self, val: ASN1Row | None, _info: Any) -> str | None: # no @classmethod def from_data(cls, data: dict[str, list[ASN1Row]]) -> "SearchRequest": - """Description. + """Get search request from data. Args: data (dict[str, list[ASN1Row]]): data @@ -144,7 +144,11 @@ def from_data(cls, data: dict[str, list[ASN1Row]]) -> "SearchRequest": @cached_property def requested_attrs(self) -> list[str]: - """Description.""" + """Get requested attributes. + + Returns: + list[str]: requested attributes + """ return [attr.lower() for attr in self.attributes] async def _get_subschema(self, session: AsyncSession) -> SearchResultEntry: @@ -334,22 +338,38 @@ async def get_result( @cached_property def member_of(self) -> bool: - """Description.""" + """Check if member of is requested. 
+ + Returns: + bool: True if member of is requested, False otherwise + """ return "memberof" in self.requested_attrs or self.all_attrs @cached_property def member(self) -> bool: - """Description.""" + """Check if member is requested. + + Returns: + bool: True if member is requested, False otherwise + """ return "member" in self.requested_attrs or self.all_attrs @cached_property def token_groups(self) -> bool: - """Description.""" + """Check if token groups is requested. + + Returns: + bool: True if token groups is requested, False otherwise + """ return "tokengroups" in self.requested_attrs @cached_property def all_attrs(self) -> bool: - """Description.""" + """Check if all attributes are requested. + + Returns: + bool: True if all attributes are requested, False otherwise + """ return "*" in self.requested_attrs or not self.requested_attrs def build_query( @@ -464,7 +484,15 @@ async def tree_view( # noqa: C901 query: Select, session: AsyncSession, ) -> AsyncGenerator[SearchResultEntry, None]: - """Yield all resulted directories.""" + """Yield all resulted directories. + + Args: + query (Select): SQLAlchemy select query + session (AsyncSession): async session + + Yields: + AsyncGenerator[SearchResultEntry, None]: yielded directories + """ directories = await session.stream_scalars(query) # logger.debug(query.compile(compile_kwargs={"literal_binds": True})) # noqa diff --git a/app/ldap_protocol/ldap_responses.py b/app/ldap_protocol/ldap_responses.py index d8c7627ea..cb180c7c9 100644 --- a/app/ldap_protocol/ldap_responses.py +++ b/app/ldap_protocol/ldap_responses.py @@ -45,7 +45,11 @@ class BaseEncoder(BaseModel): """Class with encoder methods.""" def _get_asn1_fields(self) -> dict: - """Description.""" + """Get ASN1 fields. + + Returns: + dict: ASN1 fields + """ fields = self.model_dump() fields.pop("PROTOCOL_OP", None) return fields @@ -66,7 +70,11 @@ class BaseResponse(ABC, BaseEncoder): @property @abstractmethod def PROTOCOL_OP(self) -> int: # noqa: N802 - """Protocol OP response code.""" + """Protocol OP response code. + + Returns: + int: Protocol OP response code + """ class BindResponse(LDAPResult, BaseResponse): @@ -106,13 +114,17 @@ class PartialAttribute(BaseModel): @property def l_name(self) -> str: - """Get lower case name.""" + """Get lower case name. + + Returns: + str: lower case name + """ return self.type.lower() @field_validator("type", mode="before") @classmethod def validate_type(cls, v: str | bytes | int) -> str: - """Description. + """Validate type. Args: v (str | bytes | int): value @@ -125,7 +137,7 @@ def validate_type(cls, v: str | bytes | int) -> str: @field_validator("vals", mode="before") @classmethod def validate_vals(cls, vals: list[str | int | bytes]) -> list[str | bytes]: - """Description. + """Validate vals. Args: vals (list[str | int | bytes]): values @@ -194,7 +206,11 @@ class SearchResultDone(LDAPResult, BaseResponse): total_objects: int = 0 def _get_asn1_fields(self) -> dict: - """Description.""" + """Get ASN1 fields. + + Returns: + dict: ASN1 fields + """ fields = super()._get_asn1_fields() fields.pop("total_pages") fields.pop("total_objects") @@ -249,7 +265,11 @@ class BaseExtendedResponseValue(ABC, BaseEncoder): @abstractmethod def get_value(self) -> str | None: - """Get response value.""" + """Get response value. 
+ + Returns: + str | None: response value + """ class ExtendedResponse(LDAPResult, BaseResponse): diff --git a/app/ldap_protocol/ldap_schema/attribute_type_crud.py b/app/ldap_protocol/ldap_schema/attribute_type_crud.py index 83d72793d..d134516dc 100644 --- a/app/ldap_protocol/ldap_schema/attribute_type_crud.py +++ b/app/ldap_protocol/ldap_schema/attribute_type_crud.py @@ -184,12 +184,9 @@ async def delete_attribute_types_by_names( Args: attribute_type_names (list[str]): List of Attribute Types OIDs. session (AsyncSession): Database session. - - Returns: - None """ if not attribute_type_names: - return None + return await session.execute( delete(AttributeType) diff --git a/app/ldap_protocol/messages.py b/app/ldap_protocol/messages.py index 1b884ebb6..317140b37 100644 --- a/app/ldap_protocol/messages.py +++ b/app/ldap_protocol/messages.py @@ -36,7 +36,11 @@ class LDAPMessage(ABC, BaseModel): @property def name(self) -> str: - """Message name.""" + """Message name. + + Returns: + str: message name + """ return get_class_name(self.context) diff --git a/app/ldap_protocol/policies/password_policy.py b/app/ldap_protocol/policies/password_policy.py index 32a33ca12..ecadab424 100644 --- a/app/ldap_protocol/policies/password_policy.py +++ b/app/ldap_protocol/policies/password_policy.py @@ -72,7 +72,7 @@ class PasswordPolicySchema(BaseModel): @model_validator(mode="after") def _validate_minimum_pwd_age(self) -> "PasswordPolicySchema": - """Description. + """Validate minimum password age. Returns: self diff --git a/app/ldap_protocol/server.py b/app/ldap_protocol/server.py index 2658178ab..484984957 100644 --- a/app/ldap_protocol/server.py +++ b/app/ldap_protocol/server.py @@ -57,7 +57,12 @@ class PoolClientHandler: ssl_context: ssl.SSLContext | None = None def __init__(self, settings: Settings, container: AsyncContainer): - """Set workers number for single client concurrent handling.""" + """Set workers number for single client concurrent handling. + + Args: + settings (Settings): settings + container (AsyncContainer): container + """ self.container = container self.settings = settings @@ -77,7 +82,12 @@ async def __call__( reader: asyncio.StreamReader, writer: asyncio.StreamWriter, ) -> None: - """Create session, queue and start message handlers concurrently.""" + """Create session, queue and start message handlers concurrently. + + Args: + reader (asyncio.StreamReader): reader + writer (asyncio.StreamWriter): writer + """ async with self.container(scope=Scope.SESSION) as session_scope: ldap_session = await session_scope.get(LDAPSession) addr, first_chunk = await self.recieve( @@ -360,7 +370,6 @@ def _req_log_full(addr: str, msg: LDAPRequestMessage) -> None: Args: addr (str): address msg (LDAPRequestMessage): message - """ log.debug( f"\nFrom: {addr!r}\n{msg.name}[{msg.message_id}]: " @@ -387,7 +396,6 @@ def _log_short(addr: str, msg: LDAPMessage) -> None: Args: addr (str): address msg (LDAPMessage): message - """ log.info(f"\n{addr!r}: {msg.name}[{msg.message_id}]\n") @@ -496,7 +504,11 @@ async def _handle_responses( await asyncio.gather(*tasks) async def _get_server(self) -> asyncio.base_events.Server: - """Get async server.""" + """Get async server. + + Returns: + asyncio.base_events.Server: async server + """ return await asyncio.start_server( self, str(self.settings.HOST), @@ -507,17 +519,20 @@ async def _get_server(self) -> asyncio.base_events.Server: @staticmethod async def _run_server(server: asyncio.base_events.Server) -> None: - """Run server.""" + """Run server. 
+ + Args: + server (asyncio.base_events.Server): async server + """ async with server: await server.serve_forever() @staticmethod def log_addrs(server: asyncio.base_events.Server) -> None: - """Description. + """Log server addresses. Args: - server: asyncio.base_events.Server: - + server (asyncio.base_events.Server): async server """ addrs = ", ".join(str(sock.getsockname()) for sock in server.sockets) log.info(f"Server on {addrs}") diff --git a/app/ldap_protocol/session_storage.py b/app/ldap_protocol/session_storage.py index 0ecce67a5..f3bb934f7 100644 --- a/app/ldap_protocol/session_storage.py +++ b/app/ldap_protocol/session_storage.py @@ -116,7 +116,7 @@ async def delete_user_session(self, session_id: str) -> None: @staticmethod def _sign(session_id: str, settings: Settings) -> str: - """Description. + """Sign session id. Args: session_id (str): Session id @@ -124,7 +124,6 @@ def _sign(session_id: str, settings: Settings) -> str: Returns: str: The HMAC signature for the session_id using provided settings. - """ return hmac.new( settings.SECRET_KEY.encode(), @@ -144,7 +143,7 @@ def get_user_agent_hash(self, user_agent: str) -> str: return hashlib.blake2b(user_agent.encode(), digest_size=6).hexdigest() def _get_ip_session_key(self, ip: str, protocol: ProtocolType) -> str: - """Description. + """Get ip session key. Args: ip (str): IP @@ -156,7 +155,7 @@ def _get_ip_session_key(self, ip: str, protocol: ProtocolType) -> str: return f"ip:{protocol}:{ip}" def _get_user_session_key(self, uid: int, protocol: ProtocolType) -> str: - """Description. + """Get user session key. Args: uid (int): uid @@ -168,7 +167,7 @@ def _get_user_session_key(self, uid: int, protocol: ProtocolType) -> str: return f"keys:{protocol}:{uid}" def _get_protocol(self, session_id: str) -> ProtocolType: - """Description. + """Get protocol. Args: session_id (str): Session id @@ -183,7 +182,6 @@ def _generate_key(self) -> str: Returns: str: A new key. - """ return f"http:{token_hex(self.key_length)}" diff --git a/app/ldap_protocol/utils/const.py b/app/ldap_protocol/utils/const.py index 6c7e7f745..79da3d901 100644 --- a/app/ldap_protocol/utils/const.py +++ b/app/ldap_protocol/utils/const.py @@ -13,7 +13,7 @@ def _type_validate_entry(entry: str) -> str: - """Description. + """Validate entry name. Args: entry (str): entry name @@ -35,7 +35,7 @@ def _type_validate_entry(entry: str) -> str: def _type_validate_email(email: str) -> str: - """Description. + """Validate email. Args: email (str): email address diff --git a/app/ldap_protocol/utils/helpers.py b/app/ldap_protocol/utils/helpers.py index 05bfc14ab..ab3129174 100644 --- a/app/ldap_protocol/utils/helpers.py +++ b/app/ldap_protocol/utils/helpers.py @@ -285,7 +285,11 @@ def ft_to_dt(filetime: int) -> datetime: def ft_now() -> str: - """Get now filetime timestamp.""" + """Get now filetime timestamp. + + Returns: + str: now filetime timestamp + """ return str(dt_to_ft(datetime.now(tz=ZoneInfo("UTC")))) @@ -347,7 +351,11 @@ def create_object_sid( def generate_domain_sid() -> str: - """Generate domain objectSid attr.""" + """Generate domain objectSid attr. 
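generate_domain_sid composes the domain objectSid from random 32-bit sub-authorities. For orientation, a hedged sketch (not the project's exact implementation) of the conventional S-1-5-21-<a>-<b>-<c> layout:

import random

def sketch_domain_sid() -> str:
    # three random sub-authorities joined under the S-1-5-21 domain prefix
    subs = [random.randint(1_000_000_000, (1 << 32) - 1) for _ in range(3)]
    return "S-1-5-21-" + "-".join(str(sub) for sub in subs)

print(sketch_domain_sid())  # e.g. S-1-5-21-3623811015-3361044348-1030300820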
+ + Returns: + str: domain objectSid attr + """ sub_authorities = [ random.randint(1000000000, (1 << 32) - 1), random.randint(1000000000, (1 << 32) - 1), diff --git a/app/ldap_protocol/utils/pagination.py b/app/ldap_protocol/utils/pagination.py index 54b2d916c..2bd047c67 100644 --- a/app/ldap_protocol/utils/pagination.py +++ b/app/ldap_protocol/utils/pagination.py @@ -73,6 +73,9 @@ def from_db(cls, sqla_instance: S) -> "BaseSchemaModel[S]": Args: sqla_instance (S): instance of SQLAlchemy Model + + Returns: + BaseSchemaModel[S]: instance of Schema """ diff --git a/app/ldap_protocol/utils/queries.py b/app/ldap_protocol/utils/queries.py index 0d9da4e69..615908a7f 100644 --- a/app/ldap_protocol/utils/queries.py +++ b/app/ldap_protocol/utils/queries.py @@ -28,7 +28,14 @@ @cache async def get_base_directories(session: AsyncSession) -> list[Directory]: - """Get base domain directories.""" + """Get base domain directories. + + Args: + session (AsyncSession): sqlalchemy session + + Returns: + list[Directory]: base domain directories + """ result = await session.execute( select(Directory) .filter(Directory.parent_id.is_(None)) @@ -68,7 +75,15 @@ async def get_directories( dn_list: list[ENTRY_TYPE], session: AsyncSession, ) -> list[Directory]: - """Get directories by dn list.""" + """Get directories by dn list. + + Args: + dn_list (list[ENTRY_TYPE]): dn list + session (AsyncSession): sqlalchemy session + + Returns: + list[Directory]: directories + """ paths = [] for dn in dn_list: @@ -93,7 +108,15 @@ async def get_directories( async def get_groups(dn_list: list[str], session: AsyncSession) -> list[Group]: - """Get dirs with groups by dn list.""" + """Get dirs with groups by dn list. + + Args: + dn_list (list[str]): dn list + session (AsyncSession): sqlalchemy session + + Returns: + list[Group]: groups + """ return [ directory.group for directory in await get_directories(dn_list, session) @@ -166,7 +189,13 @@ async def set_last_logon_user( session: AsyncSession, tz: ZoneInfo, ) -> None: - """Update lastLogon attr.""" + """Update lastLogon attr. + + Args: + user (User): user + session (AsyncSession): sqlalchemy session + tz (ZoneInfo): timezone info + """ await session.execute( update(User) .values({"last_logon": datetime.now(tz=tz)}) diff --git a/app/ldap_protocol/utils/raw_definition_parser.py b/app/ldap_protocol/utils/raw_definition_parser.py index eb009b4e9..9599c4e34 100644 --- a/app/ldap_protocol/utils/raw_definition_parser.py +++ b/app/ldap_protocol/utils/raw_definition_parser.py @@ -16,7 +16,7 @@ class RawDefinitionParser: @staticmethod def _list_to_string(data: list[str]) -> str | None: - """Description. + """Convert list to string. Args: data (list[str]): list of strings @@ -35,7 +35,7 @@ def _list_to_string(data: list[str]) -> str | None: @staticmethod def _get_attribute_type_info(raw_definition: str) -> AttributeTypeInfo: - """Description. + """Get attribute type info. Args: raw_definition (str): raw definition of attribute type @@ -48,7 +48,7 @@ def _get_attribute_type_info(raw_definition: str) -> AttributeTypeInfo: @staticmethod def get_object_class_info(raw_definition: str) -> ObjectClassInfo: - """Description. + """Get object class info. Args: raw_definition (str): raw definition of object class @@ -74,7 +74,7 @@ async def _get_attribute_types_by_names( def create_attribute_type_by_raw( raw_definition: str, ) -> AttributeType: - """Description. + """Create attribute type by raw definition. 
Args: raw_definition (str): raw definition of attribute type diff --git a/app/models.py b/app/models.py index 84b462433..2f74496a2 100644 --- a/app/models.py +++ b/app/models.py @@ -241,7 +241,7 @@ class Directory(Base): @property def attributes_dict(self) -> defaultdict[str, list[str]]: - """Description. + """Get attributes dictionary. Returns: defaultdict[str, list[str]]: Dictionary of attribute names\ diff --git a/app/multidirectory.py b/app/multidirectory.py index a1e2f22ba..6b8746f42 100644 --- a/app/multidirectory.py +++ b/app/multidirectory.py @@ -203,7 +203,7 @@ async def _servers(settings: Settings) -> None: await asyncio.gather(*servers) def _run() -> None: - """Description.""" + """Run ldap server.""" uvloop.run(_servers(settings), debug=settings.DEBUG) try: diff --git a/tests/conftest.py b/tests/conftest.py index 65ee8f7df..e14c7b838 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -328,21 +328,19 @@ async def _migrations( config.attributes["app_settings"] = settings def upgrade(conn: AsyncConnection) -> None: - """Description. + """Run up migrations. Args: conn (AsyncConnection): connection - """ config.attributes["connection"] = conn command.upgrade(config, "head") def downgrade(conn: AsyncConnection) -> None: - """Description. + """Run down migrations. Args: conn (AsyncConnection): connection - """ config.attributes["connection"] = conn command.downgrade(config, "base") diff --git a/tests/test_api/test_main/test_kadmin.py b/tests/test_api/test_main/test_kadmin.py index 482b2f5a5..5e78a2d1b 100644 --- a/tests/test_api/test_main/test_kadmin.py +++ b/tests/test_api/test_main/test_kadmin.py @@ -22,7 +22,7 @@ def _create_test_user_data( name: str, pw: str, ) -> dict[str, str | list[dict[str, str | list[str]]]]: - """Description. + """Create test user data. Args: name (str): user name From a8c6ad7bb7f887d78ce5d56205cfc2df19f3521e Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Thu, 5 Jun 2025 18:45:46 +0300 Subject: [PATCH 11/25] refactor: add darglint2 task_508 --- .darglint2 | 3 +- .github/workflows/checks.yml | 20 +++ .kerberos/config_server.py | 47 ++++++- Makefile | 1 + app/ldap_protocol/kerberos/client.py | 8 +- app/ldap_protocol/ldap_requests/abandon.py | 2 +- app/ldap_protocol/ldap_requests/modify.py | 5 +- app/ldap_protocol/ldap_requests/search.py | 2 +- poetry.lock | 141 +++++++++++++++++---- pyproject.toml | 2 + tests/conftest.py | 21 ++- 11 files changed, 213 insertions(+), 39 deletions(-) diff --git a/.darglint2 b/.darglint2 index 8c68e3f19..77c3a1e82 100644 --- a/.darglint2 +++ b/.darglint2 @@ -1,7 +1,6 @@ +# https://akaihola.github.io/darglint2/master/index.html [darglint2] docstring_style=google # https://akaihola.github.io/darglint2/master/readme.html#strictness-configuration strictness=full - -# ignore=DAR401, diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index f796e9727..6fb1c931e 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -68,6 +68,26 @@ jobs: NEW_TAG: linter run: docker run $NEW_TAG mypy . + darglint_docstrings: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: build linters + env: + TAG: ghcr.io/${{ env.REPO }}_linters:latest + NEW_TAG: linter + run: docker build --target=runtime -f .docker/lint.Dockerfile . 
-t $NEW_TAG --cache-to type=gha,mode=max --cache-from $TAG --build-arg BUILDKIT_INLINE_CACHE=1 + - name: Run linters + env: + NEW_TAG: linter + run: docker run $NEW_TAG darglint2 -v 2 .kerberos/ app + tests: runs-on: ubuntu-latest steps: diff --git a/.kerberos/config_server.py b/.kerberos/config_server.py index 6133d8287..a09d9653a 100644 --- a/.kerberos/config_server.py +++ b/.kerberos/config_server.py @@ -102,6 +102,7 @@ async def add_princ( Args: name (str): principal name password (str | None): password, if empty - uses randkey. + **dbargs: database arguments """ @abstractmethod @@ -169,6 +170,7 @@ async def lock_princ(self, name: str, **dbargs) -> None: Args: name (str): principal name + **dbargs: database arguments """ @abstractmethod @@ -177,6 +179,7 @@ async def force_pw_principal(self, name: str, **dbargs) -> None: Args: name (str): principal name + **dbargs: database arguments """ @@ -186,7 +189,11 @@ class KAdminLocalManager(AbstractKRBManager): client: KAdminProtocol def __init__(self, loop: asyncio.AbstractEventLoop | None = None) -> None: - """Create threadpool and get loop.""" + """Create threadpool and get loop. + + Args: + loop (asyncio.AbstractEventLoop | None): event loop + """ self.loop = loop or asyncio.get_running_loop() async def connect(self) -> Self: @@ -214,7 +221,13 @@ async def __aexit__( exc: BaseException | None, tb: TracebackType | None, ) -> None: - """Destroy threadpool.""" + """Destroy threadpool. + + Args: + exc_type (type[BaseException] | None): exception type + exc (BaseException | None): exception + tb (TracebackType | None): traceback + """ await self.disconnect() async def _init_client(self) -> KAdminProtocol: @@ -236,6 +249,7 @@ async def add_princ( Args: name (str): principal name password (str): password, if empty - uses randkey. + **dbargs: database arguments """ await self.loop.run_in_executor( self.pool, @@ -350,6 +364,7 @@ async def lock_princ(self, name: str, **dbargs) -> None: Args: name (str): principal names + **dbargs: database arguments """ princ = await self._get_raw_principal(name) princ.expire = "Now" @@ -360,6 +375,7 @@ async def force_pw_principal(self, name: str, **dbargs) -> None: Args: name (str): principal names + **dbargs: database arguments """ princ = await self._get_raw_principal(name) princ.pwexpire = "Now" @@ -368,7 +384,14 @@ async def force_pw_principal(self, name: str, **dbargs) -> None: @asynccontextmanager async def kadmin_lifespan(app: FastAPI) -> AsyncIterator[None]: - """Create kadmin instance.""" + """Create kadmin instance. + + Args: + app (FastAPI): FastAPI app + + Yields: + AsyncIterator[None]: Async iterator + """ loop = asyncio.get_running_loop() async def try_set_kadmin(app: FastAPI) -> None: @@ -392,13 +415,21 @@ async def try_set_kadmin(app: FastAPI) -> None: def get_kadmin() -> KAdminLocalManager: - """Stub.""" + """Stub. + + Raises: + NotImplementedError: NotImplementedError + """ raise NotImplementedError def handle_db_error(request: Request, exc: BaseException): # noqa: ARG001 """Handle duplicate. + Args: + request (Request): request + exc (BaseException): exception + Raises: HTTPException: Database Error """ @@ -411,6 +442,10 @@ def handle_db_error(request: Request, exc: BaseException): # noqa: ARG001 def handle_duplicate(request: Request, exc: BaseException): # noqa: ARG001 """Handle duplicate. 
+ Args: + request (Request): request + exc (BaseException): exception + Raises: HTTPException: Principal already exists """ @@ -423,6 +458,10 @@ def handle_duplicate(request: Request, exc: BaseException): # noqa: ARG001 def handle_not_found(request: Request, exc: BaseException): # noqa: ARG001 """Handle duplicate. + Args: + request (Request): request + exc (BaseException): exception + Raises: HTTPException: Principal does not exist """ diff --git a/Makefile b/Makefile index c8851e65e..9ca8c5fa0 100644 --- a/Makefile +++ b/Makefile @@ -7,6 +7,7 @@ before_pr: ruff check . --preview --fix --unsafe-fixes ruff format . --preview mypy . + darglint2 -v 2 .kerberos/ app build: ## build app and manually generate self-signed cert make down diff --git a/app/ldap_protocol/kerberos/client.py b/app/ldap_protocol/kerberos/client.py index 15bbfcd44..26c66ae24 100644 --- a/app/ldap_protocol/kerberos/client.py +++ b/app/ldap_protocol/kerberos/client.py @@ -10,8 +10,12 @@ class KerberosMDAPIClient(AbstractKadmin): """KRB server integration.""" @logger_wraps(is_stub=True) - async def setup(*_, **__) -> None: # type: ignore - """Stub method, setup is not needed.""" + async def setup(*args, **kwargs) -> None: # type: ignore + """Stub method, setup is not needed. + + Args: + **kwargs: keyword arguments + """ @logger_wraps() async def add_principal( diff --git a/app/ldap_protocol/ldap_requests/abandon.py b/app/ldap_protocol/ldap_requests/abandon.py index 6d0dc65f4..8202230b3 100644 --- a/app/ldap_protocol/ldap_requests/abandon.py +++ b/app/ldap_protocol/ldap_requests/abandon.py @@ -33,7 +33,7 @@ def from_data(cls, data: dict[str, list[ASN1Row]]) -> "AbandonRequest": # noqa: async def handle(self) -> AsyncGenerator: """Handle message with current user. - Returns: + Yields: AsyncGenerator: Async generator. """ await asyncio.sleep(0) diff --git a/app/ldap_protocol/ldap_requests/modify.py b/app/ldap_protocol/ldap_requests/modify.py index d46aeba31..16d1e7e6e 100644 --- a/app/ldap_protocol/ldap_requests/modify.py +++ b/app/ldap_protocol/ldap_requests/modify.py @@ -265,6 +265,9 @@ def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: Returns: tuple[LDAPCodes, str]: result code and message + + Raises: + Exception: any exception """ match err: case ValueError(): @@ -284,7 +287,7 @@ def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: return LDAPCodes.STRONGER_AUTH_REQUIRED, "" case _: - raise err + raise Exception def _get_dir_query(self) -> Select: """Get directory query. diff --git a/app/ldap_protocol/ldap_requests/search.py b/app/ldap_protocol/ldap_requests/search.py index 5864ed826..b493fae66 100644 --- a/app/ldap_protocol/ldap_requests/search.py +++ b/app/ldap_protocol/ldap_requests/search.py @@ -98,7 +98,7 @@ class Config: ignored_types = (cached_property,) @field_serializer("filter") - def serialize_filter(self, val: ASN1Row | None, _info: Any) -> str | None: # noqa: ANN401 + def serialize_filter(self, val: ASN1Row | None, _info: Any) -> str | None: """Serialize filter field. Args: diff --git a/poetry.lock b/poetry.lock index e3178ed14..148b2f403 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
[[package]] name = "aioldap3" @@ -6,6 +6,7 @@ version = "1.1.1" description = "New generation of async ldap3 client wrapper" optional = false python-versions = ">=3.8.1,<4.0.0" +groups = ["main"] files = [ {file = "aioldap3-1.1.1-py3-none-any.whl", hash = "sha256:643f63ab6a1c3800a518258169744f655e9a1ec6e599e0a09b7441510d33d16f"}, ] @@ -23,6 +24,7 @@ version = "1.15.1" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "alembic-1.15.1-py3-none-any.whl", hash = "sha256:197de710da4b3e91cf66a826a5b31b5d59a127ab41bd0fc42863e2902ce2bbbe"}, {file = "alembic-1.15.1.tar.gz", hash = "sha256:e1a1c738577bca1f27e68728c910cd389b9a92152ff91d902da649c192e30c49"}, @@ -42,6 +44,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -53,6 +56,7 @@ version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, @@ -65,7 +69,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -74,6 +78,7 @@ version = "2.8.0" description = "Python-ASN1 is a simple ASN.1 encoder and decoder for Python 2.7+ and 3.5+." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "asn1-2.8.0-py2.py3-none-any.whl", hash = "sha256:1438ac9a53cbc4064330af43b054ae042374f7c8ab46c55358241c15e29f1461"}, {file = "asn1-2.8.0.tar.gz", hash = "sha256:adf77ddc2707cf420c0eae3b99ee30e913afcf0936467d42669820ce6b7d150a"}, @@ -88,6 +93,7 @@ version = "3.13.1" description = "The missing async toolbox" optional = false python-versions = "~=3.8" +groups = ["main"] files = [ {file = "asyncstdlib-3.13.1-py3-none-any.whl", hash = "sha256:a64da68176af1da8c699026cad98f70b184f82b4cb39739e0b9701a2a7541cf9"}, {file = "asyncstdlib-3.13.1.tar.gz", hash = "sha256:f47564b9a3566f8f9172631d88c75fe074b0ce2127963b7265d310df9aeed03a"}, @@ -95,8 +101,8 @@ files = [ [package.extras] doc = ["sphinx", "sphinxcontrib-trio"] -test = ["black", "coverage", "flake8", "flake8-2020", "flake8-bugbear", "mypy", "pytest", "pytest-cov"] -typetest = ["mypy", "pyright", "typing-extensions"] +test = ["black ; implementation_name == \"cpython\"", "coverage", "flake8", "flake8-2020", "flake8-bugbear", "mypy ; implementation_name == \"cpython\"", "pytest", "pytest-cov"] +typetest = ["mypy ; implementation_name == \"cpython\"", "pyright", "typing-extensions"] [[package]] name = "backoff" @@ -104,6 +110,7 @@ version = "2.2.1" description = "Function decoration for backoff and retry" optional = false python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, @@ -115,6 +122,7 @@ version = "4.0.1" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, @@ -149,6 +157,7 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -160,6 +169,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -229,6 +239,7 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] +markers = {main = "platform_python_implementation != \"PyPy\"", test = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} [package.dependencies] pycparser = "*" @@ -239,6 +250,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -253,10 +265,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev", "test"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "platform_system == \"Windows\"", test = "sys_platform == \"win32\""} [[package]] name = "coverage" @@ -264,6 +278,7 @@ version = "7.8.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["test"] files = [ {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, @@ -331,7 +346,7 @@ files = [ ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" @@ -339,6 +354,7 @@ version = "44.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] files = [ {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, @@ -381,21 +397,34 @@ files = [ cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] -pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "darglint2" +version = "1.8.2" +description = "A utility for ensuring Google-style docstrings stay up to date with the source code." +optional = false +python-versions = ">=3.6,<4.0" +groups = ["main"] +files = [ + {file = "darglint2-1.8.2-py3-none-any.whl", hash = "sha256:8f950c9b5fab25dd54bf537bef1569c267073e5828cb5ab76428876df6d947af"}, + {file = "darglint2-1.8.2.tar.gz", hash = "sha256:11e0fc9c999bf09e192f42b72d202d177cb82da258eba387b24c2f0f5943650f"}, +] + [[package]] name = "decorator" version = "5.2.1" description = "Decorators for Humans" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, @@ -407,6 +436,7 @@ version = "1.5.0" description = "Cute DI framework with scopes and agreeable API" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "dishka-1.5.0-py3-none-any.whl", hash = "sha256:cd8847ac675b4093fe42742d9cf42a49a38d8d1abca46fcc250cd2f2190a2f71"}, {file = "dishka-1.5.0.tar.gz", hash = "sha256:1e47707f7b40c3a3ab3b736bd5b4ee958939d32ace6199809e4f75bb236c04a8"}, @@ -418,6 +448,7 @@ version = "2.7.0" description = "DNS toolkit" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, @@ -438,6 +469,7 @@ version = "0.19.1" description = "ECDSA cryptographic signature library (pure python)" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.6" +groups = ["main"] files = [ {file = "ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3"}, {file = "ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61"}, @@ -456,6 
+488,7 @@ version = "0.0.3" description = "enum/enum34 compatibility package" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "enum-compat-0.0.3.tar.gz", hash = "sha256:3677daabed56a6f724451d585662253d8fb4e5569845aafa8bb0da36b1a8751e"}, {file = "enum_compat-0.0.3-py3-none-any.whl", hash = "sha256:88091b617c7fc3bbbceae50db5958023c48dc40b50520005aa3bf27f8f7ea157"}, @@ -467,6 +500,7 @@ version = "0.115.12" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d"}, {file = "fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681"}, @@ -487,6 +521,7 @@ version = "24.11.1" description = "Coroutine-based network library" optional = false python-versions = ">=3.9" +groups = ["test"] files = [ {file = "gevent-24.11.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:92fe5dfee4e671c74ffaa431fd7ffd0ebb4b339363d24d0d944de532409b935e"}, {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7bfcfe08d038e1fa6de458891bca65c1ada6d145474274285822896a858c870"}, @@ -535,11 +570,11 @@ greenlet = {version = ">=3.1.1", markers = "platform_python_implementation == \" "zope.interface" = "*" [package.extras] -dnspython = ["dnspython (>=1.16.0,<2.0)", "idna"] +dnspython = ["dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\""] docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"] -monitor = ["psutil (>=5.7.0)"] -recommended = ["cffi (>=1.17.1)", "dnspython (>=1.16.0,<2.0)", "idna", "psutil (>=5.7.0)"] -test = ["cffi (>=1.17.1)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idna", "objgraph", "psutil (>=5.7.0)", "requests"] +monitor = ["psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""] +recommended = ["cffi (>=1.17.1) ; platform_python_implementation == \"CPython\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""] +test = ["cffi (>=1.17.1) ; platform_python_implementation == \"CPython\"", "coverage (>=5.0) ; sys_platform != \"win32\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "objgraph", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\"", "requests"] [[package]] name = "greenlet" @@ -547,6 +582,7 @@ version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" +groups = ["main", "test"] files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -622,6 +658,7 @@ files = [ {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] +markers = {main = 
"platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"", test = "platform_python_implementation == \"CPython\""} [package.extras] docs = ["Sphinx", "furo"] @@ -633,6 +670,7 @@ version = "1.9.0" description = "Python GSSAPI Wrapper" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "gssapi-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:261e00ac426d840055ddb2199f4989db7e3ce70fa18b1538f53e392b4823e8f1"}, {file = "gssapi-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:14a1ae12fdf1e4c8889206195ba1843de09fe82587fa113112887cd5894587c6"}, @@ -670,6 +708,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -681,6 +720,7 @@ version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -702,6 +742,7 @@ version = "0.28.1" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -714,7 +755,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -726,6 +767,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -740,6 +782,7 @@ version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, @@ -751,6 +794,7 @@ version = "3.1.6" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -768,6 +812,7 @@ version = "1.0.1" description = "A Python utility library for working with Lightweight Directory Access Protocol (LDAP) filters." optional = false python-versions = ">=3.4" +groups = ["main"] files = [ {file = "ldap_filter-1.0.1-py2.py3-none-any.whl", hash = "sha256:d0691b58d7fc867e3c24663122773e7ebccdda35b2f35caa9bff30357a9807ab"}, {file = "ldap_filter-1.0.1.tar.gz", hash = "sha256:b0b0b51ff8b681459dc9cb958c1238cb941b39d03280213c2d1f2cd142acbedf"}, @@ -782,6 +827,7 @@ version = "2.9.1" description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"}, {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"}, @@ -796,6 +842,7 @@ version = "0.7.3" description = "Python logging made (stupidly) simple" optional = false python-versions = "<4.0,>=3.5" +groups = ["main"] files = [ {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, @@ -806,7 +853,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==8.1.3)", "build (==1.2.2)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.5.0)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.13.0)", "mypy (==v1.4.1)", "myst-parser (==4.0.0)", "pre-commit (==4.0.1)", "pytest (==6.1.2)", "pytest (==8.3.2)", "pytest-cov (==2.12.1)", "pytest-cov (==5.0.0)", "pytest-cov (==6.0.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.1.0)", "sphinx-rtd-theme (==3.0.2)", "tox (==3.27.1)", "tox (==4.23.2)", "twine (==6.0.1)"] +dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", 
"tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] [[package]] name = "mako" @@ -814,6 +861,7 @@ version = "1.3.9" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "Mako-1.3.9-py3-none-any.whl", hash = "sha256:95920acccb578427a9aa38e37a186b1e43156c87260d7ba18ca63aa4c7cbd3a1"}, {file = "mako-1.3.9.tar.gz", hash = "sha256:b5d65ff3462870feec922dbccf38f6efb44e5714d7b593a656be86663d8600ac"}, @@ -833,6 +881,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -903,6 +952,7 @@ version = "1.15.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" +groups = ["main", "linters"] files = [ {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, @@ -955,6 +1005,7 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" +groups = ["main", "linters"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -966,6 +1017,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -977,6 +1029,7 @@ version = "1.7.4" description = "comprehensive password hashing framework supporting over 30 schemes" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, @@ -997,6 +1050,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -1012,6 +1066,7 @@ version = "0.11.3" description = "PROXY protocol library with asyncio server implementation" optional = false python-versions = "~=3.8" +groups = ["main"] files = [ {file = "proxy_protocol-0.11.3-py3-none-any.whl", hash = "sha256:77d541828aed30c5d9eea9c4c9af1dd85c2c4a2f829e0ecb003cb978f738a3f1"}, {file = 
"proxy_protocol-0.11.3.tar.gz", hash = "sha256:a9a1bd7bd90bfa82444a6bfc7cf567fa0a4d4144c9cadf392b8736ba651a662c"}, @@ -1031,6 +1086,7 @@ version = "3.2.8" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "psycopg-3.2.8-py3-none-any.whl", hash = "sha256:0e960f1977d77de7f1ace4b54590f686b52c2f9ab1f61fff4141887fc711d9e7"}, {file = "psycopg-3.2.8.tar.gz", hash = "sha256:cc995d836841e400c4f615d8dea351dc39697ad29df84d428f9c38c8040222f8"}, @@ -1043,8 +1099,8 @@ typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.2.8)"] -c = ["psycopg-c (==3.2.8)"] +binary = ["psycopg-binary (==3.2.8) ; implementation_name != \"pypy\""] +c = ["psycopg-c (==3.2.8) ; implementation_name != \"pypy\""] dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "types-shapely (>=2.0)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] @@ -1056,6 +1112,8 @@ version = "3.2.8" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "implementation_name != \"pypy\"" files = [ {file = "psycopg_binary-3.2.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0694548e1633c2ea819406c5bfd297bf1b4f6f8638dec0d639ab9764fdebcb2a"}, {file = "psycopg_binary-3.2.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:85851cdc18b514f80790f711a25406515b42f6b64e9a5d3940ae399e3b0e2c23"}, @@ -1130,6 +1188,7 @@ version = "3.2.6" description = "Connection Pool for Psycopg" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7"}, {file = "psycopg_pool-3.2.6.tar.gz", hash = "sha256:0f92a7817719517212fbfe2fd58b8c35c1850cdd2a80d36b581ba2085d9148e5"}, @@ -1144,6 +1203,7 @@ version = "1.0.6" description = "Python Hot Reload starts the given program and reloads it whenever any file changes in the current directory or imported modules." 
optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "py-hot-reload-1.0.6.tar.gz", hash = "sha256:09ef5ac863d0b2b776346527b3507751cdde4980dfda6504f7fcd451bab96905"}, {file = "py_hot_reload-1.0.6-py3-none-any.whl", hash = "sha256:d9a77538033e31ec3e633a1e3af0d4da4897916cf229b213e748d36f6cf3dfe0"}, @@ -1159,6 +1219,7 @@ version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -1170,10 +1231,12 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] +markers = {main = "platform_python_implementation != \"PyPy\"", test = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} [[package]] name = "pydantic" @@ -1181,6 +1244,7 @@ version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -1193,7 +1257,7 @@ typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -1201,6 +1265,7 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -1313,6 +1378,7 @@ version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, @@ -1333,6 +1399,7 @@ version = "0.26.0" description = "Pytest support for asyncio" optional = false python-versions = ">=3.9" +groups = ["test"] files = [ {file = "pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0"}, {file = "pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f"}, @@ -1351,6 +1418,7 @@ version = "4.1.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, @@ -1369,6 +1437,7 @@ version = "3.3.0" description = "JOSE implementation in Python" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, @@ -1391,6 +1460,7 @@ version = "0.0.20" description = "A streaming multipart parser for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, {file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, @@ -1402,6 +1472,7 @@ version = "2025.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, @@ -1413,6 +1484,7 @@ version = "5.2.1" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, @@ -1428,6 +1500,7 @@ version = "4.9" description = "Pure-Python RSA implementation" optional = false python-versions = ">=3.6,<4" +groups = ["main"] files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -1442,6 +1515,7 @@ version = "0.11.9" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" +groups = ["linters"] files = [ {file = "ruff-0.11.9-py3-none-linux_armv6l.whl", hash = "sha256:a31a1d143a5e6f499d1fb480f8e1e780b4dfdd580f86e05e87b835d22c5c6f8c"}, {file = "ruff-0.11.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:66bc18ca783b97186a1f3100e91e492615767ae0a3be584e1266aa9051990722"}, @@ -1469,19 +1543,20 @@ version = "78.0.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" +groups = ["test"] files = [ {file = "setuptools-78.0.1-py3-none-any.whl", hash = "sha256:1cc9b32ee94f93224d6c80193cbb768004667aa2f2732a473d6949b0236c1d4e"}, {file = "setuptools-78.0.1.tar.gz", hash = "sha256:4321d2dc2157b976dee03e1037c9f2bc5fea503c0c47d3c9458e0e8e49e659ce"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] -core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" @@ -1489,6 +1564,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = 
"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -1500,6 +1576,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -1511,6 +1588,7 @@ version = "2.0.39" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "SQLAlchemy-2.0.39-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:66a40003bc244e4ad86b72abb9965d304726d05a939e8c09ce844d27af9e6d37"}, {file = "SQLAlchemy-2.0.39-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67de057fbcb04a066171bd9ee6bcb58738d89378ee3cabff0bffbf343ae1c787"}, @@ -1607,6 +1685,7 @@ version = "0.46.1" description = "The little ASGI library that shines." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227"}, {file = "starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230"}, @@ -1624,6 +1703,7 @@ version = "2022.7.1.2" description = "Typing stubs for pytz" optional = false python-versions = "*" +groups = ["linters"] files = [ {file = "types-pytz-2022.7.1.2.tar.gz", hash = "sha256:487d3e8e9f4071eec8081746d53fa982bbc05812e719dcbf2ebf3d55a1a4cd28"}, {file = "types_pytz-2022.7.1.2-py3-none-any.whl", hash = "sha256:40ca448a928d566f7d44ddfde0066e384f7ffbd4da2778e42a4570eaca572446"}, @@ -1635,6 +1715,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "linters"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -1646,6 +1727,8 @@ version = "2025.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, @@ -1657,6 +1740,7 @@ version = "0.34.2" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403"}, {file = "uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328"}, @@ -1667,7 +1751,7 @@ click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "uvloop" @@ -1675,6 +1759,7 @@ version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -1726,6 +1811,7 @@ version = "4.0.2" description = "Filesystem events monitoring" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, @@ -1773,6 +1859,7 @@ version = "15.0.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, @@ -1851,13 +1938,15 @@ version = "1.2.0" description = "A small Python utility to set file creation time on Windows" optional = false python-versions = ">=3.5" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, ] [package.extras] -dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] +dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] [[package]] name = "zope-event" @@ -1865,6 +1954,7 @@ version = "5.0" description = "Very basic event publishing system" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, @@ -1883,6 +1973,7 @@ version = "7.2" description = "Interfaces for Python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = 
"zope.interface-7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce290e62229964715f1011c3dbeab7a4a1e4971fd6f31324c4519464473ef9f2"}, {file = "zope.interface-7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05b910a5afe03256b58ab2ba6288960a2892dfeef01336dc4be6f1b9ed02ab0a"}, @@ -1932,6 +2023,6 @@ test = ["coverage[toml]", "zope.event", "zope.testing"] testing = ["coverage[toml]", "zope.event", "zope.testing"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "3.12.6" -content-hash = "5a35f9cf9805cbeb1e801227881536de90af7c326e933da6c01fc07812d456e5" +content-hash = "fdd7a645ba7e7d880310520a48668dbfd5e9e63fb5e01fd7736a1ca94ebaa081" diff --git a/pyproject.toml b/pyproject.toml index ef9425e66..e60ede471 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ asyncstdlib = "^3.10.6" backoff = "^2.2.1" bcrypt = "4.0.1" cryptography = "^44.0.1" +darglint2 = "^1.8.2" dishka = "^1.4.2" dnspython = "^2.7.0" fastapi = "^0.115.0" @@ -161,6 +162,7 @@ ignore = [ "B905", # this is necessary. get-attr-with-constant "RUF029", "DOC201", # TODO delete it and fix + "ANN401", # FIXME. 'Dynamically typed expressions (typing.Any) are disallowed' ] fixable = ["ALL"] diff --git a/tests/conftest.py b/tests/conftest.py index e14c7b838..135f2c7d2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,7 +9,14 @@ import weakref from contextlib import suppress from dataclasses import dataclass -from typing import AsyncGenerator, AsyncIterator, Generator, Iterator +from typing import ( + Any, + AsyncGenerator, + AsyncIterator, + Generator, + Iterator, + Literal, +) from unittest.mock import AsyncMock, Mock import httpx @@ -288,8 +295,16 @@ class MutePolicyBindRequest(BindRequest): __test__ = False @staticmethod - async def is_user_group_valid(*args, **kwargs) -> bool: # type: ignore - """Stub.""" + async def is_user_group_valid(*args: Any, **kwargs: Any) -> Literal[True]: + """Stub. + + Args: + *args: arguments + **kwargs: keyword arguments + + Returns: + Literal[True]: True + """ return True From 1a977367fdbd371fb2be9cda9f2cafb77fbce70b Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 9 Jun 2025 14:29:40 +0300 Subject: [PATCH 12/25] refactor: delete darglint, delete useless docstrings. task_508 --- .github/workflows/checks.yml | 20 ----------------- .kerberos/config_server.py | 21 +++--------------- Makefile | 1 - .../scripts/principal_block_user_sync.py | 2 +- app/ldap_protocol/kerberos/base.py | 6 +---- .../ldap_requests/bind_methods/base.py | 22 +++---------------- app/ldap_protocol/ldap_requests/extended.py | 15 ++----------- app/ldap_protocol/ldap_requests/modify.py | 10 +++------ app/ldap_protocol/ldap_responses.py | 12 ++-------- app/ldap_protocol/session_storage.py | 20 +++-------------- 10 files changed, 18 insertions(+), 111 deletions(-) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 6fb1c931e..f796e9727 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -68,26 +68,6 @@ jobs: NEW_TAG: linter run: docker run $NEW_TAG mypy . - darglint_docstrings: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: build linters - env: - TAG: ghcr.io/${{ env.REPO }}_linters:latest - NEW_TAG: linter - run: docker build --target=runtime -f .docker/lint.Dockerfile . 
-t $NEW_TAG --cache-to type=gha,mode=max --cache-from $TAG --build-arg BUILDKIT_INLINE_CACHE=1 - - name: Run linters - env: - NEW_TAG: linter - run: docker run $NEW_TAG darglint2 -v 2 .kerberos/ app - tests: runs-on: ubuntu-latest steps: diff --git a/.kerberos/config_server.py b/.kerberos/config_server.py index a09d9653a..edb5defa4 100644 --- a/.kerberos/config_server.py +++ b/.kerberos/config_server.py @@ -107,14 +107,7 @@ async def add_princ( @abstractmethod async def get_princ(self, name: str) -> Principal | None: - """Get principal. - - Args: - name (str): principal name - - Returns: - Principal | None: - """ + """Get principal.""" @abstractmethod async def change_password(self, name: str, new_password: str) -> None: @@ -189,11 +182,7 @@ class KAdminLocalManager(AbstractKRBManager): client: KAdminProtocol def __init__(self, loop: asyncio.AbstractEventLoop | None = None) -> None: - """Create threadpool and get loop. - - Args: - loop (asyncio.AbstractEventLoop | None): event loop - """ + """Create threadpool and get loop.""" self.loop = loop or asyncio.get_running_loop() async def connect(self) -> Self: @@ -231,11 +220,7 @@ async def __aexit__( await self.disconnect() async def _init_client(self) -> KAdminProtocol: - """Init kadmin local connection. - - Returns: - KAdminProtocol: - """ + """Init kadmin local connection.""" return await self.loop.run_in_executor(self.pool, kadmv.local) async def add_princ( diff --git a/Makefile b/Makefile index 9ca8c5fa0..c8851e65e 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,6 @@ before_pr: ruff check . --preview --fix --unsafe-fixes ruff format . --preview mypy . - darglint2 -v 2 .kerberos/ app build: ## build app and manually generate self-signed cert make down diff --git a/app/extra/scripts/principal_block_user_sync.py b/app/extra/scripts/principal_block_user_sync.py index 1e43652cb..5ad395cc9 100644 --- a/app/extra/scripts/principal_block_user_sync.py +++ b/app/extra/scripts/principal_block_user_sync.py @@ -97,7 +97,7 @@ def _find_krb_exp_attr(directory: Directory) -> Attribute | None: """Find krbprincipalexpiration attribute in directory. Args: - directory (Directory): the directory object + directory (Directory): directory object Returns: Attribute | None: the attribute with the name diff --git a/app/ldap_protocol/kerberos/base.py b/app/ldap_protocol/kerberos/base.py index 561ab9417..cdb682adc 100644 --- a/app/ldap_protocol/kerberos/base.py +++ b/app/ldap_protocol/kerberos/base.py @@ -37,11 +37,7 @@ class AbstractKadmin(ABC): client: httpx.AsyncClient def __init__(self, client: httpx.AsyncClient) -> None: - """Set client. - - Args: - client (httpx.AsyncClient): httpx - """ + """Set client.""" self.client = client async def setup_configs( diff --git a/app/ldap_protocol/ldap_requests/bind_methods/base.py b/app/ldap_protocol/ldap_requests/bind_methods/base.py index e47c74c34..22e5b249e 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/base.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/base.py @@ -92,11 +92,7 @@ def METHOD_ID(self) -> int: # noqa: N802 @abstractmethod def is_valid(self, user: User) -> bool: - """Validate state. - - Args: - user (User): User directory - """ + """Validate state.""" @abstractmethod def is_anonymous(self) -> bool: @@ -108,12 +104,7 @@ def is_anonymous(self) -> bool: @abstractmethod async def get_user(self, session: AsyncSession, username: str) -> User: - """Get user. 
- - Args: - session (AsyncSession): sqlalchemy session - username (str): username - """ + """Get user.""" class SaslAuthentication(AbstractLDAPAuth): @@ -125,11 +116,4 @@ class SaslAuthentication(AbstractLDAPAuth): @classmethod @abstractmethod def from_data(cls, data: list[ASN1Row]) -> "SaslAuthentication": - """Get auth from data. - - Args: - data (list[ASN1Row]): data - - Returns: - SaslAuthentication: sasl authentication - """ + """Get auth from data.""" diff --git a/app/ldap_protocol/ldap_requests/extended.py b/app/ldap_protocol/ldap_requests/extended.py index ae9e2bc3d..8afa9b3ed 100644 --- a/app/ldap_protocol/ldap_requests/extended.py +++ b/app/ldap_protocol/ldap_requests/extended.py @@ -41,11 +41,7 @@ class BaseExtendedValue(ABC, BaseModel): @classmethod @abstractmethod def from_data(cls, data: ASN1Row) -> "BaseExtendedValue": - """Create model from data, decoded from responseValue bytes. - - Args: - data: ASN1Row: - """ + """Create model from data, decoded from responseValue bytes.""" @abstractmethod async def handle( @@ -69,14 +65,7 @@ async def handle( @staticmethod def _decode_value(data: ASN1Row) -> ASN1Row: - """Decode value. - - Args: - data: ASN1Row - - Returns: - ASN1Row - """ + """Decode value.""" dec = Decoder() dec.start(data[1].value) # type: ignore output = asn1todict(dec) diff --git a/app/ldap_protocol/ldap_requests/modify.py b/app/ldap_protocol/ldap_requests/modify.py index 16d1e7e6e..e3c47843b 100644 --- a/app/ldap_protocol/ldap_requests/modify.py +++ b/app/ldap_protocol/ldap_requests/modify.py @@ -267,7 +267,7 @@ def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: tuple[LDAPCodes, str]: result code and message Raises: - Exception: any exception + Exception: if can`t match exception and LDAP code. """ match err: case ValueError(): @@ -289,12 +289,8 @@ def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: case _: raise Exception - def _get_dir_query(self) -> Select: - """Get directory query. - - Returns: - Select: directory query - """ + def _get_dir_query(self) -> Select[tuple[Directory]]: + """Get directory query.""" return ( select(Directory) .join(Directory.attributes) diff --git a/app/ldap_protocol/ldap_responses.py b/app/ldap_protocol/ldap_responses.py index cb180c7c9..bf0849c1e 100644 --- a/app/ldap_protocol/ldap_responses.py +++ b/app/ldap_protocol/ldap_responses.py @@ -70,11 +70,7 @@ class BaseResponse(ABC, BaseEncoder): @property @abstractmethod def PROTOCOL_OP(self) -> int: # noqa: N802 - """Protocol OP response code. - - Returns: - int: Protocol OP response code - """ + """Protocol OP response code.""" class BindResponse(LDAPResult, BaseResponse): @@ -265,11 +261,7 @@ class BaseExtendedResponseValue(ABC, BaseEncoder): @abstractmethod def get_value(self) -> str | None: - """Get response value. - - Returns: - str | None: response value - """ + """Get response value.""" class ExtendedResponse(LDAPResult, BaseResponse): diff --git a/app/ldap_protocol/session_storage.py b/app/ldap_protocol/session_storage.py index f3bb934f7..76efc355a 100644 --- a/app/ldap_protocol/session_storage.py +++ b/app/ldap_protocol/session_storage.py @@ -42,25 +42,11 @@ async def get(self, key: str) -> dict: @abstractmethod async def _get_session_keys_by_uid(self, uid: int) -> set[str]: - """Get session keys by user id. - - Args: - uid (int): user id - - Returns: - set[str]: session keys - """ + """Get session keys by user id.""" @abstractmethod async def _get_session_keys_by_ip(self, ip: str) -> set[str]: - """Get session keys by ip. 
- - Args: - ip (str): ip - - Returns: - set[str]: session keys - """ + """Get session keys by ip.""" @abstractmethod async def get_user_sessions( @@ -181,7 +167,7 @@ def _generate_key(self) -> str: """Generate a new key for storing data in the storage. Returns: - str: A new key. + str: New key. """ return f"http:{token_hex(self.key_length)}" From 5142fe77b632e0c445039c9068bfeb629bf7c5e2 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 9 Jun 2025 16:01:22 +0300 Subject: [PATCH 13/25] refactor: enable darglint2 --- .github/workflows/checks.yml | 20 + .kerberos/config_server.py | 21 +- Makefile | 1 + app/alembic/env.py | 6 +- app/api/exception_handlers.py | 20 +- app/api/ldap_schema/attribute_type_router.py | 2 +- app/api/ldap_schema/object_class_router.py | 2 +- app/api/main/krb5_router.py | 1 + app/ioc.py | 27 +- app/ldap_protocol/kerberos/base.py | 6 +- app/ldap_protocol/ldap_requests/add.py | 1 + .../ldap_requests/bind_methods/base.py | 28 +- app/ldap_protocol/ldap_requests/extended.py | 18 +- app/ldap_protocol/ldap_requests/modify.py | 7 +- .../ldap_schema/attribute_type_dao.py | 10 +- .../ldap_schema/entity_type_dao.py | 17 +- .../ldap_schema/object_class_dao.py | 17 +- app/ldap_protocol/policies/access_policy.py | 4 +- app/ldap_protocol/session_storage.py | 15 +- app/models.py | 32 +- poetry.lock | 1117 ++++++++--------- pyproject.toml | 2 +- 22 files changed, 758 insertions(+), 616 deletions(-) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index f796e9727..6fb1c931e 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -68,6 +68,26 @@ jobs: NEW_TAG: linter run: docker run $NEW_TAG mypy . + darglint_docstrings: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: build linters + env: + TAG: ghcr.io/${{ env.REPO }}_linters:latest + NEW_TAG: linter + run: docker build --target=runtime -f .docker/lint.Dockerfile . -t $NEW_TAG --cache-to type=gha,mode=max --cache-from $TAG --build-arg BUILDKIT_INLINE_CACHE=1 + - name: Run linters + env: + NEW_TAG: linter + run: docker run $NEW_TAG darglint2 -v 2 .kerberos/ app + tests: runs-on: ubuntu-latest steps: diff --git a/.kerberos/config_server.py b/.kerberos/config_server.py index edb5defa4..f2ddaecba 100644 --- a/.kerberos/config_server.py +++ b/.kerberos/config_server.py @@ -107,7 +107,14 @@ async def add_princ( @abstractmethod async def get_princ(self, name: str) -> Principal | None: - """Get principal.""" + """Get principal. + + Args: + name (str): Principal name + + Returns: + Principal | None: + """ @abstractmethod async def change_password(self, name: str, new_password: str) -> None: @@ -182,7 +189,11 @@ class KAdminLocalManager(AbstractKRBManager): client: KAdminProtocol def __init__(self, loop: asyncio.AbstractEventLoop | None = None) -> None: - """Create threadpool and get loop.""" + """Create threadpool and get loop. + + Args: + loop (asyncio.AbstractEventLoop | None): event loop. + """ self.loop = loop or asyncio.get_running_loop() async def connect(self) -> Self: @@ -220,7 +231,11 @@ async def __aexit__( await self.disconnect() async def _init_client(self) -> KAdminProtocol: - """Init kadmin local connection.""" + """Init kadmin local connection. 
+ + Returns: + KAdminProtocol: client of kadmin.KAdmin + """ return await self.loop.run_in_executor(self.pool, kadmv.local) async def add_princ( diff --git a/Makefile b/Makefile index c8851e65e..83e6902d3 100644 --- a/Makefile +++ b/Makefile @@ -7,6 +7,7 @@ before_pr: ruff check . --preview --fix --unsafe-fixes ruff format . --preview mypy . + darglint2 .kerberos/ app build: ## build app and manually generate self-signed cert make down diff --git a/app/alembic/env.py b/app/alembic/env.py index 7ac5bb7cf..65197e763 100644 --- a/app/alembic/env.py +++ b/app/alembic/env.py @@ -22,7 +22,11 @@ def run_sync_migrations(connection: AsyncConnection): - """Run sync migrations.""" + """Run sync migrations. + + Args: + connection (AsyncConnection): async db connection. + """ context.configure( connection=connection, target_metadata=target_metadata, diff --git a/app/api/exception_handlers.py b/app/api/exception_handlers.py index 1038a7ca8..9485ceb3a 100644 --- a/app/api/exception_handlers.py +++ b/app/api/exception_handlers.py @@ -53,7 +53,15 @@ async def handle_instance_not_found_error( request: Request, # noqa: ARG001 exc: Exception, # noqa: ARG001 ) -> NoReturn: - """Handle Instance Not Found error.""" + """Handle Instance Not Found error. + + Args: + request (Request): request + exc (Exception): exc. + + Raises: + HTTPException: Instance not found. + """ raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Instance not found.", @@ -64,7 +72,15 @@ async def handle_instance_cant_modify_error( request: Request, # noqa: ARG001 exc: Exception, # noqa: ARG001 ) -> NoReturn: - """Handle Instance Cant Modify error.""" + """Handle Instance Cant Modify error. + + Args: + request (Request): request + exc (Exception): exc. + + Raises: + HTTPException: System Instance cannot be modified. + """ raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail="System Instance cannot be modified.", diff --git a/app/api/ldap_schema/attribute_type_router.py b/app/api/ldap_schema/attribute_type_router.py index a522fd35b..5996a5596 100644 --- a/app/api/ldap_schema/attribute_type_router.py +++ b/app/api/ldap_schema/attribute_type_router.py @@ -147,7 +147,7 @@ async def delete_bulk_attribute_types( """Delete Attribute Types by their names. Args: - attribute_types_names (list[str]): List of attribute type names. + attribute_types_names (LimitedListType): List of attribute type names. session (AsyncSession): Database session. attribute_type_dao (AttributeTypeDAO): Attribute Type dao. """ diff --git a/app/api/ldap_schema/object_class_router.py b/app/api/ldap_schema/object_class_router.py index 7f6a078b4..9fa0dddf0 100644 --- a/app/api/ldap_schema/object_class_router.py +++ b/app/api/ldap_schema/object_class_router.py @@ -142,7 +142,7 @@ async def delete_bulk_object_classes( """Delete object classes by their names. Args: - object_classes_names (list[str]): List of object class names. + object_classes_names (LimitedListType): List of object class names. object_class_dao (ObjectClassDAO): Object Class DAO. session (AsyncSession): Database session. """ diff --git a/app/api/main/krb5_router.py b/app/api/main/krb5_router.py index 01e538d86..b81a1e9f5 100644 --- a/app/api/main/krb5_router.py +++ b/app/api/main/krb5_router.py @@ -74,6 +74,7 @@ async def setup_krb_catalogue( krbadmin_password (SecretStr): Kerberos admin password. ldap_session (LDAPSession): LDAP session. kadmin (AbstractKadmin): Kadmin manager. + entity_type_dao (EntityTypeDAO): Entity Type DAO. 
     Raises:
         HTTPException: On conflict or failed creation.
diff --git a/app/ioc.py b/app/ioc.py
index 416342602..5ad899491 100644
--- a/app/ioc.py
+++ b/app/ioc.py
@@ -279,7 +279,14 @@ def get_attribute_type_dao(
         self,
         session: AsyncSession,
     ) -> AttributeTypeDAO:
-        """Get Attribute Type DAO."""
+        """Get Attribute Type DAO.
+
+        Args:
+            session (AsyncSession): async db session.
+
+        Returns:
+            AttributeTypeDAO: Attribute Type DAO.
+        """
         return AttributeTypeDAO(session)
 
     @provide(provides=ObjectClassDAO)
@@ -287,7 +294,14 @@ def get_object_class_dao(
         self,
         session: AsyncSession,
     ) -> ObjectClassDAO:
-        """Get Object Class DAO."""
+        """Get Object Class DAO.
+
+        Args:
+            session (AsyncSession): async db session.
+
+        Returns:
+            ObjectClassDAO: Object Class DAO.
+        """
         attribute_type_dao = AttributeTypeDAO(session)
         return ObjectClassDAO(
             attribute_type_dao=attribute_type_dao,
@@ -299,7 +313,14 @@ def get_entity_type_dao(
         self,
         session: AsyncSession,
     ) -> EntityTypeDAO:
-        """Get Entity Type DAO."""
+        """Get Entity Type DAO.
+
+        Args:
+            session (AsyncSession): async db session.
+
+        Returns:
+            EntityTypeDAO: Entity Type DAO.
+        """
         return EntityTypeDAO(session)
 
 
diff --git a/app/ldap_protocol/kerberos/base.py b/app/ldap_protocol/kerberos/base.py
index cdb682adc..b6fc9909f 100644
--- a/app/ldap_protocol/kerberos/base.py
+++ b/app/ldap_protocol/kerberos/base.py
@@ -37,7 +37,11 @@ class AbstractKadmin(ABC):
     client: httpx.AsyncClient
 
     def __init__(self, client: httpx.AsyncClient) -> None:
-        """Set client."""
+        """Set client.
+
+        Args:
+            client (httpx.AsyncClient): async HTTP client.
+        """
         self.client = client
 
     async def setup_configs(
diff --git a/app/ldap_protocol/ldap_requests/add.py b/app/ldap_protocol/ldap_requests/add.py
index d3e310537..81b341f3d 100644
--- a/app/ldap_protocol/ldap_requests/add.py
+++ b/app/ldap_protocol/ldap_requests/add.py
@@ -121,6 +121,7 @@ async def handle(  # noqa: C901
             session (AsyncSession): Async DB session
             ldap_session (LDAPSession): LDAP session
             kadmin (AbstractKadmin): Abstract Kerberos Admin
+            entity_type_dao (EntityTypeDAO): Entity Type DAO.
 
         Yields:
             AsyncGenerator[AddResponse, None]
diff --git a/app/ldap_protocol/ldap_requests/bind_methods/base.py b/app/ldap_protocol/ldap_requests/bind_methods/base.py
index 22e5b249e..2a47432ba 100644
--- a/app/ldap_protocol/ldap_requests/bind_methods/base.py
+++ b/app/ldap_protocol/ldap_requests/bind_methods/base.py
@@ -92,7 +92,14 @@ def METHOD_ID(self) -> int:  # noqa: N802
 
     @abstractmethod
     def is_valid(self, user: User) -> bool:
-        """Validate state."""
+        """Validate state.
+
+        Args:
+            user (User): instance of User.
+
+        Returns:
+            bool: True if the user state is valid, otherwise False.
+        """
 
     @abstractmethod
     def is_anonymous(self) -> bool:
@@ -104,7 +111,15 @@ def is_anonymous(self) -> bool:
 
     @abstractmethod
     async def get_user(self, session: AsyncSession, username: str) -> User:
-        """Get user."""
+        """Get user.
+
+        Args:
+            session (AsyncSession): async db session.
+            username (str): user name.
+
+        Returns:
+            User: instance of User.
+        """
 
 
 class SaslAuthentication(AbstractLDAPAuth):
@@ -116,4 +131,11 @@ class SaslAuthentication(AbstractLDAPAuth):
     @classmethod
     @abstractmethod
     def from_data(cls, data: list[ASN1Row]) -> "SaslAuthentication":
-        """Get auth from data."""
+        """Get auth from data.
+
+        Args:
+            data (list[ASN1Row]): list of rows with metadata.
+
+        Returns:
+            SaslAuthentication: SASL auth form.
+ """ diff --git a/app/ldap_protocol/ldap_requests/extended.py b/app/ldap_protocol/ldap_requests/extended.py index 8afa9b3ed..8e1f772a2 100644 --- a/app/ldap_protocol/ldap_requests/extended.py +++ b/app/ldap_protocol/ldap_requests/extended.py @@ -41,7 +41,14 @@ class BaseExtendedValue(ABC, BaseModel): @classmethod @abstractmethod def from_data(cls, data: ASN1Row) -> "BaseExtendedValue": - """Create model from data, decoded from responseValue bytes.""" + """Create model from data, decoded from responseValue bytes. + + Args: + data (ASN1Row): Row with metadata. + + Returns: + BaseExtendedValue: instance of BaseExtendedValue. + """ @abstractmethod async def handle( @@ -65,7 +72,14 @@ async def handle( @staticmethod def _decode_value(data: ASN1Row) -> ASN1Row: - """Decode value.""" + """Decode value. + + Args: + data (ASN1Row): Row with metadata. + + Returns: + ASN1Row: Decoded row with metadata + """ dec = Decoder() dec.start(data[1].value) # type: ignore output = asn1todict(dec) diff --git a/app/ldap_protocol/ldap_requests/modify.py b/app/ldap_protocol/ldap_requests/modify.py index b919ee2e6..c810351a3 100644 --- a/app/ldap_protocol/ldap_requests/modify.py +++ b/app/ldap_protocol/ldap_requests/modify.py @@ -179,6 +179,7 @@ async def handle( session_storage (SessionStorage): Session storage kadmin (AbstractKadmin): Kadmin settings (Settings): Settings + entity_type_dao (EntityTypeDAO): Entity Type DAO. Yields: AsyncGenerator[ModifyResponse, None] @@ -304,7 +305,11 @@ def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: raise Exception def _get_dir_query(self) -> Select[tuple[Directory]]: - """Get directory query.""" + """Get directory query. + + Returns: + Select[tuple[Directory]]: SQLAlchemy select query. + """ return ( select(Directory) .join(Directory.attributes) diff --git a/app/ldap_protocol/ldap_schema/attribute_type_dao.py b/app/ldap_protocol/ldap_schema/attribute_type_dao.py index 9b6091f48..d343118d8 100644 --- a/app/ldap_protocol/ldap_schema/attribute_type_dao.py +++ b/app/ldap_protocol/ldap_schema/attribute_type_dao.py @@ -71,7 +71,11 @@ class AttributeTypeDAO: _session: AsyncSession def __init__(self, session: AsyncSession) -> None: - """Initialize Attribute Type DAO with session.""" + """Initialize Attribute Type DAO with session. + + Args: + session (AsyncSession): async db session. + """ self._session = session async def get_paginator( @@ -132,7 +136,7 @@ async def get_one_by_name( attribute_type_name (str): Attribute Type name. Returns: - AttributeType | None: Attribute Type. + AttributeType: Attribute Type. Raises: InstanceNotFoundError: Attribute Type not found. @@ -206,7 +210,7 @@ async def delete_all_by_names( attribute_type_names (list[str]): List of Attribute Types OIDs. """ if not attribute_type_names: - return None + return await self._session.execute( delete(AttributeType) diff --git a/app/ldap_protocol/ldap_schema/entity_type_dao.py b/app/ldap_protocol/ldap_schema/entity_type_dao.py index 9ec66699d..b1df08d24 100644 --- a/app/ldap_protocol/ldap_schema/entity_type_dao.py +++ b/app/ldap_protocol/ldap_schema/entity_type_dao.py @@ -30,7 +30,14 @@ class EntityTypeSchema(BaseModel): @classmethod def from_db(cls, entity_type: EntityType) -> "EntityTypeSchema": - """Create an instance of Entity Type Schema from SQLA object.""" + """Create an instance of Entity Type Schema from SQLA object. + + Args: + entity_type (EntityType): Instance of Entity Type. + + Returns: + EntityTypeSchema: Instance of Entity Type Schema. 
+ """ return cls( name=entity_type.name, is_system=entity_type.is_system, @@ -57,7 +64,11 @@ class EntityTypeDAO: _session: AsyncSession def __init__(self, session: AsyncSession) -> None: - """Initialize Entity Type DAO with a database session.""" + """Initialize Entity Type DAO with a database session. + + Args: + session (AsyncSession): async db session. + """ self._session = session async def get_paginator( @@ -234,7 +245,7 @@ async def attach_entity_type_to_directories(self) -> None: is_system_entity_type=False, ) - return None + return async def attach_entity_type_to_directory( self, diff --git a/app/ldap_protocol/ldap_schema/object_class_dao.py b/app/ldap_protocol/ldap_schema/object_class_dao.py index 8e0e832f9..714154f38 100644 --- a/app/ldap_protocol/ldap_schema/object_class_dao.py +++ b/app/ldap_protocol/ldap_schema/object_class_dao.py @@ -76,14 +76,17 @@ class ObjectClassDAO: _session: AsyncSession _attribute_type_dao: AttributeTypeDAO - ObjectClassCantModifyError = - def __init__( self, session: AsyncSession, attribute_type_dao: AttributeTypeDAO, ) -> None: - """Initialize Object Class DAO with session.""" + """Initialize Object Class DAO with session. + + Args: + session (AsyncSession): async db session. + attribute_type_dao (AttributeTypeDAO): Attribute Type DAO. + """ self._session = session self._attribute_type_dao = attribute_type_dao @@ -220,10 +223,9 @@ async def get_one_by_name( Args: object_class_name (str): Object Class name. - session (AsyncSession): Database session. Returns: - ObjectClass | None: Object Class. + ObjectClass: Object Class. Raises: InstanceNotFoundError: Object class not found. @@ -274,8 +276,9 @@ async def modify_one( new_statement (ObjectClassUpdateSchema): New statement of object class - Raises: InstanceCantModifyError: If Object Class is system,\ - it cannot be changed. + Raises: + InstanceCantModifyError: If Object Class is system,\ + it cannot be changed. """ if object_class.is_system: raise InstanceCantModifyError( diff --git a/app/ldap_protocol/policies/access_policy.py b/app/ldap_protocol/policies/access_policy.py index 7b15d6003..a0982ec72 100644 --- a/app/ldap_protocol/policies/access_policy.py +++ b/app/ldap_protocol/policies/access_policy.py @@ -59,8 +59,8 @@ async def create_access_policy( can_add (bool): can add can_modify (bool): can modify can_delete (bool): can delete - grant_dn (ENTRY_TYPE): main dn - groups (list[ENTRY_TYPE]): list of groups + grant_dn (GRANT_DN_STRING): main dn + groups (list[GRANT_DN_STRING]): list of groups session (AsyncSession): session """ path = get_search_path(grant_dn) diff --git a/app/ldap_protocol/session_storage.py b/app/ldap_protocol/session_storage.py index 76efc355a..29f93edb0 100644 --- a/app/ldap_protocol/session_storage.py +++ b/app/ldap_protocol/session_storage.py @@ -42,11 +42,22 @@ async def get(self, key: str) -> dict: @abstractmethod async def _get_session_keys_by_uid(self, uid: int) -> set[str]: - """Get session keys by user id.""" + """Get session keys by user id. + + Args: + uid (int): uid. + + Returns: + set[str]: session keys + """ @abstractmethod async def _get_session_keys_by_ip(self, ip: str) -> set[str]: - """Get session keys by ip.""" + """Get session keys by ip. + + Args: + ip (str): IP address. 
+ """ @abstractmethod async def get_user_sessions( diff --git a/app/models.py b/app/models.py index d3ce8df3d..a1186063e 100644 --- a/app/models.py +++ b/app/models.py @@ -190,12 +190,23 @@ class EntityType(Base): @property def object_class_names_set(self) -> set[str]: - """Get object class names.""" + """Get object class names. + + Returns: + set[str]: object class names + """ return set(self.object_class_names) @classmethod def generate_entity_type_name(cls, directory: Directory) -> str: - """Generate entity type name based on Directory.""" + """Generate entity type name based on Directory. + + Args: + directory (Directory): instance of Directory. + + Returns: + str: entity type name. + """ return f"{directory.name}_entity_type_{directory.id}" @@ -234,12 +245,20 @@ class Directory(Base): @property def entity_type_name(self) -> str: - """Get entity type name.""" + """Get entity type name. + + Returns: + str: entity type name + """ return self.entity_type.name if self.entity_type else "" @property def entity_type_object_class_names_set(self) -> set[str]: - """Get object class names of entity type.""" + """Get object class names of entity type. + + Returns: + set[str]: object class names of entity type. + """ return ( self.entity_type.object_class_names_set if self.entity_type @@ -248,6 +267,11 @@ def entity_type_object_class_names_set(self) -> set[str]: @property def object_class_names_set(self) -> set[str]: + """Object class names from directory's attribute. + + Returns: + set[str]: object class names. + """ return set( self.attributes_dict.get("objectClass", []) + self.attributes_dict.get("objectclass", []) diff --git a/poetry.lock b/poetry.lock index 148b2f403..53ad4f166 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "aioldap3" @@ -6,7 +6,6 @@ version = "1.1.1" description = "New generation of async ldap3 client wrapper" optional = false python-versions = ">=3.8.1,<4.0.0" -groups = ["main"] files = [ {file = "aioldap3-1.1.1-py3-none-any.whl", hash = "sha256:643f63ab6a1c3800a518258169744f655e9a1ec6e599e0a09b7441510d33d16f"}, ] @@ -20,14 +19,13 @@ url = "https://github.com/MultiDirectoryLab/aioldap3/releases/download/v1.1.1/ai [[package]] name = "alembic" -version = "1.15.1" +version = "1.16.1" description = "A database migration tool for SQLAlchemy." 
optional = false python-versions = ">=3.9" -groups = ["main"] files = [ - {file = "alembic-1.15.1-py3-none-any.whl", hash = "sha256:197de710da4b3e91cf66a826a5b31b5d59a127ab41bd0fc42863e2902ce2bbbe"}, - {file = "alembic-1.15.1.tar.gz", hash = "sha256:e1a1c738577bca1f27e68728c910cd389b9a92152ff91d902da649c192e30c49"}, + {file = "alembic-1.16.1-py3-none-any.whl", hash = "sha256:0cdd48acada30d93aa1035767d67dff25702f8de74d7c3919f2e8492c8db2e67"}, + {file = "alembic-1.16.1.tar.gz", hash = "sha256:43d37ba24b3d17bc1eb1024fe0f51cd1dc95aeb5464594a02c6bb9ca9864bfa4"}, ] [package.dependencies] @@ -44,7 +42,6 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -56,7 +53,6 @@ version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, @@ -69,7 +65,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -78,7 +74,6 @@ version = "2.8.0" description = "Python-ASN1 is a simple ASN.1 encoder and decoder for Python 2.7+ and 3.5+." 
optional = false python-versions = "*" -groups = ["main"] files = [ {file = "asn1-2.8.0-py2.py3-none-any.whl", hash = "sha256:1438ac9a53cbc4064330af43b054ae042374f7c8ab46c55358241c15e29f1461"}, {file = "asn1-2.8.0.tar.gz", hash = "sha256:adf77ddc2707cf420c0eae3b99ee30e913afcf0936467d42669820ce6b7d150a"}, @@ -93,7 +88,6 @@ version = "3.13.1" description = "The missing async toolbox" optional = false python-versions = "~=3.8" -groups = ["main"] files = [ {file = "asyncstdlib-3.13.1-py3-none-any.whl", hash = "sha256:a64da68176af1da8c699026cad98f70b184f82b4cb39739e0b9701a2a7541cf9"}, {file = "asyncstdlib-3.13.1.tar.gz", hash = "sha256:f47564b9a3566f8f9172631d88c75fe074b0ce2127963b7265d310df9aeed03a"}, @@ -101,8 +95,8 @@ files = [ [package.extras] doc = ["sphinx", "sphinxcontrib-trio"] -test = ["black ; implementation_name == \"cpython\"", "coverage", "flake8", "flake8-2020", "flake8-bugbear", "mypy ; implementation_name == \"cpython\"", "pytest", "pytest-cov"] -typetest = ["mypy ; implementation_name == \"cpython\"", "pyright", "typing-extensions"] +test = ["black", "coverage", "flake8", "flake8-2020", "flake8-bugbear", "mypy", "pytest", "pytest-cov"] +typetest = ["mypy", "pyright", "typing-extensions"] [[package]] name = "backoff" @@ -110,7 +104,6 @@ version = "2.2.1" description = "Function decoration for backoff and retry" optional = false python-versions = ">=3.7,<4.0" -groups = ["main"] files = [ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, @@ -122,7 +115,6 @@ version = "4.0.1" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.6" -groups = ["main"] files = [ {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, @@ -153,14 +145,13 @@ typecheck = ["mypy"] [[package]] name = "certifi" -version = "2025.1.31" +version = "2025.4.26" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" -groups = ["main"] files = [ - {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, - {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, + {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, + {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, ] [[package]] @@ -169,7 +160,6 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -239,21 +229,19 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] -markers = {main = "platform_python_implementation != \"PyPy\"", test = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} [package.dependencies] pycparser = "*" [[package]] name = "click" -version = "8.1.8" +version = "8.2.1" description = "Composable command line interface toolkit" optional = false -python-versions = ">=3.7" -groups = ["main", "dev"] +python-versions = ">=3.10" files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, + {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, + {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, ] [package.dependencies] @@ -265,12 +253,10 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "dev", "test"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "platform_system == \"Windows\"", test = "sys_platform == \"win32\""} [[package]] name = "coverage" @@ -278,7 +264,6 @@ version = "7.8.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" -groups = ["test"] files = [ {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, @@ -346,64 +331,65 @@ files = [ ] [package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +toml = ["tomli"] [[package]] name = "cryptography" -version = "44.0.2" +version = "44.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" -groups = ["main"] -files = [ - {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"}, - {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"}, - {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"}, - {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"}, - {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"}, - {file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"}, - {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"}, - {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"}, - {file = 
"cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"}, - {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"}, - {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"}, - {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"}, - {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"}, - {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"}, - {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"}, +files = [ + {file = "cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88"}, + {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137"}, + {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c"}, + {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76"}, + {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359"}, + {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43"}, + {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01"}, + {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d"}, + {file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904"}, + {file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44"}, + {file = "cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d"}, + {file = "cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d"}, + {file = "cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f"}, + {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759"}, + {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645"}, + {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2"}, + {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54"}, + {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93"}, + {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c"}, + {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f"}, + {file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5"}, + {file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b"}, + {file = "cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028"}, + {file = "cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334"}, + {file = "cryptography-44.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:cad399780053fb383dc067475135e41c9fe7d901a97dd5d9c5dfb5611afc0d7d"}, + {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:21a83f6f35b9cc656d71b5de8d519f566df01e660ac2578805ab245ffd8523f8"}, + {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fc3c9babc1e1faefd62704bb46a69f359a9819eb0292e40df3fb6e3574715cd4"}, + {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:e909df4053064a97f1e6565153ff8bb389af12c5c8d29c343308760890560aff"}, + {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:dad80b45c22e05b259e33ddd458e9e2ba099c86ccf4e88db7bbab4b747b18d06"}, + {file = "cryptography-44.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:479d92908277bed6e1a1c69b277734a7771c2b78633c224445b5c60a9f4bc1d9"}, + {file = "cryptography-44.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:896530bc9107b226f265effa7ef3f21270f18a2026bc09fed1ebd7b66ddf6375"}, + {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9b4d4a5dbee05a2c390bf212e78b99434efec37b17a4bff42f50285c5c8c9647"}, + {file = 
"cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02f55fb4f8b79c1221b0961488eaae21015b69b210e18c386b69de182ebb1259"}, + {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dd3db61b8fe5be220eee484a17233287d0be6932d056cf5738225b9c05ef4fff"}, + {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:978631ec51a6bbc0b7e58f23b68a8ce9e5f09721940933e9c217068388789fe5"}, + {file = "cryptography-44.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:5d20cc348cca3a8aa7312f42ab953a56e15323800ca3ab0706b8cd452a3a056c"}, + {file = "cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053"}, ] [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] -pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] [[package]] @@ -412,7 +398,6 @@ version = "1.8.2" description = "A utility for ensuring Google-style docstrings stay up to date with the source code." 
optional = false python-versions = ">=3.6,<4.0" -groups = ["main"] files = [ {file = "darglint2-1.8.2-py3-none-any.whl", hash = "sha256:8f950c9b5fab25dd54bf537bef1569c267073e5828cb5ab76428876df6d947af"}, {file = "darglint2-1.8.2.tar.gz", hash = "sha256:11e0fc9c999bf09e192f42b72d202d177cb82da258eba387b24c2f0f5943650f"}, @@ -424,7 +409,6 @@ version = "5.2.1" description = "Decorators for Humans" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, @@ -432,14 +416,13 @@ files = [ [[package]] name = "dishka" -version = "1.5.0" +version = "1.6.0" description = "Cute DI framework with scopes and agreeable API" optional = false python-versions = ">=3.10" -groups = ["main"] files = [ - {file = "dishka-1.5.0-py3-none-any.whl", hash = "sha256:cd8847ac675b4093fe42742d9cf42a49a38d8d1abca46fcc250cd2f2190a2f71"}, - {file = "dishka-1.5.0.tar.gz", hash = "sha256:1e47707f7b40c3a3ab3b736bd5b4ee958939d32ace6199809e4f75bb236c04a8"}, + {file = "dishka-1.6.0-py3-none-any.whl", hash = "sha256:ab1aedee152ce7bb11cfd2673d7ce4001fe2b330d14e84535d7525a68430b2c2"}, + {file = "dishka-1.6.0.tar.gz", hash = "sha256:f1fa5ec7e980d4f618d0c425d1bb81d8e9414894d8ec6553b197d2298774e12f"}, ] [[package]] @@ -448,7 +431,6 @@ version = "2.7.0" description = "DNS toolkit" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, @@ -469,7 +451,6 @@ version = "0.19.1" description = "ECDSA cryptographic signature library (pure python)" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.6" -groups = ["main"] files = [ {file = "ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3"}, {file = "ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61"}, @@ -488,7 +469,6 @@ version = "0.0.3" description = "enum/enum34 compatibility package" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "enum-compat-0.0.3.tar.gz", hash = "sha256:3677daabed56a6f724451d585662253d8fb4e5569845aafa8bb0da36b1a8751e"}, {file = "enum_compat-0.0.3-py3-none-any.whl", hash = "sha256:88091b617c7fc3bbbceae50db5958023c48dc40b50520005aa3bf27f8f7ea157"}, @@ -500,7 +480,6 @@ version = "0.115.12" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d"}, {file = "fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681"}, @@ -521,7 +500,6 @@ version = "24.11.1" description = "Coroutine-based network library" optional = false python-versions = ">=3.9" -groups = ["test"] files = [ {file = "gevent-24.11.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:92fe5dfee4e671c74ffaa431fd7ffd0ebb4b339363d24d0d944de532409b935e"}, {file = 
"gevent-24.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7bfcfe08d038e1fa6de458891bca65c1ada6d145474274285822896a858c870"}, @@ -570,95 +548,74 @@ greenlet = {version = ">=3.1.1", markers = "platform_python_implementation == \" "zope.interface" = "*" [package.extras] -dnspython = ["dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\""] +dnspython = ["dnspython (>=1.16.0,<2.0)", "idna"] docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"] -monitor = ["psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""] -recommended = ["cffi (>=1.17.1) ; platform_python_implementation == \"CPython\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""] -test = ["cffi (>=1.17.1) ; platform_python_implementation == \"CPython\"", "coverage (>=5.0) ; sys_platform != \"win32\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "objgraph", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\"", "requests"] +monitor = ["psutil (>=5.7.0)"] +recommended = ["cffi (>=1.17.1)", "dnspython (>=1.16.0,<2.0)", "idna", "psutil (>=5.7.0)"] +test = ["cffi (>=1.17.1)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idna", "objgraph", "psutil (>=5.7.0)", "requests"] [[package]] name = "greenlet" -version = "3.1.1" +version = "3.2.3" description = "Lightweight in-process concurrent programming" optional = false -python-versions = ">=3.7" -groups = ["main", "test"] -files = [ - {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, - {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, - {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, - {file = 
"greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, - {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, - {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, - {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, - {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, - {file = 
"greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, - {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, - {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, - {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, - {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, - {file = 
"greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, - {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, - {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, - {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, - {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, - {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, - {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, -] -markers = {main = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"", test = "platform_python_implementation == \"CPython\""} +python-versions = ">=3.9" +files = [ + {file = "greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be"}, + {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:761917cac215c61e9dc7324b2606107b3b292a8349bdebb31503ab4de3f559ac"}, + {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:a433dbc54e4a37e4fff90ef34f25a8c00aed99b06856f0119dcf09fbafa16392"}, + {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:72e77ed69312bab0434d7292316d5afd6896192ac4327d44f3d613ecb85b037c"}, + {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:68671180e3849b963649254a882cd544a3c75bfcd2c527346ad8bb53494444db"}, + {file = "greenlet-3.2.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49c8cfb18fb419b3d08e011228ef8a25882397f3a859b9fe1436946140b6756b"}, + {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:efc6dc8a792243c31f2f5674b670b3a95d46fa1c6a912b8e310d6f542e7b0712"}, + {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:731e154aba8e757aedd0781d4b240f1225b075b4409f1bb83b05ff410582cf00"}, + {file = "greenlet-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:96c20252c2f792defe9a115d3287e14811036d51e78b3aaddbee23b69b216302"}, + {file = "greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822"}, + {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83"}, + {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf"}, + {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b"}, + {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147"}, + {file = "greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5"}, + {file = "greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc"}, + {file = "greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba"}, + {file = "greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34"}, + {file = "greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d"}, + {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b"}, + {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d"}, + {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264"}, + {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688"}, + {file = "greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb"}, + {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c"}, + {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163"}, + {file = "greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849"}, + {file = "greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad"}, + {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef"}, + {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3"}, + {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95"}, + {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb"}, + {file = "greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b"}, + {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0"}, + {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36"}, + {file = "greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3"}, + {file = "greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86"}, + {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97"}, + {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728"}, + {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a"}, + {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892"}, + {file = "greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141"}, + {file = "greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a"}, + {file = "greenlet-3.2.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:42efc522c0bd75ffa11a71e09cd8a399d83fafe36db250a87cf1dacfaa15dc64"}, + {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d760f9bdfe79bff803bad32b4d8ffb2c1d2ce906313fc10a83976ffb73d64ca7"}, + {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8324319cbd7b35b97990090808fdc99c27fe5338f87db50514959f8059999805"}, + {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = 
"sha256:8c37ef5b3787567d322331d5250e44e42b58c8c713859b8a04c6065f27efbf72"}, + {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce539fb52fb774d0802175d37fcff5c723e2c7d249c65916257f0a940cee8904"}, + {file = "greenlet-3.2.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:003c930e0e074db83559edc8705f3a2d066d4aa8c2f198aff1e454946efd0f26"}, + {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7e70ea4384b81ef9e84192e8a77fb87573138aa5d4feee541d8014e452b434da"}, + {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:22eb5ba839c4b2156f18f76768233fe44b23a31decd9cc0d4cc8141c211fd1b4"}, + {file = "greenlet-3.2.3-cp39-cp39-win32.whl", hash = "sha256:4532f0d25df67f896d137431b13f4cdce89f7e3d4a96387a41290910df4d3a57"}, + {file = "greenlet-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:aaa7aae1e7f75eaa3ae400ad98f8644bb81e1dc6ba47ce8a93d3f17274e08322"}, + {file = "greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365"}, +] [package.extras] docs = ["Sphinx", "furo"] @@ -670,7 +627,6 @@ version = "1.9.0" description = "Python GSSAPI Wrapper" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "gssapi-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:261e00ac426d840055ddb2199f4989db7e3ce70fa18b1538f53e392b4823e8f1"}, {file = "gssapi-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:14a1ae12fdf1e4c8889206195ba1843de09fe82587fa113112887cd5894587c6"}, @@ -704,31 +660,29 @@ decorator = "*" [[package]] name = "h11" -version = "0.14.0" +version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false -python-versions = ">=3.7" -groups = ["main"] +python-versions = ">=3.8" files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, ] [[package]] name = "httpcore" -version = "1.0.7" +version = "1.0.9" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" -groups = ["main"] files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, ] [package.dependencies] certifi = "*" -h11 = ">=0.13,<0.15" +h11 = ">=0.16" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] @@ -742,7 +696,6 @@ version = "0.28.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -755,7 +708,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -767,7 +720,6 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -782,7 +734,6 @@ version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" -groups = ["test"] files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, @@ -794,7 +745,6 @@ version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -812,7 +762,6 @@ version = "1.0.1" description = "A Python utility library for working with Lightweight Directory Access Protocol (LDAP) filters." 
optional = false python-versions = ">=3.4" -groups = ["main"] files = [ {file = "ldap_filter-1.0.1-py2.py3-none-any.whl", hash = "sha256:d0691b58d7fc867e3c24663122773e7ebccdda35b2f35caa9bff30357a9807ab"}, {file = "ldap_filter-1.0.1.tar.gz", hash = "sha256:b0b0b51ff8b681459dc9cb958c1238cb941b39d03280213c2d1f2cd142acbedf"}, @@ -827,7 +776,6 @@ version = "2.9.1" description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"}, {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"}, @@ -842,7 +790,6 @@ version = "0.7.3" description = "Python logging made (stupidly) simple" optional = false python-versions = "<4.0,>=3.5" -groups = ["main"] files = [ {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, @@ -853,18 +800,17 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] +dev = ["Sphinx (==8.1.3)", "build (==1.2.2)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.5.0)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.13.0)", "mypy (==v1.4.1)", "myst-parser (==4.0.0)", "pre-commit (==4.0.1)", "pytest (==6.1.2)", "pytest (==8.3.2)", "pytest-cov (==2.12.1)", "pytest-cov (==5.0.0)", "pytest-cov (==6.0.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.1.0)", "sphinx-rtd-theme (==3.0.2)", "tox (==3.27.1)", "tox (==4.23.2)", "twine (==6.0.1)"] [[package]] name = "mako" -version = "1.3.9" +version = "1.3.10" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
optional = false python-versions = ">=3.8" -groups = ["main"] files = [ - {file = "Mako-1.3.9-py3-none-any.whl", hash = "sha256:95920acccb578427a9aa38e37a186b1e43156c87260d7ba18ca63aa4c7cbd3a1"}, - {file = "mako-1.3.9.tar.gz", hash = "sha256:b5d65ff3462870feec922dbccf38f6efb44e5714d7b593a656be86663d8600ac"}, + {file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"}, + {file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"}, ] [package.dependencies] @@ -881,7 +827,6 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -948,48 +893,48 @@ files = [ [[package]] name = "mypy" -version = "1.15.0" +version = "1.16.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" -groups = ["main", "linters"] -files = [ - {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, - {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, - {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, - {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, - {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, - {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, - {file = 
"mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, - {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, - {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, - {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, - {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, - {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, - {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, - {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, - {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, +files = [ + {file = "mypy-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7909541fef256527e5ee9c0a7e2aeed78b6cda72ba44298d1334fe7881b05c5c"}, + {file = "mypy-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e71d6f0090c2256c713ed3d52711d01859c82608b5d68d4fa01a3fe30df95571"}, + {file = "mypy-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:936ccfdd749af4766be824268bfe22d1db9eb2f34a3ea1d00ffbe5b5265f5491"}, + {file = "mypy-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4086883a73166631307fdd330c4a9080ce24913d4f4c5ec596c601b3a4bdd777"}, + {file = "mypy-1.16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:feec38097f71797da0231997e0de3a58108c51845399669ebc532c815f93866b"}, + {file = "mypy-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:09a8da6a0ee9a9770b8ff61b39c0bb07971cda90e7297f4213741b48a0cc8d93"}, + {file = "mypy-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9f826aaa7ff8443bac6a494cf743f591488ea940dd360e7dd330e30dd772a5ab"}, + {file = "mypy-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82d056e6faa508501af333a6af192c700b33e15865bda49611e3d7d8358ebea2"}, + {file = "mypy-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:089bedc02307c2548eb51f426e085546db1fa7dd87fbb7c9fa561575cf6eb1ff"}, + {file = "mypy-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a2322896003ba66bbd1318c10d3afdfe24e78ef12ea10e2acd985e9d684a666"}, + {file = "mypy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:021a68568082c5b36e977d54e8f1de978baf401a33884ffcea09bd8e88a98f4c"}, + {file = "mypy-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:54066fed302d83bf5128632d05b4ec68412e1f03ef2c300434057d66866cea4b"}, + {file = "mypy-1.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c5436d11e89a3ad16ce8afe752f0f373ae9620841c50883dc96f8b8805620b13"}, + {file = "mypy-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f2622af30bf01d8fc36466231bdd203d120d7a599a6d88fb22bdcb9dbff84090"}, + {file = "mypy-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d045d33c284e10a038f5e29faca055b90eee87da3fc63b8889085744ebabb5a1"}, + {file = "mypy-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b4968f14f44c62e2ec4a038c8797a87315be8df7740dc3ee8d3bfe1c6bf5dba8"}, + {file = "mypy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb14a4a871bb8efb1e4a50360d4e3c8d6c601e7a31028a2c79f9bb659b63d730"}, + {file = "mypy-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:bd4e1ebe126152a7bbaa4daedd781c90c8f9643c79b9748caa270ad542f12bec"}, + {file = "mypy-1.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a9e056237c89f1587a3be1a3a70a06a698d25e2479b9a2f57325ddaaffc3567b"}, + {file = "mypy-1.16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b07e107affb9ee6ce1f342c07f51552d126c32cd62955f59a7db94a51ad12c0"}, + {file = "mypy-1.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6fb60cbd85dc65d4d63d37cb5c86f4e3a301ec605f606ae3a9173e5cf34997b"}, + {file = "mypy-1.16.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7e32297a437cc915599e0578fa6bc68ae6a8dc059c9e009c628e1c47f91495d"}, + {file = "mypy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:afe420c9380ccec31e744e8baff0d406c846683681025db3531b32db56962d52"}, + {file = "mypy-1.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:55f9076c6ce55dd3f8cd0c6fff26a008ca8e5131b89d5ba6d86bd3f47e736eeb"}, + {file = "mypy-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f56236114c425620875c7cf71700e3d60004858da856c6fc78998ffe767b73d3"}, + {file = "mypy-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:15486beea80be24ff067d7d0ede673b001d0d684d0095803b3e6e17a886a2a92"}, + {file = "mypy-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f2ed0e0847a80655afa2c121835b848ed101cc7b8d8d6ecc5205aedc732b1436"}, + {file = 
"mypy-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb5fbc8063cb4fde7787e4c0406aa63094a34a2daf4673f359a1fb64050e9cb2"}, + {file = "mypy-1.16.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a5fcfdb7318c6a8dd127b14b1052743b83e97a970f0edb6c913211507a255e20"}, + {file = "mypy-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:2e7e0ad35275e02797323a5aa1be0b14a4d03ffdb2e5f2b0489fa07b89c67b21"}, + {file = "mypy-1.16.0-py3-none-any.whl", hash = "sha256:29e1499864a3888bca5c1542f2d7232c6e586295183320caa95758fc84034031"}, + {file = "mypy-1.16.0.tar.gz", hash = "sha256:84b94283f817e2aa6350a14b4a8fb2a35a53c286f97c9d30f53b63620e7af8ab"}, ] [package.dependencies] mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" typing_extensions = ">=4.6.0" [package.extras] @@ -1001,26 +946,24 @@ reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "1.0.0" +version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false -python-versions = ">=3.5" -groups = ["main", "linters"] +python-versions = ">=3.8" files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] [[package]] name = "packaging" -version = "24.2" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["test"] files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] @@ -1029,7 +972,6 @@ version = "1.7.4" description = "comprehensive password hashing framework supporting over 30 schemes" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, @@ -1044,21 +986,31 @@ bcrypt = ["bcrypt (>=3.1.0)"] build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"] totp = ["cryptography"] +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + [[package]] name = "pluggy" -version = "1.5.0" +version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.8" -groups = ["test"] +python-versions = ">=3.9" files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "proxy-protocol" @@ -1066,7 +1018,6 @@ version = "0.11.3" description = "PROXY protocol library with asyncio server implementation" optional = false python-versions = "~=3.8" -groups = ["main"] files = [ {file = "proxy_protocol-0.11.3-py3-none-any.whl", hash = "sha256:77d541828aed30c5d9eea9c4c9af1dd85c2c4a2f829e0ecb003cb978f738a3f1"}, {file = "proxy_protocol-0.11.3.tar.gz", hash = "sha256:a9a1bd7bd90bfa82444a6bfc7cf567fa0a4d4144c9cadf392b8736ba651a662c"}, @@ -1082,25 +1033,24 @@ doc = ["cloud-sptheme", "sphinx", "sphinx-autodoc-typehints"] [[package]] name = "psycopg" -version = "3.2.8" +version = "3.2.9" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ - {file = "psycopg-3.2.8-py3-none-any.whl", hash = "sha256:0e960f1977d77de7f1ace4b54590f686b52c2f9ab1f61fff4141887fc711d9e7"}, - {file = "psycopg-3.2.8.tar.gz", hash = "sha256:cc995d836841e400c4f615d8dea351dc39697ad29df84d428f9c38c8040222f8"}, + {file = "psycopg-3.2.9-py3-none-any.whl", hash = "sha256:01a8dadccdaac2123c916208c96e06631641c0566b22005493f09663c7a8d3b6"}, + {file = "psycopg-3.2.9.tar.gz", hash = "sha256:2fbb46fcd17bc81f993f28c47f1ebea38d66ae97cc2dbc3cad73b37cefbff700"}, ] [package.dependencies] -psycopg-binary = {version = "3.2.8", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} +psycopg-binary = {version = "3.2.9", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} psycopg-pool = {version = "*", optional = true, markers = "extra == \"pool\""} typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.2.8) ; implementation_name != \"pypy\""] -c = ["psycopg-c (==3.2.8) ; implementation_name != \"pypy\""] +binary = ["psycopg-binary (==3.2.9)"] +c = ["psycopg-c (==3.2.9)"] dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "types-shapely (>=2.0)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] @@ -1108,78 
+1058,76 @@ test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", [[package]] name = "psycopg-binary" -version = "3.2.8" +version = "3.2.9" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.8" -groups = ["main"] -markers = "implementation_name != \"pypy\"" -files = [ - {file = "psycopg_binary-3.2.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0694548e1633c2ea819406c5bfd297bf1b4f6f8638dec0d639ab9764fdebcb2a"}, - {file = "psycopg_binary-3.2.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:85851cdc18b514f80790f711a25406515b42f6b64e9a5d3940ae399e3b0e2c23"}, - {file = "psycopg_binary-3.2.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:040c2a768bd9ae572421ee5695a6299e08147dd44bc8ac514961323dc5c31a62"}, - {file = "psycopg_binary-3.2.8-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdb5567e81374734539f7b7deb9d547271585ec42a7866ea06bffa58fa5cd5a"}, - {file = "psycopg_binary-3.2.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289d2575edc00391c4bf586048701638126f396a76db83f36463d1c2b3495aae"}, - {file = "psycopg_binary-3.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c3a3b330c44e01ee29b3b76ddbb86890fbaf7e4b2f9abd43220d050642edee3"}, - {file = "psycopg_binary-3.2.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:814d533e6a8359c2962e28a36fad2698c15639716459fe1100e859b6173c3b6d"}, - {file = "psycopg_binary-3.2.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b67f78f75b033d8833ec40432c28610c275455e0172762919912a5e6b9db6366"}, - {file = "psycopg_binary-3.2.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:b98f7dc1ed83889803d0df2d327c94c95a487b9976215c3e9adb0dbb7a220d76"}, - {file = "psycopg_binary-3.2.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a9c54bd5d91c6e1cc1e6f9127f175ce3162d8435cf8d4715149598c9baab4ff5"}, - {file = "psycopg_binary-3.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:2aba18f57da97b96ea9a6663c8982038a9d4a47b1f94f004ffa9491bd7d21160"}, - {file = "psycopg_binary-3.2.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:076bd384a0d8bb7a59514b0d62bb75b48f83955a32ebec408b08db0e51bb06e5"}, - {file = "psycopg_binary-3.2.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f162a44ed7e06ed075cbc9dfda23850a7f702c44af4b62061e9c83430130ff36"}, - {file = "psycopg_binary-3.2.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e450989848bb63315e1768e6c6026cfdf6f72450c3752ce9f6e307c1d62b8d"}, - {file = "psycopg_binary-3.2.8-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90c0f2c88578db2bbeea98cd10fcb6f635c0b5bdd23ae90a931716589094ed08"}, - {file = "psycopg_binary-3.2.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75a929759a498b1b59481091da731f928e0cdbd3d7393b8a1022a1b57f01a91a"}, - {file = "psycopg_binary-3.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d310d188bb349a5f66cc037f7416fd640ca9847d0083a63ba6c091fd45075482"}, - {file = "psycopg_binary-3.2.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f4965bc9d2ef8eed31ff411840e2ab0e1d0c1c59575e0154ced7b652ef0eaa33"}, - {file = "psycopg_binary-3.2.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f1c26c1213efba8102911099af2203db6859855f7ceba21fd941e6d2bc7e84e"}, - {file = "psycopg_binary-3.2.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:58c5c7ef4daaaefb1e656a307ceb61aa3a101a5eb843004579d423428bef66e5"}, - {file = "psycopg_binary-3.2.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f501ee2b41a153aee59a3a5db238718f801ac39eec54ad3f28fbe657002e944"}, - {file = "psycopg_binary-3.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:fe51d8297bc8c178be1cc0ac6c060bfd706afb5cb04e794a44feae27c0afe6f4"}, - {file = "psycopg_binary-3.2.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1c330b86bc5ea67fee529d3c7b53c6394f8cacad77a3214c50fce0d5bdbc10cf"}, - {file = "psycopg_binary-3.2.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9ce4e637ac339bfe583ac26e18232c33f9039c93cfc01adaec550cb5e8a03f87"}, - {file = "psycopg_binary-3.2.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:272ee7cd175996c7262f7ffb561593829b448032a52c545d844bc6a4fb77b078"}, - {file = "psycopg_binary-3.2.8-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7237b1abcc36c04b45916c983a6c3d799104201f72475eab367874a5f37d3e7"}, - {file = "psycopg_binary-3.2.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c9a30a1d8338823603cf064637aae5580c41ed95675c7aee6a47165784d0464"}, - {file = "psycopg_binary-3.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f27d5ae05062f8ea0da6c11262ba8a1ab70864b1c18ea65d9e61636a8c72da4"}, - {file = "psycopg_binary-3.2.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:10fa234801b9b8b23799f869300c632a3298fb8daecd2d5734d08ab76e7a17cb"}, - {file = "psycopg_binary-3.2.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b055dba7df07c39f6a40a71862bf5525320350e3bd4c6d1809342fb7061d111f"}, - {file = "psycopg_binary-3.2.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8c36b8d3f76e2831f3b33f34226952ed39d1d6a79cb2ca2bf044f28df9c6b5f0"}, - {file = "psycopg_binary-3.2.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:764f9163ad9cfd32abd2d06f3000a52faf7a2b2411801d681ebe9158d72b46d5"}, - {file = "psycopg_binary-3.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:d8fa6fec9f7e225458d0031c43dd6d20673f55953eebe539d37e4b94b8831984"}, - {file = "psycopg_binary-3.2.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84f03982598a6353cf70cafae34c16da28eac74ba9862cc740b6ba0dcf9721fc"}, - {file = "psycopg_binary-3.2.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d247f55b28afc4a87b77240e733419ad0c82be2ec122a0b93fbb227ee0e6608e"}, - {file = "psycopg_binary-3.2.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89eb0c15c0eec1c81256e9df3c01d9bd1067f4365872f6f81da7521ab30e19de"}, - {file = "psycopg_binary-3.2.8-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aef90bdc201f2d375e5996d44124c588d3a7ce9f67c79f30531cdc5ead2c3d"}, - {file = "psycopg_binary-3.2.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b60a17eca6a6906af8084c518be81bd71a3d50ddc69c0dc667d6ce9b8f4d8604"}, - {file = "psycopg_binary-3.2.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8297d92f41e19b6794b04bdf7d53938a5ad8e68f7105b50048a078477b7ee4b8"}, - {file = "psycopg_binary-3.2.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a547d53e615776f8e79768aacd7a12c6f0131fa1d6820d2e3e848261b0ad3849"}, - {file = "psycopg_binary-3.2.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:058cfd606f1dc0be9b5a80d208fb9b487f7b4986a955322cbb45cee7e3e8056e"}, - {file = "psycopg_binary-3.2.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:15d21ed3292fb19b6ab096c3522d561d196eeef3903c31f1318df7478eb96fa5"}, - {file = "psycopg_binary-3.2.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6384f81c33a369144e4b98cbb4bf3ec4ac102ae11cfb84e70cf99aa43a44925"}, - {file = "psycopg_binary-3.2.8-cp313-cp313-win_amd64.whl", hash = "sha256:60db59a0f1676f70c027a8273b7b360af85ef87bf43cd49eb63727b72a170a9f"}, - {file = "psycopg_binary-3.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:94fcd34521b45d6392a347a3f0d3f913dc26c70bfe06ba7b57f8e2a5c5fb4722"}, - {file = "psycopg_binary-3.2.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f203d9d34a2b8e4808d042437b6f5eebb36d9236bb28e89ad9969094fce6354a"}, - {file = "psycopg_binary-3.2.8-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36ee041375a1d406335fe10a0d80f9429f7144fd128caa0183b9ac8932cc7219"}, - {file = "psycopg_binary-3.2.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:37120696713a1eca988504eaa36cb90ea8a48c58dbb0c49f5db6464abfcb9bec"}, - {file = "psycopg_binary-3.2.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d7400d163efba4e4e94e0b1777289d990c55fd6e2dd88d0145c5917e3f398ed"}, - {file = "psycopg_binary-3.2.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c39a2218d38f36740898d3bf8f9cccd5efa9c10ef9e7a3ffa5db8972b278df1b"}, - {file = "psycopg_binary-3.2.8-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:1d551879d0614cc8b9027d9a20460e22b36440ecf0f97abcee30f3a9cace676f"}, - {file = "psycopg_binary-3.2.8-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:05db811cf6fba97187ba287ecc097c6735c178fe6e9383df44d95f0be70ed1d6"}, - {file = "psycopg_binary-3.2.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:43ff57a26f0c765b78f59cb81b9f2d6dd22ee729a1f07b3e8a7f7a6e019435ed"}, - {file = "psycopg_binary-3.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:2c1ca0296260a30d05ea45cb69824bc99711232d96ff5980a9458e91bb4d6581"}, - {file = "psycopg_binary-3.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8b1b5fd2e4ef6b28f0740fff4426e51d71390dbf970795f2e445536ce47da480"}, - {file = "psycopg_binary-3.2.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08ce4010185ee6d42287b37b6d2a18006fa9c053ecd2ed50d5bd428b99bdbee5"}, - {file = "psycopg_binary-3.2.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4702427a3f6f240f888d78e36de37cc6d4298e95178e065fbc0c353fe692774"}, - {file = "psycopg_binary-3.2.8-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42741f3fadfcef01252f4f6c67ab34a238c331c2504d976559236633618b1417"}, - {file = "psycopg_binary-3.2.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53dfc4504a0e3b0f5efb1b94c9da68b917adc8a9c49c1b0061f6fa8125bd136c"}, - {file = "psycopg_binary-3.2.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72f6269a64ee4f8b6b27116abf6536b31c1757973b0f0e612e19a1ad5376a73a"}, - {file = "psycopg_binary-3.2.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b72a737b88d1a14b2d9efea6819579ee8c4f335825f92f8d6e725f1e72ac519f"}, - {file = "psycopg_binary-3.2.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2705c34cba861699539619544078fe2a314b79c582874e813a6e512782b22638"}, - {file = "psycopg_binary-3.2.8-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b06a7cf2585bf6a3b4f9397af48427f558049a570d44b142eb9948c9b734c8ae"}, - {file = "psycopg_binary-3.2.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:2f1be2d3160cdfb4732cd9f5777b0e9c78509ef0033dd6cff34ee0f16560e2fe"}, - {file = "psycopg_binary-3.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:6a45e2409352a99c8b4f733b86daf19c4df3dc7d9c1f2fb880adf7dfa225678a"}, +files = [ + {file = "psycopg_binary-3.2.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:528239bbf55728ba0eacbd20632342867590273a9bacedac7538ebff890f1093"}, + {file = "psycopg_binary-3.2.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4978c01ca4c208c9d6376bd585e2c0771986b76ff7ea518f6d2b51faece75e8"}, + {file = "psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ed2bab85b505d13e66a914d0f8cdfa9475c16d3491cf81394e0748b77729af2"}, + {file = "psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:799fa1179ab8a58d1557a95df28b492874c8f4135101b55133ec9c55fc9ae9d7"}, + {file = "psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb37ac3955d19e4996c3534abfa4f23181333974963826db9e0f00731274b695"}, + {file = "psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001e986656f7e06c273dd4104e27f4b4e0614092e544d950c7c938d822b1a894"}, + {file = "psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fa5c80d8b4cbf23f338db88a7251cef8bb4b68e0f91cf8b6ddfa93884fdbb0c1"}, + {file = "psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:39a127e0cf9b55bd4734a8008adf3e01d1fd1cb36339c6a9e2b2cbb6007c50ee"}, + {file = "psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fb7599e436b586e265bea956751453ad32eb98be6a6e694252f4691c31b16edb"}, + {file = "psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5d2c9fe14fe42b3575a0b4e09b081713e83b762c8dc38a3771dd3265f8f110e7"}, + {file = "psycopg_binary-3.2.9-cp310-cp310-win_amd64.whl", hash = "sha256:7e4660fad2807612bb200de7262c88773c3483e85d981324b3c647176e41fdc8"}, + {file = "psycopg_binary-3.2.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2504e9fd94eabe545d20cddcc2ff0da86ee55d76329e1ab92ecfcc6c0a8156c4"}, + {file = "psycopg_binary-3.2.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:093a0c079dd6228a7f3c3d82b906b41964eaa062a9a8c19f45ab4984bf4e872b"}, + {file = "psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:387c87b51d72442708e7a853e7e7642717e704d59571da2f3b29e748be58c78a"}, + {file = "psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9ac10a2ebe93a102a326415b330fff7512f01a9401406896e78a81d75d6eddc"}, + {file = "psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72fdbda5b4c2a6a72320857ef503a6589f56d46821592d4377c8c8604810342b"}, + {file = "psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f34e88940833d46108f949fdc1fcfb74d6b5ae076550cd67ab59ef47555dba95"}, + {file = "psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a3e0f89fe35cb03ff1646ab663dabf496477bab2a072315192dbaa6928862891"}, + {file = "psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6afb3e62f2a3456f2180a4eef6b03177788df7ce938036ff7f09b696d418d186"}, + {file = "psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:cc19ed5c7afca3f6b298bfc35a6baa27adb2019670d15c32d0bb8f780f7d560d"}, + {file = "psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:bc75f63653ce4ec764c8f8c8b0ad9423e23021e1c34a84eb5f4ecac8538a4a4a"}, + {file = "psycopg_binary-3.2.9-cp311-cp311-win_amd64.whl", hash = "sha256:3db3ba3c470801e94836ad78bf11fd5fab22e71b0c77343a1ee95d693879937a"}, + {file = "psycopg_binary-3.2.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be7d650a434921a6b1ebe3fff324dbc2364393eb29d7672e638ce3e21076974e"}, + {file = "psycopg_binary-3.2.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a76b4722a529390683c0304501f238b365a46b1e5fb6b7249dbc0ad6fea51a0"}, + {file = "psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96a551e4683f1c307cfc3d9a05fec62c00a7264f320c9962a67a543e3ce0d8ff"}, + {file = "psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61d0a6ceed8f08c75a395bc28cb648a81cf8dee75ba4650093ad1a24a51c8724"}, + {file = "psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad280bbd409bf598683dda82232f5215cfc5f2b1bf0854e409b4d0c44a113b1d"}, + {file = "psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76eddaf7fef1d0994e3d536ad48aa75034663d3a07f6f7e3e601105ae73aeff6"}, + {file = "psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:52e239cd66c4158e412318fbe028cd94b0ef21b0707f56dcb4bdc250ee58fd40"}, + {file = "psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:08bf9d5eabba160dd4f6ad247cf12f229cc19d2458511cab2eb9647f42fa6795"}, + {file = "psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1b2cf018168cad87580e67bdde38ff5e51511112f1ce6ce9a8336871f465c19a"}, + {file = "psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:14f64d1ac6942ff089fc7e926440f7a5ced062e2ed0949d7d2d680dc5c00e2d4"}, + {file = "psycopg_binary-3.2.9-cp312-cp312-win_amd64.whl", hash = "sha256:7a838852e5afb6b4126f93eb409516a8c02a49b788f4df8b6469a40c2157fa21"}, + {file = "psycopg_binary-3.2.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:98bbe35b5ad24a782c7bf267596638d78aa0e87abc7837bdac5b2a2ab954179e"}, + {file = "psycopg_binary-3.2.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:72691a1615ebb42da8b636c5ca9f2b71f266be9e172f66209a361c175b7842c5"}, + {file = "psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25ab464bfba8c401f5536d5aa95f0ca1dd8257b5202eede04019b4415f491351"}, + {file = "psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e8aeefebe752f46e3c4b769e53f1d4ad71208fe1150975ef7662c22cca80fab"}, + {file = "psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7e4e4dd177a8665c9ce86bc9caae2ab3aa9360b7ce7ec01827ea1baea9ff748"}, + {file = "psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fc2915949e5c1ea27a851f7a472a7da7d0a40d679f0a31e42f1022f3c562e87"}, + {file = "psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a1fa38a4687b14f517f049477178093c39c2a10fdcced21116f47c017516498f"}, + {file = "psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5be8292d07a3ab828dc95b5ee6b69ca0a5b2e579a577b39671f4f5b47116dfd2"}, + {file = "psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:778588ca9897b6c6bab39b0d3034efff4c5438f5e3bd52fda3914175498202f9"}, + {file = "psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:f0d5b3af045a187aedbd7ed5fc513bd933a97aaff78e61c3745b330792c4345b"}, + {file = "psycopg_binary-3.2.9-cp313-cp313-win_amd64.whl", hash = "sha256:2290bc146a1b6a9730350f695e8b670e1d1feb8446597bed0bbe7c3c30e0abcb"}, + {file = "psycopg_binary-3.2.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df22ec17390ec5ccb38d211fb251d138d37a43344492858cea24de8efa15003"}, + {file = "psycopg_binary-3.2.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac3a6e926421e976c1c2653624e1294f162dc67ac55f9addbe8f7b8d08ce603"}, + {file = "psycopg_binary-3.2.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf789be42aea5752ee396d58de0538d5fcb76795c85fb03ab23620293fb81b6f"}, + {file = "psycopg_binary-3.2.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f05b9dafa5670a7503abc715af081dbbb176a8e6770de77bccaeb9024206c5"}, + {file = "psycopg_binary-3.2.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2d7a6646d41228e9049978be1f3f838b557a1bde500b919906d54c4390f5086"}, + {file = "psycopg_binary-3.2.9-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a4d76e28df27ce25dc19583407f5c6c6c2ba33b443329331ab29b6ef94c8736d"}, + {file = "psycopg_binary-3.2.9-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:418f52b77b715b42e8ec43ee61ca74abc6765a20db11e8576e7f6586488a266f"}, + {file = "psycopg_binary-3.2.9-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:1f1736d5b21f69feefeef8a75e8d3bf1f0a1e17c165a7488c3111af9d6936e91"}, + {file = "psycopg_binary-3.2.9-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5918c0fab50df764812f3ca287f0d716c5c10bedde93d4da2cefc9d40d03f3aa"}, + {file = "psycopg_binary-3.2.9-cp38-cp38-win_amd64.whl", hash = "sha256:7b617b81f08ad8def5edd110de44fd6d326f969240cc940c6f6b3ef21fe9c59f"}, + {file = "psycopg_binary-3.2.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587a3f19954d687a14e0c8202628844db692dbf00bba0e6d006659bf1ca91cbe"}, + {file = "psycopg_binary-3.2.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:791759138380df21d356ff991265fde7fe5997b0c924a502847a9f9141e68786"}, + {file = "psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95315b8c8ddfa2fdcb7fe3ddea8a595c1364524f512160c604e3be368be9dd07"}, + {file = "psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18ac08475c9b971237fcc395b0a6ee4e8580bb5cf6247bc9b8461644bef5d9f4"}, + {file = "psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac2c04b6345e215e65ca6aef5c05cc689a960b16674eaa1f90a8f86dfaee8c04"}, + {file = "psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1ab25e3134774f1e476d4bb9050cdec25f10802e63e92153906ae934578734"}, + {file = "psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4bfec4a73e8447d8fe8854886ffa78df2b1c279a7592241c2eb393d4499a17e2"}, + {file = "psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:166acc57af5d2ff0c0c342aed02e69a0cd5ff216cae8820c1059a6f3b7cf5f78"}, + {file = "psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:413f9e46259fe26d99461af8e1a2b4795a4e27cc8ac6f7919ec19bcee8945074"}, + {file = "psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:354dea21137a316b6868ee41c2ae7cce001e104760cf4eab3ec85627aed9b6cd"}, + {file = "psycopg_binary-3.2.9-cp39-cp39-win_amd64.whl", hash = 
"sha256:24ddb03c1ccfe12d000d950c9aba93a7297993c4e3905d9f2c9795bb0764d523"}, ] [[package]] @@ -1188,7 +1136,6 @@ version = "3.2.6" description = "Connection Pool for Psycopg" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7"}, {file = "psycopg_pool-3.2.6.tar.gz", hash = "sha256:0f92a7817719517212fbfe2fd58b8c35c1850cdd2a80d36b581ba2085d9148e5"}, @@ -1203,7 +1150,6 @@ version = "1.0.6" description = "Python Hot Reload starts the given program and reloads it whenever any file changes in the current directory or imported modules." optional = false python-versions = ">=3.6" -groups = ["dev"] files = [ {file = "py-hot-reload-1.0.6.tar.gz", hash = "sha256:09ef5ac863d0b2b776346527b3507751cdde4980dfda6504f7fcd451bab96905"}, {file = "py_hot_reload-1.0.6-py3-none-any.whl", hash = "sha256:d9a77538033e31ec3e633a1e3af0d4da4897916cf229b213e748d36f6cf3dfe0"}, @@ -1219,7 +1165,6 @@ version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -1231,167 +1176,194 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" -groups = ["main", "test"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] -markers = {main = "platform_python_implementation != \"PyPy\"", test = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} [[package]] name = "pydantic" -version = "2.10.6" +version = "2.11.5" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" -groups = ["main"] +python-versions = ">=3.9" files = [ - {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, - {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, + {file = "pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7"}, + {file = "pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.2" +pydantic-core = "2.33.2" typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.27.2" +version = "2.33.2" description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, - {file = 
"pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, - {file = 
"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, - {file = 
"pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, - 
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, - {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, +python-versions = ">=3.9" +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + 
{file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = 
"pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, ] [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyjwt" +version = "2.9.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + [[package]] name = "pytest" -version = "8.3.5" +version = "8.4.0" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.8" -groups = ["test"] +python-versions = ">=3.9" files = [ - {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, - {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, + {file = "pytest-8.4.0-py3-none-any.whl", hash = "sha256:f40f825768ad76c0977cbacdf1fd37c6f7a468e460ea6a0636078f8972d4517e"}, + {file = "pytest-8.4.0.tar.gz", hash = "sha256:14d920b48472ea0dbf68e45b96cd1ffda4705f33307dcc86c676c1b5104838a6"}, ] [package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +iniconfig = ">=1" +packaging = ">=20" pluggy = ">=1.5,<2" +pygments = ">=2.7.2" [package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" @@ -1399,7 +1371,6 @@ version = "0.26.0" description = "Pytest support for asyncio" optional = false python-versions = ">=3.9" -groups = ["test"] files = [ {file = "pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0"}, {file = "pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f"}, @@ -1418,7 +1389,6 @@ version = "4.1.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.7" -groups = ["test"] files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, @@ -1433,26 +1403,26 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "python-jose" -version = "3.3.0" +version = "3.5.0" description = "JOSE implementation in Python" optional = false -python-versions = "*" -groups = ["main"] +python-versions = ">=3.9" files = [ - {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, - {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, + {file = "python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771"}, + {file = "python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b"}, ] [package.dependencies] cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""} ecdsa = "!=0.15" -pyasn1 = "*" -rsa = "*" +pyasn1 = ">=0.5.0" +rsa = ">=4.0,<4.1.1 || >4.1.1,<4.4 || >4.4,<5.0" [package.extras] cryptography = ["cryptography (>=3.4.0)"] -pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] -pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] +pycrypto = ["pycrypto (>=2.6.0,<2.7.0)"] +pycryptodome = ["pycryptodome (>=3.3.1,<4.0.0)"] +test = ["pytest", "pytest-cov"] [[package]] name = "python-multipart" @@ -1460,7 +1430,6 @@ version = "0.0.20" description = "A streaming multipart parser for Python" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ {file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, {file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, @@ -1468,42 +1437,42 @@ files = [ [[package]] name = "pytz" -version = "2025.1" +version = "2025.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" -groups = ["main"] files = [ - {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, - {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] [[package]] name = "redis" -version = "5.2.1" +version = "5.3.0" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.8" -groups = ["main"] files = [ - {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, - {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, + {file = "redis-5.3.0-py3-none-any.whl", hash = "sha256:f1deeca1ea2ef25c1e4e46b07f4ea1275140526b1feea4c6459c0ec27a10ef83"}, + {file = "redis-5.3.0.tar.gz", hash = "sha256:8d69d2dde11a12dc85d0dbf5c45577a5af048e2456f7077d87ad35c1c81c310e"}, ] 
+[package.dependencies] +PyJWT = ">=2.9.0,<2.10.0" + [package.extras] hiredis = ["hiredis (>=3.0.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] [[package]] name = "rsa" -version = "4.9" +version = "4.9.1" description = "Pure-Python RSA implementation" optional = false -python-versions = ">=3.6,<4" -groups = ["main"] +python-versions = "<4,>=3.6" files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, ] [package.dependencies] @@ -1515,7 +1484,6 @@ version = "0.11.9" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" -groups = ["linters"] files = [ {file = "ruff-0.11.9-py3-none-linux_armv6l.whl", hash = "sha256:a31a1d143a5e6f499d1fb480f8e1e780b4dfdd580f86e05e87b835d22c5c6f8c"}, {file = "ruff-0.11.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:66bc18ca783b97186a1f3100e91e492615767ae0a3be584e1266aa9051990722"}, @@ -1539,24 +1507,23 @@ files = [ [[package]] name = "setuptools" -version = "78.0.1" +version = "80.9.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" -groups = ["test"] files = [ - {file = "setuptools-78.0.1-py3-none-any.whl", hash = "sha256:1cc9b32ee94f93224d6c80193cbb768004667aa2f2732a473d6949b0236c1d4e"}, - {file = "setuptools-78.0.1.tar.gz", hash = "sha256:4321d2dc2157b976dee03e1037c9f2bc5fea503c0c47d3c9458e0e8e49e659ce"}, + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] -core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] +core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks 
(!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" @@ -1564,7 +1531,6 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -1576,7 +1542,6 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" -groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -1584,82 +1549,81 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.39" +version = "2.0.41" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "SQLAlchemy-2.0.39-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:66a40003bc244e4ad86b72abb9965d304726d05a939e8c09ce844d27af9e6d37"}, - {file = "SQLAlchemy-2.0.39-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67de057fbcb04a066171bd9ee6bcb58738d89378ee3cabff0bffbf343ae1c787"}, - {file = "SQLAlchemy-2.0.39-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:533e0f66c32093a987a30df3ad6ed21170db9d581d0b38e71396c49718fbb1ca"}, - {file = "SQLAlchemy-2.0.39-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7399d45b62d755e9ebba94eb89437f80512c08edde8c63716552a3aade61eb42"}, - {file = "SQLAlchemy-2.0.39-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:788b6ff6728072b313802be13e88113c33696a9a1f2f6d634a97c20f7ef5ccce"}, - {file = "SQLAlchemy-2.0.39-cp37-cp37m-win32.whl", hash = "sha256:01da15490c9df352fbc29859d3c7ba9cd1377791faeeb47c100832004c99472c"}, - {file = "SQLAlchemy-2.0.39-cp37-cp37m-win_amd64.whl", hash = "sha256:f2bcb085faffcacf9319b1b1445a7e1cfdc6fb46c03f2dce7bc2d9a4b3c1cdc5"}, - {file = "SQLAlchemy-2.0.39-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b761a6847f96fdc2d002e29e9e9ac2439c13b919adfd64e8ef49e75f6355c548"}, - {file = "SQLAlchemy-2.0.39-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0d7e3866eb52d914aea50c9be74184a0feb86f9af8aaaa4daefe52b69378db0b"}, - {file = "SQLAlchemy-2.0.39-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:995c2bacdddcb640c2ca558e6760383dcdd68830160af92b5c6e6928ffd259b4"}, - {file = "SQLAlchemy-2.0.39-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:344cd1ec2b3c6bdd5dfde7ba7e3b879e0f8dd44181f16b895940be9b842fd2b6"}, - {file = "SQLAlchemy-2.0.39-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5dfbc543578058c340360f851ddcecd7a1e26b0d9b5b69259b526da9edfa8875"}, - {file = "SQLAlchemy-2.0.39-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3395e7ed89c6d264d38bea3bfb22ffe868f906a7985d03546ec7dc30221ea980"}, - {file = "SQLAlchemy-2.0.39-cp38-cp38-win32.whl", hash = "sha256:bf555f3e25ac3a70c67807b2949bfe15f377a40df84b71ab2c58d8593a1e036e"}, - {file = "SQLAlchemy-2.0.39-cp38-cp38-win_amd64.whl", hash = "sha256:463ecfb907b256e94bfe7bcb31a6d8c7bc96eca7cbe39803e448a58bb9fcad02"}, - {file = "sqlalchemy-2.0.39-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6827f8c1b2f13f1420545bd6d5b3f9e0b85fe750388425be53d23c760dcf176b"}, - {file = "sqlalchemy-2.0.39-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9f119e7736967c0ea03aff91ac7d04555ee038caf89bb855d93bbd04ae85b41"}, - {file = "sqlalchemy-2.0.39-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4600c7a659d381146e1160235918826c50c80994e07c5b26946a3e7ec6c99249"}, - {file = "sqlalchemy-2.0.39-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a06e6c8e31c98ddc770734c63903e39f1947c9e3e5e4bef515c5491b7737dde"}, - {file = "sqlalchemy-2.0.39-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4c433f78c2908ae352848f56589c02b982d0e741b7905228fad628999799de4"}, - {file = "sqlalchemy-2.0.39-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7bd5c5ee1448b6408734eaa29c0d820d061ae18cb17232ce37848376dcfa3e92"}, - {file = "sqlalchemy-2.0.39-cp310-cp310-win32.whl", hash = "sha256:87a1ce1f5e5dc4b6f4e0aac34e7bb535cb23bd4f5d9c799ed1633b65c2bcad8c"}, - {file = "sqlalchemy-2.0.39-cp310-cp310-win_amd64.whl", hash = "sha256:871f55e478b5a648c08dd24af44345406d0e636ffe021d64c9b57a4a11518304"}, - {file = "sqlalchemy-2.0.39-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a28f9c238f1e143ff42ab3ba27990dfb964e5d413c0eb001b88794c5c4a528a9"}, - {file = "sqlalchemy-2.0.39-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08cf721bbd4391a0e765fe0fe8816e81d9f43cece54fdb5ac465c56efafecb3d"}, - {file = "sqlalchemy-2.0.39-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a8517b6d4005facdbd7eb4e8cf54797dbca100a7df459fdaff4c5123265c1cd"}, - {file = "sqlalchemy-2.0.39-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b2de1523d46e7016afc7e42db239bd41f2163316935de7c84d0e19af7e69538"}, - {file = "sqlalchemy-2.0.39-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:412c6c126369ddae171c13987b38df5122cb92015cba6f9ee1193b867f3f1530"}, - {file = "sqlalchemy-2.0.39-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b35e07f1d57b79b86a7de8ecdcefb78485dab9851b9638c2c793c50203b2ae8"}, - {file = "sqlalchemy-2.0.39-cp311-cp311-win32.whl", hash = "sha256:3eb14ba1a9d07c88669b7faf8f589be67871d6409305e73e036321d89f1d904e"}, - {file = "sqlalchemy-2.0.39-cp311-cp311-win_amd64.whl", hash = "sha256:78f1b79132a69fe8bd6b5d91ef433c8eb40688ba782b26f8c9f3d2d9ca23626f"}, - {file = "sqlalchemy-2.0.39-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c457a38351fb6234781d054260c60e531047e4d07beca1889b558ff73dc2014b"}, - {file = "sqlalchemy-2.0.39-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:018ee97c558b499b58935c5a152aeabf6d36b3d55d91656abeb6d93d663c0c4c"}, - {file = "sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5493a8120d6fc185f60e7254fc056a6742f1db68c0f849cfc9ab46163c21df47"}, - {file = "sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2cf5b5ddb69142511d5559c427ff00ec8c0919a1e6c09486e9c32636ea2b9dd"}, - {file = "sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f03143f8f851dd8de6b0c10784363712058f38209e926723c80654c1b40327a"}, - {file = "sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06205eb98cb3dd52133ca6818bf5542397f1dd1b69f7ea28aa84413897380b06"}, - {file = "sqlalchemy-2.0.39-cp312-cp312-win32.whl", hash = "sha256:7f5243357e6da9a90c56282f64b50d29cba2ee1f745381174caacc50d501b109"}, - {file = "sqlalchemy-2.0.39-cp312-cp312-win_amd64.whl", hash = "sha256:2ed107331d188a286611cea9022de0afc437dd2d3c168e368169f27aa0f61338"}, - {file = "sqlalchemy-2.0.39-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fe193d3ae297c423e0e567e240b4324d6b6c280a048e64c77a3ea6886cc2aa87"}, - {file = "sqlalchemy-2.0.39-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:79f4f502125a41b1b3b34449e747a6abfd52a709d539ea7769101696bdca6716"}, - {file = "sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a10ca7f8a1ea0fd5630f02feb055b0f5cdfcd07bb3715fc1b6f8cb72bf114e4"}, - {file = "sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6b0a1c7ed54a5361aaebb910c1fa864bae34273662bb4ff788a527eafd6e14d"}, - {file = "sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52607d0ebea43cf214e2ee84a6a76bc774176f97c5a774ce33277514875a718e"}, - {file = "sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c08a972cbac2a14810463aec3a47ff218bb00c1a607e6689b531a7c589c50723"}, - {file = "sqlalchemy-2.0.39-cp313-cp313-win32.whl", hash = "sha256:23c5aa33c01bd898f879db158537d7e7568b503b15aad60ea0c8da8109adf3e7"}, - {file = "sqlalchemy-2.0.39-cp313-cp313-win_amd64.whl", hash = "sha256:4dabd775fd66cf17f31f8625fc0e4cfc5765f7982f94dc09b9e5868182cb71c0"}, - {file = "sqlalchemy-2.0.39-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2600a50d590c22d99c424c394236899ba72f849a02b10e65b4c70149606408b5"}, - {file = "sqlalchemy-2.0.39-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4eff9c270afd23e2746e921e80182872058a7a592017b2713f33f96cc5f82e32"}, - {file = "sqlalchemy-2.0.39-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7332868ce891eda48896131991f7f2be572d65b41a4050957242f8e935d5d7"}, - {file = "sqlalchemy-2.0.39-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125a7763b263218a80759ad9ae2f3610aaf2c2fbbd78fff088d584edf81f3782"}, - {file = "sqlalchemy-2.0.39-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:04545042969833cb92e13b0a3019549d284fd2423f318b6ba10e7aa687690a3c"}, - {file = "sqlalchemy-2.0.39-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:805cb481474e111ee3687c9047c5f3286e62496f09c0e82e8853338aaaa348f8"}, - {file = "sqlalchemy-2.0.39-cp39-cp39-win32.whl", hash = "sha256:34d5c49f18778a3665d707e6286545a30339ad545950773d43977e504815fa70"}, - {file = "sqlalchemy-2.0.39-cp39-cp39-win_amd64.whl", hash = "sha256:35e72518615aa5384ef4fae828e3af1b43102458b74a8c481f69af8abf7e802a"}, - {file = "sqlalchemy-2.0.39-py3-none-any.whl", hash = 
"sha256:a1c6b0a5e3e326a466d809b651c63f278b1256146a377a528b6938a279da334f"}, - {file = "sqlalchemy-2.0.39.tar.gz", hash = "sha256:5d2d1fe548def3267b4c70a8568f108d1fed7cbbeccb9cc166e05af2abc25c22"}, +files = [ + {file = "SQLAlchemy-2.0.41-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6854175807af57bdb6425e47adbce7d20a4d79bbfd6f6d6519cd10bb7109a7f8"}, + {file = "SQLAlchemy-2.0.41-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05132c906066142103b83d9c250b60508af556982a385d96c4eaa9fb9720ac2b"}, + {file = "SQLAlchemy-2.0.41-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b4af17bda11e907c51d10686eda89049f9ce5669b08fbe71a29747f1e876036"}, + {file = "SQLAlchemy-2.0.41-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c0b0e5e1b5d9f3586601048dd68f392dc0cc99a59bb5faf18aab057ce00d00b2"}, + {file = "SQLAlchemy-2.0.41-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0b3dbf1e7e9bc95f4bac5e2fb6d3fb2f083254c3fdd20a1789af965caf2d2348"}, + {file = "SQLAlchemy-2.0.41-cp37-cp37m-win32.whl", hash = "sha256:1e3f196a0c59b0cae9a0cd332eb1a4bda4696e863f4f1cf84ab0347992c548c2"}, + {file = "SQLAlchemy-2.0.41-cp37-cp37m-win_amd64.whl", hash = "sha256:6ab60a5089a8f02009f127806f777fca82581c49e127f08413a66056bd9166dd"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b1f09b6821406ea1f94053f346f28f8215e293344209129a9c0fcc3578598d7b"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1936af879e3db023601196a1684d28e12f19ccf93af01bf3280a3262c4b6b4e5"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2ac41acfc8d965fb0c464eb8f44995770239668956dc4cdf502d1b1ffe0d747"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81c24e0c0fde47a9723c81d5806569cddef103aebbf79dbc9fcbb617153dea30"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23a8825495d8b195c4aa9ff1c430c28f2c821e8c5e2d98089228af887e5d7e29"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:60c578c45c949f909a4026b7807044e7e564adf793537fc762b2489d522f3d11"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-win32.whl", hash = "sha256:118c16cd3f1b00c76d69343e38602006c9cfb9998fa4f798606d28d63f23beda"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-win_amd64.whl", hash = "sha256:7492967c3386df69f80cf67efd665c0f667cee67032090fe01d7d74b0e19bb08"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-win32.whl", hash = 
"sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-win_amd64.whl", hash = "sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-win32.whl", hash = "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-win_amd64.whl", hash = "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-win32.whl", hash = "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-win_amd64.whl", hash = "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df"}, + {file = "sqlalchemy-2.0.41-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90144d3b0c8b139408da50196c5cad2a6909b51b23df1f0538411cd23ffa45d3"}, + {file = "sqlalchemy-2.0.41-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:023b3ee6169969beea3bb72312e44d8b7c27c75b347942d943cf49397b7edeb5"}, + {file = "sqlalchemy-2.0.41-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725875a63abf7c399d4548e686debb65cdc2549e1825437096a0af1f7e374814"}, + {file = "sqlalchemy-2.0.41-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81965cc20848ab06583506ef54e37cf15c83c7e619df2ad16807c03100745dea"}, + {file = "sqlalchemy-2.0.41-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dd5ec3aa6ae6e4d5b5de9357d2133c07be1aff6405b136dad753a16afb6717dd"}, + {file = "sqlalchemy-2.0.41-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:ff8e80c4c4932c10493ff97028decfdb622de69cae87e0f127a7ebe32b4069c6"}, + {file = "sqlalchemy-2.0.41-cp38-cp38-win32.whl", hash = "sha256:4d44522480e0bf34c3d63167b8cfa7289c1c54264c2950cc5fc26e7850967e45"}, + {file = "sqlalchemy-2.0.41-cp38-cp38-win_amd64.whl", hash = "sha256:81eedafa609917040d39aa9332e25881a8e7a0862495fcdf2023a9667209deda"}, + {file = "sqlalchemy-2.0.41-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9a420a91913092d1e20c86a2f5f1fc85c1a8924dbcaf5e0586df8aceb09c9cc2"}, + {file = "sqlalchemy-2.0.41-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:906e6b0d7d452e9a98e5ab8507c0da791856b2380fdee61b765632bb8698026f"}, + {file = "sqlalchemy-2.0.41-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a373a400f3e9bac95ba2a06372c4fd1412a7cee53c37fc6c05f829bf672b8769"}, + {file = "sqlalchemy-2.0.41-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:087b6b52de812741c27231b5a3586384d60c353fbd0e2f81405a814b5591dc8b"}, + {file = "sqlalchemy-2.0.41-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:34ea30ab3ec98355235972dadc497bb659cc75f8292b760394824fab9cf39826"}, + {file = "sqlalchemy-2.0.41-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8280856dd7c6a68ab3a164b4a4b1c51f7691f6d04af4d4ca23d6ecf2261b7923"}, + {file = "sqlalchemy-2.0.41-cp39-cp39-win32.whl", hash = "sha256:b50eab9994d64f4a823ff99a0ed28a6903224ddbe7fef56a6dd865eec9243440"}, + {file = "sqlalchemy-2.0.41-cp39-cp39-win_amd64.whl", hash = "sha256:5e22575d169529ac3e0a120cf050ec9daa94b6a9597993d1702884f6954a7d71"}, + {file = "sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576"}, + {file = "sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +greenlet = {version = ">=1", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} mypy = {version = ">=0.910", optional = true, markers = "extra == \"mypy\""} typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] @@ -1670,7 +1634,7 @@ mysql-connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet 
(!=0.4.17)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] postgresql-pg8000 = ["pg8000 (>=1.29.1)"] postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] @@ -1681,14 +1645,13 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.46.1" +version = "0.46.2" description = "The little ASGI library that shines." optional = false python-versions = ">=3.9" -groups = ["main"] files = [ - {file = "starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227"}, - {file = "starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230"}, + {file = "starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35"}, + {file = "starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5"}, ] [package.dependencies] @@ -1703,7 +1666,6 @@ version = "2022.7.1.2" description = "Typing stubs for pytz" optional = false python-versions = "*" -groups = ["linters"] files = [ {file = "types-pytz-2022.7.1.2.tar.gz", hash = "sha256:487d3e8e9f4071eec8081746d53fa982bbc05812e719dcbf2ebf3d55a1a4cd28"}, {file = "types_pytz-2022.7.1.2-py3-none-any.whl", hash = "sha256:40ca448a928d566f7d44ddfde0066e384f7ffbd4da2778e42a4570eaca572446"}, @@ -1711,24 +1673,35 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.14.0" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" -groups = ["main", "linters"] +python-versions = ">=3.9" files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, + {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, ] +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + [[package]] name = "tzdata" version = "2025.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" -groups = ["main"] -markers = "sys_platform == \"win32\"" files = [ {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, @@ -1736,14 +1709,13 @@ files = [ [[package]] name = "uvicorn" -version = "0.34.2" +version = "0.34.3" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.9" -groups = ["main"] files = [ - {file = "uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403"}, - {file = "uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328"}, + {file = "uvicorn-0.34.3-py3-none-any.whl", hash = "sha256:16246631db62bdfbf069b0645177d6e8a77ba950cfedbfd093acef9444e4d885"}, + {file = "uvicorn-0.34.3.tar.gz", hash = "sha256:35919a9a979d7a59334b6b10e05d77c1d0d574c50e0fc98b8b1a0f165708b55a"}, ] [package.dependencies] @@ -1751,7 +1723,7 @@ click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "uvloop" @@ -1759,7 +1731,6 @@ version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" -groups = ["main"] files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -1811,7 +1782,6 @@ version = "4.0.2" description = "Filesystem events monitoring" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, @@ -1859,7 +1829,6 @@ version = "15.0.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.9" -groups = ["main"] files = [ {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, @@ -1938,15 +1907,13 @@ version = "1.2.0" description = "A small Python utility to set file creation time on Windows" optional = false python-versions = ">=3.5" -groups = ["main"] -markers = "sys_platform == \"win32\"" files = [ {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, ] [package.extras] -dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [[package]] name = "zope-event" @@ -1954,7 +1921,6 @@ version = "5.0" description = "Very basic event publishing system" optional = false python-versions = ">=3.7" -groups = ["test"] files = [ {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, {file = 
"zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, @@ -1973,7 +1939,6 @@ version = "7.2" description = "Interfaces for Python" optional = false python-versions = ">=3.8" -groups = ["test"] files = [ {file = "zope.interface-7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce290e62229964715f1011c3dbeab7a4a1e4971fd6f31324c4519464473ef9f2"}, {file = "zope.interface-7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05b910a5afe03256b58ab2ba6288960a2892dfeef01336dc4be6f1b9ed02ab0a"}, @@ -2023,6 +1988,6 @@ test = ["coverage[toml]", "zope.event", "zope.testing"] testing = ["coverage[toml]", "zope.event", "zope.testing"] [metadata] -lock-version = "2.1" -python-versions = "3.12.6" -content-hash = "fdd7a645ba7e7d880310520a48668dbfd5e9e63fb5e01fd7736a1ca94ebaa081" +lock-version = "2.0" +python-versions = "3.12.3" +content-hash = "a1c686341e1741ae1c376d034d705ce90e835a8e8a3bab14b68e02c24b3452e1" diff --git a/pyproject.toml b/pyproject.toml index e60ede471..0d9e73dce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -179,7 +179,7 @@ ignore-variadic-names = true [tool.ruff.lint.per-file-ignores] "tests/*.py" = ["S101", "D104", "DOC501", "D417", "DOC201", "DOC402"] # Ignore rules for the `tests/` directory. -"alembic/*.py" = ["I001"] # Ignore `Flake8-isort IO01` rule for the `alembic/` directory. It works incorrect in CI ruff test. +"app/alembic/*.py" = ["ANN001"] # Ignore `Flake8-isort IO01` rule for the `alembic/` directory. It works incorrect in CI ruff test. [tool.ruff.lint.mccabe] # 15 Complexity level is too high, need to reduce this level or ignore it `# noqa: C901`. From 626a60ebbc513f3b342f0386e95187e15f240b15 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 9 Jun 2025 16:10:37 +0300 Subject: [PATCH 14/25] fix: poetry lock task_508 --- poetry.lock | 994 +++++++++++++++++++++++++--------------------------- 1 file changed, 469 insertions(+), 525 deletions(-) diff --git a/poetry.lock b/poetry.lock index 53ad4f166..e3178ed14 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aioldap3" @@ -19,13 +19,13 @@ url = "https://github.com/MultiDirectoryLab/aioldap3/releases/download/v1.1.1/ai [[package]] name = "alembic" -version = "1.16.1" +version = "1.15.1" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.9" files = [ - {file = "alembic-1.16.1-py3-none-any.whl", hash = "sha256:0cdd48acada30d93aa1035767d67dff25702f8de74d7c3919f2e8492c8db2e67"}, - {file = "alembic-1.16.1.tar.gz", hash = "sha256:43d37ba24b3d17bc1eb1024fe0f51cd1dc95aeb5464594a02c6bb9ca9864bfa4"}, + {file = "alembic-1.15.1-py3-none-any.whl", hash = "sha256:197de710da4b3e91cf66a826a5b31b5d59a127ab41bd0fc42863e2902ce2bbbe"}, + {file = "alembic-1.15.1.tar.gz", hash = "sha256:e1a1c738577bca1f27e68728c910cd389b9a92152ff91d902da649c192e30c49"}, ] [package.dependencies] @@ -145,13 +145,13 @@ typecheck = ["mypy"] [[package]] name = "certifi" -version = "2025.4.26" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, - {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] @@ -235,13 +235,13 @@ pycparser = "*" [[package]] name = "click" -version = "8.2.1" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false -python-versions = ">=3.10" +python-versions = ">=3.7" files = [ - {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, - {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -335,48 +335,46 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "44.0.3" +version = "44.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" files = [ - {file = "cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01"}, - {file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d"}, - {file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904"}, - {file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44"}, - {file = "cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d"}, - {file = "cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d"}, - {file = "cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", 
hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c"}, - {file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f"}, - {file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5"}, - {file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b"}, - {file = "cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028"}, - {file = "cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:cad399780053fb383dc067475135e41c9fe7d901a97dd5d9c5dfb5611afc0d7d"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:21a83f6f35b9cc656d71b5de8d519f566df01e660ac2578805ab245ffd8523f8"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fc3c9babc1e1faefd62704bb46a69f359a9819eb0292e40df3fb6e3574715cd4"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:e909df4053064a97f1e6565153ff8bb389af12c5c8d29c343308760890560aff"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:dad80b45c22e05b259e33ddd458e9e2ba099c86ccf4e88db7bbab4b747b18d06"}, - {file = "cryptography-44.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:479d92908277bed6e1a1c69b277734a7771c2b78633c224445b5c60a9f4bc1d9"}, - {file = "cryptography-44.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:896530bc9107b226f265effa7ef3f21270f18a2026bc09fed1ebd7b66ddf6375"}, - {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9b4d4a5dbee05a2c390bf212e78b99434efec37b17a4bff42f50285c5c8c9647"}, - {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02f55fb4f8b79c1221b0961488eaae21015b69b210e18c386b69de182ebb1259"}, - {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dd3db61b8fe5be220eee484a17233287d0be6932d056cf5738225b9c05ef4fff"}, - {file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:978631ec51a6bbc0b7e58f23b68a8ce9e5f09721940933e9c217068388789fe5"}, - 
{file = "cryptography-44.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:5d20cc348cca3a8aa7312f42ab953a56e15323800ca3ab0706b8cd452a3a056c"}, - {file = "cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053"}, + {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"}, + {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"}, + {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"}, + {file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = 
"sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"}, + {file = "cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"}, + {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"}, + {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"}, ] [package.dependencies] @@ -389,20 +387,9 @@ nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==44.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] -[[package]] -name = "darglint2" -version = "1.8.2" -description = "A utility for ensuring Google-style docstrings stay up to date with the source code." 
-optional = false -python-versions = ">=3.6,<4.0" -files = [ - {file = "darglint2-1.8.2-py3-none-any.whl", hash = "sha256:8f950c9b5fab25dd54bf537bef1569c267073e5828cb5ab76428876df6d947af"}, - {file = "darglint2-1.8.2.tar.gz", hash = "sha256:11e0fc9c999bf09e192f42b72d202d177cb82da258eba387b24c2f0f5943650f"}, -] - [[package]] name = "decorator" version = "5.2.1" @@ -416,13 +403,13 @@ files = [ [[package]] name = "dishka" -version = "1.6.0" +version = "1.5.0" description = "Cute DI framework with scopes and agreeable API" optional = false python-versions = ">=3.10" files = [ - {file = "dishka-1.6.0-py3-none-any.whl", hash = "sha256:ab1aedee152ce7bb11cfd2673d7ce4001fe2b330d14e84535d7525a68430b2c2"}, - {file = "dishka-1.6.0.tar.gz", hash = "sha256:f1fa5ec7e980d4f618d0c425d1bb81d8e9414894d8ec6553b197d2298774e12f"}, + {file = "dishka-1.5.0-py3-none-any.whl", hash = "sha256:cd8847ac675b4093fe42742d9cf42a49a38d8d1abca46fcc250cd2f2190a2f71"}, + {file = "dishka-1.5.0.tar.gz", hash = "sha256:1e47707f7b40c3a3ab3b736bd5b4ee958939d32ace6199809e4f75bb236c04a8"}, ] [[package]] @@ -556,65 +543,84 @@ test = ["cffi (>=1.17.1)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idn [[package]] name = "greenlet" -version = "3.2.3" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false -python-versions = ">=3.9" +python-versions = ">=3.7" files = [ - {file = "greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:761917cac215c61e9dc7324b2606107b3b292a8349bdebb31503ab4de3f559ac"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a433dbc54e4a37e4fff90ef34f25a8c00aed99b06856f0119dcf09fbafa16392"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:72e77ed69312bab0434d7292316d5afd6896192ac4327d44f3d613ecb85b037c"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:68671180e3849b963649254a882cd544a3c75bfcd2c527346ad8bb53494444db"}, - {file = "greenlet-3.2.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49c8cfb18fb419b3d08e011228ef8a25882397f3a859b9fe1436946140b6756b"}, - {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:efc6dc8a792243c31f2f5674b670b3a95d46fa1c6a912b8e310d6f542e7b0712"}, - {file = "greenlet-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:731e154aba8e757aedd0781d4b240f1225b075b4409f1bb83b05ff410582cf00"}, - {file = "greenlet-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:96c20252c2f792defe9a115d3287e14811036d51e78b3aaddbee23b69b216302"}, - {file = "greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b"}, - {file = 
"greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147"}, - {file = "greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5"}, - {file = "greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc"}, - {file = "greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba"}, - {file = "greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34"}, - {file = "greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688"}, - {file = "greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb"}, - {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c"}, - {file = "greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163"}, - {file = "greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849"}, - {file = "greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb"}, - {file = "greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b"}, - {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0"}, - {file = "greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36"}, - {file = 
"greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3"}, - {file = "greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892"}, - {file = "greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141"}, - {file = "greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a"}, - {file = "greenlet-3.2.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:42efc522c0bd75ffa11a71e09cd8a399d83fafe36db250a87cf1dacfaa15dc64"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d760f9bdfe79bff803bad32b4d8ffb2c1d2ce906313fc10a83976ffb73d64ca7"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8324319cbd7b35b97990090808fdc99c27fe5338f87db50514959f8059999805"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:8c37ef5b3787567d322331d5250e44e42b58c8c713859b8a04c6065f27efbf72"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce539fb52fb774d0802175d37fcff5c723e2c7d249c65916257f0a940cee8904"}, - {file = "greenlet-3.2.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:003c930e0e074db83559edc8705f3a2d066d4aa8c2f198aff1e454946efd0f26"}, - {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7e70ea4384b81ef9e84192e8a77fb87573138aa5d4feee541d8014e452b434da"}, - {file = "greenlet-3.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:22eb5ba839c4b2156f18f76768233fe44b23a31decd9cc0d4cc8141c211fd1b4"}, - {file = "greenlet-3.2.3-cp39-cp39-win32.whl", hash = "sha256:4532f0d25df67f896d137431b13f4cdce89f7e3d4a96387a41290910df4d3a57"}, - {file = "greenlet-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:aaa7aae1e7f75eaa3ae400ad98f8644bb81e1dc6ba47ce8a93d3f17274e08322"}, - {file = "greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = 
"greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -660,29 +666,29 @@ decorator = "*" [[package]] name = "h11" -version = "0.16.0" +version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, - {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] [[package]] name = "httpcore" -version = "1.0.9" +version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, - {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] certifi = "*" -h11 = ">=0.16" +h11 = ">=0.13,<0.15" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] @@ -804,13 +810,13 @@ dev = ["Sphinx (==8.1.3)", "build (==1.2.2)", "colorama (==0.4.5)", "colorama (= [[package]] name = "mako" -version = "1.3.10" +version = "1.3.9" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
optional = false python-versions = ">=3.8" files = [ - {file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"}, - {file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"}, + {file = "Mako-1.3.9-py3-none-any.whl", hash = "sha256:95920acccb578427a9aa38e37a186b1e43156c87260d7ba18ca63aa4c7cbd3a1"}, + {file = "mako-1.3.9.tar.gz", hash = "sha256:b5d65ff3462870feec922dbccf38f6efb44e5714d7b593a656be86663d8600ac"}, ] [package.dependencies] @@ -893,48 +899,47 @@ files = [ [[package]] name = "mypy" -version = "1.16.0" +version = "1.15.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" files = [ - {file = "mypy-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7909541fef256527e5ee9c0a7e2aeed78b6cda72ba44298d1334fe7881b05c5c"}, - {file = "mypy-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e71d6f0090c2256c713ed3d52711d01859c82608b5d68d4fa01a3fe30df95571"}, - {file = "mypy-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:936ccfdd749af4766be824268bfe22d1db9eb2f34a3ea1d00ffbe5b5265f5491"}, - {file = "mypy-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4086883a73166631307fdd330c4a9080ce24913d4f4c5ec596c601b3a4bdd777"}, - {file = "mypy-1.16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:feec38097f71797da0231997e0de3a58108c51845399669ebc532c815f93866b"}, - {file = "mypy-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:09a8da6a0ee9a9770b8ff61b39c0bb07971cda90e7297f4213741b48a0cc8d93"}, - {file = "mypy-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9f826aaa7ff8443bac6a494cf743f591488ea940dd360e7dd330e30dd772a5ab"}, - {file = "mypy-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82d056e6faa508501af333a6af192c700b33e15865bda49611e3d7d8358ebea2"}, - {file = "mypy-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:089bedc02307c2548eb51f426e085546db1fa7dd87fbb7c9fa561575cf6eb1ff"}, - {file = "mypy-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a2322896003ba66bbd1318c10d3afdfe24e78ef12ea10e2acd985e9d684a666"}, - {file = "mypy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:021a68568082c5b36e977d54e8f1de978baf401a33884ffcea09bd8e88a98f4c"}, - {file = "mypy-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:54066fed302d83bf5128632d05b4ec68412e1f03ef2c300434057d66866cea4b"}, - {file = "mypy-1.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c5436d11e89a3ad16ce8afe752f0f373ae9620841c50883dc96f8b8805620b13"}, - {file = "mypy-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f2622af30bf01d8fc36466231bdd203d120d7a599a6d88fb22bdcb9dbff84090"}, - {file = "mypy-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d045d33c284e10a038f5e29faca055b90eee87da3fc63b8889085744ebabb5a1"}, - {file = "mypy-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b4968f14f44c62e2ec4a038c8797a87315be8df7740dc3ee8d3bfe1c6bf5dba8"}, - {file = "mypy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb14a4a871bb8efb1e4a50360d4e3c8d6c601e7a31028a2c79f9bb659b63d730"}, - {file = "mypy-1.16.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:bd4e1ebe126152a7bbaa4daedd781c90c8f9643c79b9748caa270ad542f12bec"}, - {file = "mypy-1.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a9e056237c89f1587a3be1a3a70a06a698d25e2479b9a2f57325ddaaffc3567b"}, - {file = "mypy-1.16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b07e107affb9ee6ce1f342c07f51552d126c32cd62955f59a7db94a51ad12c0"}, - {file = "mypy-1.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6fb60cbd85dc65d4d63d37cb5c86f4e3a301ec605f606ae3a9173e5cf34997b"}, - {file = "mypy-1.16.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7e32297a437cc915599e0578fa6bc68ae6a8dc059c9e009c628e1c47f91495d"}, - {file = "mypy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:afe420c9380ccec31e744e8baff0d406c846683681025db3531b32db56962d52"}, - {file = "mypy-1.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:55f9076c6ce55dd3f8cd0c6fff26a008ca8e5131b89d5ba6d86bd3f47e736eeb"}, - {file = "mypy-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f56236114c425620875c7cf71700e3d60004858da856c6fc78998ffe767b73d3"}, - {file = "mypy-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:15486beea80be24ff067d7d0ede673b001d0d684d0095803b3e6e17a886a2a92"}, - {file = "mypy-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f2ed0e0847a80655afa2c121835b848ed101cc7b8d8d6ecc5205aedc732b1436"}, - {file = "mypy-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb5fbc8063cb4fde7787e4c0406aa63094a34a2daf4673f359a1fb64050e9cb2"}, - {file = "mypy-1.16.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a5fcfdb7318c6a8dd127b14b1052743b83e97a970f0edb6c913211507a255e20"}, - {file = "mypy-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:2e7e0ad35275e02797323a5aa1be0b14a4d03ffdb2e5f2b0489fa07b89c67b21"}, - {file = "mypy-1.16.0-py3-none-any.whl", hash = "sha256:29e1499864a3888bca5c1542f2d7232c6e586295183320caa95758fc84034031"}, - {file = "mypy-1.16.0.tar.gz", hash = "sha256:84b94283f817e2aa6350a14b4a8fb2a35a53c286f97c9d30f53b63620e7af8ab"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + {file = "mypy-1.15.0.tar.gz", hash = 
"sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, ] [package.dependencies] mypy_extensions = ">=1.0.0" -pathspec = ">=0.9.0" typing_extensions = ">=4.6.0" [package.extras] @@ -946,24 +951,24 @@ reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "1.1.0" +version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false -python-versions = ">=3.8" +python-versions = ">=3.5" files = [ - {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, - {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "packaging" -version = "25.0" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -986,31 +991,20 @@ bcrypt = ["bcrypt (>=3.1.0)"] build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"] totp = ["cryptography"] -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - [[package]] name = "pluggy" -version = "1.6.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, - {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["coverage", "pytest", "pytest-benchmark"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "proxy-protocol" @@ -1033,24 +1027,24 @@ doc = ["cloud-sptheme", "sphinx", "sphinx-autodoc-typehints"] [[package]] name = "psycopg" -version = "3.2.9" +version = "3.2.8" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" files = [ - {file = "psycopg-3.2.9-py3-none-any.whl", hash = "sha256:01a8dadccdaac2123c916208c96e06631641c0566b22005493f09663c7a8d3b6"}, - {file = "psycopg-3.2.9.tar.gz", hash = "sha256:2fbb46fcd17bc81f993f28c47f1ebea38d66ae97cc2dbc3cad73b37cefbff700"}, + {file = "psycopg-3.2.8-py3-none-any.whl", hash = "sha256:0e960f1977d77de7f1ace4b54590f686b52c2f9ab1f61fff4141887fc711d9e7"}, + {file = "psycopg-3.2.8.tar.gz", hash = "sha256:cc995d836841e400c4f615d8dea351dc39697ad29df84d428f9c38c8040222f8"}, ] [package.dependencies] -psycopg-binary = {version = "3.2.9", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} +psycopg-binary = {version = "3.2.8", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} psycopg-pool = {version = "*", optional = true, markers = "extra == \"pool\""} typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.2.9)"] -c = ["psycopg-c (==3.2.9)"] +binary = ["psycopg-binary (==3.2.8)"] +c = ["psycopg-c (==3.2.8)"] dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "types-shapely (>=2.0)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] @@ -1058,76 +1052,76 @@ test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", [[package]] name = "psycopg-binary" -version = "3.2.9" +version = "3.2.8" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.8" files = [ - {file = "psycopg_binary-3.2.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:528239bbf55728ba0eacbd20632342867590273a9bacedac7538ebff890f1093"}, - {file = "psycopg_binary-3.2.9-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:e4978c01ca4c208c9d6376bd585e2c0771986b76ff7ea518f6d2b51faece75e8"}, - {file = "psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ed2bab85b505d13e66a914d0f8cdfa9475c16d3491cf81394e0748b77729af2"}, - {file = "psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:799fa1179ab8a58d1557a95df28b492874c8f4135101b55133ec9c55fc9ae9d7"}, - {file = "psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb37ac3955d19e4996c3534abfa4f23181333974963826db9e0f00731274b695"}, - {file = "psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001e986656f7e06c273dd4104e27f4b4e0614092e544d950c7c938d822b1a894"}, - {file = "psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fa5c80d8b4cbf23f338db88a7251cef8bb4b68e0f91cf8b6ddfa93884fdbb0c1"}, - {file = "psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:39a127e0cf9b55bd4734a8008adf3e01d1fd1cb36339c6a9e2b2cbb6007c50ee"}, - {file = "psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fb7599e436b586e265bea956751453ad32eb98be6a6e694252f4691c31b16edb"}, - {file = "psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5d2c9fe14fe42b3575a0b4e09b081713e83b762c8dc38a3771dd3265f8f110e7"}, - {file = "psycopg_binary-3.2.9-cp310-cp310-win_amd64.whl", hash = "sha256:7e4660fad2807612bb200de7262c88773c3483e85d981324b3c647176e41fdc8"}, - {file = "psycopg_binary-3.2.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2504e9fd94eabe545d20cddcc2ff0da86ee55d76329e1ab92ecfcc6c0a8156c4"}, - {file = "psycopg_binary-3.2.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:093a0c079dd6228a7f3c3d82b906b41964eaa062a9a8c19f45ab4984bf4e872b"}, - {file = "psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:387c87b51d72442708e7a853e7e7642717e704d59571da2f3b29e748be58c78a"}, - {file = "psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9ac10a2ebe93a102a326415b330fff7512f01a9401406896e78a81d75d6eddc"}, - {file = "psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72fdbda5b4c2a6a72320857ef503a6589f56d46821592d4377c8c8604810342b"}, - {file = "psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f34e88940833d46108f949fdc1fcfb74d6b5ae076550cd67ab59ef47555dba95"}, - {file = "psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a3e0f89fe35cb03ff1646ab663dabf496477bab2a072315192dbaa6928862891"}, - {file = "psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6afb3e62f2a3456f2180a4eef6b03177788df7ce938036ff7f09b696d418d186"}, - {file = "psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:cc19ed5c7afca3f6b298bfc35a6baa27adb2019670d15c32d0bb8f780f7d560d"}, - {file = "psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc75f63653ce4ec764c8f8c8b0ad9423e23021e1c34a84eb5f4ecac8538a4a4a"}, - {file = "psycopg_binary-3.2.9-cp311-cp311-win_amd64.whl", hash = "sha256:3db3ba3c470801e94836ad78bf11fd5fab22e71b0c77343a1ee95d693879937a"}, - {file = "psycopg_binary-3.2.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be7d650a434921a6b1ebe3fff324dbc2364393eb29d7672e638ce3e21076974e"}, - {file = "psycopg_binary-3.2.9-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:6a76b4722a529390683c0304501f238b365a46b1e5fb6b7249dbc0ad6fea51a0"}, - {file = "psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96a551e4683f1c307cfc3d9a05fec62c00a7264f320c9962a67a543e3ce0d8ff"}, - {file = "psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61d0a6ceed8f08c75a395bc28cb648a81cf8dee75ba4650093ad1a24a51c8724"}, - {file = "psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad280bbd409bf598683dda82232f5215cfc5f2b1bf0854e409b4d0c44a113b1d"}, - {file = "psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76eddaf7fef1d0994e3d536ad48aa75034663d3a07f6f7e3e601105ae73aeff6"}, - {file = "psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:52e239cd66c4158e412318fbe028cd94b0ef21b0707f56dcb4bdc250ee58fd40"}, - {file = "psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:08bf9d5eabba160dd4f6ad247cf12f229cc19d2458511cab2eb9647f42fa6795"}, - {file = "psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1b2cf018168cad87580e67bdde38ff5e51511112f1ce6ce9a8336871f465c19a"}, - {file = "psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:14f64d1ac6942ff089fc7e926440f7a5ced062e2ed0949d7d2d680dc5c00e2d4"}, - {file = "psycopg_binary-3.2.9-cp312-cp312-win_amd64.whl", hash = "sha256:7a838852e5afb6b4126f93eb409516a8c02a49b788f4df8b6469a40c2157fa21"}, - {file = "psycopg_binary-3.2.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:98bbe35b5ad24a782c7bf267596638d78aa0e87abc7837bdac5b2a2ab954179e"}, - {file = "psycopg_binary-3.2.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:72691a1615ebb42da8b636c5ca9f2b71f266be9e172f66209a361c175b7842c5"}, - {file = "psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25ab464bfba8c401f5536d5aa95f0ca1dd8257b5202eede04019b4415f491351"}, - {file = "psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e8aeefebe752f46e3c4b769e53f1d4ad71208fe1150975ef7662c22cca80fab"}, - {file = "psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7e4e4dd177a8665c9ce86bc9caae2ab3aa9360b7ce7ec01827ea1baea9ff748"}, - {file = "psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fc2915949e5c1ea27a851f7a472a7da7d0a40d679f0a31e42f1022f3c562e87"}, - {file = "psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a1fa38a4687b14f517f049477178093c39c2a10fdcced21116f47c017516498f"}, - {file = "psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5be8292d07a3ab828dc95b5ee6b69ca0a5b2e579a577b39671f4f5b47116dfd2"}, - {file = "psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:778588ca9897b6c6bab39b0d3034efff4c5438f5e3bd52fda3914175498202f9"}, - {file = "psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f0d5b3af045a187aedbd7ed5fc513bd933a97aaff78e61c3745b330792c4345b"}, - {file = "psycopg_binary-3.2.9-cp313-cp313-win_amd64.whl", hash = "sha256:2290bc146a1b6a9730350f695e8b670e1d1feb8446597bed0bbe7c3c30e0abcb"}, - {file = "psycopg_binary-3.2.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df22ec17390ec5ccb38d211fb251d138d37a43344492858cea24de8efa15003"}, - {file = "psycopg_binary-3.2.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:eac3a6e926421e976c1c2653624e1294f162dc67ac55f9addbe8f7b8d08ce603"}, - {file = "psycopg_binary-3.2.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf789be42aea5752ee396d58de0538d5fcb76795c85fb03ab23620293fb81b6f"}, - {file = "psycopg_binary-3.2.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f05b9dafa5670a7503abc715af081dbbb176a8e6770de77bccaeb9024206c5"}, - {file = "psycopg_binary-3.2.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2d7a6646d41228e9049978be1f3f838b557a1bde500b919906d54c4390f5086"}, - {file = "psycopg_binary-3.2.9-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a4d76e28df27ce25dc19583407f5c6c6c2ba33b443329331ab29b6ef94c8736d"}, - {file = "psycopg_binary-3.2.9-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:418f52b77b715b42e8ec43ee61ca74abc6765a20db11e8576e7f6586488a266f"}, - {file = "psycopg_binary-3.2.9-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:1f1736d5b21f69feefeef8a75e8d3bf1f0a1e17c165a7488c3111af9d6936e91"}, - {file = "psycopg_binary-3.2.9-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5918c0fab50df764812f3ca287f0d716c5c10bedde93d4da2cefc9d40d03f3aa"}, - {file = "psycopg_binary-3.2.9-cp38-cp38-win_amd64.whl", hash = "sha256:7b617b81f08ad8def5edd110de44fd6d326f969240cc940c6f6b3ef21fe9c59f"}, - {file = "psycopg_binary-3.2.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587a3f19954d687a14e0c8202628844db692dbf00bba0e6d006659bf1ca91cbe"}, - {file = "psycopg_binary-3.2.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:791759138380df21d356ff991265fde7fe5997b0c924a502847a9f9141e68786"}, - {file = "psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95315b8c8ddfa2fdcb7fe3ddea8a595c1364524f512160c604e3be368be9dd07"}, - {file = "psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18ac08475c9b971237fcc395b0a6ee4e8580bb5cf6247bc9b8461644bef5d9f4"}, - {file = "psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac2c04b6345e215e65ca6aef5c05cc689a960b16674eaa1f90a8f86dfaee8c04"}, - {file = "psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1ab25e3134774f1e476d4bb9050cdec25f10802e63e92153906ae934578734"}, - {file = "psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4bfec4a73e8447d8fe8854886ffa78df2b1c279a7592241c2eb393d4499a17e2"}, - {file = "psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:166acc57af5d2ff0c0c342aed02e69a0cd5ff216cae8820c1059a6f3b7cf5f78"}, - {file = "psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:413f9e46259fe26d99461af8e1a2b4795a4e27cc8ac6f7919ec19bcee8945074"}, - {file = "psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:354dea21137a316b6868ee41c2ae7cce001e104760cf4eab3ec85627aed9b6cd"}, - {file = "psycopg_binary-3.2.9-cp39-cp39-win_amd64.whl", hash = "sha256:24ddb03c1ccfe12d000d950c9aba93a7297993c4e3905d9f2c9795bb0764d523"}, + {file = "psycopg_binary-3.2.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0694548e1633c2ea819406c5bfd297bf1b4f6f8638dec0d639ab9764fdebcb2a"}, + {file = "psycopg_binary-3.2.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:85851cdc18b514f80790f711a25406515b42f6b64e9a5d3940ae399e3b0e2c23"}, + {file = "psycopg_binary-3.2.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:040c2a768bd9ae572421ee5695a6299e08147dd44bc8ac514961323dc5c31a62"}, + {file = "psycopg_binary-3.2.8-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdb5567e81374734539f7b7deb9d547271585ec42a7866ea06bffa58fa5cd5a"}, + {file = "psycopg_binary-3.2.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289d2575edc00391c4bf586048701638126f396a76db83f36463d1c2b3495aae"}, + {file = "psycopg_binary-3.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c3a3b330c44e01ee29b3b76ddbb86890fbaf7e4b2f9abd43220d050642edee3"}, + {file = "psycopg_binary-3.2.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:814d533e6a8359c2962e28a36fad2698c15639716459fe1100e859b6173c3b6d"}, + {file = "psycopg_binary-3.2.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b67f78f75b033d8833ec40432c28610c275455e0172762919912a5e6b9db6366"}, + {file = "psycopg_binary-3.2.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:b98f7dc1ed83889803d0df2d327c94c95a487b9976215c3e9adb0dbb7a220d76"}, + {file = "psycopg_binary-3.2.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a9c54bd5d91c6e1cc1e6f9127f175ce3162d8435cf8d4715149598c9baab4ff5"}, + {file = "psycopg_binary-3.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:2aba18f57da97b96ea9a6663c8982038a9d4a47b1f94f004ffa9491bd7d21160"}, + {file = "psycopg_binary-3.2.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:076bd384a0d8bb7a59514b0d62bb75b48f83955a32ebec408b08db0e51bb06e5"}, + {file = "psycopg_binary-3.2.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f162a44ed7e06ed075cbc9dfda23850a7f702c44af4b62061e9c83430130ff36"}, + {file = "psycopg_binary-3.2.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e450989848bb63315e1768e6c6026cfdf6f72450c3752ce9f6e307c1d62b8d"}, + {file = "psycopg_binary-3.2.8-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90c0f2c88578db2bbeea98cd10fcb6f635c0b5bdd23ae90a931716589094ed08"}, + {file = "psycopg_binary-3.2.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75a929759a498b1b59481091da731f928e0cdbd3d7393b8a1022a1b57f01a91a"}, + {file = "psycopg_binary-3.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d310d188bb349a5f66cc037f7416fd640ca9847d0083a63ba6c091fd45075482"}, + {file = "psycopg_binary-3.2.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f4965bc9d2ef8eed31ff411840e2ab0e1d0c1c59575e0154ced7b652ef0eaa33"}, + {file = "psycopg_binary-3.2.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f1c26c1213efba8102911099af2203db6859855f7ceba21fd941e6d2bc7e84e"}, + {file = "psycopg_binary-3.2.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:58c5c7ef4daaaefb1e656a307ceb61aa3a101a5eb843004579d423428bef66e5"}, + {file = "psycopg_binary-3.2.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f501ee2b41a153aee59a3a5db238718f801ac39eec54ad3f28fbe657002e944"}, + {file = "psycopg_binary-3.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:fe51d8297bc8c178be1cc0ac6c060bfd706afb5cb04e794a44feae27c0afe6f4"}, + {file = "psycopg_binary-3.2.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1c330b86bc5ea67fee529d3c7b53c6394f8cacad77a3214c50fce0d5bdbc10cf"}, + {file = "psycopg_binary-3.2.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9ce4e637ac339bfe583ac26e18232c33f9039c93cfc01adaec550cb5e8a03f87"}, + {file = "psycopg_binary-3.2.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:272ee7cd175996c7262f7ffb561593829b448032a52c545d844bc6a4fb77b078"}, + {file = "psycopg_binary-3.2.8-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7237b1abcc36c04b45916c983a6c3d799104201f72475eab367874a5f37d3e7"}, + {file = "psycopg_binary-3.2.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c9a30a1d8338823603cf064637aae5580c41ed95675c7aee6a47165784d0464"}, + {file = "psycopg_binary-3.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f27d5ae05062f8ea0da6c11262ba8a1ab70864b1c18ea65d9e61636a8c72da4"}, + {file = "psycopg_binary-3.2.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:10fa234801b9b8b23799f869300c632a3298fb8daecd2d5734d08ab76e7a17cb"}, + {file = "psycopg_binary-3.2.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b055dba7df07c39f6a40a71862bf5525320350e3bd4c6d1809342fb7061d111f"}, + {file = "psycopg_binary-3.2.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8c36b8d3f76e2831f3b33f34226952ed39d1d6a79cb2ca2bf044f28df9c6b5f0"}, + {file = "psycopg_binary-3.2.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:764f9163ad9cfd32abd2d06f3000a52faf7a2b2411801d681ebe9158d72b46d5"}, + {file = "psycopg_binary-3.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:d8fa6fec9f7e225458d0031c43dd6d20673f55953eebe539d37e4b94b8831984"}, + {file = "psycopg_binary-3.2.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84f03982598a6353cf70cafae34c16da28eac74ba9862cc740b6ba0dcf9721fc"}, + {file = "psycopg_binary-3.2.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d247f55b28afc4a87b77240e733419ad0c82be2ec122a0b93fbb227ee0e6608e"}, + {file = "psycopg_binary-3.2.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89eb0c15c0eec1c81256e9df3c01d9bd1067f4365872f6f81da7521ab30e19de"}, + {file = "psycopg_binary-3.2.8-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aef90bdc201f2d375e5996d44124c588d3a7ce9f67c79f30531cdc5ead2c3d"}, + {file = "psycopg_binary-3.2.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b60a17eca6a6906af8084c518be81bd71a3d50ddc69c0dc667d6ce9b8f4d8604"}, + {file = "psycopg_binary-3.2.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8297d92f41e19b6794b04bdf7d53938a5ad8e68f7105b50048a078477b7ee4b8"}, + {file = "psycopg_binary-3.2.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a547d53e615776f8e79768aacd7a12c6f0131fa1d6820d2e3e848261b0ad3849"}, + {file = "psycopg_binary-3.2.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:058cfd606f1dc0be9b5a80d208fb9b487f7b4986a955322cbb45cee7e3e8056e"}, + {file = "psycopg_binary-3.2.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:15d21ed3292fb19b6ab096c3522d561d196eeef3903c31f1318df7478eb96fa5"}, + {file = "psycopg_binary-3.2.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6384f81c33a369144e4b98cbb4bf3ec4ac102ae11cfb84e70cf99aa43a44925"}, + {file = "psycopg_binary-3.2.8-cp313-cp313-win_amd64.whl", hash = "sha256:60db59a0f1676f70c027a8273b7b360af85ef87bf43cd49eb63727b72a170a9f"}, + {file = "psycopg_binary-3.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:94fcd34521b45d6392a347a3f0d3f913dc26c70bfe06ba7b57f8e2a5c5fb4722"}, + {file = "psycopg_binary-3.2.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f203d9d34a2b8e4808d042437b6f5eebb36d9236bb28e89ad9969094fce6354a"}, + {file = "psycopg_binary-3.2.8-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:36ee041375a1d406335fe10a0d80f9429f7144fd128caa0183b9ac8932cc7219"}, + {file = "psycopg_binary-3.2.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:37120696713a1eca988504eaa36cb90ea8a48c58dbb0c49f5db6464abfcb9bec"}, + {file = "psycopg_binary-3.2.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d7400d163efba4e4e94e0b1777289d990c55fd6e2dd88d0145c5917e3f398ed"}, + {file = "psycopg_binary-3.2.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c39a2218d38f36740898d3bf8f9cccd5efa9c10ef9e7a3ffa5db8972b278df1b"}, + {file = "psycopg_binary-3.2.8-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:1d551879d0614cc8b9027d9a20460e22b36440ecf0f97abcee30f3a9cace676f"}, + {file = "psycopg_binary-3.2.8-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:05db811cf6fba97187ba287ecc097c6735c178fe6e9383df44d95f0be70ed1d6"}, + {file = "psycopg_binary-3.2.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:43ff57a26f0c765b78f59cb81b9f2d6dd22ee729a1f07b3e8a7f7a6e019435ed"}, + {file = "psycopg_binary-3.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:2c1ca0296260a30d05ea45cb69824bc99711232d96ff5980a9458e91bb4d6581"}, + {file = "psycopg_binary-3.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8b1b5fd2e4ef6b28f0740fff4426e51d71390dbf970795f2e445536ce47da480"}, + {file = "psycopg_binary-3.2.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08ce4010185ee6d42287b37b6d2a18006fa9c053ecd2ed50d5bd428b99bdbee5"}, + {file = "psycopg_binary-3.2.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4702427a3f6f240f888d78e36de37cc6d4298e95178e065fbc0c353fe692774"}, + {file = "psycopg_binary-3.2.8-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42741f3fadfcef01252f4f6c67ab34a238c331c2504d976559236633618b1417"}, + {file = "psycopg_binary-3.2.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53dfc4504a0e3b0f5efb1b94c9da68b917adc8a9c49c1b0061f6fa8125bd136c"}, + {file = "psycopg_binary-3.2.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72f6269a64ee4f8b6b27116abf6536b31c1757973b0f0e612e19a1ad5376a73a"}, + {file = "psycopg_binary-3.2.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b72a737b88d1a14b2d9efea6819579ee8c4f335825f92f8d6e725f1e72ac519f"}, + {file = "psycopg_binary-3.2.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2705c34cba861699539619544078fe2a314b79c582874e813a6e512782b22638"}, + {file = "psycopg_binary-3.2.8-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b06a7cf2585bf6a3b4f9397af48427f558049a570d44b142eb9948c9b734c8ae"}, + {file = "psycopg_binary-3.2.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2f1be2d3160cdfb4732cd9f5777b0e9c78509ef0033dd6cff34ee0f16560e2fe"}, + {file = "psycopg_binary-3.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:6a45e2409352a99c8b4f733b86daf19c4df3dc7d9c1f2fb880adf7dfa225678a"}, ] [[package]] @@ -1183,20 +1177,19 @@ files = [ [[package]] name = "pydantic" -version = "2.11.5" +version = "2.10.6" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7"}, - {file = "pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a"}, + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + 
{file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.33.2" +pydantic-core = "2.27.2" typing-extensions = ">=4.12.2" -typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -1204,166 +1197,135 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, 
- {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, - {file = 
"pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, - {file = 
"pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, - {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = 
"pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = 
"pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" -[[package]] -name = "pygments" -version = "2.19.1" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pyjwt" -version = "2.9.0" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, - {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, -] - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - [[package]] name = "pytest" -version = "8.4.0" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "pytest-8.4.0-py3-none-any.whl", hash = "sha256:f40f825768ad76c0977cbacdf1fd37c6f7a468e460ea6a0636078f8972d4517e"}, - {file = "pytest-8.4.0.tar.gz", hash = "sha256:14d920b48472ea0dbf68e45b96cd1ffda4705f33307dcc86c676c1b5104838a6"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] -colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} -iniconfig = ">=1" -packaging = ">=20" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" pluggy = ">=1.5,<2" -pygments = ">=2.7.2" [package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" @@ -1403,26 +1365,25 @@ testing = ["fields", "hunter", 
"process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "python-jose" -version = "3.5.0" +version = "3.3.0" description = "JOSE implementation in Python" optional = false -python-versions = ">=3.9" +python-versions = "*" files = [ - {file = "python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771"}, - {file = "python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b"}, + {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, ] [package.dependencies] cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""} ecdsa = "!=0.15" -pyasn1 = ">=0.5.0" -rsa = ">=4.0,<4.1.1 || >4.1.1,<4.4 || >4.4,<5.0" +pyasn1 = "*" +rsa = "*" [package.extras] cryptography = ["cryptography (>=3.4.0)"] -pycrypto = ["pycrypto (>=2.6.0,<2.7.0)"] -pycryptodome = ["pycryptodome (>=3.3.1,<4.0.0)"] -test = ["pytest", "pytest-cov"] +pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] +pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] [[package]] name = "python-multipart" @@ -1437,42 +1398,39 @@ files = [ [[package]] name = "pytz" -version = "2025.2" +version = "2025.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, - {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, ] [[package]] name = "redis" -version = "5.3.0" +version = "5.2.1" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.8" files = [ - {file = "redis-5.3.0-py3-none-any.whl", hash = "sha256:f1deeca1ea2ef25c1e4e46b07f4ea1275140526b1feea4c6459c0ec27a10ef83"}, - {file = "redis-5.3.0.tar.gz", hash = "sha256:8d69d2dde11a12dc85d0dbf5c45577a5af048e2456f7077d87ad35c1c81c310e"}, + {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, + {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, ] -[package.dependencies] -PyJWT = ">=2.9.0,<2.10.0" - [package.extras] hiredis = ["hiredis (>=3.0.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] [[package]] name = "rsa" -version = "4.9.1" +version = "4.9" description = "Pure-Python RSA implementation" optional = false -python-versions = "<4,>=3.6" +python-versions = ">=3.6,<4" files = [ - {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, - {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, ] 
[package.dependencies] @@ -1507,13 +1465,13 @@ files = [ [[package]] name = "setuptools" -version = "80.9.0" +version = "78.0.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" files = [ - {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, - {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, + {file = "setuptools-78.0.1-py3-none-any.whl", hash = "sha256:1cc9b32ee94f93224d6c80193cbb768004667aa2f2732a473d6949b0236c1d4e"}, + {file = "setuptools-78.0.1.tar.gz", hash = "sha256:4321d2dc2157b976dee03e1037c9f2bc5fea503c0c47d3c9458e0e8e49e659ce"}, ] [package.extras] @@ -1549,81 +1507,81 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.41" +version = "2.0.39" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.41-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6854175807af57bdb6425e47adbce7d20a4d79bbfd6f6d6519cd10bb7109a7f8"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05132c906066142103b83d9c250b60508af556982a385d96c4eaa9fb9720ac2b"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b4af17bda11e907c51d10686eda89049f9ce5669b08fbe71a29747f1e876036"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c0b0e5e1b5d9f3586601048dd68f392dc0cc99a59bb5faf18aab057ce00d00b2"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0b3dbf1e7e9bc95f4bac5e2fb6d3fb2f083254c3fdd20a1789af965caf2d2348"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-win32.whl", hash = "sha256:1e3f196a0c59b0cae9a0cd332eb1a4bda4696e863f4f1cf84ab0347992c548c2"}, - {file = "SQLAlchemy-2.0.41-cp37-cp37m-win_amd64.whl", hash = "sha256:6ab60a5089a8f02009f127806f777fca82581c49e127f08413a66056bd9166dd"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b1f09b6821406ea1f94053f346f28f8215e293344209129a9c0fcc3578598d7b"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1936af879e3db023601196a1684d28e12f19ccf93af01bf3280a3262c4b6b4e5"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2ac41acfc8d965fb0c464eb8f44995770239668956dc4cdf502d1b1ffe0d747"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81c24e0c0fde47a9723c81d5806569cddef103aebbf79dbc9fcbb617153dea30"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23a8825495d8b195c4aa9ff1c430c28f2c821e8c5e2d98089228af887e5d7e29"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:60c578c45c949f909a4026b7807044e7e564adf793537fc762b2489d522f3d11"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-win32.whl", hash = "sha256:118c16cd3f1b00c76d69343e38602006c9cfb9998fa4f798606d28d63f23beda"}, - {file = "sqlalchemy-2.0.41-cp310-cp310-win_amd64.whl", hash = "sha256:7492967c3386df69f80cf67efd665c0f667cee67032090fe01d7d74b0e19bb08"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-win32.whl", hash = "sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8"}, - {file = "sqlalchemy-2.0.41-cp311-cp311-win_amd64.whl", hash = "sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-win32.whl", hash = "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6"}, - {file = "sqlalchemy-2.0.41-cp312-cp312-win_amd64.whl", hash = "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-win32.whl", hash = "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f"}, - {file = "sqlalchemy-2.0.41-cp313-cp313-win_amd64.whl", hash = "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:90144d3b0c8b139408da50196c5cad2a6909b51b23df1f0538411cd23ffa45d3"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:023b3ee6169969beea3bb72312e44d8b7c27c75b347942d943cf49397b7edeb5"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725875a63abf7c399d4548e686debb65cdc2549e1825437096a0af1f7e374814"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81965cc20848ab06583506ef54e37cf15c83c7e619df2ad16807c03100745dea"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dd5ec3aa6ae6e4d5b5de9357d2133c07be1aff6405b136dad753a16afb6717dd"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ff8e80c4c4932c10493ff97028decfdb622de69cae87e0f127a7ebe32b4069c6"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-win32.whl", hash = "sha256:4d44522480e0bf34c3d63167b8cfa7289c1c54264c2950cc5fc26e7850967e45"}, - {file = "sqlalchemy-2.0.41-cp38-cp38-win_amd64.whl", hash = "sha256:81eedafa609917040d39aa9332e25881a8e7a0862495fcdf2023a9667209deda"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9a420a91913092d1e20c86a2f5f1fc85c1a8924dbcaf5e0586df8aceb09c9cc2"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:906e6b0d7d452e9a98e5ab8507c0da791856b2380fdee61b765632bb8698026f"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a373a400f3e9bac95ba2a06372c4fd1412a7cee53c37fc6c05f829bf672b8769"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:087b6b52de812741c27231b5a3586384d60c353fbd0e2f81405a814b5591dc8b"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:34ea30ab3ec98355235972dadc497bb659cc75f8292b760394824fab9cf39826"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8280856dd7c6a68ab3a164b4a4b1c51f7691f6d04af4d4ca23d6ecf2261b7923"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-win32.whl", hash = "sha256:b50eab9994d64f4a823ff99a0ed28a6903224ddbe7fef56a6dd865eec9243440"}, - {file = "sqlalchemy-2.0.41-cp39-cp39-win_amd64.whl", hash = "sha256:5e22575d169529ac3e0a120cf050ec9daa94b6a9597993d1702884f6954a7d71"}, - {file = "sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576"}, - {file = "sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:66a40003bc244e4ad86b72abb9965d304726d05a939e8c09ce844d27af9e6d37"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67de057fbcb04a066171bd9ee6bcb58738d89378ee3cabff0bffbf343ae1c787"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:533e0f66c32093a987a30df3ad6ed21170db9d581d0b38e71396c49718fbb1ca"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7399d45b62d755e9ebba94eb89437f80512c08edde8c63716552a3aade61eb42"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:788b6ff6728072b313802be13e88113c33696a9a1f2f6d634a97c20f7ef5ccce"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-win32.whl", hash = "sha256:01da15490c9df352fbc29859d3c7ba9cd1377791faeeb47c100832004c99472c"}, + {file = 
"SQLAlchemy-2.0.39-cp37-cp37m-win_amd64.whl", hash = "sha256:f2bcb085faffcacf9319b1b1445a7e1cfdc6fb46c03f2dce7bc2d9a4b3c1cdc5"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b761a6847f96fdc2d002e29e9e9ac2439c13b919adfd64e8ef49e75f6355c548"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0d7e3866eb52d914aea50c9be74184a0feb86f9af8aaaa4daefe52b69378db0b"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995c2bacdddcb640c2ca558e6760383dcdd68830160af92b5c6e6928ffd259b4"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:344cd1ec2b3c6bdd5dfde7ba7e3b879e0f8dd44181f16b895940be9b842fd2b6"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5dfbc543578058c340360f851ddcecd7a1e26b0d9b5b69259b526da9edfa8875"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3395e7ed89c6d264d38bea3bfb22ffe868f906a7985d03546ec7dc30221ea980"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-win32.whl", hash = "sha256:bf555f3e25ac3a70c67807b2949bfe15f377a40df84b71ab2c58d8593a1e036e"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-win_amd64.whl", hash = "sha256:463ecfb907b256e94bfe7bcb31a6d8c7bc96eca7cbe39803e448a58bb9fcad02"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6827f8c1b2f13f1420545bd6d5b3f9e0b85fe750388425be53d23c760dcf176b"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9f119e7736967c0ea03aff91ac7d04555ee038caf89bb855d93bbd04ae85b41"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4600c7a659d381146e1160235918826c50c80994e07c5b26946a3e7ec6c99249"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a06e6c8e31c98ddc770734c63903e39f1947c9e3e5e4bef515c5491b7737dde"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4c433f78c2908ae352848f56589c02b982d0e741b7905228fad628999799de4"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7bd5c5ee1448b6408734eaa29c0d820d061ae18cb17232ce37848376dcfa3e92"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-win32.whl", hash = "sha256:87a1ce1f5e5dc4b6f4e0aac34e7bb535cb23bd4f5d9c799ed1633b65c2bcad8c"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-win_amd64.whl", hash = "sha256:871f55e478b5a648c08dd24af44345406d0e636ffe021d64c9b57a4a11518304"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a28f9c238f1e143ff42ab3ba27990dfb964e5d413c0eb001b88794c5c4a528a9"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08cf721bbd4391a0e765fe0fe8816e81d9f43cece54fdb5ac465c56efafecb3d"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a8517b6d4005facdbd7eb4e8cf54797dbca100a7df459fdaff4c5123265c1cd"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b2de1523d46e7016afc7e42db239bd41f2163316935de7c84d0e19af7e69538"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:412c6c126369ddae171c13987b38df5122cb92015cba6f9ee1193b867f3f1530"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b35e07f1d57b79b86a7de8ecdcefb78485dab9851b9638c2c793c50203b2ae8"}, + {file = 
"sqlalchemy-2.0.39-cp311-cp311-win32.whl", hash = "sha256:3eb14ba1a9d07c88669b7faf8f589be67871d6409305e73e036321d89f1d904e"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-win_amd64.whl", hash = "sha256:78f1b79132a69fe8bd6b5d91ef433c8eb40688ba782b26f8c9f3d2d9ca23626f"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c457a38351fb6234781d054260c60e531047e4d07beca1889b558ff73dc2014b"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:018ee97c558b499b58935c5a152aeabf6d36b3d55d91656abeb6d93d663c0c4c"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5493a8120d6fc185f60e7254fc056a6742f1db68c0f849cfc9ab46163c21df47"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2cf5b5ddb69142511d5559c427ff00ec8c0919a1e6c09486e9c32636ea2b9dd"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f03143f8f851dd8de6b0c10784363712058f38209e926723c80654c1b40327a"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06205eb98cb3dd52133ca6818bf5542397f1dd1b69f7ea28aa84413897380b06"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-win32.whl", hash = "sha256:7f5243357e6da9a90c56282f64b50d29cba2ee1f745381174caacc50d501b109"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-win_amd64.whl", hash = "sha256:2ed107331d188a286611cea9022de0afc437dd2d3c168e368169f27aa0f61338"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fe193d3ae297c423e0e567e240b4324d6b6c280a048e64c77a3ea6886cc2aa87"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:79f4f502125a41b1b3b34449e747a6abfd52a709d539ea7769101696bdca6716"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a10ca7f8a1ea0fd5630f02feb055b0f5cdfcd07bb3715fc1b6f8cb72bf114e4"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6b0a1c7ed54a5361aaebb910c1fa864bae34273662bb4ff788a527eafd6e14d"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52607d0ebea43cf214e2ee84a6a76bc774176f97c5a774ce33277514875a718e"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c08a972cbac2a14810463aec3a47ff218bb00c1a607e6689b531a7c589c50723"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-win32.whl", hash = "sha256:23c5aa33c01bd898f879db158537d7e7568b503b15aad60ea0c8da8109adf3e7"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-win_amd64.whl", hash = "sha256:4dabd775fd66cf17f31f8625fc0e4cfc5765f7982f94dc09b9e5868182cb71c0"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2600a50d590c22d99c424c394236899ba72f849a02b10e65b4c70149606408b5"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4eff9c270afd23e2746e921e80182872058a7a592017b2713f33f96cc5f82e32"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7332868ce891eda48896131991f7f2be572d65b41a4050957242f8e935d5d7"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125a7763b263218a80759ad9ae2f3610aaf2c2fbbd78fff088d584edf81f3782"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:04545042969833cb92e13b0a3019549d284fd2423f318b6ba10e7aa687690a3c"}, + {file = 
"sqlalchemy-2.0.39-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:805cb481474e111ee3687c9047c5f3286e62496f09c0e82e8853338aaaa348f8"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-win32.whl", hash = "sha256:34d5c49f18778a3665d707e6286545a30339ad545950773d43977e504815fa70"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-win_amd64.whl", hash = "sha256:35e72518615aa5384ef4fae828e3af1b43102458b74a8c481f69af8abf7e802a"}, + {file = "sqlalchemy-2.0.39-py3-none-any.whl", hash = "sha256:a1c6b0a5e3e326a466d809b651c63f278b1256146a377a528b6938a279da334f"}, + {file = "sqlalchemy-2.0.39.tar.gz", hash = "sha256:5d2d1fe548def3267b4c70a8568f108d1fed7cbbeccb9cc166e05af2abc25c22"}, ] [package.dependencies] -greenlet = {version = ">=1", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} mypy = {version = ">=0.910", optional = true, markers = "extra == \"mypy\""} typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] -aioodbc = ["aioodbc", "greenlet (>=1)"] -aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (>=1)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] @@ -1634,7 +1592,7 @@ mysql-connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] postgresql-pg8000 = ["pg8000 (>=1.29.1)"] postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] @@ -1645,13 +1603,13 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.46.2" +version = "0.46.1" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.9" files = [ - {file = "starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35"}, - {file = "starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5"}, + {file = "starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227"}, + {file = "starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230"}, ] [package.dependencies] @@ -1673,29 +1631,15 @@ files = [ [[package]] name = "typing-extensions" -version = "4.14.0" -description = "Backported and Experimental Type Hints for Python 3.9+" -optional = false -python-versions = ">=3.9" -files = [ - {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, - {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, -] - -[[package]] -name = "typing-inspection" -version = "0.4.1" -description = "Runtime typing introspection tools" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, - {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] -[package.dependencies] -typing-extensions = ">=4.12.0" - [[package]] name = "tzdata" version = "2025.2" @@ -1709,13 +1653,13 @@ files = [ [[package]] name = "uvicorn" -version = "0.34.3" +version = "0.34.2" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.9" files = [ - {file = "uvicorn-0.34.3-py3-none-any.whl", hash = "sha256:16246631db62bdfbf069b0645177d6e8a77ba950cfedbfd093acef9444e4d885"}, - {file = "uvicorn-0.34.3.tar.gz", hash = "sha256:35919a9a979d7a59334b6b10e05d77c1d0d574c50e0fc98b8b1a0f165708b55a"}, + {file = "uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403"}, + {file = "uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328"}, ] [package.dependencies] @@ -1723,7 +1667,7 @@ click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "uvloop" @@ -1989,5 +1933,5 @@ testing = ["coverage[toml]", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" -python-versions = "3.12.3" -content-hash = "a1c686341e1741ae1c376d034d705ce90e835a8e8a3bab14b68e02c24b3452e1" +python-versions = "3.12.6" +content-hash = "5a35f9cf9805cbeb1e801227881536de90af7c326e933da6c01fc07812d456e5" From 2d8f8c22704237edf44077fb737af533e608f260 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 9 Jun 2025 16:18:31 +0300 Subject: [PATCH 15/25] refactor: disable darglint2 task_508 --- .darglint2 | 6 ------ .github/workflows/checks.yml | 20 -------------------- Makefile | 1 - pyproject.toml | 2 -- 4 files changed, 29 deletions(-) delete mode 100644 .darglint2 diff --git a/.darglint2 b/.darglint2 deleted file mode 100644 index 77c3a1e82..000000000 --- a/.darglint2 +++ /dev/null @@ -1,6 +0,0 @@ -# https://akaihola.github.io/darglint2/master/index.html -[darglint2] -docstring_style=google - -# https://akaihola.github.io/darglint2/master/readme.html#strictness-configuration -strictness=full diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 6fb1c931e..f796e9727 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -68,26 +68,6 @@ jobs: NEW_TAG: linter run: docker run $NEW_TAG mypy . - darglint_docstrings: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: build linters - env: - TAG: ghcr.io/${{ env.REPO }}_linters:latest - NEW_TAG: linter - run: docker build --target=runtime -f .docker/lint.Dockerfile . -t $NEW_TAG --cache-to type=gha,mode=max --cache-from $TAG --build-arg BUILDKIT_INLINE_CACHE=1 - - name: Run linters - env: - NEW_TAG: linter - run: docker run $NEW_TAG darglint2 -v 2 .kerberos/ app - tests: runs-on: ubuntu-latest steps: diff --git a/Makefile b/Makefile index 83e6902d3..c8851e65e 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,6 @@ before_pr: ruff check . --preview --fix --unsafe-fixes ruff format . --preview mypy . 
- darglint2 .kerberos/ app build: ## build app and manually generate self-signed cert make down diff --git a/pyproject.toml b/pyproject.toml index 0d9e73dce..183306e66 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,6 @@ asyncstdlib = "^3.10.6" backoff = "^2.2.1" bcrypt = "4.0.1" cryptography = "^44.0.1" -darglint2 = "^1.8.2" dishka = "^1.4.2" dnspython = "^2.7.0" fastapi = "^0.115.0" @@ -114,7 +113,6 @@ select = [ "I", # isort, check tool.ruff.lint.isort. Must have "N", # pep8-naming "A", # flake8 builtin-attribute-shadowing - # DOC and D not enough. Use darglint2. "D", # pydocstyle, check tool.ruff.lint.pydocstyle "DOC", # pydoclint TODO uncomment, ruff fix and fix error "UP", # pyupgrade, check tool.ruff.lint.pyupgrade. Must have From 053a92a4e9eb8c32fb6729d71a0f6b0bfa47b5ed Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 9 Jun 2025 16:30:02 +0300 Subject: [PATCH 16/25] fix: ANN001 rule folders task_508 --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 183306e66..dc3005371 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -178,6 +178,7 @@ ignore-variadic-names = true [tool.ruff.lint.per-file-ignores] "tests/*.py" = ["S101", "D104", "DOC501", "D417", "DOC201", "DOC402"] # Ignore rules for the `tests/` directory. "app/alembic/*.py" = ["ANN001"] # Ignore `Flake8-isort IO01` rule for the `alembic/` directory. It works incorrect in CI ruff test. +"alembic/*.py" = ["ANN001"] # Ignore `Flake8-isort IO01` rule for the `alembic/` directory. It works incorrect in CI ruff test. [tool.ruff.lint.mccabe] # 15 Complexity level is too high, need to reduce this level or ignore it `# noqa: C901`. From 295ccb3804c0b7ac224b777801c05c8f70b2bb39 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 9 Jun 2025 18:26:22 +0300 Subject: [PATCH 17/25] refactor: lint task_508 --- .docker/lint.Dockerfile | 8 +++++--- Makefile | 1 - 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.docker/lint.Dockerfile b/.docker/lint.Dockerfile index fab799df3..11405d13e 100644 --- a/.docker/lint.Dockerfile +++ b/.docker/lint.Dockerfile @@ -21,7 +21,7 @@ RUN --mount=type=cache,target=$POETRY_CACHE_DIR poetry install --with linters -- # The runtime image, used to just run the code provided its virtual environment FROM python:3.12.6-slim-bookworm AS runtime -WORKDIR /app +WORKDIR /md RUN set -eux; ENV VIRTUAL_ENV=/venvs/.venv \ @@ -31,5 +31,7 @@ ENV VIRTUAL_ENV=/venvs/.venv \ COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV} -COPY app /app -COPY pyproject.toml ./ \ No newline at end of file +COPY app ./app +COPY tests ./tests +COPY .kerberos ./.kerberos +COPY pyproject.toml ./ diff --git a/Makefile b/Makefile index c8851e65e..b105d9e9e 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,6 @@ help: ## show help message @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[$$()% a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) before_pr: - ruff format . --preview ruff check . --preview --fix --unsafe-fixes ruff format . --preview mypy . 
From c581f2a0b4c4d9893f1ec3184137464135b82017 Mon Sep 17 00:00:00 2001
From: Milov Dmitriy
Date: Tue, 10 Jun 2025 12:47:49 +0300
Subject: [PATCH 18/25] refactor: update comments into ignore list in ruff
 configuration task_508

---
 pyproject.toml | 35 +++++++++++++++++------------------
 1 file changed, 17 insertions(+), 18 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index dc3005371..da6d09287 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -143,24 +143,23 @@ select = [
 
 # Gradually remove all values marked 'TODO' and fix errors.
 ignore = [
-    "D203", # this is necessary. Conflict with `D211`
-    "D213", # this is necessary. Conflict with `D212`
-    "D301", # this is necessary.
-    "UP035", # this is necessary. We allowed deprecated import
-    "ANN002", # this is necessary.
-    "ANN003", # this is necessary.
-    "ASYNC109",
-    "ASYNC230",
-    "S311", # this is necessary.
-    "B904", # this is necessary.
-    "COM812", # this is necessary. Cause conflicts when used with the formatter
-    "TC001", # this is necessary.
-    "TC002", # this is necessary.
-    "TC003", # this is necessary.
-    "B905", # this is necessary. get-attr-with-constant
-    "RUF029",
-    "DOC201", # TODO delete it and fix
-    "ANN401", # FIXME. 'Dynamically typed expressions (typing.Any) are disallowed'
+    "D203", # Conflicts with `D211`.
+    "D213", # Conflicts with `D212`.
+    "D301", # It converts `"""Description."""` to `r"""Description."""`.
+    "UP035", # We allow deprecated imports.
+    "ANN002", # Disable type annotations for *args.
+    "ANN003", # Disable type annotations for **kwargs.
+    "ASYNC109", # Allow timeout parameters in async functions.
+    "ASYNC230", # Allow opening files with blocking methods like `open`.
+    "S311", # We use `random` to generate pseudo-random numbers.
+    "B904", # We do not use `raise ... from ...`.
+    "COM812", # Causes conflicts when used with the formatter.
+    "TC001", # First-party imports not defined in a type-checking block.
+    "TC002", # Third-party imports not defined in a type-checking block.
+    "TC003", # Standard library imports not defined in a type-checking block.
+    "B905", # Allow `zip` calls without an explicit `strict` parameter.
+    "RUF029", # Allow functions declared `async` that do not `await` or otherwise use async features.
+    "ANN401", # Allow dynamically typed expressions (typing.Any).
 ]
 
 fixable = ["ALL"]

From 13a1f6e01e356e0da828906beb2ecc8bc85257e5 Mon Sep 17 00:00:00 2001
From: Milov Dmitriy
Date: Mon, 16 Jun 2025 13:57:51 +0300
Subject: [PATCH 19/25] refactor: fix entity type dao returns task_508

---
 app/ioc.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/app/ioc.py b/app/ioc.py
index 63f70e23c..68d5fb600 100644
--- a/app/ioc.py
+++ b/app/ioc.py
@@ -219,7 +219,11 @@ async def get_entity_type_dao(
         self,
         session: AsyncSession,
     ) -> EntityTypeDAO:
-        """Get Entity Type DAO."""
+        """Get Entity Type DAO.
+ + Returns: + EntityTypeDAO: Entity Type DAO + """ return EntityTypeDAO(session) @provide(scope=Scope.APP) From a81c520bcab53c30389244ed2044d5a1e69c4df2 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 23 Jun 2025 11:33:11 +0300 Subject: [PATCH 20/25] delete: not important task_508 --- .kerberos/config_server.py | 86 +++---------------- app/alembic/env.py | 12 +-- app/api/auth/utils.py | 3 - app/ioc.py | 12 --- app/ldap_protocol/asn1parser.py | 3 - app/ldap_protocol/dialogue.py | 6 +- app/ldap_protocol/dns.py | 12 +-- app/ldap_protocol/filter_interpreter.py | 8 -- app/ldap_protocol/kerberos/__init__.py | 3 - app/ldap_protocol/kerberos/base.py | 6 +- app/ldap_protocol/kerberos/client.py | 6 +- app/ldap_protocol/kerberos/stub.py | 7 +- app/ldap_protocol/kerberos/utils.py | 9 -- app/ldap_protocol/ldap_requests/abandon.py | 3 - app/ldap_protocol/ldap_requests/add.py | 3 - app/ldap_protocol/ldap_requests/bind.py | 3 - .../ldap_requests/bind_methods/base.py | 8 +- .../ldap_requests/bind_methods/sasl_gssapi.py | 11 --- .../ldap_requests/bind_methods/sasl_plain.py | 10 +-- .../ldap_requests/bind_methods/simple.py | 3 - app/ldap_protocol/ldap_requests/delete.py | 3 - app/ldap_protocol/ldap_requests/extended.py | 15 ---- app/ldap_protocol/ldap_requests/modify.py | 8 -- app/ldap_protocol/ldap_requests/search.py | 3 - app/ldap_protocol/ldap_responses.py | 30 +------ .../ldap_schema/attribute_type_dao.py | 9 +- .../ldap_schema/entity_type_dao.py | 9 +- .../ldap_schema/object_class_dao.py | 3 - app/ldap_protocol/policies/access_policy.py | 3 - app/ldap_protocol/policies/password_policy.py | 20 +---- app/ldap_protocol/server.py | 33 ++----- app/ldap_protocol/session_storage.py | 61 ++----------- app/ldap_protocol/utils/helpers.py | 9 -- app/ldap_protocol/utils/queries.py | 6 -- .../utils/raw_definition_parser.py | 6 -- app/models.py | 3 - app/multidirectory.py | 12 +-- app/schedule.py | 6 +- tests/conftest.py | 30 ++----- tests/test_api/test_main/test_kadmin.py | 7 +- 40 files changed, 47 insertions(+), 443 deletions(-) diff --git a/.kerberos/config_server.py b/.kerberos/config_server.py index f2ddaecba..802ea6a82 100644 --- a/.kerberos/config_server.py +++ b/.kerberos/config_server.py @@ -107,23 +107,11 @@ async def add_princ( @abstractmethod async def get_princ(self, name: str) -> Principal | None: - """Get principal. - - Args: - name (str): Principal name - - Returns: - Principal | None: - """ + """Get principal.""" @abstractmethod async def change_password(self, name: str, new_password: str) -> None: - """Chanage principal's password. - - Args: - name (str): principal name - new_password (str): password - """ + """Change principal's password.""" @abstractmethod async def create_or_update_princ_pw( @@ -140,20 +128,11 @@ async def create_or_update_princ_pw( @abstractmethod async def del_princ(self, name: str) -> None: - """Delete principal by name. - - Args: - name (str): principal name - """ + """Delete principal by name.""" @abstractmethod async def rename_princ(self, name: str, new_name: str) -> None: - """Rename principal. - - Args: - name (str): principal name - new_name (str): new principal name - """ + """Rename principal.""" @abstractmethod async def ktadd(self, names: list[str], fn: str) -> None: @@ -166,21 +145,11 @@ async def ktadd(self, names: list[str], fn: str) -> None: @abstractmethod async def lock_princ(self, name: str, **dbargs) -> None: - """Lock principal. 
- - Args: - name (str): principal name - **dbargs: database arguments - """ + """Lock principal.""" @abstractmethod async def force_pw_principal(self, name: str, **dbargs) -> None: - """Force password principal. - - Args: - name (str): principal name - **dbargs: database arguments - """ + """Force password principal.""" class KAdminLocalManager(AbstractKRBManager): @@ -189,11 +158,7 @@ class KAdminLocalManager(AbstractKRBManager): client: KAdminProtocol def __init__(self, loop: asyncio.AbstractEventLoop | None = None) -> None: - """Create threadpool and get loop. - - Args: - loop (asyncio.AbstractEventLoop | None): event loop. - """ + """Create threadpool and get loop.""" self.loop = loop or asyncio.get_running_loop() async def connect(self) -> Self: @@ -281,9 +246,6 @@ async def _get_raw_principal(self, name: str) -> PrincipalProtocol: async def get_princ(self, name: str) -> Principal: """Get principal. - Args: - name (str): principal name - Returns: Principal: Principal kadmin object """ @@ -291,12 +253,7 @@ async def get_princ(self, name: str) -> Principal: return Principal.model_validate(principal, from_attributes=True) async def change_password(self, name: str, new_password: str) -> None: - """Chanage principal's password. - - Args: - name (str): principal name - new_password (str): password - """ + """Chanage principal's password.""" princ = await self._get_raw_principal(name) await self.loop.run_in_executor( self.pool, @@ -321,20 +278,11 @@ async def create_or_update_princ_pw( await self.add_princ(name, new_password) async def del_princ(self, name: str) -> None: - """Delete principal by name. - - Args: - name (str): principal name - """ + """Delete principal by name.""" await self.loop.run_in_executor(self.pool, self.client.delprinc, name) async def rename_princ(self, name: str, new_name: str) -> None: - """Rename principal. - - Args: - name (str): Principal name. - new_name (str): Principal new name. - """ + """Rename principal.""" await self.loop.run_in_executor( self.pool, self.client.rename_principal, @@ -360,23 +308,13 @@ async def ktadd(self, names: list[str], fn: str) -> None: await self.loop.run_in_executor(self.pool, princ.ktadd, fn) async def lock_princ(self, name: str, **dbargs) -> None: - """Lock princ. - - Args: - name (str): principal names - **dbargs: database arguments - """ + """Lock princ.""" princ = await self._get_raw_principal(name) princ.expire = "Now" await self.loop.run_in_executor(self.pool, princ.commit) async def force_pw_principal(self, name: str, **dbargs) -> None: - """Force password principal. - - Args: - name (str): principal names - **dbargs: database arguments - """ + """Force password principal.""" princ = await self._get_raw_principal(name) princ.pwexpire = "Now" await self.loop.run_in_executor(self.pool, princ.commit) diff --git a/app/alembic/env.py b/app/alembic/env.py index 65197e763..e8036ec41 100644 --- a/app/alembic/env.py +++ b/app/alembic/env.py @@ -22,11 +22,7 @@ def run_sync_migrations(connection: AsyncConnection): - """Run sync migrations. - - Args: - connection (AsyncConnection): async db connection. - """ + """Run sync migrations.""" context.configure( connection=connection, target_metadata=target_metadata, @@ -39,11 +35,7 @@ def run_sync_migrations(connection: AsyncConnection): async def run_async_migrations(settings: Settings): - """Run async migrations. 
- - Args: - settings (Settings): Settings - """ + """Run async migrations.""" engine = create_async_engine(str(settings.POSTGRES_URI)) async with engine.connect() as connection: diff --git a/app/api/auth/utils.py b/app/api/auth/utils.py index a26b1672a..19375e1f1 100644 --- a/app/api/auth/utils.py +++ b/app/api/auth/utils.py @@ -41,9 +41,6 @@ def get_ip_from_request(request: Request) -> IPv4Address | IPv6Address: def get_user_agent_from_request(request: Request) -> str: """Get user agent from request. - Args: - request (Request): The incoming request object. - Returns: str: The user agent header. """ diff --git a/app/ioc.py b/app/ioc.py index 68d5fb600..4e19b3149 100644 --- a/app/ioc.py +++ b/app/ioc.py @@ -55,9 +55,6 @@ class MainProvider(Provider): def get_engine(self, settings: Settings) -> AsyncEngine: """Get async engine. - Args: - settings (Settings): settings. - Returns: AsyncEngine: """ @@ -345,9 +342,6 @@ class LDAPServerProvider(Provider): async def get_session(self, storage: SessionStorage) -> LDAPSession: """Create ldap session. - Args: - storage (SessionStorage): session storage - Returns: LDAPSession: ldap session """ @@ -363,9 +357,6 @@ class MFACredsProvider(Provider): async def get_auth(self, session: AsyncSession) -> Creds | None: """Admin creds get. - Args: - session (AsyncSession): async session - Returns: MFA_HTTP_Creds: optional creds """ @@ -375,9 +366,6 @@ async def get_auth(self, session: AsyncSession) -> Creds | None: async def get_auth_ldap(self, session: AsyncSession) -> Creds | None: """Admin creds get. - Args: - session (AsyncSession): db - Returns: MFA_LDAP_Creds: optional creds """ diff --git a/app/ldap_protocol/asn1parser.py b/app/ldap_protocol/asn1parser.py index 59cdd8437..2ac8fc08f 100644 --- a/app/ldap_protocol/asn1parser.py +++ b/app/ldap_protocol/asn1parser.py @@ -300,9 +300,6 @@ def value_to_string( def asn1todict(decoder: Decoder) -> list[ASN1Row]: """Recursively collect ASN.1 data to list of ASNRows. - Args: - decoder (Decoder): instance of Decoder - Returns: list[ASN1Row]: """ diff --git a/app/ldap_protocol/dialogue.py b/app/ldap_protocol/dialogue.py index 9fd42d0f8..f82645248 100644 --- a/app/ldap_protocol/dialogue.py +++ b/app/ldap_protocol/dialogue.py @@ -137,11 +137,7 @@ def user(self, user: User) -> None: ) async def set_user(self, user: User | UserSchema) -> None: - """Bind user to session concurrently save. - - Args: - user (User | UserSchema): instance of User or UserSchema - """ + """Bind user to session concurrently save.""" async with self._lock: if isinstance(user, User): self._user = await UserSchema.from_db(user, self.key) diff --git a/app/ldap_protocol/dns.py b/app/ldap_protocol/dns.py index 506f02323..4c5d96722 100644 --- a/app/ldap_protocol/dns.py +++ b/app/ldap_protocol/dns.py @@ -48,9 +48,6 @@ def logger_wraps(is_stub: bool = False) -> Callable: """Log DNSManager calls. - Args: - is_stub (bool): If True, marks the logger as a stub. Default is False. - Returns: Callable: Decorator for logging. """ @@ -58,9 +55,6 @@ def logger_wraps(is_stub: bool = False) -> Callable: def wrapper(func: Callable) -> Callable: """Decorator for logging function calls. - Args: - func (Callable): Function to wrap. - Returns: Callable: Wrapped function. """ @@ -162,11 +156,7 @@ class AbstractDNSManager(ABC): """Abstract DNS manager class.""" def __init__(self, settings: DNSManagerSettings) -> None: - """Set up DNS manager. 
- - Args: - settings (DNSManagerSettings): DNS manager settings - """ + """Set up DNS manager.""" self._dns_settings = settings @logger_wraps() diff --git a/app/ldap_protocol/filter_interpreter.py b/app/ldap_protocol/filter_interpreter.py index 6ad386bb1..12259dea1 100644 --- a/app/ldap_protocol/filter_interpreter.py +++ b/app/ldap_protocol/filter_interpreter.py @@ -29,14 +29,6 @@ def _get_substring(right: ASN1Row) -> str: # RFC 4511 - """Get substring. - - Args: - right (ASN1Row): Row with metadata - - Returns: - str: substring - """ expr = right.value[0] value = expr.value if isinstance(value, bytes): diff --git a/app/ldap_protocol/kerberos/__init__.py b/app/ldap_protocol/kerberos/__init__.py index e3783cbd3..ebb32d4d6 100644 --- a/app/ldap_protocol/kerberos/__init__.py +++ b/app/ldap_protocol/kerberos/__init__.py @@ -20,9 +20,6 @@ async def get_kerberos_class(session: AsyncSession) -> type[AbstractKadmin]: """Get kerberos server state. - Args: - session (AsyncSession): db - Returns: type[KerberosMDAPIClient] | type[StubKadminMDADPIClient]: api """ diff --git a/app/ldap_protocol/kerberos/base.py b/app/ldap_protocol/kerberos/base.py index b6fc9909f..cdb682adc 100644 --- a/app/ldap_protocol/kerberos/base.py +++ b/app/ldap_protocol/kerberos/base.py @@ -37,11 +37,7 @@ class AbstractKadmin(ABC): client: httpx.AsyncClient def __init__(self, client: httpx.AsyncClient) -> None: - """Set client. - - Args: - client (httpx.AsyncClient): http async client - """ + """Set client.""" self.client = client async def setup_configs( diff --git a/app/ldap_protocol/kerberos/client.py b/app/ldap_protocol/kerberos/client.py index 26c66ae24..53910ac0b 100644 --- a/app/ldap_protocol/kerberos/client.py +++ b/app/ldap_protocol/kerberos/client.py @@ -11,11 +11,7 @@ class KerberosMDAPIClient(AbstractKadmin): @logger_wraps(is_stub=True) async def setup(*args, **kwargs) -> None: # type: ignore - """Stub method, setup is not needed. - - Args: - **kwargs: keyword arguments - """ + """Stub method, setup is not needed.""" @logger_wraps() async def add_principal( diff --git a/app/ldap_protocol/kerberos/stub.py b/app/ldap_protocol/kerberos/stub.py index 5a01b82d4..1c093927f 100644 --- a/app/ldap_protocol/kerberos/stub.py +++ b/app/ldap_protocol/kerberos/stub.py @@ -11,12 +11,7 @@ class StubKadminMDADPIClient(AbstractKadmin): @logger_wraps() async def setup(self, *args, **kwargs) -> None: # type: ignore - """Call setup. - - Args: - *args: arguments - **kwargs: keyword arguments - """ + """Call setup.""" await super().setup(*args, **kwargs) @logger_wraps(is_stub=True) diff --git a/app/ldap_protocol/kerberos/utils.py b/app/ldap_protocol/kerberos/utils.py index 26f2cd0ff..845aaa5de 100644 --- a/app/ldap_protocol/kerberos/utils.py +++ b/app/ldap_protocol/kerberos/utils.py @@ -15,9 +15,6 @@ def logger_wraps(is_stub: bool = False) -> Callable: """Log kadmin calls. - Args: - is_stub (bool): flag to change logs (Default value = False) - Returns: Callable: any method """ @@ -25,9 +22,6 @@ def logger_wraps(is_stub: bool = False) -> Callable: def wrapper(func: Callable) -> Callable: """Wrap kadmin calls. - Args: - func (Callable): any function - Returns: Callable: wrapped function """ @@ -94,9 +88,6 @@ async def set_state(session: AsyncSession, state: "KerberosState") -> None: async def get_krb_server_state(session: AsyncSession) -> "KerberosState": """Get kerberos server state. - Args: - session (AsyncSession): db session - Returns: KerberosState: The current kerberos server state. 
""" diff --git a/app/ldap_protocol/ldap_requests/abandon.py b/app/ldap_protocol/ldap_requests/abandon.py index 8202230b3..ae8d3b025 100644 --- a/app/ldap_protocol/ldap_requests/abandon.py +++ b/app/ldap_protocol/ldap_requests/abandon.py @@ -22,9 +22,6 @@ class AbandonRequest(BaseRequest): def from_data(cls, data: dict[str, list[ASN1Row]]) -> "AbandonRequest": # noqa: ARG003 """Create structure from ASN1Row dataclass list. - Args: - data (dict[str, list[ASN1Row]]): data - Returns: AbandonRequest: Instance of AbandonRequest. """ diff --git a/app/ldap_protocol/ldap_requests/add.py b/app/ldap_protocol/ldap_requests/add.py index 81b341f3d..cae3028f4 100644 --- a/app/ldap_protocol/ldap_requests/add.py +++ b/app/ldap_protocol/ldap_requests/add.py @@ -92,9 +92,6 @@ def attributes_dict(self) -> dict[str, list[str | bytes]]: def from_data(cls, data: ASN1Row) -> "AddRequest": """Deserialize. - Args: - data (ASN1Row): data - Returns: AddRequest """ diff --git a/app/ldap_protocol/ldap_requests/bind.py b/app/ldap_protocol/ldap_requests/bind.py index 4940af155..9f8df1136 100644 --- a/app/ldap_protocol/ldap_requests/bind.py +++ b/app/ldap_protocol/ldap_requests/bind.py @@ -268,9 +268,6 @@ class UnbindRequest(BaseRequest): def from_data(cls, data: dict[str, list[ASN1Row]]) -> "UnbindRequest": # noqa: ARG003 """Unbind request has no body. - Args: - data (dict[str, list[ASN1Row]]): data - Returns: UnbindRequest """ diff --git a/app/ldap_protocol/ldap_requests/bind_methods/base.py b/app/ldap_protocol/ldap_requests/bind_methods/base.py index 2a47432ba..9c2f3c871 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/base.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/base.py @@ -94,11 +94,8 @@ def METHOD_ID(self) -> int: # noqa: N802 def is_valid(self, user: User) -> bool: """Validate state. - Args: - user (User): instance of User. - Returns: - bool: + bool: True if valid, False otherwise """ @abstractmethod @@ -133,9 +130,6 @@ class SaslAuthentication(AbstractLDAPAuth): def from_data(cls, data: list[ASN1Row]) -> "SaslAuthentication": """Get auth from data. - Args: - data (list[ASN1Row]): list of row with metadata. - Returns: SaslAuthentication: Sasl auth form. """ diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py index dfb0ac373..2009c5a17 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_gssapi.py @@ -82,9 +82,6 @@ class SaslGSSAPIAuthentication(SaslAuthentication): def is_valid(self, user: User | None) -> bool: # noqa: ARG002 """Check if GSSAPI token is valid. - Args: - user (User | None): indb user - Returns: bool: status """ @@ -164,14 +161,6 @@ def _handle_ticket( return GSSAPIAuthStatus.ERROR def _validate_security_layer(self, client_layer: GSSAPISL) -> bool: - """Validate security layer. - - Args: - client_layer (GSSAPISL): client security layer - - Returns: - bool: validate result - """ supported = GSSAPISL.SUPPORTED_SECURITY_LAYERS return (client_layer & supported) == client_layer diff --git a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py index 4bffa4e51..4171bec7c 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/sasl_plain.py @@ -26,11 +26,8 @@ class SaslPLAINAuthentication(SaslAuthentication): def is_valid(self, user: User | None) -> bool: """Check if pwd is valid for user. 
- Args: - user (User | None): in db user - Returns: - bool: status + bool: True if password is valid, False otherwise. """ password = getattr(user, "password", None) if password is not None: @@ -44,7 +41,7 @@ def is_anonymous(self) -> bool: """Check if auth is anonymous. Returns: - bool: status + bool: True if anonymous, False otherwise. """ return False @@ -52,9 +49,6 @@ def is_anonymous(self) -> bool: def from_data(cls, data: list[ASN1Row]) -> "SaslPLAINAuthentication": """Get auth from data. - Args: - data (list[ASN1Row]): data - Returns: SaslPLAINAuthentication """ diff --git a/app/ldap_protocol/ldap_requests/bind_methods/simple.py b/app/ldap_protocol/ldap_requests/bind_methods/simple.py index b7362b2d3..594f465b6 100644 --- a/app/ldap_protocol/ldap_requests/bind_methods/simple.py +++ b/app/ldap_protocol/ldap_requests/bind_methods/simple.py @@ -23,9 +23,6 @@ class SimpleAuthentication(AbstractLDAPAuth): def is_valid(self, user: User | None) -> bool: """Check if pwd is valid for user. - Args: - user (User | None): User object or None. - Returns: bool: status """ diff --git a/app/ldap_protocol/ldap_requests/delete.py b/app/ldap_protocol/ldap_requests/delete.py index 0d3696901..cc0145f4d 100644 --- a/app/ldap_protocol/ldap_requests/delete.py +++ b/app/ldap_protocol/ldap_requests/delete.py @@ -46,9 +46,6 @@ class DeleteRequest(BaseRequest): def from_data(cls, data: ASN1Row) -> "DeleteRequest": """Get delete request from data. - Args: - data (ASN1Row): ASN1Row containing the entry to delete. - Returns: DeleteRequest: Instance of DeleteRequest with the entry set. """ diff --git a/app/ldap_protocol/ldap_requests/extended.py b/app/ldap_protocol/ldap_requests/extended.py index 8e1f772a2..67e1f95f6 100644 --- a/app/ldap_protocol/ldap_requests/extended.py +++ b/app/ldap_protocol/ldap_requests/extended.py @@ -43,9 +43,6 @@ class BaseExtendedValue(ABC, BaseModel): def from_data(cls, data: ASN1Row) -> "BaseExtendedValue": """Create model from data, decoded from responseValue bytes. - Args: - data (ASN1Row): Row with metadata. - Returns: BaseExtendedValue: instance of BaseExtendedValue. """ @@ -74,9 +71,6 @@ async def handle( def _decode_value(data: ASN1Row) -> ASN1Row: """Decode value. - Args: - data (ASN1Row): Row with metadata. - Returns: ASN1Row: Decoded row with metadata """ @@ -125,9 +119,6 @@ class WhoAmIRequestValue(BaseExtendedValue): def from_data(cls, data: ASN1Row) -> "WhoAmIRequestValue": # noqa: ARG003 """Create model from data, WhoAmIRequestValue data is empty. - Args: - data: ASN1Row - Returns: WhoAmIRequestValue """ @@ -207,9 +198,6 @@ async def handle( def from_data(cls, data: ASN1Row) -> "StartTLSRequestValue": # noqa: ARG003 """Create model from data, decoded from responseValue bytes. - Args: - data: ASN1Row - Returns: StartTLSRequestValue """ @@ -335,9 +323,6 @@ async def handle( def from_data(cls, data: ASN1Row) -> "PasswdModifyRequestValue": """Create model from data, decoded from responseValue bytes. - Args: - data: ASN1Row - Returns: PasswdModifyRequestValue """ diff --git a/app/ldap_protocol/ldap_requests/modify.py b/app/ldap_protocol/ldap_requests/modify.py index c810351a3..bc734a8d8 100644 --- a/app/ldap_protocol/ldap_requests/modify.py +++ b/app/ldap_protocol/ldap_requests/modify.py @@ -112,9 +112,6 @@ class ModifyRequest(BaseRequest): def from_data(cls, data: list[ASN1Row]) -> "ModifyRequest": """Get modify request from data. 
- Args: - data (list[ASN1Row]): data - Returns: ModifyRequest: modify request """ @@ -305,11 +302,6 @@ def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: raise Exception def _get_dir_query(self) -> Select[tuple[Directory]]: - """Get directory query. - - Returns: - Select[tuple[Directory]]: SQLAlchemy select query. - """ return ( select(Directory) .join(Directory.attributes) diff --git a/app/ldap_protocol/ldap_requests/search.py b/app/ldap_protocol/ldap_requests/search.py index b493fae66..680db4164 100644 --- a/app/ldap_protocol/ldap_requests/search.py +++ b/app/ldap_protocol/ldap_requests/search.py @@ -114,9 +114,6 @@ def serialize_filter(self, val: ASN1Row | None, _info: Any) -> str | None: def from_data(cls, data: dict[str, list[ASN1Row]]) -> "SearchRequest": """Get search request from data. - Args: - data (dict[str, list[ASN1Row]]): data - Returns: SearchRequest: LDAP search request """ diff --git a/app/ldap_protocol/ldap_responses.py b/app/ldap_protocol/ldap_responses.py index bf0849c1e..9fcdceefe 100644 --- a/app/ldap_protocol/ldap_responses.py +++ b/app/ldap_protocol/ldap_responses.py @@ -55,11 +55,7 @@ def _get_asn1_fields(self) -> dict: return fields def to_asn1(self, enc: Encoder) -> None: - """Serialize flat structure to bytes, write to encoder buffer. - - Args: - enc (Encoder): encoder - """ + """Serialize flat structure to bytes, write to encoder buffer.""" for value in self._get_asn1_fields().values(): enc.write(value, type_map[type(value)]) @@ -85,11 +81,7 @@ class BindResponse(LDAPResult, BaseResponse): server_sasl_creds: bytes | None = Field(None, alias="serverSaslCreds") def to_asn1(self, enc: Encoder) -> None: - """Serialize flat structure to bytes, write to encoder buffer. - - Args: - enc (Encoder): encoder - """ + """Serialize flat structure to bytes, write to encoder buffer.""" enc.write(self.result_code, type_map[type(self.result_code)]) enc.write(self.matched_dn, type_map[type(self.matched_dn)]) enc.write(self.error_message, type_map[type(self.error_message)]) @@ -122,9 +114,6 @@ def l_name(self) -> str: def validate_type(cls, v: str | bytes | int) -> str: """Validate type. - Args: - v (str | bytes | int): value - Returns: str: value """ @@ -135,9 +124,6 @@ def validate_type(cls, v: str | bytes | int) -> str: def validate_vals(cls, vals: list[str | int | bytes]) -> list[str | bytes]: """Validate vals. - Args: - vals (list[str | int | bytes]): values - Returns: list[str | bytes]: values """ @@ -172,11 +158,7 @@ class SearchResultEntry(BaseResponse): partial_attributes: list[PartialAttribute] def to_asn1(self, enc: Encoder) -> None: - """Serialize search response structure to asn1 buffer. - - Args: - enc (Encoder): encoder - """ + """Serialize search response structure to asn1 buffer.""" enc.write(self.object_name, Numbers.OctetString) enc.enter(Numbers.Sequence) @@ -278,11 +260,7 @@ class ExtendedResponse(LDAPResult, BaseResponse): response_value: SerializeAsAny[BaseExtendedResponseValue] | None def to_asn1(self, enc: Encoder) -> None: - """Serialize flat structure to bytes, write to encoder buffer. 
- - Args: - enc (Encoder): encoder - """ + """Serialize flat structure to bytes, write to encoder buffer.""" enc.write(self.result_code, type_map[type(self.result_code)]) enc.write(self.matched_dn, type_map[type(self.matched_dn)]) enc.write(self.error_message, type_map[type(self.error_message)]) diff --git a/app/ldap_protocol/ldap_schema/attribute_type_dao.py b/app/ldap_protocol/ldap_schema/attribute_type_dao.py index d343118d8..2d382cb71 100644 --- a/app/ldap_protocol/ldap_schema/attribute_type_dao.py +++ b/app/ldap_protocol/ldap_schema/attribute_type_dao.py @@ -35,9 +35,6 @@ class AttributeTypeSchema(BaseSchemaModel): def from_db(cls, attribute_type: AttributeType) -> "AttributeTypeSchema": """Create an instance from database. - Args: - attribute_type (AttributeType): instance of AttributeType - Returns: AttributeTypeSchema: serialized AttributeType. """ @@ -71,11 +68,7 @@ class AttributeTypeDAO: _session: AsyncSession def __init__(self, session: AsyncSession) -> None: - """Initialize Attribute Type DAO with session. - - Args: - session (AsyncSession): async db session. - """ + """Initialize Attribute Type DAO with session.""" self._session = session async def get_paginator( diff --git a/app/ldap_protocol/ldap_schema/entity_type_dao.py b/app/ldap_protocol/ldap_schema/entity_type_dao.py index b1df08d24..6d79a7c33 100644 --- a/app/ldap_protocol/ldap_schema/entity_type_dao.py +++ b/app/ldap_protocol/ldap_schema/entity_type_dao.py @@ -32,9 +32,6 @@ class EntityTypeSchema(BaseModel): def from_db(cls, entity_type: EntityType) -> "EntityTypeSchema": """Create an instance of Entity Type Schema from SQLA object. - Args: - entity_type (EntityType): Instance of Entity Type. - Returns: EntityTypeSchema: Instance of Entity Type Schema. """ @@ -64,11 +61,7 @@ class EntityTypeDAO: _session: AsyncSession def __init__(self, session: AsyncSession) -> None: - """Initialize Entity Type DAO with a database session. - - Args: - session (AsyncSession): async db session. - """ + """Initialize Entity Type DAO with a database session.""" self._session = session async def get_paginator( diff --git a/app/ldap_protocol/ldap_schema/object_class_dao.py b/app/ldap_protocol/ldap_schema/object_class_dao.py index 714154f38..f576ee753 100644 --- a/app/ldap_protocol/ldap_schema/object_class_dao.py +++ b/app/ldap_protocol/ldap_schema/object_class_dao.py @@ -40,9 +40,6 @@ class ObjectClassSchema(BaseSchemaModel): def from_db(cls, object_class: ObjectClass) -> "ObjectClassSchema": """Create an instance of Object Class Schema from SQLA object. - Args: - object_class (ObjectClass): source - Returns: ObjectClassSchema: instance of ObjectClassSchema. """ diff --git a/app/ldap_protocol/policies/access_policy.py b/app/ldap_protocol/policies/access_policy.py index a0982ec72..9014b2a50 100644 --- a/app/ldap_protocol/policies/access_policy.py +++ b/app/ldap_protocol/policies/access_policy.py @@ -27,9 +27,6 @@ async def get_policies(session: AsyncSession) -> list[AccessPolicy]: """Get policies. - Args: - session (AsyncSession): db - Returns: list[AccessPolicy]: result """ diff --git a/app/ldap_protocol/policies/password_policy.py b/app/ldap_protocol/policies/password_policy.py index ecadab424..3a2204949 100644 --- a/app/ldap_protocol/policies/password_policy.py +++ b/app/ldap_protocol/policies/password_policy.py @@ -72,14 +72,6 @@ class PasswordPolicySchema(BaseModel): @model_validator(mode="after") def _validate_minimum_pwd_age(self) -> "PasswordPolicySchema": - """Validate minimum password age. 
- - Returns: - self - - Raises: - ValueError: not valid - """ if self.minimum_password_age_days > self.maximum_password_age_days: raise ValueError( "Minimum password age days must be " @@ -113,9 +105,6 @@ async def get_policy_settings( ) -> "PasswordPolicySchema": """Get policy settings. - Args: - session (AsyncSession): db - Returns: PasswordPolicySchema: policy """ @@ -125,11 +114,7 @@ async def get_policy_settings( return cls.model_validate(policy, from_attributes=True) async def update_policy_settings(self, session: AsyncSession) -> None: - """Update policy. - - Args: - session (AsyncSession): db - """ + """Update policy.""" await session.execute( (update(PasswordPolicy).values(self.model_dump(mode="json"))), ) @@ -156,9 +141,6 @@ async def delete_policy_settings( def _count_password_exists_days(last_pwd_set: Attribute) -> int: """Get number of days, pwd exists. - Args: - last_pwd_set (Attribute): pwdLastSet - Returns: int: count of days """ diff --git a/app/ldap_protocol/server.py b/app/ldap_protocol/server.py index 484984957..f94923ecd 100644 --- a/app/ldap_protocol/server.py +++ b/app/ldap_protocol/server.py @@ -365,12 +365,7 @@ async def _unwrap_request( @staticmethod def _req_log_full(addr: str, msg: LDAPRequestMessage) -> None: - """Request full log. - - Args: - addr (str): address - msg (LDAPRequestMessage): message - """ + """Request full log.""" log.debug( f"\nFrom: {addr!r}\n{msg.name}[{msg.message_id}]: " f"{msg.model_dump_json()}\n", @@ -378,12 +373,7 @@ def _req_log_full(addr: str, msg: LDAPRequestMessage) -> None: @staticmethod def _resp_log_full(addr: str, msg: LDAPResponseMessage) -> None: - """Response full log. - - Args: - addr (str): address - msg (LDAPResponseMessage): message - """ + """Response full log.""" log.debug( f"\nTo: {addr!r}\n{msg.name}[{msg.message_id}]: " f"{msg.model_dump_json()}"[:3000], @@ -391,12 +381,7 @@ def _resp_log_full(addr: str, msg: LDAPResponseMessage) -> None: @staticmethod def _log_short(addr: str, msg: LDAPMessage) -> None: - """Short log. - - Args: - addr (str): address - msg (LDAPMessage): message - """ + """Short log.""" log.info(f"\n{addr!r}: {msg.name}[{msg.message_id}]\n") async def _handle_single_response( @@ -519,21 +504,13 @@ async def _get_server(self) -> asyncio.base_events.Server: @staticmethod async def _run_server(server: asyncio.base_events.Server) -> None: - """Run server. - - Args: - server (asyncio.base_events.Server): async server - """ + """Run server.""" async with server: await server.serve_forever() @staticmethod def log_addrs(server: asyncio.base_events.Server) -> None: - """Log server addresses. - - Args: - server (asyncio.base_events.Server): async server - """ + """Log server addresses.""" addrs = ", ".join(str(sock.getsockname()) for sock in server.sockets) log.info(f"Server on {addrs}") diff --git a/app/ldap_protocol/session_storage.py b/app/ldap_protocol/session_storage.py index 29f93edb0..86afaf0a1 100644 --- a/app/ldap_protocol/session_storage.py +++ b/app/ldap_protocol/session_storage.py @@ -42,22 +42,11 @@ async def get(self, key: str) -> dict: @abstractmethod async def _get_session_keys_by_uid(self, uid: int) -> set[str]: - """Get session keys by user id. - - Args: - uid (int): uid. - - Returns: - set[str]: session keys - """ + """Get session keys by user id.""" @abstractmethod async def _get_session_keys_by_ip(self, ip: str) -> set[str]: - """Get session keys by ip. - - Args: - ip (str): IP address. 
- """ + """Get session keys by ip.""" @abstractmethod async def get_user_sessions( @@ -97,19 +86,11 @@ async def get_ip_sessions( @abstractmethod async def clear_user_sessions(self, uid: int) -> None: - """Clear user sessions. - - Args: - uid (int): user id - """ + """Clear user sessions.""" @abstractmethod async def delete_user_session(self, session_id: str) -> None: - """Delete user session. - - Args: - session_id (str): session id - """ + """Delete user session.""" @staticmethod def _sign(session_id: str, settings: Settings) -> str: @@ -131,9 +112,6 @@ def _sign(session_id: str, settings: Settings) -> str: def get_user_agent_hash(self, user_agent: str) -> str: """Get user agent hash. - Args: - user_agent (str): user agent - Returns: str: The hash of the user agent. """ @@ -164,14 +142,6 @@ def _get_user_session_key(self, uid: int, protocol: ProtocolType) -> str: return f"keys:{protocol}:{uid}" def _get_protocol(self, session_id: str) -> ProtocolType: - """Get protocol. - - Args: - session_id (str): Session id - - Returns: - ProtocolType: Protocol type for given session_id - """ return "http" if session_id.startswith("http:") else "ldap" def _generate_key(self) -> str: @@ -183,14 +153,6 @@ def _generate_key(self) -> str: return f"http:{token_hex(self.key_length)}" def _get_lock_key(self, session_id: str) -> str: - """Get lock key. - - Args: - session_id (str): session id - - Returns: - str: lock key - """ return f"lock:{session_id}" @abstractmethod @@ -292,9 +254,6 @@ def _generate_session_data( async def check_session(self, session_id: str) -> bool: """Check session. - Args: - session_id (str): session id - Returns: bool: True if session exists """ @@ -436,19 +395,12 @@ async def get(self, key: str) -> dict: return json.loads(data) async def delete(self, keys: Iterable[str]) -> None: - """Delete data associated with the given key from storage. - - Args: - keys (Iterable[str]): The keys to delete from the storage. - """ + """Delete data associated with the given key from storage.""" await self._storage.delete(*keys) async def _fetch_keys(self, key: str) -> set[str]: """Fetch keys. - Args: - key (str): key - Returns: set[str]: A set of decoded keys from the storage. """ @@ -798,9 +750,6 @@ async def create_session( async def check_session(self, session_id: str) -> bool: """Check session. - Args: - session_id (str): session id - Returns: bool: True if exists. """ diff --git a/app/ldap_protocol/utils/helpers.py b/app/ldap_protocol/utils/helpers.py index ab3129174..1b8d82925 100644 --- a/app/ldap_protocol/utils/helpers.py +++ b/app/ldap_protocol/utils/helpers.py @@ -191,9 +191,6 @@ def dn_is_base_directory(base_directory: Directory, entry: str) -> bool: def get_generalized_now(tz: ZoneInfo) -> str: """Get generalized time (formated) with tz. - Args: - tz (ZoneInfo): timezone - Returns: str: generalized time """ @@ -203,9 +200,6 @@ def get_generalized_now(tz: ZoneInfo) -> str: def _get_domain(name: str) -> str: """Get domain from name. - Args: - name (str): directory path - Returns: str: domain """ @@ -234,9 +228,6 @@ def create_integer_hash(text: str, size: int = 9) -> int: def get_windows_timestamp(value: datetime) -> int: """Get the Windows timestamp from the value. 
- Args: - value (datetime): date and time - Returns: int: Windows timestamp """ diff --git a/app/ldap_protocol/utils/queries.py b/app/ldap_protocol/utils/queries.py index d905aa3ad..e50e37a36 100644 --- a/app/ldap_protocol/utils/queries.py +++ b/app/ldap_protocol/utils/queries.py @@ -30,9 +30,6 @@ async def get_base_directories(session: AsyncSession) -> list[Directory]: """Get base domain directories. - Args: - session (AsyncSession): sqlalchemy session - Returns: list[Directory]: base domain directories """ @@ -209,9 +206,6 @@ async def set_last_logon_user( def get_search_path(dn: str) -> list[str]: """Get search path for dn. - Args: - dn (str): any DN, dn syntax - Returns: list[str]: reversed list of dn values """ diff --git a/app/ldap_protocol/utils/raw_definition_parser.py b/app/ldap_protocol/utils/raw_definition_parser.py index 9599c4e34..114cc6d46 100644 --- a/app/ldap_protocol/utils/raw_definition_parser.py +++ b/app/ldap_protocol/utils/raw_definition_parser.py @@ -37,9 +37,6 @@ def _list_to_string(data: list[str]) -> str | None: def _get_attribute_type_info(raw_definition: str) -> AttributeTypeInfo: """Get attribute type info. - Args: - raw_definition (str): raw definition of attribute type - Returns: AttributeTypeInfo: parsed attribute type info """ @@ -50,9 +47,6 @@ def _get_attribute_type_info(raw_definition: str) -> AttributeTypeInfo: def get_object_class_info(raw_definition: str) -> ObjectClassInfo: """Get object class info. - Args: - raw_definition (str): raw definition of object class - Returns: ObjectClassInfo: parsed object class info """ diff --git a/app/models.py b/app/models.py index a1186063e..8183275a0 100644 --- a/app/models.py +++ b/app/models.py @@ -201,9 +201,6 @@ def object_class_names_set(self) -> set[str]: def generate_entity_type_name(cls, directory: Directory) -> str: """Generate entity type name based on Directory. - Args: - directory (Directory): instance of Directory. - Returns: str: entity type name. """ diff --git a/app/multidirectory.py b/app/multidirectory.py index 7af2bc54c..7debb6e5c 100644 --- a/app/multidirectory.py +++ b/app/multidirectory.py @@ -95,9 +95,6 @@ async def _lifespan(app: FastAPI) -> AsyncIterator[None]: def _create_basic_app(settings: Settings) -> FastAPI: """Create basic FastAPI app with dependencies overrides. - Args: - settings (Settings): Settings with database dsn. - Returns: FastAPI: Configured FastAPI application. """ @@ -149,9 +146,6 @@ def _create_basic_app(settings: Settings) -> FastAPI: def _create_shadow_app(settings: Settings) -> FastAPI: """Create shadow FastAPI app for shadow. - Args: - settings (Settings): Settings with database dsn. - Returns: FastAPI: Configured FastAPI application for shadow API. """ @@ -197,11 +191,7 @@ def create_prod_app( def ldap(settings: Settings) -> None: - """Run server. - - Args: - settings (Settings): Settings with database dsn. - """ + """Run server.""" async def _servers(settings: Settings) -> None: servers = [] diff --git a/app/schedule.py b/app/schedule.py index 0e029e396..27cd13412 100644 --- a/app/schedule.py +++ b/app/schedule.py @@ -51,11 +51,7 @@ async def _schedule( def scheduler(settings: Settings) -> None: - """Sript entrypoint. - - Args: - settings (Settings): Settings with database dsn. 
- """ + """Sript entrypoint.""" async def runner(settings: Settings) -> None: container = make_async_container( diff --git a/tests/conftest.py b/tests/conftest.py index aecabe675..c085aff7c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -184,11 +184,7 @@ def get_entity_type_dao( @provide(scope=Scope.RUNTIME, provides=AsyncEngine) def get_engine(self, settings: Settings) -> AsyncEngine: - """Get async engine. - - Args: - settings (Settings): Settings with database dsn. - """ + """Get async engine.""" return create_async_engine(str(settings.POSTGRES_URI), pool_size=10) @provide(scope=Scope.APP, provides=async_sessionmaker[AsyncSession]) @@ -374,20 +370,12 @@ async def _migrations( config.attributes["app_settings"] = settings def upgrade(conn: AsyncConnection) -> None: - """Run up migrations. - - Args: - conn (AsyncConnection): connection - """ + """Run up migrations.""" config.attributes["connection"] = conn command.upgrade(config, "head") def downgrade(conn: AsyncConnection) -> None: - """Run down migrations. - - Args: - conn (AsyncConnection): connection - """ + """Run down migrations.""" config.attributes["connection"] = conn command.downgrade(config, "base") @@ -595,11 +583,7 @@ async def http_client( @pytest.fixture def creds(user: dict) -> TestCreds: - """Get creds from test data. - - Args: - user (dict): user data - """ + """Get creds from test data.""" return TestCreds(user["sam_accout_name"], user["password"]) @@ -611,11 +595,7 @@ def user() -> dict: @pytest.fixture def _force_override_tls(settings: Settings) -> Iterator: - """Override tls status for tests. - - Args: - settings (Settings): Settings with database dsn. - """ + """Override tls status for tests.""" current_status = settings.USE_CORE_TLS settings.USE_CORE_TLS = True yield diff --git a/tests/test_api/test_main/test_kadmin.py b/tests/test_api/test_main/test_kadmin.py index 33895d12e..07bc8c058 100644 --- a/tests/test_api/test_main/test_kadmin.py +++ b/tests/test_api/test_main/test_kadmin.py @@ -485,12 +485,7 @@ async def test_delete_princ( @pytest.mark.usefixtures("session") @pytest.mark.usefixtures("setup_session") async def test_admin_incorrect_pw_setup(http_client: AsyncClient) -> None: - """Test setup args. - - Args: - http_client (AsyncClient): http cl - ldap_session (LDAPSession): ldap - """ + """Test setup args.""" response = await http_client.get("/kerberos/status") assert response.status_code == status.HTTP_200_OK assert response.json() == KerberosState.NOT_CONFIGURED From 2ec3c914c17f31abdf89a63cfc32869c6d3cdb17 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 23 Jun 2025 12:03:43 +0300 Subject: [PATCH 21/25] refactor: fix nopa task_508 --- app/api/auth/schema.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/api/auth/schema.py b/app/api/auth/schema.py index e62e483cb..e15337f75 100644 --- a/app/api/auth/schema.py +++ b/app/api/auth/schema.py @@ -71,7 +71,8 @@ class SetupRequest(BaseModel): password: str @field_validator("domain") - def validate_domain(cls, v: str) -> str: # noqa FIXME why noqa? + @classmethod + def validate_domain(cls, v: str) -> str: """Validate domain. 
Args: From 5f3c9d7000e2da8d322096222325cc6d1defda00 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 23 Jun 2025 12:09:14 +0300 Subject: [PATCH 22/25] refactor: fix noqa task_508 --- app/config.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/config.py b/app/config.py index 861119d79..835f67bfe 100644 --- a/app/config.py +++ b/app/config.py @@ -126,7 +126,8 @@ def POSTGRES_URI(self) -> PostgresDsn: # noqa GSSAPI_MAX_OUTPUT_TOKEN_SIZE: int = 1024 @field_validator("TIMEZONE", mode="before") - def create_tz(cls, tz: str) -> ZoneInfo: # noqa: N805 + @classmethod + def create_tz(cls, tz: str) -> ZoneInfo: """Get timezone from a string. Args: From a9fa9c97c9103418db1b9b46330dea4315814fc9 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 23 Jun 2025 12:30:34 +0300 Subject: [PATCH 23/25] refactor: fix docstrings task_508 --- .kerberos/config_server.py | 8 +------ app/api/auth/router.py | 7 +----- app/api/auth/router_pwd_policy.py | 6 ----- app/api/auth/schema.py | 3 --- app/api/main/ap_router.py | 2 -- app/api/network/router.py | 2 -- app/api/network/schema.py | 6 ----- app/api/network/utils.py | 3 --- app/config.py | 2 +- app/extra/alembic_utils.py | 3 --- .../scripts/principal_block_user_sync.py | 3 --- app/ioc.py | 24 ------------------- app/ldap_protocol/asn1parser.py | 3 --- app/ldap_protocol/dialogue.py | 3 --- app/ldap_protocol/dns.py | 6 ----- app/ldap_protocol/kerberos/client.py | 12 ---------- app/ldap_protocol/ldap_requests/bind.py | 6 ----- app/ldap_protocol/ldap_requests/modify.py | 3 --- .../ldap_schema/attribute_type_dao.py | 8 +------ .../ldap_schema/entity_type_dao.py | 9 ------- .../ldap_schema/object_class_dao.py | 15 ------------ app/ldap_protocol/policies/password_policy.py | 12 ---------- app/ldap_protocol/utils/const.py | 3 --- app/ldap_protocol/utils/helpers.py | 3 --- app/ldap_protocol/utils/queries.py | 3 --- .../utils/raw_definition_parser.py | 3 --- app/multidirectory.py | 3 --- app/security.py | 3 --- 28 files changed, 4 insertions(+), 160 deletions(-) diff --git a/.kerberos/config_server.py b/.kerberos/config_server.py index 802ea6a82..da906fe9b 100644 --- a/.kerberos/config_server.py +++ b/.kerberos/config_server.py @@ -324,9 +324,6 @@ async def force_pw_principal(self, name: str, **dbargs) -> None: async def kadmin_lifespan(app: FastAPI) -> AsyncIterator[None]: """Create kadmin instance. - Args: - app (FastAPI): FastAPI app - Yields: AsyncIterator[None]: Async iterator """ @@ -685,11 +682,8 @@ def get_status(request: Request) -> bool: true - is ready false - not set - Args: - request (Request): http request - Returns: - bool + bool: True if kadmin is ready, False otherwise. """ kadmind = getattr(request.app.state, "kadmind", None) diff --git a/app/api/auth/router.py b/app/api/auth/router.py index dec2c8168..8cd173942 100644 --- a/app/api/auth/router.py +++ b/app/api/auth/router.py @@ -244,13 +244,8 @@ async def password_reset( async def check_setup(session: FromDishka[AsyncSession]) -> bool: """Check if initial setup needed. - True if setup already complete, False if setup is needed. - - Args: - session (FromDishka[AsyncSession]): Database session. - Returns: - bool: True if setup is complete, False if setup is needed. + bool: True if setup already complete, False if setup is needed. 
""" query = select(exists(Directory).where(Directory.parent_id.is_(None))) retval = await session.scalars(query) diff --git a/app/api/auth/router_pwd_policy.py b/app/api/auth/router_pwd_policy.py index aecf8a2d2..d03db21bb 100644 --- a/app/api/auth/router_pwd_policy.py +++ b/app/api/auth/router_pwd_policy.py @@ -43,9 +43,6 @@ async def get_policy( ) -> PasswordPolicySchema: """Get current policy setting. - Args: - session (AsyncSession): Database session. - Returns: PasswordPolicySchema: Current password policy schema. """ @@ -76,9 +73,6 @@ async def reset_policy( ) -> PasswordPolicySchema: """Reset current policy setting. - Args: - session (AsyncSession): Database session. - Returns: PasswordPolicySchema: Reset password policy schema. """ diff --git a/app/api/auth/schema.py b/app/api/auth/schema.py index e15337f75..6aed7961c 100644 --- a/app/api/auth/schema.py +++ b/app/api/auth/schema.py @@ -75,9 +75,6 @@ class SetupRequest(BaseModel): def validate_domain(cls, v: str) -> str: """Validate domain. - Args: - v (str): value - Returns: str: Validated domain string. diff --git a/app/api/main/ap_router.py b/app/api/main/ap_router.py index 02d1458f0..38eeedd8f 100644 --- a/app/api/main/ap_router.py +++ b/app/api/main/ap_router.py @@ -27,8 +27,6 @@ async def get_access_policies( """Get APs. \f - Args: - session (FromDishka[AsyncSession]): db. Returns: list[MaterialAccessPolicySchema]: list of access policies. diff --git a/app/api/network/router.py b/app/api/network/router.py index dee5e8d60..c437dc7d5 100644 --- a/app/api/network/router.py +++ b/app/api/network/router.py @@ -116,8 +116,6 @@ async def get_list_network_policies( """Get network. \f - Args: - session (AsyncSession): Database session Returns: list[PolicyResponse]: List of policies with their details. diff --git a/app/api/network/schema.py b/app/api/network/schema.py index 3de74d32a..37eb3d4fc 100644 --- a/app/api/network/schema.py +++ b/app/api/network/schema.py @@ -60,9 +60,6 @@ def complete_netmasks(self) -> list[IPv4Address | IPv4Network]: def validate_group(cls, groups: list[str]) -> list[str]: """Validate groups. - Args: - groups (list[str]): groups names - Returns: list[str]: groups @@ -81,9 +78,6 @@ def validate_group(cls, groups: list[str]) -> list[str]: def validate_mfa_group(cls, mfa_groups: list[str]) -> list[str]: """Validate mfa groups. - Args: - mfa_groups (list[str]): mfa groups names - Returns: list[str]: mfa groups diff --git a/app/api/network/utils.py b/app/api/network/utils.py index 77db7dda3..a804bb997 100644 --- a/app/api/network/utils.py +++ b/app/api/network/utils.py @@ -14,9 +14,6 @@ async def check_policy_count(session: AsyncSession) -> None: """Check if policy count euqals 1. - Args: - session (AsyncSession): db - Raises: HTTPException: 422 """ diff --git a/app/config.py b/app/config.py index 835f67bfe..0bd32688c 100644 --- a/app/config.py +++ b/app/config.py @@ -131,7 +131,7 @@ def create_tz(cls, tz: str) -> ZoneInfo: """Get timezone from a string. Args: - tz (str): timezone + tz (str): string timezone Returns: ZoneInfo: diff --git a/app/extra/alembic_utils.py b/app/extra/alembic_utils.py index 656bba8a6..757681209 100644 --- a/app/extra/alembic_utils.py +++ b/app/extra/alembic_utils.py @@ -21,9 +21,6 @@ def temporary_stub_entity_type_id(func: Callable) -> Callable: that precede the 'ba78cef9700a_initial_entity_type.py' migration and include working with the Directory. 
- Args: - func (Callable): any function - Returns: Callable: any function """ diff --git a/app/extra/scripts/principal_block_user_sync.py b/app/extra/scripts/principal_block_user_sync.py index 5ad395cc9..70b1653e9 100644 --- a/app/extra/scripts/principal_block_user_sync.py +++ b/app/extra/scripts/principal_block_user_sync.py @@ -96,9 +96,6 @@ async def principal_block_sync( def _find_krb_exp_attr(directory: Directory) -> Attribute | None: """Find krbprincipalexpiration attribute in directory. - Args: - directory (Directory): directory object - Returns: Attribute | None: the attribute with the name 'krbprincipalexpiration', or None if not found. diff --git a/app/ioc.py b/app/ioc.py index 4e19b3149..a80504c03 100644 --- a/app/ioc.py +++ b/app/ioc.py @@ -76,9 +76,6 @@ def get_session_factory( ) -> async_sessionmaker[AsyncSession]: """Create session factory. - Args: - engine (AsyncEngine): Async Engine. - Returns: async_sessionmaker[AsyncSession]: """ @@ -91,9 +88,6 @@ async def create_session( ) -> AsyncIterator[AsyncSession]: """Create session for request. - Args: - async_session (async_sessionmaker[AsyncSession]): async db session - Yields: AsyncIterator[AsyncSession] """ @@ -108,9 +102,6 @@ async def get_krb_class( ) -> type[AbstractKadmin]: """Get kerberos type. - Args: - session_maker (async_sessionmaker[AsyncSession]): session maker - Returns: type[AbstractKadmin]: kerberos class """ @@ -166,9 +157,6 @@ async def get_dns_mngr_class( ) -> type[AbstractDNSManager]: """Get DNS manager type. - Args: - session_maker (async_sessionmaker[AsyncSession]): session maker - Returns: type[AbstractDNSManager]: DNS manager class """ @@ -290,9 +278,6 @@ def get_attribute_type_dao( ) -> AttributeTypeDAO: """Get Attribute Type DAO. - Args: - session (AsyncSession): async db session - Returns: AttributeTypeDAO: Attribute Type DAO. """ @@ -305,9 +290,6 @@ def get_object_class_dao( ) -> ObjectClassDAO: """Get Object Class DAO. - Args: - session (AsyncSession): async db session. - Returns: ObjectClassDAO: Object Class DAO. """ @@ -324,9 +306,6 @@ def get_entity_type_dao( ) -> EntityTypeDAO: """Get Entity Type DAO. - Args: - session (AsyncSession): async db session. - Returns: EntityTypeDAO: Entity Type DAO. """ @@ -384,9 +363,6 @@ async def get_client( ) -> AsyncIterator[MFAHTTPClient]: """Get async client for DI. - Args: - settings: Settings with database dsn. - Yields: AsyncIterator[MFAHTTPClient]. """ diff --git a/app/ldap_protocol/asn1parser.py b/app/ldap_protocol/asn1parser.py index 2ac8fc08f..44e497f4c 100644 --- a/app/ldap_protocol/asn1parser.py +++ b/app/ldap_protocol/asn1parser.py @@ -326,9 +326,6 @@ def asn1todict(decoder: Decoder) -> list[ASN1Row]: def _validate_oid(oid: str) -> str: """Validate ldap oid with regex. - Args: - oid (str): oid - Returns: str: diff --git a/app/ldap_protocol/dialogue.py b/app/ldap_protocol/dialogue.py index f82645248..d95bb7e52 100644 --- a/app/ldap_protocol/dialogue.py +++ b/app/ldap_protocol/dialogue.py @@ -126,9 +126,6 @@ def user(self) -> UserSchema | None: def user(self, user: User) -> None: """User setter. - Args: - user (User): instance of User - Raises: NotImplementedError: Cannot manually set user """ diff --git a/app/ldap_protocol/dns.py b/app/ldap_protocol/dns.py index 4c5d96722..8f025de13 100644 --- a/app/ldap_protocol/dns.py +++ b/app/ldap_protocol/dns.py @@ -426,9 +426,6 @@ async def get_dns_state( ) -> "DNSManagerState": """Get or create DNS manager state. - Args: - session (AsyncSession): Database session. 
- Returns: DNSManagerState: Current DNS manager state. """ @@ -528,9 +525,6 @@ async def get_dns_manager_class( ) -> type[AbstractDNSManager]: """Get DNS manager class. - Args: - session (AsyncSession): Database session. - Returns: type[AbstractDNSManager]: DNS manager class type. """ diff --git a/app/ldap_protocol/kerberos/client.py b/app/ldap_protocol/kerberos/client.py index 53910ac0b..b1e099826 100644 --- a/app/ldap_protocol/kerberos/client.py +++ b/app/ldap_protocol/kerberos/client.py @@ -43,9 +43,6 @@ async def add_principal( async def get_principal(self, name: str) -> dict: """Get principal. - Args: - name (str): principal name - Returns: dict @@ -62,9 +59,6 @@ async def get_principal(self, name: str) -> dict: async def del_principal(self, name: str) -> None: """Delete principal. - Args: - name (str): principal name - Raises: KRBAPIError: API error """ @@ -162,9 +156,6 @@ async def ktadd(self, names: list[str]) -> httpx.Response: async def lock_principal(self, name: str) -> None: """Lock principal. - Args: - name (str): user principal name - Raises: KRBAPIError: API error """ @@ -179,9 +170,6 @@ async def lock_principal(self, name: str) -> None: async def force_princ_pw_change(self, name: str) -> None: """Force mark password change for principal. - Args: - name (str): user principal name - Raises: KRBAPIError: API error """ diff --git a/app/ldap_protocol/ldap_requests/bind.py b/app/ldap_protocol/ldap_requests/bind.py index 9f8df1136..6e758d369 100644 --- a/app/ldap_protocol/ldap_requests/bind.py +++ b/app/ldap_protocol/ldap_requests/bind.py @@ -61,9 +61,6 @@ class BindRequest(BaseRequest): def from_data(cls, data: list[ASN1Row]) -> "BindRequest": """Get bind from data dict. - Args: - data (list[ASN1Row]): data - Returns: BindRequest @@ -279,9 +276,6 @@ async def handle( ) -> AsyncGenerator[BaseResponse, None]: """Handle unbind request, no need to send response. - Args: - ldap_session (LDAPSession): ldap session - Yields: AsyncGenerator[BaseResponse, None] """ diff --git a/app/ldap_protocol/ldap_requests/modify.py b/app/ldap_protocol/ldap_requests/modify.py index bc734a8d8..82a6b05d0 100644 --- a/app/ldap_protocol/ldap_requests/modify.py +++ b/app/ldap_protocol/ldap_requests/modify.py @@ -272,9 +272,6 @@ async def handle( def _match_bad_response(self, err: BaseException) -> tuple[LDAPCodes, str]: """Match bad response. - Args: - err (BaseException): error - Returns: tuple[LDAPCodes, str]: result code and message diff --git a/app/ldap_protocol/ldap_schema/attribute_type_dao.py b/app/ldap_protocol/ldap_schema/attribute_type_dao.py index 2d382cb71..bf6fd7d5a 100644 --- a/app/ldap_protocol/ldap_schema/attribute_type_dao.py +++ b/app/ldap_protocol/ldap_schema/attribute_type_dao.py @@ -78,7 +78,7 @@ async def get_paginator( """Retrieve paginated attribute_types. Args: - params (PaginationParams): page_size and page_number. + params (PaginationParams): parameters for pagination. Returns: PaginationResult: Chunk of attribute_types and metadata. @@ -125,9 +125,6 @@ async def get_one_by_name( ) -> AttributeType: """Get single Attribute Type by name. - Args: - attribute_type_name (str): Attribute Type name. - Returns: AttributeType: Attribute Type. @@ -152,9 +149,6 @@ async def get_all_by_names( ) -> list[AttributeType]: """Get list of Attribute Types by names. - Args: - attribute_type_names (list[str]): Attribute Type names. - Returns: list[AttributeType]: List of Attribute Types. 
""" diff --git a/app/ldap_protocol/ldap_schema/entity_type_dao.py b/app/ldap_protocol/ldap_schema/entity_type_dao.py index 6d79a7c33..8d981772c 100644 --- a/app/ldap_protocol/ldap_schema/entity_type_dao.py +++ b/app/ldap_protocol/ldap_schema/entity_type_dao.py @@ -70,9 +70,6 @@ async def get_paginator( ) -> PaginationResult: """Retrieve paginated Entity Types. - Args: - params (PaginationParams): page_size and page_number. - Returns: PaginationResult: Chunk of Entity Types and metadata. """ @@ -109,9 +106,6 @@ async def get_one_by_name( ) -> EntityType: """Get single Entity Type by name. - Args: - entity_type_name (str): Entity Type name. - Returns: EntityType: Instance of Entity Type. @@ -136,9 +130,6 @@ async def get_entity_type_by_object_class_names( ) -> EntityType | None: """Get single Entity Type by object class names. - Args: - object_class_names (Iterable[str]): object class names. - Returns: EntityType | None: Instance of Entity Type or None. """ diff --git a/app/ldap_protocol/ldap_schema/object_class_dao.py b/app/ldap_protocol/ldap_schema/object_class_dao.py index f576ee753..65b76264e 100644 --- a/app/ldap_protocol/ldap_schema/object_class_dao.py +++ b/app/ldap_protocol/ldap_schema/object_class_dao.py @@ -93,9 +93,6 @@ async def get_paginator( ) -> PaginationResult: """Retrieve paginated Object Classes. - Args: - params (PaginationParams): page_size and page_number. - Returns: PaginationResult: Chunk of object_classes and metadata. """ @@ -171,9 +168,6 @@ async def count_exists_object_class_by_names( ) -> int: """Count exists Object Class by names. - Args: - object_class_names (list[str]): object class names - Returns: int: count of object classes """ @@ -191,9 +185,6 @@ async def is_all_object_classes_exists( ) -> Literal[True]: """Check if all Object Classes exist. - Args: - object_class_names (list[str]): object class names - Returns: Literal[True]: True if all object classes found. @@ -218,9 +209,6 @@ async def get_one_by_name( ) -> ObjectClass: """Get single Object Class by name. - Args: - object_class_name (str): Object Class name. - Returns: ObjectClass: Object Class. @@ -245,9 +233,6 @@ async def get_all_by_names( ) -> list[ObjectClass]: """Get list of Object Classes by names. - Args: - object_class_names (list[str]): object class names - Returns: list[ObjectClass]: List of Object Classes. """ diff --git a/app/ldap_protocol/policies/password_policy.py b/app/ldap_protocol/policies/password_policy.py index 3a2204949..79c833ac3 100644 --- a/app/ldap_protocol/policies/password_policy.py +++ b/app/ldap_protocol/policies/password_policy.py @@ -82,9 +82,6 @@ def _validate_minimum_pwd_age(self) -> "PasswordPolicySchema": async def create_policy_settings(self, session: AsyncSession) -> Self: """Create policies settings. - Args: - session (AsyncSession): db session - Returns: Self: Serialized password policy. @@ -127,9 +124,6 @@ async def delete_policy_settings( ) -> "PasswordPolicySchema": """Reset (delete) default policy. - Args: - session (AsyncSession): db - Returns: PasswordPolicySchema: schema policy """ @@ -191,9 +185,6 @@ async def get_pwd_last_set( def validate_min_age(self, last_pwd_set: Attribute) -> bool: """Validate min password change age. - Args: - last_pwd_set (Attribute): last pwd set - Returns: bool: can change pwd True - not valid, can not change False - valid, can change on minimum_password_age_days can always change. 
@@ -208,9 +199,6 @@ def validate_min_age(self, last_pwd_set: Attribute) -> bool: def validate_max_age(self, last_pwd_set: Attribute) -> bool: """Validate max password change age. - Args: - last_pwd_set (Attribute): last pwd set - Returns: bool: is pwd expired True - not valid, expired False - valid, not expired on maximum_password_age_days always valid. diff --git a/app/ldap_protocol/utils/const.py b/app/ldap_protocol/utils/const.py index 2abe55356..76307a6c1 100644 --- a/app/ldap_protocol/utils/const.py +++ b/app/ldap_protocol/utils/const.py @@ -37,9 +37,6 @@ def _type_validate_entry(entry: str) -> str: def _type_validate_email(email: str) -> str: """Validate email. - Args: - email (str): email address - Returns: str: email address diff --git a/app/ldap_protocol/utils/helpers.py b/app/ldap_protocol/utils/helpers.py index 1b8d82925..40a31338f 100644 --- a/app/ldap_protocol/utils/helpers.py +++ b/app/ldap_protocol/utils/helpers.py @@ -360,9 +360,6 @@ def create_user_name(directory_id: int) -> str: NOTE: keycloak - Args: - directory_id (int): Directory's id - Returns: str: username """ diff --git a/app/ldap_protocol/utils/queries.py b/app/ldap_protocol/utils/queries.py index e50e37a36..7e91ddfe8 100644 --- a/app/ldap_protocol/utils/queries.py +++ b/app/ldap_protocol/utils/queries.py @@ -271,9 +271,6 @@ async def get_dn_by_id(id_: int, session: AsyncSession) -> str: def get_domain_object_class(domain: Directory) -> Iterator[Attribute]: """Get default domain attrs. - Args: - domain (Directory): instance of Directory - Yields: Iterator[Attribute] """ diff --git a/app/ldap_protocol/utils/raw_definition_parser.py b/app/ldap_protocol/utils/raw_definition_parser.py index 114cc6d46..b844f41d5 100644 --- a/app/ldap_protocol/utils/raw_definition_parser.py +++ b/app/ldap_protocol/utils/raw_definition_parser.py @@ -18,9 +18,6 @@ class RawDefinitionParser: def _list_to_string(data: list[str]) -> str | None: """Convert list to string. - Args: - data (list[str]): list of strings - Raises: ValueError: if list has more than one element diff --git a/app/multidirectory.py b/app/multidirectory.py index 7debb6e5c..7d0b99108 100644 --- a/app/multidirectory.py +++ b/app/multidirectory.py @@ -82,9 +82,6 @@ async def proc_time_header_middleware( async def _lifespan(app: FastAPI) -> AsyncIterator[None]: """Lifespan context manager. - Args: - app (FastAPI): FastAPI application. - Yields: AsyncIterator: async iterator """ diff --git a/app/security.py b/app/security.py index 20444349c..9cae57260 100644 --- a/app/security.py +++ b/app/security.py @@ -25,9 +25,6 @@ def verify_password(plain_password: str, hashed_password: str) -> bool: def get_password_hash(password: str) -> str: """Hash password. - Args: - password (str): raw pwd - Returns: str: hash """ From d9bb85dc778311da2f58eb7307af7b76edcb7ddf Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 23 Jun 2025 13:42:10 +0300 Subject: [PATCH 24/25] refactor: comma task_508 --- app/ldap_protocol/utils/queries.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/ldap_protocol/utils/queries.py b/app/ldap_protocol/utils/queries.py index 7e91ddfe8..9076ce08d 100644 --- a/app/ldap_protocol/utils/queries.py +++ b/app/ldap_protocol/utils/queries.py @@ -122,7 +122,8 @@ async def get_groups(dn_list: list[str], session: AsyncSession) -> list[Group]: async def get_group( - dn: str | GRANT_DN_STRING, session: AsyncSession + dn: str | GRANT_DN_STRING, + session: AsyncSession, ) -> Directory: """Get dir with group by dn. 
From f6f8ca30061fde7a9702cd4f9b0df6eab3595ff6 Mon Sep 17 00:00:00 2001 From: Milov Dmitriy Date: Mon, 30 Jun 2025 13:45:24 +0300 Subject: [PATCH 25/25] refactor: dns docstrings task_508 --- .dns/dns_api.py | 38 ++++++-- app/api/exception_handlers.py | 6 +- app/api/main/dns_router.py | 25 ++++- app/ldap_protocol/dns/__init__.py | 12 ++- app/ldap_protocol/dns/base.py | 145 ++++++++++++++++++++++------ app/ldap_protocol/dns/remote.py | 18 +++- app/ldap_protocol/dns/selfhosted.py | 47 ++++++--- app/ldap_protocol/dns/stub.py | 60 +++++++----- app/ldap_protocol/dns/utils.py | 38 ++++++-- interface | 2 +- 10 files changed, 302 insertions(+), 89 deletions(-) diff --git a/.dns/dns_api.py b/.dns/dns_api.py index 2051dcfd6..7fee31ec9 100644 --- a/.dns/dns_api.py +++ b/.dns/dns_api.py @@ -7,7 +7,7 @@ import logging import os import re -import subprocess +import subprocess # noqa: S404 from collections import defaultdict from dataclasses import dataclass from enum import StrEnum @@ -189,7 +189,7 @@ class BindDNSServerManager: """Bind9 DNS server manager.""" @staticmethod - def _get_zone_obj_by_zone_name(zone_name) -> dns.zone.Zone: + def _get_zone_obj_by_zone_name(zone_name: str) -> dns.zone.Zone: """Get DNS zone object by zone name. Algorithm: @@ -846,8 +846,8 @@ def update_record( self, old_record: DNSRecord, new_record: DNSRecord, - record_type, - zone_name, + record_type: DNSRecordType, + zone_name: str, ) -> None: """Update a record in a zone (value or TTL). @@ -998,7 +998,11 @@ def get_server_settings() -> list[DNSServerParam]: async def get_dns_manager() -> type[BindDNSServerManager]: - """Get DNS server manager client.""" + """Get DNS server manager client. + + Returns: + BindDNSServerManager: dns manager. + """ return BindDNSServerManager() @@ -1043,7 +1047,11 @@ def delete_zone( async def get_all_records_by_zone( dns_manager: Annotated[BindDNSServerManager, Depends(get_dns_manager)], ) -> list[DNSZone]: - """Get all DNS records grouped by zone.""" + """Get all DNS records grouped by zone. + + Returns: + list[DNSZone]: List of DNSZone objects with records. + """ return dns_manager.get_all_records() @@ -1051,7 +1059,11 @@ async def get_all_records_by_zone( async def get_forward_zones( dns_manager: Annotated[BindDNSServerManager, Depends(get_dns_manager)], ) -> list[DNSForwardZone]: - """Get all forward DNS zones.""" + """Get all forward DNS zones. + + Returns: + list[DNSForwardZone]: List of DNSForwardZone objects. + """ return await dns_manager.get_forward_zones() @@ -1141,7 +1153,11 @@ def update_dns_server_settings( async def get_server_settings( dns_manager: Annotated[BindDNSServerManager, Depends(get_dns_manager)], ) -> list[DNSServerParam]: - """Get list of modifiable server settings.""" + """Get list of modifiable server settings. + + Returns: + list[DNSServerParam]: List of server parameters. + """ return dns_manager.get_server_settings() @@ -1155,7 +1171,11 @@ def setup_server( def create_app() -> FastAPI: - """Create FastAPI app.""" + """Create FastAPI app. + + Returns: + FastAPI: FastAPI application instance. + """ app = FastAPI( name="DNSServerManager", title="DNSServerManager", diff --git a/app/api/exception_handlers.py b/app/api/exception_handlers.py index 925586e9f..441c38108 100644 --- a/app/api/exception_handlers.py +++ b/app/api/exception_handlers.py @@ -91,7 +91,11 @@ async def handle_not_implemented_error( request: Request, # noqa: ARG001 exc: Exception, # noqa: ARG001 ) -> NoReturn: - """Handle Not Implemented error.""" + """Handle Not Implemented error. 
+ + Raises: + HTTPException: This feature is supported with selfhosted DNS server. + """ raise HTTPException( status_code=status.HTTP_501_NOT_IMPLEMENTED, detail="This feature is supported with selfhosted DNS server.", diff --git a/app/api/main/dns_router.py b/app/api/main/dns_router.py index c727e424d..0c98b39d4 100644 --- a/app/api/main/dns_router.py +++ b/app/api/main/dns_router.py @@ -181,7 +181,11 @@ async def setup_dns( async def get_dns_zone( dns_manager: FromDishka[AbstractDNSManager], ) -> list[DNSZone]: - """Get all DNS records of all zones.""" + """Get all DNS records of all zones. + + Returns: + list[DNSZone]: List of DNSZone objects with records. + """ return await dns_manager.get_all_zones_records() @@ -189,7 +193,11 @@ async def get_dns_zone( async def get_forward_dns_zones( dns_manager: FromDishka[AbstractDNSManager], ) -> list[DNSForwardZone]: - """Get list of DNS forward zones with forwarders.""" + """Get list of DNS forward zones with forwarders. + + Returns: + list[DNSForwardZone]: List of DNSForwardZone objects. + """ return await dns_manager.get_forward_zones() @@ -233,7 +241,12 @@ async def check_dns_forward_zone( data: DNSServiceForwardZoneCheckRequest, dns_manager: FromDishka[AbstractDNSManager], ) -> list[DNSForwardServerStatus]: - """Check given DNS forward zone for availability.""" + """Check given DNS forward zone for availability. + + Returns: + list[DNSForwardServerStatus]: List of DNSForwardServerStatus objects + indicating the status of each DNS server. + """ return [ await dns_manager.check_forward_dns_server(dns_server_ip) for dns_server_ip in data.dns_server_ips @@ -262,7 +275,11 @@ async def update_server_options( async def get_server_options( dns_manager: FromDishka[AbstractDNSManager], ) -> list[DNSServerParam]: - """Get list of modifiable DNS server params.""" + """Get list of modifiable DNS server params. + + Returns: + list[DNSServerParam]: List of DNSServerParam objects. + """ return await dns_manager.get_server_options() diff --git a/app/ldap_protocol/dns/__init__.py b/app/ldap_protocol/dns/__init__.py index 4f0492aef..7668e490d 100644 --- a/app/ldap_protocol/dns/__init__.py +++ b/app/ldap_protocol/dns/__init__.py @@ -1,3 +1,9 @@ +"""DNS API module. + +Copyright (c) 2025 MultiFactor +License: https://github.com/MultiDirectoryLab/MultiDirectory/blob/main/LICENSE +""" + from sqlalchemy.ext.asyncio import AsyncSession from .base import ( @@ -33,7 +39,11 @@ async def get_dns_manager_class( session: AsyncSession, ) -> type[AbstractDNSManager]: - """Get DNS manager class.""" + """Get DNS manager class. + + Returns: + AbstractDNSManager: Class of the DNS manager based on the current DNS. 
+ """ dns_state = await get_dns_state(session) if dns_state == DNSManagerState.SELFHOSTED: return SelfHostedDNSManager diff --git a/app/ldap_protocol/dns/base.py b/app/ldap_protocol/dns/base.py index d3106c99e..cd49469b5 100644 --- a/app/ldap_protocol/dns/base.py +++ b/app/ldap_protocol/dns/base.py @@ -221,22 +221,18 @@ async def setup( await session.execute( update(CatalogueSetting) .where(CatalogueSetting.name.in_(new_settings.keys())) - .values( - { - "value": case( - *settings, - else_=CatalogueSetting.value, - ) - } - ) + .values({ + "value": case( + *settings, + else_=CatalogueSetting.value, + ) + }) ) else: - session.add_all( - [ - CatalogueSetting(name=name, value=value) - for name, value in new_settings.items() - ] - ) + session.add_all([ + CatalogueSetting(name=name, value=value) + for name, value in new_settings.items() + ]) @abstractmethod async def create_record( @@ -246,7 +242,8 @@ async def create_record( record_type: str, ttl: int | None, zone_name: str | None = None, - ) -> None: ... + ) -> None: + """Create DNS record.""" @abstractmethod async def update_record( @@ -256,7 +253,8 @@ async def update_record( record_type: str, ttl: int | None, zone_name: str | None = None, - ) -> None: ... + ) -> None: + """Update DNS record.""" @abstractmethod async def delete_record( @@ -265,17 +263,43 @@ async def delete_record( ip: str, record_type: str, zone_name: str | None = None, - ) -> None: ... + ) -> None: + """Delete DNS record.""" @abstractmethod - async def get_all_records(self) -> list[DNSRecords]: ... + async def get_all_records(self) -> list[DNSRecords]: + """Get all DNS records of all zones. + + Raises: + DNSNotImplementedError: If the method is not implemented. + + Returns: + list[DNSRecords]: List of DNSRecords objects with records. + """ + raise DNSNotImplementedError @abstractmethod async def get_all_zones_records(self) -> list[DNSZone]: + """Get all DNS records grouped by zone. + + Raises: + DNSNotImplementedError: If the method is not implemented. + + Returns: + list[DNSZone]: List of DNSZone objects with records. + """ raise DNSNotImplementedError @abstractmethod async def get_forward_zones(self) -> list[DNSForwardZone]: + """Get all forward zones. + + Raises: + DNSNotImplementedError: If the method is not implemented. + + Returns: + list[DNSForwardZone]: List of DNSForwardZone objects. + """ raise DNSNotImplementedError @abstractmethod @@ -286,6 +310,17 @@ async def create_zone( nameserver: str | None, params: list[DNSZoneParam], ) -> None: + """Create DNS zone. + + Args: + zone_name (str): Name of the zone. + zone_type (DNSZoneType): Type of the zone (master or forward). + nameserver (str | None): Nameserver for the zone, if applicable. + params (list[DNSZoneParam]): List of parameters for the zone. + + Raises: + DNSNotImplementedError: If the method is not implemented. + """ raise DNSNotImplementedError @abstractmethod @@ -294,13 +329,27 @@ async def update_zone( zone_name: str, params: list[DNSZoneParam] | None, ) -> None: + """Update DNS zone. + + Args: + zone_name (str): Name of the zone to update. + params (list[DNSZoneParam] | None): List of parameters to update. + + Raises: + DNSNotImplementedError: If the method is not implemented. + """ raise DNSNotImplementedError @abstractmethod - async def delete_zone( - self, - zone_names: list[str], - ) -> None: + async def delete_zone(self, zone_names: list[str]) -> None: + """Delete DNS zone. + + Args: + zone_names (list[str]): List of zone names to delete. 
+ + Raises: + DNSNotImplementedError: If the method is not implemented. + """ raise DNSNotImplementedError @abstractmethod @@ -308,6 +357,17 @@ async def check_forward_dns_server( self, dns_server_ip: IPv4Address | IPv6Address, ) -> DNSForwardServerStatus: + """Check if the given DNS server is reachable and valid. + + Args: + dns_server_ip (IPv4Address | IPv6Address): IP address of DNS server + + Returns: + DNSForwardServerStatus: Status of the DNS server. + + Raises: + DNSNotImplementedError: If the method is not implemented. + """ raise DNSNotImplementedError @abstractmethod @@ -315,20 +375,45 @@ async def update_server_options( self, params: list[DNSServerParam], ) -> None: + """Update DNS server options. + + Args: + params (list[DNSServerParam]): List of server parameters to update. + + Raises: + DNSNotImplementedError: If the method is not implemented. + """ raise DNSNotImplementedError @abstractmethod - async def get_server_options(self) -> list[DNSServerParam]: ... + async def get_server_options(self) -> list[DNSServerParam]: + """Get list of modifiable DNS server params. + + Raises: + DNSNotImplementedError: If the method is not implemented. + + Returns: + list[DNSServerParam]: List of DNSServerParam objects. + """ + raise DNSNotImplementedError @abstractmethod - async def restart_server( - self, - ) -> None: + async def restart_server(self) -> None: + """Restart DNS server. + + Raises: + DNSNotImplementedError: If the method is not implemented. + """ raise DNSNotImplementedError @abstractmethod - async def reload_zone( - self, - zone_name: str, - ) -> None: + async def reload_zone(self, zone_name: str) -> None: + """Reload DNS zone. + + Args: + zone_name (str): Name of the zone to reload. + + Raises: + DNSNotImplementedError: If the method is not implemented. + """ raise DNSNotImplementedError diff --git a/app/ldap_protocol/dns/remote.py b/app/ldap_protocol/dns/remote.py index 7f1e349bd..75a1204bc 100644 --- a/app/ldap_protocol/dns/remote.py +++ b/app/ldap_protocol/dns/remote.py @@ -23,7 +23,14 @@ class RemoteDNSManager(AbstractDNSManager): """DNS server manager.""" async def _send(self, action: Message) -> None: - """Send request to DNS server.""" + """Send request to DNS server. + + Args: + action (Message): DNS action to perform. + + Raises: + DNSConnectionError: If the DNS server IP is not set. + """ if self._dns_settings.tsig_key is not None: action.use_tsig( keyring=TsigKey("zone.", self._dns_settings.tsig_key), @@ -52,7 +59,14 @@ async def create_record( @logger_wraps() async def get_all_records(self) -> list[DNSRecords]: - """Get all DNS records.""" + """Get all DNS records. + + Returns: + list[DNSRecords]: List of DNS records grouped by type. + + Raises: + DNSConnectionError: If the DNS server IP or zone name is not set. 
+ """ if ( self._dns_settings.dns_server_ip is None or self._dns_settings.zone_name is None diff --git a/app/ldap_protocol/dns/selfhosted.py b/app/ldap_protocol/dns/selfhosted.py index c29e07d5a..c83d7735a 100644 --- a/app/ldap_protocol/dns/selfhosted.py +++ b/app/ldap_protocol/dns/selfhosted.py @@ -56,6 +56,7 @@ async def update_record( ttl: int | None, zone_name: str | None = None, ) -> None: + """Update DNS record.""" await self._http_client.patch( "/record", json={ @@ -75,6 +76,7 @@ async def delete_record( record_type: str, zone_name: str | None = None, ) -> None: + """Delete DNS record.""" await self._http_client.request( "delete", "/record", @@ -88,6 +90,11 @@ async def delete_record( @logger_wraps() async def get_all_records(self) -> list[DNSRecords]: + """Get all DNS records. + + Returns: + list[DNSRecords]: List of DNS records grouped by type. + """ response = await self._http_client.get("/zone") response_data = response.json() @@ -103,12 +110,22 @@ async def get_all_records(self) -> list[DNSRecords]: @logger_wraps() async def get_all_zones_records(self) -> list[DNSZone]: + """Get all DNS zones with their records. + + Returns: + list[DNSZone]: List of DNS zones with their records. + """ response = await self._http_client.get("/zone") return response.json() @logger_wraps() async def get_forward_zones(self) -> list[DNSForwardZone]: + """Get all forward zones. + + Returns: + list[DNSForwardZone]: List of forward zones. + """ response = await self._http_client.get("/zone/forward") return response.json() @@ -121,6 +138,7 @@ async def create_zone( nameserver: str | None, params: list[DNSZoneParam], ) -> None: + """Create DNS zone.""" await self._http_client.post( "/zone", json={ @@ -137,6 +155,7 @@ async def update_zone( zone_name: str, params: list[DNSZoneParam], ) -> None: + """Update DNS zone.""" await self._http_client.patch( "/zone", json={ @@ -146,10 +165,8 @@ async def update_zone( ) @logger_wraps() - async def delete_zone( - self, - zone_names: list[str], - ) -> None: + async def delete_zone(self, zone_names: list[str]) -> None: + """Delete DNS zone.""" for zone_name in zone_names: await self._http_client.request( "delete", @@ -162,6 +179,11 @@ async def check_forward_dns_server( self, dns_server_ip: IPv4Address | IPv6Address, ) -> DNSForwardServerStatus: + """Check if the forward DNS server is reachable and return its FQDN. + + Returns: + DNSForwardServerStatus: Status of the forward DNS server. + """ str_dns_server_ip = str(dns_server_ip) try: hostname, _, _ = socket.gethostbyaddr(str_dns_server_ip) @@ -183,6 +205,7 @@ async def update_server_options( self, params: list[DNSServerParam], ) -> None: + """Update DNS server options.""" await self._http_client.patch( "/server/settings", json=[asdict(param) for param in params], @@ -190,19 +213,21 @@ async def update_server_options( @logger_wraps() async def get_server_options(self) -> list[DNSServerParam]: + """Get list of modifiable DNS server params. + + Returns: + list[DNSServerParam]: List of DNSServerParam objects. 
+ """ response = await self._http_client.get("/server/settings") return response.json() @logger_wraps() - async def restart_server( - self, - ) -> None: + async def restart_server(self) -> None: + """Restart DNS server.""" await self._http_client.get("/server/restart") @logger_wraps() - async def reload_zone( - self, - zone_name: str, - ) -> None: + async def reload_zone(self, zone_name: str) -> None: + """Reload DNS zone.""" await self._http_client.get(f"/zone/{zone_name}") diff --git a/app/ldap_protocol/dns/stub.py b/app/ldap_protocol/dns/stub.py index 836a98a62..e66e91f80 100644 --- a/app/ldap_protocol/dns/stub.py +++ b/app/ldap_protocol/dns/stub.py @@ -26,7 +26,8 @@ async def create_record( record_type: str, ttl: int | None, zone_name: str | None = None, - ) -> None: ... + ) -> None: + """Stub DNS manager create record.""" @logger_wraps(is_stub=True) async def update_record( @@ -36,7 +37,8 @@ async def update_record( record_type: str, ttl: int, zone_name: str | None = None, - ) -> None: ... + ) -> None: + """Stub DNS manager update record.""" @logger_wraps(is_stub=True) async def delete_record( @@ -45,13 +47,20 @@ async def delete_record( ip: str, record_type: str, zone_name: str | None = None, - ) -> None: ... + ) -> None: + """Stub DNS manager delete record.""" @logger_wraps(is_stub=True) - async def get_all_zones_records(self) -> None: ... + async def get_all_zones_records(self) -> None: + """Stub DNS manager get all zones records.""" @logger_wraps(is_stub=True) async def get_forward_zones(self) -> list[DNSForwardZone]: + """Stub DNS manager get forward zones. + + Returns: + list[DNSForwardZone]: List of DNSForwardZone objects. + """ return [] @logger_wraps(is_stub=True) @@ -61,49 +70,54 @@ async def create_zone( zone_type: DNSZoneType, nameserver: str | None, params: list[DNSZoneParam], - ) -> None: ... + ) -> None: + """Stub DNS manager create zone.""" @logger_wraps(is_stub=True) async def update_zone( self, zone_name: str, params: list[DNSZoneParam] | None, - ) -> None: ... + ) -> None: + """Stub DNS manager update zone.""" @logger_wraps(is_stub=True) - async def delete_zone( - self, - zone_names: list[str], - ) -> None: ... + async def delete_zone(self, zone_names: list[str]) -> None: + """Stub DNS manager delete zone.""" @logger_wraps(is_stub=True) - async def check_forward_dns_server( - self, - dns_server_ip: str, - ) -> None: ... + async def check_forward_dns_server(self, dns_server_ip: str) -> None: + """Stub DNS manager check forward DNS server.""" @logger_wraps(is_stub=True) async def update_server_options( self, params: list[DNSServerParam], - ) -> None: ... + ) -> None: + """Stub DNS manager update server options.""" @logger_wraps(is_stub=True) async def get_server_options(self) -> list[DNSServerParam]: + """Stub DNS manager get server options. + + Returns: + list[DNSServerParam]: List of DNSServerParam objects. + """ return [] @logger_wraps(is_stub=True) - async def restart_server( - self, - ) -> None: ... + async def restart_server(self) -> None: + """Stub DNS manager restart server.""" @logger_wraps(is_stub=True) - async def reload_zone( - self, - zone_name: str, - ) -> None: ... + async def reload_zone(self, zone_name: str) -> None: + """Stub DNS manager reload zone.""" @logger_wraps(is_stub=True) async def get_all_records(self) -> list[DNSRecords]: - """Stub DNS manager get all records.""" + """Stub DNS manager get all records. + + Returns: + list[DNSRecords]: List of DNSRecords objects. 
+ """ return [] diff --git a/app/ldap_protocol/dns/utils.py b/app/ldap_protocol/dns/utils.py index 8c579b5a3..1cd581a50 100644 --- a/app/ldap_protocol/dns/utils.py +++ b/app/ldap_protocol/dns/utils.py @@ -26,7 +26,11 @@ def logger_wraps(is_stub: bool = False) -> Callable: - """Log DNSManager calls.""" + """Log DNSManager calls. + + Returns: + Callable: Decorator for logging DNSManager calls. + """ def wrapper(func: Callable) -> Callable: name = func.__name__ @@ -53,10 +57,15 @@ async def wrapped(*args: str, **kwargs: str) -> Any: return wrapper -async def get_dns_state( - session: AsyncSession, -) -> "DNSManagerState": - """Get or create DNS manager state.""" +async def get_dns_state(session: AsyncSession) -> "DNSManagerState": + """Get or create DNS manager state. + + Args: + session (AsyncSession): Database session. + + Returns: + DNSManagerState: Current state of the DNS manager. + """ state = await session.scalar( select(CatalogueSetting) .filter(CatalogueSetting.name == DNS_MANAGER_STATE_NAME) @@ -88,7 +97,14 @@ async def set_dns_manager_state( async def resolve_dns_server_ip(host: str) -> str: - """Get DNS server IP from Docker network.""" + """Get DNS server IP from Docker network. + + Returns: + str: IP address of the DNS server. + + Raises: + DNSConnectionError: If the DNS server IP cannot be resolved. + """ async_resolver = AsyncResolver() dns_server_ip_resolve = await async_resolver.resolve(host) if dns_server_ip_resolve is None or dns_server_ip_resolve.rrset is None: @@ -100,7 +116,15 @@ async def get_dns_manager_settings( session: AsyncSession, resolve_coro: Awaitable[str], ) -> "DNSManagerSettings": - """Get DNS manager's settings.""" + """Get DNS manager's settings. + + Args: + session (AsyncSession): Database session. + resolve_coro (Awaitable[str]): Coroutine to resolve DNS server IP. + + Returns: + DNSManagerSettings: DNS manager settings. + """ settings_dict = {} for setting in await session.scalars( select(CatalogueSetting).filter( diff --git a/interface b/interface index 2b8f6556f..fccbff889 160000 --- a/interface +++ b/interface @@ -1 +1 @@ -Subproject commit 2b8f6556f80005cc3ec387ee4cf37a441111c43a +Subproject commit fccbff88901935affdee79584fde63857932db90