From bb3f77a0c4c0b8394c6cda0de277057bf5b67244 Mon Sep 17 00:00:00 2001 From: John Sell Date: Tue, 23 Dec 2025 14:54:42 -0500 Subject: [PATCH 01/11] feat(infrastructure): add SQLAlchemy async database foundation with Alembic - Add SQLAlchemy 2.0 with asyncpg for async database operations - Add Alembic for schema migrations - Add python-ulid for ULID support instead of UUID - Create read/write engine separation with connection pooling - Create FastAPI dependency injection for database sessions - Create SQLAlchemy declarative base with timestamp mixin - Initialize Alembic with async migration support - Create initial migration for teams table (ULID primary key) - Add comprehensive unit tests for engines and dependencies - Configure Alembic to use settings module for database URL - Enable ruff post-write hook for migration formatting Refs: AIHCM-121 --- src/api/alembic.ini | 147 ++++++++++++++++ .../infrastructure/database/dependencies.py | 126 ++++++++++++++ src/api/infrastructure/database/engines.py | 81 +++++++++ src/api/infrastructure/database/models.py | 45 +++++ src/api/infrastructure/migrations/README | 1 + src/api/infrastructure/migrations/env.py | 106 ++++++++++++ .../infrastructure/migrations/script.py.mako | 28 +++ .../7fbe65eaef1b_create_teams_table.py | 41 +++++ src/api/pyproject.toml | 4 + .../test_database_dependencies.py | 122 +++++++++++++ .../infrastructure/test_database_engines.py | 111 ++++++++++++ src/api/uv.lock | 162 ++++++++++++++++++ 12 files changed, 974 insertions(+) create mode 100644 src/api/alembic.ini create mode 100644 src/api/infrastructure/database/dependencies.py create mode 100644 src/api/infrastructure/database/engines.py create mode 100644 src/api/infrastructure/database/models.py create mode 100644 src/api/infrastructure/migrations/README create mode 100644 src/api/infrastructure/migrations/env.py create mode 100644 src/api/infrastructure/migrations/script.py.mako create mode 100644 src/api/infrastructure/migrations/versions/7fbe65eaef1b_create_teams_table.py create mode 100644 src/api/tests/unit/infrastructure/test_database_dependencies.py create mode 100644 src/api/tests/unit/infrastructure/test_database_engines.py diff --git a/src/api/alembic.ini b/src/api/alembic.ini new file mode 100644 index 00000000..ca1f6dd7 --- /dev/null +++ b/src/api/alembic.ini @@ -0,0 +1,147 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts. +# this is typically a path given in POSIX (e.g. forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file +script_location = %(here)s/infrastructure/migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. for multiple paths, the path separator +# is defined by "path_separator" below. +prepend_sys_path = . + + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the tzdata library which can be installed by adding +# `alembic[tz]` to the pip requirements. 
+# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to /versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "path_separator" +# below. +# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions + +# path_separator; This indicates what character is used to split lists of file +# paths, including version_locations and prepend_sys_path within configparser +# files such as alembic.ini. +# The default rendered in new alembic.ini files is "os", which uses os.pathsep +# to provide os-dependent path splitting. +# +# Note that in order to support legacy alembic.ini files, this default does NOT +# take place if path_separator is not present in alembic.ini. If this +# option is omitted entirely, fallback logic is as follows: +# +# 1. Parsing of the version_locations option falls back to using the legacy +# "version_path_separator" key, which if absent then falls back to the legacy +# behavior of splitting on spaces and/or commas. +# 2. Parsing of the prepend_sys_path option falls back to the legacy +# behavior of splitting on spaces, commas, or colons. +# +# Valid values for path_separator are: +# +# path_separator = : +# path_separator = ; +# path_separator = space +# path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# database URL. This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. +# sqlalchemy.url is configured in env.py using settings module + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +hooks = ruff +ruff.type = module +ruff.module = ruff +ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Alternatively, use the exec runner to execute a binary found on your PATH +# hooks = ruff +# ruff.type = exec +# ruff.executable = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Logging configuration. This is also consumed by the user-maintained +# env.py script only. 
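+# (env.py passes this file to logging.config.fileConfig(), which consumes the
+# logger/handler/formatter sections below.)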
+[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/src/api/infrastructure/database/dependencies.py b/src/api/infrastructure/database/dependencies.py new file mode 100644 index 00000000..edf3d76d --- /dev/null +++ b/src/api/infrastructure/database/dependencies.py @@ -0,0 +1,126 @@ +"""Database dependency injection for FastAPI. + +Provides async session factories for read and write operations with proper +transaction management and connection pooling. +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, AsyncGenerator + +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker + +from infrastructure.database.engines import create_read_engine, create_write_engine +from infrastructure.settings import get_database_settings + +if TYPE_CHECKING: + pass + +# Module-level engine instances (created on first use) +_write_engine: AsyncEngine | None = None +_read_engine: AsyncEngine | None = None + + +def get_write_engine() -> AsyncEngine: + """Get the write database engine (singleton). + + Creates engine on first call and caches for subsequent calls. + + Returns: + Configured async engine for write operations + """ + global _write_engine + if _write_engine is None: + settings = get_database_settings() + _write_engine = create_write_engine(settings) + return _write_engine + + +def get_read_engine() -> AsyncEngine: + """Get the read database engine (singleton). + + Creates engine on first call and caches for subsequent calls. + + Returns: + Configured async engine for read operations + """ + global _read_engine + if _read_engine is None: + settings = get_database_settings() + _read_engine = create_read_engine(settings) + return _read_engine + + +async def get_write_session() -> AsyncGenerator[AsyncSession, None]: + """Provide a write session for mutations (FastAPI dependency). + + The session is configured to NOT auto-commit. Callers must explicitly + manage transactions using `async with session.begin()`. + + Usage: + @router.post("/teams") + async def create_team( + session: AsyncSession = Depends(get_write_session) + ): + async with session.begin(): + # mutations here + session.add(team) + # transaction commits at end of `with` block + + Yields: + AsyncSession for database operations + """ + engine = get_write_engine() + async_session_factory = async_sessionmaker( + engine, + expire_on_commit=False, # Don't expire objects after commit + class_=AsyncSession, + ) + + async with async_session_factory() as session: + yield session + + +async def get_read_session() -> AsyncGenerator[AsyncSession, None]: + """Provide a read-only session for queries (FastAPI dependency). + + The session uses the read engine. While not enforced at the database level + (requires database role permissions), application code should use this + session only for read operations. 
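+    Stricter enforcement can be added later if needed, for example via a
+    SELECT-only database role or SET TRANSACTION READ ONLY per transaction.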
+
+    Usage:
+        @router.get("/teams/{id}")
+        async def get_team(
+            session: AsyncSession = Depends(get_read_session)
+        ):
+            result = await session.execute(select(Team).where(...))
+            return result.scalar_one_or_none()
+
+    Yields:
+        AsyncSession for read-only database operations
+    """
+    engine = get_read_engine()
+    async_session_factory = async_sessionmaker(
+        engine,
+        expire_on_commit=False,
+        class_=AsyncSession,
+    )
+
+    async with async_session_factory() as session:
+        yield session
+
+
+async def close_database_connections() -> None:
+    """Close all database engine connections.
+
+    Should be called on application shutdown to properly clean up connections.
+    """
+    global _write_engine, _read_engine
+
+    if _write_engine is not None:
+        await _write_engine.dispose()
+        _write_engine = None
+
+    if _read_engine is not None:
+        await _read_engine.dispose()
+        _read_engine = None
diff --git a/src/api/infrastructure/database/engines.py b/src/api/infrastructure/database/engines.py
new file mode 100644
index 00000000..88439aae
--- /dev/null
+++ b/src/api/infrastructure/database/engines.py
@@ -0,0 +1,81 @@
+"""Database engine creation for async SQLAlchemy.
+
+This module provides factory functions for creating read and write database engines
+with proper connection pooling and async support using asyncpg.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from urllib.parse import quote
+
+from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
+
+if TYPE_CHECKING:
+    from infrastructure.settings import DatabaseSettings
+
+
+def create_write_engine(settings: DatabaseSettings) -> AsyncEngine:
+    """Create async engine for write operations.
+
+    Uses standard connection pooling for mutations. Connects to the application
+    database (separate from the AGE graph database).
+
+    Args:
+        settings: Database connection settings
+
+    Returns:
+        Configured async engine for write operations
+    """
+    url = _build_async_url(settings)
+
+    return create_async_engine(
+        url,
+        pool_size=settings.pool_max_connections,
+        max_overflow=0,  # No overflow - strict pool limit
+        pool_pre_ping=True,  # Verify connections before using
+        echo=False,  # Set to True for SQL logging
+    )
+
+
+def create_read_engine(settings: DatabaseSettings) -> AsyncEngine:
+    """Create async engine for read-only operations.
+
+    Uses a separate connection pool from the write engine; read-only behavior
+    is not enforced at the engine level (see the note below). In production,
+    this could point to a read replica.
+
+    Args:
+        settings: Database connection settings
+
+    Returns:
+        Configured async engine for read operations
+    """
+    url = _build_async_url(settings)
+
+    return create_async_engine(
+        url,
+        pool_size=settings.pool_max_connections,
+        max_overflow=0,
+        pool_pre_ping=True,
+        echo=False,
+        # Note: the postgresql_readonly execution option doesn't prevent writes at
+        # the engine level; it's more of a hint. True read-only enforcement requires
+        # database role permissions. For now, we rely on application discipline to
+        # use the read engine only for queries.
+    )
+
+
+def _build_async_url(settings: DatabaseSettings) -> str:
+    """Build async database URL for asyncpg.
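+
+    Credentials are URL-encoded, since characters such as "@", ":" or "#" in a
+    username or password would otherwise break URL parsing. For example,
+    hypothetical settings (user "app", password "p@ss", database "appdb" on
+    localhost) render as:
+        postgresql+asyncpg://app:p%40ss@localhost:5432/appdb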
+
+    Args:
+        settings: Database connection settings
+
+    Returns:
+        Connection URL string in format: postgresql+asyncpg://user:pass@host:port/db,
+        with the username and password URL-encoded
+    """
+    username = quote(settings.username, safe="")
+    password = quote(settings.password.get_secret_value(), safe="")
+    return (
+        f"postgresql+asyncpg://"
+        f"{username}:{password}@"
+        f"{settings.host}:{settings.port}/"
+        f"{settings.database}"
+    )
diff --git a/src/api/infrastructure/database/models.py b/src/api/infrastructure/database/models.py
new file mode 100644
index 00000000..f6874586
--- /dev/null
+++ b/src/api/infrastructure/database/models.py
@@ -0,0 +1,45 @@
+"""SQLAlchemy declarative base and shared model utilities.
+
+This module provides the declarative base class for all SQLAlchemy ORM models
+and common mixins for timestamps and other shared functionality.
+"""
+
+from __future__ import annotations
+
+from datetime import datetime, timezone
+from typing import Any
+
+from sqlalchemy import DateTime
+from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+
+
+class Base(DeclarativeBase):
+    """Base class for all SQLAlchemy ORM models.
+
+    All models in the application should inherit from this base class.
+    It provides the declarative base functionality and type hints for SQLAlchemy 2.0.
+    """
+
+    # Custom Python-type to SQL-type mappings for SQLAlchemy (empty for now)
+    type_annotation_map: dict[type, Any] = {}
+
+
+class TimestampMixin:
+    """Mixin providing created_at and updated_at timestamp columns.
+
+    Automatically sets created_at on insert and updates updated_at on modification.
+    Uses timezone-aware UTC timestamps.
+    """
+
+    created_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True),
+        default=lambda: datetime.now(timezone.utc),
+        nullable=False,
+    )
+
+    updated_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True),
+        default=lambda: datetime.now(timezone.utc),
+        onupdate=lambda: datetime.now(timezone.utc),
+        nullable=False,
+    )
diff --git a/src/api/infrastructure/migrations/README b/src/api/infrastructure/migrations/README
new file mode 100644
index 00000000..98e4f9c4
--- /dev/null
+++ b/src/api/infrastructure/migrations/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/src/api/infrastructure/migrations/env.py b/src/api/infrastructure/migrations/env.py
new file mode 100644
index 00000000..2df7f537
--- /dev/null
+++ b/src/api/infrastructure/migrations/env.py
@@ -0,0 +1,106 @@
+"""Alembic migration environment configuration.
+
+This module configures Alembic to use our database settings and models for
+automatic migration generation.
+"""
+
+import asyncio
+from logging.config import fileConfig
+from urllib.parse import quote
+
+from alembic import context
+from sqlalchemy import pool
+from sqlalchemy.engine import Connection
+from sqlalchemy.ext.asyncio import async_engine_from_config
+
+# Import our Base and settings
+from infrastructure.database.models import Base
+from infrastructure.settings import get_database_settings
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers from the config file.
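+# (fileConfig() reads the [loggers]/[handlers]/[formatters] sections defined
+# in alembic.ini above: sqlalchemy.engine logs at WARNING, alembic at INFO.)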
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+# Set target metadata for autogenerate support
+# This allows Alembic to detect schema changes automatically
+target_metadata = Base.metadata
+
+# Configure database URL from settings (credentials URL-encoded, matching
+# infrastructure.database.engines._build_async_url)
+db_settings = get_database_settings()
+username = quote(db_settings.username, safe="")
+password = quote(db_settings.password.get_secret_value(), safe="")
+database_url = (
+    f"postgresql+asyncpg://"
+    f"{username}:{password}@"
+    f"{db_settings.host}:{db_settings.port}/"
+    f"{db_settings.database}"
+)
+# Percent signs (e.g. from URL-encoded credentials) must be doubled so that
+# ConfigParser interpolation does not mangle them when the URL is read back.
+config.set_main_option("sqlalchemy.url", database_url.replace("%", "%%"))
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def do_run_migrations(connection: Connection) -> None:
+    """Run migrations using a database connection.
+
+    Args:
+        connection: SQLAlchemy connection to run migrations on
+    """
+    context.configure(connection=connection, target_metadata=target_metadata)
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+async def run_async_migrations() -> None:
+    """Run migrations in 'online' mode with an async engine.
+
+    In this scenario we need to create an async Engine
+    and associate a connection with the context.
+    """
+    connectable = async_engine_from_config(
+        config.get_section(config.config_ini_section, {}),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,  # No pooling for migrations
+    )
+
+    async with connectable.connect() as connection:
+        await connection.run_sync(do_run_migrations)
+
+    await connectable.dispose()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode (async wrapper)."""
+    asyncio.run(run_async_migrations())
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/src/api/infrastructure/migrations/script.py.mako b/src/api/infrastructure/migrations/script.py.mako
new file mode 100644
index 00000000..11016301
--- /dev/null
+++ b/src/api/infrastructure/migrations/script.py.mako
@@ -0,0 +1,28 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
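+# (These lines are rendered by Mako when `alembic revision` generates a new
+# file; the initial migration in this patch shows the rendered form.)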
+revision: str = ${repr(up_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/src/api/infrastructure/migrations/versions/7fbe65eaef1b_create_teams_table.py b/src/api/infrastructure/migrations/versions/7fbe65eaef1b_create_teams_table.py new file mode 100644 index 00000000..f1b6f5d9 --- /dev/null +++ b/src/api/infrastructure/migrations/versions/7fbe65eaef1b_create_teams_table.py @@ -0,0 +1,41 @@ +"""create teams table + +Revision ID: 7fbe65eaef1b +Revises: +Create Date: 2025-12-23 14:51:40.833197 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + + +# revision identifiers, used by Alembic. +revision: str = "7fbe65eaef1b" +down_revision: Union[str, Sequence[str], None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + op.create_table( + "teams", + sa.Column("id", sa.String(length=26), nullable=False), # ULID + sa.Column("name", sa.String(length=255), nullable=False), + sa.Column("workspace_id", sa.String(length=26), nullable=False), # ULID + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_teams_workspace_id"), "teams", ["workspace_id"], unique=False + ) + + +def downgrade() -> None: + """Downgrade schema.""" + op.drop_index(op.f("ix_teams_workspace_id"), table_name="teams") + op.drop_table("teams") diff --git a/src/api/pyproject.toml b/src/api/pyproject.toml index e32a8199..44c8775a 100644 --- a/src/api/pyproject.toml +++ b/src/api/pyproject.toml @@ -5,12 +5,16 @@ description = "Enterprise-Ready Bi-Temporal Knowledge Graphs as a Service" readme = "README.md" requires-python = ">=3.12" dependencies = [ + "alembic>=1.17.2", "apache-age-python>=0.0.7", + "asyncpg>=0.31.0", "cyclopts==5.0.0a1", "fastapi[standard]>=0.123.9", "fastmcp==2.14.1", "psycopg2>=2.9.11", "pydantic-settings>=2.12.0", + "python-ulid>=3.1.0", + "sqlalchemy[asyncio]>=2.0.45", "structlog>=25.5.0", ] diff --git a/src/api/tests/unit/infrastructure/test_database_dependencies.py b/src/api/tests/unit/infrastructure/test_database_dependencies.py new file mode 100644 index 00000000..b322b40e --- /dev/null +++ b/src/api/tests/unit/infrastructure/test_database_dependencies.py @@ -0,0 +1,122 @@ +"""Unit tests for database dependency injection. + +Tests the FastAPI dependency providers for async database sessions. 
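+
+Engine construction in SQLAlchemy is lazy, so these tests should not require a
+live database; settings must still resolve (e.g. from environment variables).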
+""" + +import pytest +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession + +from infrastructure.database.dependencies import ( + close_database_connections, + get_read_engine, + get_read_session, + get_write_engine, + get_write_session, +) + + +@pytest.mark.asyncio +async def test_get_write_engine(): + """Test that get_write_engine returns an AsyncEngine.""" + engine = get_write_engine() + + assert isinstance(engine, AsyncEngine) + assert engine.url.drivername == "postgresql+asyncpg" + + +@pytest.mark.asyncio +async def test_get_read_engine(): + """Test that get_read_engine returns an AsyncEngine.""" + engine = get_read_engine() + + assert isinstance(engine, AsyncEngine) + assert engine.url.drivername == "postgresql+asyncpg" + + +@pytest.mark.asyncio +async def test_engines_are_singletons(): + """Test that engines are cached and reused.""" + write_engine_1 = get_write_engine() + write_engine_2 = get_write_engine() + + read_engine_1 = get_read_engine() + read_engine_2 = get_read_engine() + + # Same engine instance should be returned + assert write_engine_1 is write_engine_2 + assert read_engine_1 is read_engine_2 + + # But write and read engines should be different + assert write_engine_1 is not read_engine_1 + + +@pytest.mark.asyncio +async def test_get_write_session(): + """Test that get_write_session yields an AsyncSession.""" + async for session in get_write_session(): + assert isinstance(session, AsyncSession) + assert session.bind is not None + + +@pytest.mark.asyncio +async def test_get_read_session(): + """Test that get_read_session yields an AsyncSession.""" + async for session in get_read_session(): + assert isinstance(session, AsyncSession) + assert session.bind is not None + + +@pytest.mark.asyncio +async def test_write_session_uses_write_engine(): + """Test that write session is bound to write engine.""" + write_engine = get_write_engine() + + async for session in get_write_session(): + # Session should be bound to write engine + assert session.bind.sync_engine is write_engine.sync_engine + + +@pytest.mark.asyncio +async def test_read_session_uses_read_engine(): + """Test that read session is bound to read engine.""" + read_engine = get_read_engine() + + async for session in get_read_session(): + # Session should be bound to read engine + assert session.bind.sync_engine is read_engine.sync_engine + + +@pytest.mark.asyncio +async def test_sessions_properly_yielded(): + """Test that sessions are properly yielded from async generators.""" + session_count = 0 + + async for session in get_write_session(): + session_count += 1 + # Session should be active (not in a transaction, but connection is open) + assert isinstance(session, AsyncSession) + + # Should have yielded exactly one session + assert session_count == 1 + + +@pytest.mark.asyncio +async def test_close_database_connections(): + """Test that close_database_connections disposes engines.""" + # Get engines to ensure they're initialized + write_engine = get_write_engine() + read_engine = get_read_engine() + + # Close connections + await close_database_connections() + + # After closing, getting engines again should create new instances + new_write_engine = get_write_engine() + new_read_engine = get_read_engine() + + # Should be new engine instances + assert new_write_engine is not write_engine + assert new_read_engine is not read_engine + + # Cleanup + await close_database_connections() diff --git a/src/api/tests/unit/infrastructure/test_database_engines.py b/src/api/tests/unit/infrastructure/test_database_engines.py 
new file mode 100644
index 00000000..fa36ed8e
--- /dev/null
+++ b/src/api/tests/unit/infrastructure/test_database_engines.py
@@ -0,0 +1,111 @@
+"""Unit tests for database engine creation.
+
+Tests the creation and configuration of async SQLAlchemy engines for read/write operations.
+"""
+
+import pytest
+from pydantic import SecretStr
+from sqlalchemy.ext.asyncio import AsyncEngine
+
+from infrastructure.database.engines import (
+    _build_async_url,
+    create_read_engine,
+    create_write_engine,
+)
+from infrastructure.settings import DatabaseSettings
+
+
+@pytest.fixture
+def mock_db_settings() -> DatabaseSettings:
+    """Create mock database settings for testing."""
+    return DatabaseSettings(
+        host="localhost",
+        port=5432,
+        database="test_db",
+        username="test_user",
+        password=SecretStr("test_password"),
+        pool_min_connections=2,
+        pool_max_connections=10,
+    )
+
+
+def test_build_async_url(mock_db_settings):
+    """Test async database URL construction."""
+    url = _build_async_url(mock_db_settings)
+
+    assert url == (
+        "postgresql+asyncpg://test_user:test_password@localhost:5432/test_db"
+    )
+
+
+def test_build_async_url_with_special_characters():
+    """Test URL construction with special characters in credentials."""
+    settings = DatabaseSettings(
+        host="db.example.com",
+        port=5432,
+        database="mydb",
+        username="user@domain",
+        password=SecretStr("p@ssw0rd!#$"),
+    )
+
+    url = _build_async_url(settings)
+
+    # Credentials must be URL-encoded; raw "@", ":" or "#" would make the URL
+    # ambiguous or unparseable
+    assert "p%40ssw0rd%21%23%24" in url
+    assert "user%40domain" in url
+
+
+def test_create_write_engine(mock_db_settings):
+    """Test write engine creation with proper configuration."""
+    engine = create_write_engine(mock_db_settings)
+
+    assert isinstance(engine, AsyncEngine)
+    assert engine.pool.size() == mock_db_settings.pool_max_connections
+    assert engine.url.drivername == "postgresql+asyncpg"
+    assert engine.url.database == "test_db"
+
+    # Cleanup
+    engine.sync_engine.dispose()
+
+
+def test_create_read_engine(mock_db_settings):
+    """Test read engine creation with proper configuration."""
+    engine = create_read_engine(mock_db_settings)
+
+    assert isinstance(engine, AsyncEngine)
+    assert engine.pool.size() == mock_db_settings.pool_max_connections
+    assert engine.url.drivername == "postgresql+asyncpg"
+    assert engine.url.database == "test_db"
+
+    # Cleanup
+    engine.sync_engine.dispose()
+
+
+def test_engines_use_separate_pools(mock_db_settings):
+    """Test that read and write engines have separate connection pools."""
+    write_engine = create_write_engine(mock_db_settings)
+    read_engine = create_read_engine(mock_db_settings)
+
+    # Engines should be different instances
+    assert write_engine is not read_engine
+
+    # Should have separate pools
+    assert write_engine.pool is not read_engine.pool
+
+    # Cleanup
+    write_engine.sync_engine.dispose()
+    read_engine.sync_engine.dispose()
+
+
+def test_engine_pool_configuration(mock_db_settings):
+    """Test that engine pools are configured correctly."""
+    engine = create_write_engine(mock_db_settings)
+
+    # Pool should respect max connections
+    assert engine.pool.size() == 10
+
+    # max_overflow=0 means strict limit (no overflow)
+    assert engine.pool._max_overflow == 0
+
+    # Cleanup
+    engine.sync_engine.dispose()
diff --git a/src/api/uv.lock b/src/api/uv.lock
index fa9cb8fa..a9ba7159 100644
--- a/src/api/uv.lock
+++ b/src/api/uv.lock
@@ -2,6 +2,20 @@ version = 1
 revision = 2
 requires-python = ">=3.12"
 
+[[package]]
+name = "alembic"
+version = "1.17.2"
+source = { registry =
"https://pypi.org/simple" } +dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/a6/74c8cadc2882977d80ad756a13857857dbcf9bd405bc80b662eb10651282/alembic-1.17.2.tar.gz", hash = "sha256:bbe9751705c5e0f14877f02d46c53d10885e377e3d90eda810a016f9baa19e8e", size = 1988064, upload-time = "2025-11-14T20:35:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/88/6237e97e3385b57b5f1528647addea5cc03d4d65d5979ab24327d41fb00d/alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6", size = 248554, upload-time = "2025-11-14T20:35:05.699Z" }, +] + [[package]] name = "annotated-doc" version = "0.0.4" @@ -55,6 +69,46 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/40/4a/0474f7f25d970c7b7e6078d02b93ed6a0e2c8b56fe6c82dd20c13ab8f6d7/apache_age_python-0.0.7-py3-none-any.whl", hash = "sha256:6fee06273343a79d86847ca6403d383a65dd6be5dcb192394d2a90b30c4b4f14", size = 25732, upload-time = "2023-09-22T17:02:27.836Z" }, ] +[[package]] +name = "asyncpg" +version = "0.31.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" }, + { url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" }, + { url = "https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d1/a867c2150f9c6e7af6462637f613ba67f78a314b00db220cd26ff559d532/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:aad7a33913fb8bcb5454313377cc330fbb19a0cd5faa7272407d8a0c4257b671", size = 3520321, upload-time = "2025-11-24T23:25:54.982Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1a/cce4c3f246805ecd285a3591222a2611141f1669d002163abef999b60f98/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3df118d94f46d85b2e434fd62c84cb66d5834d5a890725fe625f498e72e4d5ec", size = 3316685, upload-time = "2025-11-24T23:25:57.43Z" }, + { url = "https://files.pythonhosted.org/packages/40/ae/0fc961179e78cc579e138fad6eb580448ecae64908f95b8cb8ee2f241f67/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5b6efff3c17c3202d4b37189969acf8927438a238c6257f66be3c426beba20", size = 3471858, upload-time = "2025-11-24T23:25:59.636Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/b2/b20e09670be031afa4cbfabd645caece7f85ec62d69c312239de568e058e/asyncpg-0.31.0-cp312-cp312-win32.whl", hash = "sha256:027eaa61361ec735926566f995d959ade4796f6a49d3bde17e5134b9964f9ba8", size = 527852, upload-time = "2025-11-24T23:26:01.084Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f0/f2ed1de154e15b107dc692262395b3c17fc34eafe2a78fc2115931561730/asyncpg-0.31.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d6bdcbc93d608a1158f17932de2321f68b1a967a13e014998db87a72ed3186", size = 597175, upload-time = "2025-11-24T23:26:02.564Z" }, + { url = "https://files.pythonhosted.org/packages/95/11/97b5c2af72a5d0b9bc3fa30cd4b9ce22284a9a943a150fdc768763caf035/asyncpg-0.31.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c204fab1b91e08b0f47e90a75d1b3c62174dab21f670ad6c5d0f243a228f015b", size = 661111, upload-time = "2025-11-24T23:26:04.467Z" }, + { url = "https://files.pythonhosted.org/packages/1b/71/157d611c791a5e2d0423f09f027bd499935f0906e0c2a416ce712ba51ef3/asyncpg-0.31.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54a64f91839ba59008eccf7aad2e93d6e3de688d796f35803235ea1c4898ae1e", size = 636928, upload-time = "2025-11-24T23:26:05.944Z" }, + { url = "https://files.pythonhosted.org/packages/2e/fc/9e3486fb2bbe69d4a867c0b76d68542650a7ff1574ca40e84c3111bb0c6e/asyncpg-0.31.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0e0822b1038dc7253b337b0f3f676cadc4ac31b126c5d42691c39691962e403", size = 3424067, upload-time = "2025-11-24T23:26:07.957Z" }, + { url = "https://files.pythonhosted.org/packages/12/c6/8c9d076f73f07f995013c791e018a1cd5f31823c2a3187fc8581706aa00f/asyncpg-0.31.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bef056aa502ee34204c161c72ca1f3c274917596877f825968368b2c33f585f4", size = 3518156, upload-time = "2025-11-24T23:26:09.591Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3b/60683a0baf50fbc546499cfb53132cb6835b92b529a05f6a81471ab60d0c/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0bfbcc5b7ffcd9b75ab1558f00db2ae07db9c80637ad1b2469c43df79d7a5ae2", size = 3319636, upload-time = "2025-11-24T23:26:11.168Z" }, + { url = "https://files.pythonhosted.org/packages/50/dc/8487df0f69bd398a61e1792b3cba0e47477f214eff085ba0efa7eac9ce87/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22bc525ebbdc24d1261ecbf6f504998244d4e3be1721784b5f64664d61fbe602", size = 3472079, upload-time = "2025-11-24T23:26:13.164Z" }, + { url = "https://files.pythonhosted.org/packages/13/a1/c5bbeeb8531c05c89135cb8b28575ac2fac618bcb60119ee9696c3faf71c/asyncpg-0.31.0-cp313-cp313-win32.whl", hash = "sha256:f890de5e1e4f7e14023619399a471ce4b71f5418cd67a51853b9910fdfa73696", size = 527606, upload-time = "2025-11-24T23:26:14.78Z" }, + { url = "https://files.pythonhosted.org/packages/91/66/b25ccb84a246b470eb943b0107c07edcae51804912b824054b3413995a10/asyncpg-0.31.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc5f2fa9916f292e5c5c8b2ac2813763bcd7f58e130055b4ad8a0531314201ab", size = 596569, upload-time = "2025-11-24T23:26:16.189Z" }, + { url = "https://files.pythonhosted.org/packages/3c/36/e9450d62e84a13aea6580c83a47a437f26c7ca6fa0f0fd40b6670793ea30/asyncpg-0.31.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f6b56b91bb0ffc328c4e3ed113136cddd9deefdf5f79ab448598b9772831df44", size = 660867, upload-time = "2025-11-24T23:26:17.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/4b/1d0a2b33b3102d210439338e1beea616a6122267c0df459ff0265cd5807a/asyncpg-0.31.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:334dec28cf20d7f5bb9e45b39546ddf247f8042a690bff9b9573d00086e69cb5", size = 638349, upload-time = "2025-11-24T23:26:19.689Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/e7f7ac9a7974f08eff9183e392b2d62516f90412686532d27e196c0f0eeb/asyncpg-0.31.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98cc158c53f46de7bb677fd20c417e264fc02b36d901cc2a43bd6cb0dc6dbfd2", size = 3410428, upload-time = "2025-11-24T23:26:21.275Z" }, + { url = "https://files.pythonhosted.org/packages/6f/de/bf1b60de3dede5c2731e6788617a512bc0ebd9693eac297ee74086f101d7/asyncpg-0.31.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9322b563e2661a52e3cdbc93eed3be7748b289f792e0011cb2720d278b366ce2", size = 3471678, upload-time = "2025-11-24T23:26:23.627Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/fc3ade003e22d8bd53aaf8f75f4be48f0b460fa73738f0391b9c856a9147/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19857a358fc811d82227449b7ca40afb46e75b33eb8897240c3839dd8b744218", size = 3313505, upload-time = "2025-11-24T23:26:25.235Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e9/73eb8a6789e927816f4705291be21f2225687bfa97321e40cd23055e903a/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ba5f8886e850882ff2c2ace5732300e99193823e8107e2c53ef01c1ebfa1e85d", size = 3434744, upload-time = "2025-11-24T23:26:26.944Z" }, + { url = "https://files.pythonhosted.org/packages/08/4b/f10b880534413c65c5b5862f79b8e81553a8f364e5238832ad4c0af71b7f/asyncpg-0.31.0-cp314-cp314-win32.whl", hash = "sha256:cea3a0b2a14f95834cee29432e4ddc399b95700eb1d51bbc5bfee8f31fa07b2b", size = 532251, upload-time = "2025-11-24T23:26:28.404Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2d/7aa40750b7a19efa5d66e67fc06008ca0f27ba1bd082e457ad82f59aba49/asyncpg-0.31.0-cp314-cp314-win_amd64.whl", hash = "sha256:04d19392716af6b029411a0264d92093b6e5e8285ae97a39957b9a9c14ea72be", size = 604901, upload-time = "2025-11-24T23:26:30.34Z" }, + { url = "https://files.pythonhosted.org/packages/ce/fe/b9dfe349b83b9dee28cc42360d2c86b2cdce4cb551a2c2d27e156bcac84d/asyncpg-0.31.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bdb957706da132e982cc6856bb2f7b740603472b54c3ebc77fe60ea3e57e1bd2", size = 702280, upload-time = "2025-11-24T23:26:32Z" }, + { url = "https://files.pythonhosted.org/packages/6a/81/e6be6e37e560bd91e6c23ea8a6138a04fd057b08cf63d3c5055c98e81c1d/asyncpg-0.31.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6d11b198111a72f47154fa03b85799f9be63701e068b43f84ac25da0bda9cb31", size = 682931, upload-time = "2025-11-24T23:26:33.572Z" }, + { url = "https://files.pythonhosted.org/packages/a6/45/6009040da85a1648dd5bc75b3b0a062081c483e75a1a29041ae63a0bf0dc/asyncpg-0.31.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18c83b03bc0d1b23e6230f5bf8d4f217dc9bc08644ce0502a9d91dc9e634a9c7", size = 3581608, upload-time = "2025-11-24T23:26:35.638Z" }, + { url = "https://files.pythonhosted.org/packages/7e/06/2e3d4d7608b0b2b3adbee0d0bd6a2d29ca0fc4d8a78f8277df04e2d1fd7b/asyncpg-0.31.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e009abc333464ff18b8f6fd146addffd9aaf63e79aa3bb40ab7a4c332d0c5e9e", size = 3498738, 
upload-time = "2025-11-24T23:26:37.275Z" }, + { url = "https://files.pythonhosted.org/packages/7d/aa/7d75ede780033141c51d83577ea23236ba7d3a23593929b32b49db8ed36e/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3b1fbcb0e396a5ca435a8826a87e5c2c2cc0c8c68eb6fadf82168056b0e53a8c", size = 3401026, upload-time = "2025-11-24T23:26:39.423Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7a/15e37d45e7f7c94facc1e9148c0e455e8f33c08f0b8a0b1deb2c5171771b/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8df714dba348efcc162d2adf02d213e5fab1bd9f557e1305633e851a61814a7a", size = 3429426, upload-time = "2025-11-24T23:26:41.032Z" }, + { url = "https://files.pythonhosted.org/packages/13/d5/71437c5f6ae5f307828710efbe62163974e71237d5d46ebd2869ea052d10/asyncpg-0.31.0-cp314-cp314t-win32.whl", hash = "sha256:1b41f1afb1033f2b44f3234993b15096ddc9cd71b21a42dbd87fc6a57b43d65d", size = 614495, upload-time = "2025-11-24T23:26:42.659Z" }, + { url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" }, +] + [[package]] name = "attrs" version = "25.4.0" @@ -620,6 +674,45 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1d/82/72401d09dc27c27fdf72ad6c2fe331e553e3c3646e01b5ff16473191033d/fastmcp-2.14.1-py3-none-any.whl", hash = "sha256:fb3e365cc1d52573ab89caeba9944dd4b056149097be169bce428e011f0a57e5", size = 412176, upload-time = "2025-12-15T02:26:25.356Z" }, ] +[[package]] +name = "greenlet" +version = "3.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651, upload-time = "2025-12-04T14:49:44.05Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379, upload-time = "2025-12-04T14:23:30.498Z" }, + { url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294, upload-time = "2025-12-04T14:50:06.847Z" }, + { url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742, upload-time = "2025-12-04T14:57:42.349Z" }, + { url = "https://files.pythonhosted.org/packages/77/cb/43692bcd5f7a0da6ec0ec6d58ee7cddb606d055ce94a62ac9b1aa481e969/greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7", size = 622297, upload-time = "2025-12-04T15:07:13.552Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885, upload-time = "2025-12-04T14:26:02.368Z" }, + { url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424, upload-time = "2025-12-04T15:04:23.757Z" }, + { url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017, upload-time = "2025-12-04T14:27:29.688Z" }, + { url = "https://files.pythonhosted.org/packages/6c/79/3912a94cf27ec503e51ba493692d6db1e3cd8ac7ac52b0b47c8e33d7f4f9/greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39", size = 301964, upload-time = "2025-12-04T14:36:58.316Z" }, + { url = "https://files.pythonhosted.org/packages/02/2f/28592176381b9ab2cafa12829ba7b472d177f3acc35d8fbcf3673d966fff/greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739", size = 275140, upload-time = "2025-12-04T14:23:01.282Z" }, + { url = "https://files.pythonhosted.org/packages/2c/80/fbe937bf81e9fca98c981fe499e59a3f45df2a04da0baa5c2be0dca0d329/greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808", size = 599219, upload-time = "2025-12-04T14:50:08.309Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ff/7c985128f0514271b8268476af89aee6866df5eec04ac17dcfbc676213df/greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54", size = 610211, upload-time = "2025-12-04T14:57:43.968Z" }, + { url = "https://files.pythonhosted.org/packages/79/07/c47a82d881319ec18a4510bb30463ed6891f2ad2c1901ed5ec23d3de351f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30a6e28487a790417d036088b3bcb3f3ac7d8babaa7d0139edbaddebf3af9492", size = 624311, upload-time = "2025-12-04T15:07:14.697Z" }, + { url = "https://files.pythonhosted.org/packages/fd/8e/424b8c6e78bd9837d14ff7df01a9829fc883ba2ab4ea787d4f848435f23f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527", size = 612833, upload-time = "2025-12-04T14:26:03.669Z" }, + { url = "https://files.pythonhosted.org/packages/b5/ba/56699ff9b7c76ca12f1cdc27a886d0f81f2189c3455ff9f65246780f713d/greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39", size = 1567256, upload-time = "2025-12-04T15:04:25.276Z" }, + { url = "https://files.pythonhosted.org/packages/1e/37/f31136132967982d698c71a281a8901daf1a8fbab935dce7c0cf15f942cc/greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8", size = 1636483, upload-time = "2025-12-04T14:27:30.804Z" }, + { url 
= "https://files.pythonhosted.org/packages/7e/71/ba21c3fb8c5dce83b8c01f458a42e99ffdb1963aeec08fff5a18588d8fd7/greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38", size = 301833, upload-time = "2025-12-04T14:32:23.929Z" }, + { url = "https://files.pythonhosted.org/packages/d7/7c/f0a6d0ede2c7bf092d00bc83ad5bafb7e6ec9b4aab2fbdfa6f134dc73327/greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f", size = 275671, upload-time = "2025-12-04T14:23:05.267Z" }, + { url = "https://files.pythonhosted.org/packages/44/06/dac639ae1a50f5969d82d2e3dd9767d30d6dbdbab0e1a54010c8fe90263c/greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365", size = 646360, upload-time = "2025-12-04T14:50:10.026Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/0fb76fe6c5369fba9bf98529ada6f4c3a1adf19e406a47332245ef0eb357/greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3", size = 658160, upload-time = "2025-12-04T14:57:45.41Z" }, + { url = "https://files.pythonhosted.org/packages/93/79/d2c70cae6e823fac36c3bbc9077962105052b7ef81db2f01ec3b9bf17e2b/greenlet-3.3.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dcd2bdbd444ff340e8d6bdf54d2f206ccddbb3ccfdcd3c25bf4afaa7b8f0cf45", size = 671388, upload-time = "2025-12-04T15:07:15.789Z" }, + { url = "https://files.pythonhosted.org/packages/b8/14/bab308fc2c1b5228c3224ec2bf928ce2e4d21d8046c161e44a2012b5203e/greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955", size = 660166, upload-time = "2025-12-04T14:26:05.099Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d2/91465d39164eaa0085177f61983d80ffe746c5a1860f009811d498e7259c/greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55", size = 1615193, upload-time = "2025-12-04T15:04:27.041Z" }, + { url = "https://files.pythonhosted.org/packages/42/1b/83d110a37044b92423084d52d5d5a3b3a73cafb51b547e6d7366ff62eff1/greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc", size = 1683653, upload-time = "2025-12-04T14:27:32.366Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/9030e6f9aa8fd7808e9c31ba4c38f87c4f8ec324ee67431d181fe396d705/greenlet-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:73f51dd0e0bdb596fb0417e475fa3c5e32d4c83638296e560086b8d7da7c4170", size = 305387, upload-time = "2025-12-04T14:26:51.063Z" }, + { url = "https://files.pythonhosted.org/packages/a0/66/bd6317bc5932accf351fc19f177ffba53712a202f9df10587da8df257c7e/greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931", size = 282638, upload-time = "2025-12-04T14:25:20.941Z" }, + { url = "https://files.pythonhosted.org/packages/30/cf/cc81cb030b40e738d6e69502ccbd0dd1bced0588e958f9e757945de24404/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388", size = 651145, upload-time = "2025-12-04T14:50:11.039Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/ea/1020037b5ecfe95ca7df8d8549959baceb8186031da83d5ecceff8b08cd2/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3", size = 654236, upload-time = "2025-12-04T14:57:47.007Z" }, + { url = "https://files.pythonhosted.org/packages/69/cc/1e4bae2e45ca2fa55299f4e85854606a78ecc37fead20d69322f96000504/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2662433acbca297c9153a4023fe2161c8dcfdcc91f10433171cf7e7d94ba2221", size = 662506, upload-time = "2025-12-04T15:07:16.906Z" }, + { url = "https://files.pythonhosted.org/packages/57/b9/f8025d71a6085c441a7eaff0fd928bbb275a6633773667023d19179fe815/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3c6e9b9c1527a78520357de498b0e709fb9e2f49c3a513afd5a249007261911b", size = 653783, upload-time = "2025-12-04T14:26:06.225Z" }, + { url = "https://files.pythonhosted.org/packages/f6/c7/876a8c7a7485d5d6b5c6821201d542ef28be645aa024cfe1145b35c120c1/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:286d093f95ec98fdd92fcb955003b8a3d054b4e2cab3e2707a5039e7b50520fd", size = 1614857, upload-time = "2025-12-04T15:04:28.484Z" }, + { url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034, upload-time = "2025-12-04T14:27:33.531Z" }, +] + [[package]] name = "h11" version = "0.16.0" @@ -826,12 +919,16 @@ name = "kartograph-api" version = "0.2.5" source = { virtual = "." } dependencies = [ + { name = "alembic" }, { name = "apache-age-python" }, + { name = "asyncpg" }, { name = "cyclopts" }, { name = "fastapi", extra = ["standard"] }, { name = "fastmcp" }, { name = "psycopg2" }, { name = "pydantic-settings" }, + { name = "python-ulid" }, + { name = "sqlalchemy", extra = ["asyncio"] }, { name = "structlog" }, ] @@ -850,12 +947,16 @@ dev = [ [package.metadata] requires-dist = [ + { name = "alembic", specifier = ">=1.17.2" }, { name = "apache-age-python", specifier = ">=0.0.7" }, + { name = "asyncpg", specifier = ">=0.31.0" }, { name = "cyclopts", specifier = "==5.0.0a1" }, { name = "fastapi", extras = ["standard"], specifier = ">=0.123.9" }, { name = "fastmcp", specifier = "==2.14.1" }, { name = "psycopg2", specifier = ">=2.9.11" }, { name = "pydantic-settings", specifier = ">=2.12.0" }, + { name = "python-ulid", specifier = ">=3.1.0" }, + { name = "sqlalchemy", extras = ["asyncio"], specifier = ">=2.0.45" }, { name = "structlog", specifier = ">=25.5.0" }, ] @@ -993,6 +1094,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7d/5e/db903ce9cf82c48d6b91bf6d63ae4c8d0d17958939a4e04ba6b9f38b8643/lupa-2.6-cp314-cp314t-win_amd64.whl", hash = "sha256:fc1498d1a4fc028bc521c26d0fad4ca00ed63b952e32fb95949bda76a04bad52", size = 1913818, upload-time = "2025-10-24T07:19:36.039Z" }, ] +[[package]] +name = "mako" +version = "1.3.10" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, +] + [[package]] name = "markdown-it-py" version = "4.0.0" @@ -1612,6 +1725,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, ] +[[package]] +name = "python-ulid" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/40/7e/0d6c82b5ccc71e7c833aed43d9e8468e1f2ff0be1b3f657a6fcafbb8433d/python_ulid-3.1.0.tar.gz", hash = "sha256:ff0410a598bc5f6b01b602851a3296ede6f91389f913a5d5f8c496003836f636", size = 93175, upload-time = "2025-08-18T16:09:26.305Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/a0/4ed6632b70a52de845df056654162acdebaf97c20e3212c559ac43e7216e/python_ulid-3.1.0-py3-none-any.whl", hash = "sha256:e2cdc979c8c877029b4b7a38a6fba3bc4578e4f109a308419ff4d3ccf0a46619", size = 11577, upload-time = "2025-08-18T16:09:25.047Z" }, +] + [[package]] name = "pywin32" version = "311" @@ -1967,6 +2089,46 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, ] +[[package]] +name = "sqlalchemy" +version = "2.0.45" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/be/f9/5e4491e5ccf42f5d9cfc663741d261b3e6e1683ae7812114e7636409fcc6/sqlalchemy-2.0.45.tar.gz", hash = "sha256:1632a4bda8d2d25703fdad6363058d882541bdaaee0e5e3ddfa0cd3229efce88", size = 9869912, upload-time = "2025-12-09T21:05:16.737Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/c7/1900b56ce19bff1c26f39a4ce427faec7716c81ac792bfac8b6a9f3dca93/sqlalchemy-2.0.45-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3ee2aac15169fb0d45822983631466d60b762085bc4535cd39e66bea362df5f", size = 3333760, upload-time = "2025-12-09T22:11:02.66Z" }, + { url = "https://files.pythonhosted.org/packages/0a/93/3be94d96bb442d0d9a60e55a6bb6e0958dd3457751c6f8502e56ef95fed0/sqlalchemy-2.0.45-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba547ac0b361ab4f1608afbc8432db669bd0819b3e12e29fb5fa9529a8bba81d", size = 3348268, upload-time = "2025-12-09T22:13:49.054Z" }, + { url = "https://files.pythonhosted.org/packages/48/4b/f88ded696e61513595e4a9778f9d3f2bf7332cce4eb0c7cedaabddd6687b/sqlalchemy-2.0.45-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:215f0528b914e5c75ef2559f69dca86878a3beeb0c1be7279d77f18e8d180ed4", size = 3278144, upload-time = "2025-12-09T22:11:04.14Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/6a/310ecb5657221f3e1bd5288ed83aa554923fb5da48d760a9f7622afeb065/sqlalchemy-2.0.45-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:107029bf4f43d076d4011f1afb74f7c3e2ea029ec82eb23d8527d5e909e97aa6", size = 3313907, upload-time = "2025-12-09T22:13:50.598Z" }, + { url = "https://files.pythonhosted.org/packages/5c/39/69c0b4051079addd57c84a5bfb34920d87456dd4c90cf7ee0df6efafc8ff/sqlalchemy-2.0.45-cp312-cp312-win32.whl", hash = "sha256:0c9f6ada57b58420a2c0277ff853abe40b9e9449f8d7d231763c6bc30f5c4953", size = 2112182, upload-time = "2025-12-09T21:39:30.824Z" }, + { url = "https://files.pythonhosted.org/packages/f7/4e/510db49dd89fc3a6e994bee51848c94c48c4a00dc905e8d0133c251f41a7/sqlalchemy-2.0.45-cp312-cp312-win_amd64.whl", hash = "sha256:8defe5737c6d2179c7997242d6473587c3beb52e557f5ef0187277009f73e5e1", size = 2139200, upload-time = "2025-12-09T21:39:32.321Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c8/7cc5221b47a54edc72a0140a1efa56e0a2730eefa4058d7ed0b4c4357ff8/sqlalchemy-2.0.45-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe187fc31a54d7fd90352f34e8c008cf3ad5d064d08fedd3de2e8df83eb4a1cf", size = 3277082, upload-time = "2025-12-09T22:11:06.167Z" }, + { url = "https://files.pythonhosted.org/packages/0e/50/80a8d080ac7d3d321e5e5d420c9a522b0aa770ec7013ea91f9a8b7d36e4a/sqlalchemy-2.0.45-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:672c45cae53ba88e0dad74b9027dddd09ef6f441e927786b05bec75d949fbb2e", size = 3293131, upload-time = "2025-12-09T22:13:52.626Z" }, + { url = "https://files.pythonhosted.org/packages/da/4c/13dab31266fc9904f7609a5dc308a2432a066141d65b857760c3bef97e69/sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:470daea2c1ce73910f08caf10575676a37159a6d16c4da33d0033546bddebc9b", size = 3225389, upload-time = "2025-12-09T22:11:08.093Z" }, + { url = "https://files.pythonhosted.org/packages/74/04/891b5c2e9f83589de202e7abaf24cd4e4fa59e1837d64d528829ad6cc107/sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9c6378449e0940476577047150fd09e242529b761dc887c9808a9a937fe990c8", size = 3266054, upload-time = "2025-12-09T22:13:54.262Z" }, + { url = "https://files.pythonhosted.org/packages/f1/24/fc59e7f71b0948cdd4cff7a286210e86b0443ef1d18a23b0d83b87e4b1f7/sqlalchemy-2.0.45-cp313-cp313-win32.whl", hash = "sha256:4b6bec67ca45bc166c8729910bd2a87f1c0407ee955df110d78948f5b5827e8a", size = 2110299, upload-time = "2025-12-09T21:39:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c5/d17113020b2d43073412aeca09b60d2009442420372123b8d49cc253f8b8/sqlalchemy-2.0.45-cp313-cp313-win_amd64.whl", hash = "sha256:afbf47dc4de31fa38fd491f3705cac5307d21d4bb828a4f020ee59af412744ee", size = 2136264, upload-time = "2025-12-09T21:39:36.801Z" }, + { url = "https://files.pythonhosted.org/packages/3d/8d/bb40a5d10e7a5f2195f235c0b2f2c79b0bf6e8f00c0c223130a4fbd2db09/sqlalchemy-2.0.45-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:83d7009f40ce619d483d26ac1b757dfe3167b39921379a8bd1b596cf02dab4a6", size = 3521998, upload-time = "2025-12-09T22:13:28.622Z" }, + { url = "https://files.pythonhosted.org/packages/75/a5/346128b0464886f036c039ea287b7332a410aa2d3fb0bb5d404cb8861635/sqlalchemy-2.0.45-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d8a2ca754e5415cde2b656c27900b19d50ba076aa05ce66e2207623d3fe41f5a", size = 3473434, upload-time = 
"2025-12-09T22:13:30.188Z" }, + { url = "https://files.pythonhosted.org/packages/cc/64/4e1913772646b060b025d3fc52ce91a58967fe58957df32b455de5a12b4f/sqlalchemy-2.0.45-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f46ec744e7f51275582e6a24326e10c49fbdd3fc99103e01376841213028774", size = 3272404, upload-time = "2025-12-09T22:11:09.662Z" }, + { url = "https://files.pythonhosted.org/packages/b3/27/caf606ee924282fe4747ee4fd454b335a72a6e018f97eab5ff7f28199e16/sqlalchemy-2.0.45-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:883c600c345123c033c2f6caca18def08f1f7f4c3ebeb591a63b6fceffc95cce", size = 3277057, upload-time = "2025-12-09T22:13:56.213Z" }, + { url = "https://files.pythonhosted.org/packages/85/d0/3d64218c9724e91f3d1574d12eb7ff8f19f937643815d8daf792046d88ab/sqlalchemy-2.0.45-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2c0b74aa79e2deade948fe8593654c8ef4228c44ba862bb7c9585c8e0db90f33", size = 3222279, upload-time = "2025-12-09T22:11:11.1Z" }, + { url = "https://files.pythonhosted.org/packages/24/10/dd7688a81c5bc7690c2a3764d55a238c524cd1a5a19487928844cb247695/sqlalchemy-2.0.45-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8a420169cef179d4c9064365f42d779f1e5895ad26ca0c8b4c0233920973db74", size = 3244508, upload-time = "2025-12-09T22:13:57.932Z" }, + { url = "https://files.pythonhosted.org/packages/aa/41/db75756ca49f777e029968d9c9fee338c7907c563267740c6d310a8e3f60/sqlalchemy-2.0.45-cp314-cp314-win32.whl", hash = "sha256:e50dcb81a5dfe4b7b4a4aa8f338116d127cb209559124f3694c70d6cd072b68f", size = 2113204, upload-time = "2025-12-09T21:39:38.365Z" }, + { url = "https://files.pythonhosted.org/packages/89/a2/0e1590e9adb292b1d576dbcf67ff7df8cf55e56e78d2c927686d01080f4b/sqlalchemy-2.0.45-cp314-cp314-win_amd64.whl", hash = "sha256:4748601c8ea959e37e03d13dcda4a44837afcd1b21338e637f7c935b8da06177", size = 2138785, upload-time = "2025-12-09T21:39:39.503Z" }, + { url = "https://files.pythonhosted.org/packages/42/39/f05f0ed54d451156bbed0e23eb0516bcad7cbb9f18b3bf219c786371b3f0/sqlalchemy-2.0.45-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd337d3526ec5298f67d6a30bbbe4ed7e5e68862f0bf6dd21d289f8d37b7d60b", size = 3522029, upload-time = "2025-12-09T22:13:32.09Z" }, + { url = "https://files.pythonhosted.org/packages/54/0f/d15398b98b65c2bce288d5ee3f7d0a81f77ab89d9456994d5c7cc8b2a9db/sqlalchemy-2.0.45-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9a62b446b7d86a3909abbcd1cd3cc550a832f99c2bc37c5b22e1925438b9367b", size = 3475142, upload-time = "2025-12-09T22:13:33.739Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e1/3ccb13c643399d22289c6a9786c1a91e3dcbb68bce4beb44926ac2c557bf/sqlalchemy-2.0.45-py3-none-any.whl", hash = "sha256:5225a288e4c8cc2308dbdd874edad6e7d0fd38eac1e9e5f23503425c8eee20d0", size = 1936672, upload-time = "2025-12-09T21:54:52.608Z" }, +] + +[package.optional-dependencies] +asyncio = [ + { name = "greenlet" }, +] + [[package]] name = "sse-starlette" version = "3.0.4" From 0feb2c03b5a04f39b6590fa334299c21e4d6a54e Mon Sep 17 00:00:00 2001 From: John Sell Date: Tue, 23 Dec 2025 15:13:55 -0500 Subject: [PATCH 02/11] feat(shared-kernel): add authorization abstractions and SpiceDB client MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add authzed library for SpiceDB integration - Add python-ulid for ULID support - Create ResourceType, RelationType, Permission enums 
(using Group not Team) - Create AuthorizationProvider protocol for swappable implementations - Implement SpiceDBClient with async methods for relationships and permissions - Create SpiceDB schema (.zed) with Tenant→Workspace→Group hierarchy - Create AuthorizationProbe for domain-oriented observability - Move ObservationContext to shared_kernel (fix architectural boundary) - Add 35 unit tests for types and probes - All 410 tests passing Refs: AIHCM-122 --- .../default_graph_service_probe.py | 2 +- .../default_schema_service_probe.py | 2 +- .../observability/graph_service_probe.py | 2 +- .../observability/schema_service_probe.py | 2 +- src/api/graph/infrastructure/observability.py | 2 +- .../infrastructure/observability/__init__.py | 2 +- .../infrastructure/observability/probes.py | 2 +- src/api/pyproject.toml | 1 + src/api/query/application/observability.py | 2 +- .../shared_kernel/authorization/__init__.py | 21 ++ .../authorization/observability/__init__.py | 11 + .../observability/authorization_probe.py | 246 ++++++++++++++ .../shared_kernel/authorization/protocols.py | 112 +++++++ .../authorization/spicedb/__init__.py | 19 ++ .../authorization/spicedb/client.py | 313 ++++++++++++++++++ .../authorization/spicedb/exceptions.py | 19 ++ .../authorization/spicedb/schema.zed | 106 ++++++ src/api/shared_kernel/authorization/types.py | 80 +++++ .../observability_context.py} | 0 .../graph/test_application_observability.py | 6 +- .../shared_kernel/authorization/__init__.py | 1 + .../authorization/test_authorization_probe.py | 170 ++++++++++ .../shared_kernel/authorization/test_types.py | 146 ++++++++ src/api/uv.lock | 232 +++++++++++++ 24 files changed, 1488 insertions(+), 11 deletions(-) create mode 100644 src/api/shared_kernel/authorization/__init__.py create mode 100644 src/api/shared_kernel/authorization/observability/__init__.py create mode 100644 src/api/shared_kernel/authorization/observability/authorization_probe.py create mode 100644 src/api/shared_kernel/authorization/protocols.py create mode 100644 src/api/shared_kernel/authorization/spicedb/__init__.py create mode 100644 src/api/shared_kernel/authorization/spicedb/client.py create mode 100644 src/api/shared_kernel/authorization/spicedb/exceptions.py create mode 100644 src/api/shared_kernel/authorization/spicedb/schema.zed create mode 100644 src/api/shared_kernel/authorization/types.py rename src/api/{infrastructure/observability/context.py => shared_kernel/observability_context.py} (100%) create mode 100644 src/api/tests/unit/shared_kernel/authorization/__init__.py create mode 100644 src/api/tests/unit/shared_kernel/authorization/test_authorization_probe.py create mode 100644 src/api/tests/unit/shared_kernel/authorization/test_types.py diff --git a/src/api/graph/application/observability/default_graph_service_probe.py b/src/api/graph/application/observability/default_graph_service_probe.py index c1533c4b..64e8f0ef 100644 --- a/src/api/graph/application/observability/default_graph_service_probe.py +++ b/src/api/graph/application/observability/default_graph_service_probe.py @@ -13,7 +13,7 @@ from graph.application.observability.graph_service_probe import GraphServiceProbe if TYPE_CHECKING: - from infrastructure.observability.context import ObservationContext + from shared_kernel.observability_context import ObservationContext class DefaultGraphServiceProbe(GraphServiceProbe): diff --git a/src/api/graph/application/observability/default_schema_service_probe.py b/src/api/graph/application/observability/default_schema_service_probe.py index 
2b423e0a..8fd036b7 100644 --- a/src/api/graph/application/observability/default_schema_service_probe.py +++ b/src/api/graph/application/observability/default_schema_service_probe.py @@ -12,7 +12,7 @@ from graph.application.observability.schema_service_probe import SchemaServiceProbe if TYPE_CHECKING: - from infrastructure.observability.context import ObservationContext + from shared_kernel.observability_context import ObservationContext class DefaultSchemaServiceProbe(SchemaServiceProbe): diff --git a/src/api/graph/application/observability/graph_service_probe.py b/src/api/graph/application/observability/graph_service_probe.py index bec4d8e7..250d7f31 100644 --- a/src/api/graph/application/observability/graph_service_probe.py +++ b/src/api/graph/application/observability/graph_service_probe.py @@ -9,7 +9,7 @@ from typing import TYPE_CHECKING, Protocol if TYPE_CHECKING: - from infrastructure.observability.context import ObservationContext + from shared_kernel.observability_context import ObservationContext class GraphServiceProbe(Protocol): diff --git a/src/api/graph/application/observability/schema_service_probe.py b/src/api/graph/application/observability/schema_service_probe.py index 3768fbab..6ed3f592 100644 --- a/src/api/graph/application/observability/schema_service_probe.py +++ b/src/api/graph/application/observability/schema_service_probe.py @@ -9,7 +9,7 @@ from typing import TYPE_CHECKING, Protocol if TYPE_CHECKING: - from infrastructure.observability.context import ObservationContext + from shared_kernel.observability_context import ObservationContext class SchemaServiceProbe(Protocol): diff --git a/src/api/graph/infrastructure/observability.py b/src/api/graph/infrastructure/observability.py index 432f7bc1..98cb144e 100644 --- a/src/api/graph/infrastructure/observability.py +++ b/src/api/graph/infrastructure/observability.py @@ -11,7 +11,7 @@ import structlog if TYPE_CHECKING: - from infrastructure.observability.context import ObservationContext + from shared_kernel.observability_context import ObservationContext class GraphClientProbe(Protocol): diff --git a/src/api/infrastructure/observability/__init__.py b/src/api/infrastructure/observability/__init__.py index eeb38627..84179d07 100644 --- a/src/api/infrastructure/observability/__init__.py +++ b/src/api/infrastructure/observability/__init__.py @@ -7,7 +7,7 @@ See: https://martinfowler.com/articles/domain-oriented-observability.html """ -from infrastructure.observability.context import ObservationContext +from shared_kernel.observability_context import ObservationContext from infrastructure.observability.probes import ( ConnectionProbe, DefaultConnectionProbe, diff --git a/src/api/infrastructure/observability/probes.py b/src/api/infrastructure/observability/probes.py index 9808c999..e14e861e 100644 --- a/src/api/infrastructure/observability/probes.py +++ b/src/api/infrastructure/observability/probes.py @@ -11,7 +11,7 @@ import structlog if TYPE_CHECKING: - from infrastructure.observability.context import ObservationContext + from shared_kernel.observability_context import ObservationContext class ConnectionProbe(Protocol): diff --git a/src/api/pyproject.toml b/src/api/pyproject.toml index 44c8775a..58d4fae9 100644 --- a/src/api/pyproject.toml +++ b/src/api/pyproject.toml @@ -8,6 +8,7 @@ dependencies = [ "alembic>=1.17.2", "apache-age-python>=0.0.7", "asyncpg>=0.31.0", + "authzed>=1.24.0", "cyclopts==5.0.0a1", "fastapi[standard]>=0.123.9", "fastmcp==2.14.1", diff --git a/src/api/query/application/observability.py 
b/src/api/query/application/observability.py index 0181882f..07dd18de 100644 --- a/src/api/query/application/observability.py +++ b/src/api/query/application/observability.py @@ -10,7 +10,7 @@ import structlog if TYPE_CHECKING: - from infrastructure.observability.context import ObservationContext + from shared_kernel.observability_context import ObservationContext class QueryServiceProbe(Protocol): diff --git a/src/api/shared_kernel/authorization/__init__.py b/src/api/shared_kernel/authorization/__init__.py new file mode 100644 index 00000000..db586b55 --- /dev/null +++ b/src/api/shared_kernel/authorization/__init__.py @@ -0,0 +1,21 @@ +"""Authorization primitives for fine-grained access control. + +This module provides shared authorization types and abstractions used across +bounded contexts for SpiceDB integration. +""" + +from shared_kernel.authorization.types import ( + Permission, + RelationType, + ResourceType, + format_resource, + format_subject, +) + +__all__ = [ + "ResourceType", + "RelationType", + "Permission", + "format_resource", + "format_subject", +] diff --git a/src/api/shared_kernel/authorization/observability/__init__.py b/src/api/shared_kernel/authorization/observability/__init__.py new file mode 100644 index 00000000..98d0626f --- /dev/null +++ b/src/api/shared_kernel/authorization/observability/__init__.py @@ -0,0 +1,11 @@ +"""Observability for authorization operations.""" + +from shared_kernel.authorization.observability.authorization_probe import ( + AuthorizationProbe, + DefaultAuthorizationProbe, +) + +__all__ = [ + "AuthorizationProbe", + "DefaultAuthorizationProbe", +] diff --git a/src/api/shared_kernel/authorization/observability/authorization_probe.py b/src/api/shared_kernel/authorization/observability/authorization_probe.py new file mode 100644 index 00000000..e78ad14f --- /dev/null +++ b/src/api/shared_kernel/authorization/observability/authorization_probe.py @@ -0,0 +1,246 @@ +"""Domain probe for authorization operations. + +Following Domain-Oriented Observability patterns, this probe captures +domain-significant events related to authorization checks and relationship writes. +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Protocol + +import structlog + +if TYPE_CHECKING: + from shared_kernel.observability_context import ObservationContext + + +class AuthorizationProbe(Protocol): + """Domain probe for authorization operations.""" + + def relationship_written( + self, + resource: str, + relation: str, + subject: str, + ) -> None: + """Record that a relationship was written to the authorization system.""" + ... + + def relationship_write_failed( + self, + resource: str, + relation: str, + subject: str, + error: Exception, + ) -> None: + """Record that writing a relationship failed.""" + ... + + def permission_checked( + self, + resource: str, + permission: str, + subject: str, + granted: bool, + ) -> None: + """Record that a permission was checked.""" + ... + + def permission_check_failed( + self, + resource: str, + permission: str, + subject: str, + error: Exception, + ) -> None: + """Record that checking a permission failed.""" + ... + + def bulk_check_completed( + self, + total_requests: int, + permitted_count: int, + ) -> None: + """Record that a bulk permission check completed.""" + ... + + def relationship_deleted( + self, + resource: str, + relation: str, + subject: str, + ) -> None: + """Record that a relationship was deleted.""" + ... 
+ + def relationship_delete_failed( + self, + resource: str, + relation: str, + subject: str, + error: Exception, + ) -> None: + """Record that deleting a relationship failed.""" + ... + + def connection_failed( + self, + endpoint: str, + error: Exception, + ) -> None: + """Record that connection to authorization system failed.""" + ... + + def with_context(self, context: ObservationContext) -> AuthorizationProbe: + """Create a new probe with observation context bound.""" + ... + + +class DefaultAuthorizationProbe: + """Default implementation of AuthorizationProbe using structlog.""" + + def __init__( + self, + logger: structlog.stdlib.BoundLogger | None = None, + context: ObservationContext | None = None, + ): + self._logger = logger or structlog.get_logger() + self._context = context + + def _get_context_kwargs(self) -> dict[str, Any]: + """Get context metadata as kwargs for logging.""" + if self._context is None: + return {} + return self._context.as_dict() + + def with_context(self, context: ObservationContext) -> DefaultAuthorizationProbe: + """Create a new probe with observation context bound.""" + return DefaultAuthorizationProbe(logger=self._logger, context=context) + + def relationship_written( + self, + resource: str, + relation: str, + subject: str, + ) -> None: + """Record that a relationship was written.""" + self._logger.info( + "authorization_relationship_written", + resource=resource, + relation=relation, + subject=subject, + **self._get_context_kwargs(), + ) + + def relationship_write_failed( + self, + resource: str, + relation: str, + subject: str, + error: Exception, + ) -> None: + """Record that writing a relationship failed.""" + self._logger.error( + "authorization_relationship_write_failed", + resource=resource, + relation=relation, + subject=subject, + error=str(error), + error_type=type(error).__name__, + **self._get_context_kwargs(), + ) + + def permission_checked( + self, + resource: str, + permission: str, + subject: str, + granted: bool, + ) -> None: + """Record that a permission was checked.""" + self._logger.debug( + "authorization_permission_checked", + resource=resource, + permission=permission, + subject=subject, + granted=granted, + **self._get_context_kwargs(), + ) + + def permission_check_failed( + self, + resource: str, + permission: str, + subject: str, + error: Exception, + ) -> None: + """Record that checking a permission failed.""" + self._logger.error( + "authorization_permission_check_failed", + resource=resource, + permission=permission, + subject=subject, + error=str(error), + error_type=type(error).__name__, + **self._get_context_kwargs(), + ) + + def bulk_check_completed( + self, + total_requests: int, + permitted_count: int, + ) -> None: + """Record that a bulk permission check completed.""" + self._logger.info( + "authorization_bulk_check_completed", + total_requests=total_requests, + permitted_count=permitted_count, + **self._get_context_kwargs(), + ) + + def relationship_deleted( + self, + resource: str, + relation: str, + subject: str, + ) -> None: + """Record that a relationship was deleted.""" + self._logger.info( + "authorization_relationship_deleted", + resource=resource, + relation=relation, + subject=subject, + **self._get_context_kwargs(), + ) + + def relationship_delete_failed( + self, + resource: str, + relation: str, + subject: str, + error: Exception, + ) -> None: + """Record that deleting a relationship failed.""" + self._logger.error( + "authorization_relationship_delete_failed", + resource=resource, + relation=relation, + 
subject=subject, + error=str(error), + error_type=type(error).__name__, + **self._get_context_kwargs(), + ) + + def connection_failed( + self, + endpoint: str, + error: Exception, + ) -> None: + """Record that connection to authorization system failed.""" + self._logger.error( + "authorization_connection_failed", + endpoint=endpoint, + error=str(error), + error_type=type(error).__name__, + **self._get_context_kwargs(), + ) diff --git a/src/api/shared_kernel/authorization/protocols.py b/src/api/shared_kernel/authorization/protocols.py new file mode 100644 index 00000000..37b193bc --- /dev/null +++ b/src/api/shared_kernel/authorization/protocols.py @@ -0,0 +1,112 @@ +"""Authorization provider protocol for SpiceDB abstraction. + +Defines the interface for authorization providers, allowing for swappable +implementations (SpiceDB, mock, alternative providers). +""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Protocol + + +@dataclass(frozen=True) +class CheckRequest: + """A single permission check request for bulk operations. + + Attributes: + resource: Resource identifier (e.g., "group:abc123") + permission: Permission to check (e.g., "view", "edit") + subject: Subject identifier (e.g., "user:alice") + """ + + resource: str + permission: str + subject: str + + +class AuthorizationProvider(Protocol): + """Protocol for authorization providers. + + Implementations must provide methods for writing relationships, + checking permissions, and bulk permission checks. The primary + implementation is SpiceDBClient, but this protocol allows for + mocking in tests or alternative implementations. + """ + + async def write_relationship( + self, + resource: str, + relation: str, + subject: str, + ) -> None: + """Write a relationship to the authorization system. + + Args: + resource: Resource identifier (e.g., "group:abc123") + relation: Relation name (e.g., "member", "owner") + subject: Subject identifier (e.g., "user:alice") + + Raises: + AuthorizationError: If the write fails + """ + ... + + async def check_permission( + self, + resource: str, + permission: str, + subject: str, + ) -> bool: + """Check if a subject has permission on a resource. + + Args: + resource: Resource identifier (e.g., "group:abc123") + permission: Permission to check (e.g., "view", "edit") + subject: Subject identifier (e.g., "user:alice") + + Returns: + True if permission is granted, False otherwise + + Raises: + AuthorizationError: If the check fails + """ + ... + + async def bulk_check_permission( + self, + requests: list[CheckRequest], + ) -> set[str]: + """Bulk check permissions for multiple resources. + + This is more efficient than individual checks for post-filtering + query results. Returns the set of resource IDs that passed the check. + + Args: + requests: List of permission check requests + + Returns: + Set of resource identifiers that passed permission checks + + Raises: + AuthorizationError: If the bulk check fails + """ + ... + + async def delete_relationship( + self, + resource: str, + relation: str, + subject: str, + ) -> None: + """Delete a relationship from the authorization system. + + Args: + resource: Resource identifier (e.g., "group:abc123") + relation: Relation name (e.g., "member", "owner") + subject: Subject identifier (e.g., "user:alice") + + Raises: + AuthorizationError: If the delete fails + """ + ...
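
(Illustrative sketch, not part of the patch: one way a caller might satisfy and use the AuthorizationProvider protocol above. InMemoryProvider, its naive grant rule, and the identifiers are hypothetical; only CheckRequest and the protocol itself come from protocols.py.)

import asyncio

from shared_kernel.authorization.protocols import AuthorizationProvider, CheckRequest


class InMemoryProvider:
    """Hypothetical test double: stores (resource, relation, subject) tuples."""

    def __init__(self) -> None:
        self._relationships: set[tuple[str, str, str]] = set()

    async def write_relationship(self, resource: str, relation: str, subject: str) -> None:
        self._relationships.add((resource, relation, subject))

    async def check_permission(self, resource: str, permission: str, subject: str) -> bool:
        # Naive rule for this sketch only: any stored relation grants any permission.
        return any(r == resource and s == subject for (r, _, s) in self._relationships)

    async def bulk_check_permission(self, requests: list[CheckRequest]) -> set[str]:
        permitted: set[str] = set()
        for req in requests:
            if await self.check_permission(req.resource, req.permission, req.subject):
                permitted.add(req.resource)
        return permitted

    async def delete_relationship(self, resource: str, relation: str, subject: str) -> None:
        self._relationships.discard((resource, relation, subject))


async def main() -> None:
    # Structural typing: no inheritance needed to satisfy the Protocol.
    provider: AuthorizationProvider = InMemoryProvider()
    await provider.write_relationship("group:abc123", "member", "user:alice")

    # Post-filter a candidate result set, as the bulk_check_permission docstring describes.
    visible = await provider.bulk_check_permission(
        [
            CheckRequest(resource="group:abc123", permission="view", subject="user:alice"),
            CheckRequest(resource="group:def456", permission="view", subject="user:alice"),
        ]
    )
    assert visible == {"group:abc123"}


asyncio.run(main())

Because AuthorizationProvider is a typing.Protocol, SpiceDBClient and a test double like this are interchangeable without a shared base class.
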
diff --git a/src/api/shared_kernel/authorization/spicedb/__init__.py b/src/api/shared_kernel/authorization/spicedb/__init__.py new file mode 100644 index 00000000..98e6abad --- /dev/null +++ b/src/api/shared_kernel/authorization/spicedb/__init__.py @@ -0,0 +1,19 @@ +"""SpiceDB client implementation for authorization. + +This module provides the SpiceDB client that implements the AuthorizationProvider +protocol for fine-grained access control. +""" + +from shared_kernel.authorization.spicedb.client import SpiceDBClient +from shared_kernel.authorization.spicedb.exceptions import ( + AuthorizationError, + SpiceDBConnectionError, + SpiceDBPermissionError, +) + +__all__ = [ + "SpiceDBClient", + "AuthorizationError", + "SpiceDBConnectionError", + "SpiceDBPermissionError", +] diff --git a/src/api/shared_kernel/authorization/spicedb/client.py b/src/api/shared_kernel/authorization/spicedb/client.py new file mode 100644 index 00000000..b9557162 --- /dev/null +++ b/src/api/shared_kernel/authorization/spicedb/client.py @@ -0,0 +1,313 @@ +"""SpiceDB client implementation for authorization. + +Provides async SpiceDB client wrapping the authzed library with proper +error handling and type safety. +""" + +from __future__ import annotations + +from authzed.api.v1 import ( + CheckPermissionRequest, + Consistency, + ObjectReference, + Relationship, + RelationshipUpdate, + SubjectReference, + WriteRelationshipsRequest, +) +from authzed.api.v1.permission_service_pb2 import CheckPermissionResponse +from grpcutil import bearer_token_credentials + +from shared_kernel.authorization.observability import ( + AuthorizationProbe, + DefaultAuthorizationProbe, +) +from shared_kernel.authorization.protocols import CheckRequest +from shared_kernel.authorization.spicedb.exceptions import ( + SpiceDBConnectionError, + SpiceDBPermissionError, +) + + +class SpiceDBClient: + """SpiceDB client implementation of AuthorizationProvider protocol. + + This client provides async methods for writing relationships, checking + permissions, and bulk permission checks against a SpiceDB instance. + """ + + def __init__( + self, + endpoint: str, + preshared_key: str, + probe: AuthorizationProbe | None = None, + ): + """Initialize SpiceDB client. + + Args: + endpoint: SpiceDB gRPC endpoint (e.g., "localhost:50051") + preshared_key: Pre-shared key for authentication + probe: Optional domain probe for observability + """ + self._endpoint = endpoint + self._preshared_key = preshared_key + self._client = None + self._probe = probe or DefaultAuthorizationProbe() + + async def _ensure_client(self): + """Lazily initialize the gRPC client.""" + if self._client is None: + try: + from authzed.api.v1 import Client + + # Create credentials with preshared key + credentials = bearer_token_credentials(self._preshared_key) + + # Initialize client + self._client = Client( + self._endpoint, + credentials, + ) + except Exception as e: + self._probe.connection_failed( + endpoint=self._endpoint, + error=e, + ) + raise SpiceDBConnectionError( + f"Failed to connect to SpiceDB at {self._endpoint}: {e}" + ) from e + + async def write_relationship( + self, + resource: str, + relation: str, + subject: str, + ) -> None: + """Write a relationship to SpiceDB. 
+ + Args: + resource: Resource identifier (e.g., "group:abc123") + relation: Relation name (e.g., "member", "owner") + subject: Subject identifier (e.g., "user:alice") + + Raises: + SpiceDBPermissionError: If the write fails + """ + await self._ensure_client() + assert self._client is not None # For mypy + + # Parse resource and subject + resource_type, resource_id = resource.split(":", 1) + subject_type, subject_id = subject.split(":", 1) + + try: + # Create relationship update + relationship = Relationship( + resource=ObjectReference( + object_type=resource_type, + object_id=resource_id, + ), + relation=relation, + subject=SubjectReference( + object=ObjectReference( + object_type=subject_type, + object_id=subject_id, + ), + ), + ) + + update = RelationshipUpdate( + operation=RelationshipUpdate.OPERATION_TOUCH, + relationship=relationship, + ) + + request = WriteRelationshipsRequest(updates=[update]) + + await self._client.WriteRelationships(request) + + self._probe.relationship_written( + resource=resource, + relation=relation, + subject=subject, + ) + + except Exception as e: + self._probe.relationship_write_failed( + resource=resource, + relation=relation, + subject=subject, + error=e, + ) + raise SpiceDBPermissionError( + f"Failed to write relationship: {resource} {relation} {subject}" + ) from e + + async def check_permission( + self, + resource: str, + permission: str, + subject: str, + ) -> bool: + """Check if a subject has permission on a resource. + + Args: + resource: Resource identifier (e.g., "group:abc123") + permission: Permission to check (e.g., "view", "edit") + subject: Subject identifier (e.g., "user:alice") + + Returns: + True if permission is granted, False otherwise + + Raises: + SpiceDBPermissionError: If the check fails + """ + await self._ensure_client() + assert self._client is not None # For mypy + + # Parse resource and subject + resource_type, resource_id = resource.split(":", 1) + subject_type, subject_id = subject.split(":", 1) + + try: + request = CheckPermissionRequest( + consistency=Consistency(fully_consistent=True), + resource=ObjectReference( + object_type=resource_type, + object_id=resource_id, + ), + permission=permission, + subject=SubjectReference( + object=ObjectReference( + object_type=subject_type, + object_id=subject_id, + ), + ), + ) + + response = await self._client.CheckPermission(request) + + has_permission = ( + response.permissionship + == CheckPermissionResponse.PERMISSIONSHIP_HAS_PERMISSION + ) + + self._probe.permission_checked( + resource=resource, + permission=permission, + subject=subject, + granted=has_permission, + ) + + return has_permission + + except Exception as e: + self._probe.permission_check_failed( + resource=resource, + permission=permission, + subject=subject, + error=e, + ) + raise SpiceDBPermissionError( + f"Failed to check permission: {resource} {permission} {subject}" + ) from e + + async def bulk_check_permission( + self, + requests: list[CheckRequest], + ) -> set[str]: + """Bulk check permissions for multiple resources. + + For now, this is implemented as sequential checks. Future optimization + could use SpiceDB's BulkCheckPermission API when available. 
+ + Args: + requests: List of permission check requests + + Returns: + Set of resource identifiers that passed permission checks + + Raises: + SpiceDBPermissionError: If any check fails + """ + permitted_resources = set() + + for req in requests: + has_permission = await self.check_permission( + resource=req.resource, + permission=req.permission, + subject=req.subject, + ) + + if has_permission: + permitted_resources.add(req.resource) + + self._probe.bulk_check_completed( + total_requests=len(requests), + permitted_count=len(permitted_resources), + ) + + return permitted_resources + + async def delete_relationship( + self, + resource: str, + relation: str, + subject: str, + ) -> None: + """Delete a relationship from SpiceDB. + + Args: + resource: Resource identifier (e.g., "group:abc123") + relation: Relation name (e.g., "member", "owner") + subject: Subject identifier (e.g., "user:alice") + + Raises: + SpiceDBPermissionError: If the delete fails + """ + await self._ensure_client() + assert self._client is not None # For mypy + + # Parse resource and subject + resource_type, resource_id = resource.split(":", 1) + subject_type, subject_id = subject.split(":", 1) + + try: + relationship = Relationship( + resource=ObjectReference( + object_type=resource_type, + object_id=resource_id, + ), + relation=relation, + subject=SubjectReference( + object=ObjectReference( + object_type=subject_type, + object_id=subject_id, + ), + ), + ) + + update = RelationshipUpdate( + operation=RelationshipUpdate.OPERATION_DELETE, + relationship=relationship, + ) + + request = WriteRelationshipsRequest(updates=[update]) + + await self._client.WriteRelationships(request) + + self._probe.relationship_deleted( + resource=resource, + relation=relation, + subject=subject, + ) + + except Exception as e: + self._probe.relationship_delete_failed( + resource=resource, + relation=relation, + subject=subject, + error=e, + ) + raise SpiceDBPermissionError( + f"Failed to delete relationship: {resource} {relation} {subject}" + ) from e diff --git a/src/api/shared_kernel/authorization/spicedb/exceptions.py b/src/api/shared_kernel/authorization/spicedb/exceptions.py new file mode 100644 index 00000000..fab34181 --- /dev/null +++ b/src/api/shared_kernel/authorization/spicedb/exceptions.py @@ -0,0 +1,19 @@ +"""Exceptions for SpiceDB authorization operations.""" + + +class AuthorizationError(Exception): + """Base exception for authorization errors.""" + + pass + + +class SpiceDBConnectionError(AuthorizationError): + """Raised when connection to SpiceDB fails.""" + + pass + + +class SpiceDBPermissionError(AuthorizationError): + """Raised when a permission check or write operation fails.""" + + pass diff --git a/src/api/shared_kernel/authorization/spicedb/schema.zed b/src/api/shared_kernel/authorization/spicedb/schema.zed new file mode 100644 index 00000000..235e3f2d --- /dev/null +++ b/src/api/shared_kernel/authorization/spicedb/schema.zed @@ -0,0 +1,106 @@ +/** + * Kartograph Authorization Schema for SpiceDB + * + * This schema defines the authorization model for Kartograph using + * Relationship-Based Access Control (ReBAC). The hierarchy is: + * + * Tenant → Workspace → Group → Users + * + * Design principles: + * - Groups (not Teams) for Red Hat terminology alignment + * - Workspaces provide resource organization within tenants + * - Groups can be owned by users or other groups (for shared ownership) + * - Knowledge graphs and data sources (future) belong to workspaces + */ + +/** + * User represents a person in the system. 
+ * Users are provisioned from SSO (Red Hat SSO). + */ +definition user {} + +/** + * Group represents a collection of users working together. + * This aligns with Red Hat terminology (vs "team"). + * Groups own workspaces and can have members. + */ +definition group { + /** Users who are members of this group */ + relation member: user + + /** Users who can administer this group (add/remove members) */ + relation admin: user + + /** Permission to view group details */ + permission view = member + admin + + /** Permission to manage group membership */ + permission manage = admin +} + +/** + * Workspace provides resource organization within a tenant. + * Workspaces can be owned by users (personal) or groups (shared). + * Each tenant has at least one root workspace. + */ +definition workspace { + /** The parent tenant this workspace belongs to */ + relation parent: tenant + + /** Users or groups who own this workspace */ + relation owner: user | group#member + + /** Users or groups who are members of this workspace */ + relation member: user | group#member + + /** Permission to view workspace and its resources */ + permission view = owner + member + + /** Permission to edit workspace configuration */ + permission edit = owner + + /** Permission to delete workspace */ + permission delete = owner +} + +/** + * Tenant is the top-level organization boundary. + * Maps 1:1 with data isolation (separate graph database). + * Each tenant must have a root workspace. + */ +definition tenant { + /** Users who can administer this tenant */ + relation admin: user + + /** The mandatory root workspace for this tenant */ + relation root_workspace: workspace + + /** Permission to view tenant */ + permission view = admin + + /** Permission to manage tenant configuration */ + permission manage = admin +} + +/** + * Future resource types (not yet implemented): + * + * definition knowledge_graph { + * relation workspace: workspace + * relation viewer: user | group#member + * relation editor: user | group#member + * + * permission view = viewer + editor + workspace->member + * permission edit = editor + workspace->owner + * permission delete = workspace->owner + * } + * + * definition data_source { + * relation knowledge_graph: knowledge_graph + * relation owner: user | group#member + * + * permission view = owner + knowledge_graph->view + * permission edit = owner + knowledge_graph->edit + * permission delete = owner + knowledge_graph->delete + * } + */ diff --git a/src/api/shared_kernel/authorization/types.py b/src/api/shared_kernel/authorization/types.py new file mode 100644 index 00000000..449c5282 --- /dev/null +++ b/src/api/shared_kernel/authorization/types.py @@ -0,0 +1,80 @@ +"""Authorization type definitions for SpiceDB. + +Defines resource types, relations, and permissions that map to the SpiceDB schema. +These enums ensure type safety and prevent hardcoded strings across the codebase. +""" + +from enum import StrEnum + + +class ResourceType(StrEnum): + """SpiceDB resource types matching schema definitions. + + Each value corresponds to a `definition` in the SpiceDB schema (.zed file). + """ + + USER = "user" + GROUP = "group" + WORKSPACE = "workspace" + TENANT = "tenant" + # Future: KNOWLEDGE_GRAPH, DATA_SOURCE, etc. + + +class RelationType(StrEnum): + """SpiceDB relations matching schema relations. + + Each value corresponds to a `relation` in the SpiceDB schema definitions. 
+ """ + + MEMBER = "member" + OWNER = "owner" + ADMIN = "admin" + PARENT = "parent" + WORKSPACE = "workspace" + ROOT_WORKSPACE = "root_workspace" + + +class Permission(StrEnum): + """SpiceDB permissions matching schema permissions. + + Each value corresponds to a `permission` in the SpiceDB schema definitions. + """ + + VIEW = "view" + EDIT = "edit" + DELETE = "delete" + MANAGE = "manage" + + +def format_resource(resource_type: ResourceType, resource_id: str) -> str: + """Format a resource identifier for SpiceDB. + + Args: + resource_type: The type of resource + resource_id: The unique identifier for the resource + + Returns: + Formatted resource string (e.g., "team:abc123") + + Example: + >>> format_resource(ResourceType.TEAM, "abc123") + "team:abc123" + """ + return f"{resource_type}:{resource_id}" + + +def format_subject(subject_type: ResourceType, subject_id: str) -> str: + """Format a subject identifier for SpiceDB. + + Args: + subject_type: The type of subject (usually USER or GROUP) + subject_id: The unique identifier for the subject + + Returns: + Formatted subject string (e.g., "user:alice") + + Example: + >>> format_subject(ResourceType.USER, "alice") + "user:alice" + """ + return f"{subject_type}:{subject_id}" diff --git a/src/api/infrastructure/observability/context.py b/src/api/shared_kernel/observability_context.py similarity index 100% rename from src/api/infrastructure/observability/context.py rename to src/api/shared_kernel/observability_context.py diff --git a/src/api/tests/unit/graph/test_application_observability.py b/src/api/tests/unit/graph/test_application_observability.py index 9e1aa843..b0dcb452 100644 --- a/src/api/tests/unit/graph/test_application_observability.py +++ b/src/api/tests/unit/graph/test_application_observability.py @@ -96,7 +96,7 @@ class TestWithContext: def test_with_context_creates_new_probe(self): """with_context should create a new probe with context bound.""" - from infrastructure.observability.context import ObservationContext + from shared_kernel.observability_context import ObservationContext probe = DefaultGraphServiceProbe() context = ObservationContext(request_id="req-123", graph_name="test_graph") @@ -108,7 +108,7 @@ def test_with_context_creates_new_probe(self): def test_with_context_preserves_logger(self): """with_context should preserve the original logger.""" - from infrastructure.observability.context import ObservationContext + from shared_kernel.observability_context import ObservationContext mock_logger = MagicMock(spec=structlog.stdlib.BoundLogger) probe = DefaultGraphServiceProbe(logger=mock_logger) @@ -120,7 +120,7 @@ def test_with_context_preserves_logger(self): def test_context_included_in_log_calls(self): """Bound context should be included in log calls.""" - from infrastructure.observability.context import ObservationContext + from shared_kernel.observability_context import ObservationContext mock_logger = MagicMock(spec=structlog.stdlib.BoundLogger) probe = DefaultGraphServiceProbe(logger=mock_logger) diff --git a/src/api/tests/unit/shared_kernel/authorization/__init__.py b/src/api/tests/unit/shared_kernel/authorization/__init__.py new file mode 100644 index 00000000..b9f568ba --- /dev/null +++ b/src/api/tests/unit/shared_kernel/authorization/__init__.py @@ -0,0 +1 @@ +"""Unit tests for shared kernel authorization module.""" diff --git a/src/api/tests/unit/shared_kernel/authorization/test_authorization_probe.py b/src/api/tests/unit/shared_kernel/authorization/test_authorization_probe.py new file mode 100644 index 
00000000..f51ca04b --- /dev/null +++ b/src/api/tests/unit/shared_kernel/authorization/test_authorization_probe.py @@ -0,0 +1,170 @@ +"""Unit tests for authorization domain probe.""" + +from unittest.mock import Mock + + +from shared_kernel.authorization.observability import ( + DefaultAuthorizationProbe, +) + + +class TestDefaultAuthorizationProbe: + """Tests for DefaultAuthorizationProbe.""" + + def test_creates_with_default_logger(self): + """Test that probe can be created without providing a logger.""" + probe = DefaultAuthorizationProbe() + assert probe._logger is not None + + def test_accepts_custom_logger(self): + """Test that probe accepts a custom logger.""" + custom_logger = Mock() + probe = DefaultAuthorizationProbe(logger=custom_logger) + assert probe._logger is custom_logger + + +class TestRelationshipWritten: + """Tests for relationship_written probe method.""" + + def test_logs_with_correct_parameters(self): + """Test that relationship written event is logged correctly.""" + mock_logger = Mock() + probe = DefaultAuthorizationProbe(logger=mock_logger) + + probe.relationship_written( + resource="group:abc123", + relation="member", + subject="user:alice", + ) + + mock_logger.info.assert_called_once() + call_args = mock_logger.info.call_args + assert call_args[0][0] == "authorization_relationship_written" + assert call_args[1]["resource"] == "group:abc123" + assert call_args[1]["relation"] == "member" + assert call_args[1]["subject"] == "user:alice" + + +class TestRelationshipWriteFailed: + """Tests for relationship_write_failed probe method.""" + + def test_logs_error_with_details(self): + """Test that write failures are logged with error details.""" + mock_logger = Mock() + probe = DefaultAuthorizationProbe(logger=mock_logger) + error = ValueError("Connection refused") + + probe.relationship_write_failed( + resource="group:abc123", + relation="member", + subject="user:alice", + error=error, + ) + + mock_logger.error.assert_called_once() + call_args = mock_logger.error.call_args + assert call_args[0][0] == "authorization_relationship_write_failed" + assert call_args[1]["error"] == "Connection refused" + assert call_args[1]["error_type"] == "ValueError" + + +class TestPermissionChecked: + """Tests for permission_checked probe method.""" + + def test_logs_granted_permission(self): + """Test that granted permissions are logged.""" + mock_logger = Mock() + probe = DefaultAuthorizationProbe(logger=mock_logger) + + probe.permission_checked( + resource="group:abc123", + permission="view", + subject="user:alice", + granted=True, + ) + + mock_logger.debug.assert_called_once() + call_args = mock_logger.debug.call_args + assert call_args[0][0] == "authorization_permission_checked" + assert call_args[1]["granted"] is True + + def test_logs_denied_permission(self): + """Test that denied permissions are logged.""" + mock_logger = Mock() + probe = DefaultAuthorizationProbe(logger=mock_logger) + + probe.permission_checked( + resource="group:abc123", + permission="delete", + subject="user:alice", + granted=False, + ) + + mock_logger.debug.assert_called_once() + call_args = mock_logger.debug.call_args + assert call_args[1]["granted"] is False + + +class TestBulkCheckCompleted: + """Tests for bulk_check_completed probe method.""" + + def test_logs_bulk_check_statistics(self): + """Test that bulk check statistics are logged.""" + mock_logger = Mock() + probe = DefaultAuthorizationProbe(logger=mock_logger) + + probe.bulk_check_completed( + total_requests=10, + permitted_count=7, + ) + + 
mock_logger.info.assert_called_once() + call_args = mock_logger.info.call_args + assert call_args[0][0] == "authorization_bulk_check_completed" + assert call_args[1]["total_requests"] == 10 + assert call_args[1]["permitted_count"] == 7 + + +class TestWithContext: + """Tests for with_context method.""" + + def test_with_context_creates_new_probe(self): + """Test that with_context creates a new probe instance.""" + probe = DefaultAuthorizationProbe() + mock_context = Mock() + + new_probe = probe.with_context(mock_context) + + assert new_probe is not probe + assert new_probe._context is mock_context + + def test_with_context_preserves_logger(self): + """Test that with_context preserves the original logger.""" + custom_logger = Mock() + probe = DefaultAuthorizationProbe(logger=custom_logger) + mock_context = Mock() + + new_probe = probe.with_context(mock_context) + + assert new_probe._logger is custom_logger + + def test_context_included_in_log_calls(self): + """Test that observation context is included in log output.""" + mock_logger = Mock() + mock_context = Mock() + mock_context.as_dict.return_value = { + "request_id": "req-123", + "user_id": "user:alice", + } + + probe = DefaultAuthorizationProbe(logger=mock_logger, context=mock_context) + + probe.relationship_written( + resource="group:abc123", + relation="member", + subject="user:alice", + ) + + call_args = mock_logger.info.call_args + assert call_args[1]["request_id"] == "req-123" + assert call_args[1]["user_id"] == "user:alice" diff --git a/src/api/tests/unit/shared_kernel/authorization/test_types.py b/src/api/tests/unit/shared_kernel/authorization/test_types.py new file mode 100644 index 00000000..8b51ef5f --- /dev/null +++ b/src/api/tests/unit/shared_kernel/authorization/test_types.py @@ -0,0 +1,146 @@ +"""Unit tests for authorization types and utilities.""" + +from shared_kernel.authorization.types import ( + Permission, + RelationType, + ResourceType, + format_resource, + format_subject, +) + + +class TestResourceType: + """Tests for ResourceType enum.""" + + def test_has_user_type(self): + """Test that USER resource type exists.""" + assert ResourceType.USER == "user" + + def test_has_group_type(self): + """Test that GROUP resource type exists.""" + assert ResourceType.GROUP == "group" + + def test_has_workspace_type(self): + """Test that WORKSPACE resource type exists.""" + assert ResourceType.WORKSPACE == "workspace" + + def test_has_tenant_type(self): + """Test that TENANT resource type exists.""" + assert ResourceType.TENANT == "tenant" + + def test_resource_types_are_lowercase(self): + """Test that all resource types are lowercase strings.""" + for resource_type in ResourceType: + assert resource_type.islower() + assert isinstance(resource_type, str) + + +class TestRelationType: + """Tests for RelationType enum.""" + + def test_has_member_relation(self): + """Test that MEMBER relation exists.""" + assert RelationType.MEMBER == "member" + + def test_has_owner_relation(self): + """Test that OWNER relation exists.""" + assert RelationType.OWNER == "owner" + + def test_has_admin_relation(self): + """Test that ADMIN relation exists.""" + assert RelationType.ADMIN == "admin" + + def test_has_parent_relation(self): + """Test that PARENT relation exists.""" + assert RelationType.PARENT == "parent" + + def test_has_workspace_relation(self): + """Test that WORKSPACE relation exists.""" + assert RelationType.WORKSPACE == "workspace" + + def test_relation_types_are_lowercase(self): + """Test that all relation types are lowercase 
strings.""" + for relation_type in RelationType: + assert relation_type.islower() + assert isinstance(relation_type, str) + + +class TestPermission: + """Tests for Permission enum.""" + + def test_has_view_permission(self): + """Test that VIEW permission exists.""" + assert Permission.VIEW == "view" + + def test_has_edit_permission(self): + """Test that EDIT permission exists.""" + assert Permission.EDIT == "edit" + + def test_has_delete_permission(self): + """Test that DELETE permission exists.""" + assert Permission.DELETE == "delete" + + def test_has_manage_permission(self): + """Test that MANAGE permission exists.""" + assert Permission.MANAGE == "manage" + + def test_permissions_are_lowercase(self): + """Test that all permissions are lowercase strings.""" + for permission in Permission: + assert permission.islower() + assert isinstance(permission, str) + + +class TestFormatResource: + """Tests for format_resource utility function.""" + + def test_formats_group_resource(self): + """Test formatting a group resource identifier.""" + result = format_resource(ResourceType.GROUP, "abc123") + assert result == "group:abc123" + + def test_formats_user_resource(self): + """Test formatting a user resource identifier.""" + result = format_resource(ResourceType.USER, "alice") + assert result == "user:alice" + + def test_formats_workspace_resource(self): + """Test formatting a workspace resource identifier.""" + result = format_resource(ResourceType.WORKSPACE, "ws-xyz") + assert result == "workspace:ws-xyz" + + def test_formats_with_ulid(self): + """Test formatting with ULID identifier.""" + ulid = "01ARZ3NDEKTSV4RRFFQ69G5FAV" + result = format_resource(ResourceType.GROUP, ulid) + assert result == f"group:{ulid}" + + def test_preserves_resource_id_case(self): + """Test that resource ID case is preserved.""" + result = format_resource(ResourceType.GROUP, "MixedCase123") + assert result == "group:MixedCase123" + + +class TestFormatSubject: + """Tests for format_subject utility function.""" + + def test_formats_user_subject(self): + """Test formatting a user subject identifier.""" + result = format_subject(ResourceType.USER, "alice") + assert result == "user:alice" + + def test_formats_group_subject(self): + """Test formatting a group subject identifier.""" + result = format_subject(ResourceType.GROUP, "admins") + assert result == "group:admins" + + def test_formats_with_ulid(self): + """Test formatting subject with ULID identifier.""" + ulid = "01ARZ3NDEKTSV4RRFFQ69G5FAV" + result = format_subject(ResourceType.USER, ulid) + assert result == f"user:{ulid}" + + def test_preserves_subject_id_case(self): + """Test that subject ID case is preserved.""" + result = format_subject(ResourceType.USER, "AliceSmith") + assert result == "user:AliceSmith" diff --git a/src/api/uv.lock b/src/api/uv.lock index a9ba7159..e654fae5 100644 --- a/src/api/uv.lock +++ b/src/api/uv.lock @@ -1,6 +1,11 @@ version = 1 revision = 2 requires-python = ">=3.12" +resolution-markers = [ + "python_full_version == '3.13.*'", + "python_full_version < '3.13'", + "python_full_version >= '3.14'", +] [[package]] name = "alembic" @@ -130,6 +135,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd", size = 244005, upload-time = "2025-12-12T08:01:40.209Z" }, ] +[[package]] +name = "authzed" +version = "1.24.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "grpc-interceptor" }, + { name = "grpcio" }, + { name = "protobuf" }, + { name = "protovalidate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/be/fd/8ddbc219df7579b8bb9c33e80546b85d9c92f1bf55edc5be4fa632ffebe2/authzed-1.24.0.tar.gz", hash = "sha256:c08da75450dab02177ab7486964223ba6d6fa4e141b97c423f60e8b792b03205", size = 147477, upload-time = "2025-11-10T15:08:40.242Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/67/8ee66529e37fa1fe9afb1ba1a82ffc546cece659a1fe2ef802b57cfcc7d4/authzed-1.24.0-py3-none-any.whl", hash = "sha256:f5adb96f3d401066ba90d7a81a6805fb115c7ba5700901559352f66a9fca18da", size = 189698, upload-time = "2025-11-10T15:08:37.919Z" }, +] + [[package]] name = "beartype" version = "0.22.9" @@ -148,6 +169,23 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/fc/1d7b80d0eb7b714984ce40efc78859c022cd930e402f599d8ca9e39c78a4/cachetools-6.2.4-py3-none-any.whl", hash = "sha256:69a7a52634fed8b8bf6e24a050fb60bff1c9bd8f6d24572b99c32d4e71e62a51", size = 11551, upload-time = "2025-12-15T18:24:52.332Z" }, ] +[[package]] +name = "cel-python" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "lark" }, + { name = "python-dateutil" }, + { name = "pyyaml" }, + { name = "types-python-dateutil" }, + { name = "types-pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/10/87/85a1b99b98f6466bb87d40df636626385945ae82348e82cd97d44313f612/cel_python-0.2.0.tar.gz", hash = "sha256:75de72a5cf223ec690b236f0cc24da267219e667bd3e7f8f4f20595fcc1c0c0f", size = 67185, upload-time = "2025-02-14T11:42:21.882Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/28/08871462a0347b3e707658a8308be6f979167488a2196f93b402c2ea7170/cel_python-0.2.0-py3-none-any.whl", hash = "sha256:478ff73def7b39d51e6982f95d937a57c2b088c491c578fe5cecdbd79f476f60", size = 71337, upload-time = "2025-02-14T11:42:19.996Z" }, +] + [[package]] name = "certifi" version = "2025.11.12" @@ -674,6 +712,59 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1d/82/72401d09dc27c27fdf72ad6c2fe331e553e3c3646e01b5ff16473191033d/fastmcp-2.14.1-py3-none-any.whl", hash = "sha256:fb3e365cc1d52573ab89caeba9944dd4b056149097be169bce428e011f0a57e5", size = 412176, upload-time = "2025-12-15T02:26:25.356Z" }, ] +[[package]] +name = "google-re2" +version = "1.1.20251105" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/60/805c654ba53d685513df955ee745f71920fe8e6a284faf0f9b9dc19b659c/google_re2-1.1.20251105.tar.gz", hash = "sha256:1db14a292ee8303b91e91e7c37e05ac17d3c467f29416c79ac70a78be3e65bda", size = 11676, upload-time = "2025-11-05T14:58:07.324Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/20/73b487538e9107c2fd96aed737e3f3890dfce3e292622e4ffb2f9c810ee5/google_re2-1.1.20251105-1-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:b30f09b4d63249c72e65ccae4cbf6b331b48c22fc7cb439f1d85f347b9d07ceb", size = 485591, upload-time = "2025-11-05T14:57:20.961Z" }, + { url = "https://files.pythonhosted.org/packages/b9/9a/ca3a993bdb5dc6d5b2616b9657b2872a83d1827f8bd3ab50cd629eb751c7/google_re2-1.1.20251105-1-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:9a77892c524b8bdf3d47d7cad1cc2ac3a0108bdd65007ef4c02888fa46baf8ee", size = 518780, upload-time = "2025-11-05T14:57:22.18Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/37/b2e367987371514253ec9e514637f457deaacb7acc1c900814f3a6421e0f/google_re2-1.1.20251105-1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:a3ac51b28cbf25c100dfd8849212d878d7005d1d4a7e129a10789043c56b6021", size = 486966, upload-time = "2025-11-05T14:57:24.575Z" }, + { url = "https://files.pythonhosted.org/packages/d9/69/1db6742943c0ac254bfb7d8a37a5d3f73f016a65cfa1f84fe3a0451820f6/google_re2-1.1.20251105-1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:9f7158afc9825ac2654c6561aea94a1f7edb5b5b88e6e3639bb80bb817d102ac", size = 520225, upload-time = "2025-11-05T14:57:26.039Z" }, + { url = "https://files.pythonhosted.org/packages/f4/0a/0747c92dbebe2c09a26bd7386d372b5c5a9926236b4f3d69bb8f15db05cb/google_re2-1.1.20251105-1-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:5320da07dc3b7ac7f407514f42ac17d67e771ac7c7562d449571185e6fb601b2", size = 482943, upload-time = "2025-11-05T14:57:27.353Z" }, + { url = "https://files.pythonhosted.org/packages/7f/14/6bfc6838bb6cb561824ac03deeab2bd11d5d9a93505f536c8fa2f6bd46c4/google_re2-1.1.20251105-1-cp312-cp312-macosx_15_0_x86_64.whl", hash = "sha256:5a4e5785bc30d52ce655d805b07ad2d8a4905429a5f690ae9c2f1caa76665709", size = 510384, upload-time = "2025-11-05T14:57:29.139Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0a/6add090c917ee39f6f0be753037cafceb3bad904b424efc155fb38082635/google_re2-1.1.20251105-1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b7a3b90f747130310d4b3b8e19ebb845d0d97c1deb63b36f76c7242dacbd736", size = 572446, upload-time = "2025-11-05T14:57:30.495Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1c/8b1ccbeade96a21435d55b5185cd6d9b2ceab5a9af998a4d9099e0540759/google_re2-1.1.20251105-1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:809c5fa5d08279413b29c2e2c5c528e85cd94a0e0fd897db595a0c09eeee2782", size = 591348, upload-time = "2025-11-05T14:57:31.808Z" }, + { url = "https://files.pythonhosted.org/packages/62/cf/7bdd7a1ae7828b613011da808eafec4da3132f43c3be6af5e0bd670ebe8b/google_re2-1.1.20251105-1-cp312-cp312-win32.whl", hash = "sha256:d8424e63a9ec0fe5bde03d97876b2431f8a746af33eb475fa1ae39144bd05b2a", size = 433787, upload-time = "2025-11-05T14:57:33.071Z" }, + { url = "https://files.pythonhosted.org/packages/31/e9/5dd951c35acaabfe87c67228b9af2cdcd7779d9167edbe6b9094b8a8e529/google_re2-1.1.20251105-1-cp312-cp312-win_amd64.whl", hash = "sha256:062313c309f93dfeb6966372f4c446580e98879133ec155522eea8aaf568a5cd", size = 491726, upload-time = "2025-11-05T14:57:34.39Z" }, + { url = "https://files.pythonhosted.org/packages/60/8d/c1afd29fc2cb475fd4c634f3d3c8099c0efb662362c10b27a9eaf11c9357/google_re2-1.1.20251105-1-cp312-cp312-win_arm64.whl", hash = "sha256:558f144b26a9555ae4e9467cc3aa3299a8ce13217f328b21ae326ca0633be19b", size = 642673, upload-time = "2025-11-05T14:57:35.693Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b9/c441722196598fc3de0f654606ad9975a968c71dc27f516b5a4c9ebb94fd/google_re2-1.1.20251105-1-cp313-cp313-macosx_13_0_arm64.whl", hash = "sha256:9f3cf610e857a7d6f02916cf2b7fc159a5429b8bcb23164500d46e5e233f2924", size = 485549, upload-time = "2025-11-05T14:57:36.939Z" }, + { url = "https://files.pythonhosted.org/packages/ea/87/cf588255e5ada1dfb555cc96de35be78438bb0b6faba64df5fe91cecc224/google_re2-1.1.20251105-1-cp313-cp313-macosx_13_0_x86_64.whl", hash = "sha256:a21c2807bf4d5d00f206a4ecb3b043aad674e28c451b697b740280f608872078", size = 518840, upload-time = "2025-11-05T14:57:38.115Z" }, + { 
url = "https://files.pythonhosted.org/packages/0d/39/da66e4ca9be0c51546efc6fb39cf1683c4be8245d8199cb54a9808e8d5fa/google_re2-1.1.20251105-1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8314144eefeee7b88b742081c2038418f677e63901039ca9dbfbc0c5bb6d2911", size = 487037, upload-time = "2025-11-05T14:57:39.467Z" }, + { url = "https://files.pythonhosted.org/packages/75/dd/24ba65692dd58dca6ff178428551f4e9b776d1489a1251f5c8539e598baa/google_re2-1.1.20251105-1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:28a46be978e53c772139d0f5c9ba69f53563fcdd4225407e4d34d51208b828f1", size = 520285, upload-time = "2025-11-05T14:57:40.666Z" }, + { url = "https://files.pythonhosted.org/packages/61/12/cfdbb92bed24af6474970a75a26145c424f98cfbcc633fdd185985f0efe0/google_re2-1.1.20251105-1-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:83292e23963aa1b219d5f64a65365b0880448a6a060276027b55270bc5b18c7e", size = 482981, upload-time = "2025-11-05T14:57:41.928Z" }, + { url = "https://files.pythonhosted.org/packages/97/bf/5fc32ded9279e69a87b88d7261e7e77e2e26325d4e27ca1303a3215e430a/google_re2-1.1.20251105-1-cp313-cp313-macosx_15_0_x86_64.whl", hash = "sha256:1920b15dc9b1bdfeca5aa2c60900373c6f27cd1056d53cd299456ea5540a6fff", size = 510366, upload-time = "2025-11-05T14:57:43.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/71/f927ddc7aef1b8d7ccc8a649c335d311f29f3dea658209e30e37720e4891/google_re2-1.1.20251105-1-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b1458d9ca588124cd61aa1bf5388a216e1247e7d474f8e5e1530498044f5c87", size = 572390, upload-time = "2025-11-05T14:57:44.422Z" }, + { url = "https://files.pythonhosted.org/packages/f0/8c/23075e589038284c9487f41cde531d35873f9da622fb4ac7d1d97bd9086e/google_re2-1.1.20251105-1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a52cb204e49d20cdbb66faf394d57f476e96c39c23a328442ab0194fc6bd1a2b", size = 591386, upload-time = "2025-11-05T14:57:45.713Z" }, + { url = "https://files.pythonhosted.org/packages/f1/7f/858453ef689f6b9895cd02b466836a9d1a6e4ba535d1a275b01bf73baa1d/google_re2-1.1.20251105-1-cp313-cp313-win32.whl", hash = "sha256:67c5c73d7ebcf3f0e0a3b528b41bd8c6c04900f1598aebf05bbdf15a06cf5f9a", size = 433807, upload-time = "2025-11-05T14:57:46.92Z" }, + { url = "https://files.pythonhosted.org/packages/08/24/6ea87fe682e115ffd296e91eb5c5a266349d1ee8414ce8ece3f99ec1ac84/google_re2-1.1.20251105-1-cp313-cp313-win_amd64.whl", hash = "sha256:0bcba63ad3ea8926fb0c71bb5044e33d405bb9395f5b5444393cd5f28f0bf6d3", size = 491734, upload-time = "2025-11-05T14:57:48.304Z" }, + { url = "https://files.pythonhosted.org/packages/34/85/32ba71b06f3cf5f9856ae95b3d6463b971742453631a5ae2c5be338ea377/google_re2-1.1.20251105-1-cp313-cp313-win_arm64.whl", hash = "sha256:64ee189ea857f2126c5e42073cfa9b03e9f4cbaf073edbedb575059074841aa0", size = 642654, upload-time = "2025-11-05T14:57:49.602Z" }, + { url = "https://files.pythonhosted.org/packages/5e/7f/7eb238bdcd06182b5f427afd305cf413b7cf4ea71047308bbf35912cf923/google_re2-1.1.20251105-1-cp314-cp314-macosx_13_0_arm64.whl", hash = "sha256:cc151cf6a585d9ebe711da32b23683fcff40f78db8c8587c7f4b209ef4658809", size = 484719, upload-time = "2025-11-05T14:57:51.326Z" }, + { url = "https://files.pythonhosted.org/packages/6d/62/eed28eab67f939f4b9383c47b1db11638ade6ac30785c15cb960de85ba43/google_re2-1.1.20251105-1-cp314-cp314-macosx_13_0_x86_64.whl", hash = "sha256:7e2186d2c90488c1e11895343941f35ca2f58e9ba6c6b034fd531abe22ef77cc", size = 517698, upload-time = "2025-11-05T14:57:52.597Z" }, 
+ { url = "https://files.pythonhosted.org/packages/f7/16/a1e6768513f788bf9c67a1cfe379ef34a793983eee46e4b653e42b558b78/google_re2-1.1.20251105-1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:41be22359c3dceb582937739b4365dd8e279de24ad0a5b10e653503abaff2ed7", size = 486421, upload-time = "2025-11-05T14:57:53.852Z" }, + { url = "https://files.pythonhosted.org/packages/ca/fc/7a97ffd36d451e5a8bfaff2f9022b14807795d588f98227ff96e8da99856/google_re2-1.1.20251105-1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:f3168d7bbac247c862ea85b2f3c011d3a04bedcb6892b37f14d488f4133b206e", size = 519037, upload-time = "2025-11-05T14:57:55.078Z" }, + { url = "https://files.pythonhosted.org/packages/5f/ee/8b6f7d94bb689dafdf60de8dd8f8f6296ad40d4d15c933fcda4da7a3a06b/google_re2-1.1.20251105-1-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:79ce664038194a31bbcf422137f9607ae3d9946a5cff98cf0efbeb7f9411e64b", size = 483373, upload-time = "2025-11-05T14:57:56.297Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a6/16a09e03d1de128f821869e4252688c21319f5017d9209f4d0e71ea5c951/google_re2-1.1.20251105-1-cp314-cp314-macosx_15_0_x86_64.whl", hash = "sha256:0476b07421b8882b279d5ceb5b760c15c62d581ded95274697fc1227e3869ee6", size = 510167, upload-time = "2025-11-05T14:57:57.653Z" }, + { url = "https://files.pythonhosted.org/packages/c4/9d/213dce5de401527369fb5af11096b18c06001d9eb71f3318fe5eba1ec706/google_re2-1.1.20251105-1-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:85feec3161ffdc12f6b144e37a2f91f80b771c72ffadde60191e89a49f6d7e81", size = 573176, upload-time = "2025-11-05T14:57:59.211Z" }, + { url = "https://files.pythonhosted.org/packages/03/be/a8def96aa4a80b233e105767d22e3de961dcde5a04f0a05cb4f3ddb4df78/google_re2-1.1.20251105-1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7bfaa2cf55daf0c5c650e68526bb20b61e37d7f3ae53f6893013acc1c91c116", size = 591483, upload-time = "2025-11-05T14:58:00.416Z" }, + { url = "https://files.pythonhosted.org/packages/14/ea/144bbc4b9359da89aec07b4c2a91a6bfe7119914885386577c665b07bb01/google_re2-1.1.20251105-1-cp314-cp314-win32.whl", hash = "sha256:214c1accdc60fff9ce1bf812b157147ca361844f496ed9e0d5f357b0e562ced8", size = 433773, upload-time = "2025-11-05T14:58:01.594Z" }, + { url = "https://files.pythonhosted.org/packages/96/b3/74e301211699f1b650ba7690a3e4e52146ac4266fcd62f3ea0a945b9eda4/google_re2-1.1.20251105-1-cp314-cp314-win_amd64.whl", hash = "sha256:6d4d5fdadd329a2ed193463899d00ef2fd126172f36a4c01c9def271f19801b6", size = 491893, upload-time = "2025-11-05T14:58:02.969Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d1/4adcfcb9c95e3d064c9f7aaf6cb3a4fc842d86115014b9d4094db4d465b5/google_re2-1.1.20251105-1-cp314-cp314-win_arm64.whl", hash = "sha256:1d27f3a2a947ec1f721d0f14f661108acfd4f4d34f357ce28db951cc036656e5", size = 643093, upload-time = "2025-11-05T14:58:05.761Z" }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.72.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433, upload-time = "2025-11-06T18:29:24.087Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" }, +] + [[package]] name = "greenlet" version = "3.3.0" @@ -713,6 +804,59 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034, upload-time = "2025-12-04T14:27:33.531Z" }, ] +[[package]] +name = "grpc-interceptor" +version = "0.15.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/28/57449d5567adf4c1d3e216aaca545913fbc21a915f2da6790d6734aac76e/grpc-interceptor-0.15.4.tar.gz", hash = "sha256:1f45c0bcb58b6f332f37c637632247c9b02bc6af0fdceb7ba7ce8d2ebbfb0926", size = 19322, upload-time = "2023-11-16T02:05:42.459Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/ac/8d53f230a7443401ce81791ec50a3b0e54924bf615ad287654fa4a2f5cdc/grpc_interceptor-0.15.4-py3-none-any.whl", hash = "sha256:0035f33228693ed3767ee49d937bac424318db173fef4d2d0170b3215f254d9d", size = 20848, upload-time = "2023-11-16T02:05:40.913Z" }, +] + +[[package]] +name = "grpcio" +version = "1.76.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182, upload-time = "2025-10-21T16:23:12.106Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/05/8e29121994b8d959ffa0afd28996d452f291b48cfc0875619de0bde2c50c/grpcio-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8", size = 5799718, upload-time = "2025-10-21T16:21:17.939Z" }, + { url = "https://files.pythonhosted.org/packages/d9/75/11d0e66b3cdf998c996489581bdad8900db79ebd83513e45c19548f1cba4/grpcio-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280", size = 11825627, upload-time = "2025-10-21T16:21:20.466Z" }, + { url = "https://files.pythonhosted.org/packages/28/50/2f0aa0498bc188048f5d9504dcc5c2c24f2eb1a9337cd0fa09a61a2e75f0/grpcio-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4", size = 6359167, upload-time = "2025-10-21T16:21:23.122Z" }, + { url = "https://files.pythonhosted.org/packages/66/e5/bbf0bb97d29ede1d59d6588af40018cfc345b17ce979b7b45424628dc8bb/grpcio-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f8f757bebaaea112c00dba718fc0d3260052ce714e25804a03f93f5d1c6cc11", size = 7044267, upload-time = "2025-10-21T16:21:25.995Z" }, + { url = "https://files.pythonhosted.org/packages/f5/86/f6ec2164f743d9609691115ae8ece098c76b894ebe4f7c94a655c6b03e98/grpcio-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:980a846182ce88c4f2f7e2c22c56aefd515daeb36149d1c897f83cf57999e0b6", size = 6573963, upload-time = "2025-10-21T16:21:28.631Z" }, + { 
url = "https://files.pythonhosted.org/packages/60/bc/8d9d0d8505feccfdf38a766d262c71e73639c165b311c9457208b56d92ae/grpcio-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f92f88e6c033db65a5ae3d97905c8fea9c725b63e28d5a75cb73b49bda5024d8", size = 7164484, upload-time = "2025-10-21T16:21:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/67/e6/5d6c2fc10b95edf6df9b8f19cf10a34263b7fd48493936fffd5085521292/grpcio-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4baf3cbe2f0be3289eb68ac8ae771156971848bb8aaff60bad42005539431980", size = 8127777, upload-time = "2025-10-21T16:21:33.577Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c8/dce8ff21c86abe025efe304d9e31fdb0deaaa3b502b6a78141080f206da0/grpcio-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:615ba64c208aaceb5ec83bfdce7728b80bfeb8be97562944836a7a0a9647d882", size = 7594014, upload-time = "2025-10-21T16:21:41.882Z" }, + { url = "https://files.pythonhosted.org/packages/e0/42/ad28191ebf983a5d0ecef90bab66baa5a6b18f2bfdef9d0a63b1973d9f75/grpcio-1.76.0-cp312-cp312-win32.whl", hash = "sha256:45d59a649a82df5718fd9527ce775fd66d1af35e6d31abdcdc906a49c6822958", size = 3984750, upload-time = "2025-10-21T16:21:44.006Z" }, + { url = "https://files.pythonhosted.org/packages/9e/00/7bd478cbb851c04a48baccaa49b75abaa8e4122f7d86da797500cccdd771/grpcio-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:c088e7a90b6017307f423efbb9d1ba97a22aa2170876223f9709e9d1de0b5347", size = 4704003, upload-time = "2025-10-21T16:21:46.244Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ed/71467ab770effc9e8cef5f2e7388beb2be26ed642d567697bb103a790c72/grpcio-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:26ef06c73eb53267c2b319f43e6634c7556ea37672029241a056629af27c10e2", size = 5807716, upload-time = "2025-10-21T16:21:48.475Z" }, + { url = "https://files.pythonhosted.org/packages/2c/85/c6ed56f9817fab03fa8a111ca91469941fb514e3e3ce6d793cb8f1e1347b/grpcio-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:45e0111e73f43f735d70786557dc38141185072d7ff8dc1829d6a77ac1471468", size = 11821522, upload-time = "2025-10-21T16:21:51.142Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/2b8a235ab40c39cbc141ef647f8a6eb7b0028f023015a4842933bc0d6831/grpcio-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:83d57312a58dcfe2a3a0f9d1389b299438909a02db60e2f2ea2ae2d8034909d3", size = 6362558, upload-time = "2025-10-21T16:21:54.213Z" }, + { url = "https://files.pythonhosted.org/packages/bd/64/9784eab483358e08847498ee56faf8ff6ea8e0a4592568d9f68edc97e9e9/grpcio-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3e2a27c89eb9ac3d81ec8835e12414d73536c6e620355d65102503064a4ed6eb", size = 7049990, upload-time = "2025-10-21T16:21:56.476Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/8c12319a6369434e7a184b987e8e9f3b49a114c489b8315f029e24de4837/grpcio-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61f69297cba3950a524f61c7c8ee12e55c486cb5f7db47ff9dcee33da6f0d3ae", size = 6575387, upload-time = "2025-10-21T16:21:59.051Z" }, + { url = "https://files.pythonhosted.org/packages/15/0f/f12c32b03f731f4a6242f771f63039df182c8b8e2cf8075b245b409259d4/grpcio-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6a15c17af8839b6801d554263c546c69c4d7718ad4321e3166175b37eaacca77", size = 7166668, upload-time = "2025-10-21T16:22:02.049Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/2d/3ec9ce0c2b1d92dd59d1c3264aaec9f0f7c817d6e8ac683b97198a36ed5a/grpcio-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:25a18e9810fbc7e7f03ec2516addc116a957f8cbb8cbc95ccc80faa072743d03", size = 8124928, upload-time = "2025-10-21T16:22:04.984Z" }, + { url = "https://files.pythonhosted.org/packages/1a/74/fd3317be5672f4856bcdd1a9e7b5e17554692d3db9a3b273879dc02d657d/grpcio-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:931091142fd8cc14edccc0845a79248bc155425eee9a98b2db2ea4f00a235a42", size = 7589983, upload-time = "2025-10-21T16:22:07.881Z" }, + { url = "https://files.pythonhosted.org/packages/45/bb/ca038cf420f405971f19821c8c15bcbc875505f6ffadafe9ffd77871dc4c/grpcio-1.76.0-cp313-cp313-win32.whl", hash = "sha256:5e8571632780e08526f118f74170ad8d50fb0a48c23a746bef2a6ebade3abd6f", size = 3984727, upload-time = "2025-10-21T16:22:10.032Z" }, + { url = "https://files.pythonhosted.org/packages/41/80/84087dc56437ced7cdd4b13d7875e7439a52a261e3ab4e06488ba6173b0a/grpcio-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:f9f7bd5faab55f47231ad8dba7787866b69f5e93bc306e3915606779bbfb4ba8", size = 4702799, upload-time = "2025-10-21T16:22:12.709Z" }, + { url = "https://files.pythonhosted.org/packages/b4/46/39adac80de49d678e6e073b70204091e76631e03e94928b9ea4ecf0f6e0e/grpcio-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:ff8a59ea85a1f2191a0ffcc61298c571bc566332f82e5f5be1b83c9d8e668a62", size = 5808417, upload-time = "2025-10-21T16:22:15.02Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f5/a4531f7fb8b4e2a60b94e39d5d924469b7a6988176b3422487be61fe2998/grpcio-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06c3d6b076e7b593905d04fdba6a0525711b3466f43b3400266f04ff735de0cd", size = 11828219, upload-time = "2025-10-21T16:22:17.954Z" }, + { url = "https://files.pythonhosted.org/packages/4b/1c/de55d868ed7a8bd6acc6b1d6ddc4aa36d07a9f31d33c912c804adb1b971b/grpcio-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fd5ef5932f6475c436c4a55e4336ebbe47bd3272be04964a03d316bbf4afbcbc", size = 6367826, upload-time = "2025-10-21T16:22:20.721Z" }, + { url = "https://files.pythonhosted.org/packages/59/64/99e44c02b5adb0ad13ab3adc89cb33cb54bfa90c74770f2607eea629b86f/grpcio-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b331680e46239e090f5b3cead313cc772f6caa7d0fc8de349337563125361a4a", size = 7049550, upload-time = "2025-10-21T16:22:23.637Z" }, + { url = "https://files.pythonhosted.org/packages/43/28/40a5be3f9a86949b83e7d6a2ad6011d993cbe9b6bd27bea881f61c7788b6/grpcio-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2229ae655ec4e8999599469559e97630185fdd53ae1e8997d147b7c9b2b72cba", size = 6575564, upload-time = "2025-10-21T16:22:26.016Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a9/1be18e6055b64467440208a8559afac243c66a8b904213af6f392dc2212f/grpcio-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:490fa6d203992c47c7b9e4a9d39003a0c2bcc1c9aa3c058730884bbbb0ee9f09", size = 7176236, upload-time = "2025-10-21T16:22:28.362Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/dba05d3fcc151ce6e81327541d2cc8394f442f6b350fead67401661bf041/grpcio-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:479496325ce554792dba6548fae3df31a72cef7bad71ca2e12b0e58f9b336bfc", size = 8125795, upload-time = "2025-10-21T16:22:31.075Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/45/122df922d05655f63930cf42c9e3f72ba20aadb26c100ee105cad4ce4257/grpcio-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1c9b93f79f48b03ada57ea24725d83a30284a012ec27eab2cf7e50a550cbbbcc", size = 7592214, upload-time = "2025-10-21T16:22:33.831Z" }, + { url = "https://files.pythonhosted.org/packages/4a/6e/0b899b7f6b66e5af39e377055fb4a6675c9ee28431df5708139df2e93233/grpcio-1.76.0-cp314-cp314-win32.whl", hash = "sha256:747fa73efa9b8b1488a95d0ba1039c8e2dca0f741612d80415b1e1c560febf4e", size = 4062961, upload-time = "2025-10-21T16:22:36.468Z" }, + { url = "https://files.pythonhosted.org/packages/19/41/0b430b01a2eb38ee887f88c1f07644a1df8e289353b78e82b37ef988fb64/grpcio-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:922fa70ba549fce362d2e2871ab542082d66e2aaf0c19480ea453905b01f384e", size = 4834462, upload-time = "2025-10-21T16:22:39.772Z" }, +] + [[package]] name = "h11" version = "0.16.0" @@ -872,6 +1016,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + [[package]] name = "jsonschema" version = "4.25.1" @@ -922,6 +1075,7 @@ dependencies = [ { name = "alembic" }, { name = "apache-age-python" }, { name = "asyncpg" }, + { name = "authzed" }, { name = "cyclopts" }, { name = "fastapi", extra = ["standard"] }, { name = "fastmcp" }, @@ -950,6 +1104,7 @@ requires-dist = [ { name = "alembic", specifier = ">=1.17.2" }, { name = "apache-age-python", specifier = ">=0.0.7" }, { name = "asyncpg", specifier = ">=0.31.0" }, + { name = "authzed", specifier = ">=1.24.0" }, { name = "cyclopts", specifier = "==5.0.0a1" }, { name = "fastapi", extras = ["standard"], specifier = ">=0.123.9" }, { name = "fastmcp", specifier = "==2.14.1" }, @@ -990,6 +1145,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/db/e655086b7f3a705df045bf0933bdd9c2f79bb3c97bfef1384598bb79a217/keyring-25.7.0-py3-none-any.whl", hash = "sha256:be4a0b195f149690c166e850609a477c532ddbfbaed96a404d4e43f8d5e2689f", size = 39160, upload-time = "2025-11-16T16:26:08.402Z" }, ] +[[package]] +name = "lark" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/1d/29681d27b84e384ea50b5546e9f0089126afbc91754db4ca89593fcfd0e8/lark-0.12.0.tar.gz", hash = "sha256:7da76fcfddadabbbbfd949bbae221efd33938451d90b1fefbbc423c3cccf48ef", size = 235168, upload-time = "2021-11-12T11:15:32.124Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/39/cef2ccdfd984ae3cf93878d050c1b7c9354dd9493ce83fd9bb33a41f7a33/lark-0.12.0-py2.py3-none-any.whl", hash = 
"sha256:ed1d891cbcf5151ead1c1d14663bf542443e579e63a76ae175b01b899bd854ca", size = 103540, upload-time = "2021-11-12T11:15:34.408Z" }, +] + [[package]] name = "librt" version = "0.7.3" @@ -1410,6 +1574,35 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145, upload-time = "2025-09-18T20:47:23.875Z" }, ] +[[package]] +name = "protobuf" +version = "6.33.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/34/44/e49ecff446afeec9d1a66d6bbf9adc21e3c7cea7803a920ca3773379d4f6/protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4", size = 444296, upload-time = "2025-12-06T00:17:53.311Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/91/1e3a34881a88697a7354ffd177e8746e97a722e5e8db101544b47e84afb1/protobuf-6.33.2-cp310-abi3-win32.whl", hash = "sha256:87eb388bd2d0f78febd8f4c8779c79247b26a5befad525008e49a6955787ff3d", size = 425603, upload-time = "2025-12-06T00:17:41.114Z" }, + { url = "https://files.pythonhosted.org/packages/64/20/4d50191997e917ae13ad0a235c8b42d8c1ab9c3e6fd455ca16d416944355/protobuf-6.33.2-cp310-abi3-win_amd64.whl", hash = "sha256:fc2a0e8b05b180e5fc0dd1559fe8ebdae21a27e81ac77728fb6c42b12c7419b4", size = 436930, upload-time = "2025-12-06T00:17:43.278Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ca/7e485da88ba45c920fb3f50ae78de29ab925d9e54ef0de678306abfbb497/protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43", size = 427621, upload-time = "2025-12-06T00:17:44.445Z" }, + { url = "https://files.pythonhosted.org/packages/7d/4f/f743761e41d3b2b2566748eb76bbff2b43e14d5fcab694f494a16458b05f/protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e", size = 324460, upload-time = "2025-12-06T00:17:45.678Z" }, + { url = "https://files.pythonhosted.org/packages/b1/fa/26468d00a92824020f6f2090d827078c09c9c587e34cbfd2d0c7911221f8/protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872", size = 339168, upload-time = "2025-12-06T00:17:46.813Z" }, + { url = "https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f", size = 323270, upload-time = "2025-12-06T00:17:48.253Z" }, + { url = "https://files.pythonhosted.org/packages/0e/15/4f02896cc3df04fc465010a4c6a0cd89810f54617a32a70ef531ed75d61c/protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c", size = 170501, upload-time = "2025-12-06T00:17:52.211Z" }, +] + +[[package]] +name = "protovalidate" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cel-python" }, + { name = "google-re2" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/98/1595ae90c4a29c625580ee84415bb0c752e09c6c7aa13595e8ea94a7c929/protovalidate-1.0.0.tar.gz", hash = "sha256:926f7a212fed9190d00cc076fa24ef5e48a404b5577465028697f4dea8c4a507", size = 215286, upload-time = 
"2025-09-12T16:28:02.665Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/1d/30a86726b317593469eb526c8ca25dd8ce7f7b9f4237137fedb1f352ffff/protovalidate-1.0.0-py3-none-any.whl", hash = "sha256:933818942700c85d4a47f1030e61f59d7bd9a8c1572e9dc822f98eef45a39d9e", size = 29478, upload-time = "2025-09-12T16:28:01.201Z" }, +] + [[package]] name = "psycopg2" version = "2.9.11" @@ -1698,6 +1891,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, ] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + [[package]] name = "python-dotenv" version = "1.2.1" @@ -2080,6 +2285,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, ] +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + [[package]] name = "sortedcontainers" version = "2.4.0" @@ -2188,6 +2402,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/0c/05feaf8cb51159f2c0af04b871dab7e98a2f83a3622f5f216331d2dd924c/types_psycopg2-2.9.21.20251012-py3-none-any.whl", hash = "sha256:712bad5c423fe979e357edbf40a07ca40ef775d74043de72bd4544ca328cc57e", size = 24883, upload-time = "2025-10-12T02:55:38.439Z" }, ] +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20251115" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/36/06d01fb52c0d57e9ad0c237654990920fa41195e4b3d640830dabf9eeb2f/types_python_dateutil-2.9.0.20251115.tar.gz", hash = "sha256:8a47f2c3920f52a994056b8786309b43143faa5a64d4cbb2722d6addabdf1a58", size = 16363, upload-time = "2025-11-15T03:00:13.717Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/43/0b/56961d3ba517ed0df9b3a27bfda6514f3d01b28d499d1bce9068cfe4edd1/types_python_dateutil-2.9.0.20251115-py3-none-any.whl", hash = "sha256:9cf9c1c582019753b8639a081deefd7e044b9fa36bd8217f565c6c4e36ee0624", size = 18251, upload-time = "2025-11-15T03:00:12.317Z" }, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250915" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522, upload-time = "2025-09-15T03:01:00.728Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338, upload-time = "2025-09-15T03:00:59.218Z" }, +] + [[package]] name = "typing-extensions" version = "4.15.0" From 6f846a08b28840488da0bc2e4415c4638fc680d8 Mon Sep 17 00:00:00 2001 From: John Sell Date: Mon, 19 Jan 2026 09:17:05 -0500 Subject: [PATCH 03/11] ci: automerge mintmaker non-major upgrades if tests pass --- renovate.json | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 renovate.json diff --git a/renovate.json b/renovate.json new file mode 100644 index 00000000..bc0627bc --- /dev/null +++ b/renovate.json @@ -0,0 +1,9 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "github>konflux-ci/mintmaker-presets:cve-automerge-all*", + "github>konflux-ci/mintmaker-presets:group-python-requirements", + "github>konflux-ci/mintmaker-presets:approveMajorUpdates", + "github>konflux-ci/mintmaker-presets:automergeAll" + ] +} \ No newline at end of file From ae5330b0468f6a672e516f113afb512a8bf0da80 Mon Sep 17 00:00:00 2001 From: John Sell Date: Tue, 27 Jan 2026 14:38:12 -0500 Subject: [PATCH 04/11] fix(deploy): set postgres uid/gid to 001379999 --- deploy/apps/kartograph/base/postgres-deployment.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deploy/apps/kartograph/base/postgres-deployment.yaml b/deploy/apps/kartograph/base/postgres-deployment.yaml index 587a9668..dc03c28b 100644 --- a/deploy/apps/kartograph/base/postgres-deployment.yaml +++ b/deploy/apps/kartograph/base/postgres-deployment.yaml @@ -17,13 +17,13 @@ spec: app.kubernetes.io/component: database spec: securityContext: - fsGroup: 999 # postgres group + fsGroup: 001379999 # postgres group containers: - name: postgres image: apache/age:release_PG17_1.6.0 securityContext: - runAsUser: 999 - runAsGroup: 999 + runAsUser: 001379999 + runAsGroup: 001379999 runAsNonRoot: true allowPrivilegeEscalation: false imagePullPolicy: IfNotPresent From 66ba5517a05be80e0ccbe29feeddd7d8a41a9c06 Mon Sep 17 00:00:00 2001 From: John Sell Date: Thu, 29 Jan 2026 11:52:08 -0500 Subject: [PATCH 05/11] refactor(api.iam): rename Role to GroupRole --- .../iam/application/services/group_service.py | 4 +- src/api/iam/domain/aggregates/group.py | 16 ++-- src/api/iam/domain/events.py | 12 +-- src/api/iam/domain/value_objects.py | 10 +-- .../iam/infrastructure/group_repository.py | 6 +- .../iam/infrastructure/outbox/serializer.py | 12 +-- .../iam/infrastructure/outbox/translator.py | 12 +-- src/api/iam/presentation/models.py | 4 +- src/api/tests/integration/iam/test_api.py | 4 +- 
.../integration/iam/test_group_repository.py | 26 +++--- .../iam/test_outbox_consistency.py | 18 ++-- .../iam/application/test_group_service.py | 4 +- src/api/tests/unit/iam/domain/test_events.py | 74 +++++++-------- .../unit/iam/domain/test_group_aggregate.py | 90 +++++++++---------- .../infrastructure/outbox/test_serializer.py | 34 +++---- .../infrastructure/outbox/test_translator.py | 6 +- .../infrastructure/test_group_repository.py | 14 +-- .../unit/infrastructure/outbox/test_worker.py | 4 +- .../shared_kernel/outbox/test_operations.py | 4 +- 19 files changed, 178 insertions(+), 176 deletions(-) diff --git a/src/api/iam/application/services/group_service.py b/src/api/iam/application/services/group_service.py index e75ae954..786581ed 100644 --- a/src/api/iam/application/services/group_service.py +++ b/src/api/iam/application/services/group_service.py @@ -9,7 +9,7 @@ from iam.application.observability import DefaultGroupServiceProbe, GroupServiceProbe from iam.domain.aggregates import Group -from iam.domain.value_objects import GroupId, Role, TenantId, UserId +from iam.domain.value_objects import GroupId, GroupRole, TenantId, UserId from iam.ports.repositories import IGroupRepository from shared_kernel.authorization.protocols import AuthorizationProvider from shared_kernel.authorization.types import ( @@ -74,7 +74,7 @@ async def create_group( # Create group using factory method (records GroupCreated event) group = Group.create(name=name, tenant_id=self._scope_to_tenant) # Add creator as admin (records MemberAdded event) - group.add_member(creator_id, Role.ADMIN) + group.add_member(creator_id, GroupRole.ADMIN) async with self._session.begin(): # Persist group (writes to PostgreSQL and outbox) diff --git a/src/api/iam/domain/aggregates/group.py b/src/api/iam/domain/aggregates/group.py index 2fb02c29..ca19bee1 100644 --- a/src/api/iam/domain/aggregates/group.py +++ b/src/api/iam/domain/aggregates/group.py @@ -17,7 +17,7 @@ from iam.domain.value_objects import ( GroupId, GroupMember, - Role, + GroupRole, TenantId, UserId, ) @@ -78,7 +78,7 @@ def create(cls, name: str, tenant_id: TenantId) -> "Group": ) return group - def add_member(self, user_id: UserId, role: Role) -> None: + def add_member(self, user_id: UserId, role: GroupRole) -> None: """Add a member to the group with a specific role. Args: @@ -118,8 +118,8 @@ def remove_member(self, user_id: UserId) -> None: member_role = self.get_member_role(user_id) # Check if removing last admin - if member_role == Role.ADMIN: - admin_count = sum(1 for m in self.members if m.role == Role.ADMIN) + if member_role == GroupRole.ADMIN: + admin_count = sum(1 for m in self.members if m.role == GroupRole.ADMIN) if admin_count == 1: raise ValueError( "Cannot remove the last admin. Promote another member first." @@ -137,7 +137,7 @@ def remove_member(self, user_id: UserId) -> None: ) ) - def update_member_role(self, user_id: UserId, new_role: Role) -> None: + def update_member_role(self, user_id: UserId, new_role: GroupRole) -> None: """Update a member's role. 
Args: @@ -153,8 +153,8 @@ def update_member_role(self, user_id: UserId, new_role: Role) -> None: current_role = self.get_member_role(user_id) # Check if demoting last admin - if current_role == Role.ADMIN and new_role != Role.ADMIN: - admin_count = sum(1 for m in self.members if m.role == Role.ADMIN) + if current_role == GroupRole.ADMIN and new_role != GroupRole.ADMIN: + admin_count = sum(1 for m in self.members if m.role == GroupRole.ADMIN) if admin_count == 1: raise ValueError( "Cannot demote the last admin. Promote another member first." @@ -205,7 +205,7 @@ def has_member(self, user_id: UserId) -> bool: """ return any(m.user_id == user_id for m in self.members) - def get_member_role(self, user_id: UserId) -> Role | None: + def get_member_role(self, user_id: UserId) -> GroupRole | None: """Get the role of a member. Args: diff --git a/src/api/iam/domain/events.py b/src/api/iam/domain/events.py index 0861bb9a..0ad59f5c 100644 --- a/src/api/iam/domain/events.py +++ b/src/api/iam/domain/events.py @@ -13,7 +13,7 @@ from dataclasses import dataclass from datetime import datetime -from iam.domain.value_objects import Role +from iam.domain.value_objects import GroupRole @dataclass(frozen=True) @@ -29,7 +29,7 @@ class MemberSnapshot: """ user_id: str - role: Role + role: GroupRole @dataclass(frozen=True) @@ -85,7 +85,7 @@ class MemberAdded: group_id: str user_id: str - role: Role + role: GroupRole occurred_at: datetime @@ -104,7 +104,7 @@ class MemberRemoved: group_id: str user_id: str - role: Role + role: GroupRole occurred_at: datetime @@ -125,8 +125,8 @@ class MemberRoleChanged: group_id: str user_id: str - old_role: Role - new_role: Role + old_role: GroupRole + new_role: GroupRole occurred_at: datetime diff --git a/src/api/iam/domain/value_objects.py b/src/api/iam/domain/value_objects.py index 0f320e90..7a4fadae 100644 --- a/src/api/iam/domain/value_objects.py +++ b/src/api/iam/domain/value_objects.py @@ -130,7 +130,7 @@ class APIKeyId(BaseId): pass -class Role(StrEnum): +class GroupRole(StrEnum): """Roles for group membership. Defines the hierarchy of permissions within a group. 
@@ -149,16 +149,16 @@ class GroupMember: """ user_id: UserId - role: Role + role: GroupRole def is_admin(self) -> bool: """Check if this member is an admin.""" - return self.role == Role.ADMIN + return self.role == GroupRole.ADMIN def is_member(self) -> bool: """Check if this member is a regular member.""" - return self.role == Role.MEMBER + return self.role == GroupRole.MEMBER def has_admin_privileges(self) -> bool: """Check if this member has admin privileges.""" - return self.role == Role.ADMIN + return self.role == GroupRole.ADMIN diff --git a/src/api/iam/infrastructure/group_repository.py b/src/api/iam/infrastructure/group_repository.py index 515d3cf4..ffebc3bd 100644 --- a/src/api/iam/infrastructure/group_repository.py +++ b/src/api/iam/infrastructure/group_repository.py @@ -17,7 +17,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from iam.domain.aggregates import Group -from iam.domain.value_objects import GroupId, GroupMember, Role, TenantId, UserId +from iam.domain.value_objects import GroupId, GroupMember, GroupRole, TenantId, UserId from iam.infrastructure.models import GroupModel from iam.infrastructure.observability import ( DefaultGroupRepositoryProbe, @@ -288,7 +288,7 @@ async def _hydrate_members(self, group_id: str) -> list[GroupMember]: group_resource = format_resource(ResourceType.GROUP, group_id) # Lookup all subjects with each role type - for role in [Role.ADMIN, Role.MEMBER]: + for role in [GroupRole.ADMIN, GroupRole.MEMBER]: subjects = await self._authz.lookup_subjects( resource=group_resource, relation=role.value, @@ -299,7 +299,7 @@ async def _hydrate_members(self, group_id: str) -> list[GroupMember]: members.append( GroupMember( user_id=UserId(value=subject_relation.subject_id), - role=Role(subject_relation.relation), + role=GroupRole(subject_relation.relation), ) ) diff --git a/src/api/iam/infrastructure/outbox/serializer.py b/src/api/iam/infrastructure/outbox/serializer.py index 3a16ea06..e6b24391 100644 --- a/src/api/iam/infrastructure/outbox/serializer.py +++ b/src/api/iam/infrastructure/outbox/serializer.py @@ -15,7 +15,7 @@ DomainEvent, MemberSnapshot, ) -from iam.domain.value_objects import Role +from iam.domain.value_objects import GroupRole # Derive supported events from the DomainEvent type alias _SUPPORTED_EVENTS: frozenset[str] = frozenset( @@ -98,7 +98,7 @@ def _convert_for_json(self, data: dict[str, Any]) -> None: for key, value in list(data.items()): if isinstance(value, datetime): data[key] = value.isoformat() - elif isinstance(value, Role): + elif isinstance(value, GroupRole): data[key] = value.value elif isinstance(value, tuple): # Handle MemberSnapshot tuples in GroupDeleted @@ -131,18 +131,18 @@ def _convert_from_json(self, data: dict[str, Any], event_type: str) -> None: # Convert role fields back to Role enum if "role" in data: - data["role"] = Role(data["role"]) + data["role"] = GroupRole(data["role"]) if "old_role" in data: - data["old_role"] = Role(data["old_role"]) + data["old_role"] = GroupRole(data["old_role"]) if "new_role" in data: - data["new_role"] = Role(data["new_role"]) + data["new_role"] = GroupRole(data["new_role"]) # Convert members list back to tuple of MemberSnapshot if "members" in data and event_type == "GroupDeleted": data["members"] = tuple( MemberSnapshot( user_id=m["user_id"], - role=Role(m["role"]), + role=GroupRole(m["role"]), ) for m in data["members"] ) diff --git a/src/api/iam/infrastructure/outbox/translator.py b/src/api/iam/infrastructure/outbox/translator.py index b422eb17..9a030ad9 100644 --- 
a/src/api/iam/infrastructure/outbox/translator.py +++ b/src/api/iam/infrastructure/outbox/translator.py @@ -10,7 +10,7 @@ from typing import Any, get_args from iam.domain.events import DomainEvent -from iam.domain.value_objects import Role +from iam.domain.value_objects import GroupRole from shared_kernel.authorization.types import RelationType, ResourceType from shared_kernel.outbox.operations import ( DeleteRelationship, @@ -110,7 +110,7 @@ def _translate_group_deleted( # Delete all member relationships from the snapshot for member in payload["members"]: - role = Role(member["role"]) + role = GroupRole(member["role"]) operations.append( DeleteRelationship( resource_type=ResourceType.GROUP, @@ -128,7 +128,7 @@ def _translate_member_added( payload: dict[str, Any], ) -> list[SpiceDBOperation]: """Translate MemberAdded to role relationship write.""" - role = Role(payload["role"]) + role = GroupRole(payload["role"]) return [ WriteRelationship( resource_type=ResourceType.GROUP, @@ -144,7 +144,7 @@ def _translate_member_removed( payload: dict[str, Any], ) -> list[SpiceDBOperation]: """Translate MemberRemoved to role relationship delete.""" - role = Role(payload["role"]) + role = GroupRole(payload["role"]) return [ DeleteRelationship( resource_type=ResourceType.GROUP, @@ -160,8 +160,8 @@ def _translate_member_role_changed( payload: dict[str, Any], ) -> list[SpiceDBOperation]: """Translate MemberRoleChanged to delete old + write new role.""" - old_role = Role(payload["old_role"]) - new_role = Role(payload["new_role"]) + old_role = GroupRole(payload["old_role"]) + new_role = GroupRole(payload["new_role"]) return [ # First delete the old role relationship diff --git a/src/api/iam/presentation/models.py b/src/api/iam/presentation/models.py index 6b26ab59..14c1fa86 100644 --- a/src/api/iam/presentation/models.py +++ b/src/api/iam/presentation/models.py @@ -7,7 +7,7 @@ from pydantic import BaseModel, Field from iam.domain.aggregates import APIKey, Group, Tenant -from iam.domain.value_objects import Role +from iam.domain.value_objects import GroupRole class CreateGroupRequest(BaseModel): @@ -23,7 +23,7 @@ class GroupMemberResponse(BaseModel): """Response model for group member.""" user_id: str = Field(..., description="User ID (ULID format)") - role: Role = Field(..., description="Member role (admin or member)") + role: GroupRole = Field(..., description="Member role (admin or member)") class GroupResponse(BaseModel): diff --git a/src/api/tests/integration/iam/test_api.py b/src/api/tests/integration/iam/test_api.py index 8f628245..4fcad252 100644 --- a/src/api/tests/integration/iam/test_api.py +++ b/src/api/tests/integration/iam/test_api.py @@ -12,7 +12,7 @@ from asgi_lifespan import LifespanManager from httpx import ASGITransport, AsyncClient -from iam.domain.value_objects import GroupId, Role +from iam.domain.value_objects import GroupId, GroupRole from main import app from shared_kernel.authorization.protocols import AuthorizationProvider from shared_kernel.authorization.types import ( @@ -90,7 +90,7 @@ async def test_creates_group_successfully( assert data["id"] is not None assert len(data["members"]) == 1 # Creator becomes admin member - assert data["members"][0]["role"] == Role.ADMIN.value + assert data["members"][0]["role"] == GroupRole.ADMIN.value # User ID comes from JWT, verify it's a valid ULID format assert len(data["members"][0]["user_id"]) > 0 diff --git a/src/api/tests/integration/iam/test_group_repository.py b/src/api/tests/integration/iam/test_group_repository.py index 
12a067e7..e1d324e1 100644 --- a/src/api/tests/integration/iam/test_group_repository.py +++ b/src/api/tests/integration/iam/test_group_repository.py @@ -8,7 +8,7 @@ import pytest from iam.domain.aggregates import Group -from iam.domain.value_objects import Role, TenantId, UserId +from iam.domain.value_objects import GroupRole, TenantId, UserId from iam.infrastructure.group_repository import GroupRepository from iam.ports.exceptions import DuplicateGroupNameError @@ -49,7 +49,7 @@ async def test_saves_and_retrieves_group_with_members( tenant_id = TenantId.generate() group = Group.create(name="Engineering", tenant_id=tenant_id) user_id = UserId.generate() - group.add_member(user_id, Role.ADMIN) + group.add_member(user_id, GroupRole.ADMIN) async with async_session.begin(): await group_repository.save(group) @@ -63,7 +63,7 @@ async def test_saves_and_retrieves_group_with_members( assert retrieved is not None assert len(retrieved.members) == 1 assert retrieved.members[0].user_id.value == user_id.value - assert retrieved.members[0].role == Role.ADMIN + assert retrieved.members[0].role == GroupRole.ADMIN class TestGroupUpdates: @@ -104,7 +104,7 @@ async def test_adds_member_to_existing_group( tenant_id = TenantId.generate() group = Group.create(name="Engineering", tenant_id=tenant_id) admin_id = UserId.generate() - group.add_member(admin_id, Role.ADMIN) + group.add_member(admin_id, GroupRole.ADMIN) # Save initial group async with async_session.begin(): @@ -112,7 +112,7 @@ async def test_adds_member_to_existing_group( # Add another member member_id = UserId.generate() - group.add_member(member_id, Role.MEMBER) + group.add_member(member_id, GroupRole.MEMBER) async with async_session.begin(): await group_repository.save(group) @@ -137,8 +137,8 @@ async def test_removes_member_from_group( group = Group.create(name="Engineering", tenant_id=tenant_id) admin1 = UserId.generate() admin2 = UserId.generate() - group.add_member(admin1, Role.ADMIN) - group.add_member(admin2, Role.ADMIN) + group.add_member(admin1, GroupRole.ADMIN) + group.add_member(admin2, GroupRole.ADMIN) # Save initial group async with async_session.begin(): @@ -171,15 +171,15 @@ async def test_updates_member_role( group = Group.create(name="Engineering", tenant_id=tenant_id) admin_id = UserId.generate() member_id = UserId.generate() - group.add_member(admin_id, Role.ADMIN) - group.add_member(member_id, Role.MEMBER) + group.add_member(admin_id, GroupRole.ADMIN) + group.add_member(member_id, GroupRole.MEMBER) # Save initial group async with async_session.begin(): await group_repository.save(group) # Promote member to admin - group.update_member_role(member_id, Role.ADMIN) + group.update_member_role(member_id, GroupRole.ADMIN) async with async_session.begin(): await group_repository.save(group) @@ -193,7 +193,7 @@ async def test_updates_member_role( member = next( m for m in retrieved.members if m.user_id.value == member_id.value ) - assert member.role == Role.ADMIN + assert member.role == GroupRole.ADMIN class TestGroupDeletion: @@ -211,7 +211,7 @@ async def test_deletes_group_and_members( tenant_id = TenantId.generate() group = Group.create(name="Engineering", tenant_id=tenant_id) user_id = UserId.generate() - group.add_member(user_id, Role.ADMIN) + group.add_member(user_id, GroupRole.ADMIN) # Save group async with async_session.begin(): @@ -359,7 +359,7 @@ async def test_hydrates_members_for_listed_groups( tenant_id = TenantId.generate() group = Group.create(name="Engineering", tenant_id=tenant_id) user_id = UserId.generate() - 
group.add_member(user_id, Role.ADMIN) + group.add_member(user_id, GroupRole.ADMIN) async with async_session.begin(): await group_repository.save(group) diff --git a/src/api/tests/integration/iam/test_outbox_consistency.py b/src/api/tests/integration/iam/test_outbox_consistency.py index 301dde06..d2b1475e 100644 --- a/src/api/tests/integration/iam/test_outbox_consistency.py +++ b/src/api/tests/integration/iam/test_outbox_consistency.py @@ -18,7 +18,7 @@ from iam.domain.aggregates import Group from iam.domain.events import GroupCreated, MemberAdded, MemberRemoved -from iam.domain.value_objects import Role, TenantId, UserId +from iam.domain.value_objects import GroupRole, TenantId, UserId from iam.infrastructure.group_repository import GroupRepository from iam.infrastructure.outbox import IAMEventSerializer, IAMEventTranslator from shared_kernel.authorization.protocols import AuthorizationProvider @@ -109,7 +109,7 @@ async def test_add_member_appends_member_added_event( user_id = UserId.generate() # Add member to the group - group.add_member(user_id, Role.ADMIN) + group.add_member(user_id, GroupRole.ADMIN) async with async_session.begin(): await group_repo.save(group) @@ -130,7 +130,7 @@ async def test_add_member_appends_member_added_event( assert isinstance(event, MemberAdded) assert event.group_id == group.id.value assert event.user_id == user_id.value - assert event.role == Role.ADMIN + assert event.role == GroupRole.ADMIN # Clean up await async_session.execute( @@ -163,8 +163,8 @@ async def test_remove_member_appends_member_removed_event( admin2 = UserId.generate() # Add two admins (need two because we can't remove the last admin) - group.add_member(admin1, Role.ADMIN) - group.add_member(admin2, Role.ADMIN) + group.add_member(admin1, GroupRole.ADMIN) + group.add_member(admin2, GroupRole.ADMIN) async with async_session.begin(): await group_repo.save(group) @@ -193,7 +193,7 @@ async def test_remove_member_appends_member_removed_event( event = serializer.deserialize(outbox_entry.event_type, outbox_entry.payload) assert isinstance(event, MemberRemoved) assert event.user_id == admin2.value - assert event.role == Role.ADMIN + assert event.role == GroupRole.ADMIN # Clean up await async_session.execute( @@ -305,7 +305,7 @@ async def test_worker_processes_member_added_and_writes_to_spicedb( # Use factory method group = Group.create(name="Member Test Group", tenant_id=tenant_id) user_id = UserId.generate() - group.add_member(user_id, Role.MEMBER) + group.add_member(user_id, GroupRole.MEMBER) async with async_session.begin(): await group_repo.save(group) @@ -329,7 +329,7 @@ async def test_worker_processes_member_added_and_writes_to_spicedb( has_relationship = await spicedb_client.check_permission( resource=group_resource, - permission=Role.MEMBER.value, + permission=GroupRole.MEMBER.value, subject=user_subject, ) assert has_relationship is True @@ -343,7 +343,7 @@ async def test_worker_processes_member_added_and_writes_to_spicedb( ) await spicedb_client.delete_relationship( resource=group_resource, - relation=Role.MEMBER.value, + relation=GroupRole.MEMBER.value, subject=user_subject, ) await async_session.execute( diff --git a/src/api/tests/unit/iam/application/test_group_service.py b/src/api/tests/unit/iam/application/test_group_service.py index 7be5d3a1..f7754970 100644 --- a/src/api/tests/unit/iam/application/test_group_service.py +++ b/src/api/tests/unit/iam/application/test_group_service.py @@ -8,7 +8,7 @@ from iam.application.services.group_service import GroupService from 
iam.domain.aggregates import Group, User -from iam.domain.value_objects import GroupId, Role, TenantId, UserId +from iam.domain.value_objects import GroupId, GroupRole, TenantId, UserId from iam.ports.exceptions import DuplicateGroupNameError from iam.ports.repositories import IGroupRepository @@ -126,7 +126,7 @@ async def test_creates_group_with_creator_as_admin( assert result.name == "Engineering" assert len(result.members) == 1 assert result.members[0].user_id == creator_id - assert result.members[0].role == Role.ADMIN + assert result.members[0].role == GroupRole.ADMIN @pytest.mark.asyncio async def test_saves_group_to_repository( diff --git a/src/api/tests/unit/iam/domain/test_events.py b/src/api/tests/unit/iam/domain/test_events.py index 99556e67..b2d75523 100644 --- a/src/api/tests/unit/iam/domain/test_events.py +++ b/src/api/tests/unit/iam/domain/test_events.py @@ -21,7 +21,7 @@ MemberRoleChanged, MemberSnapshot, ) -from iam.domain.value_objects import Role +from iam.domain.value_objects import GroupRole class TestGroupCreated: @@ -89,8 +89,8 @@ def test_creates_with_required_fields(self): """Test that GroupDeleted can be created with required fields.""" occurred_at = datetime.now(UTC) members = ( - MemberSnapshot(user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", role=Role.ADMIN), - MemberSnapshot(user_id="01ARZCX0P0HZGQP3MZXQQ0NNXX", role=Role.MEMBER), + MemberSnapshot(user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", role=GroupRole.ADMIN), + MemberSnapshot(user_id="01ARZCX0P0HZGQP3MZXQQ0NNXX", role=GroupRole.MEMBER), ) event = GroupDeleted( @@ -120,9 +120,9 @@ def test_is_immutable(self): def test_members_snapshot_captures_all_members(self): """Test that members snapshot captures all members with their roles.""" members = ( - MemberSnapshot(user_id="admin1", role=Role.ADMIN), - MemberSnapshot(user_id="member1", role=Role.MEMBER), - MemberSnapshot(user_id="member2", role=Role.MEMBER), + MemberSnapshot(user_id="admin1", role=GroupRole.ADMIN), + MemberSnapshot(user_id="member1", role=GroupRole.MEMBER), + MemberSnapshot(user_id="member2", role=GroupRole.MEMBER), ) event = GroupDeleted( @@ -135,7 +135,7 @@ def test_members_snapshot_captures_all_members(self): assert len(event.members) == 3 # Verify member snapshot structure admin_snapshot = next(m for m in event.members if m.user_id == "admin1") - assert admin_snapshot.role == Role.ADMIN + assert admin_snapshot.role == GroupRole.ADMIN class TestMemberAdded: @@ -148,13 +148,13 @@ def test_creates_with_required_fields(self): event = MemberAdded( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - role=Role.MEMBER, + role=GroupRole.MEMBER, occurred_at=occurred_at, ) assert event.group_id == "01ARZCX0P0HZGQP3MZXQQ0NNZZ" assert event.user_id == "01ARZCX0P0HZGQP3MZXQQ0NNWW" - assert event.role == Role.MEMBER + assert event.role == GroupRole.MEMBER assert event.occurred_at == occurred_at def test_role_is_role_enum(self): @@ -162,24 +162,24 @@ def test_role_is_role_enum(self): event = MemberAdded( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - role=Role.ADMIN, + role=GroupRole.ADMIN, occurred_at=datetime.now(UTC), ) - assert isinstance(event.role, Role) - assert event.role == Role.ADMIN + assert isinstance(event.role, GroupRole) + assert event.role == GroupRole.ADMIN def test_is_immutable(self): """Test that MemberAdded is immutable (frozen dataclass).""" event = MemberAdded( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - role=Role.MEMBER, + role=GroupRole.MEMBER, 
occurred_at=datetime.now(UTC), ) with pytest.raises(FrozenInstanceError): - event.role = Role.ADMIN # type: ignore[misc] + event.role = GroupRole.ADMIN # type: ignore[misc] class TestMemberRemoved: @@ -192,13 +192,13 @@ def test_creates_with_required_fields(self): event = MemberRemoved( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - role=Role.MEMBER, + role=GroupRole.MEMBER, occurred_at=occurred_at, ) assert event.group_id == "01ARZCX0P0HZGQP3MZXQQ0NNZZ" assert event.user_id == "01ARZCX0P0HZGQP3MZXQQ0NNWW" - assert event.role == Role.MEMBER + assert event.role == GroupRole.MEMBER assert event.occurred_at == occurred_at def test_is_immutable(self): @@ -206,7 +206,7 @@ def test_is_immutable(self): event = MemberRemoved( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - role=Role.MEMBER, + role=GroupRole.MEMBER, occurred_at=datetime.now(UTC), ) @@ -224,15 +224,15 @@ def test_creates_with_required_fields(self): event = MemberRoleChanged( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - old_role=Role.MEMBER, - new_role=Role.ADMIN, + old_role=GroupRole.MEMBER, + new_role=GroupRole.ADMIN, occurred_at=occurred_at, ) assert event.group_id == "01ARZCX0P0HZGQP3MZXQQ0NNZZ" assert event.user_id == "01ARZCX0P0HZGQP3MZXQQ0NNWW" - assert event.old_role == Role.MEMBER - assert event.new_role == Role.ADMIN + assert event.old_role == GroupRole.MEMBER + assert event.new_role == GroupRole.ADMIN assert event.occurred_at == occurred_at def test_roles_are_role_enum(self): @@ -240,26 +240,26 @@ def test_roles_are_role_enum(self): event = MemberRoleChanged( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - old_role=Role.MEMBER, - new_role=Role.ADMIN, + old_role=GroupRole.MEMBER, + new_role=GroupRole.ADMIN, occurred_at=datetime.now(UTC), ) - assert isinstance(event.old_role, Role) - assert isinstance(event.new_role, Role) + assert isinstance(event.old_role, GroupRole) + assert isinstance(event.new_role, GroupRole) def test_is_immutable(self): """Test that MemberRoleChanged is immutable (frozen dataclass).""" event = MemberRoleChanged( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - old_role=Role.MEMBER, - new_role=Role.ADMIN, + old_role=GroupRole.MEMBER, + new_role=GroupRole.ADMIN, occurred_at=datetime.now(UTC), ) with pytest.raises(FrozenInstanceError): - event.new_role = Role.MEMBER # type: ignore[misc] + event.new_role = GroupRole.MEMBER # type: ignore[misc] def test_equality_based_on_all_values(self): """Test that two events with same values are equal.""" @@ -268,15 +268,15 @@ def test_equality_based_on_all_values(self): event1 = MemberRoleChanged( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - old_role=Role.MEMBER, - new_role=Role.ADMIN, + old_role=GroupRole.MEMBER, + new_role=GroupRole.ADMIN, occurred_at=occurred_at, ) event2 = MemberRoleChanged( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - old_role=Role.MEMBER, - new_role=Role.ADMIN, + old_role=GroupRole.MEMBER, + new_role=GroupRole.ADMIN, occurred_at=occurred_at, ) @@ -303,7 +303,7 @@ def test_all_events_can_be_typed_as_domain_event(self): tenant_id="01ARZCX0P0HZGQP3MZXQQ0NNYY", members=( MemberSnapshot( - user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", role=Role.ADMIN + user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", role=GroupRole.ADMIN ), ), occurred_at=occurred_at, @@ -311,20 +311,20 @@ def test_all_events_can_be_typed_as_domain_event(self): MemberAdded( 
group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - role=Role.MEMBER, + role=GroupRole.MEMBER, occurred_at=occurred_at, ), MemberRemoved( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - role=Role.MEMBER, + role=GroupRole.MEMBER, occurred_at=occurred_at, ), MemberRoleChanged( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - old_role=Role.MEMBER, - new_role=Role.ADMIN, + old_role=GroupRole.MEMBER, + new_role=GroupRole.ADMIN, occurred_at=occurred_at, ), ] diff --git a/src/api/tests/unit/iam/domain/test_group_aggregate.py b/src/api/tests/unit/iam/domain/test_group_aggregate.py index 31729fdb..f66fbf1d 100644 --- a/src/api/tests/unit/iam/domain/test_group_aggregate.py +++ b/src/api/tests/unit/iam/domain/test_group_aggregate.py @@ -7,7 +7,7 @@ import pytest from iam.domain.aggregates import Group -from iam.domain.value_objects import GroupId, Role, TenantId, UserId +from iam.domain.value_objects import GroupId, GroupRole, TenantId, UserId class TestGroupCreation: @@ -95,11 +95,11 @@ def test_adds_member_with_role(self): ) user_id = UserId.generate() - group.add_member(user_id, Role.MEMBER) + group.add_member(user_id, GroupRole.MEMBER) assert len(group.members) == 1 assert group.members[0].user_id == user_id - assert group.members[0].role == Role.MEMBER + assert group.members[0].role == GroupRole.MEMBER def test_adds_admin(self): """Test that admin can be added.""" @@ -110,9 +110,9 @@ def test_adds_admin(self): ) admin_id = UserId.generate() - group.add_member(admin_id, Role.ADMIN) + group.add_member(admin_id, GroupRole.ADMIN) - assert group.members[0].role == Role.ADMIN + assert group.members[0].role == GroupRole.ADMIN def test_adds_multiple_members(self): """Test that multiple members can be added.""" @@ -124,8 +124,8 @@ def test_adds_multiple_members(self): alice = UserId.generate() bob = UserId.generate() - group.add_member(alice, Role.ADMIN) - group.add_member(bob, Role.MEMBER) + group.add_member(alice, GroupRole.ADMIN) + group.add_member(bob, GroupRole.MEMBER) assert len(group.members) == 2 @@ -138,10 +138,10 @@ def test_prevents_duplicate_members(self): ) user_id = UserId.generate() - group.add_member(user_id, Role.MEMBER) + group.add_member(user_id, GroupRole.MEMBER) with pytest.raises(ValueError, match="already a member"): - group.add_member(user_id, Role.ADMIN) + group.add_member(user_id, GroupRole.ADMIN) class TestHasMember: @@ -155,7 +155,7 @@ def test_returns_true_when_member_exists(self): name="Engineering", ) user_id = UserId.generate() - group.add_member(user_id, Role.MEMBER) + group.add_member(user_id, GroupRole.MEMBER) assert group.has_member(user_id) is True @@ -182,11 +182,11 @@ def test_returns_role_for_member(self): name="Engineering", ) user_id = UserId.generate() - group.add_member(user_id, Role.ADMIN) + group.add_member(user_id, GroupRole.ADMIN) role = group.get_member_role(user_id) - assert role == Role.ADMIN + assert role == GroupRole.ADMIN def test_returns_none_for_non_member(self): """Test that get_member_role returns None for non-member.""" @@ -215,8 +215,8 @@ def test_removes_existing_member(self): admin_id = UserId.generate() user_id = UserId.generate() # Add an admin first so we can remove the regular member - group.add_member(admin_id, Role.ADMIN) - group.add_member(user_id, Role.MEMBER) + group.add_member(admin_id, GroupRole.ADMIN) + group.add_member(user_id, GroupRole.MEMBER) group.remove_member(user_id) @@ -243,7 +243,7 @@ def test_prevents_removing_last_admin(self): 
name="Engineering", ) admin_id = UserId.generate() - group.add_member(admin_id, Role.ADMIN) + group.add_member(admin_id, GroupRole.ADMIN) with pytest.raises(ValueError, match="last admin"): group.remove_member(admin_id) @@ -257,8 +257,8 @@ def test_can_remove_admin_when_multiple_admins_exist(self): ) admin1 = UserId.generate() admin2 = UserId.generate() - group.add_member(admin1, Role.ADMIN) - group.add_member(admin2, Role.ADMIN) + group.add_member(admin1, GroupRole.ADMIN) + group.add_member(admin2, GroupRole.ADMIN) group.remove_member(admin1) @@ -277,11 +277,11 @@ def test_updates_member_role(self): name="Engineering", ) user_id = UserId.generate() - group.add_member(user_id, Role.MEMBER) + group.add_member(user_id, GroupRole.MEMBER) - group.update_member_role(user_id, Role.ADMIN) + group.update_member_role(user_id, GroupRole.ADMIN) - assert group.get_member_role(user_id) == Role.ADMIN + assert group.get_member_role(user_id) == GroupRole.ADMIN def test_raises_when_updating_non_member(self): """Test that updating non-member raises error.""" @@ -293,7 +293,7 @@ def test_raises_when_updating_non_member(self): user_id = UserId.generate() with pytest.raises(ValueError, match="not a member"): - group.update_member_role(user_id, Role.ADMIN) + group.update_member_role(user_id, GroupRole.ADMIN) def test_prevents_demoting_last_admin(self): """Test that last admin cannot be demoted.""" @@ -303,10 +303,10 @@ def test_prevents_demoting_last_admin(self): name="Engineering", ) admin_id = UserId.generate() - group.add_member(admin_id, Role.ADMIN) + group.add_member(admin_id, GroupRole.ADMIN) with pytest.raises(ValueError, match="last admin"): - group.update_member_role(admin_id, Role.MEMBER) + group.update_member_role(admin_id, GroupRole.MEMBER) def test_can_demote_admin_when_multiple_admins_exist(self): """Test that admin can be demoted if other admins exist.""" @@ -317,12 +317,12 @@ def test_can_demote_admin_when_multiple_admins_exist(self): ) admin1 = UserId.generate() admin2 = UserId.generate() - group.add_member(admin1, Role.ADMIN) - group.add_member(admin2, Role.ADMIN) + group.add_member(admin1, GroupRole.ADMIN) + group.add_member(admin2, GroupRole.ADMIN) - group.update_member_role(admin1, Role.MEMBER) + group.update_member_role(admin1, GroupRole.MEMBER) - assert group.get_member_role(admin1) == Role.MEMBER + assert group.get_member_role(admin1) == GroupRole.MEMBER class TestEventCollection: @@ -355,14 +355,14 @@ def test_add_member_records_member_added_event(self): ) user_id = UserId.generate() - group.add_member(user_id, Role.MEMBER) + group.add_member(user_id, GroupRole.MEMBER) events = group.collect_events() assert len(events) == 1 assert isinstance(events[0], MemberAdded) assert events[0].group_id == group.id.value assert events[0].user_id == user_id.value - assert events[0].role == Role.MEMBER + assert events[0].role == GroupRole.MEMBER def test_remove_member_records_member_removed_event(self): """Test that remove_member records a MemberRemoved event.""" @@ -375,8 +375,8 @@ def test_remove_member_records_member_removed_event(self): ) admin_id = UserId.generate() user_id = UserId.generate() - group.add_member(admin_id, Role.ADMIN) - group.add_member(user_id, Role.MEMBER) + group.add_member(admin_id, GroupRole.ADMIN) + group.add_member(user_id, GroupRole.MEMBER) group.collect_events() # Clear creation events group.remove_member(user_id) @@ -386,7 +386,7 @@ def test_remove_member_records_member_removed_event(self): assert isinstance(events[0], MemberRemoved) assert events[0].group_id == 
group.id.value assert events[0].user_id == user_id.value - assert events[0].role == Role.MEMBER + assert events[0].role == GroupRole.MEMBER def test_update_member_role_records_member_role_changed_event(self): """Test that update_member_role records a MemberRoleChanged event.""" @@ -399,19 +399,19 @@ def test_update_member_role_records_member_role_changed_event(self): ) admin_id = UserId.generate() user_id = UserId.generate() - group.add_member(admin_id, Role.ADMIN) - group.add_member(user_id, Role.MEMBER) + group.add_member(admin_id, GroupRole.ADMIN) + group.add_member(user_id, GroupRole.MEMBER) group.collect_events() # Clear creation events - group.update_member_role(user_id, Role.ADMIN) + group.update_member_role(user_id, GroupRole.ADMIN) events = group.collect_events() assert len(events) == 1 assert isinstance(events[0], MemberRoleChanged) assert events[0].group_id == group.id.value assert events[0].user_id == user_id.value - assert events[0].old_role == Role.MEMBER - assert events[0].new_role == Role.ADMIN + assert events[0].old_role == GroupRole.MEMBER + assert events[0].new_role == GroupRole.ADMIN def test_collect_events_clears_pending_events(self): """Test that collect_events clears the pending events list.""" @@ -421,7 +421,7 @@ def test_collect_events_clears_pending_events(self): name="Engineering", ) user_id = UserId.generate() - group.add_member(user_id, Role.MEMBER) + group.add_member(user_id, GroupRole.MEMBER) # First collection should have events events1 = group.collect_events() @@ -442,9 +442,9 @@ def test_multiple_operations_record_multiple_events(self): user2 = UserId.generate() user3 = UserId.generate() - group.add_member(user1, Role.ADMIN) - group.add_member(user2, Role.MEMBER) - group.add_member(user3, Role.MEMBER) + group.add_member(user1, GroupRole.ADMIN) + group.add_member(user2, GroupRole.MEMBER) + group.add_member(user3, GroupRole.MEMBER) events = group.collect_events() @@ -485,8 +485,8 @@ def test_group_deleted_event_includes_member_snapshot(self): ) admin_id = UserId.generate() member_id = UserId.generate() - group.add_member(admin_id, Role.ADMIN) - group.add_member(member_id, Role.MEMBER) + group.add_member(admin_id, GroupRole.ADMIN) + group.add_member(member_id, GroupRole.MEMBER) group.collect_events() # Clear add events group.mark_for_deletion() @@ -505,6 +505,6 @@ def test_group_deleted_event_includes_member_snapshot(self): for snapshot in event.members: assert isinstance(snapshot, MemberSnapshot) if snapshot.user_id == admin_id.value: - assert snapshot.role == Role.ADMIN + assert snapshot.role == GroupRole.ADMIN else: - assert snapshot.role == Role.MEMBER + assert snapshot.role == GroupRole.MEMBER diff --git a/src/api/tests/unit/iam/infrastructure/outbox/test_serializer.py b/src/api/tests/unit/iam/infrastructure/outbox/test_serializer.py index 1f146d63..e646aff3 100644 --- a/src/api/tests/unit/iam/infrastructure/outbox/test_serializer.py +++ b/src/api/tests/unit/iam/infrastructure/outbox/test_serializer.py @@ -17,7 +17,7 @@ MemberRoleChanged, MemberSnapshot, ) -from iam.domain.value_objects import Role +from iam.domain.value_objects import GroupRole from iam.infrastructure.outbox import IAMEventSerializer @@ -60,8 +60,8 @@ def test_serializes_group_deleted_with_members(self): serializer = IAMEventSerializer() occurred_at = datetime(2026, 1, 8, 12, 0, 0, tzinfo=UTC) members = ( - MemberSnapshot(user_id="user1", role=Role.ADMIN), - MemberSnapshot(user_id="user2", role=Role.MEMBER), + MemberSnapshot(user_id="user1", role=GroupRole.ADMIN), + 
MemberSnapshot(user_id="user2", role=GroupRole.MEMBER), ) event = GroupDeleted( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", @@ -85,7 +85,7 @@ def test_serializes_member_added(self): event = MemberAdded( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - role=Role.MEMBER, + role=GroupRole.MEMBER, occurred_at=occurred_at, ) @@ -102,8 +102,8 @@ def test_serializes_member_role_changed(self): event = MemberRoleChanged( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - old_role=Role.MEMBER, - new_role=Role.ADMIN, + old_role=GroupRole.MEMBER, + new_role=GroupRole.ADMIN, occurred_at=occurred_at, ) @@ -183,8 +183,8 @@ def test_deserializes_group_deleted_with_members(self): assert len(event.members) == 2 assert isinstance(event.members, tuple) assert isinstance(event.members[0], MemberSnapshot) - assert event.members[0].role == Role.ADMIN - assert event.members[1].role == Role.MEMBER + assert event.members[0].role == GroupRole.ADMIN + assert event.members[1].role == GroupRole.MEMBER def test_deserializes_member_added(self): """MemberAdded should reconstruct Role enum.""" @@ -199,7 +199,7 @@ def test_deserializes_member_added(self): event = serializer.deserialize("MemberAdded", payload) assert isinstance(event, MemberAdded) - assert event.role == Role.MEMBER + assert event.role == GroupRole.MEMBER def test_deserializes_member_role_changed(self): """MemberRoleChanged should reconstruct both roles.""" @@ -215,8 +215,8 @@ def test_deserializes_member_role_changed(self): event = serializer.deserialize("MemberRoleChanged", payload) assert isinstance(event, MemberRoleChanged) - assert event.old_role == Role.MEMBER - assert event.new_role == Role.ADMIN + assert event.old_role == GroupRole.MEMBER + assert event.new_role == GroupRole.ADMIN def test_raises_for_unknown_event_type(self): """Deserializer should raise for unknown event types.""" @@ -249,8 +249,8 @@ def test_round_trip_group_deleted(self): """GroupDeleted should round trip with members.""" serializer = IAMEventSerializer() members = ( - MemberSnapshot(user_id="user1", role=Role.ADMIN), - MemberSnapshot(user_id="user2", role=Role.MEMBER), + MemberSnapshot(user_id="user1", role=GroupRole.ADMIN), + MemberSnapshot(user_id="user2", role=GroupRole.MEMBER), ) original = GroupDeleted( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", @@ -283,20 +283,20 @@ def test_round_trip_all_events(self): MemberAdded( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - role=Role.MEMBER, + role=GroupRole.MEMBER, occurred_at=occurred_at, ), MemberRemoved( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - role=Role.MEMBER, + role=GroupRole.MEMBER, occurred_at=occurred_at, ), MemberRoleChanged( group_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ", user_id="01ARZCX0P0HZGQP3MZXQQ0NNWW", - old_role=Role.MEMBER, - new_role=Role.ADMIN, + old_role=GroupRole.MEMBER, + new_role=GroupRole.ADMIN, occurred_at=occurred_at, ), ] diff --git a/src/api/tests/unit/iam/infrastructure/outbox/test_translator.py b/src/api/tests/unit/iam/infrastructure/outbox/test_translator.py index 89df3061..e89ad90c 100644 --- a/src/api/tests/unit/iam/infrastructure/outbox/test_translator.py +++ b/src/api/tests/unit/iam/infrastructure/outbox/test_translator.py @@ -6,7 +6,7 @@ import pytest -from iam.domain.value_objects import Role +from iam.domain.value_objects import GroupRole from iam.infrastructure.outbox import IAMEventTranslator from shared_kernel.authorization.types import RelationType, ResourceType from 
shared_kernel.outbox.operations import DeleteRelationship, WriteRelationship @@ -138,7 +138,7 @@ def test_translates_member_added_with_member_role(self): assert isinstance(op, WriteRelationship) assert op.resource_type == ResourceType.GROUP assert op.resource_id == "01ARZCX0P0HZGQP3MZXQQ0NNZZ" - assert op.relation == Role.MEMBER + assert op.relation == GroupRole.MEMBER assert op.subject_type == ResourceType.USER assert op.subject_id == "01ARZCX0P0HZGQP3MZXQQ0NNWW" @@ -154,7 +154,7 @@ def test_translates_member_added_with_admin_role(self): operations = translator.translate("MemberAdded", payload) - assert operations[0].relation == Role.ADMIN + assert operations[0].relation == GroupRole.ADMIN assert operations[0].relation_name == "admin" diff --git a/src/api/tests/unit/iam/infrastructure/test_group_repository.py b/src/api/tests/unit/iam/infrastructure/test_group_repository.py index ddd3ad69..10234a42 100644 --- a/src/api/tests/unit/iam/infrastructure/test_group_repository.py +++ b/src/api/tests/unit/iam/infrastructure/test_group_repository.py @@ -7,7 +7,7 @@ from unittest.mock import AsyncMock, MagicMock, create_autospec from iam.domain.aggregates import Group -from iam.domain.value_objects import GroupId, Role, TenantId, UserId +from iam.domain.value_objects import GroupId, GroupRole, TenantId, UserId from iam.infrastructure.group_repository import GroupRepository from iam.infrastructure.models import GroupModel from iam.ports.exceptions import DuplicateGroupNameError @@ -148,7 +148,7 @@ async def test_appends_events_to_outbox( tenant_id = TenantId.generate() # Use factory to generate events group = Group.create(name="Engineering", tenant_id=tenant_id) - group.add_member(UserId.generate(), Role.ADMIN) + group.add_member(UserId.generate(), GroupRole.ADMIN) # Mock get_by_name to return None repository.get_by_name = AsyncMock(return_value=None) @@ -216,9 +216,11 @@ async def test_returns_group_with_members_hydrated( # Mock SpiceDB members - return members only for ADMIN role, empty for others async def mock_lookup(resource, relation, subject_type): - if relation == Role.ADMIN.value: + if relation == GroupRole.ADMIN.value: return [ - SubjectRelation(subject_id=user_id.value, relation=Role.ADMIN.value) + SubjectRelation( + subject_id=user_id.value, relation=GroupRole.ADMIN.value + ) ] return [] @@ -232,7 +234,7 @@ async def mock_lookup(resource, relation, subject_type): assert result.name == "Engineering" assert len(result.members) == 1 assert result.members[0].user_id.value == user_id.value - assert result.members[0].role == Role.ADMIN + assert result.members[0].role == GroupRole.ADMIN class TestGetByName: @@ -331,7 +333,7 @@ async def test_appends_group_deleted_to_outbox( name="Engineering", ) admin_id = UserId.generate() - group.add_member(admin_id, Role.ADMIN) + group.add_member(admin_id, GroupRole.ADMIN) group.collect_events() # Clear the add event group.mark_for_deletion() diff --git a/src/api/tests/unit/infrastructure/outbox/test_worker.py b/src/api/tests/unit/infrastructure/outbox/test_worker.py index a53943f3..7f407539 100644 --- a/src/api/tests/unit/infrastructure/outbox/test_worker.py +++ b/src/api/tests/unit/infrastructure/outbox/test_worker.py @@ -10,7 +10,7 @@ import pytest -from iam.domain.value_objects import Role +from iam.domain.value_objects import GroupRole from infrastructure.outbox.worker import OutboxWorker from shared_kernel.authorization.types import RelationType, ResourceType from shared_kernel.outbox.operations import DeleteRelationship, WriteRelationship @@ -111,7 
+111,7 @@ async def test_handles_delete_relationship(self):
             DeleteRelationship(
                 resource_type=ResourceType.GROUP,
                 resource_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ",
-                relation=Role.MEMBER,
+                relation=GroupRole.MEMBER,
                 subject_type=ResourceType.USER,
                 subject_id="01ARZCX0P0HZGQP3MZXQQ0NNWW",
             )
diff --git a/src/api/tests/unit/shared_kernel/outbox/test_operations.py b/src/api/tests/unit/shared_kernel/outbox/test_operations.py
index 95ea114c..5bd55d7b 100644
--- a/src/api/tests/unit/shared_kernel/outbox/test_operations.py
+++ b/src/api/tests/unit/shared_kernel/outbox/test_operations.py
@@ -75,12 +75,12 @@ def test_relation_name_property_with_enum(self):
 
     def test_relation_name_property_with_role_string(self):
         """Should handle role strings (from Role enum)."""
-        from iam.domain.value_objects import Role
+        from iam.domain.value_objects import GroupRole
 
         op = WriteRelationship(
             resource_type=ResourceType.GROUP,
             resource_id="01ARZCX0P0HZGQP3MZXQQ0NNZZ",
-            relation=Role.ADMIN,
+            relation=GroupRole.ADMIN,
             subject_type=ResourceType.USER,
             subject_id="01ARZCX0P0HZGQP3MZXQQ0NNWW",
         )

From 3399577efb8677c9c0abb0cdbbf64841f919b7b0 Mon Sep 17 00:00:00 2001
From: John Sell
Date: Thu, 29 Jan 2026 11:53:03 -0500
Subject: [PATCH 06/11] refactor(api.iam): add TenantMember value object

---
 src/api/iam/domain/value_objects.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/src/api/iam/domain/value_objects.py b/src/api/iam/domain/value_objects.py
index 7a4fadae..409e0819 100644
--- a/src/api/iam/domain/value_objects.py
+++ b/src/api/iam/domain/value_objects.py
@@ -140,6 +140,13 @@ class GroupRole(StrEnum):
     MEMBER = "member"
 
 
+class TenantRole(StrEnum):
+    """Roles for tenant membership."""
+
+    ADMIN = "admin"
+    MEMBER = "member"
+
+
 @dataclass(frozen=True)
 class GroupMember:
     """Represents a user's membership in a group with a specific role.
@@ -162,3 +169,11 @@ def is_member(self) -> bool:
     def has_admin_privileges(self) -> bool:
         """Check if this member has admin privileges."""
         return self.role == GroupRole.ADMIN
+
+
+@dataclass(frozen=True)
+class TenantMember:
+    """Represents a user's membership in a tenant with a specific role."""
+
+    user_id: UserId
+    role: TenantRole

From 77c65c297c84987140c578db149261e4c2b0ab74 Mon Sep 17 00:00:00 2001
From: John Sell
Date: Thu, 29 Jan 2026 12:17:11 -0500
Subject: [PATCH 07/11] refactor(api.iam): add TenantMemberAdded event and method

---
 src/api/iam/domain/aggregates/tenant.py | 26 ++++++++++++++++++++++---
 src/api/iam/domain/events.py            | 23 +++++++++++++++++++++-
 2 files changed, 45 insertions(+), 4 deletions(-)

diff --git a/src/api/iam/domain/aggregates/tenant.py b/src/api/iam/domain/aggregates/tenant.py
index 78f34eb8..d65d7eee 100644
--- a/src/api/iam/domain/aggregates/tenant.py
+++ b/src/api/iam/domain/aggregates/tenant.py
@@ -4,10 +4,10 @@
 
 from dataclasses import dataclass, field
 from datetime import UTC, datetime
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
 
-from iam.domain.events import TenantCreated, TenantDeleted
-from iam.domain.value_objects import TenantId
+from iam.domain.events import TenantCreated, TenantDeleted, TenantMemberAdded
+from iam.domain.value_objects import TenantId, TenantRole, UserId
 
 if TYPE_CHECKING:
     from iam.domain.events import DomainEvent
@@ -60,6 +60,26 @@ def create(cls, name: str) -> "Tenant":
         )
         return tenant
 
+    def add_user(
+        self, user_id: UserId, role: TenantRole, added_by: Optional[UserId] = None
+    ):
+        """Add a user as a member to this tenant.
+
+        Args:
+            user_id: User being added
+            role: Their role in the tenant
+            added_by: Admin who added them (None for system/migration)
+        """
+        self._pending_events.append(
+            TenantMemberAdded(
+                tenant_id=self.id,
+                user_id=user_id,
+                role=role,
+                added_by=added_by,
+                occurred_at=datetime.now(UTC),
+            )
+        )
+
     def mark_for_deletion(self) -> None:
         """Mark the tenant for deletion and record the TenantDeleted event.
 
diff --git a/src/api/iam/domain/events.py b/src/api/iam/domain/events.py
index 0ad59f5c..591a0473 100644
--- a/src/api/iam/domain/events.py
+++ b/src/api/iam/domain/events.py
@@ -12,8 +12,9 @@
 
 from dataclasses import dataclass
 from datetime import datetime
+from typing import Optional
 
-from iam.domain.value_objects import GroupRole
+from iam.domain.value_objects import GroupRole, TenantId, TenantRole, UserId
 
 
 @dataclass(frozen=True)
@@ -166,6 +167,25 @@ class TenantDeleted:
     occurred_at: datetime
 
 
+@dataclass(frozen=True)
+class TenantMemberAdded:
+    """Event raised when a user is added as a member to a tenant.
+
+    Attributes:
+        tenant_id: The ID of the tenant to which the member was added
+        user_id: The user added as a member to the tenant
+        role: The role the user is given within the tenant
+        added_by: The optional ID of the user that initiated this action
+        occurred_at: When this event occurred (UTC)
+    """
+
+    tenant_id: TenantId
+    user_id: UserId
+    role: TenantRole
+    occurred_at: datetime
+    added_by: Optional[UserId] = None
+
+
 @dataclass(frozen=True)
 class APIKeyCreated:
     """Event raised when a new API key is created.
@@ -215,6 +235,7 @@ class APIKeyRevoked:
     | MemberRoleChanged
     | TenantCreated
     | TenantDeleted
+    | TenantMemberAdded
    | APIKeyCreated
     | APIKeyRevoked
 )

From 768d47931e671f2b615831b0b80008c37e5b5b48 Mon Sep 17 00:00:00 2001
From: John Sell
Date: Thu, 29 Jan 2026 12:21:51 -0500
Subject: [PATCH 08/11] refactor(api.iam): add TenantMemberRemoved event and method

---
 src/api/iam/domain/aggregates/tenant.py | 31 +++++++++++++++++++++++--
 src/api/iam/domain/events.py            | 18 ++++++++++++++
 src/api/iam/domain/exceptions.py        |  2 ++
 3 files changed, 49 insertions(+), 2 deletions(-)
 create mode 100644 src/api/iam/domain/exceptions.py

diff --git a/src/api/iam/domain/aggregates/tenant.py b/src/api/iam/domain/aggregates/tenant.py
index d65d7eee..6c1080c3 100644
--- a/src/api/iam/domain/aggregates/tenant.py
+++ b/src/api/iam/domain/aggregates/tenant.py
@@ -6,7 +6,13 @@
 from datetime import UTC, datetime
 from typing import TYPE_CHECKING, Optional
 
-from iam.domain.events import TenantCreated, TenantDeleted, TenantMemberAdded
+from iam.domain.events import (
+    TenantCreated,
+    TenantDeleted,
+    TenantMemberAdded,
+    TenantMemberRemoved,
+)
+from iam.domain.exceptions import CannotRemoveLastAdminError
 from iam.domain.value_objects import TenantId, TenantRole, UserId
 
 if TYPE_CHECKING:
@@ -60,7 +66,7 @@ def create(cls, name: str) -> "Tenant":
         )
         return tenant
 
-    def add_user(
+    def add_member(
         self, user_id: UserId, role: TenantRole, added_by: Optional[UserId] = None
     ):
         """Add a user as a member to this tenant.
@@ -80,6 +86,27 @@ def add_user(
             )
         )
 
+    def remove_member(self, user_id: UserId, removed_by: UserId, is_last_admin: bool):
+        """Remove a member from a tenant.
+
+        Args:
+            user_id: User being removed
+            removed_by: Admin who removed them
+            is_last_admin: Whether this user is the last admin in the tenant. If True,
+                a CannotRemoveLastAdminError is raised.
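+
+        Raises:
+            CannotRemoveLastAdminError: If is_last_admin is True.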
+        """
+        if is_last_admin:
+            raise CannotRemoveLastAdminError()
+
+        self._pending_events.append(
+            TenantMemberRemoved(
+                tenant_id=self.id,
+                user_id=user_id,
+                removed_by=removed_by,
+                occurred_at=datetime.now(UTC),
+            )
+        )
+
     def mark_for_deletion(self) -> None:
         """Mark the tenant for deletion and record the TenantDeleted event.
 
diff --git a/src/api/iam/domain/events.py b/src/api/iam/domain/events.py
index 591a0473..a7a22fd5 100644
--- a/src/api/iam/domain/events.py
+++ b/src/api/iam/domain/events.py
@@ -186,6 +186,23 @@ class TenantMemberAdded:
     added_by: Optional[UserId] = None
 
 
+@dataclass(frozen=True)
+class TenantMemberRemoved:
+    """Event raised when a user is removed as a member from a tenant.
+
+    Attributes:
+        tenant_id: The ID of the tenant from which the member was removed
+        user_id: The user removed as a member from the tenant
+        removed_by: The ID of the user that initiated this action
+        occurred_at: When this event occurred (UTC)
+    """
+
+    tenant_id: TenantId
+    user_id: UserId
+    occurred_at: datetime
+    removed_by: UserId
+
+
 @dataclass(frozen=True)
 class APIKeyCreated:
     """Event raised when a new API key is created.
@@ -236,6 +253,7 @@ class APIKeyRevoked:
     | TenantCreated
     | TenantDeleted
     | TenantMemberAdded
+    | TenantMemberRemoved
     | APIKeyCreated
     | APIKeyRevoked
 )
diff --git a/src/api/iam/domain/exceptions.py b/src/api/iam/domain/exceptions.py
new file mode 100644
index 00000000..5c864375
--- /dev/null
+++ b/src/api/iam/domain/exceptions.py
@@ -0,0 +1,2 @@
+class CannotRemoveLastAdminError(Exception):
+    """Raised when removing the last admin from a tenant."""

From a6a41933b1fb42e96ca4b3d76804de09f255890b Mon Sep 17 00:00:00 2001
From: John Sell
Date: Fri, 6 Feb 2026 15:43:17 -0500
Subject: [PATCH 09/11] feat(iam.infrastructure): add workspace repository and
 fix cascade constraints

- Fix groups FK from CASCADE to RESTRICT to force application-level
  cascading and ensure domain events are emitted for SpiceDB cleanup
- Create workspaces table with RESTRICT FK constraints and partial
  unique index for root workspace per tenant
- Add WorkspaceModel with self-referential parent/child relationships
- Define IWorkspaceRepository protocol with save, get_by_id, get_by_name,
  get_root_workspace, list_by_tenant, and delete methods
- Implement WorkspaceRepository following TenantRepository pattern with
  transactional outbox for domain events
- Add WorkspaceRepositoryProbe for domain-oriented observability
- Add default_workspace_name setting to IAMSettings (defaults to None,
  falls back to tenant name)
- Wire workspace repository into FastAPI dependency injection
- Add 27 unit tests covering all repository methods, protocol compliance,
  outbox events, observability probes, and edge cases

Co-Authored-By: Claude Sonnet 4.5 (1M context)
---
 src/api/iam/dependencies/workspace.py         |  31 +
 src/api/iam/infrastructure/models.py          |  87 +-
 .../infrastructure/observability/__init__.py  |   4 +
 .../observability/repository_probe.py         |  97 +++
 .../infrastructure/workspace_repository.py    | 270 ++++++
 src/api/iam/ports/repositories.py             |  93 ++-
 .../205809969bf4_create_workspaces_table.py   |  87 ++
 ...76_fix_groups_tenant_cascade_constraint.py |  56 ++
 src/api/infrastructure/settings.py            |   6 +
 .../test_workspace_repository.py              | 774 ++++++++++++++++++
 website/src/data/env-vars.json                |   9 +-
 11 files changed, 1505 insertions(+), 9 deletions(-)
 create mode 100644 src/api/iam/dependencies/workspace.py
 create mode 100644 src/api/iam/infrastructure/workspace_repository.py
 create mode 100644 src/api/infrastructure/migrations/versions/205809969bf4_create_workspaces_table.py
 create mode
100644 src/api/infrastructure/migrations/versions/36612dcd7676_fix_groups_tenant_cascade_constraint.py create mode 100644 src/api/tests/unit/iam/infrastructure/test_workspace_repository.py diff --git a/src/api/iam/dependencies/workspace.py b/src/api/iam/dependencies/workspace.py new file mode 100644 index 00000000..64fd40b4 --- /dev/null +++ b/src/api/iam/dependencies/workspace.py @@ -0,0 +1,31 @@ +"""FastAPI dependency injection for workspace repository. + +Provides workspace repository instances for route handlers +using FastAPI's dependency injection system. +""" + +from typing import Annotated + +from fastapi import Depends +from sqlalchemy.ext.asyncio import AsyncSession + +from iam.dependencies.outbox import get_outbox_repository +from iam.infrastructure.workspace_repository import WorkspaceRepository +from infrastructure.database.dependencies import get_write_session +from infrastructure.outbox.repository import OutboxRepository + + +def get_workspace_repository( + session: Annotated[AsyncSession, Depends(get_write_session)], + outbox: Annotated[OutboxRepository, Depends(get_outbox_repository)], +) -> WorkspaceRepository: + """Get WorkspaceRepository instance. + + Args: + session: Async database session + outbox: Outbox repository for transactional outbox pattern + + Returns: + WorkspaceRepository instance with outbox pattern enabled + """ + return WorkspaceRepository(session=session, outbox=outbox) diff --git a/src/api/iam/infrastructure/models.py b/src/api/iam/infrastructure/models.py index 682a8e40..442fe332 100644 --- a/src/api/iam/infrastructure/models.py +++ b/src/api/iam/infrastructure/models.py @@ -6,8 +6,8 @@ from datetime import datetime -from sqlalchemy import Boolean, DateTime, ForeignKey, String, UniqueConstraint -from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy import Boolean, DateTime, ForeignKey, Index, String, UniqueConstraint +from sqlalchemy.orm import Mapped, mapped_column, relationship from infrastructure.database.models import Base, TimestampMixin @@ -22,9 +22,9 @@ class GroupModel(Base, TimestampMixin): is enforced at the application level. Foreign Key Constraint: - - tenant_id references tenants.id with CASCADE delete - - When a tenant is deleted, all groups are cascade deleted - - Group deletion must be handled in service layer to emit events + - tenant_id references tenants.id with RESTRICT delete + - Application layer must explicitly delete groups before tenant deletion + - This ensures GroupDeleted domain events are emitted for SpiceDB cleanup """ __tablename__ = "groups" @@ -32,7 +32,7 @@ class GroupModel(Base, TimestampMixin): id: Mapped[str] = mapped_column(String(26), primary_key=True) tenant_id: Mapped[str] = mapped_column( String(26), - ForeignKey("tenants.id", ondelete="CASCADE"), + ForeignKey("tenants.id", ondelete="RESTRICT"), nullable=False, index=True, ) @@ -78,11 +78,86 @@ class TenantModel(Base, TimestampMixin): id: Mapped[str] = mapped_column(String(26), primary_key=True) name: Mapped[str] = mapped_column(String(255), nullable=False, unique=True) + # Relationships + workspaces = relationship("WorkspaceModel", back_populates="tenant") + def __repr__(self) -> str: """Return string representation.""" return f"" +class WorkspaceModel(Base, TimestampMixin): + """ORM model for workspaces table. + + Stores workspace metadata in PostgreSQL. Workspaces organize knowledge + graphs within a tenant. Each tenant has exactly one root workspace + (auto-created on tenant creation) and can have multiple child workspaces. 
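+
+    Illustrative shape (a sketch; the child workspace names are examples):
+
+        Root workspace (is_root=True, parent_workspace_id=None)
+        ├── Engineering
+        └── Marketing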
+ + Foreign Key Constraints: + - tenant_id references tenants.id with RESTRICT delete + Application must delete workspaces before tenant deletion + - parent_workspace_id references workspaces.id with RESTRICT delete + Cannot delete a parent workspace while children exist + + Partial Unique Index: + - Only one root workspace (is_root=TRUE) per tenant + """ + + __tablename__ = "workspaces" + + id: Mapped[str] = mapped_column(String(26), primary_key=True) + tenant_id: Mapped[str] = mapped_column( + String(26), + ForeignKey("tenants.id", ondelete="RESTRICT"), + nullable=False, + index=True, + ) + name: Mapped[str] = mapped_column(String(255), nullable=False) + parent_workspace_id: Mapped[str | None] = mapped_column( + String(26), + ForeignKey("workspaces.id", ondelete="RESTRICT"), + nullable=True, + index=True, + ) + is_root: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False + ) + + # Relationships + tenant = relationship("TenantModel", back_populates="workspaces") + parent_workspace = relationship( + "WorkspaceModel", + remote_side="WorkspaceModel.id", + back_populates="child_workspaces", + ) + child_workspaces = relationship( + "WorkspaceModel", + back_populates="parent_workspace", + ) + + __table_args__ = ( + Index("idx_workspaces_name_tenant", "name", "tenant_id"), + Index( + "idx_workspaces_root_unique", + "tenant_id", + "is_root", + unique=True, + postgresql_where=(is_root == True), # noqa: E712 + ), + ) + + def __repr__(self) -> str: + """Return string representation.""" + return ( + f"" + ) + + class APIKeyModel(Base, TimestampMixin): """ORM model for api_keys table. diff --git a/src/api/iam/infrastructure/observability/__init__.py b/src/api/iam/infrastructure/observability/__init__.py index c5f7c2b5..0b78c976 100644 --- a/src/api/iam/infrastructure/observability/__init__.py +++ b/src/api/iam/infrastructure/observability/__init__.py @@ -11,9 +11,11 @@ DefaultGroupRepositoryProbe, DefaultTenantRepositoryProbe, DefaultUserRepositoryProbe, + DefaultWorkspaceRepositoryProbe, GroupRepositoryProbe, TenantRepositoryProbe, UserRepositoryProbe, + WorkspaceRepositoryProbe, ) __all__ = [ @@ -25,4 +27,6 @@ "DefaultUserRepositoryProbe", "TenantRepositoryProbe", "DefaultTenantRepositoryProbe", + "WorkspaceRepositoryProbe", + "DefaultWorkspaceRepositoryProbe", ] diff --git a/src/api/iam/infrastructure/observability/repository_probe.py b/src/api/iam/infrastructure/observability/repository_probe.py index affce841..bdb86aeb 100644 --- a/src/api/iam/infrastructure/observability/repository_probe.py +++ b/src/api/iam/infrastructure/observability/repository_probe.py @@ -309,3 +309,100 @@ def duplicate_tenant_name(self, name: str) -> None: name=name, **self._get_context_kwargs(), ) + + +class WorkspaceRepositoryProbe(Protocol): + """Domain probe for workspace repository operations. + + Records domain events during workspace persistence operations. + """ + + def workspace_saved(self, workspace_id: str, tenant_id: str) -> None: + """Record that a workspace was successfully saved.""" + ... + + def workspace_retrieved(self, workspace_id: str) -> None: + """Record that a workspace was retrieved.""" + ... + + def workspace_not_found(self, workspace_id: str) -> None: + """Record that a workspace was not found.""" + ... 
+ + def workspace_deleted(self, workspace_id: str) -> None: + """Record that a workspace was deleted.""" + ... + + def workspaces_listed(self, tenant_id: str, count: int) -> None: + """Record that workspaces were listed for a tenant.""" + ... + + def with_context(self, context: ObservationContext) -> WorkspaceRepositoryProbe: + """Create a new probe with observation context bound.""" + ... + + +class DefaultWorkspaceRepositoryProbe: + """Default implementation of WorkspaceRepositoryProbe using structlog.""" + + def __init__( + self, + logger: structlog.stdlib.BoundLogger | None = None, + context: ObservationContext | None = None, + ): + self._logger = logger or structlog.get_logger() + self._context = context + + def _get_context_kwargs(self) -> dict[str, Any]: + """Get context metadata as kwargs for logging.""" + if self._context is None: + return {} + return self._context.as_dict() + + def with_context( + self, context: ObservationContext + ) -> DefaultWorkspaceRepositoryProbe: + """Create a new probe with observation context bound.""" + return DefaultWorkspaceRepositoryProbe(logger=self._logger, context=context) + + def workspace_saved(self, workspace_id: str, tenant_id: str) -> None: + """Record that a workspace was successfully saved.""" + self._logger.info( + "workspace_saved", + workspace_id=workspace_id, + tenant_id=tenant_id, + **self._get_context_kwargs(), + ) + + def workspace_retrieved(self, workspace_id: str) -> None: + """Record that a workspace was retrieved.""" + self._logger.debug( + "workspace_retrieved", + workspace_id=workspace_id, + **self._get_context_kwargs(), + ) + + def workspace_not_found(self, workspace_id: str) -> None: + """Record that a workspace was not found.""" + self._logger.debug( + "workspace_not_found", + workspace_id=workspace_id, + **self._get_context_kwargs(), + ) + + def workspace_deleted(self, workspace_id: str) -> None: + """Record that a workspace was deleted.""" + self._logger.info( + "workspace_deleted", + workspace_id=workspace_id, + **self._get_context_kwargs(), + ) + + def workspaces_listed(self, tenant_id: str, count: int) -> None: + """Record that workspaces were listed for a tenant.""" + self._logger.debug( + "workspaces_listed", + tenant_id=tenant_id, + count=count, + **self._get_context_kwargs(), + ) diff --git a/src/api/iam/infrastructure/workspace_repository.py b/src/api/iam/infrastructure/workspace_repository.py new file mode 100644 index 00000000..5d13c98c --- /dev/null +++ b/src/api/iam/infrastructure/workspace_repository.py @@ -0,0 +1,270 @@ +"""PostgreSQL implementation of IWorkspaceRepository. + +This repository manages workspace metadata storage in PostgreSQL. +Unlike GroupRepository, it doesn't need SpiceDB for membership hydration +since workspaces don't have members yet (Phase 3). + +Write operations use the transactional outbox pattern - domain events are +collected from the aggregate and appended to the outbox table. 
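+
+Illustrative usage (a sketch; the session and outbox here are assumed to come
+from the FastAPI dependencies in iam.dependencies.workspace):
+
+    repo = WorkspaceRepository(session=session, outbox=outbox)
+    await repo.save(Workspace.create_root(name="Root", tenant_id=tenant_id))
+    root = await repo.get_root_workspace(tenant_id)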
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from iam.domain.aggregates import Workspace +from iam.domain.value_objects import TenantId, WorkspaceId +from iam.infrastructure.models import WorkspaceModel +from iam.infrastructure.observability import ( + DefaultWorkspaceRepositoryProbe, + WorkspaceRepositoryProbe, +) +from iam.infrastructure.outbox import IAMEventSerializer +from iam.ports.repositories import IWorkspaceRepository + +if TYPE_CHECKING: + from infrastructure.outbox.repository import OutboxRepository + + +class WorkspaceRepository(IWorkspaceRepository): + """Repository managing PostgreSQL storage for Workspace aggregates. + + This implementation stores workspace metadata in PostgreSQL only. + Workspaces are simple aggregates with no complex relationships + requiring SpiceDB hydration (that comes in Phase 3). + + Write operations use the transactional outbox pattern: + - Domain events are collected from the aggregate + - Events are appended to the outbox table (same transaction as PostgreSQL) + - The outbox worker processes events if needed + """ + + def __init__( + self, + session: AsyncSession, + outbox: "OutboxRepository", + probe: WorkspaceRepositoryProbe | None = None, + serializer: IAMEventSerializer | None = None, + ) -> None: + """Initialize repository with database session and outbox. + + Args: + session: AsyncSession from FastAPI dependency injection + outbox: Outbox repository for the transactional outbox pattern + probe: Optional domain probe for observability + serializer: Optional event serializer for testability + """ + self._session = session + self._outbox = outbox + self._probe = probe or DefaultWorkspaceRepositoryProbe() + self._serializer = serializer or IAMEventSerializer() + + async def save(self, workspace: Workspace) -> None: + """Persist workspace metadata to PostgreSQL, events to outbox. + + Uses the transactional outbox pattern: domain events are appended + to the outbox table within the same database transaction. 
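+
+        Note: this method flushes the session but does not commit; the
+        enclosing unit of work is expected to commit the transaction.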
+ + Args: + workspace: The Workspace aggregate to persist + """ + # Upsert workspace metadata in PostgreSQL + stmt = select(WorkspaceModel).where(WorkspaceModel.id == workspace.id.value) + result = await self._session.execute(stmt) + model = result.scalar_one_or_none() + + if model: + # Update existing + model.name = workspace.name + model.parent_workspace_id = ( + workspace.parent_workspace_id.value + if workspace.parent_workspace_id + else None + ) + model.is_root = workspace.is_root + model.updated_at = workspace.updated_at + else: + # Create new + model = WorkspaceModel( + id=workspace.id.value, + tenant_id=workspace.tenant_id.value, + name=workspace.name, + parent_workspace_id=( + workspace.parent_workspace_id.value + if workspace.parent_workspace_id + else None + ), + is_root=workspace.is_root, + created_at=workspace.created_at, + updated_at=workspace.updated_at, + ) + self._session.add(model) + + # Flush to catch integrity errors before outbox writes + await self._session.flush() + + # Collect, serialize, and append events from the aggregate to outbox + events = workspace.collect_events() + for event in events: + payload = self._serializer.serialize(event) + await self._outbox.append( + event_type=type(event).__name__, + payload=payload, + occurred_at=event.occurred_at, + aggregate_type="workspace", + aggregate_id=workspace.id.value, + ) + + self._probe.workspace_saved(workspace.id.value, workspace.tenant_id.value) + + async def get_by_id(self, workspace_id: WorkspaceId) -> Workspace | None: + """Fetch workspace metadata from PostgreSQL. + + Args: + workspace_id: The unique identifier of the workspace + + Returns: + The Workspace aggregate, or None if not found + """ + stmt = select(WorkspaceModel).where(WorkspaceModel.id == workspace_id.value) + result = await self._session.execute(stmt) + model = result.scalar_one_or_none() + + if model is None: + self._probe.workspace_not_found(workspace_id.value) + return None + + workspace = self._to_domain(model) + self._probe.workspace_retrieved(workspace.id.value) + return workspace + + async def get_by_name(self, tenant_id: TenantId, name: str) -> Workspace | None: + """Fetch workspace by name within a tenant. + + Args: + tenant_id: The tenant to search within + name: The workspace name + + Returns: + The Workspace aggregate, or None if not found + """ + stmt = select(WorkspaceModel).where( + WorkspaceModel.tenant_id == tenant_id.value, + WorkspaceModel.name == name, + ) + result = await self._session.execute(stmt) + model = result.scalar_one_or_none() + + if model is None: + return None + + workspace = self._to_domain(model) + self._probe.workspace_retrieved(workspace.id.value) + return workspace + + async def get_root_workspace(self, tenant_id: TenantId) -> Workspace | None: + """Fetch the root workspace for a tenant. + + Args: + tenant_id: The tenant to find the root workspace for + + Returns: + The root Workspace aggregate, or None if not found + """ + stmt = select(WorkspaceModel).where( + WorkspaceModel.tenant_id == tenant_id.value, + WorkspaceModel.is_root == True, # noqa: E712 + ) + result = await self._session.execute(stmt) + model = result.scalar_one_or_none() + + if model is None: + return None + + workspace = self._to_domain(model) + self._probe.workspace_retrieved(workspace.id.value) + return workspace + + async def list_by_tenant(self, tenant_id: TenantId) -> list[Workspace]: + """Fetch all workspaces in a tenant. 
+ + Args: + tenant_id: The tenant to list workspaces for + + Returns: + List of Workspace aggregates in the tenant + """ + stmt = select(WorkspaceModel).where(WorkspaceModel.tenant_id == tenant_id.value) + result = await self._session.execute(stmt) + models = result.scalars().all() + + workspaces = [self._to_domain(model) for model in models] + self._probe.workspaces_listed(tenant_id.value, len(workspaces)) + return workspaces + + async def delete(self, workspace: Workspace) -> bool: + """Delete workspace from PostgreSQL and emit domain events. + + The workspace should have mark_for_deletion() called before this + method to record the WorkspaceDeleted event. + + Args: + workspace: The Workspace aggregate to delete (with deletion event recorded) + + Returns: + True if deleted, False if not found + """ + stmt = select(WorkspaceModel).where(WorkspaceModel.id == workspace.id.value) + result = await self._session.execute(stmt) + model = result.scalar_one_or_none() + + if model is None: + return False + + # Collect and append deletion event to outbox before deletion + events = workspace.collect_events() + for event in events: + payload = self._serializer.serialize(event) + await self._outbox.append( + event_type=type(event).__name__, + payload=payload, + occurred_at=event.occurred_at, + aggregate_type="workspace", + aggregate_id=workspace.id.value, + ) + + # Delete from PostgreSQL + await self._session.delete(model) + await self._session.flush() + + self._probe.workspace_deleted(workspace.id.value) + return True + + def _to_domain(self, model: WorkspaceModel) -> Workspace: + """Convert a WorkspaceModel to a Workspace domain aggregate. + + Reconstitutes the aggregate from database state without generating + any domain events (this is a read operation, not a mutation). + + Args: + model: The SQLAlchemy model to convert + + Returns: + A Workspace domain aggregate + """ + return Workspace( + id=WorkspaceId(value=model.id), + tenant_id=TenantId(value=model.tenant_id), + name=model.name, + parent_workspace_id=( + WorkspaceId(value=model.parent_workspace_id) + if model.parent_workspace_id + else None + ), + is_root=model.is_root, + created_at=model.created_at, + updated_at=model.updated_at, + ) diff --git a/src/api/iam/ports/repositories.py b/src/api/iam/ports/repositories.py index bf66320c..c453e9a1 100644 --- a/src/api/iam/ports/repositories.py +++ b/src/api/iam/ports/repositories.py @@ -9,8 +9,8 @@ from typing import Callable, Protocol, runtime_checkable -from iam.domain.aggregates import APIKey, Group, Tenant, User -from iam.domain.value_objects import APIKeyId, GroupId, TenantId, UserId +from iam.domain.aggregates import APIKey, Group, Tenant, User, Workspace +from iam.domain.value_objects import APIKeyId, GroupId, TenantId, UserId, WorkspaceId from shared_kernel.authorization.protocols import AuthorizationProvider @@ -217,6 +217,95 @@ async def is_last_admin( ... +@runtime_checkable +class IWorkspaceRepository(Protocol): + """Repository for Workspace aggregate persistence. + + Simple repository for workspace metadata. Workspaces organize knowledge + graphs within a tenant. Unlike GroupRepository, no SpiceDB member + hydration is needed (workspace members are managed in a later phase). + + Write operations use the transactional outbox pattern to emit domain + events for SpiceDB relationship management. + """ + + async def save(self, workspace: Workspace) -> None: + """Persist a workspace aggregate. + + Creates a new workspace or updates an existing one. 
Persists workspace + metadata to PostgreSQL and domain events to the outbox. + + Args: + workspace: The Workspace aggregate to persist + """ + ... + + async def get_by_id(self, workspace_id: WorkspaceId) -> Workspace | None: + """Retrieve a workspace by its ID. + + Args: + workspace_id: The unique identifier of the workspace + + Returns: + The Workspace aggregate, or None if not found + """ + ... + + async def get_by_name(self, tenant_id: TenantId, name: str) -> Workspace | None: + """Retrieve a workspace by name within a tenant. + + Used for application-level uniqueness checks before creating + a new workspace. + + Args: + tenant_id: The tenant to search within + name: The workspace name + + Returns: + The Workspace aggregate, or None if not found + """ + ... + + async def get_root_workspace(self, tenant_id: TenantId) -> Workspace | None: + """Retrieve the root workspace for a tenant. + + Each tenant has exactly one root workspace (is_root=True). + + Args: + tenant_id: The tenant to find the root workspace for + + Returns: + The root Workspace aggregate, or None if not found + """ + ... + + async def list_by_tenant(self, tenant_id: TenantId) -> list[Workspace]: + """List all workspaces in a tenant. + + Args: + tenant_id: The tenant to list workspaces for + + Returns: + List of Workspace aggregates in the tenant + """ + ... + + async def delete(self, workspace: Workspace) -> bool: + """Delete a workspace and emit domain events. + + The workspace should have mark_for_deletion() called before this + method to record the WorkspaceDeleted event. The outbox worker + will handle removing relationships from SpiceDB. + + Args: + workspace: The Workspace aggregate to delete (with deletion event recorded) + + Returns: + True if deleted, False if not found + """ + ... + + @runtime_checkable class IAPIKeyRepository(Protocol): """Repository for APIKey aggregate persistence. diff --git a/src/api/infrastructure/migrations/versions/205809969bf4_create_workspaces_table.py b/src/api/infrastructure/migrations/versions/205809969bf4_create_workspaces_table.py new file mode 100644 index 00000000..5320767c --- /dev/null +++ b/src/api/infrastructure/migrations/versions/205809969bf4_create_workspaces_table.py @@ -0,0 +1,87 @@ +"""create workspaces table + +Revision ID: 205809969bf4 +Revises: 36612dcd7676 +Create Date: 2026-02-06 15:35:32.767286 + +Creates the workspaces table for organizing knowledge graphs within tenants. +Uses RESTRICT FK constraints to force application-level cascading and ensure +domain events are emitted for SpiceDB cleanup. +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + + +# revision identifiers, used by Alembic. +revision: str = "205809969bf4" +down_revision: Union[str, Sequence[str], None] = "36612dcd7676" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Create workspaces table with RESTRICT FK constraints. 
+ + Key constraints: + - tenant_id FK with RESTRICT (forces explicit workspace deletion before tenant) + - parent_workspace_id self-FK with RESTRICT (prevents deleting parent with children) + - Partial unique index ensures only one root workspace per tenant + """ + op.create_table( + "workspaces", + sa.Column("id", sa.String(26), primary_key=True), + sa.Column( + "tenant_id", + sa.String(26), + sa.ForeignKey("tenants.id", ondelete="RESTRICT"), + nullable=False, + ), + sa.Column("name", sa.String(255), nullable=False), + sa.Column( + "parent_workspace_id", + sa.String(26), + sa.ForeignKey("workspaces.id", ondelete="RESTRICT"), + nullable=True, + ), + sa.Column("is_root", sa.Boolean, nullable=False, server_default=sa.false()), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + nullable=False, + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + nullable=False, + ), + ) + + # Index on tenant_id for listing workspaces by tenant + op.create_index("idx_workspaces_tenant_id", "workspaces", ["tenant_id"]) + + # Index on parent_workspace_id for hierarchy queries + op.create_index("idx_workspaces_parent", "workspaces", ["parent_workspace_id"]) + + # Composite index for name + tenant lookups + op.create_index("idx_workspaces_name_tenant", "workspaces", ["name", "tenant_id"]) + + # Partial unique index: only one root workspace per tenant + op.create_index( + "idx_workspaces_root_unique", + "workspaces", + ["tenant_id", "is_root"], + unique=True, + postgresql_where=sa.text("is_root = TRUE"), + ) + + +def downgrade() -> None: + """Drop workspaces table and all associated indexes.""" + op.drop_index("idx_workspaces_root_unique", table_name="workspaces") + op.drop_index("idx_workspaces_name_tenant", table_name="workspaces") + op.drop_index("idx_workspaces_parent", table_name="workspaces") + op.drop_index("idx_workspaces_tenant_id", table_name="workspaces") + op.drop_table("workspaces") diff --git a/src/api/infrastructure/migrations/versions/36612dcd7676_fix_groups_tenant_cascade_constraint.py b/src/api/infrastructure/migrations/versions/36612dcd7676_fix_groups_tenant_cascade_constraint.py new file mode 100644 index 00000000..10c3d2b4 --- /dev/null +++ b/src/api/infrastructure/migrations/versions/36612dcd7676_fix_groups_tenant_cascade_constraint.py @@ -0,0 +1,56 @@ +"""fix groups tenant cascade constraint + +Revision ID: 36612dcd7676 +Revises: 193b7c6ad230 +Create Date: 2026-02-06 15:34:59.572180 + +Changes CASCADE to RESTRICT on groups.tenant_id FK to force application-level +cascading. This ensures GroupDeleted domain events are emitted when groups are +removed, preventing orphaned SpiceDB relationships. +""" + +from typing import Sequence, Union + +from alembic import op + + +# revision identifiers, used by Alembic. +revision: str = "36612dcd7676" +down_revision: Union[str, Sequence[str], None] = "193b7c6ad230" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Change groups.tenant_id FK from CASCADE to RESTRICT. + + RESTRICT forces the application layer to explicitly delete groups + before deleting a tenant, ensuring domain events are emitted for + SpiceDB cleanup. 
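+
+    Illustrative effect (SQL sketch; exact error text depends on PostgreSQL):
+
+        DELETE FROM tenants WHERE id = '01ARZCX0P0HZGQP3MZXQQ0NNYY';
+        -- ERROR: update or delete on table "tenants" violates foreign key
+        -- constraint "fk_groups_tenant_id" on table "groups"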
+ """ + # Drop the existing CASCADE FK constraint + op.drop_constraint("fk_groups_tenant_id", "groups", type_="foreignkey") + + # Re-create with RESTRICT instead of CASCADE + op.create_foreign_key( + "fk_groups_tenant_id", + "groups", + "tenants", + ["tenant_id"], + ["id"], + ondelete="RESTRICT", + ) + + +def downgrade() -> None: + """Revert groups.tenant_id FK to CASCADE for rollback.""" + op.drop_constraint("fk_groups_tenant_id", "groups", type_="foreignkey") + + op.create_foreign_key( + "fk_groups_tenant_id", + "groups", + "tenants", + ["tenant_id"], + ["id"], + ondelete="CASCADE", + ) diff --git a/src/api/infrastructure/settings.py b/src/api/infrastructure/settings.py index 885d98af..9e91e1ff 100644 --- a/src/api/infrastructure/settings.py +++ b/src/api/infrastructure/settings.py @@ -271,6 +271,7 @@ class IAMSettings(BaseSettings): Environment variables: KARTOGRAPH_IAM_DEFAULT_TENANT_NAME: Default tenant name for single-tenant mode (default: default) + KARTOGRAPH_IAM_DEFAULT_WORKSPACE_NAME: Default root workspace name (default: None, uses tenant name) """ model_config = SettingsConfigDict( @@ -285,6 +286,11 @@ class IAMSettings(BaseSettings): description="Default tenant name for single-tenant mode", ) + default_workspace_name: str | None = Field( + default=None, + description="Default root workspace name (if None, uses tenant name)", + ) + @lru_cache def get_iam_settings() -> IAMSettings: diff --git a/src/api/tests/unit/iam/infrastructure/test_workspace_repository.py b/src/api/tests/unit/iam/infrastructure/test_workspace_repository.py new file mode 100644 index 00000000..6552aee4 --- /dev/null +++ b/src/api/tests/unit/iam/infrastructure/test_workspace_repository.py @@ -0,0 +1,774 @@ +"""Unit tests for WorkspaceRepository. + +Following TDD principles - tests verify repository behavior with mocked dependencies. +Tests cover all IWorkspaceRepository protocol methods including outbox event emission, +constraint enforcement, and edge cases. 
+""" + +from datetime import UTC, datetime +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from iam.domain.aggregates import Workspace +from iam.domain.value_objects import TenantId, WorkspaceId +from iam.infrastructure.models import WorkspaceModel +from iam.infrastructure.workspace_repository import WorkspaceRepository +from iam.ports.repositories import IWorkspaceRepository + + +@pytest.fixture +def mock_session(): + """Create mock async session.""" + session = AsyncMock() + return session + + +@pytest.fixture +def mock_probe(): + """Create mock repository probe.""" + probe = MagicMock() + return probe + + +@pytest.fixture +def mock_outbox(): + """Create mock outbox repository.""" + outbox = MagicMock() + outbox.append = AsyncMock() + return outbox + + +@pytest.fixture +def mock_serializer(): + """Create mock event serializer.""" + serializer = MagicMock() + serializer.serialize.return_value = {"test": "payload"} + return serializer + + +@pytest.fixture +def repository(mock_session, mock_probe, mock_outbox): + """Create repository with mock dependencies.""" + return WorkspaceRepository( + session=mock_session, + outbox=mock_outbox, + probe=mock_probe, + ) + + +@pytest.fixture +def tenant_id(): + """Create a test tenant ID.""" + return TenantId.generate() + + +@pytest.fixture +def now(): + """Create a fixed timestamp for testing.""" + return datetime.now(UTC) + + +class TestProtocolCompliance: + """Tests for protocol compliance.""" + + def test_implements_protocol(self, repository): + """Repository should implement IWorkspaceRepository protocol.""" + assert isinstance(repository, IWorkspaceRepository) + + +class TestSave: + """Tests for save method.""" + + @pytest.mark.asyncio + async def test_save_workspace_creates_in_database( + self, repository, mock_session, tenant_id + ): + """Should add new workspace model to session when workspace doesn't exist.""" + workspace = Workspace.create_root( + name="Root Workspace", + tenant_id=tenant_id, + ) + + # Mock session to return None (workspace doesn't exist) + mock_result = MagicMock() + mock_result.scalar_one_or_none.return_value = None + mock_session.execute.return_value = mock_result + + await repository.save(workspace) + + # Should add new model + mock_session.add.assert_called_once() + added_model = mock_session.add.call_args[0][0] + assert isinstance(added_model, WorkspaceModel) + assert added_model.id == workspace.id.value + assert added_model.tenant_id == tenant_id.value + assert added_model.name == "Root Workspace" + assert added_model.is_root is True + assert added_model.parent_workspace_id is None + + @pytest.mark.asyncio + async def test_save_workspace_updates_existing( + self, repository, mock_session, tenant_id, now + ): + """Should update existing workspace model when workspace exists.""" + workspace_id = WorkspaceId.generate() + workspace = Workspace( + id=workspace_id, + tenant_id=tenant_id, + name="Updated Name", + parent_workspace_id=None, + is_root=True, + created_at=now, + updated_at=now, + ) + + # Mock existing workspace + existing_model = WorkspaceModel( + id=workspace_id.value, + tenant_id=tenant_id.value, + name="Old Name", + parent_workspace_id=None, + is_root=True, + created_at=now, + updated_at=now, + ) + mock_result = MagicMock() + mock_result.scalar_one_or_none.return_value = existing_model + mock_session.execute.return_value = mock_result + + await repository.save(workspace) + + # Should not add, should update + mock_session.add.assert_not_called() + assert existing_model.name == "Updated Name" 
+
+    @pytest.mark.asyncio
+    async def test_save_workspace_emits_events_to_outbox(
+        self, repository, mock_session, mock_outbox, tenant_id
+    ):
+        """Should append collected events to outbox when saving."""
+        # Use factory to generate events
+        workspace = Workspace.create_root(
+            name="Root Workspace",
+            tenant_id=tenant_id,
+        )
+
+        # Mock session
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        await repository.save(workspace)
+
+        # Should have appended WorkspaceCreated event to outbox
+        assert mock_outbox.append.call_count == 1
+        call_kwargs = mock_outbox.append.call_args.kwargs
+        assert call_kwargs["event_type"] == "WorkspaceCreated"
+        assert call_kwargs["aggregate_type"] == "workspace"
+        assert call_kwargs["aggregate_id"] == workspace.id.value
+
+    @pytest.mark.asyncio
+    async def test_save_child_workspace_with_parent(
+        self, repository, mock_session, tenant_id
+    ):
+        """Should save child workspace with parent reference."""
+        parent_id = WorkspaceId.generate()
+        workspace = Workspace.create(
+            name="Child Workspace",
+            tenant_id=tenant_id,
+            parent_workspace_id=parent_id,
+        )
+
+        # Mock session to return None (workspace doesn't exist)
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        await repository.save(workspace)
+
+        # Should add new model with parent reference
+        mock_session.add.assert_called_once()
+        added_model = mock_session.add.call_args[0][0]
+        assert added_model.parent_workspace_id == parent_id.value
+        assert added_model.is_root is False
+
+
+class TestGetById:
+    """Tests for get_by_id method."""
+
+    @pytest.mark.asyncio
+    async def test_get_by_id_returns_workspace(
+        self, repository, mock_session, tenant_id, now
+    ):
+        """Should return workspace when found by ID."""
+        workspace_id = WorkspaceId.generate()
+        model = WorkspaceModel(
+            id=workspace_id.value,
+            tenant_id=tenant_id.value,
+            name="Test Workspace",
+            parent_workspace_id=None,
+            is_root=True,
+            created_at=now,
+            updated_at=now,
+        )
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = model
+        mock_session.execute.return_value = mock_result
+
+        result = await repository.get_by_id(workspace_id)
+
+        assert result is not None
+        assert result.id.value == workspace_id.value
+        assert result.tenant_id.value == tenant_id.value
+        assert result.name == "Test Workspace"
+        assert result.is_root is True
+        assert result.parent_workspace_id is None
+
+    @pytest.mark.asyncio
+    async def test_get_by_id_returns_none_when_not_found(
+        self, repository, mock_session
+    ):
+        """Should return None when workspace doesn't exist."""
+        workspace_id = WorkspaceId.generate()
+
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        result = await repository.get_by_id(workspace_id)
+
+        assert result is None
+
+    @pytest.mark.asyncio
+    async def test_get_by_id_returns_workspace_with_parent(
+        self, repository, mock_session, tenant_id, now
+    ):
+        """Should return workspace with parent_workspace_id reconstituted."""
+        workspace_id = WorkspaceId.generate()
+        parent_id = WorkspaceId.generate()
+        model = WorkspaceModel(
+            id=workspace_id.value,
+            tenant_id=tenant_id.value,
+            name="Child Workspace",
+            parent_workspace_id=parent_id.value,
+            is_root=False,
+            created_at=now,
+            updated_at=now,
+        )
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = model
+        mock_session.execute.return_value = mock_result
+
+        result = await repository.get_by_id(workspace_id)
+
+        assert result is not None
+        assert result.parent_workspace_id is not None
+        assert result.parent_workspace_id.value == parent_id.value
+        assert result.is_root is False
+
+
+class TestGetByName:
+    """Tests for get_by_name method."""
+
+    @pytest.mark.asyncio
+    async def test_get_by_name_returns_workspace(
+        self, repository, mock_session, tenant_id, now
+    ):
+        """Should return workspace when found by name in tenant."""
+        workspace_id = WorkspaceId.generate()
+        model = WorkspaceModel(
+            id=workspace_id.value,
+            tenant_id=tenant_id.value,
+            name="Engineering",
+            parent_workspace_id=None,
+            is_root=False,
+            created_at=now,
+            updated_at=now,
+        )
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = model
+        mock_session.execute.return_value = mock_result
+
+        result = await repository.get_by_name(tenant_id, "Engineering")
+
+        assert result is not None
+        assert result.name == "Engineering"
+
+    @pytest.mark.asyncio
+    async def test_get_by_name_returns_none_when_not_found(
+        self, repository, mock_session, tenant_id
+    ):
+        """Should return None when workspace name doesn't exist in tenant."""
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        result = await repository.get_by_name(tenant_id, "Nonexistent")
+
+        assert result is None
+
+
+class TestGetRootWorkspace:
+    """Tests for get_root_workspace method."""
+
+    @pytest.mark.asyncio
+    async def test_get_root_workspace_returns_root(
+        self, repository, mock_session, tenant_id, now
+    ):
+        """Should return root workspace for tenant."""
+        workspace_id = WorkspaceId.generate()
+        model = WorkspaceModel(
+            id=workspace_id.value,
+            tenant_id=tenant_id.value,
+            name="Root",
+            parent_workspace_id=None,
+            is_root=True,
+            created_at=now,
+            updated_at=now,
+        )
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = model
+        mock_session.execute.return_value = mock_result
+
+        result = await repository.get_root_workspace(tenant_id)
+
+        assert result is not None
+        assert result.is_root is True
+        assert result.parent_workspace_id is None
+
+    @pytest.mark.asyncio
+    async def test_get_root_workspace_returns_none_when_not_found(
+        self, repository, mock_session, tenant_id
+    ):
+        """Should return None when no root workspace exists for tenant."""
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        result = await repository.get_root_workspace(tenant_id)
+
+        assert result is None
+
+
+class TestListByTenant:
+    """Tests for list_by_tenant method."""
+
+    @pytest.mark.asyncio
+    async def test_list_by_tenant_returns_all_workspaces(
+        self, repository, mock_session, tenant_id, now
+    ):
+        """Should return all workspaces in a tenant."""
+        models = [
+            WorkspaceModel(
+                id=WorkspaceId.generate().value,
+                tenant_id=tenant_id.value,
+                name="Root",
+                parent_workspace_id=None,
+                is_root=True,
+                created_at=now,
+                updated_at=now,
+            ),
+            WorkspaceModel(
+                id=WorkspaceId.generate().value,
+                tenant_id=tenant_id.value,
+                name="Engineering",
+                parent_workspace_id=None,
+                is_root=False,
+                created_at=now,
+                updated_at=now,
+            ),
+            WorkspaceModel(
+                id=WorkspaceId.generate().value,
+                tenant_id=tenant_id.value,
+                name="Marketing",
+                parent_workspace_id=None,
+                is_root=False,
+                created_at=now,
+                updated_at=now,
+            ),
+        ]
+
+        mock_result = MagicMock()
+        mock_result.scalars.return_value.all.return_value = models
+        mock_session.execute.return_value = mock_result
+
+        result = await repository.list_by_tenant(tenant_id)
+
+        assert len(result) == 3
+        names = {w.name for w in result}
+        assert "Root" in names
+        assert "Engineering" in names
+        assert "Marketing" in names
+
+    @pytest.mark.asyncio
+    async def test_list_by_tenant_returns_empty_when_none(
+        self, repository, mock_session, tenant_id
+    ):
+        """Should return empty list when tenant has no workspaces."""
+        mock_result = MagicMock()
+        mock_result.scalars.return_value.all.return_value = []
+        mock_session.execute.return_value = mock_result
+
+        result = await repository.list_by_tenant(tenant_id)
+
+        assert result == []
+
+
+class TestDelete:
+    """Tests for delete method."""
+
+    @pytest.mark.asyncio
+    async def test_delete_workspace_removes_from_database(
+        self, repository, mock_session, tenant_id, now
+    ):
+        """Should delete workspace from PostgreSQL."""
+        workspace_id = WorkspaceId.generate()
+        workspace = Workspace(
+            id=workspace_id,
+            tenant_id=tenant_id,
+            name="To Delete",
+            parent_workspace_id=None,
+            is_root=False,
+            created_at=now,
+            updated_at=now,
+        )
+        # Mark for deletion to record event
+        workspace.mark_for_deletion()
+
+        model = WorkspaceModel(
+            id=workspace_id.value,
+            tenant_id=tenant_id.value,
+            name="To Delete",
+            parent_workspace_id=None,
+            is_root=False,
+            created_at=now,
+            updated_at=now,
+        )
+
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = model
+        mock_session.execute.return_value = mock_result
+
+        result = await repository.delete(workspace)
+
+        assert result is True
+        mock_session.delete.assert_called_once_with(model)
+
+    @pytest.mark.asyncio
+    async def test_delete_workspace_emits_events_to_outbox(
+        self, repository, mock_session, mock_outbox, tenant_id, now
+    ):
+        """Should append WorkspaceDeleted event to outbox."""
+        workspace_id = WorkspaceId.generate()
+        workspace = Workspace(
+            id=workspace_id,
+            tenant_id=tenant_id,
+            name="To Delete",
+            parent_workspace_id=None,
+            is_root=False,
+            created_at=now,
+            updated_at=now,
+        )
+        # Mark for deletion to record event
+        workspace.mark_for_deletion()
+
+        model = WorkspaceModel(
+            id=workspace_id.value,
+            tenant_id=tenant_id.value,
+            name="To Delete",
+            parent_workspace_id=None,
+            is_root=False,
+            created_at=now,
+            updated_at=now,
+        )
+
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = model
+        mock_session.execute.return_value = mock_result
+
+        await repository.delete(workspace)
+
+        # Should have appended WorkspaceDeleted event
+        calls = mock_outbox.append.call_args_list
+        event_types = [call.kwargs.get("event_type") for call in calls]
+        assert "WorkspaceDeleted" in event_types
+
+    @pytest.mark.asyncio
+    async def test_delete_returns_false_when_not_found(
+        self, repository, mock_session, tenant_id, now
+    ):
+        """Should return False when workspace doesn't exist in database."""
+        workspace = Workspace(
+            id=WorkspaceId.generate(),
+            tenant_id=tenant_id,
+            name="Ghost",
+            parent_workspace_id=None,
+            is_root=False,
+            created_at=now,
+            updated_at=now,
+        )
+
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        result = await repository.delete(workspace)
+
+        assert result is False
+
+
+class TestWorkspaceNameNotUniqueAcrossTenants:
+    """Tests for workspace name uniqueness behavior across tenants."""
+
+    @pytest.mark.asyncio
+    async def test_workspace_name_not_unique_across_tenants(
+        self, repository, mock_session, now
+    ):
+        """Same workspace name should be allowed in different tenants.
+
+        Workspace names are unique within a tenant (application-level),
+        not globally. This test verifies two workspaces with the same name
+        can exist in different tenants.
+        """
+        tenant_a = TenantId.generate()
+        tenant_b = TenantId.generate()
+
+        workspace_a = Workspace.create_root(name="Root", tenant_id=tenant_a)
+        workspace_b = Workspace.create_root(name="Root", tenant_id=tenant_b)
+
+        # Mock session to report no existing workspace for either save
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        # Both saves should succeed (no uniqueness violation)
+        await repository.save(workspace_a)
+        await repository.save(workspace_b)
+
+        # Both adds should have been called
+        assert mock_session.add.call_count == 2
+
+
+class TestParentWorkspaceReference:
+    """Tests for parent workspace self-referential relationship."""
+
+    @pytest.mark.asyncio
+    async def test_parent_workspace_reference_works(
+        self, repository, mock_session, tenant_id, now
+    ):
+        """Should correctly save and retrieve child workspaces with parent references."""
+        parent_id = WorkspaceId.generate()
+        child = Workspace.create(
+            name="Child",
+            tenant_id=tenant_id,
+            parent_workspace_id=parent_id,
+        )
+
+        # Mock session to return None (workspace doesn't exist)
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        await repository.save(child)
+
+        # Verify parent_workspace_id was persisted
+        added_model = mock_session.add.call_args[0][0]
+        assert added_model.parent_workspace_id == parent_id.value
+
+
+class TestRootWorkspaceConstraint:
+    """Tests for root workspace uniqueness constraint."""
+
+    @pytest.mark.asyncio
+    async def test_root_workspace_constraint_enforced(
+        self, repository, mock_session, tenant_id, now
+    ):
+        """Should verify only one root workspace per tenant.
+
+        The partial unique index ensures at most one root workspace per tenant.
+        This unit test verifies that the model correctly sets is_root=True.
+        The actual database constraint enforcement is tested in integration tests.
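+
+        For reference, the partial index declared on WorkspaceModel is
+        expected to render in PostgreSQL roughly as:
+
+            CREATE UNIQUE INDEX idx_workspaces_root_unique
+                ON workspaces (tenant_id, is_root)
+                WHERE is_root = true;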
+        """
+        root = Workspace.create_root(
+            name="Root",
+            tenant_id=tenant_id,
+        )
+
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        await repository.save(root)
+
+        added_model = mock_session.add.call_args[0][0]
+        assert added_model.is_root is True
+
+        # A non-root workspace should have is_root=False
+        parent_id = WorkspaceId.generate()
+        child = Workspace.create(
+            name="Child",
+            tenant_id=tenant_id,
+            parent_workspace_id=parent_id,
+        )
+
+        await repository.save(child)
+
+        child_model = mock_session.add.call_args[0][0]
+        assert child_model.is_root is False
+
+
+class TestSerializerInjection:
+    """Tests for serializer dependency injection."""
+
+    @pytest.mark.asyncio
+    async def test_uses_injected_serializer(
+        self, mock_session, mock_outbox, mock_probe, mock_serializer
+    ):
+        """Should use injected serializer instead of creating default."""
+        repository = WorkspaceRepository(
+            session=mock_session,
+            outbox=mock_outbox,
+            probe=mock_probe,
+            serializer=mock_serializer,
+        )
+
+        tenant_id = TenantId.generate()
+        workspace = Workspace.create_root(
+            name="Test Workspace",
+            tenant_id=tenant_id,
+        )
+
+        # Mock session
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        await repository.save(workspace)
+
+        # Injected serializer should have been called
+        mock_serializer.serialize.assert_called()
+
+    def test_uses_default_serializer_when_not_injected(
+        self, mock_session, mock_outbox, mock_probe
+    ):
+        """Should create default serializer when not injected."""
+        from iam.infrastructure.outbox import IAMEventSerializer
+
+        repository = WorkspaceRepository(
+            session=mock_session,
+            outbox=mock_outbox,
+            probe=mock_probe,
+        )
+
+        assert isinstance(repository._serializer, IAMEventSerializer)
+
+
+class TestObservabilityProbe:
+    """Tests for domain probe usage."""
+
+    @pytest.mark.asyncio
+    async def test_probe_called_on_save(
+        self, repository, mock_session, mock_probe, tenant_id
+    ):
+        """Should call probe.workspace_saved on successful save."""
+        workspace = Workspace.create_root(
+            name="Root",
+            tenant_id=tenant_id,
+        )
+
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        await repository.save(workspace)
+
+        mock_probe.workspace_saved.assert_called_once_with(
+            workspace.id.value, tenant_id.value
+        )
+
+    @pytest.mark.asyncio
+    async def test_probe_called_on_not_found(
+        self, repository, mock_session, mock_probe
+    ):
+        """Should call probe.workspace_not_found when get_by_id finds nothing."""
+        workspace_id = WorkspaceId.generate()
+
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        await repository.get_by_id(workspace_id)
+
+        mock_probe.workspace_not_found.assert_called_once_with(workspace_id.value)
+
+    @pytest.mark.asyncio
+    async def test_probe_called_on_retrieved(
+        self, repository, mock_session, mock_probe, tenant_id, now
+    ):
+        """Should call probe.workspace_retrieved on successful get_by_id."""
+        workspace_id = WorkspaceId.generate()
+        model = WorkspaceModel(
+            id=workspace_id.value,
+            tenant_id=tenant_id.value,
+            name="Test",
+            parent_workspace_id=None,
+            is_root=True,
+            created_at=now,
+            updated_at=now,
+        )
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = model
+        mock_session.execute.return_value = mock_result
+
+        await repository.get_by_id(workspace_id)
+
+        mock_probe.workspace_retrieved.assert_called_once_with(workspace_id.value)
+
+    @pytest.mark.asyncio
+    async def test_probe_called_on_delete(
+        self, repository, mock_session, mock_probe, tenant_id, now
+    ):
+        """Should call probe.workspace_deleted on successful deletion."""
+        workspace_id = WorkspaceId.generate()
+        workspace = Workspace(
+            id=workspace_id,
+            tenant_id=tenant_id,
+            name="To Delete",
+            parent_workspace_id=None,
+            is_root=False,
+            created_at=now,
+            updated_at=now,
+        )
+        workspace.mark_for_deletion()
+
+        model = WorkspaceModel(
+            id=workspace_id.value,
+            tenant_id=tenant_id.value,
+            name="To Delete",
+            parent_workspace_id=None,
+            is_root=False,
+            created_at=now,
+            updated_at=now,
+        )
+
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = model
+        mock_session.execute.return_value = mock_result
+
+        await repository.delete(workspace)
+
+        mock_probe.workspace_deleted.assert_called_once_with(workspace_id.value)
+
+    @pytest.mark.asyncio
+    async def test_probe_called_on_list(
+        self, repository, mock_session, mock_probe, tenant_id
+    ):
+        """Should call probe.workspaces_listed on list_by_tenant."""
+        mock_result = MagicMock()
+        mock_result.scalars.return_value.all.return_value = []
+        mock_session.execute.return_value = mock_result
+
+        await repository.list_by_tenant(tenant_id)
+
+        mock_probe.workspaces_listed.assert_called_once_with(tenant_id.value, 0)
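The probe calls asserted above line up with the structlog-based probe implementation touched later in this series; each call is expected to surface as a structured log event, roughly of the following shape (illustrative rendering only — the exact field names and the extra context fields depend on `_get_context_kwargs()`):

    {"event": "workspaces_listed", "tenant_id": "01JG...", "count": 0, "level": "debug"}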
diff --git a/website/src/data/env-vars.json b/website/src/data/env-vars.json
index 1669891a..bb47fb9f 100644
--- a/website/src/data/env-vars.json
+++ b/website/src/data/env-vars.json
@@ -207,7 +207,7 @@
   "IAMSettings": {
     "class_name": "IAMSettings",
     "prefix": "KARTOGRAPH_IAM_",
-    "doc": "IAM (Identity and Access Management) settings.\n\n    Environment variables:\n        KARTOGRAPH_IAM_DEFAULT_TENANT_NAME: Default tenant name for single-tenant mode (default: default)\n    ",
+    "doc": "IAM (Identity and Access Management) settings.\n\n    Environment variables:\n        KARTOGRAPH_IAM_DEFAULT_TENANT_NAME: Default tenant name for single-tenant mode (default: default)\n        KARTOGRAPH_IAM_DEFAULT_WORKSPACE_NAME: Default root workspace name (default: None, uses tenant name)\n    ",
     "properties": [
       {
         "env_var": "KARTOGRAPH_IAM_DEFAULT_TENANT_NAME",
@@ -215,6 +215,13 @@
         "default": "default",
         "required": false,
         "description": "Default tenant name for single-tenant mode"
+      },
+      {
+        "env_var": "KARTOGRAPH_IAM_DEFAULT_WORKSPACE_NAME",
+        "type": "str | None",
+        "default": null,
+        "required": false,
+        "description": "Default root workspace name (if None, uses tenant name)"
       }
     ]
   },
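For context, the documented variables map onto a pydantic-settings class shaped roughly like the sketch below — an assumption reconstructed from the generated env-vars.json above, not the project's actual source:

    from pydantic_settings import BaseSettings, SettingsConfigDict


    class IAMSettings(BaseSettings):
        """Sketch of the IAM settings class implied by env-vars.json."""

        model_config = SettingsConfigDict(env_prefix="KARTOGRAPH_IAM_")

        default_tenant_name: str = "default"
        # If None, the root workspace name falls back to the tenant name.
        default_workspace_name: str | None = None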
From 4e51f644ce37280a3fa666f26e78e79d5aa499fc Mon Sep 17 00:00:00 2001
From: John Sell
Date: Fri, 6 Feb 2026 15:53:02 -0500
Subject: [PATCH 10/11] refactor(iam.infrastructure): split models.py into modular structure

Co-Authored-By: Claude Sonnet 4.5 (1M context)
---
 src/api/iam/infrastructure/models.py          | 218 ------------------
 src/api/iam/infrastructure/models/__init__.py |  19 ++
 src/api/iam/infrastructure/models/api_key.py  |  72 ++++++
 src/api/iam/infrastructure/models/group.py    |  43 ++++
 src/api/iam/infrastructure/models/tenant.py   |  32 +++
 src/api/iam/infrastructure/models/user.py     |  29 +++
 .../iam/infrastructure/models/workspace.py    |  84 +++++++
 7 files changed, 279 insertions(+), 218 deletions(-)
 delete mode 100644 src/api/iam/infrastructure/models.py
 create mode 100644 src/api/iam/infrastructure/models/__init__.py
 create mode 100644 src/api/iam/infrastructure/models/api_key.py
 create mode 100644 src/api/iam/infrastructure/models/group.py
 create mode 100644 src/api/iam/infrastructure/models/tenant.py
 create mode 100644 src/api/iam/infrastructure/models/user.py
 create mode 100644 src/api/iam/infrastructure/models/workspace.py

diff --git a/src/api/iam/infrastructure/models.py b/src/api/iam/infrastructure/models.py
deleted file mode 100644
index 442fe332..00000000
--- a/src/api/iam/infrastructure/models.py
+++ /dev/null
@@ -1,218 +0,0 @@
-"""SQLAlchemy ORM models for IAM bounded context.
-
-These models map to database tables and are used by repository implementations.
-They store only metadata - authorization data (membership, roles) is stored in SpiceDB.
-"""
-
-from datetime import datetime
-
-from sqlalchemy import Boolean, DateTime, ForeignKey, Index, String, UniqueConstraint
-from sqlalchemy.orm import Mapped, mapped_column, relationship
-
-from infrastructure.database.models import Base, TimestampMixin
-
-
-class GroupModel(Base, TimestampMixin):
-    """ORM model for groups table (metadata only).
-
-    Stores group metadata in PostgreSQL. Membership relationships are
-    managed through SpiceDB, not as database columns.
-
-    Note: Group names are NOT globally unique - per-tenant uniqueness
-    is enforced at the application level.
-
-    Foreign Key Constraint:
-    - tenant_id references tenants.id with RESTRICT delete
-    - Application layer must explicitly delete groups before tenant deletion
-    - This ensures GroupDeleted domain events are emitted for SpiceDB cleanup
-    """
-
-    __tablename__ = "groups"
-
-    id: Mapped[str] = mapped_column(String(26), primary_key=True)
-    tenant_id: Mapped[str] = mapped_column(
-        String(26),
-        ForeignKey("tenants.id", ondelete="RESTRICT"),
-        nullable=False,
-        index=True,
-    )
-    name: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
-
-    def __repr__(self) -> str:
-        """Return string representation."""
-        return (
-            f"<GroupModel(id={self.id}, tenant_id={self.tenant_id}, name={self.name})>"
-        )
-
-
-class UserModel(Base, TimestampMixin):
-    """ORM model for users table (metadata only).
-
-    Stores user metadata in PostgreSQL. Users are provisioned from SSO
-    and this table only stores minimal metadata for lookup and reference.
-
-    Note: id is VARCHAR(255) to accommodate external SSO IDs (UUIDs, Auth0, etc.)
-    """
-
-    __tablename__ = "users"
-
-    id: Mapped[str] = mapped_column(String(255), primary_key=True)
-    username: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
-
-    def __repr__(self) -> str:
-        """Return string representation."""
-        return f"<UserModel(id={self.id}, username={self.username})>"
-
-
-class TenantModel(Base, TimestampMixin):
-    """ORM model for tenants table.
-
-    Stores tenant metadata in PostgreSQL. Tenants represent organizations
-    and are the top-level isolation boundary in the system.
-
-    Note: Tenant names are globally unique across the entire system.
-    """
-
-    __tablename__ = "tenants"
-
-    id: Mapped[str] = mapped_column(String(26), primary_key=True)
-    name: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
-
-    # Relationships
-    workspaces = relationship("WorkspaceModel", back_populates="tenant")
-
-    def __repr__(self) -> str:
-        """Return string representation."""
-        return f"<TenantModel(id={self.id}, name={self.name})>"
-
-
-class WorkspaceModel(Base, TimestampMixin):
-    """ORM model for workspaces table.
-
-    Stores workspace metadata in PostgreSQL. Workspaces organize knowledge
-    graphs within a tenant. Each tenant has exactly one root workspace
-    (auto-created on tenant creation) and can have multiple child workspaces.
-
-    Foreign Key Constraints:
-    - tenant_id references tenants.id with RESTRICT delete
-      Application must delete workspaces before tenant deletion
-    - parent_workspace_id references workspaces.id with RESTRICT delete
-      Cannot delete a parent workspace while children exist
-
-    Partial Unique Index:
-    - Only one root workspace (is_root=TRUE) per tenant
-    """
-
-    __tablename__ = "workspaces"
-
-    id: Mapped[str] = mapped_column(String(26), primary_key=True)
-    tenant_id: Mapped[str] = mapped_column(
-        String(26),
-        ForeignKey("tenants.id", ondelete="RESTRICT"),
-        nullable=False,
-        index=True,
-    )
-    name: Mapped[str] = mapped_column(String(255), nullable=False)
-    parent_workspace_id: Mapped[str | None] = mapped_column(
-        String(26),
-        ForeignKey("workspaces.id", ondelete="RESTRICT"),
-        nullable=True,
-        index=True,
-    )
-    is_root: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
-    created_at: Mapped[datetime] = mapped_column(
-        DateTime(timezone=True), nullable=False
-    )
-    updated_at: Mapped[datetime] = mapped_column(
-        DateTime(timezone=True), nullable=False
-    )
-
-    # Relationships
-    tenant = relationship("TenantModel", back_populates="workspaces")
-    parent_workspace = relationship(
-        "WorkspaceModel",
-        remote_side="WorkspaceModel.id",
-        back_populates="child_workspaces",
-    )
-    child_workspaces = relationship(
-        "WorkspaceModel",
-        back_populates="parent_workspace",
-    )
-
-    __table_args__ = (
-        Index("idx_workspaces_name_tenant", "name", "tenant_id"),
-        Index(
-            "idx_workspaces_root_unique",
-            "tenant_id",
-            "is_root",
-            unique=True,
-            postgresql_where=(is_root == True),  # noqa: E712
-        ),
-    )
-
-    def __repr__(self) -> str:
-        """Return string representation."""
-        return (
-            f"<WorkspaceModel(id={self.id}, tenant_id={self.tenant_id}, "
-            f"name={self.name}, is_root={self.is_root})>"
-        )
-
-
-class APIKeyModel(Base, TimestampMixin):
-    """ORM model for api_keys table.
-
-    Stores API key metadata in PostgreSQL. The key_hash is the only
-    sensitive data stored - the plaintext secret is never persisted.
-
-    Notes:
-    - created_by_user_id is VARCHAR(255) to match users.id (external SSO IDs)
-      This is for audit trail only - authorization is handled by SpiceDB.
-    - tenant_id is VARCHAR(26) for ULID format
-    - key_hash is unique for authentication lookup
-    - prefix allows key identification without exposing the full key
-    - Per-user key names are unique within a tenant
-
-    Foreign Key Constraint:
-    - tenant_id references tenants.id with CASCADE delete
-    - When a tenant is deleted, all API keys are cascade deleted
-    - API key revocation must be handled in service layer to emit events
-    """
-
-    __tablename__ = "api_keys"
-
-    id: Mapped[str] = mapped_column(String(26), primary_key=True)
-    created_by_user_id: Mapped[str] = mapped_column(
-        String(255), nullable=False, index=True
-    )
-    tenant_id: Mapped[str] = mapped_column(
-        String(26),
-        ForeignKey("tenants.id", ondelete="CASCADE"),
-        nullable=False,
-        index=True,
-    )
-    name: Mapped[str] = mapped_column(String(255), nullable=False)
-    key_hash: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
-    prefix: Mapped[str] = mapped_column(String(12), nullable=False, index=True)
-    expires_at: Mapped[datetime] = mapped_column(
-        DateTime(timezone=True), nullable=False
-    )
-    last_used_at: Mapped[datetime | None] = mapped_column(
-        DateTime(timezone=True), nullable=True
-    )
-    is_revoked: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
-
-    __table_args__ = (
-        UniqueConstraint(
-            "tenant_id",
-            "created_by_user_id",
-            "name",
-            name="uq_api_keys_tenant_user_name",
-        ),
-    )
-
-    def __repr__(self) -> str:
-        """Return string representation."""
-        return (
-            f"<APIKeyModel(id={self.id}, tenant_id={self.tenant_id}, "
-            f"name={self.name}, prefix={self.prefix}, is_revoked={self.is_revoked})>"
-        )
diff --git a/src/api/iam/infrastructure/models/__init__.py b/src/api/iam/infrastructure/models/__init__.py
new file mode 100644
index 00000000..58aaab47
--- /dev/null
+++ b/src/api/iam/infrastructure/models/__init__.py
@@ -0,0 +1,19 @@
+"""SQLAlchemy ORM models for IAM bounded context.
+
+These models map to database tables and are used by repository implementations.
+They store only metadata - authorization data (membership, roles) is stored in SpiceDB.
+"""
+
+from iam.infrastructure.models.api_key import APIKeyModel
+from iam.infrastructure.models.group import GroupModel
+from iam.infrastructure.models.tenant import TenantModel
+from iam.infrastructure.models.user import UserModel
+from iam.infrastructure.models.workspace import WorkspaceModel
+
+__all__ = [
+    "APIKeyModel",
+    "GroupModel",
+    "TenantModel",
+    "UserModel",
+    "WorkspaceModel",
+]
diff --git a/src/api/iam/infrastructure/models/api_key.py b/src/api/iam/infrastructure/models/api_key.py
new file mode 100644
index 00000000..48c81cf0
--- /dev/null
+++ b/src/api/iam/infrastructure/models/api_key.py
@@ -0,0 +1,72 @@
+"""SQLAlchemy ORM model for the api_keys table.
+
+Stores API key metadata in PostgreSQL. The key_hash is the only
+sensitive data stored - the plaintext secret is never persisted.
+"""
+
+from datetime import datetime
+
+from sqlalchemy import Boolean, DateTime, ForeignKey, String, UniqueConstraint
+from sqlalchemy.orm import Mapped, mapped_column
+
+from infrastructure.database.models import Base, TimestampMixin
+
+
+class APIKeyModel(Base, TimestampMixin):
+    """ORM model for api_keys table.
+
+    Stores API key metadata in PostgreSQL. The key_hash is the only
+    sensitive data stored - the plaintext secret is never persisted.
+
+    Notes:
+    - created_by_user_id is VARCHAR(255) to match users.id (external SSO IDs)
+      This is for audit trail only - authorization is handled by SpiceDB.
+    - tenant_id is VARCHAR(26) for ULID format
+    - key_hash is unique for authentication lookup
+    - prefix allows key identification without exposing the full key
+    - Per-user key names are unique within a tenant
+
+    Foreign Key Constraint:
+    - tenant_id references tenants.id with CASCADE delete
+    - When a tenant is deleted, all API keys are cascade deleted
+    - API key revocation must be handled in service layer to emit events
+    """
+
+    __tablename__ = "api_keys"
+
+    id: Mapped[str] = mapped_column(String(26), primary_key=True)
+    created_by_user_id: Mapped[str] = mapped_column(
+        String(255), nullable=False, index=True
+    )
+    tenant_id: Mapped[str] = mapped_column(
+        String(26),
+        ForeignKey("tenants.id", ondelete="CASCADE"),
+        nullable=False,
+        index=True,
+    )
+    name: Mapped[str] = mapped_column(String(255), nullable=False)
+    key_hash: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
+    prefix: Mapped[str] = mapped_column(String(12), nullable=False, index=True)
+    expires_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True), nullable=False
+    )
+    last_used_at: Mapped[datetime | None] = mapped_column(
+        DateTime(timezone=True), nullable=True
+    )
+    is_revoked: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
+
+    __table_args__ = (
+        UniqueConstraint(
+            "tenant_id",
+            "created_by_user_id",
+            "name",
+            name="uq_api_keys_tenant_user_name",
+        ),
+    )
+
+    def __repr__(self) -> str:
+        """Return string representation."""
+        return (
+            f"<APIKeyModel(id={self.id}, tenant_id={self.tenant_id}, "
+            f"name={self.name}, prefix={self.prefix}, is_revoked={self.is_revoked})>"
+        )
diff --git a/src/api/iam/infrastructure/models/group.py b/src/api/iam/infrastructure/models/group.py
new file mode 100644
index 00000000..0bb9279c
--- /dev/null
+++ b/src/api/iam/infrastructure/models/group.py
@@ -0,0 +1,43 @@
+"""SQLAlchemy ORM model for the groups table.
+
+Stores group metadata in PostgreSQL. Membership relationships are
+managed through SpiceDB, not as database columns.
+"""
+
+from sqlalchemy import ForeignKey, String
+from sqlalchemy.orm import Mapped, mapped_column
+
+from infrastructure.database.models import Base, TimestampMixin
+
+
+class GroupModel(Base, TimestampMixin):
+    """ORM model for groups table (metadata only).
+
+    Stores group metadata in PostgreSQL. Membership relationships are
+    managed through SpiceDB, not as database columns.
+
+    Note: Group names are NOT globally unique - per-tenant uniqueness
+    is enforced at the application level.
+
+    Foreign Key Constraint:
+    - tenant_id references tenants.id with RESTRICT delete
+    - Application layer must explicitly delete groups before tenant deletion
+    - This ensures GroupDeleted domain events are emitted for SpiceDB cleanup
+    """
+
+    __tablename__ = "groups"
+
+    id: Mapped[str] = mapped_column(String(26), primary_key=True)
+    tenant_id: Mapped[str] = mapped_column(
+        String(26),
+        ForeignKey("tenants.id", ondelete="RESTRICT"),
+        nullable=False,
+        index=True,
+    )
+    name: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
+
+    def __repr__(self) -> str:
+        """Return string representation."""
+        return (
+            f"<GroupModel(id={self.id}, tenant_id={self.tenant_id}, name={self.name})>"
+        )
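One property the split preserves: every module registers its model on the shared declarative Base, so importing the package is enough for metadata-driven tooling such as Alembic autogenerate to see all five tables. A minimal sketch of that assumption:

    from iam.infrastructure import models  # noqa: F401  (import registers the models)
    from infrastructure.database.models import Base

    # The shared metadata should now contain every IAM table (alongside any
    # tables registered by other bounded contexts):
    assert {"api_keys", "groups", "tenants", "users", "workspaces"} <= set(
        Base.metadata.tables
    )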
+""" + +from sqlalchemy import String +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from infrastructure.database.models import Base, TimestampMixin + + +class TenantModel(Base, TimestampMixin): + """ORM model for tenants table. + + Stores tenant metadata in PostgreSQL. Tenants represent organizations + and are the top-level isolation boundary in the system. + + Note: Tenant names are globally unique across the entire system. + """ + + __tablename__ = "tenants" + + id: Mapped[str] = mapped_column(String(26), primary_key=True) + name: Mapped[str] = mapped_column(String(255), nullable=False, unique=True) + + # Relationships + workspaces = relationship("WorkspaceModel", back_populates="tenant") + + def __repr__(self) -> str: + """Return string representation.""" + return f"" diff --git a/src/api/iam/infrastructure/models/user.py b/src/api/iam/infrastructure/models/user.py new file mode 100644 index 00000000..6f834822 --- /dev/null +++ b/src/api/iam/infrastructure/models/user.py @@ -0,0 +1,29 @@ +"""SQLAlchemy ORM model for the users table. + +Stores user metadata in PostgreSQL. Users are provisioned from SSO +and this table only stores minimal metadata for lookup and reference. +""" + +from sqlalchemy import String +from sqlalchemy.orm import Mapped, mapped_column + +from infrastructure.database.models import Base, TimestampMixin + + +class UserModel(Base, TimestampMixin): + """ORM model for users table (metadata only). + + Stores user metadata in PostgreSQL. Users are provisioned from SSO + and this table only stores minimal metadata for lookup and reference. + + Note: id is VARCHAR(255) to accommodate external SSO IDs (UUIDs, Auth0, etc.) + """ + + __tablename__ = "users" + + id: Mapped[str] = mapped_column(String(255), primary_key=True) + username: Mapped[str] = mapped_column(String(255), nullable=False, unique=True) + + def __repr__(self) -> str: + """Return string representation.""" + return f"" diff --git a/src/api/iam/infrastructure/models/workspace.py b/src/api/iam/infrastructure/models/workspace.py new file mode 100644 index 00000000..462f0a5b --- /dev/null +++ b/src/api/iam/infrastructure/models/workspace.py @@ -0,0 +1,84 @@ +"""SQLAlchemy ORM model for the workspaces table. + +Stores workspace metadata in PostgreSQL. Workspaces organize knowledge +graphs within a tenant. +""" + +from datetime import datetime + +from sqlalchemy import Boolean, DateTime, ForeignKey, Index, String +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from infrastructure.database.models import Base, TimestampMixin + + +class WorkspaceModel(Base, TimestampMixin): + """ORM model for workspaces table. + + Stores workspace metadata in PostgreSQL. Workspaces organize knowledge + graphs within a tenant. Each tenant has exactly one root workspace + (auto-created on tenant creation) and can have multiple child workspaces. 
diff --git a/src/api/iam/infrastructure/models/workspace.py b/src/api/iam/infrastructure/models/workspace.py
new file mode 100644
index 00000000..462f0a5b
--- /dev/null
+++ b/src/api/iam/infrastructure/models/workspace.py
@@ -0,0 +1,84 @@
+"""SQLAlchemy ORM model for the workspaces table.
+
+Stores workspace metadata in PostgreSQL. Workspaces organize knowledge
+graphs within a tenant.
+"""
+
+from datetime import datetime
+
+from sqlalchemy import Boolean, DateTime, ForeignKey, Index, String
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+from infrastructure.database.models import Base, TimestampMixin
+
+
+class WorkspaceModel(Base, TimestampMixin):
+    """ORM model for workspaces table.
+
+    Stores workspace metadata in PostgreSQL. Workspaces organize knowledge
+    graphs within a tenant. Each tenant has exactly one root workspace
+    (auto-created on tenant creation) and can have multiple child workspaces.
+
+    Foreign Key Constraints:
+    - tenant_id references tenants.id with RESTRICT delete
+      Application must delete workspaces before tenant deletion
+    - parent_workspace_id references workspaces.id with RESTRICT delete
+      Cannot delete a parent workspace while children exist
+
+    Partial Unique Index:
+    - Only one root workspace (is_root=TRUE) per tenant
+    """
+
+    __tablename__ = "workspaces"
+
+    id: Mapped[str] = mapped_column(String(26), primary_key=True)
+    tenant_id: Mapped[str] = mapped_column(
+        String(26),
+        ForeignKey("tenants.id", ondelete="RESTRICT"),
+        nullable=False,
+        index=True,
+    )
+    name: Mapped[str] = mapped_column(String(255), nullable=False)
+    parent_workspace_id: Mapped[str | None] = mapped_column(
+        String(26),
+        ForeignKey("workspaces.id", ondelete="RESTRICT"),
+        nullable=True,
+        index=True,
+    )
+    is_root: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
+    created_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True), nullable=False
+    )
+    updated_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True), nullable=False
+    )
+
+    # Relationships
+    tenant = relationship("TenantModel", back_populates="workspaces")
+    parent_workspace = relationship(
+        "WorkspaceModel",
+        remote_side="WorkspaceModel.id",
+        back_populates="child_workspaces",
+    )
+    child_workspaces = relationship(
+        "WorkspaceModel",
+        back_populates="parent_workspace",
+    )
+
+    __table_args__ = (
+        Index("idx_workspaces_name_tenant", "name", "tenant_id"),
+        Index(
+            "idx_workspaces_root_unique",
+            "tenant_id",
+            "is_root",
+            unique=True,
+            postgresql_where=(is_root == True),  # noqa: E712
+        ),
+    )
+
+    def __repr__(self) -> str:
+        """Return string representation."""
+        return (
+            f"<WorkspaceModel(id={self.id}, tenant_id={self.tenant_id}, "
+            f"name={self.name}, is_root={self.is_root})>"
+        )

From fa4c2be8131beec6c69ff7b25b0dafd10e6c92ef Mon Sep 17 00:00:00 2001
From: John Sell
Date: Fri, 6 Feb 2026 16:16:36 -0500
Subject: [PATCH 11/11] fix(iam.infrastructure): fix workspace timestamp inheritance and probe consistency

Co-Authored-By: Claude Sonnet 4.5 (1M context)
---
 .../iam/infrastructure/models/workspace.py | 10 +-----
 .../observability/repository_probe.py      |  8 ++---
 .../infrastructure/workspace_repository.py | 10 +++++-
 .../test_workspace_repository.py           | 36 ++++++++++++++++++-
 4 files changed, 49 insertions(+), 15 deletions(-)

diff --git a/src/api/iam/infrastructure/models/workspace.py b/src/api/iam/infrastructure/models/workspace.py
index 462f0a5b..91292658 100644
--- a/src/api/iam/infrastructure/models/workspace.py
+++ b/src/api/iam/infrastructure/models/workspace.py
@@ -4,9 +4,7 @@
 graphs within a tenant.
 """
 
-from datetime import datetime
-
-from sqlalchemy import Boolean, DateTime, ForeignKey, Index, String
+from sqlalchemy import Boolean, ForeignKey, Index, String
 from sqlalchemy.orm import Mapped, mapped_column, relationship
 
 from infrastructure.database.models import Base, TimestampMixin
@@ -46,12 +44,6 @@ class WorkspaceModel(Base, TimestampMixin):
         index=True,
     )
     is_root: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
-    created_at: Mapped[datetime] = mapped_column(
-        DateTime(timezone=True), nullable=False
-    )
-    updated_at: Mapped[datetime] = mapped_column(
-        DateTime(timezone=True), nullable=False
-    )
 
     # Relationships
     tenant = relationship("TenantModel", back_populates="workspaces")
diff --git a/src/api/iam/infrastructure/observability/repository_probe.py b/src/api/iam/infrastructure/observability/repository_probe.py
index bdb86aeb..1431b270 100644
--- a/src/api/iam/infrastructure/observability/repository_probe.py
+++ b/src/api/iam/infrastructure/observability/repository_probe.py
@@ -1,7 +1,7 @@
 """Domain probe for IAM repository operations.
 
 Following Domain-Oriented Observability patterns, this probe captures
-domain-significant events related to group, user, and tenant repository operations.
+domain-significant events related to group, user, tenant, and workspace repository operations.
 """
 
 from __future__ import annotations
@@ -325,7 +325,7 @@ def workspace_retrieved(self, workspace_id: str) -> None:
         """Record that a workspace was retrieved."""
         ...
 
-    def workspace_not_found(self, workspace_id: str) -> None:
+    def workspace_not_found(self, **kwargs: Any) -> None:
         """Record that a workspace was not found."""
         ...
 
@@ -382,11 +382,11 @@ def workspace_retrieved(self, workspace_id: str) -> None:
         """Record that a workspace was retrieved."""
         self._logger.debug(
             "workspace_retrieved",
             workspace_id=workspace_id,
             **self._get_context_kwargs(),
         )
 
-    def workspace_not_found(self, workspace_id: str) -> None:
+    def workspace_not_found(self, **kwargs: Any) -> None:
         """Record that a workspace was not found."""
         self._logger.debug(
             "workspace_not_found",
-            workspace_id=workspace_id,
+            **kwargs,
             **self._get_context_kwargs(),
         )
diff --git a/src/api/iam/infrastructure/workspace_repository.py b/src/api/iam/infrastructure/workspace_repository.py
index 5d13c98c..53d9b7ab 100644
--- a/src/api/iam/infrastructure/workspace_repository.py
+++ b/src/api/iam/infrastructure/workspace_repository.py
@@ -134,7 +134,7 @@ async def get_by_id(self, workspace_id: WorkspaceId) -> Workspace | None:
         model = result.scalar_one_or_none()
 
         if model is None:
-            self._probe.workspace_not_found(workspace_id.value)
+            self._probe.workspace_not_found(workspace_id=workspace_id.value)
             return None
 
         workspace = self._to_domain(model)
@@ -159,6 +159,10 @@ async def get_by_name(self, tenant_id: TenantId, name: str) -> Workspace | None:
         model = result.scalar_one_or_none()
 
         if model is None:
+            self._probe.workspace_not_found(
+                tenant_id=tenant_id.value,
+                name=name,
+            )
             return None
 
         workspace = self._to_domain(model)
@@ -182,6 +186,10 @@ async def get_root_workspace(self, tenant_id: TenantId) -> Workspace | None:
         model = result.scalar_one_or_none()
 
         if model is None:
+            self._probe.workspace_not_found(
+                tenant_id=tenant_id.value,
+                is_root=True,
+            )
             return None
 
         workspace = self._to_domain(model)
diff --git a/src/api/tests/unit/iam/infrastructure/test_workspace_repository.py b/src/api/tests/unit/iam/infrastructure/test_workspace_repository.py
index 6552aee4..ae716002 100644
--- a/src/api/tests/unit/iam/infrastructure/test_workspace_repository.py
+++ b/src/api/tests/unit/iam/infrastructure/test_workspace_repository.py
@@ -700,7 +700,41 @@ async def test_probe_called_on_not_found(
 
         await repository.get_by_id(workspace_id)
 
-        mock_probe.workspace_not_found.assert_called_once_with(workspace_id.value)
+        mock_probe.workspace_not_found.assert_called_once_with(
+            workspace_id=workspace_id.value
+        )
+
+    @pytest.mark.asyncio
+    async def test_probe_called_on_get_by_name_not_found(
+        self, repository, mock_session, mock_probe, tenant_id
+    ):
+        """Should call probe.workspace_not_found when get_by_name finds nothing."""
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        await repository.get_by_name(tenant_id, "Nonexistent")
+
+        mock_probe.workspace_not_found.assert_called_once_with(
+            tenant_id=tenant_id.value,
+            name="Nonexistent",
+        )
+
+    @pytest.mark.asyncio
+    async def test_probe_called_on_get_root_workspace_not_found(
+        self, repository, mock_session, mock_probe, tenant_id
+    ):
+        """Should call probe.workspace_not_found when get_root_workspace finds nothing."""
+        mock_result = MagicMock()
+        mock_result.scalar_one_or_none.return_value = None
+        mock_session.execute.return_value = mock_result
+
+        await repository.get_root_workspace(tenant_id)
+
+        mock_probe.workspace_not_found.assert_called_once_with(
+            tenant_id=tenant_id.value,
+            is_root=True,
+        )
 
     @pytest.mark.asyncio
     async def test_probe_called_on_retrieved(