diff --git a/tests/integration/endpoints/test_config_integration.py b/tests/integration/endpoints/test_config_integration.py
index c85af079..74aee0b3 100644
--- a/tests/integration/endpoints/test_config_integration.py
+++ b/tests/integration/endpoints/test_config_integration.py
@@ -22,10 +22,10 @@ async def test_config_endpoint_returns_config(
     - Real noop authentication is used
     - Response structure matches expected format
 
-    Args:
-        test_config: Loads test configuration
-        test_request: FastAPI request
-        test_auth: noop authentication tuple
+    Parameters:
+        test_config (AppConfig): Fixture providing the expected configuration to be returned.
+        test_request (Request): FastAPI request object used to call the endpoint.
+        test_auth (AuthTuple): Authentication fixture used for the request.
     """
     response = await config_endpoint_handler(auth=test_auth, request=test_request)
 
@@ -47,10 +47,10 @@ async def test_config_endpoint_returns_current_config(
     - Real noop authentication is used
     - Response structure matches expected format
 
-    Args:
-        current_config: Loads root configuration
-        test_request: FastAPI request
-        test_auth: noop authentication tuple
+    Parameters:
+        current_config (AppConfig): Loads root configuration
+        test_request (Request): FastAPI request
+        test_auth (AuthTuple): noop authentication tuple
     """
     response = await config_endpoint_handler(auth=test_auth, request=test_request)
 
@@ -69,9 +69,9 @@ async def test_config_endpoint_fails_without_configuration(
     - HTTPException is raised when configuration is not loaded
     - Error message indicates configuration is not loaded
 
-    Args:
-        test_request: FastAPI request
-        test_auth: noop authentication tuple
+    Parameters:
+        test_request (Request): FastAPI request fixture
+        test_auth (AuthTuple): noop authentication fixture
     """
 
     # Verify that HTTPException is raised when configuration is not loaded
diff --git a/tests/integration/endpoints/test_health_integration.py b/tests/integration/endpoints/test_health_integration.py
index cc138544..029370d6 100644
--- a/tests/integration/endpoints/test_health_integration.py
+++ b/tests/integration/endpoints/test_health_integration.py
@@ -24,6 +24,10 @@ def mock_llama_stack_client_fixture(
 
     This is the only external dependency we mock for integration tests,
     as it represents an external service call.
+
+    Returns:
+        mock_client: An AsyncMock representing the Llama Stack client whose
+            `inspect.version` returns an empty list.
     """
 
     mock_holder_class = mocker.patch("app.endpoints.health.AsyncLlamaStackClientHolder")
@@ -50,9 +54,12 @@ async def test_health_liveness(
     - Real noop authentication is used
     - Response structure matches expected format
 
-    Args:
+    Parameters:
         test_config: Loads test configuration
         test_auth: noop authentication tuple
+
+    Returns:
+        None
     """
     _ = test_config
 
@@ -76,7 +83,7 @@ async def test_health_readiness_provider_statuses(
     - Provider health status, ID, and error messages are correctly mapped
     - Multiple providers with different health states are handled correctly
 
-    Args:
+    Parameters:
         mock_llama_stack_client_health: Mocked Llama Stack client
         mocker: pytest-mock fixture for creating mock objects
     """
@@ -130,7 +137,7 @@ async def test_health_readiness_client_error(
     - Error propagates from the endpoint handler (desired behavior)
     - The endpoint does not catch RuntimeError, only APIConnectionError
 
-    Args:
+    Parameters:
         test_response: FastAPI response object
         test_auth: noop authentication tuple
     """
@@ -157,10 +164,13 @@ async def test_health_readiness(
    - Real noop authentication is used
     - Response structure matches expected format
 
-    Args:
+    Parameters:
         mock_llama_stack_client_health: Mocked Llama Stack client
         test_response: FastAPI response object
         test_auth: noop authentication tuple
+
+    Returns:
+        None
     """
     _ = mock_llama_stack_client_health
 
diff --git a/tests/integration/endpoints/test_info_integration.py b/tests/integration/endpoints/test_info_integration.py
index 690227b2..7350f3a5 100644
--- a/tests/integration/endpoints/test_info_integration.py
+++ b/tests/integration/endpoints/test_info_integration.py
@@ -22,6 +22,12 @@ def mock_llama_stack_client_fixture(
 
     This is the only external dependency we mock for integration tests,
     as it represents an external service call.
+
+    Parameters:
+        mocker (pytest_mock.MockerFixture): The pytest-mock fixture used to apply the patch.
+
+    Yields:
+        AsyncMock: A mocked Llama Stack client configured for tests.
     """
 
     mock_holder_class = mocker.patch("app.endpoints.info.AsyncLlamaStackClientHolder")
@@ -52,11 +58,14 @@ async def test_info_endpoint_returns_service_information(
     - Real noop authentication is used
     - Response structure matches expected format
 
-    Args:
+    Parameters:
         test_config: Loads real configuration (required for endpoint to access config)
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
         test_auth: noop authentication tuple
+
+    Returns:
+        None
     """
     # Fixtures with side effects (needed but not directly used)
     _ = test_config
@@ -87,7 +96,7 @@ async def test_info_endpoint_handles_connection_error(
     - HTTPException is raised with correct status code
     - Error response includes proper error details
 
-    Args:
+    Parameters:
         test_config: Loads real configuration (required for endpoint to access config)
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -126,7 +135,7 @@ async def test_info_endpoint_uses_configuration_values(
     - Endpoint reads configuration values correctly
     - Service name from config appears in response
 
-    Args:
+    Parameters:
         test_config: Loads real configuration (required for endpoint to access config)
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: Real FastAPI request
diff --git a/tests/integration/endpoints/test_query_v2_integration.py b/tests/integration/endpoints/test_query_v2_integration.py
index 5091ec61..9d4108fc 100644
--- a/tests/integration/endpoints/test_query_v2_integration.py
+++ b/tests/integration/endpoints/test_query_v2_integration.py
@@ -40,6 +40,12 @@ def mock_llama_stack_client_fixture(
 
     This is the only external dependency we mock for integration tests,
     as it represents an external service call.
+
+    Parameters:
+        mocker (MockerFixture): pytest-mock fixture used to create and patch mocks.
+
+    Returns:
+        mock_client: The mocked Llama Stack client instance configured as described above.
     """
     # Patch in app.endpoints.query where it's actually used by query_endpoint_handler_base
    mock_holder_class = mocker.patch("app.endpoints.query.AsyncLlamaStackClientHolder")
@@ -107,6 +113,9 @@ def patch_db_session_fixture(
     This sets up the global session_local in app.database to use the test database.
     Uses an in-memory SQLite database, isolating tests from production data.
     This fixture is autouse=True, so it applies to all tests in this module automatically.
+
+    Returns:
+        The test database Session instance to be used by the test.
     """
     # Store original values to restore later
     original_engine = app.database.engine
@@ -143,7 +152,7 @@ async def test_query_v2_endpoint_successful_response(
     - Response is correctly formatted
     - Conversation ID is returned
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -188,12 +197,15 @@ async def test_query_v2_endpoint_handles_connection_error(
     - HTTPException is raised with correct status code
     - Error response includes proper error details
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
         test_auth: noop authentication tuple
         mocker: pytest-mock fixture
+
+    Returns:
+        None
     """
     _ = test_config
 
@@ -232,11 +244,14 @@ async def test_query_v2_endpoint_empty_query(
     - Validation works correctly
     - Error response is returned if needed
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
         test_auth: noop authentication tuple
+
+    Returns:
+        None
     """
     _ = test_config
     _ = mock_llama_stack_client
@@ -272,7 +287,7 @@ async def test_query_v2_endpoint_with_attachments(
     - Attachment content is included in request
     - Response handles attachments correctly
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -326,7 +341,7 @@ async def test_query_v2_endpoint_with_tool_calls(
     - RAG tool responses are included
     - Referenced documents are returned
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -392,7 +407,7 @@ async def test_query_v2_endpoint_with_mcp_list_tools(
     - Tool names list is captured
     - Server label is included
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -456,7 +471,7 @@ async def test_query_v2_endpoint_with_multiple_tool_types(
     - All tool summaries are included
     - Response text combines with tool results
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -527,13 +542,16 @@ async def test_query_v2_endpoint_bypasses_tools_when_no_tools_true(
     - Response succeeds without tools
     - Integration between query handler and tool preparation
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
         test_auth: noop authentication tuple
         patch_db_session: Test database session
         mocker: pytest-mock fixture
+
+    Returns:
+        None
     """
     _ = test_config
     _ = patch_db_session
@@ -580,13 +598,16 @@ async def test_query_v2_endpoint_uses_tools_when_available(
     - Response succeeds with tools enabled
     - Integration between query handler, vector stores, and tool preparation
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
         test_auth: noop authentication tuple
         patch_db_session: Test database session
         mocker: pytest-mock fixture
+
+    Returns:
+        None
     """
     _ = test_config
     _ = patch_db_session
@@ -639,7 +660,7 @@ async def test_query_v2_endpoint_persists_conversation_to_database(
     - User ID, model, provider are stored correctly
     - Topic summary is generated and stored
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -690,7 +711,7 @@ async def test_query_v2_endpoint_updates_existing_conversation(
     - Last message timestamp updates
     - Topic summary is NOT regenerated
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -751,7 +772,7 @@ async def test_query_v2_endpoint_conversation_ownership_validation(
     - User can access their own conversation
     - Conversation must exist in database
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -805,13 +826,16 @@ async def test_query_v2_endpoint_creates_valid_cache_entry(
 
     Note: We spy on cache storage to verify integration, not to mock it.
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
         test_auth: noop authentication tuple
         patch_db_session: Test database session
         mocker: pytest-mock fixture
+
+    Returns:
+        None
     """
     _ = test_config
     _ = mock_llama_stack_client
@@ -864,7 +888,7 @@ async def test_query_v2_endpoint_conversation_not_found_returns_404(
     - Status code is 404 NOT FOUND
     - Error message indicates conversation not found
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -920,7 +944,7 @@ async def test_query_v2_endpoint_with_shield_violation(
     Note: Shields are advisory - violations are logged but don't block requests.
     This matches query V1 behavior.
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -982,7 +1006,7 @@ async def test_query_v2_endpoint_without_shields(
     - extra_body.guardrails is not included when no shields
     - Response succeeds without shields
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -1032,7 +1056,7 @@ async def test_query_v2_endpoint_handles_empty_llm_response(
     - Response contains empty/minimal content
     - Conversation is still persisted
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -1091,7 +1115,7 @@ async def test_query_v2_endpoint_quota_integration(
     - Token usage from Llama Stack flows through quota system
     - Complete integration between query handler and quota management
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -1151,13 +1175,16 @@ async def test_query_v2_endpoint_rejects_query_when_quota_exceeded(
     - Error response contains appropriate message
     - LLM is not called when quota is exceeded
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
         test_auth: noop authentication tuple
         patch_db_session: Test database session
         mocker: pytest-mock fixture (to simulate quota exceeded)
+
+    Returns:
+        None
     """
     _ = test_config
     _ = mock_llama_stack_client
@@ -1212,7 +1239,7 @@ async def test_query_v2_endpoint_transcript_behavior(
     - Conversation is persisted regardless of transcript setting
     - Integration between query handler and transcript configuration
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request
@@ -1302,7 +1329,7 @@ async def test_query_v2_endpoint_uses_conversation_history_model(
     - Message count increments properly
     - Integration between query handler and conversation persistence
 
-    Args:
+    Parameters:
         test_config: Test configuration
         mock_llama_stack_client: Mocked Llama Stack client
         test_request: FastAPI request