Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
84 changes: 84 additions & 0 deletions src/llm_orchestration_service_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@
from models.request_models import (
OrchestrationRequest,
OrchestrationResponse,
TestOrchestrationRequest,
TestOrchestrationResponse,
EmbeddingRequest,
EmbeddingResponse,
ContextGenerationRequest,
Expand Down Expand Up @@ -124,6 +126,88 @@ def orchestrate_llm_request(
)


@app.post(
    "/orchestrate/test",
    response_model=TestOrchestrationResponse,
    status_code=status.HTTP_200_OK,
    summary="Process test LLM orchestration request",
    description="Processes a simplified test message through the LLM orchestration pipeline",
)
def test_orchestrate_llm_request(
    http_request: Request,
    request: TestOrchestrationRequest,
) -> TestOrchestrationResponse:
    """
    Process test LLM orchestration request with simplified input.

    Wraps the simplified test payload in a full OrchestrationRequest using
    placeholder session/user values, runs it through the normal
    orchestration pipeline, and returns the result without a chatId.

    Args:
        http_request: FastAPI Request object for accessing app state
        request: TestOrchestrationRequest containing only message, environment, and connection_id

    Returns:
        TestOrchestrationResponse: Response with LLM output and status flags (without chatId)

    Raises:
        HTTPException: 500 when the orchestration service is missing or
            uninitialized, or when an unexpected error occurs during processing.
    """
    try:
        # Lazy %-style args avoid building the message when INFO is disabled.
        logger.info(
            "Received test orchestration request for environment: %s",
            request.environment,
        )

        # getattr with a default collapses the "attribute missing" and
        # "attribute is None" cases into one guard.
        orchestration_service = getattr(
            http_request.app.state, "orchestration_service", None
        )
        if orchestration_service is None:
            logger.error("Orchestration service not found in app state")
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Service not initialized",
            )

        # Map TestOrchestrationRequest to OrchestrationRequest, filling the
        # fields the test payload omits with fixed placeholder values.
        full_request = OrchestrationRequest(
            chatId="test-session",
            message=request.message,
            authorId="test-user",
            conversationHistory=[],
            url="test-context",
            environment=request.environment,
            connection_id=request.connection_id,
        )

        # Reuse the production processing path so test results are representative.
        response = orchestration_service.process_orchestration_request(full_request)

        # Convert to TestOrchestrationResponse (exclude chatId).
        test_response = TestOrchestrationResponse(
            llmServiceActive=response.llmServiceActive,
            questionOutOfLLMScope=response.questionOutOfLLMScope,
            inputGuardFailed=response.inputGuardFailed,
            content=response.content,
        )

        logger.info(
            "Successfully processed test request for environment: %s",
            request.environment,
        )
        return test_response

    except HTTPException:
        # Deliberate HTTP errors pass through untouched.
        raise
    except Exception as e:
        # logger.exception records the full traceback (logger.error with
        # str(e) would lose it); "from e" preserves the cause chain.
        logger.exception("Unexpected error processing test request")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Internal server error occurred",
        ) from e


@app.post(
"/embeddings",
response_model=EmbeddingResponse,
Expand Down
28 changes: 28 additions & 0 deletions src/models/request_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,3 +129,31 @@ class EmbeddingErrorResponse(BaseModel):
error: str = Field(..., description="Error message")
failed_texts: List[str] = Field(..., description="Texts that failed to embed")
retry_after: Optional[int] = Field(None, description="Retry after seconds")


# Test endpoint models


class TestOrchestrationRequest(BaseModel):
    """Simplified request payload for the test orchestration endpoint.

    Carries only the fields a manual test needs; every other field of the
    full orchestration request is supplied server-side with placeholders.
    """

    # The user's text to run through the pipeline.
    message: str = Field(..., description="User's message/query")
    # Restricted to the three known deployment environments.
    environment: Literal["production", "test", "development"] = Field(
        ...,
        description="Environment context",
    )
    # May be omitted; defaults to None.
    connection_id: Optional[str] = Field(
        None,
        description="Optional connection identifier",
    )


class TestOrchestrationResponse(BaseModel):
    """Response payload for the test orchestration endpoint.

    Mirrors the full orchestration response but deliberately omits chatId,
    since test requests use a placeholder session.
    """

    # True while the downstream LLM service is reachable/active.
    llmServiceActive: bool = Field(..., description="Whether LLM service is active")
    # True when the question falls outside what the LLM will answer.
    questionOutOfLLMScope: bool = Field(
        ...,
        description="Whether question is out of LLM scope",
    )
    # True when the input guard rejected the message.
    inputGuardFailed: bool = Field(
        ...,
        description="Whether input guard validation failed",
    )
    # The generated answer text.
    content: str = Field(..., description="Response content with citations")
1 change: 0 additions & 1 deletion vault/agent-out/pidfile
Original file line number Diff line number Diff line change
@@ -1 +0,0 @@
7
Loading