diff --git a/GUI/src/pages/LLMConnections/CreateLLMConnection.tsx b/GUI/src/pages/LLMConnections/CreateLLMConnection.tsx
index dfeb583..c77bdfc 100644
--- a/GUI/src/pages/LLMConnections/CreateLLMConnection.tsx
+++ b/GUI/src/pages/LLMConnections/CreateLLMConnection.tsx
@@ -15,8 +15,8 @@ const CreateLLMConnection = () => {
// Query to check for existing production connection
const { data: existingProductionConnection } = useQuery({
- queryKey: ['production-connection'],
- queryFn: getProductionConnection,
+ queryKey: llmConnectionsQueryKeys.production(),
+ queryFn: () => getProductionConnection(),
});
const createConnectionMutation = useMutation({
diff --git a/GUI/src/pages/LLMConnections/ViewLLMConnection.tsx b/GUI/src/pages/LLMConnections/ViewLLMConnection.tsx
index 28e429f..3a55528 100644
--- a/GUI/src/pages/LLMConnections/ViewLLMConnection.tsx
+++ b/GUI/src/pages/LLMConnections/ViewLLMConnection.tsx
@@ -249,6 +249,13 @@ const ViewLLMConnection = () => {
accessKey: connectionData.accessKey || '', // Don't show API keys
// Embedding model credentials (don't show sensitive data, but include structure)
embeddingModelApiKey: connectionData.embeddingModelApiKey || '', // Don't show API keys
+ // Embedding AWS Bedrock credentials
+ embeddingAccessKey: connectionData.embeddingAccessKey || '',
+ embeddingSecretKey: connectionData.embeddingSecretKey || '',
+ // Embedding Azure credentials
+ embeddingDeploymentName: connectionData.embeddingDeploymentName || '',
+ embeddingTargetUri: connectionData.embeddingTargetUri || '',
+ embeddingAzureApiKey: connectionData.embeddingAzureApiKey || '',
};
return (
diff --git a/GUI/src/pages/LLMConnections/index.tsx b/GUI/src/pages/LLMConnections/index.tsx
index 6d46024..18dd7f4 100644
--- a/GUI/src/pages/LLMConnections/index.tsx
+++ b/GUI/src/pages/LLMConnections/index.tsx
@@ -13,7 +13,7 @@ import BudgetBanner from 'components/molecules/BudgetBanner';
import './LLMConnections.scss';
import { platforms, trainingStatuses } from 'config/dataModelsConfig';
import LLMConnectionCard from 'components/molecules/LLMConnectionCard';
-import { fetchLLMConnectionsPaginated, LLMConnectionFilters, LLMConnection, getProductionConnection } from 'services/llmConnections';
+import { fetchLLMConnectionsPaginated, LLMConnectionFilters, LLMConnection, getProductionConnection, ProductionConnectionFilters } from 'services/llmConnections';
import { llmConnectionsQueryKeys } from 'utils/queryKeys';
const LLMConnections: FC = () => {
@@ -35,10 +35,17 @@ const LLMConnections: FC = () => {
queryFn: () => fetchLLMConnectionsPaginated(filters),
});
- // Fetch production connection separately
+ // Fetch production connection separately with potential filters
+ const [productionFilters, setProductionFilters] = useState<ProductionConnectionFilters>({
+ sortBy: 'created_at',
+ sortOrder: 'desc',
+ llmPlatform: '',
+ llmModel: '',
+ });
+
const { data: productionConnection, isLoading: isProductionLoading } = useQuery({
- queryKey: llmConnectionsQueryKeys.production(),
- queryFn: getProductionConnection,
+ queryKey: llmConnectionsQueryKeys.production(productionFilters),
+ queryFn: () => getProductionConnection(productionFilters),
});
@@ -50,11 +57,23 @@ const LLMConnections: FC = () => {
setFilters(prev => ({ ...prev, pageNumber: pageIndex }));
}, [pageIndex]);
+ // Sync production filters with main filters on component mount
+ useEffect(() => {
+ setProductionFilters(prev => ({
+ ...prev,
+ llmPlatform: filters.llmPlatform || '',
+ llmModel: filters.llmModel || '',
+ sortBy: filters.sortBy || 'created_at',
+ sortOrder: filters.sortOrder || 'desc',
+ }));
+ }, [filters.llmPlatform, filters.llmModel, filters.sortBy, filters.sortOrder]);
+
const handleFilterChange = (
name: string,
value: string | number | undefined | { name: string; id: string }
) => {
let filterUpdate: Partial<LLMConnectionFilters> = {};
+ let productionFilterUpdate: Partial<ProductionConnectionFilters> = {};
if (name === 'sorting') {
// Handle sorting format - no conversion needed, use snake_case directly
@@ -62,11 +81,21 @@ const LLMConnections: FC = () => {
const [sortBy, sortOrder] = sortingValue.split(' ');
filterUpdate = {
+ sortBy: sortBy,
+ sortOrder: sortOrder as 'asc' | 'desc'
+ };
+
+ productionFilterUpdate = {
sortBy: sortBy,
sortOrder: sortOrder as 'asc' | 'desc'
};
} else {
filterUpdate = { [name]: value };
+
+ // Update production filters for relevant fields
+ if (name === 'llmPlatform' || name === 'llmModel') {
+ productionFilterUpdate = { [name]: value as string };
+ }
}
setFilters((prevFilters) => ({
@@ -74,6 +103,14 @@ const LLMConnections: FC = () => {
...filterUpdate,
}));
+ // Update production filters if relevant
+ if (Object.keys(productionFilterUpdate).length > 0) {
+ setProductionFilters((prevFilters) => ({
+ ...prevFilters,
+ ...productionFilterUpdate,
+ }));
+ }
+
// Reset to first page when filters change
if (name !== 'pageNumber') {
setPageIndex(1);
@@ -192,6 +229,12 @@ const LLMConnections: FC = () => {
llmModel: '',
environment: '',
});
+ setProductionFilters({
+ sortBy: 'created_at',
+ sortOrder: 'desc',
+ llmPlatform: '',
+ llmModel: '',
+ });
setPageIndex(1);
}}
appearance={ButtonAppearanceTypes.SECONDARY}
@@ -202,7 +245,7 @@ const LLMConnections: FC = () => {
- {productionConnection && (
+ {productionConnection && filters?.environment !== "testing" && (
Production LLM Connection
diff --git a/GUI/src/pages/TestProductionLLM/index.tsx b/GUI/src/pages/TestProductionLLM/index.tsx
index b5334c1..a9c1493 100644
--- a/GUI/src/pages/TestProductionLLM/index.tsx
+++ b/GUI/src/pages/TestProductionLLM/index.tsx
@@ -124,11 +124,11 @@ const TestProductionLLM: FC = () => {
setMessages(prev => [...prev, botMessage]);
// Show toast notification
- toast.open({
- type: botMessageType,
- title: t('errorOccurred'),
- message: t('errorMessage'),
- });
+ // toast.open({
+ // type: botMessageType,
+ // title: t('errorOccurred'),
+ // message: t('errorMessage'),
+ // });
} catch (error) {
console.error('Error sending message:', error);
diff --git a/GUI/src/services/llmConnections.ts b/GUI/src/services/llmConnections.ts
index 83882ab..5b3921c 100644
--- a/GUI/src/services/llmConnections.ts
+++ b/GUI/src/services/llmConnections.ts
@@ -30,6 +30,13 @@ export interface LLMConnection {
accessKey?: string;
// Embedding model credentials
embeddingModelApiKey?: string;
+ // Embedding AWS Bedrock credentials
+ embeddingAccessKey?: string;
+ embeddingSecretKey?: string;
+ // Embedding Azure credentials
+ embeddingDeploymentName?: string;
+ embeddingTargetUri?: string;
+ embeddingAzureApiKey?: string;
}
export interface LLMConnectionsResponse {
@@ -69,6 +76,16 @@ export interface LLMConnectionFilters {
environment?: string;
status?: string;
}
+
+export interface ProductionConnectionFilters {
+ llmPlatform?: string;
+ llmModel?: string;
+ embeddingPlatform?: string;
+ embeddingModel?: string;
+ connectionStatus?: string;
+ sortBy?: string;
+ sortOrder?: string;
+}
export interface LegacyLLMConnectionFilters {
page: number;
pageSize: number;
@@ -98,11 +115,18 @@ export interface LLMConnectionFormData {
accessKey?: string;
// Embedding model credentials
embeddingModelApiKey?: string;
+ // Embedding AWS Bedrock credentials
+ embeddingAccessKey?: string;
+ embeddingSecretKey?: string;
+ // Embedding Azure credentials
+ embeddingDeploymentName?: string;
+ embeddingTargetUri?: string;
+ embeddingAzureApiKey?: string;
}
// Vault secret service functions
async function createVaultSecret(connectionId: string, connectionData: LLMConnectionFormData): Promise<void> {
-
+
const payload = {
connectionId,
llmPlatform: connectionData.llmPlatform,
@@ -121,19 +145,29 @@ async function createVaultSecret(connectionId: string, connectionData: LLMConnec
targetUrl: connectionData.targetUri || '',
apiKey: connectionData.apiKey || '',
}),
- embeddingModelApiKey: connectionData.embeddingModelApiKey || '',
+ // Embedding AWS Bedrock credentials
+ ...(connectionData.embeddingModelPlatform === 'aws' && {
+ embeddingAccessKey: connectionData.embeddingAccessKey || '',
+ embeddingSecretKey: connectionData.embeddingSecretKey || '',
+ }),
+ // Embedding Azure credentials
+ ...(connectionData.embeddingModelPlatform === 'azure' && {
+ embeddingDeploymentName: connectionData.embeddingDeploymentName || '',
+ embeddingTargetUri: connectionData.embeddingTargetUri || '',
+ embeddingAzureApiKey: connectionData.embeddingAzureApiKey || '',
+ }),
};
await apiDev.post(vaultEndpoints.CREATE_VAULT_SECRET(), payload);
}
async function deleteVaultSecret(connectionId: string, connectionData: Partial<LLMConnection>): Promise<void> {
-
+
const payload = {
connectionId,
llmPlatform: connectionData.llmPlatform || '',
llmModel: connectionData.llmModel || '',
- embeddingModel: connectionData.embeddingModel || '',
+ embeddingModel: connectionData.embeddingModel || '',
embeddingPlatform: connectionData.embeddingModelPlatform || '',
deploymentEnvironment: connectionData.deploymentEnvironment?.toLowerCase() || '',
};
@@ -164,8 +198,22 @@ export async function getLLMConnection(id: string | number): Promise {
- const { data } = await apiDev.get(llmConnectionsEndpoints.GET_PRODUCTION_CONNECTION());
+export async function getProductionConnection(filters?: ProductionConnectionFilters): Promise<LLMConnection | null> {
+ const queryParams = new URLSearchParams();
+
+ if (filters?.llmPlatform) queryParams.append('llmPlatform', filters.llmPlatform);
+ if (filters?.llmModel) queryParams.append('llmModel', filters.llmModel);
+ if (filters?.embeddingPlatform) queryParams.append('embeddingPlatform', filters.embeddingPlatform);
+ if (filters?.embeddingModel) queryParams.append('embeddingModel', filters.embeddingModel);
+ if (filters?.connectionStatus) queryParams.append('connectionStatus', filters.connectionStatus);
+ if (filters?.sortBy) queryParams.append('sortBy', filters.sortBy);
+ if (filters?.sortOrder) queryParams.append('sortOrder', filters.sortOrder);
+
+ const url = queryParams.toString()
+ ? `${llmConnectionsEndpoints.GET_PRODUCTION_CONNECTION()}?${queryParams.toString()}`
+ : llmConnectionsEndpoints.GET_PRODUCTION_CONNECTION();
+
+ const { data } = await apiDev.get(url);
return data?.response?.[0] || null;
}
@@ -190,11 +238,17 @@ export async function createLLMConnection(connectionData: LLMConnectionFormData)
secret_key: maskSensitiveKey(connectionData.secretKey) || "",
access_key: maskSensitiveKey(connectionData.accessKey) || "",
// Embedding model credentials
- embedding_model_api_key: maskSensitiveKey(connectionData.embeddingModelApiKey) || "",
+ // Embedding AWS Bedrock credentials
+ embedding_access_key: maskSensitiveKey(connectionData.embeddingAccessKey) || "",
+ embedding_secret_key: maskSensitiveKey(connectionData.embeddingSecretKey) || "",
+ // Embedding Azure credentials
+ embedding_deployment_name: connectionData.embeddingDeploymentName || "",
+ embedding_target_uri: connectionData.embeddingTargetUri || "",
+ embedding_azure_api_key: maskSensitiveKey(connectionData.embeddingAzureApiKey) || "",
});
-
+
const connection = data?.response;
-
+
// After successful database creation, store secrets in vault
if (connection && connection.id) {
try {
@@ -205,7 +259,7 @@ export async function createLLMConnection(connectionData: LLMConnectionFormData)
// The connection is already created in the database
}
}
-
+
return connection;
}
@@ -233,22 +287,30 @@ export async function updateLLMConnection(
secret_key: maskSensitiveKey(connectionData.secretKey) || "",
access_key: maskSensitiveKey(connectionData.accessKey) || "",
// Embedding model credentials
- embedding_model_api_key: maskSensitiveKey(connectionData.embeddingModelApiKey) || "",
+ // Embedding AWS Bedrock credentials
+ embedding_access_key: maskSensitiveKey(connectionData.embeddingAccessKey) || "",
+ embedding_secret_key: maskSensitiveKey(connectionData.embeddingSecretKey) || "",
+ // Embedding Azure credentials
+ embedding_deployment_name: connectionData.embeddingDeploymentName || "",
+ embedding_target_uri: connectionData.embeddingTargetUri || "",
+ embedding_azure_api_key: maskSensitiveKey(connectionData.embeddingAzureApiKey) || "",
});
-
+
const connection = data?.response;
-
- // After successful database update, update secrets in vault
- if (connection) {
+
+ if (connection && (connectionData.secretKey && !connectionData.secretKey?.includes('*')
+ || connectionData.accessKey && !connectionData.accessKey?.includes('*')
+ || connectionData.apiKey && !connectionData.apiKey?.includes('*')
+ || connectionData.embeddingAccessKey && !connectionData.embeddingAccessKey?.includes('*')
+ || connectionData.embeddingSecretKey && !connectionData.embeddingSecretKey?.includes('*')
+ || connectionData.embeddingAzureApiKey && !connectionData.embeddingAzureApiKey?.includes('*'))) {
try {
await createVaultSecret(id.toString(), connectionData);
} catch (vaultError) {
console.error('Failed to update secrets in vault:', vaultError);
- // Note: We don't throw here to avoid breaking the connection update flow
- // The connection is already updated in the database
}
}
-
+
return connection;
}
@@ -260,12 +322,12 @@ export async function deleteLLMConnection(id: string | number): Promise {
} catch (error) {
console.error('Failed to get connection data before deletion:', error);
}
-
+
// Delete from database
await apiDev.post(llmConnectionsEndpoints.DELETE_LLM_CONNECTION(), {
connection_id: id,
});
-
+
// After successful database deletion, delete secrets from vault
if (connectionToDelete) {
try {
@@ -293,9 +355,9 @@ export async function checkBudgetStatus(): Promise {
return null;
}
}
-
+
export async function updateLLMConnectionStatus(
- id: string | number,
+ id: string | number,
status: 'active' | 'inactive'
): Promise {
const { data } = await apiDev.post(llmConnectionsEndpoints.UPDATE_LLM_CONNECTION_STATUS(), {
diff --git a/GUI/src/utils/queryKeys.ts b/GUI/src/utils/queryKeys.ts
index e004497..e10462e 100644
--- a/GUI/src/utils/queryKeys.ts
+++ b/GUI/src/utils/queryKeys.ts
@@ -1,5 +1,5 @@
import { PaginationState, SortingState } from '@tanstack/react-table';
-import { LLMConnectionFilters, LegacyLLMConnectionFilters } from 'services/llmConnections';
+import { LLMConnectionFilters, LegacyLLMConnectionFilters, ProductionConnectionFilters } from 'services/llmConnections';
import { InferenceRequest } from 'services/inference';
@@ -30,7 +30,7 @@ export const llmConnectionsQueryKeys = {
details: () => [...llmConnectionsQueryKeys.all(), 'detail'] as const,
detail: (id: string | number) => [...llmConnectionsQueryKeys.details(), id] as const,
budgetStatus: () => [...llmConnectionsQueryKeys.all(), 'budget-status'] as const,
- production: () => [...llmConnectionsQueryKeys.all(), 'production'] as const,
+ production: (filters?: ProductionConnectionFilters) => [...llmConnectionsQueryKeys.all(), 'production', filters] as const,
};
export const inferenceQueryKeys = {
diff --git a/endpoints.md b/endpoints.md
index 6bd4fc9..262e81a 100644
--- a/endpoints.md
+++ b/endpoints.md
@@ -357,12 +357,41 @@ GET /ruuter-private/llm/connections/list
| `llmPlatform` | `string` | Filter by LLM platform |
| `llmModel` | `string` | Filter by LLM model |
| `deploymentEnvironment` | `string` | Filter by environment (Testing / Production) |
+| `pageNumber` | `number` | Page number (1-based) |
+| `pageSize` | `number` | Number of items per page |
+| `sortBy` | `string` | Field to sort by |
+| `sortOrder` | `string` | Sort order: 'asc' or 'desc' |
### Example Request
```http
GET /ruuter-private/llm/connections/list?llmPlatform=OpenAI&deploymentEnvironment=Testing&model=GPT4
```
+---
+
+## 5. Get Production LLM Connection (with filters)
+
+### Endpoint
+```http
+GET /ruuter-private/llm/connections/production
+```
+
+### Query Parameters (Optional for filtering)
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `llmPlatform` | `string` | Filter by LLM platform |
+| `llmModel` | `string` | Filter by LLM model |
+| `embeddingPlatform` | `string` | Filter by embedding platform |
+| `embeddingModel` | `string` | Filter by embedding model |
+| `connectionStatus` | `string` | Filter by connection status |
+| `sortBy` | `string` | Field to sort by |
+| `sortOrder` | `string` | Sort order: 'asc' or 'desc' |
+
+### Example Request
+```http
+GET /ruuter-private/llm/connections/production?llmPlatform=OpenAI&connectionStatus=active
+```
+
### Response (200 OK)
```json
[
diff --git a/vault/agent-out/pidfile b/vault/agent-out/pidfile
deleted file mode 100644
index e69de29..0000000