From de80080bda48d8f8ca76d9dac3c2a1d196aa2e72 Mon Sep 17 00:00:00 2001 From: Dawnlck Date: Tue, 18 Nov 2025 16:40:11 +0800 Subject: [PATCH 1/3] Add configurable model provider integration --- server/claude-sdk.js | 26 +++- server/database/db.js | 111 +++++++++++++- server/database/init.sql | 19 ++- server/index.js | 5 +- server/routes/settings.js | 81 ++++++++++- src/components/CredentialsSettings.jsx | 193 ++++++++++++++++++++++++- 6 files changed, 428 insertions(+), 7 deletions(-) diff --git a/server/claude-sdk.js b/server/claude-sdk.js index 9fea12712..bcde12c2f 100644 --- a/server/claude-sdk.js +++ b/server/claude-sdk.js @@ -16,6 +16,7 @@ import { query } from '@anthropic-ai/claude-agent-sdk'; import { promises as fs } from 'fs'; import path from 'path'; import os from 'os'; +import { modelProvidersDb } from './database/db.js'; // Session tracking: Map of session IDs to active query instances const activeSessions = new Map(); @@ -346,15 +347,36 @@ async function loadMcpConfig(cwd) { * @returns {Promise} */ async function queryClaudeSDK(command, options = {}, ws) { - const { sessionId } = options; + const runtimeOptions = { ...options }; + const { sessionId } = runtimeOptions; let capturedSessionId = sessionId; let sessionCreatedSent = false; let tempImagePaths = []; let tempDir = null; try { + // Apply user-selected model provider overrides if available + if (runtimeOptions.userId) { + try { + const provider = modelProvidersDb.getActiveProvider(runtimeOptions.userId); + if (provider) { + if (provider.api_key) { + process.env.ANTHROPIC_API_KEY = provider.api_key; + } + if (provider.api_base_url) { + process.env.ANTHROPIC_API_URL = provider.api_base_url; + } + if (provider.model_id && !runtimeOptions.model) { + runtimeOptions.model = provider.model_id; + } + } + } catch (error) { + console.error('[ERROR] Unable to load active model provider:', error); + } + } + // Map CLI options to SDK format - const sdkOptions = mapCliOptionsToSDK(options); + const sdkOptions = mapCliOptionsToSDK(runtimeOptions); // Load MCP configuration const mcpServers = await loadMcpConfig(options.cwd); diff --git a/server/database/db.js b/server/database/db.js index dbb9d3776..3d231c93f 100644 --- a/server/database/db.js +++ b/server/database/db.js @@ -75,6 +75,24 @@ const runMigrations = () => { db.exec('ALTER TABLE users ADD COLUMN has_completed_onboarding BOOLEAN DEFAULT 0'); } + // Ensure model_providers table exists (custom model API replacement) + db.exec(` + CREATE TABLE IF NOT EXISTS model_providers ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + user_id INTEGER NOT NULL, + provider_name TEXT NOT NULL, + api_base_url TEXT NOT NULL, + api_key TEXT NOT NULL, + model_id TEXT, + description TEXT, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + is_active BOOLEAN DEFAULT 0, + FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE + ); + `); + db.exec('CREATE INDEX IF NOT EXISTS idx_model_providers_user_id ON model_providers(user_id);'); + db.exec('CREATE INDEX IF NOT EXISTS idx_model_providers_active ON model_providers(is_active);'); + console.log('Database migrations completed successfully'); } catch (error) { console.error('Error running migrations:', error.message); @@ -351,11 +369,102 @@ const githubTokensDb = { } }; +// Model providers for third-party API replacement +const modelProvidersDb = { + createProvider: (userId, providerName, apiBaseUrl, apiKey, modelId, description) => { + try { + const existingProviders = modelProvidersDb.getProviders(userId); + const isActive = existingProviders.length === 
0 ? 1 : 0; + + const stmt = db.prepare(` + INSERT INTO model_providers (user_id, provider_name, api_base_url, api_key, model_id, description, is_active) + VALUES (?, ?, ?, ?, ?, ?, ?) + `); + + const result = stmt.run(userId, providerName, apiBaseUrl, apiKey, modelId || null, description || null, isActive); + + if (isActive) { + modelProvidersDb.setActiveProvider(userId, result.lastInsertRowid); + } + + return { id: result.lastInsertRowid, providerName, apiBaseUrl, modelId, description, isActive: Boolean(isActive) }; + } catch (err) { + throw err; + } + }, + + getProviders: (userId) => { + try { + const stmt = db.prepare(` + SELECT id, provider_name, api_base_url, model_id, description, created_at, is_active, api_key + FROM model_providers + WHERE user_id = ? + ORDER BY created_at DESC + `); + const providers = stmt.all(userId) || []; + + return providers.map((provider) => ({ + ...provider, + api_key_preview: provider.api_key ? `${provider.api_key.slice(0, 6)}...${provider.api_key.slice(-4)}` : '', + api_key: undefined, + })); + } catch (err) { + throw err; + } + }, + + getActiveProvider: (userId) => { + try { + const row = db.prepare('SELECT * FROM model_providers WHERE user_id = ? AND is_active = 1 LIMIT 1').get(userId); + return row || null; + } catch (err) { + throw err; + } + }, + + setActiveProvider: (userId, providerId) => { + const transaction = db.transaction(() => { + db.prepare('UPDATE model_providers SET is_active = 0 WHERE user_id = ?').run(userId); + const result = db.prepare('UPDATE model_providers SET is_active = 1 WHERE id = ? AND user_id = ?').run(providerId, userId); + return result.changes > 0; + }); + + return transaction(); + }, + + deleteProvider: (userId, providerId) => { + const transaction = db.transaction(() => { + const provider = db.prepare('SELECT is_active FROM model_providers WHERE id = ? AND user_id = ?').get(providerId, userId); + const result = db.prepare('DELETE FROM model_providers WHERE id = ? AND user_id = ?').run(providerId, userId); + + if (result.changes === 0) return false; + + if (provider?.is_active) { + const latest = db.prepare(` + SELECT id FROM model_providers + WHERE user_id = ? 
+ ORDER BY created_at DESC + LIMIT 1 + `).get(userId); + + if (latest) { + modelProvidersDb.setActiveProvider(userId, latest.id); + } + } + + return true; + }); + + return transaction(); + } +}; + export { db, initializeDatabase, userDb, apiKeysDb, credentialsDb, - githubTokensDb // Backward compatibility + githubTokensDb, // Backward compatibility + modelProvidersDb }; \ No newline at end of file diff --git a/server/database/init.sql b/server/database/init.sql index e52daef46..61455e15b 100644 --- a/server/database/init.sql +++ b/server/database/init.sql @@ -49,4 +49,21 @@ CREATE TABLE IF NOT EXISTS user_credentials ( CREATE INDEX IF NOT EXISTS idx_user_credentials_user_id ON user_credentials(user_id); CREATE INDEX IF NOT EXISTS idx_user_credentials_type ON user_credentials(credential_type); -CREATE INDEX IF NOT EXISTS idx_user_credentials_active ON user_credentials(is_active); \ No newline at end of file +CREATE INDEX IF NOT EXISTS idx_user_credentials_active ON user_credentials(is_active); + +-- Custom model providers for API replacement +CREATE TABLE IF NOT EXISTS model_providers ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + user_id INTEGER NOT NULL, + provider_name TEXT NOT NULL, + api_base_url TEXT NOT NULL, + api_key TEXT NOT NULL, + model_id TEXT, + description TEXT, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + is_active BOOLEAN DEFAULT 0, + FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE +); + +CREATE INDEX IF NOT EXISTS idx_model_providers_user_id ON model_providers(user_id); +CREATE INDEX IF NOT EXISTS idx_model_providers_active ON model_providers(is_active); \ No newline at end of file diff --git a/server/index.js b/server/index.js index a094d9dc2..1fa1cb82d 100755 --- a/server/index.js +++ b/server/index.js @@ -695,6 +695,8 @@ wss.on('connection', (ws, request) => { if (pathname === '/shell') { handleShellConnection(ws); } else if (pathname === '/ws') { + // Attach authenticated user to the WebSocket connection for downstream handlers + ws.user = request.user; handleChatConnection(ws); } else { console.log('[WARN] Unknown WebSocket path:', pathname); @@ -705,6 +707,7 @@ wss.on('connection', (ws, request) => { // Handle chat WebSocket connections function handleChatConnection(ws) { console.log('[INFO] Chat WebSocket connected'); + const userId = ws.user?.id; // Add to connected clients for project updates connectedClients.add(ws); @@ -719,7 +722,7 @@ function handleChatConnection(ws) { console.log('๐Ÿ”„ Session:', data.options?.sessionId ? 
'Resume' : 'New'); // Use Claude Agents SDK - await queryClaudeSDK(data.command, data.options, ws); + await queryClaudeSDK(data.command, { ...data.options, userId }, ws); } else if (data.type === 'cursor-command') { console.log('[DEBUG] Cursor message:', data.command || '[Continue/Resume]'); console.log('๐Ÿ“ Project:', data.options?.cwd || 'Unknown'); diff --git a/server/routes/settings.js b/server/routes/settings.js index d1c141b65..609f86491 100644 --- a/server/routes/settings.js +++ b/server/routes/settings.js @@ -1,5 +1,5 @@ import express from 'express'; -import { apiKeysDb, credentialsDb } from '../database/db.js'; +import { apiKeysDb, credentialsDb, modelProvidersDb } from '../database/db.js'; const router = express.Router(); @@ -175,4 +175,83 @@ router.patch('/credentials/:credentialId/toggle', async (req, res) => { } }); +// =============================== +// Model Provider Management +// =============================== + +// List all configured model providers (API replacement) +router.get('/model-providers', async (req, res) => { + try { + const providers = modelProvidersDb.getProviders(req.user.id); + const active = modelProvidersDb.getActiveProvider(req.user.id); + + res.json({ + providers, + activeProviderId: active?.id || null + }); + } catch (error) { + console.error('Error fetching model providers:', error); + res.status(500).json({ error: 'Failed to fetch model providers' }); + } +}); + +// Create a new provider entry +router.post('/model-providers', async (req, res) => { + try { + const { providerName, apiBaseUrl, apiKey, modelId, description } = req.body; + + if (!providerName?.trim() || !apiBaseUrl?.trim() || !apiKey?.trim()) { + return res.status(400).json({ error: 'Provider name, API base URL, and API key are required' }); + } + + const result = modelProvidersDb.createProvider( + req.user.id, + providerName.trim(), + apiBaseUrl.trim(), + apiKey.trim(), + modelId?.trim() || null, + description?.trim() || null + ); + + res.json({ success: true, provider: result }); + } catch (error) { + console.error('Error creating model provider:', error); + res.status(500).json({ error: 'Failed to create model provider' }); + } +}); + +// Set active provider +router.patch('/model-providers/:providerId/activate', async (req, res) => { + try { + const { providerId } = req.params; + const success = modelProvidersDb.setActiveProvider(req.user.id, parseInt(providerId)); + + if (success) { + res.json({ success: true }); + } else { + res.status(404).json({ error: 'Provider not found' }); + } + } catch (error) { + console.error('Error activating model provider:', error); + res.status(500).json({ error: 'Failed to activate model provider' }); + } +}); + +// Delete provider +router.delete('/model-providers/:providerId', async (req, res) => { + try { + const { providerId } = req.params; + const success = modelProvidersDb.deleteProvider(req.user.id, parseInt(providerId)); + + if (success) { + res.json({ success: true }); + } else { + res.status(404).json({ error: 'Provider not found' }); + } + } catch (error) { + console.error('Error deleting model provider:', error); + res.status(500).json({ error: 'Failed to delete model provider' }); + } +}); + export default router; diff --git a/src/components/CredentialsSettings.jsx b/src/components/CredentialsSettings.jsx index 1150b9fbd..29a6b9712 100644 --- a/src/components/CredentialsSettings.jsx +++ b/src/components/CredentialsSettings.jsx @@ -1,7 +1,7 @@ import { useState, useEffect } from 'react'; import { Button } from './ui/button'; import { 
Input } from './ui/input'; -import { Key, Plus, Trash2, Eye, EyeOff, Copy, Check, Github, ExternalLink } from 'lucide-react'; +import { Key, Plus, Trash2, Eye, EyeOff, Copy, Check, Github, ExternalLink, Server } from 'lucide-react'; import { useVersionCheck } from '../hooks/useVersionCheck'; import { version } from '../../package.json'; import { authenticatedFetch } from '../utils/api'; @@ -16,6 +16,14 @@ function CredentialsSettings() { const [newGithubName, setNewGithubName] = useState(''); const [newGithubToken, setNewGithubToken] = useState(''); const [newGithubDescription, setNewGithubDescription] = useState(''); + const [modelProviders, setModelProviders] = useState([]); + const [showNewProviderForm, setShowNewProviderForm] = useState(false); + const [newProviderName, setNewProviderName] = useState(''); + const [newProviderBaseUrl, setNewProviderBaseUrl] = useState(''); + const [newProviderApiKey, setNewProviderApiKey] = useState(''); + const [newProviderModelId, setNewProviderModelId] = useState(''); + const [newProviderDescription, setNewProviderDescription] = useState(''); + const [creatingProvider, setCreatingProvider] = useState(false); const [showToken, setShowToken] = useState({}); const [copiedKey, setCopiedKey] = useState(null); const [newlyCreatedKey, setNewlyCreatedKey] = useState(null); @@ -40,6 +48,11 @@ function CredentialsSettings() { const credentialsRes = await authenticatedFetch('/api/settings/credentials?type=github_token'); const credentialsData = await credentialsRes.json(); setGithubCredentials(credentialsData.credentials || []); + + // Fetch model providers + const providersRes = await authenticatedFetch('/api/settings/model-providers'); + const providersData = await providersRes.json(); + setModelProviders(providersData.providers || []); } catch (error) { console.error('Error fetching settings:', error); } finally { @@ -145,6 +158,63 @@ function CredentialsSettings() { } }; + const createModelProvider = async () => { + if (!newProviderName.trim() || !newProviderBaseUrl.trim() || !newProviderApiKey.trim()) return; + + try { + setCreatingProvider(true); + const res = await authenticatedFetch('/api/settings/model-providers', { + method: 'POST', + body: JSON.stringify({ + providerName: newProviderName, + apiBaseUrl: newProviderBaseUrl, + apiKey: newProviderApiKey, + modelId: newProviderModelId, + description: newProviderDescription + }) + }); + + const data = await res.json(); + if (data.success) { + setShowNewProviderForm(false); + setNewProviderName(''); + setNewProviderBaseUrl(''); + setNewProviderApiKey(''); + setNewProviderModelId(''); + setNewProviderDescription(''); + fetchData(); + } + } catch (error) { + console.error('Error creating model provider:', error); + } finally { + setCreatingProvider(false); + } + }; + + const setActiveProvider = async (providerId) => { + try { + await authenticatedFetch(`/api/settings/model-providers/${providerId}/activate`, { + method: 'PATCH' + }); + fetchData(); + } catch (error) { + console.error('Error activating model provider:', error); + } + }; + + const deleteModelProvider = async (providerId) => { + if (!confirm('Delete this model provider?')) return; + + try { + await authenticatedFetch(`/api/settings/model-providers/${providerId}`, { + method: 'DELETE' + }); + fetchData(); + } catch (error) { + console.error('Error deleting model provider:', error); + } + }; + const copyToClipboard = (text, id) => { navigator.clipboard.writeText(text); setCopiedKey(id); @@ -388,6 +458,127 @@ function CredentialsSettings() { + {/* 
Model Providers for API replacement */}
+          <div className="space-y-4">
+            <div className="flex items-center justify-between">
+              <div className="flex items-center gap-3">
+                <div className="p-2 rounded-lg bg-muted">
+                  <Server className="w-5 h-5" />
+                </div>
+                <div>
+                  <h3 className="font-medium text-foreground">Model Providers</h3>
+                  <p className="text-sm text-muted-foreground">
+                    Connect third-party model APIs and switch anytime.
+                  </p>
+                </div>
+              </div>
+              <Button onClick={() => setShowNewProviderForm(!showNewProviderForm)}>
+                <Plus className="w-4 h-4 mr-2" />
+                Add Provider
+              </Button>
+            </div>
+
+            {showNewProviderForm && (
+              <div className="space-y-3 p-4 border border-border rounded-lg">
+                <div className="grid grid-cols-1 md:grid-cols-2 gap-3">
+                  <Input
+                    placeholder="Provider name"
+                    value={newProviderName}
+                    onChange={(e) => setNewProviderName(e.target.value)}
+                  />
+                  <Input
+                    placeholder="Default model ID (optional)"
+                    value={newProviderModelId}
+                    onChange={(e) => setNewProviderModelId(e.target.value)}
+                  />
+                </div>
+                <Input
+                  placeholder="API base URL"
+                  value={newProviderBaseUrl}
+                  onChange={(e) => setNewProviderBaseUrl(e.target.value)}
+                />
+                <Input
+                  type="password"
+                  placeholder="API key"
+                  value={newProviderApiKey}
+                  onChange={(e) => setNewProviderApiKey(e.target.value)}
+                  className="pr-10"
+                />
+                <Input
+                  placeholder="Description (optional)"
+                  value={newProviderDescription}
+                  onChange={(e) => setNewProviderDescription(e.target.value)}
+                />
+                <div className="flex gap-2">
+                  <Button onClick={createModelProvider} disabled={creatingProvider}>
+                    {creatingProvider ? 'Adding...' : 'Add Provider'}
+                  </Button>
+                  <Button variant="outline" onClick={() => setShowNewProviderForm(false)}>
+                    Cancel
+                  </Button>
+                </div>
+              </div>
+            )}
+
+            <div className="space-y-2">
+              {modelProviders.length === 0 ? (
+                <p className="text-sm text-muted-foreground">
+                  No model providers configured yet.
+                </p>
+              ) : (
+                modelProviders.map((provider) => (
+                  <div key={provider.id} className="flex items-start justify-between p-3 border border-border rounded-lg">
+                    <div className="space-y-1">
+                      <div className="flex items-center gap-2">
+                        <span className="font-medium">{provider.provider_name}</span>
+                        {provider.is_active && (
+                          <span className="text-xs px-2 py-0.5 rounded-full bg-green-500/10 text-green-600">Active</span>
+                        )}
+                      </div>
+                      <div className="text-sm text-muted-foreground">{provider.api_base_url}</div>
+                      {provider.model_id && (
+                        <div className="text-sm text-muted-foreground">Default model: {provider.model_id}</div>
+                      )}
+                      <div className="text-sm text-muted-foreground">Key: {provider.api_key_preview || '***'}</div>
+                      {provider.description && (
+                        <div className="text-sm text-muted-foreground">{provider.description}</div>
+                      )}
+                    </div>
+                    <div className="flex items-center gap-2">
+                      {!provider.is_active && (
+                        <Button variant="outline" size="sm" onClick={() => setActiveProvider(provider.id)}>
+                          Set Active
+                        </Button>
+                      )}
+                      <Button variant="ghost" size="sm" onClick={() => deleteModelProvider(provider.id)}>
+                        <Trash2 className="w-4 h-4" />
+                      </Button>
+                    </div>
+                  </div>
+                ))
+              )}
+            </div>
+          </div>
+
           {/* Version Information */}
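
Usage sketch (illustrative only): a minimal client-side walkthrough of the model-provider endpoints added in PATCH 1/3, assuming the settings router is mounted under /api/settings and that authenticatedFetch attaches the session credentials the same way the existing CredentialsSettings.jsx calls do. All provider values below are placeholders, not values from the patch.

// Hypothetical walkthrough of the new model-provider endpoints.
import { authenticatedFetch } from '../utils/api'; // same helper CredentialsSettings.jsx uses

async function demoModelProviders() {
  // Create a provider; the first provider a user creates is marked active.
  const createRes = await authenticatedFetch('/api/settings/model-providers', {
    method: 'POST',
    body: JSON.stringify({
      providerName: 'Example Proxy',           // required
      apiBaseUrl: 'https://api.example.com',   // required (placeholder URL)
      apiKey: 'sk-placeholder',                // required, stored server-side only
      modelId: 'claude-sonnet-4-5',            // optional default model (placeholder)
      description: 'Placeholder entry'         // optional
    })
  });
  const { provider } = await createRes.json();

  // List providers; keys come back only as api_key_preview, never in full.
  const listRes = await authenticatedFetch('/api/settings/model-providers');
  const { providers, activeProviderId } = await listRes.json();
  console.log(providers.length, 'providers, active id:', activeProviderId);

  // Switch the active provider, then delete it; if the deleted provider was
  // active, the most recently created remaining provider is promoted.
  await authenticatedFetch(`/api/settings/model-providers/${provider.id}/activate`, { method: 'PATCH' });
  await authenticatedFetch(`/api/settings/model-providers/${provider.id}`, { method: 'DELETE' });
}
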
From 37a4137ef843182d37cd032235e0e61a5b3a440e Mon Sep 17 00:00:00 2001 From: tata Date: Tue, 18 Nov 2025 18:54:26 +0800 Subject: [PATCH 2/3] refactor: standardize quote style and improve code formatting in claude-sdk.js --- server/claude-sdk.js | 268 ++++++--- server/routes/agent.js | 595 ++++++++++++------- server/routes/git.js | 996 +++++++++++++++++++------------- server/routes/settings.js | 221 +++++-- server/utils/claude-settings.js | 190 ++++++ 5 files changed, 1515 insertions(+), 755 deletions(-) create mode 100644 server/utils/claude-settings.js diff --git a/server/claude-sdk.js b/server/claude-sdk.js index bcde12c2f..a0c82605d 100644 --- a/server/claude-sdk.js +++ b/server/claude-sdk.js @@ -12,11 +12,11 @@ * - WebSocket message streaming */ -import { query } from '@anthropic-ai/claude-agent-sdk'; -import { promises as fs } from 'fs'; -import path from 'path'; -import os from 'os'; -import { modelProvidersDb } from './database/db.js'; +import { query } from "@anthropic-ai/claude-agent-sdk"; +import { promises as fs } from "fs"; +import path from "path"; +import os from "os"; +import { modelProvidersDb } from "./database/db.js"; // Session tracking: Map of session IDs to active query instances const activeSessions = new Map(); @@ -37,7 +37,7 @@ function mapCliOptionsToSDK(options = {}) { } // Map permission mode - if (permissionMode && permissionMode !== 'default') { + if (permissionMode && permissionMode !== "default") { sdkOptions.permissionMode = permissionMode; } @@ -45,20 +45,26 @@ function mapCliOptionsToSDK(options = {}) { const settings = toolsSettings || { allowedTools: [], disallowedTools: [], - skipPermissions: false + skipPermissions: false, }; // Handle tool permissions - if (settings.skipPermissions && permissionMode !== 'plan') { + if (settings.skipPermissions && permissionMode !== "plan") { // When skipping permissions, use bypassPermissions mode - sdkOptions.permissionMode = 'bypassPermissions'; + sdkOptions.permissionMode = "bypassPermissions"; } else { // Map allowed tools let allowedTools = [...(settings.allowedTools || [])]; // Add plan mode default tools - if (permissionMode === 'plan') { - const planModeTools = ['Read', 'Task', 'exit_plan_mode', 'TodoRead', 'TodoWrite']; + if (permissionMode === "plan") { + const planModeTools = [ + "Read", + "Task", + "exit_plan_mode", + "TodoRead", + "TodoWrite", + ]; for (const tool of planModeTools) { if (!allowedTools.includes(tool)) { allowedTools.push(tool); @@ -78,17 +84,17 @@ function mapCliOptionsToSDK(options = {}) { // Map model (default to sonnet) // Map model (default to sonnet) - sdkOptions.model = options.model || 'sonnet'; + sdkOptions.model = options.model || "sonnet"; // Map system prompt configuration sdkOptions.systemPrompt = { - type: 'preset', - preset: 'claude_code' // Required to use CLAUDE.md + type: "preset", + preset: "claude_code", // Required to use CLAUDE.md }; // Map setting sources for CLAUDE.md loading // This loads CLAUDE.md from project, user (~/.config/claude/CLAUDE.md), and local directories - sdkOptions.settingSources = ['project', 'user', 'local']; + sdkOptions.settingSources = ["project", "user", "local"]; // Map resume session if (sessionId) { @@ -105,13 +111,18 @@ function mapCliOptionsToSDK(options = {}) { * @param {Array} tempImagePaths - Temp image file paths for cleanup * @param {string} tempDir - Temp directory for cleanup */ -function addSession(sessionId, queryInstance, tempImagePaths = [], tempDir = null) { +function addSession( + sessionId, + queryInstance, + tempImagePaths = 
[], + tempDir = null +) { activeSessions.set(sessionId, { instance: queryInstance, startTime: Date.now(), - status: 'active', + status: "active", tempImagePaths, - tempDir + tempDir, }); } @@ -158,7 +169,7 @@ function transformMessage(sdkMessage) { * @returns {Object|null} Token budget object or null */ function extractTokenBudget(resultMessage) { - if (resultMessage.type !== 'result' || !resultMessage.modelUsage) { + if (resultMessage.type !== "result" || !resultMessage.modelUsage) { return null; } @@ -172,23 +183,36 @@ function extractTokenBudget(resultMessage) { // Use cumulative tokens if available (tracks total for the session) // Otherwise fall back to per-request tokens - const inputTokens = modelData.cumulativeInputTokens || modelData.inputTokens || 0; - const outputTokens = modelData.cumulativeOutputTokens || modelData.outputTokens || 0; - const cacheReadTokens = modelData.cumulativeCacheReadInputTokens || modelData.cacheReadInputTokens || 0; - const cacheCreationTokens = modelData.cumulativeCacheCreationInputTokens || modelData.cacheCreationInputTokens || 0; + const inputTokens = + modelData.cumulativeInputTokens || modelData.inputTokens || 0; + const outputTokens = + modelData.cumulativeOutputTokens || modelData.outputTokens || 0; + const cacheReadTokens = + modelData.cumulativeCacheReadInputTokens || + modelData.cacheReadInputTokens || + 0; + const cacheCreationTokens = + modelData.cumulativeCacheCreationInputTokens || + modelData.cacheCreationInputTokens || + 0; // Total used = input + output + cache tokens - const totalUsed = inputTokens + outputTokens + cacheReadTokens + cacheCreationTokens; + const totalUsed = + inputTokens + outputTokens + cacheReadTokens + cacheCreationTokens; // Use configured context window budget from environment (default 160000) // This is the user's budget limit, not the model's context window const contextWindow = parseInt(process.env.CONTEXT_WINDOW) || 160000; - console.log(`๐Ÿ“Š Token calculation: input=${inputTokens}, output=${outputTokens}, cache=${cacheReadTokens + cacheCreationTokens}, total=${totalUsed}/${contextWindow}`); + console.log( + `๐Ÿ“Š Token calculation: input=${inputTokens}, output=${outputTokens}, cache=${ + cacheReadTokens + cacheCreationTokens + }, total=${totalUsed}/${contextWindow}` + ); return { used: totalUsed, - total: contextWindow + total: contextWindow, }; } @@ -211,7 +235,7 @@ async function handleImages(command, images, cwd) { try { // Create temp directory in the project directory const workingDir = cwd || process.cwd(); - tempDir = path.join(workingDir, '.tmp', 'images', Date.now().toString()); + tempDir = path.join(workingDir, ".tmp", "images", Date.now().toString()); await fs.mkdir(tempDir, { recursive: true }); // Save each image to a temp file @@ -219,31 +243,35 @@ async function handleImages(command, images, cwd) { // Extract base64 data and mime type const matches = image.data.match(/^data:([^;]+);base64,(.+)$/); if (!matches) { - console.error('Invalid image data format'); + console.error("Invalid image data format"); continue; } const [, mimeType, base64Data] = matches; - const extension = mimeType.split('/')[1] || 'png'; + const extension = mimeType.split("/")[1] || "png"; const filename = `image_${index}.${extension}`; const filepath = path.join(tempDir, filename); // Write base64 data to file - await fs.writeFile(filepath, Buffer.from(base64Data, 'base64')); + await fs.writeFile(filepath, Buffer.from(base64Data, "base64")); tempImagePaths.push(filepath); } // Include the full image paths in the prompt let 
modifiedCommand = command; if (tempImagePaths.length > 0 && command && command.trim()) { - const imageNote = `\n\n[Images provided at the following paths:]\n${tempImagePaths.map((p, i) => `${i + 1}. ${p}`).join('\n')}`; + const imageNote = `\n\n[Images provided at the following paths:]\n${tempImagePaths + .map((p, i) => `${i + 1}. ${p}`) + .join("\n")}`; modifiedCommand = command + imageNote; } - console.log(`๐Ÿ“ธ Processed ${tempImagePaths.length} images to temp directory: ${tempDir}`); + console.log( + `๐Ÿ“ธ Processed ${tempImagePaths.length} images to temp directory: ${tempDir}` + ); return { modifiedCommand, tempImagePaths, tempDir }; } catch (error) { - console.error('Error processing images for SDK:', error); + console.error("Error processing images for SDK:", error); return { modifiedCommand: command, tempImagePaths, tempDir }; } } @@ -261,21 +289,25 @@ async function cleanupTempFiles(tempImagePaths, tempDir) { try { // Delete individual temp files for (const imagePath of tempImagePaths) { - await fs.unlink(imagePath).catch(err => - console.error(`Failed to delete temp image ${imagePath}:`, err) - ); + await fs + .unlink(imagePath) + .catch((err) => + console.error(`Failed to delete temp image ${imagePath}:`, err) + ); } // Delete temp directory if (tempDir) { - await fs.rm(tempDir, { recursive: true, force: true }).catch(err => - console.error(`Failed to delete temp directory ${tempDir}:`, err) - ); + await fs + .rm(tempDir, { recursive: true, force: true }) + .catch((err) => + console.error(`Failed to delete temp directory ${tempDir}:`, err) + ); } console.log(`๐Ÿงน Cleaned up ${tempImagePaths.length} temp image files`); } catch (error) { - console.error('Error during temp file cleanup:', error); + console.error("Error during temp file cleanup:", error); } } @@ -286,24 +318,24 @@ async function cleanupTempFiles(tempImagePaths, tempDir) { */ async function loadMcpConfig(cwd) { try { - const claudeConfigPath = path.join(os.homedir(), '.claude.json'); + const claudeConfigPath = path.join(os.homedir(), ".claude.json"); // Check if config file exists try { await fs.access(claudeConfigPath); } catch (error) { // File doesn't exist, return null - console.log('๐Ÿ“ก No ~/.claude.json found, proceeding without MCP servers'); + console.log("๐Ÿ“ก No ~/.claude.json found, proceeding without MCP servers"); return null; } // Read and parse config file let claudeConfig; try { - const configContent = await fs.readFile(claudeConfigPath, 'utf8'); + const configContent = await fs.readFile(claudeConfigPath, "utf8"); claudeConfig = JSON.parse(configContent); } catch (error) { - console.error('โŒ Failed to parse ~/.claude.json:', error.message); + console.error("โŒ Failed to parse ~/.claude.json:", error.message); return null; } @@ -311,30 +343,45 @@ async function loadMcpConfig(cwd) { let mcpServers = {}; // Add global MCP servers - if (claudeConfig.mcpServers && typeof claudeConfig.mcpServers === 'object') { + if ( + claudeConfig.mcpServers && + typeof claudeConfig.mcpServers === "object" + ) { mcpServers = { ...claudeConfig.mcpServers }; - console.log(`๐Ÿ“ก Loaded ${Object.keys(mcpServers).length} global MCP servers`); + console.log( + `๐Ÿ“ก Loaded ${Object.keys(mcpServers).length} global MCP servers` + ); } // Add/override with project-specific MCP servers if (claudeConfig.claudeProjects && cwd) { const projectConfig = claudeConfig.claudeProjects[cwd]; - if (projectConfig && projectConfig.mcpServers && typeof projectConfig.mcpServers === 'object') { + if ( + projectConfig && + 
projectConfig.mcpServers && + typeof projectConfig.mcpServers === "object" + ) { mcpServers = { ...mcpServers, ...projectConfig.mcpServers }; - console.log(`๐Ÿ“ก Loaded ${Object.keys(projectConfig.mcpServers).length} project-specific MCP servers`); + console.log( + `๐Ÿ“ก Loaded ${ + Object.keys(projectConfig.mcpServers).length + } project-specific MCP servers` + ); } } // Return null if no servers found if (Object.keys(mcpServers).length === 0) { - console.log('๐Ÿ“ก No MCP servers configured'); + console.log("๐Ÿ“ก No MCP servers configured"); return null; } - console.log(`โœ… Total MCP servers loaded: ${Object.keys(mcpServers).length}`); + console.log( + `โœ… Total MCP servers loaded: ${Object.keys(mcpServers).length}` + ); return mcpServers; } catch (error) { - console.error('โŒ Error loading MCP config:', error.message); + console.error("โŒ Error loading MCP config:", error.message); return null; } } @@ -358,20 +405,32 @@ async function queryClaudeSDK(command, options = {}, ws) { // Apply user-selected model provider overrides if available if (runtimeOptions.userId) { try { - const provider = modelProvidersDb.getActiveProvider(runtimeOptions.userId); + const provider = modelProvidersDb.getActiveProvider( + runtimeOptions.userId + ); if (provider) { + console.log(`๐Ÿ”ง Applying model provider: ${provider.provider_name}`); if (provider.api_key) { process.env.ANTHROPIC_API_KEY = provider.api_key; + console.log(`๐Ÿ”‘ Set ANTHROPIC_API_KEY from provider`); } if (provider.api_base_url) { - process.env.ANTHROPIC_API_URL = provider.api_base_url; + process.env.ANTHROPIC_BASE_URL = provider.api_base_url; + console.log( + `๐ŸŒ Set ANTHROPIC_BASE_URL to: ${provider.api_base_url}` + ); } if (provider.model_id && !runtimeOptions.model) { runtimeOptions.model = provider.model_id; + console.log(`๐Ÿค– Set model to: ${provider.model_id}`); } + } else { + console.log( + "โ„น๏ธ No active model provider configured, using default settings" + ); } } catch (error) { - console.error('[ERROR] Unable to load active model provider:', error); + console.error("[ERROR] Unable to load active model provider:", error); } } @@ -385,7 +444,11 @@ async function queryClaudeSDK(command, options = {}, ws) { } // Handle images - save to temp files and modify prompt - const imageResult = await handleImages(command, options.images, options.cwd); + const imageResult = await handleImages( + command, + options.images, + options.cwd + ); const finalCommand = imageResult.modifiedCommand; tempImagePaths = imageResult.tempImagePaths; tempDir = imageResult.tempDir; @@ -393,7 +456,7 @@ async function queryClaudeSDK(command, options = {}, ws) { // Create SDK query instance const queryInstance = query({ prompt: finalCommand, - options: sdkOptions + options: sdkOptions, }); // Track the query instance for abort capability @@ -402,49 +465,67 @@ async function queryClaudeSDK(command, options = {}, ws) { } // Process streaming messages - console.log('๐Ÿ”„ Starting async generator loop for session:', capturedSessionId || 'NEW'); + console.log( + "๐Ÿ”„ Starting async generator loop for session:", + capturedSessionId || "NEW" + ); for await (const message of queryInstance) { // Capture session ID from first message if (message.session_id && !capturedSessionId) { - capturedSessionId = message.session_id; addSession(capturedSessionId, queryInstance, tempImagePaths, tempDir); // Set session ID on writer - if (ws.setSessionId && typeof ws.setSessionId === 'function') { + if (ws.setSessionId && typeof ws.setSessionId === "function") { 
ws.setSessionId(capturedSessionId); } // Send session-created event only once for new sessions if (!sessionId && !sessionCreatedSent) { sessionCreatedSent = true; - ws.send(JSON.stringify({ - type: 'session-created', - sessionId: capturedSessionId - })); + ws.send( + JSON.stringify({ + type: "session-created", + sessionId: capturedSessionId, + }) + ); } else { - console.log('โš ๏ธ Not sending session-created. sessionId:', sessionId, 'sessionCreatedSent:', sessionCreatedSent); + console.log( + "โš ๏ธ Not sending session-created. sessionId:", + sessionId, + "sessionCreatedSent:", + sessionCreatedSent + ); } } else { - console.log('โš ๏ธ No session_id in message or already captured. message.session_id:', message.session_id, 'capturedSessionId:', capturedSessionId); + console.log( + "โš ๏ธ No session_id in message or already captured. message.session_id:", + message.session_id, + "capturedSessionId:", + capturedSessionId + ); } // Transform and send message to WebSocket const transformedMessage = transformMessage(message); - ws.send(JSON.stringify({ - type: 'claude-response', - data: transformedMessage - })); + ws.send( + JSON.stringify({ + type: "claude-response", + data: transformedMessage, + }) + ); // Extract and send token budget updates from result messages - if (message.type === 'result') { + if (message.type === "result") { const tokenBudget = extractTokenBudget(message); if (tokenBudget) { - console.log('๐Ÿ“Š Token budget from modelUsage:', tokenBudget); - ws.send(JSON.stringify({ - type: 'token-budget', - data: tokenBudget - })); + console.log("๐Ÿ“Š Token budget from modelUsage:", tokenBudget); + ws.send( + JSON.stringify({ + type: "token-budget", + data: tokenBudget, + }) + ); } } } @@ -458,17 +539,18 @@ async function queryClaudeSDK(command, options = {}, ws) { await cleanupTempFiles(tempImagePaths, tempDir); // Send completion event - console.log('โœ… Streaming complete, sending claude-complete event'); - ws.send(JSON.stringify({ - type: 'claude-complete', - sessionId: capturedSessionId, - exitCode: 0, - isNewSession: !sessionId && !!command - })); - console.log('๐Ÿ“ค claude-complete event sent'); - + console.log("โœ… Streaming complete, sending claude-complete event"); + ws.send( + JSON.stringify({ + type: "claude-complete", + sessionId: capturedSessionId, + exitCode: 0, + isNewSession: !sessionId && !!command, + }) + ); + console.log("๐Ÿ“ค claude-complete event sent"); } catch (error) { - console.error('SDK query error:', error); + console.error("SDK query error:", error); // Clean up session on error if (capturedSessionId) { @@ -479,10 +561,12 @@ async function queryClaudeSDK(command, options = {}, ws) { await cleanupTempFiles(tempImagePaths, tempDir); // Send error to WebSocket - ws.send(JSON.stringify({ - type: 'claude-error', - error: error.message - })); + ws.send( + JSON.stringify({ + type: "claude-error", + error: error.message, + }) + ); throw error; } @@ -508,7 +592,7 @@ async function abortClaudeSDKSession(sessionId) { await session.instance.interrupt(); // Update session status - session.status = 'aborted'; + session.status = "aborted"; // Clean up temporary image files await cleanupTempFiles(session.tempImagePaths, session.tempDir); @@ -530,7 +614,7 @@ async function abortClaudeSDKSession(sessionId) { */ function isClaudeSDKSessionActive(sessionId) { const session = getSession(sessionId); - return session && session.status === 'active'; + return session && session.status === "active"; } /** @@ -546,5 +630,5 @@ export { queryClaudeSDK, abortClaudeSDKSession, 
isClaudeSDKSessionActive, - getActiveClaudeSDKSessions + getActiveClaudeSDKSessions, }; diff --git a/server/routes/agent.js b/server/routes/agent.js index 71ad1f60b..e6206d74d 100644 --- a/server/routes/agent.js +++ b/server/routes/agent.js @@ -1,29 +1,29 @@ -import express from 'express'; -import { spawn } from 'child_process'; -import path from 'path'; -import os from 'os'; -import { promises as fs } from 'fs'; -import crypto from 'crypto'; -import { apiKeysDb, githubTokensDb } from '../database/db.js'; -import { addProjectManually } from '../projects.js'; -import { queryClaudeSDK } from '../claude-sdk.js'; -import { spawnCursor } from '../cursor-cli.js'; -import { Octokit } from '@octokit/rest'; +import express from "express"; +import { spawn } from "child_process"; +import path from "path"; +import os from "os"; +import { promises as fs } from "fs"; +import crypto from "crypto"; +import { apiKeysDb, githubTokensDb } from "../database/db.js"; +import { addProjectManually } from "../projects.js"; +import { queryClaudeSDK } from "../claude-sdk.js"; +import { spawnCursor } from "../cursor-cli.js"; +import { Octokit } from "@octokit/rest"; const router = express.Router(); // Middleware to validate API key for external requests const validateExternalApiKey = (req, res, next) => { - const apiKey = req.headers['x-api-key'] || req.query.apiKey; + const apiKey = req.headers["x-api-key"] || req.query.apiKey; if (!apiKey) { - return res.status(401).json({ error: 'API key required' }); + return res.status(401).json({ error: "API key required" }); } const user = apiKeysDb.validateApiKey(apiKey); if (!user) { - return res.status(401).json({ error: 'Invalid or inactive API key' }); + return res.status(401).json({ error: "Invalid or inactive API key" }); } req.user = user; @@ -37,23 +37,23 @@ const validateExternalApiKey = (req, res, next) => { */ async function getGitRemoteUrl(repoPath) { return new Promise((resolve, reject) => { - const gitProcess = spawn('git', ['config', '--get', 'remote.origin.url'], { + const gitProcess = spawn("git", ["config", "--get", "remote.origin.url"], { cwd: repoPath, - stdio: ['pipe', 'pipe', 'pipe'] + stdio: ["pipe", "pipe", "pipe"], }); - let stdout = ''; - let stderr = ''; + let stdout = ""; + let stderr = ""; - gitProcess.stdout.on('data', (data) => { + gitProcess.stdout.on("data", (data) => { stdout += data.toString(); }); - gitProcess.stderr.on('data', (data) => { + gitProcess.stderr.on("data", (data) => { stderr += data.toString(); }); - gitProcess.on('close', (code) => { + gitProcess.on("close", (code) => { if (code === 0) { resolve(stdout.trim()); } else { @@ -61,7 +61,7 @@ async function getGitRemoteUrl(repoPath) { } }); - gitProcess.on('error', (error) => { + gitProcess.on("error", (error) => { reject(new Error(`Failed to execute git: ${error.message}`)); }); }); @@ -74,11 +74,11 @@ async function getGitRemoteUrl(repoPath) { */ function normalizeGitHubUrl(url) { // Remove .git suffix - let normalized = url.replace(/\.git$/, ''); + let normalized = url.replace(/\.git$/, ""); // Convert SSH to HTTPS format for comparison - normalized = normalized.replace(/^git@github\.com:/, 'https://github.com/'); + normalized = normalized.replace(/^git@github\.com:/, "https://github.com/"); // Remove trailing slash - normalized = normalized.replace(/\/$/, ''); + normalized = normalized.replace(/\/$/, ""); return normalized.toLowerCase(); } @@ -92,11 +92,11 @@ function parseGitHubUrl(url) { // Handle SSH URLs: git@github.com:owner/repo or git@github.com:owner/repo.git const 
match = url.match(/github\.com[:/]([^/]+)\/([^/]+?)(?:\.git)?$/); if (!match) { - throw new Error('Invalid GitHub URL format'); + throw new Error("Invalid GitHub URL format"); } return { owner: match[1], - repo: match[2].replace(/\.git$/, '') + repo: match[2].replace(/\.git$/, ""), }; } @@ -109,14 +109,14 @@ function autogenerateBranchName(message) { // Convert to lowercase, replace spaces/special chars with hyphens let branchName = message .toLowerCase() - .replace(/[^a-z0-9\s-]/g, '') // Remove special characters - .replace(/\s+/g, '-') // Replace spaces with hyphens - .replace(/-+/g, '-') // Replace multiple hyphens with single - .replace(/^-|-$/g, ''); // Remove leading/trailing hyphens + .replace(/[^a-z0-9\s-]/g, "") // Remove special characters + .replace(/\s+/g, "-") // Replace spaces with hyphens + .replace(/-+/g, "-") // Replace multiple hyphens with single + .replace(/^-|-$/g, ""); // Remove leading/trailing hyphens // Ensure non-empty fallback if (!branchName) { - branchName = 'task'; + branchName = "task"; } // Generate timestamp suffix (last 6 chars of base36 timestamp) @@ -130,11 +130,11 @@ function autogenerateBranchName(message) { } // Remove any trailing hyphen after truncation and ensure no leading hyphen - branchName = branchName.replace(/-$/, '').replace(/^-+/, ''); + branchName = branchName.replace(/-$/, "").replace(/^-+/, ""); // If still empty or starts with hyphen after cleanup, use fallback - if (!branchName || branchName.startsWith('-')) { - branchName = 'task'; + if (!branchName || branchName.startsWith("-")) { + branchName = "task"; } // Combine base name with timestamp suffix @@ -155,22 +155,31 @@ function autogenerateBranchName(message) { * @returns {{valid: boolean, error?: string}} - Validation result */ function validateBranchName(branchName) { - if (!branchName || branchName.trim() === '') { - return { valid: false, error: 'Branch name cannot be empty' }; + if (!branchName || branchName.trim() === "") { + return { valid: false, error: "Branch name cannot be empty" }; } // Git branch name rules const invalidPatterns = [ - { pattern: /^\./, message: 'Branch name cannot start with a dot' }, - { pattern: /\.$/, message: 'Branch name cannot end with a dot' }, - { pattern: /\.\./, message: 'Branch name cannot contain consecutive dots (..)' }, - { pattern: /\s/, message: 'Branch name cannot contain spaces' }, - { pattern: /[~^:?*\[\\]/, message: 'Branch name cannot contain special characters: ~ ^ : ? * [ \\' }, - { pattern: /@{/, message: 'Branch name cannot contain @{' }, - { pattern: /\/$/, message: 'Branch name cannot end with a slash' }, - { pattern: /^\//, message: 'Branch name cannot start with a slash' }, - { pattern: /\/\//, message: 'Branch name cannot contain consecutive slashes' }, - { pattern: /\.lock$/, message: 'Branch name cannot end with .lock' } + { pattern: /^\./, message: "Branch name cannot start with a dot" }, + { pattern: /\.$/, message: "Branch name cannot end with a dot" }, + { + pattern: /\.\./, + message: "Branch name cannot contain consecutive dots (..)", + }, + { pattern: /\s/, message: "Branch name cannot contain spaces" }, + { + pattern: /[~^:?*\[\\]/, + message: "Branch name cannot contain special characters: ~ ^ : ? 
* [ \\", + }, + { pattern: /@{/, message: "Branch name cannot contain @{" }, + { pattern: /\/$/, message: "Branch name cannot end with a slash" }, + { pattern: /^\//, message: "Branch name cannot start with a slash" }, + { + pattern: /\/\//, + message: "Branch name cannot contain consecutive slashes", + }, + { pattern: /\.lock$/, message: "Branch name cannot end with .lock" }, ]; for (const { pattern, message } of invalidPatterns) { @@ -181,7 +190,10 @@ function validateBranchName(branchName) { // Check for ASCII control characters if (/[\x00-\x1F\x7F]/.test(branchName)) { - return { valid: false, error: 'Branch name cannot contain control characters' }; + return { + valid: false, + error: "Branch name cannot contain control characters", + }; } return { valid: true }; @@ -195,32 +207,39 @@ function validateBranchName(branchName) { */ async function getCommitMessages(projectPath, limit = 5) { return new Promise((resolve, reject) => { - const gitProcess = spawn('git', ['log', `-${limit}`, '--pretty=format:%s'], { - cwd: projectPath, - stdio: ['pipe', 'pipe', 'pipe'] - }); + const gitProcess = spawn( + "git", + ["log", `-${limit}`, "--pretty=format:%s"], + { + cwd: projectPath, + stdio: ["pipe", "pipe", "pipe"], + } + ); - let stdout = ''; - let stderr = ''; + let stdout = ""; + let stderr = ""; - gitProcess.stdout.on('data', (data) => { + gitProcess.stdout.on("data", (data) => { stdout += data.toString(); }); - gitProcess.stderr.on('data', (data) => { + gitProcess.stderr.on("data", (data) => { stderr += data.toString(); }); - gitProcess.on('close', (code) => { + gitProcess.on("close", (code) => { if (code === 0) { - const messages = stdout.trim().split('\n').filter(msg => msg.length > 0); + const messages = stdout + .trim() + .split("\n") + .filter((msg) => msg.length > 0); resolve(messages); } else { reject(new Error(`Failed to get commit messages: ${stderr}`)); } }); - gitProcess.on('error', (error) => { + gitProcess.on("error", (error) => { reject(new Error(`Failed to execute git: ${error.message}`)); }); }); @@ -235,13 +254,19 @@ async function getCommitMessages(projectPath, limit = 5) { * @param {string} baseBranch - Base branch to branch from (default: 'main') * @returns {Promise} */ -async function createGitHubBranch(octokit, owner, repo, branchName, baseBranch = 'main') { +async function createGitHubBranch( + octokit, + owner, + repo, + branchName, + baseBranch = "main" +) { try { // Get the SHA of the base branch const { data: ref } = await octokit.git.getRef({ owner, repo, - ref: `heads/${baseBranch}` + ref: `heads/${baseBranch}`, }); const baseSha = ref.object.sha; @@ -251,12 +276,15 @@ async function createGitHubBranch(octokit, owner, repo, branchName, baseBranch = owner, repo, ref: `refs/heads/${branchName}`, - sha: baseSha + sha: baseSha, }); console.log(`โœ… Created branch '${branchName}' on GitHub`); } catch (error) { - if (error.status === 422 && error.message.includes('Reference already exists')) { + if ( + error.status === 422 && + error.message.includes("Reference already exists") + ) { console.log(`โ„น๏ธ Branch '${branchName}' already exists on GitHub`); } else { throw error; @@ -275,21 +303,29 @@ async function createGitHubBranch(octokit, owner, repo, branchName, baseBranch = * @param {string} baseBranch - Base branch (default: 'main') * @returns {Promise<{number: number, url: string}>} - PR number and URL */ -async function createGitHubPR(octokit, owner, repo, branchName, title, body, baseBranch = 'main') { +async function createGitHubPR( + octokit, + owner, + repo, + 
branchName, + title, + body, + baseBranch = "main" +) { const { data: pr } = await octokit.pulls.create({ owner, repo, title, head: branchName, base: baseBranch, - body + body, }); console.log(`โœ… Created pull request #${pr.number}: ${pr.html_url}`); return { number: pr.number, - url: pr.html_url + url: pr.html_url, }; } @@ -304,8 +340,8 @@ async function cloneGitHubRepo(githubUrl, githubToken = null, projectPath) { return new Promise(async (resolve, reject) => { try { // Validate GitHub URL - if (!githubUrl || !githubUrl.includes('github.com')) { - throw new Error('Invalid GitHub URL'); + if (!githubUrl || !githubUrl.includes("github.com")) { + throw new Error("Invalid GitHub URL"); } const cloneDir = path.resolve(projectPath); @@ -320,13 +356,19 @@ async function cloneGitHubRepo(githubUrl, githubToken = null, projectPath) { const normalizedRequested = normalizeGitHubUrl(githubUrl); if (normalizedExisting === normalizedRequested) { - console.log('โœ… Repository already exists at path with correct URL'); + console.log( + "โœ… Repository already exists at path with correct URL" + ); return resolve(cloneDir); } else { - throw new Error(`Directory ${cloneDir} already exists with a different repository (${existingUrl}). Expected: ${githubUrl}`); + throw new Error( + `Directory ${cloneDir} already exists with a different repository (${existingUrl}). Expected: ${githubUrl}` + ); } } catch (gitError) { - throw new Error(`Directory ${cloneDir} already exists but is not a valid git repository or git command failed`); + throw new Error( + `Directory ${cloneDir} already exists but is not a valid git repository or git command failed` + ); } } catch (accessError) { // Directory doesn't exist - proceed with clone @@ -340,40 +382,47 @@ async function cloneGitHubRepo(githubUrl, githubToken = null, projectPath) { if (githubToken) { // Convert HTTPS URL to authenticated URL // Example: https://github.com/user/repo -> https://token@github.com/user/repo - cloneUrl = githubUrl.replace('https://github.com', `https://${githubToken}@github.com`); + cloneUrl = githubUrl.replace( + "https://github.com", + `https://${githubToken}@github.com` + ); } - console.log('๐Ÿ”„ Cloning repository:', githubUrl); - console.log('๐Ÿ“ Destination:', cloneDir); + console.log("๐Ÿ”„ Cloning repository:", githubUrl); + console.log("๐Ÿ“ Destination:", cloneDir); // Execute git clone - const gitProcess = spawn('git', ['clone', '--depth', '1', cloneUrl, cloneDir], { - stdio: ['pipe', 'pipe', 'pipe'] - }); + const gitProcess = spawn( + "git", + ["clone", "--depth", "1", cloneUrl, cloneDir], + { + stdio: ["pipe", "pipe", "pipe"], + } + ); - let stdout = ''; - let stderr = ''; + let stdout = ""; + let stderr = ""; - gitProcess.stdout.on('data', (data) => { + gitProcess.stdout.on("data", (data) => { stdout += data.toString(); }); - gitProcess.stderr.on('data', (data) => { + gitProcess.stderr.on("data", (data) => { stderr += data.toString(); - console.log('Git stderr:', data.toString()); + console.log("Git stderr:", data.toString()); }); - gitProcess.on('close', (code) => { + gitProcess.on("close", (code) => { if (code === 0) { - console.log('โœ… Repository cloned successfully'); + console.log("โœ… Repository cloned successfully"); resolve(cloneDir); } else { - console.error('โŒ Git clone failed:', stderr); + console.error("โŒ Git clone failed:", stderr); reject(new Error(`Git clone failed: ${stderr}`)); } }); - gitProcess.on('error', (error) => { + gitProcess.on("error", (error) => { reject(new Error(`Failed to execute git: 
${error.message}`)); }); } catch (error) { @@ -390,28 +439,39 @@ async function cloneGitHubRepo(githubUrl, githubToken = null, projectPath) { async function cleanupProject(projectPath, sessionId = null) { try { // Only clean up projects in the external-projects directory - if (!projectPath.includes('.claude/external-projects')) { - console.warn('โš ๏ธ Refusing to clean up non-external project:', projectPath); + if (!projectPath.includes(".claude/external-projects")) { + console.warn( + "โš ๏ธ Refusing to clean up non-external project:", + projectPath + ); return; } - console.log('๐Ÿงน Cleaning up project:', projectPath); + console.log("๐Ÿงน Cleaning up project:", projectPath); await fs.rm(projectPath, { recursive: true, force: true }); - console.log('โœ… Project cleaned up'); + console.log("โœ… Project cleaned up"); // Also clean up the Claude session directory if sessionId provided if (sessionId) { try { - const sessionPath = path.join(os.homedir(), '.claude', 'sessions', sessionId); - console.log('๐Ÿงน Cleaning up session directory:', sessionPath); + const sessionPath = path.join( + os.homedir(), + ".claude", + "sessions", + sessionId + ); + console.log("๐Ÿงน Cleaning up session directory:", sessionPath); await fs.rm(sessionPath, { recursive: true, force: true }); - console.log('โœ… Session directory cleaned up'); + console.log("โœ… Session directory cleaned up"); } catch (error) { - console.error('โš ๏ธ Failed to clean up session directory:', error.message); + console.error( + "โš ๏ธ Failed to clean up session directory:", + error.message + ); } } } catch (error) { - console.error('โŒ Failed to clean up project:', error); + console.error("โŒ Failed to clean up project:", error); } } @@ -463,7 +523,7 @@ class ResponseCollector { this.messages.push(data); // Extract sessionId if present - if (typeof data === 'string') { + if (typeof data === "string") { try { const parsed = JSON.parse(data); if (parsed.sessionId) { @@ -501,16 +561,20 @@ class ResponseCollector { for (const msg of this.messages) { // Skip initial status message - if (msg && msg.type === 'status') { + if (msg && msg.type === "status") { continue; } // Handle JSON strings - if (typeof msg === 'string') { + if (typeof msg === "string") { try { const parsed = JSON.parse(msg); // Only include claude-response messages with assistant type - if (parsed.type === 'claude-response' && parsed.data && parsed.data.type === 'assistant') { + if ( + parsed.type === "claude-response" && + parsed.data && + parsed.data.type === "assistant" + ) { assistantMessages.push(parsed.data); } } catch (e) { @@ -535,7 +599,7 @@ class ResponseCollector { let data = msg; // Parse if string - if (typeof msg === 'string') { + if (typeof msg === "string") { try { data = JSON.parse(msg); } catch (e) { @@ -544,7 +608,7 @@ class ResponseCollector { } // Extract usage from claude-response messages - if (data && data.type === 'claude-response' && data.data) { + if (data && data.type === "claude-response" && data.data) { const msgData = data.data; if (msgData.message && msgData.message.usage) { const usage = msgData.message.usage; @@ -561,7 +625,8 @@ class ResponseCollector { outputTokens: totalOutput, cacheReadTokens: totalCacheRead, cacheCreationTokens: totalCacheCreation, - totalTokens: totalInput + totalOutput + totalCacheRead + totalCacheCreation + totalTokens: + totalInput + totalOutput + totalCacheRead + totalCacheCreation, }; } } @@ -799,34 +864,59 @@ class ResponseCollector { * "cleanup": false * } */ -router.post('/', validateExternalApiKey, async 
(req, res) => { - const { githubUrl, projectPath, message, provider = 'claude', model, githubToken, branchName } = req.body; +router.post("/", validateExternalApiKey, async (req, res) => { + const { + githubUrl, + projectPath, + message, + provider = "claude", + model, + githubToken, + branchName, + } = req.body; // Parse stream and cleanup as booleans (handle string "true"/"false" from curl) - const stream = req.body.stream === undefined ? true : (req.body.stream === true || req.body.stream === 'true'); - const cleanup = req.body.cleanup === undefined ? true : (req.body.cleanup === true || req.body.cleanup === 'true'); + const stream = + req.body.stream === undefined + ? true + : req.body.stream === true || req.body.stream === "true"; + const cleanup = + req.body.cleanup === undefined + ? true + : req.body.cleanup === true || req.body.cleanup === "true"; // If branchName is provided, automatically enable createBranch - const createBranch = branchName ? true : (req.body.createBranch === true || req.body.createBranch === 'true'); - const createPR = req.body.createPR === true || req.body.createPR === 'true'; + const createBranch = branchName + ? true + : req.body.createBranch === true || req.body.createBranch === "true"; + const createPR = req.body.createPR === true || req.body.createPR === "true"; // Validate inputs if (!githubUrl && !projectPath) { - return res.status(400).json({ error: 'Either githubUrl or projectPath is required' }); + return res + .status(400) + .json({ error: "Either githubUrl or projectPath is required" }); } if (!message || !message.trim()) { - return res.status(400).json({ error: 'message is required' }); + return res.status(400).json({ error: "message is required" }); } - if (!['claude', 'cursor'].includes(provider)) { - return res.status(400).json({ error: 'provider must be "claude" or "cursor"' }); + if (!["claude", "cursor"].includes(provider)) { + return res + .status(400) + .json({ error: 'provider must be "claude" or "cursor"' }); } // Validate GitHub branch/PR creation requirements // Allow branch/PR creation with projectPath as long as it has a GitHub remote if ((createBranch || createPR) && !githubUrl && !projectPath) { - return res.status(400).json({ error: 'createBranch and createPR require either githubUrl or projectPath with a GitHub remote' }); + return res + .status(400) + .json({ + error: + "createBranch and createPR require either githubUrl or projectPath with a GitHub remote", + }); } let finalProjectPath = null; @@ -836,18 +926,31 @@ router.post('/', validateExternalApiKey, async (req, res) => { // Determine the final project path if (githubUrl) { // Clone repository (to projectPath if provided, otherwise generate path) - const tokenToUse = githubToken || githubTokensDb.getActiveGithubToken(req.user.id); + const tokenToUse = + githubToken || githubTokensDb.getActiveGithubToken(req.user.id); let targetPath; if (projectPath) { targetPath = projectPath; } else { // Generate a unique path for cloning - const repoHash = crypto.createHash('md5').update(githubUrl + Date.now()).digest('hex'); - targetPath = path.join(os.homedir(), '.claude', 'external-projects', repoHash); + const repoHash = crypto + .createHash("md5") + .update(githubUrl + Date.now()) + .digest("hex"); + targetPath = path.join( + os.homedir(), + ".claude", + "external-projects", + repoHash + ); } - finalProjectPath = await cloneGitHubRepo(githubUrl.trim(), tokenToUse, targetPath); + finalProjectPath = await cloneGitHubRepo( + githubUrl.trim(), + tokenToUse, + targetPath + ); } else { // 
Use existing project path
       finalProjectPath = path.resolve(projectPath);
@@ -864,11 +967,17 @@ router.post('/', validateExternalApiKey, async (req, res) => {
     let project;
     try {
       project = await addProjectManually(finalProjectPath);
-      console.log('📦 Project registered:', project);
+      console.log("📦 Project registered:", project);
     } catch (error) {
       // If project already exists, that's fine - continue with the existing registration
-      if (error.message && error.message.includes('Project already configured')) {
-        console.log('📦 Using existing project registration for:', finalProjectPath);
+      if (
+        error.message &&
+        error.message.includes("Project already configured")
+      ) {
+        console.log(
+          "📦 Using existing project registration for:",
+          finalProjectPath
+        );
         project = { path: finalProjectPath };
       } else {
         throw error;
@@ -878,18 +987,20 @@ router.post('/', validateExternalApiKey, async (req, res) => {
     // Set up writer based on streaming mode
     if (stream) {
       // Set up SSE headers for streaming
-      res.setHeader('Content-Type', 'text/event-stream');
-      res.setHeader('Cache-Control', 'no-cache');
-      res.setHeader('Connection', 'keep-alive');
-      res.setHeader('X-Accel-Buffering', 'no'); // Disable nginx buffering
+      res.setHeader("Content-Type", "text/event-stream");
+      res.setHeader("Cache-Control", "no-cache");
+      res.setHeader("Connection", "keep-alive");
+      res.setHeader("X-Accel-Buffering", "no"); // Disable nginx buffering

       writer = new SSEStreamWriter(res);

       // Send initial status
       writer.send({
-        type: 'status',
-        message: githubUrl ? 'Repository cloned and session started' : 'Session started',
-        projectPath: finalProjectPath
+        type: "status",
+        message: githubUrl
+          ? "Repository cloned and session started"
+          : "Session started",
+        projectPath: finalProjectPath,
       });
     } else {
       // Non-streaming mode: collect messages
@@ -897,33 +1008,43 @@ router.post('/', validateExternalApiKey, async (req, res) => {

       // Collect initial status message
       writer.send({
-        type: 'status',
-        message: githubUrl ? 'Repository cloned and session started' : 'Session started',
-        projectPath: finalProjectPath
+        type: "status",
+        message: githubUrl
+          ? "Repository cloned and session started"
+          : "Session started",
+        projectPath: finalProjectPath,
       });
     }

     // Start the appropriate session
-    if (provider === 'claude') {
-      console.log('🤖 Starting Claude SDK session');
-
-      await queryClaudeSDK(message.trim(), {
-        projectPath: finalProjectPath,
-        cwd: finalProjectPath,
-        sessionId: null, // New session
-        permissionMode: 'bypassPermissions' // Bypass all permissions for API calls
-      }, writer);
-
-    } else if (provider === 'cursor') {
-      console.log('🖱️ Starting Cursor CLI session');
-
-      await spawnCursor(message.trim(), {
-        projectPath: finalProjectPath,
-        cwd: finalProjectPath,
-        sessionId: null, // New session
-        model: model || undefined,
-        skipPermissions: true // Bypass permissions for Cursor
-      }, writer);
+    if (provider === "claude") {
+      console.log("🤖 Starting Claude SDK session");
+
+      await queryClaudeSDK(
+        message.trim(),
+        {
+          projectPath: finalProjectPath,
+          cwd: finalProjectPath,
+          sessionId: null, // New session
+          permissionMode: "bypassPermissions", // Bypass all permissions for API calls
+          userId: req.user.id, // Pass user ID to enable model provider configuration
+        },
+        writer
+      );
+    } else if (provider === "cursor") {
+      console.log("🖱️ Starting Cursor CLI session");
+
+      await spawnCursor(
+        message.trim(),
+        {
+          projectPath: finalProjectPath,
+          cwd: finalProjectPath,
+          sessionId: null, // New session
+          model: model || undefined,
+          skipPermissions: true, // Bypass permissions for Cursor
+        },
+        writer
+      );
     }

     // Handle GitHub branch and PR creation after successful agent completion
@@ -932,13 +1053,16 @@ router.post('/', validateExternalApiKey, async (req, res) => {

     if (createBranch || createPR) {
       try {
-        console.log('🔄 Starting GitHub branch/PR creation workflow...');
+        console.log("🔄 Starting GitHub branch/PR creation workflow...");

         // Get GitHub token
-        const tokenToUse = githubToken || githubTokensDb.getActiveGithubToken(req.user.id);
+        const tokenToUse =
+          githubToken || githubTokensDb.getActiveGithubToken(req.user.id);

         if (!tokenToUse) {
-          throw new Error('GitHub token required for branch/PR creation. Please configure a GitHub token in settings.');
+          throw new Error(
+            "GitHub token required for branch/PR creation. Please configure a GitHub token in settings."
+          );
         }

         // Initialize Octokit
@@ -947,15 +1071,19 @@ router.post('/', validateExternalApiKey, async (req, res) => {
         // Get GitHub URL - either from parameter or from git remote
         let repoUrl = githubUrl;
         if (!repoUrl) {
-          console.log('🔍 Getting GitHub URL from git remote...');
+          console.log("🔍 Getting GitHub URL from git remote...");
           try {
             repoUrl = await getGitRemoteUrl(finalProjectPath);
-            if (!repoUrl.includes('github.com')) {
-              throw new Error('Project does not have a GitHub remote configured');
+            if (!repoUrl.includes("github.com")) {
+              throw new Error(
+                "Project does not have a GitHub remote configured"
+              );
             }
             console.log(`✅ Found GitHub remote: ${repoUrl}`);
           } catch (error) {
-            throw new Error(`Failed to get GitHub remote URL: ${error.message}`);
+            throw new Error(
+              `Failed to get GitHub remote URL: ${error.message}`
+            );
           }
         }

@@ -979,33 +1107,53 @@ router.post('/', validateExternalApiKey, async (req, res) => {

         if (createBranch) {
           // Create and checkout the new branch locally
-          console.log('🔄 Creating local branch...');
-          const checkoutProcess = spawn('git', ['checkout', '-b', finalBranchName], {
-            cwd: finalProjectPath,
-            stdio: 'pipe'
-          });
+          console.log("🔄 Creating local branch...");
+          const checkoutProcess = spawn(
+            "git",
+            ["checkout", "-b", finalBranchName],
+            {
+              cwd: finalProjectPath,
+              stdio: "pipe",
+            }
+          );

           await new Promise((resolve, reject) => {
-            let stderr = '';
-            checkoutProcess.stderr.on('data', (data) => { stderr += data.toString(); });
-            checkoutProcess.on('close', (code) => {
+            let stderr = "";
+            checkoutProcess.stderr.on("data", (data) => {
+              stderr += data.toString();
+            });
+            checkoutProcess.on("close", (code) => {
              if (code === 0) {
-                console.log(`✅ Created and checked out local branch '${finalBranchName}'`);
+                console.log(
+                  `✅ Created and checked out local branch '${finalBranchName}'`
+                );
                resolve();
              } else {
                // Branch might already exist locally, try to checkout
-                if (stderr.includes('already exists')) {
-                  console.log(`ℹ️ Branch '${finalBranchName}' already exists locally, checking out...`);
-                  const checkoutExisting = spawn('git', ['checkout', finalBranchName], {
-                    cwd: finalProjectPath,
-                    stdio: 'pipe'
-                  });
-                  checkoutExisting.on('close', (checkoutCode) => {
+                if (stderr.includes("already exists")) {
+                  console.log(
+                    `ℹ️ Branch '${finalBranchName}' already exists locally, checking out...`
+                  );
+                  const checkoutExisting = spawn(
+                    "git",
+                    ["checkout", finalBranchName],
+                    {
+                      cwd: finalProjectPath,
+                      stdio: "pipe",
+                    }
+                  );
+                  checkoutExisting.on("close", (checkoutCode) => {
                    if (checkoutCode === 0) {
-                      console.log(`✅ Checked out existing branch '${finalBranchName}'`);
+                      console.log(
+                        `✅ Checked out existing branch '${finalBranchName}'`
+                      );
                      resolve();
                    } else {
-                      reject(new Error(`Failed to checkout existing branch: ${stderr}`));
+                      reject(
+                        new Error(
+                          `Failed to checkout existing branch: ${stderr}`
+                        )
+                      );
                    }
                  });
                } else {
@@ -1016,25 +1164,38 @@ router.post('/', validateExternalApiKey, async (req, res) => {
           });

           // Push the branch to remote
-          console.log('🔄 Pushing branch to remote...');
-          const pushProcess = spawn('git', ['push', '-u', 'origin', finalBranchName], {
-            cwd: finalProjectPath,
-            stdio: 'pipe'
-          });
+          console.log("🔄 Pushing branch to remote...");
+          const pushProcess = spawn(
+            "git",
+            ["push", "-u", "origin", finalBranchName],
+            {
+              cwd: finalProjectPath,
+              stdio: "pipe",
+            }
+          );

           await new Promise((resolve, reject) => {
-            let stderr = '';
-            let stdout = '';
-            pushProcess.stdout.on('data', (data) => { stdout += data.toString(); });
-            pushProcess.stderr.on('data', (data) => { stderr += data.toString(); });
-            pushProcess.on('close', (code) => {
+            let stderr = "";
+            let stdout = "";
+            pushProcess.stdout.on("data", (data) => {
+              stdout += data.toString();
+            });
+            pushProcess.stderr.on("data", (data) => {
+              stderr += data.toString();
+            });
+            pushProcess.on("close", (code) => {
              if (code === 0) {
                console.log(`✅ Pushed branch '${finalBranchName}' to remote`);
                resolve();
              } else {
                // Check if branch exists on remote but has different commits
-                if (stderr.includes('already exists') || stderr.includes('up-to-date')) {
-                  console.log(`ℹ️ Branch '${finalBranchName}' already exists on remote, using existing branch`);
+                if (
+                  stderr.includes("already exists") ||
+                  stderr.includes("up-to-date")
+                ) {
+                  console.log(
+                    `ℹ️ Branch '${finalBranchName}' already exists on remote, using existing branch`
+                  );
                  resolve();
                } else {
                  reject(new Error(`Failed to push branch: ${stderr}`));
@@ -1045,58 +1206,67 @@ router.post('/', validateExternalApiKey, async (req, res) => {

           branchInfo = {
             name: finalBranchName,
-            url: `https://github.com/${owner}/${repo}/tree/${finalBranchName}`
+            url: `https://github.com/${owner}/${repo}/tree/${finalBranchName}`,
           };
         }

         if (createPR) {
           // Get commit messages to generate PR description
-          console.log('🔄 Generating PR title and description...');
+          console.log("🔄 Generating PR title and description...");
           const commitMessages = await getCommitMessages(finalProjectPath, 5);

           // Use the first commit message as the PR title, or fallback to the agent message
-          const prTitle = commitMessages.length > 0 ? commitMessages[0] : message;
+          const prTitle =
+            commitMessages.length > 0 ? commitMessages[0] : message;

           // Generate PR body from commit messages
-          let prBody = '## Changes\n\n';
+          let prBody = "## Changes\n\n";
           if (commitMessages.length > 0) {
-            prBody += commitMessages.map(msg => `- ${msg}`).join('\n');
+            prBody += commitMessages.map((msg) => `- ${msg}`).join("\n");
           } else {
             prBody += `Agent task: ${message}`;
           }
-          prBody += '\n\n---\n*This pull request was automatically created by Claude Code UI Agent.*';
+          prBody +=
+            "\n\n---\n*This pull request was automatically created by Claude Code UI Agent.*";

           console.log(`📝 PR Title: ${prTitle}`);

           // Create the pull request
-          console.log('🔄 Creating pull request...');
-          prInfo = await createGitHubPR(octokit, owner, repo, finalBranchName, prTitle, prBody, 'main');
+          console.log("🔄 Creating pull request...");
+          prInfo = await createGitHubPR(
+            octokit,
+            owner,
+            repo,
+            finalBranchName,
+            prTitle,
+            prBody,
+            "main"
+          );
         }

         // Send branch/PR info in response
         if (stream) {
           if (branchInfo) {
             writer.send({
-              type: 'github-branch',
-              branch: branchInfo
+              type: "github-branch",
+              branch: branchInfo,
             });
           }
           if (prInfo) {
             writer.send({
-              type: 'github-pr',
-              pullRequest: prInfo
+              type: "github-pr",
+              pullRequest: prInfo,
             });
           }
         }
-
       } catch (error) {
-        console.error('❌ GitHub branch/PR creation error:', error);
+        console.error("❌ GitHub branch/PR creation error:", error);

         // Send error but don't fail the entire request
         if (stream) {
           writer.send({
-            type: 'github-error',
-            error: error.message
+            type: "github-error",
+            error: error.message,
           });
         }
         // Store error info for non-streaming response
@@ -1121,7 +1291,7 @@ router.post('/', validateExternalApiKey, async (req, res) => {
         sessionId: writer.getSessionId(),
         messages: assistantMessages,
         tokens: tokenSummary,
-        projectPath: finalProjectPath
+        projectPath: finalProjectPath,
       };

       // Add branch/PR 
info if created @@ -1143,9 +1313,8 @@ router.post('/', validateExternalApiKey, async (req, res) => { cleanupProject(finalProjectPath, sessionIdForCleanup); }, 5000); } - } catch (error) { - console.error('โŒ External session error:', error); + console.error("โŒ External session error:", error); // Clean up on error if (finalProjectPath && cleanup && githubUrl) { @@ -1157,25 +1326,25 @@ router.post('/', validateExternalApiKey, async (req, res) => { // For streaming, send error event and stop if (!writer) { // Set up SSE headers if not already done - res.setHeader('Content-Type', 'text/event-stream'); - res.setHeader('Cache-Control', 'no-cache'); - res.setHeader('Connection', 'keep-alive'); - res.setHeader('X-Accel-Buffering', 'no'); + res.setHeader("Content-Type", "text/event-stream"); + res.setHeader("Cache-Control", "no-cache"); + res.setHeader("Connection", "keep-alive"); + res.setHeader("X-Accel-Buffering", "no"); writer = new SSEStreamWriter(res); } if (!res.writableEnded) { writer.send({ - type: 'error', + type: "error", error: error.message, - message: `Failed: ${error.message}` + message: `Failed: ${error.message}`, }); writer.end(); } } else if (!res.headersSent) { res.status(500).json({ success: false, - error: error.message + error: error.message, }); } } diff --git a/server/routes/git.js b/server/routes/git.js index 0df4e44dd..d41930bf3 100755 --- a/server/routes/git.js +++ b/server/routes/git.js @@ -1,11 +1,11 @@ -import express from 'express'; -import { exec } from 'child_process'; -import { promisify } from 'util'; -import path from 'path'; -import { promises as fs } from 'fs'; -import { extractProjectDirectory } from '../projects.js'; -import { queryClaudeSDK } from '../claude-sdk.js'; -import { spawnCursor } from '../cursor-cli.js'; +import express from "express"; +import { exec } from "child_process"; +import { promisify } from "util"; +import path from "path"; +import { promises as fs } from "fs"; +import { extractProjectDirectory } from "../projects.js"; +import { queryClaudeSDK } from "../claude-sdk.js"; +import { spawnCursor } from "../cursor-cli.js"; const router = express.Router(); const execAsync = promisify(exec); @@ -15,39 +15,44 @@ async function getActualProjectPath(projectName) { try { return await extractProjectDirectory(projectName); } catch (error) { - console.error(`Error extracting project directory for ${projectName}:`, error); + console.error( + `Error extracting project directory for ${projectName}:`, + error + ); // Fallback to the old method - return projectName.replace(/-/g, '/'); + return projectName.replace(/-/g, "/"); } } // Helper function to strip git diff headers function stripDiffHeaders(diff) { - if (!diff) return ''; + if (!diff) return ""; - const lines = diff.split('\n'); + const lines = diff.split("\n"); const filteredLines = []; let startIncluding = false; for (const line of lines) { // Skip all header lines including diff --git, index, file mode, and --- / +++ file paths - if (line.startsWith('diff --git') || - line.startsWith('index ') || - line.startsWith('new file mode') || - line.startsWith('deleted file mode') || - line.startsWith('---') || - line.startsWith('+++')) { + if ( + line.startsWith("diff --git") || + line.startsWith("index ") || + line.startsWith("new file mode") || + line.startsWith("deleted file mode") || + line.startsWith("---") || + line.startsWith("+++") + ) { continue; } // Start including lines from @@ hunk headers onwards - if (line.startsWith('@@') || startIncluding) { + if (line.startsWith("@@") || 
startIncluding) { startIncluding = true; filteredLines.push(line); } } - return filteredLines.join('\n'); + return filteredLines.join("\n"); } // Helper function to validate git repository @@ -61,28 +66,35 @@ async function validateGitRepository(projectPath) { try { // Use --show-toplevel to get the root of the git repository - const { stdout: gitRoot } = await execAsync('git rev-parse --show-toplevel', { cwd: projectPath }); + const { stdout: gitRoot } = await execAsync( + "git rev-parse --show-toplevel", + { cwd: projectPath } + ); const normalizedGitRoot = path.resolve(gitRoot.trim()); const normalizedProjectPath = path.resolve(projectPath); - + // Ensure the git root matches our project path (prevent using parent git repos) if (normalizedGitRoot !== normalizedProjectPath) { - throw new Error(`Project directory is not a git repository. This directory is inside a git repository at ${normalizedGitRoot}, but git operations should be run from the repository root.`); + throw new Error( + `Project directory is not a git repository. This directory is inside a git repository at ${normalizedGitRoot}, but git operations should be run from the repository root.` + ); } } catch (error) { - if (error.message.includes('Project directory is not a git repository')) { + if (error.message.includes("Project directory is not a git repository")) { throw error; } - throw new Error('Not a git repository. This directory does not contain a .git folder. Initialize a git repository with "git init" to use source control features.'); + throw new Error( + 'Not a git repository. This directory does not contain a .git folder. Initialize a git repository with "git init" to use source control features.' + ); } } // Get git status for a project -router.get('/status', async (req, res) => { +router.get("/status", async (req, res) => { const { project } = req.query; if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: "Project name is required" }); } try { @@ -92,42 +104,50 @@ router.get('/status', async (req, res) => { await validateGitRepository(projectPath); // Get current branch - handle case where there are no commits yet - let branch = 'main'; + let branch = "main"; let hasCommits = true; try { - const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', { cwd: projectPath }); + const { stdout: branchOutput } = await execAsync( + "git rev-parse --abbrev-ref HEAD", + { cwd: projectPath } + ); branch = branchOutput.trim(); } catch (error) { // No HEAD exists - repository has no commits yet - if (error.message.includes('unknown revision') || error.message.includes('ambiguous argument')) { + if ( + error.message.includes("unknown revision") || + error.message.includes("ambiguous argument") + ) { hasCommits = false; - branch = 'main'; + branch = "main"; } else { throw error; } } // Get git status - const { stdout: statusOutput } = await execAsync('git status --porcelain', { cwd: projectPath }); + const { stdout: statusOutput } = await execAsync("git status --porcelain", { + cwd: projectPath, + }); const modified = []; const added = []; const deleted = []; const untracked = []; - statusOutput.split('\n').forEach(line => { + statusOutput.split("\n").forEach((line) => { if (!line.trim()) return; const status = line.substring(0, 2); const file = line.substring(3); - if (status === 'M ' || status === ' M' || status === 'MM') { + if (status === "M " || status === " M" || status === "MM") { modified.push(file); - } else if (status === 
'A ' || status === 'AM') { + } else if (status === "A " || status === "AM") { added.push(file); - } else if (status === 'D ' || status === ' D') { + } else if (status === "D " || status === " D") { deleted.push(file); - } else if (status === '??') { + } else if (status === "??") { untracked.push(file); } }); @@ -138,39 +158,50 @@ router.get('/status', async (req, res) => { modified, added, deleted, - untracked + untracked, }); } catch (error) { - console.error('Git status error:', error); + console.error("Git status error:", error); res.json({ - error: error.message.includes('not a git repository') || error.message.includes('Project directory is not a git repository') - ? error.message - : 'Git operation failed', - details: error.message.includes('not a git repository') || error.message.includes('Project directory is not a git repository') - ? error.message - : `Failed to get git status: ${error.message}` + error: + error.message.includes("not a git repository") || + error.message.includes("Project directory is not a git repository") + ? error.message + : "Git operation failed", + details: + error.message.includes("not a git repository") || + error.message.includes("Project directory is not a git repository") + ? error.message + : `Failed to get git status: ${error.message}`, }); } }); // Get diff for a specific file -router.get('/diff', async (req, res) => { +router.get("/diff", async (req, res) => { const { project, file } = req.query; - + if (!project || !file) { - return res.status(400).json({ error: 'Project name and file path are required' }); + return res + .status(400) + .json({ error: "Project name and file path are required" }); } try { const projectPath = await getActualProjectPath(project); - + // Validate git repository await validateGitRepository(projectPath); - + // Check if file is untracked or deleted - const { stdout: statusOutput } = await execAsync(`git status --porcelain "${file}"`, { cwd: projectPath }); - const isUntracked = statusOutput.startsWith('??'); - const isDeleted = statusOutput.trim().startsWith('D ') || statusOutput.trim().startsWith(' D'); + const { stdout: statusOutput } = await execAsync( + `git status --porcelain "${file}"`, + { cwd: projectPath } + ); + const isUntracked = statusOutput.startsWith("??"); + const isDeleted = + statusOutput.trim().startsWith("D ") || + statusOutput.trim().startsWith(" D"); let diff; if (isUntracked) { @@ -182,45 +213,58 @@ router.get('/diff', async (req, res) => { // For directories, show a simple message diff = `Directory: ${file}\n(Cannot show diff for directories)`; } else { - const fileContent = await fs.readFile(filePath, 'utf-8'); - const lines = fileContent.split('\n'); - diff = `--- /dev/null\n+++ b/${file}\n@@ -0,0 +1,${lines.length} @@\n` + - lines.map(line => `+${line}`).join('\n'); + const fileContent = await fs.readFile(filePath, "utf-8"); + const lines = fileContent.split("\n"); + diff = + `--- /dev/null\n+++ b/${file}\n@@ -0,0 +1,${lines.length} @@\n` + + lines.map((line) => `+${line}`).join("\n"); } } else if (isDeleted) { // For deleted files, show the entire file content from HEAD as deletions - const { stdout: fileContent } = await execAsync(`git show HEAD:"${file}"`, { cwd: projectPath }); - const lines = fileContent.split('\n'); - diff = `--- a/${file}\n+++ /dev/null\n@@ -1,${lines.length} +0,0 @@\n` + - lines.map(line => `-${line}`).join('\n'); + const { stdout: fileContent } = await execAsync( + `git show HEAD:"${file}"`, + { cwd: projectPath } + ); + const lines = fileContent.split("\n"); + diff = 
+ `--- a/${file}\n+++ /dev/null\n@@ -1,${lines.length} +0,0 @@\n` + + lines.map((line) => `-${line}`).join("\n"); } else { // Get diff for tracked files // First check for unstaged changes (working tree vs index) - const { stdout: unstagedDiff } = await execAsync(`git diff -- "${file}"`, { cwd: projectPath }); + const { stdout: unstagedDiff } = await execAsync( + `git diff -- "${file}"`, + { cwd: projectPath } + ); if (unstagedDiff) { // Show unstaged changes if they exist diff = stripDiffHeaders(unstagedDiff); } else { // If no unstaged changes, check for staged changes (index vs HEAD) - const { stdout: stagedDiff } = await execAsync(`git diff --cached -- "${file}"`, { cwd: projectPath }); - diff = stripDiffHeaders(stagedDiff) || ''; + const { stdout: stagedDiff } = await execAsync( + `git diff --cached -- "${file}"`, + { cwd: projectPath } + ); + diff = stripDiffHeaders(stagedDiff) || ""; } } res.json({ diff }); } catch (error) { - console.error('Git diff error:', error); + console.error("Git diff error:", error); res.json({ error: error.message }); } }); // Get file content with diff information for CodeEditor -router.get('/file-with-diff', async (req, res) => { +router.get("/file-with-diff", async (req, res) => { const { project, file } = req.query; if (!project || !file) { - return res.status(400).json({ error: 'Project name and file path are required' }); + return res + .status(400) + .json({ error: "Project name and file path are required" }); } try { @@ -230,16 +274,24 @@ router.get('/file-with-diff', async (req, res) => { await validateGitRepository(projectPath); // Check file status - const { stdout: statusOutput } = await execAsync(`git status --porcelain "${file}"`, { cwd: projectPath }); - const isUntracked = statusOutput.startsWith('??'); - const isDeleted = statusOutput.trim().startsWith('D ') || statusOutput.trim().startsWith(' D'); + const { stdout: statusOutput } = await execAsync( + `git status --porcelain "${file}"`, + { cwd: projectPath } + ); + const isUntracked = statusOutput.startsWith("??"); + const isDeleted = + statusOutput.trim().startsWith("D ") || + statusOutput.trim().startsWith(" D"); - let currentContent = ''; - let oldContent = ''; + let currentContent = ""; + let oldContent = ""; if (isDeleted) { // For deleted files, get content from HEAD - const { stdout: headContent } = await execAsync(`git show HEAD:"${file}"`, { cwd: projectPath }); + const { stdout: headContent } = await execAsync( + `git show HEAD:"${file}"`, + { cwd: projectPath } + ); oldContent = headContent; currentContent = headContent; // Show the deleted content in editor } else { @@ -249,19 +301,24 @@ router.get('/file-with-diff', async (req, res) => { if (stats.isDirectory()) { // Cannot show content for directories - return res.status(400).json({ error: 'Cannot show diff for directories' }); + return res + .status(400) + .json({ error: "Cannot show diff for directories" }); } - currentContent = await fs.readFile(filePath, 'utf-8'); + currentContent = await fs.readFile(filePath, "utf-8"); if (!isUntracked) { // Get the old content from HEAD for tracked files try { - const { stdout: headContent } = await execAsync(`git show HEAD:"${file}"`, { cwd: projectPath }); + const { stdout: headContent } = await execAsync( + `git show HEAD:"${file}"`, + { cwd: projectPath } + ); oldContent = headContent; } catch (error) { // File might be newly added to git (staged but not committed) - oldContent = ''; + oldContent = ""; } } } @@ -270,20 +327,20 @@ router.get('/file-with-diff', async (req, res) => { 
currentContent, oldContent, isDeleted, - isUntracked + isUntracked, }); } catch (error) { - console.error('Git file-with-diff error:', error); + console.error("Git file-with-diff error:", error); res.json({ error: error.message }); } }); // Create initial commit -router.post('/initial-commit', async (req, res) => { +router.post("/initial-commit", async (req, res) => { const { project } = req.body; if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: "Project name is required" }); } try { @@ -294,27 +351,37 @@ router.post('/initial-commit', async (req, res) => { // Check if there are already commits try { - await execAsync('git rev-parse HEAD', { cwd: projectPath }); - return res.status(400).json({ error: 'Repository already has commits. Use regular commit instead.' }); + await execAsync("git rev-parse HEAD", { cwd: projectPath }); + return res + .status(400) + .json({ + error: "Repository already has commits. Use regular commit instead.", + }); } catch (error) { // No HEAD - this is good, we can create initial commit } // Add all files - await execAsync('git add .', { cwd: projectPath }); + await execAsync("git add .", { cwd: projectPath }); // Create initial commit - const { stdout } = await execAsync('git commit -m "Initial commit"', { cwd: projectPath }); + const { stdout } = await execAsync('git commit -m "Initial commit"', { + cwd: projectPath, + }); - res.json({ success: true, output: stdout, message: 'Initial commit created successfully' }); + res.json({ + success: true, + output: stdout, + message: "Initial commit created successfully", + }); } catch (error) { - console.error('Git initial commit error:', error); + console.error("Git initial commit error:", error); // Handle the case where there's nothing to commit - if (error.message.includes('nothing to commit')) { + if (error.message.includes("nothing to commit")) { return res.status(400).json({ - error: 'Nothing to commit', - details: 'No files found in the repository. Add some files first.' + error: "Nothing to commit", + details: "No files found in the repository. 
Add some files first.", }); } @@ -323,149 +390,162 @@ router.post('/initial-commit', async (req, res) => { }); // Commit changes -router.post('/commit', async (req, res) => { +router.post("/commit", async (req, res) => { const { project, message, files } = req.body; - + if (!project || !message || !files || files.length === 0) { - return res.status(400).json({ error: 'Project name, commit message, and files are required' }); + return res + .status(400) + .json({ error: "Project name, commit message, and files are required" }); } try { const projectPath = await getActualProjectPath(project); - + // Validate git repository await validateGitRepository(projectPath); - + // Stage selected files for (const file of files) { await execAsync(`git add "${file}"`, { cwd: projectPath }); } - + // Commit with message - const { stdout } = await execAsync(`git commit -m "${message.replace(/"/g, '\\"')}"`, { cwd: projectPath }); - + const { stdout } = await execAsync( + `git commit -m "${message.replace(/"/g, '\\"')}"`, + { cwd: projectPath } + ); + res.json({ success: true, output: stdout }); } catch (error) { - console.error('Git commit error:', error); + console.error("Git commit error:", error); res.status(500).json({ error: error.message }); } }); // Get list of branches -router.get('/branches', async (req, res) => { +router.get("/branches", async (req, res) => { const { project } = req.query; - + if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: "Project name is required" }); } try { const projectPath = await getActualProjectPath(project); - + // Validate git repository await validateGitRepository(projectPath); - + // Get all branches - const { stdout } = await execAsync('git branch -a', { cwd: projectPath }); - + const { stdout } = await execAsync("git branch -a", { cwd: projectPath }); + // Parse branches const branches = stdout - .split('\n') - .map(branch => branch.trim()) - .filter(branch => branch && !branch.includes('->')) // Remove empty lines and HEAD pointer - .map(branch => { + .split("\n") + .map((branch) => branch.trim()) + .filter((branch) => branch && !branch.includes("->")) // Remove empty lines and HEAD pointer + .map((branch) => { // Remove asterisk from current branch - if (branch.startsWith('* ')) { + if (branch.startsWith("* ")) { return branch.substring(2); } // Remove remotes/ prefix - if (branch.startsWith('remotes/origin/')) { + if (branch.startsWith("remotes/origin/")) { return branch.substring(15); } return branch; }) .filter((branch, index, self) => self.indexOf(branch) === index); // Remove duplicates - + res.json({ branches }); } catch (error) { - console.error('Git branches error:', error); + console.error("Git branches error:", error); res.json({ error: error.message }); } }); // Checkout branch -router.post('/checkout', async (req, res) => { +router.post("/checkout", async (req, res) => { const { project, branch } = req.body; - + if (!project || !branch) { - return res.status(400).json({ error: 'Project name and branch are required' }); + return res + .status(400) + .json({ error: "Project name and branch are required" }); } try { const projectPath = await getActualProjectPath(project); - + // Checkout the branch - const { stdout } = await execAsync(`git checkout "${branch}"`, { cwd: projectPath }); - + const { stdout } = await execAsync(`git checkout "${branch}"`, { + cwd: projectPath, + }); + res.json({ success: true, output: stdout }); } catch (error) { - console.error('Git checkout 
error:', error); + console.error("Git checkout error:", error); res.status(500).json({ error: error.message }); } }); // Create new branch -router.post('/create-branch', async (req, res) => { +router.post("/create-branch", async (req, res) => { const { project, branch } = req.body; - + if (!project || !branch) { - return res.status(400).json({ error: 'Project name and branch name are required' }); + return res + .status(400) + .json({ error: "Project name and branch name are required" }); } try { const projectPath = await getActualProjectPath(project); - + // Create and checkout new branch - const { stdout } = await execAsync(`git checkout -b "${branch}"`, { cwd: projectPath }); - + const { stdout } = await execAsync(`git checkout -b "${branch}"`, { + cwd: projectPath, + }); + res.json({ success: true, output: stdout }); } catch (error) { - console.error('Git create branch error:', error); + console.error("Git create branch error:", error); res.status(500).json({ error: error.message }); } }); // Get recent commits -router.get('/commits', async (req, res) => { +router.get("/commits", async (req, res) => { const { project, limit = 10 } = req.query; - + if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: "Project name is required" }); } try { const projectPath = await getActualProjectPath(project); - + // Get commit log with stats const { stdout } = await execAsync( `git log --pretty=format:'%H|%an|%ae|%ad|%s' --date=relative -n ${limit}`, { cwd: projectPath } ); - + const commits = stdout - .split('\n') - .filter(line => line.trim()) - .map(line => { - const [hash, author, email, date, ...messageParts] = line.split('|'); + .split("\n") + .filter((line) => line.trim()) + .map((line) => { + const [hash, author, email, date, ...messageParts] = line.split("|"); return { hash, author, email, date, - message: messageParts.join('|') + message: messageParts.join("|"), }; }); - + // Get stats for each commit for (const commit of commits) { try { @@ -473,67 +553,71 @@ router.get('/commits', async (req, res) => { `git show --stat --format='' ${commit.hash}`, { cwd: projectPath } ); - commit.stats = stats.trim().split('\n').pop(); // Get the summary line + commit.stats = stats.trim().split("\n").pop(); // Get the summary line } catch (error) { - commit.stats = ''; + commit.stats = ""; } } - + res.json({ commits }); } catch (error) { - console.error('Git commits error:', error); + console.error("Git commits error:", error); res.json({ error: error.message }); } }); // Get diff for a specific commit -router.get('/commit-diff', async (req, res) => { +router.get("/commit-diff", async (req, res) => { const { project, commit } = req.query; - + if (!project || !commit) { - return res.status(400).json({ error: 'Project name and commit hash are required' }); + return res + .status(400) + .json({ error: "Project name and commit hash are required" }); } try { const projectPath = await getActualProjectPath(project); - + // Get diff for the commit - const { stdout } = await execAsync( - `git show ${commit}`, - { cwd: projectPath } - ); - + const { stdout } = await execAsync(`git show ${commit}`, { + cwd: projectPath, + }); + res.json({ diff: stdout }); } catch (error) { - console.error('Git commit diff error:', error); + console.error("Git commit diff error:", error); res.json({ error: error.message }); } }); // Generate commit message based on staged changes using AI -router.post('/generate-commit-message', async (req, res) => { - const { 
project, files, provider = 'claude' } = req.body; +router.post("/generate-commit-message", async (req, res) => { + const { project, files, provider = "claude" } = req.body; if (!project || !files || files.length === 0) { - return res.status(400).json({ error: 'Project name and files are required' }); + return res + .status(400) + .json({ error: "Project name and files are required" }); } // Validate provider - if (!['claude', 'cursor'].includes(provider)) { - return res.status(400).json({ error: 'provider must be "claude" or "cursor"' }); + if (!["claude", "cursor"].includes(provider)) { + return res + .status(400) + .json({ error: 'provider must be "claude" or "cursor"' }); } try { const projectPath = await getActualProjectPath(project); // Get diff for selected files - let diffContext = ''; + let diffContext = ""; for (const file of files) { try { - const { stdout } = await execAsync( - `git diff HEAD -- "${file}"`, - { cwd: projectPath } - ); + const { stdout } = await execAsync(`git diff HEAD -- "${file}"`, { + cwd: projectPath, + }); if (stdout) { diffContext += `\n--- ${file} ---\n${stdout}`; } @@ -551,8 +635,11 @@ router.post('/generate-commit-message', async (req, res) => { const stats = await fs.stat(filePath); if (!stats.isDirectory()) { - const content = await fs.readFile(filePath, 'utf-8'); - diffContext += `\n--- ${file} (new file) ---\n${content.substring(0, 1000)}\n`; + const content = await fs.readFile(filePath, "utf-8"); + diffContext += `\n--- ${file} (new file) ---\n${content.substring( + 0, + 1000 + )}\n`; } else { diffContext += `\n--- ${file} (new directory) ---\n`; } @@ -563,11 +650,17 @@ router.post('/generate-commit-message', async (req, res) => { } // Generate commit message using AI - const message = await generateCommitMessageWithAI(files, diffContext, provider, projectPath); + const message = await generateCommitMessageWithAI( + files, + diffContext, + provider, + projectPath, + req.user.id + ); res.json({ message }); } catch (error) { - console.error('Generate commit message error:', error); + console.error("Generate commit message error:", error); res.status(500).json({ error: error.message }); } }); @@ -578,9 +671,16 @@ router.post('/generate-commit-message', async (req, res) => { * @param {string} diffContext - Git diff content * @param {string} provider - 'claude' or 'cursor' * @param {string} projectPath - Project directory path + * @param {number} userId - User ID for model provider configuration * @returns {Promise} Generated commit message */ -async function generateCommitMessageWithAI(files, diffContext, provider, projectPath) { +async function generateCommitMessageWithAI( + files, + diffContext, + provider, + projectPath, + userId +) { // Create the prompt const prompt = `Generate a conventional commit message for these changes. @@ -594,7 +694,7 @@ REQUIREMENTS: - Return ONLY the commit message (no markdown, explanations, or code blocks) FILES CHANGED: -${files.map(f => `- ${f}`).join('\n')} +${files.map((f) => `- ${f}`).join("\n")} DIFFS: ${diffContext.substring(0, 4000)} @@ -603,75 +703,94 @@ Generate the commit message:`; try { // Create a simple writer that collects the response - let responseText = ''; + let responseText = ""; const writer = { send: (data) => { try { - const parsed = typeof data === 'string' ? JSON.parse(data) : data; - console.log('๐Ÿ” Writer received message type:', parsed.type); + const parsed = typeof data === "string" ? 
JSON.parse(data) : data; + console.log("๐Ÿ” Writer received message type:", parsed.type); // Handle different message formats from Claude SDK and Cursor CLI // Claude SDK sends: {type: 'claude-response', data: {message: {content: [...]}}} - if (parsed.type === 'claude-response' && parsed.data) { + if (parsed.type === "claude-response" && parsed.data) { const message = parsed.data.message || parsed.data; - console.log('๐Ÿ“ฆ Claude response message:', JSON.stringify(message, null, 2).substring(0, 500)); + console.log( + "๐Ÿ“ฆ Claude response message:", + JSON.stringify(message, null, 2).substring(0, 500) + ); if (message.content && Array.isArray(message.content)) { // Extract text from content array for (const item of message.content) { - if (item.type === 'text' && item.text) { - console.log('โœ… Extracted text chunk:', item.text.substring(0, 100)); + if (item.type === "text" && item.text) { + console.log( + "โœ… Extracted text chunk:", + item.text.substring(0, 100) + ); responseText += item.text; } } } } // Cursor CLI sends: {type: 'cursor-output', output: '...'} - else if (parsed.type === 'cursor-output' && parsed.output) { - console.log('โœ… Cursor output:', parsed.output.substring(0, 100)); + else if (parsed.type === "cursor-output" && parsed.output) { + console.log("โœ… Cursor output:", parsed.output.substring(0, 100)); responseText += parsed.output; } // Also handle direct text messages - else if (parsed.type === 'text' && parsed.text) { - console.log('โœ… Direct text:', parsed.text.substring(0, 100)); + else if (parsed.type === "text" && parsed.text) { + console.log("โœ… Direct text:", parsed.text.substring(0, 100)); responseText += parsed.text; } } catch (e) { // Ignore parse errors - console.error('Error parsing writer data:', e); + console.error("Error parsing writer data:", e); } }, setSessionId: () => {}, // No-op for this use case }; - console.log('๐Ÿš€ Calling AI agent with provider:', provider); - console.log('๐Ÿ“ Prompt length:', prompt.length); + console.log("๐Ÿš€ Calling AI agent with provider:", provider); + console.log("๐Ÿ“ Prompt length:", prompt.length); // Call the appropriate agent - if (provider === 'claude') { - await queryClaudeSDK(prompt, { - cwd: projectPath, - permissionMode: 'bypassPermissions', - model: 'sonnet' - }, writer); - } else if (provider === 'cursor') { - await spawnCursor(prompt, { - cwd: projectPath, - skipPermissions: true - }, writer); + if (provider === "claude") { + await queryClaudeSDK( + prompt, + { + cwd: projectPath, + permissionMode: "bypassPermissions", + model: "sonnet", + userId: userId, // Pass user ID to enable model provider configuration + }, + writer + ); + } else if (provider === "cursor") { + await spawnCursor( + prompt, + { + cwd: projectPath, + skipPermissions: true, + }, + writer + ); } - console.log('๐Ÿ“Š Total response text collected:', responseText.length, 'characters'); - console.log('๐Ÿ“„ Response preview:', responseText.substring(0, 200)); + console.log( + "๐Ÿ“Š Total response text collected:", + responseText.length, + "characters" + ); + console.log("๐Ÿ“„ Response preview:", responseText.substring(0, 200)); // Clean up the response const cleanedMessage = cleanCommitMessage(responseText); - console.log('๐Ÿงน Cleaned message:', cleanedMessage.substring(0, 200)); + console.log("๐Ÿงน Cleaned message:", cleanedMessage.substring(0, 200)); - return cleanedMessage || 'chore: update files'; + return cleanedMessage || "chore: update files"; } catch (error) { - console.error('Error generating commit message with AI:', error); 
+ console.error("Error generating commit message with AI:", error); // Fallback to simple message - return `chore: update ${files.length} file${files.length !== 1 ? 's' : ''}`; + return `chore: update ${files.length} file${files.length !== 1 ? "s" : ""}`; } } @@ -682,28 +801,30 @@ Generate the commit message:`; */ function cleanCommitMessage(text) { if (!text || !text.trim()) { - return ''; + return ""; } let cleaned = text.trim(); // Remove markdown code blocks - cleaned = cleaned.replace(/```[a-z]*\n/g, ''); - cleaned = cleaned.replace(/```/g, ''); + cleaned = cleaned.replace(/```[a-z]*\n/g, ""); + cleaned = cleaned.replace(/```/g, ""); // Remove markdown headers - cleaned = cleaned.replace(/^#+\s*/gm, ''); + cleaned = cleaned.replace(/^#+\s*/gm, ""); // Remove leading/trailing quotes - cleaned = cleaned.replace(/^["']|["']$/g, ''); + cleaned = cleaned.replace(/^["']|["']$/g, ""); // If there are multiple lines, take everything (subject + body) // Just clean up extra blank lines - cleaned = cleaned.replace(/\n{3,}/g, '\n\n'); + cleaned = cleaned.replace(/\n{3,}/g, "\n\n"); // Remove any explanatory text before the actual commit message // Look for conventional commit pattern and start from there - const conventionalCommitMatch = cleaned.match(/(feat|fix|docs|style|refactor|perf|test|build|ci|chore)(\(.+?\))?:.+/s); + const conventionalCommitMatch = cleaned.match( + /(feat|fix|docs|style|refactor|perf|test|build|ci|chore)(\(.+?\))?:.+/s + ); if (conventionalCommitMatch) { cleaned = cleaned.substring(cleaned.indexOf(conventionalCommitMatch[0])); } @@ -712,11 +833,11 @@ function cleanCommitMessage(text) { } // Get remote status (ahead/behind commits with smart remote detection) -router.get('/remote-status', async (req, res) => { +router.get("/remote-status", async (req, res) => { const { project } = req.query; - + if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: "Project name is required" }); } try { @@ -724,37 +845,46 @@ router.get('/remote-status', async (req, res) => { await validateGitRepository(projectPath); // Get current branch - const { stdout: currentBranch } = await execAsync('git rev-parse --abbrev-ref HEAD', { cwd: projectPath }); + const { stdout: currentBranch } = await execAsync( + "git rev-parse --abbrev-ref HEAD", + { cwd: projectPath } + ); const branch = currentBranch.trim(); // Check if there's a remote tracking branch (smart detection) let trackingBranch; let remoteName; try { - const { stdout } = await execAsync(`git rev-parse --abbrev-ref ${branch}@{upstream}`, { cwd: projectPath }); + const { stdout } = await execAsync( + `git rev-parse --abbrev-ref ${branch}@{upstream}`, + { cwd: projectPath } + ); trackingBranch = stdout.trim(); - remoteName = trackingBranch.split('/')[0]; // Extract remote name (e.g., "origin/main" -> "origin") + remoteName = trackingBranch.split("/")[0]; // Extract remote name (e.g., "origin/main" -> "origin") } catch (error) { // No upstream branch configured - but check if we have remotes let hasRemote = false; let remoteName = null; try { - const { stdout } = await execAsync('git remote', { cwd: projectPath }); - const remotes = stdout.trim().split('\n').filter(r => r.trim()); + const { stdout } = await execAsync("git remote", { cwd: projectPath }); + const remotes = stdout + .trim() + .split("\n") + .filter((r) => r.trim()); if (remotes.length > 0) { hasRemote = true; - remoteName = remotes.includes('origin') ? 
'origin' : remotes[0]; + remoteName = remotes.includes("origin") ? "origin" : remotes[0]; } } catch (remoteError) { // No remotes configured } - - return res.json({ + + return res.json({ hasRemote, hasUpstream: false, branch, remoteName, - message: 'No remote tracking branch configured' + message: "No remote tracking branch configured", }); } @@ -763,8 +893,8 @@ router.get('/remote-status', async (req, res) => { `git rev-list --count --left-right ${trackingBranch}...HEAD`, { cwd: projectPath } ); - - const [behind, ahead] = countOutput.trim().split('\t').map(Number); + + const [behind, ahead] = countOutput.trim().split("\t").map(Number); res.json({ hasRemote: true, @@ -774,20 +904,20 @@ router.get('/remote-status', async (req, res) => { remoteName, ahead: ahead || 0, behind: behind || 0, - isUpToDate: ahead === 0 && behind === 0 + isUpToDate: ahead === 0 && behind === 0, }); } catch (error) { - console.error('Git remote status error:', error); + console.error("Git remote status error:", error); res.json({ error: error.message }); } }); // Fetch from remote (using smart remote detection) -router.post('/fetch', async (req, res) => { +router.post("/fetch", async (req, res) => { const { project } = req.body; - + if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: "Project name is required" }); } try { @@ -795,40 +925,54 @@ router.post('/fetch', async (req, res) => { await validateGitRepository(projectPath); // Get current branch and its upstream remote - const { stdout: currentBranch } = await execAsync('git rev-parse --abbrev-ref HEAD', { cwd: projectPath }); + const { stdout: currentBranch } = await execAsync( + "git rev-parse --abbrev-ref HEAD", + { cwd: projectPath } + ); const branch = currentBranch.trim(); - let remoteName = 'origin'; // fallback + let remoteName = "origin"; // fallback try { - const { stdout } = await execAsync(`git rev-parse --abbrev-ref ${branch}@{upstream}`, { cwd: projectPath }); - remoteName = stdout.trim().split('/')[0]; // Extract remote name + const { stdout } = await execAsync( + `git rev-parse --abbrev-ref ${branch}@{upstream}`, + { cwd: projectPath } + ); + remoteName = stdout.trim().split("/")[0]; // Extract remote name } catch (error) { // No upstream, try to fetch from origin anyway - console.log('No upstream configured, using origin as fallback'); + console.log("No upstream configured, using origin as fallback"); } - const { stdout } = await execAsync(`git fetch ${remoteName}`, { cwd: projectPath }); - - res.json({ success: true, output: stdout || 'Fetch completed successfully', remoteName }); + const { stdout } = await execAsync(`git fetch ${remoteName}`, { + cwd: projectPath, + }); + + res.json({ + success: true, + output: stdout || "Fetch completed successfully", + remoteName, + }); } catch (error) { - console.error('Git fetch error:', error); - res.status(500).json({ - error: 'Fetch failed', - details: error.message.includes('Could not resolve hostname') - ? 'Unable to connect to remote repository. Check your internet connection.' - : error.message.includes('fatal: \'origin\' does not appear to be a git repository') - ? 'No remote repository configured. Add a remote with: git remote add origin ' - : error.message + console.error("Git fetch error:", error); + res.status(500).json({ + error: "Fetch failed", + details: error.message.includes("Could not resolve hostname") + ? "Unable to connect to remote repository. Check your internet connection." 
+ : error.message.includes( + "fatal: 'origin' does not appear to be a git repository" + ) + ? "No remote repository configured. Add a remote with: git remote add origin " + : error.message, }); } }); // Pull from remote (fetch + merge using smart remote detection) -router.post('/pull', async (req, res) => { +router.post("/pull", async (req, res) => { const { project } = req.body; - + if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: "Project name is required" }); } try { @@ -836,66 +980,85 @@ router.post('/pull', async (req, res) => { await validateGitRepository(projectPath); // Get current branch and its upstream remote - const { stdout: currentBranch } = await execAsync('git rev-parse --abbrev-ref HEAD', { cwd: projectPath }); + const { stdout: currentBranch } = await execAsync( + "git rev-parse --abbrev-ref HEAD", + { cwd: projectPath } + ); const branch = currentBranch.trim(); - let remoteName = 'origin'; // fallback + let remoteName = "origin"; // fallback let remoteBranch = branch; // fallback try { - const { stdout } = await execAsync(`git rev-parse --abbrev-ref ${branch}@{upstream}`, { cwd: projectPath }); + const { stdout } = await execAsync( + `git rev-parse --abbrev-ref ${branch}@{upstream}`, + { cwd: projectPath } + ); const tracking = stdout.trim(); - remoteName = tracking.split('/')[0]; // Extract remote name - remoteBranch = tracking.split('/').slice(1).join('/'); // Extract branch name + remoteName = tracking.split("/")[0]; // Extract remote name + remoteBranch = tracking.split("/").slice(1).join("/"); // Extract branch name } catch (error) { // No upstream, use fallback - console.log('No upstream configured, using origin/branch as fallback'); + console.log("No upstream configured, using origin/branch as fallback"); } - const { stdout } = await execAsync(`git pull ${remoteName} ${remoteBranch}`, { cwd: projectPath }); - - res.json({ - success: true, - output: stdout || 'Pull completed successfully', + const { stdout } = await execAsync( + `git pull ${remoteName} ${remoteBranch}`, + { cwd: projectPath } + ); + + res.json({ + success: true, + output: stdout || "Pull completed successfully", remoteName, - remoteBranch + remoteBranch, }); } catch (error) { - console.error('Git pull error:', error); - + console.error("Git pull error:", error); + // Enhanced error handling for common pull scenarios - let errorMessage = 'Pull failed'; + let errorMessage = "Pull failed"; let details = error.message; - - if (error.message.includes('CONFLICT')) { - errorMessage = 'Merge conflicts detected'; - details = 'Pull created merge conflicts. Please resolve conflicts manually in the editor, then commit the changes.'; - } else if (error.message.includes('Please commit your changes or stash them')) { - errorMessage = 'Uncommitted changes detected'; - details = 'Please commit or stash your local changes before pulling.'; - } else if (error.message.includes('Could not resolve hostname')) { - errorMessage = 'Network error'; - details = 'Unable to connect to remote repository. Check your internet connection.'; - } else if (error.message.includes('fatal: \'origin\' does not appear to be a git repository')) { - errorMessage = 'Remote not configured'; - details = 'No remote repository configured. Add a remote with: git remote add origin '; - } else if (error.message.includes('diverged')) { - errorMessage = 'Branches have diverged'; - details = 'Your local branch and remote branch have diverged. 
Consider fetching first to review changes.'; + + if (error.message.includes("CONFLICT")) { + errorMessage = "Merge conflicts detected"; + details = + "Pull created merge conflicts. Please resolve conflicts manually in the editor, then commit the changes."; + } else if ( + error.message.includes("Please commit your changes or stash them") + ) { + errorMessage = "Uncommitted changes detected"; + details = "Please commit or stash your local changes before pulling."; + } else if (error.message.includes("Could not resolve hostname")) { + errorMessage = "Network error"; + details = + "Unable to connect to remote repository. Check your internet connection."; + } else if ( + error.message.includes( + "fatal: 'origin' does not appear to be a git repository" + ) + ) { + errorMessage = "Remote not configured"; + details = + "No remote repository configured. Add a remote with: git remote add origin "; + } else if (error.message.includes("diverged")) { + errorMessage = "Branches have diverged"; + details = + "Your local branch and remote branch have diverged. Consider fetching first to review changes."; } - - res.status(500).json({ - error: errorMessage, - details: details + + res.status(500).json({ + error: errorMessage, + details: details, }); } }); // Push commits to remote repository -router.post('/push', async (req, res) => { +router.post("/push", async (req, res) => { const { project } = req.body; - + if (!project) { - return res.status(400).json({ error: 'Project name is required' }); + return res.status(400).json({ error: "Project name is required" }); } try { @@ -903,69 +1066,89 @@ router.post('/push', async (req, res) => { await validateGitRepository(projectPath); // Get current branch and its upstream remote - const { stdout: currentBranch } = await execAsync('git rev-parse --abbrev-ref HEAD', { cwd: projectPath }); + const { stdout: currentBranch } = await execAsync( + "git rev-parse --abbrev-ref HEAD", + { cwd: projectPath } + ); const branch = currentBranch.trim(); - let remoteName = 'origin'; // fallback + let remoteName = "origin"; // fallback let remoteBranch = branch; // fallback try { - const { stdout } = await execAsync(`git rev-parse --abbrev-ref ${branch}@{upstream}`, { cwd: projectPath }); + const { stdout } = await execAsync( + `git rev-parse --abbrev-ref ${branch}@{upstream}`, + { cwd: projectPath } + ); const tracking = stdout.trim(); - remoteName = tracking.split('/')[0]; // Extract remote name - remoteBranch = tracking.split('/').slice(1).join('/'); // Extract branch name + remoteName = tracking.split("/")[0]; // Extract remote name + remoteBranch = tracking.split("/").slice(1).join("/"); // Extract branch name } catch (error) { // No upstream, use fallback - console.log('No upstream configured, using origin/branch as fallback'); + console.log("No upstream configured, using origin/branch as fallback"); } - const { stdout } = await execAsync(`git push ${remoteName} ${remoteBranch}`, { cwd: projectPath }); - - res.json({ - success: true, - output: stdout || 'Push completed successfully', + const { stdout } = await execAsync( + `git push ${remoteName} ${remoteBranch}`, + { cwd: projectPath } + ); + + res.json({ + success: true, + output: stdout || "Push completed successfully", remoteName, - remoteBranch + remoteBranch, }); } catch (error) { - console.error('Git push error:', error); - + console.error("Git push error:", error); + // Enhanced error handling for common push scenarios - let errorMessage = 'Push failed'; + let errorMessage = "Push failed"; let details = 
error.message; - - if (error.message.includes('rejected')) { - errorMessage = 'Push rejected'; - details = 'The remote has newer commits. Pull first to merge changes before pushing.'; - } else if (error.message.includes('non-fast-forward')) { - errorMessage = 'Non-fast-forward push'; - details = 'Your branch is behind the remote. Pull the latest changes first.'; - } else if (error.message.includes('Could not resolve hostname')) { - errorMessage = 'Network error'; - details = 'Unable to connect to remote repository. Check your internet connection.'; - } else if (error.message.includes('fatal: \'origin\' does not appear to be a git repository')) { - errorMessage = 'Remote not configured'; - details = 'No remote repository configured. Add a remote with: git remote add origin '; - } else if (error.message.includes('Permission denied')) { - errorMessage = 'Authentication failed'; - details = 'Permission denied. Check your credentials or SSH keys.'; - } else if (error.message.includes('no upstream branch')) { - errorMessage = 'No upstream branch'; - details = 'No upstream branch configured. Use: git push --set-upstream origin '; + + if (error.message.includes("rejected")) { + errorMessage = "Push rejected"; + details = + "The remote has newer commits. Pull first to merge changes before pushing."; + } else if (error.message.includes("non-fast-forward")) { + errorMessage = "Non-fast-forward push"; + details = + "Your branch is behind the remote. Pull the latest changes first."; + } else if (error.message.includes("Could not resolve hostname")) { + errorMessage = "Network error"; + details = + "Unable to connect to remote repository. Check your internet connection."; + } else if ( + error.message.includes( + "fatal: 'origin' does not appear to be a git repository" + ) + ) { + errorMessage = "Remote not configured"; + details = + "No remote repository configured. Add a remote with: git remote add origin "; + } else if (error.message.includes("Permission denied")) { + errorMessage = "Authentication failed"; + details = "Permission denied. Check your credentials or SSH keys."; + } else if (error.message.includes("no upstream branch")) { + errorMessage = "No upstream branch"; + details = + "No upstream branch configured. Use: git push --set-upstream origin "; } - - res.status(500).json({ - error: errorMessage, - details: details + + res.status(500).json({ + error: errorMessage, + details: details, }); } }); // Publish branch to remote (set upstream and push) -router.post('/publish', async (req, res) => { +router.post("/publish", async (req, res) => { const { project, branch } = req.body; - + if (!project || !branch) { - return res.status(400).json({ error: 'Project name and branch are required' }); + return res + .status(400) + .json({ error: "Project name and branch are required" }); } try { @@ -973,75 +1156,94 @@ router.post('/publish', async (req, res) => { await validateGitRepository(projectPath); // Get current branch to verify it matches the requested branch - const { stdout: currentBranch } = await execAsync('git rev-parse --abbrev-ref HEAD', { cwd: projectPath }); + const { stdout: currentBranch } = await execAsync( + "git rev-parse --abbrev-ref HEAD", + { cwd: projectPath } + ); const currentBranchName = currentBranch.trim(); - + if (currentBranchName !== branch) { - return res.status(400).json({ - error: `Branch mismatch. Current branch is ${currentBranchName}, but trying to publish ${branch}` + return res.status(400).json({ + error: `Branch mismatch. 
Current branch is ${currentBranchName}, but trying to publish ${branch}`, }); } // Check if remote exists - let remoteName = 'origin'; + let remoteName = "origin"; try { - const { stdout } = await execAsync('git remote', { cwd: projectPath }); - const remotes = stdout.trim().split('\n').filter(r => r.trim()); + const { stdout } = await execAsync("git remote", { cwd: projectPath }); + const remotes = stdout + .trim() + .split("\n") + .filter((r) => r.trim()); if (remotes.length === 0) { - return res.status(400).json({ - error: 'No remote repository configured. Add a remote with: git remote add origin ' + return res.status(400).json({ + error: + "No remote repository configured. Add a remote with: git remote add origin ", }); } - remoteName = remotes.includes('origin') ? 'origin' : remotes[0]; + remoteName = remotes.includes("origin") ? "origin" : remotes[0]; } catch (error) { - return res.status(400).json({ - error: 'No remote repository configured. Add a remote with: git remote add origin ' + return res.status(400).json({ + error: + "No remote repository configured. Add a remote with: git remote add origin ", }); } // Publish the branch (set upstream and push) - const { stdout } = await execAsync(`git push --set-upstream ${remoteName} ${branch}`, { cwd: projectPath }); - - res.json({ - success: true, - output: stdout || 'Branch published successfully', + const { stdout } = await execAsync( + `git push --set-upstream ${remoteName} ${branch}`, + { cwd: projectPath } + ); + + res.json({ + success: true, + output: stdout || "Branch published successfully", remoteName, - branch + branch, }); } catch (error) { - console.error('Git publish error:', error); - + console.error("Git publish error:", error); + // Enhanced error handling for common publish scenarios - let errorMessage = 'Publish failed'; + let errorMessage = "Publish failed"; let details = error.message; - - if (error.message.includes('rejected')) { - errorMessage = 'Publish rejected'; - details = 'The remote branch already exists and has different commits. Use push instead.'; - } else if (error.message.includes('Could not resolve hostname')) { - errorMessage = 'Network error'; - details = 'Unable to connect to remote repository. Check your internet connection.'; - } else if (error.message.includes('Permission denied')) { - errorMessage = 'Authentication failed'; - details = 'Permission denied. Check your credentials or SSH keys.'; - } else if (error.message.includes('fatal:') && error.message.includes('does not appear to be a git repository')) { - errorMessage = 'Remote not configured'; - details = 'Remote repository not properly configured. Check your remote URL.'; + + if (error.message.includes("rejected")) { + errorMessage = "Publish rejected"; + details = + "The remote branch already exists and has different commits. Use push instead."; + } else if (error.message.includes("Could not resolve hostname")) { + errorMessage = "Network error"; + details = + "Unable to connect to remote repository. Check your internet connection."; + } else if (error.message.includes("Permission denied")) { + errorMessage = "Authentication failed"; + details = "Permission denied. Check your credentials or SSH keys."; + } else if ( + error.message.includes("fatal:") && + error.message.includes("does not appear to be a git repository") + ) { + errorMessage = "Remote not configured"; + details = + "Remote repository not properly configured. 
Check your remote URL."; } - - res.status(500).json({ - error: errorMessage, - details: details + + res.status(500).json({ + error: errorMessage, + details: details, }); } }); // Discard changes for a specific file -router.post('/discard', async (req, res) => { +router.post("/discard", async (req, res) => { const { project, file } = req.body; - + if (!project || !file) { - return res.status(400).json({ error: 'Project name and file path are required' }); + return res + .status(400) + .json({ error: "Project name and file path are required" }); } try { @@ -1049,15 +1251,20 @@ router.post('/discard', async (req, res) => { await validateGitRepository(projectPath); // Check file status to determine correct discard command - const { stdout: statusOutput } = await execAsync(`git status --porcelain "${file}"`, { cwd: projectPath }); - + const { stdout: statusOutput } = await execAsync( + `git status --porcelain "${file}"`, + { cwd: projectPath } + ); + if (!statusOutput.trim()) { - return res.status(400).json({ error: 'No changes to discard for this file' }); + return res + .status(400) + .json({ error: "No changes to discard for this file" }); } const status = statusOutput.substring(0, 2); - if (status === '??') { + if (status === "??") { // Untracked file or directory - delete it const filePath = path.join(projectPath, file); const stats = await fs.stat(filePath); @@ -1067,27 +1274,29 @@ router.post('/discard', async (req, res) => { } else { await fs.unlink(filePath); } - } else if (status.includes('M') || status.includes('D')) { + } else if (status.includes("M") || status.includes("D")) { // Modified or deleted file - restore from HEAD await execAsync(`git restore "${file}"`, { cwd: projectPath }); - } else if (status.includes('A')) { + } else if (status.includes("A")) { // Added file - unstage it await execAsync(`git reset HEAD "${file}"`, { cwd: projectPath }); } - + res.json({ success: true, message: `Changes discarded for ${file}` }); } catch (error) { - console.error('Git discard error:', error); + console.error("Git discard error:", error); res.status(500).json({ error: error.message }); } }); // Delete untracked file -router.post('/delete-untracked', async (req, res) => { +router.post("/delete-untracked", async (req, res) => { const { project, file } = req.body; - + if (!project || !file) { - return res.status(400).json({ error: 'Project name and file path are required' }); + return res + .status(400) + .json({ error: "Project name and file path are required" }); } try { @@ -1095,16 +1304,25 @@ router.post('/delete-untracked', async (req, res) => { await validateGitRepository(projectPath); // Check if file is actually untracked - const { stdout: statusOutput } = await execAsync(`git status --porcelain "${file}"`, { cwd: projectPath }); - + const { stdout: statusOutput } = await execAsync( + `git status --porcelain "${file}"`, + { cwd: projectPath } + ); + if (!statusOutput.trim()) { - return res.status(400).json({ error: 'File is not untracked or does not exist' }); + return res + .status(400) + .json({ error: "File is not untracked or does not exist" }); } const status = statusOutput.substring(0, 2); - - if (status !== '??') { - return res.status(400).json({ error: 'File is not untracked. Use discard for tracked files.' }); + + if (status !== "??") { + return res + .status(400) + .json({ + error: "File is not untracked. 
Use discard for tracked files.", + }); } // Delete the untracked file or directory @@ -1114,15 +1332,21 @@ router.post('/delete-untracked', async (req, res) => { if (stats.isDirectory()) { // Use rm with recursive option for directories await fs.rm(filePath, { recursive: true, force: true }); - res.json({ success: true, message: `Untracked directory ${file} deleted successfully` }); + res.json({ + success: true, + message: `Untracked directory ${file} deleted successfully`, + }); } else { await fs.unlink(filePath); - res.json({ success: true, message: `Untracked file ${file} deleted successfully` }); + res.json({ + success: true, + message: `Untracked file ${file} deleted successfully`, + }); } } catch (error) { - console.error('Git delete untracked error:', error); + console.error("Git delete untracked error:", error); res.status(500).json({ error: error.message }); } }); -export default router; \ No newline at end of file +export default router; diff --git a/server/routes/settings.js b/server/routes/settings.js index 609f86491..771a24222 100644 --- a/server/routes/settings.js +++ b/server/routes/settings.js @@ -1,5 +1,10 @@ -import express from 'express'; -import { apiKeysDb, credentialsDb, modelProvidersDb } from '../database/db.js'; +import express from "express"; +import { apiKeysDb, credentialsDb, modelProvidersDb } from "../database/db.js"; +import { + updateClaudeSettingsForProvider, + readClaudeSettings, + SETTINGS_PATH, +} from "../utils/claude-settings.js"; const router = express.Router(); @@ -8,43 +13,43 @@ const router = express.Router(); // =============================== // Get all API keys for the authenticated user -router.get('/api-keys', async (req, res) => { +router.get("/api-keys", async (req, res) => { try { const apiKeys = apiKeysDb.getApiKeys(req.user.id); // Don't send the full API key in the list for security - const sanitizedKeys = apiKeys.map(key => ({ + const sanitizedKeys = apiKeys.map((key) => ({ ...key, - api_key: key.api_key.substring(0, 10) + '...' 
+ api_key: key.api_key.substring(0, 10) + "...", })); res.json({ apiKeys: sanitizedKeys }); } catch (error) { - console.error('Error fetching API keys:', error); - res.status(500).json({ error: 'Failed to fetch API keys' }); + console.error("Error fetching API keys:", error); + res.status(500).json({ error: "Failed to fetch API keys" }); } }); // Create a new API key -router.post('/api-keys', async (req, res) => { +router.post("/api-keys", async (req, res) => { try { const { keyName } = req.body; if (!keyName || !keyName.trim()) { - return res.status(400).json({ error: 'Key name is required' }); + return res.status(400).json({ error: "Key name is required" }); } const result = apiKeysDb.createApiKey(req.user.id, keyName.trim()); res.json({ success: true, - apiKey: result + apiKey: result, }); } catch (error) { - console.error('Error creating API key:', error); - res.status(500).json({ error: 'Failed to create API key' }); + console.error("Error creating API key:", error); + res.status(500).json({ error: "Failed to create API key" }); } }); // Delete an API key -router.delete('/api-keys/:keyId', async (req, res) => { +router.delete("/api-keys/:keyId", async (req, res) => { try { const { keyId } = req.params; const success = apiKeysDb.deleteApiKey(req.user.id, parseInt(keyId)); @@ -52,34 +57,38 @@ router.delete('/api-keys/:keyId', async (req, res) => { if (success) { res.json({ success: true }); } else { - res.status(404).json({ error: 'API key not found' }); + res.status(404).json({ error: "API key not found" }); } } catch (error) { - console.error('Error deleting API key:', error); - res.status(500).json({ error: 'Failed to delete API key' }); + console.error("Error deleting API key:", error); + res.status(500).json({ error: "Failed to delete API key" }); } }); // Toggle API key active status -router.patch('/api-keys/:keyId/toggle', async (req, res) => { +router.patch("/api-keys/:keyId/toggle", async (req, res) => { try { const { keyId } = req.params; const { isActive } = req.body; - if (typeof isActive !== 'boolean') { - return res.status(400).json({ error: 'isActive must be a boolean' }); + if (typeof isActive !== "boolean") { + return res.status(400).json({ error: "isActive must be a boolean" }); } - const success = apiKeysDb.toggleApiKey(req.user.id, parseInt(keyId), isActive); + const success = apiKeysDb.toggleApiKey( + req.user.id, + parseInt(keyId), + isActive + ); if (success) { res.json({ success: true }); } else { - res.status(404).json({ error: 'API key not found' }); + res.status(404).json({ error: "API key not found" }); } } catch (error) { - console.error('Error toggling API key:', error); - res.status(500).json({ error: 'Failed to toggle API key' }); + console.error("Error toggling API key:", error); + res.status(500).json({ error: "Failed to toggle API key" }); } }); @@ -88,33 +97,34 @@ router.patch('/api-keys/:keyId/toggle', async (req, res) => { // =============================== // Get all credentials for the authenticated user (optionally filtered by type) -router.get('/credentials', async (req, res) => { +router.get("/credentials", async (req, res) => { try { const { type } = req.query; const credentials = credentialsDb.getCredentials(req.user.id, type || null); // Don't send the actual credential values for security res.json({ credentials }); } catch (error) { - console.error('Error fetching credentials:', error); - res.status(500).json({ error: 'Failed to fetch credentials' }); + console.error("Error fetching credentials:", error); + res.status(500).json({ error: 
"Failed to fetch credentials" }); } }); // Create a new credential -router.post('/credentials', async (req, res) => { +router.post("/credentials", async (req, res) => { try { - const { credentialName, credentialType, credentialValue, description } = req.body; + const { credentialName, credentialType, credentialValue, description } = + req.body; if (!credentialName || !credentialName.trim()) { - return res.status(400).json({ error: 'Credential name is required' }); + return res.status(400).json({ error: "Credential name is required" }); } if (!credentialType || !credentialType.trim()) { - return res.status(400).json({ error: 'Credential type is required' }); + return res.status(400).json({ error: "Credential type is required" }); } if (!credentialValue || !credentialValue.trim()) { - return res.status(400).json({ error: 'Credential value is required' }); + return res.status(400).json({ error: "Credential value is required" }); } const result = credentialsDb.createCredential( @@ -127,51 +137,58 @@ router.post('/credentials', async (req, res) => { res.json({ success: true, - credential: result + credential: result, }); } catch (error) { - console.error('Error creating credential:', error); - res.status(500).json({ error: 'Failed to create credential' }); + console.error("Error creating credential:", error); + res.status(500).json({ error: "Failed to create credential" }); } }); // Delete a credential -router.delete('/credentials/:credentialId', async (req, res) => { +router.delete("/credentials/:credentialId", async (req, res) => { try { const { credentialId } = req.params; - const success = credentialsDb.deleteCredential(req.user.id, parseInt(credentialId)); + const success = credentialsDb.deleteCredential( + req.user.id, + parseInt(credentialId) + ); if (success) { res.json({ success: true }); } else { - res.status(404).json({ error: 'Credential not found' }); + res.status(404).json({ error: "Credential not found" }); } } catch (error) { - console.error('Error deleting credential:', error); - res.status(500).json({ error: 'Failed to delete credential' }); + console.error("Error deleting credential:", error); + res.status(500).json({ error: "Failed to delete credential" }); } }); // Toggle credential active status -router.patch('/credentials/:credentialId/toggle', async (req, res) => { +router.patch("/credentials/:credentialId/toggle", async (req, res) => { try { const { credentialId } = req.params; const { isActive } = req.body; - if (typeof isActive !== 'boolean') { - return res.status(400).json({ error: 'isActive must be a boolean' }); + if (typeof isActive !== "boolean") { + return res.status(400).json({ error: "isActive must be a boolean" }); } - const success = credentialsDb.toggleCredential(req.user.id, parseInt(credentialId), isActive); + const success = credentialsDb.toggleCredential( + req.user.id, + parseInt(credentialId), + isActive + ); if (success) { res.json({ success: true }); } else { - res.status(404).json({ error: 'Credential not found' }); + res.status(404).json({ error: "Credential not found" }); } } catch (error) { - console.error('Error toggling credential:', error); - res.status(500).json({ error: 'Failed to toggle credential' }); + console.error("Error toggling credential:", error); + res.status(500).json({ error: "Failed to toggle credential" }); } }); @@ -180,28 +197,30 @@ router.patch('/credentials/:credentialId/toggle', async (req, res) => { // =============================== // List all configured model providers (API replacement) -router.get('/model-providers', 
async (req, res) => {
+router.get("/model-providers", async (req, res) => {
   try {
     const providers = modelProvidersDb.getProviders(req.user.id);
     const active = modelProvidersDb.getActiveProvider(req.user.id);

     res.json({
       providers,
-      activeProviderId: active?.id || null
+      activeProviderId: active?.id || null,
     });
   } catch (error) {
-    console.error('Error fetching model providers:', error);
-    res.status(500).json({ error: 'Failed to fetch model providers' });
+    console.error("Error fetching model providers:", error);
+    res.status(500).json({ error: "Failed to fetch model providers" });
   }
 });

 // Create a new provider entry
-router.post('/model-providers', async (req, res) => {
+router.post("/model-providers", async (req, res) => {
   try {
     const { providerName, apiBaseUrl, apiKey, modelId, description } = req.body;

     if (!providerName?.trim() || !apiBaseUrl?.trim() || !apiKey?.trim()) {
-      return res.status(400).json({ error: 'Provider name, API base URL, and API key are required' });
+      return res.status(400).json({
+        error: "Provider name, API base URL, and API key are required",
+      });
     }

     const result = modelProvidersDb.createProvider(
@@ -213,44 +232,118 @@ router.post('/model-providers', async (req, res) => {
       description?.trim() || null
     );

+    // If this is the first provider (automatically activated), update settings.json
+    if (result.isActive) {
+      try {
+        const activeProvider = modelProvidersDb.getActiveProvider(req.user.id);
+        await updateClaudeSettingsForProvider(activeProvider);
+        console.log(
+          `✅ Created and activated first provider: ${providerName} in settings.json`
+        );
+      } catch (settingsError) {
+        console.error(
+          "⚠️ Failed to update settings.json for new provider:",
+          settingsError
+        );
+      }
+    }
+
     res.json({
       success: true,
       provider: result
     });
   } catch (error) {
-    console.error('Error creating model provider:', error);
-    res.status(500).json({ error: 'Failed to create model provider' });
+    console.error("Error creating model provider:", error);
+    res.status(500).json({ error: "Failed to create model provider" });
   }
 });

 // Set active provider
-router.patch('/model-providers/:providerId/activate', async (req, res) => {
+router.patch("/model-providers/:providerId/activate", async (req, res) => {
   try {
     const { providerId } = req.params;
-    const success = modelProvidersDb.setActiveProvider(req.user.id, parseInt(providerId));
+    const success = modelProvidersDb.setActiveProvider(
+      req.user.id,
+      parseInt(providerId)
+    );

     if (success) {
+      // Get the activated provider details
+      const activeProvider = modelProvidersDb.getActiveProvider(req.user.id);
+
+      // Update ~/.claude/settings.json with the new provider configuration
+      try {
+        await updateClaudeSettingsForProvider(activeProvider);
+        console.log(
+          `✅ Updated ~/.claude/settings.json for provider: ${activeProvider.provider_name}`
+        );
+      } catch (settingsError) {
+        console.error(
+          "⚠️ Failed to update settings.json (provider is still active in database):",
+          settingsError
+        );
+        // Don't fail the request if settings.json update fails
+        // The provider is still active in the database and will work for API calls
+      }
+
       res.json({ success: true });
     } else {
-      res.status(404).json({ error: 'Provider not found' });
+      res.status(404).json({ error: "Provider not found" });
     }
   } catch (error) {
-    console.error('Error activating model provider:', error);
-    res.status(500).json({ error: 'Failed to activate model provider' });
+    console.error("Error activating model provider:", error);
+    res.status(500).json({ error: "Failed to activate model provider" });
   }
 });

 // Delete provider
-router.delete('/model-providers/:providerId', async (req, res) => {
+router.delete("/model-providers/:providerId", async (req, res) => {
   try {
     const { providerId } = req.params;
-    const success = modelProvidersDb.deleteProvider(req.user.id, parseInt(providerId));
+    const success = modelProvidersDb.deleteProvider(
+      req.user.id,
+      parseInt(providerId)
+    );

     if (success) {
+      // After deletion, check if there's a new active provider
+      const activeProvider = modelProvidersDb.getActiveProvider(req.user.id);
+
+      // Update ~/.claude/settings.json
+      try {
+        await updateClaudeSettingsForProvider(activeProvider);
+        if (activeProvider) {
+          console.log(
+            `✅ Switched to provider: ${activeProvider.provider_name} in settings.json`
+          );
+        } else {
+          console.log("✅ Cleared custom provider settings from settings.json");
+        }
+      } catch (settingsError) {
+        console.error(
+          "⚠️ Failed to update settings.json after deletion:",
+          settingsError
+        );
+      }
+
       res.json({ success: true });
     } else {
-      res.status(404).json({ error: 'Provider not found' });
+      res.status(404).json({ error: "Provider not found" });
     }
   } catch (error) {
-    console.error('Error deleting model provider:', error);
-    res.status(500).json({ error: 'Failed to delete model provider' });
+    console.error("Error deleting model provider:", error);
+    res.status(500).json({ error: "Failed to delete model provider" });
+  }
+});
+
+// Debug endpoint: Get current Claude settings.json content
+router.get("/claude-settings", async (req, res) => {
+  try {
+    const settings = await readClaudeSettings();
+    res.json({
+      path: SETTINGS_PATH,
+      settings: settings,
+    });
+  } catch (error) {
+    console.error("Error reading Claude settings:", error);
+    res.status(500).json({ error: "Failed to read Claude settings" });
   }
 });
diff --git a/server/utils/claude-settings.js b/server/utils/claude-settings.js
new file mode 100644
index 000000000..1e4376f74
--- /dev/null
+++ b/server/utils/claude-settings.js
@@ -0,0 +1,190 @@
+/**
+ * Utility functions for managing ~/.claude/settings.json
+ * This file is used by Claude Code CLI for environment configuration
+ */
+
+import { promises as fs } from "fs";
+import path from "path";
+import os from "os";
+
+const SETTINGS_PATH = path.join(os.homedir(), ".claude", "settings.json");
+
+/**
+ * Reads the current settings.json file
+ * @returns {Promise} Settings object
+ */
+async function readClaudeSettings() {
+  try {
+    // Ensure directory exists
+    const settingsDir = path.dirname(SETTINGS_PATH);
+    await fs.mkdir(settingsDir, { recursive: true });
+
+    // Try to read existing file
+    try {
+      const content = await fs.readFile(SETTINGS_PATH, "utf-8");
+      return JSON.parse(content);
+    } catch (error) {
+      // File doesn't exist or is invalid, return default structure
+      if (error.code === "ENOENT") {
+        console.log("📝 No existing settings.json found, will create new one");
+        return {
+          env: {},
+          permissions: {
+            allow: [],
+            deny: [],
+          },
+        };
+      }
+      throw error;
+    }
+  } catch (error) {
+    console.error("Error reading Claude settings:", error);
+    throw error;
+  }
+}
+
+/**
+ * Writes settings to ~/.claude/settings.json
+ * @param {Object} settings - Settings object to write
+ */
+async function writeClaudeSettings(settings) {
+  try {
+    // Ensure directory exists
+    const settingsDir = path.dirname(SETTINGS_PATH);
+    await fs.mkdir(settingsDir, { recursive: true });
+
+    // Write with pretty formatting
+    await fs.writeFile(
+      SETTINGS_PATH,
+      JSON.stringify(settings, null, 2),
+      "utf-8"
+    );
+    console.log(`✅ Updated ${SETTINGS_PATH}`);
+  } catch (error) {
+    console.error("Error writing Claude settings:", error);
+    throw error;
+  }
+}
+
+/**
+ * Updates environment variables in settings.json for a model provider
+ * @param {Object} provider - Provider object with api_base_url, api_key, model_id
+ * @returns {Promise}
+ */
+async function updateClaudeSettingsForProvider(provider) {
+  try {
+    console.log(
+      `🔄 Updating ~/.claude/settings.json for provider: ${
+        provider?.provider_name || "default"
+      }`
+    );
+
+    const settings = await readClaudeSettings();
+
+    if (!provider) {
+      // No provider - use default Anthropic settings or clear custom settings
+      console.log(
+        "ℹ️ No provider specified, using default Anthropic configuration"
+      );
+
+      // Remove custom base URL if it exists
+      if (settings.env?.ANTHROPIC_BASE_URL) {
+        delete settings.env.ANTHROPIC_BASE_URL;
+      }
+      if (settings.env?.ANTHROPIC_AUTH_TOKEN) {
+        delete settings.env.ANTHROPIC_AUTH_TOKEN;
+      }
+      // Keep ANTHROPIC_API_KEY if it exists (user might have set it manually)
+
+      await writeClaudeSettings(settings);
+      return;
+    }
+
+    // Ensure env object exists
+    if (!settings.env) {
+      settings.env = {};
+    }
+
+    // Update API key
+    if (provider.api_key) {
+      // Use ANTHROPIC_AUTH_TOKEN for LLM gateways (per official docs)
+      // Claude Code SDK checks both ANTHROPIC_AUTH_TOKEN and ANTHROPIC_API_KEY
+      settings.env.ANTHROPIC_AUTH_TOKEN = provider.api_key;
+      settings.env.ANTHROPIC_API_KEY = provider.api_key; // For backwards compatibility
+      console.log("🔑 Updated API keys in settings.json");
+    }
+
+    // Update base URL
+    if (provider.api_base_url) {
+      settings.env.ANTHROPIC_BASE_URL = provider.api_base_url;
+      console.log(`🌐 Updated ANTHROPIC_BASE_URL to: ${provider.api_base_url}`);
+    } else {
+      // Remove custom base URL if provider doesn't specify one
+      if (settings.env.ANTHROPIC_BASE_URL) {
+        delete settings.env.ANTHROPIC_BASE_URL;
+        console.log("🌐 Removed custom ANTHROPIC_BASE_URL");
+      }
+    }
+
+    // Update model ID if specified
+    if (provider.model_id) {
+      settings.env.ANTHROPIC_MODEL = provider.model_id;
+      // Also set the small/fast model to the same value for consistency
+      settings.env.ANTHROPIC_SMALL_FAST_MODEL = provider.model_id;
+      console.log(`🤖 Updated model to: ${provider.model_id}`);
+    } else {
+      // Remove model override if not specified
+      if (settings.env.ANTHROPIC_MODEL) {
+        delete settings.env.ANTHROPIC_MODEL;
+      }
+      if (settings.env.ANTHROPIC_SMALL_FAST_MODEL) {
+        delete settings.env.ANTHROPIC_SMALL_FAST_MODEL;
+      }
+      console.log("🤖 Removed model override, will use default");
+    }
+
+    // Write updated settings
+    await writeClaudeSettings(settings);
+
+    console.log(
+      `✅ Successfully updated settings.json for provider: ${provider.provider_name}`
+    );
+  } catch (error) {
+    console.error("Error updating Claude settings for provider:", error);
+    throw error;
+  }
+}
+
+/**
+ * Backs up the current settings.json file
+ * @returns {Promise} Path to backup file
+ */
+async function backupClaudeSettings() {
+  try {
+    const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
+    const backupPath = `${SETTINGS_PATH}.backup-${timestamp}`;
+
+    try {
+      await fs.copyFile(SETTINGS_PATH, backupPath);
+      console.log(`📦 Created backup: ${backupPath}`);
+      return backupPath;
+    } catch (error) {
+      if (error.code === "ENOENT") {
+        console.log("ℹ️ No existing settings.json to backup");
+        return null;
+      }
+      throw error;
+    }
+  } catch (error) {
+    console.error("Error backing up Claude settings:", error);
+    throw error;
+  }
+}
+
+export {
+  readClaudeSettings,
+  writeClaudeSettings,
+  updateClaudeSettingsForProvider,
+  backupClaudeSettings,
+  SETTINGS_PATH,
+};

From 32cc80d0cb8894a723f2e71f01fae20eae43c2fc Mon Sep 17 00:00:00 2001
From: tata
Date: Wed, 19 Nov 2025 18:12:58 +0800
Subject: [PATCH 3/3] fix: prevent environment variable leakage between requests

---
 server/claude-sdk.js            | 22 ++++++++++++++++++++--
 server/utils/claude-settings.js | 12 +++++++++---
 2 files changed, 29 insertions(+), 5 deletions(-)

diff --git a/server/claude-sdk.js b/server/claude-sdk.js
index a0c82605d..d7365b547 100644
--- a/server/claude-sdk.js
+++ b/server/claude-sdk.js
@@ -401,6 +401,10 @@ async function queryClaudeSDK(command, options = {}, ws) {
   let tempImagePaths = [];
   let tempDir = null;

+  // Capture original environment variables to restore later
+  const originalAuthToken = process.env.ANTHROPIC_AUTH_TOKEN;
+  const originalBaseUrl = process.env.ANTHROPIC_BASE_URL;
+
   try {
     // Apply user-selected model provider overrides if available
     if (runtimeOptions.userId) {
@@ -411,8 +415,8 @@ async function queryClaudeSDK(command, options = {}, ws) {
         if (provider) {
           console.log(`🔧 Applying model provider: ${provider.provider_name}`);
           if (provider.api_key) {
-            process.env.ANTHROPIC_API_KEY = provider.api_key;
-            console.log(`🔑 Set ANTHROPIC_API_KEY from provider`);
+            process.env.ANTHROPIC_AUTH_TOKEN = provider.api_key;
+            console.log(`🔑 Set ANTHROPIC_AUTH_TOKEN from provider`);
           }
           if (provider.api_base_url) {
             process.env.ANTHROPIC_BASE_URL = provider.api_base_url;
             console.log(
               `🌐 Set ANTHROPIC_BASE_URL to: ${provider.api_base_url}`
             );
           }
+          // Model selection precedence: user-provided > provider > default "sonnet"
           if (provider.model_id && !runtimeOptions.model) {
             runtimeOptions.model = provider.model_id;
             console.log(`🤖 Set model to: ${provider.model_id}`);
@@ -569,6 +574,19 @@ async function queryClaudeSDK(command, options = {}, ws) {
     );

     throw error;
+  } finally {
+    // Restore original environment variables to prevent cross-request leakage
+    if (originalAuthToken === undefined) {
+      delete process.env.ANTHROPIC_AUTH_TOKEN;
+    } else {
+      process.env.ANTHROPIC_AUTH_TOKEN = originalAuthToken;
+    }
+
+    if (originalBaseUrl === undefined) {
+      delete process.env.ANTHROPIC_BASE_URL;
+    } else {
+      process.env.ANTHROPIC_BASE_URL = originalBaseUrl;
+    }
   }
 }

diff --git a/server/utils/claude-settings.js b/server/utils/claude-settings.js
index 1e4376f74..567ae997b 100644
--- a/server/utils/claude-settings.js
+++ b/server/utils/claude-settings.js
@@ -94,6 +94,13 @@ async function updateClaudeSettingsForProvider(provider) {
       if (settings.env?.ANTHROPIC_AUTH_TOKEN) {
         delete settings.env.ANTHROPIC_AUTH_TOKEN;
       }
+      // Remove model overrides to prevent stale configurations
+      if (settings.env?.ANTHROPIC_MODEL) {
+        delete settings.env.ANTHROPIC_MODEL;
+      }
+      if (settings.env?.ANTHROPIC_SMALL_FAST_MODEL) {
+        delete settings.env.ANTHROPIC_SMALL_FAST_MODEL;
+      }
       // Keep ANTHROPIC_API_KEY if it exists (user might have set it manually)

       await writeClaudeSettings(settings);
@@ -108,10 +115,9 @@ async function updateClaudeSettingsForProvider(provider) {
     // Update API key
     if (provider.api_key) {
       // Use ANTHROPIC_AUTH_TOKEN for LLM gateways (per official docs)
-      // Claude Code SDK checks both ANTHROPIC_AUTH_TOKEN and ANTHROPIC_API_KEY
+      // Do NOT overwrite ANTHROPIC_API_KEY to preserve manually-set keys
       settings.env.ANTHROPIC_AUTH_TOKEN = provider.api_key;
-      settings.env.ANTHROPIC_API_KEY = provider.api_key; // For backwards compatibility
-      console.log("🔑 Updated API keys in settings.json");
+      console.log("🔑 Updated ANTHROPIC_AUTH_TOKEN in settings.json");
     }

     // Update base URL