Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ FROM node:18-alpine

WORKDIR /app

RUN apk add --no-cache libc6-compat
RUN apk add --no-cache libc6-compat openssl
RUN apk update

# Install pnpm
Expand All @@ -27,4 +27,4 @@ RUN pnpm run build
EXPOSE 3000

# Start the application
CMD ["sh", "./start.sh"]
CMD ["sh", "./start.sh"]
9 changes: 9 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,15 @@
"prisma": {
"seed": "tsx prisma/seed.ts"
},
"pnpm": {
"onlyBuiltDependencies": [
"@prisma/client",
"@prisma/engines",
"esbuild",
"prisma",
"sqlite3"
]
},
"dependencies": {
"@ai-sdk/amazon-bedrock": "^0.0.17",
"@ai-sdk/anthropic": "^0.0.46",
Expand Down
107 changes: 98 additions & 9 deletions src/app/[...openai]/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,33 @@ const openaiClient = new OpenAI({
// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

function transformCursorMessages(messages: any[]): any[] {
  const toSdkShape = (msg: any): any => {
    // Cursor emits tool results with role "tool"; rewrite them as assistant
    // messages while preserving the tool linkage fields (tool_call_id, name).
    if (msg.role === "tool") {
      return {
        role: "assistant",
        content: msg.content || "",
        tool_call_id: msg.tool_call_id,
        name: msg.name,
      };
    }

    // Assistant turns that requested tool calls: keep only the expected
    // fields, normalizing a missing/null content to an empty string.
    if (msg.role === "assistant" && msg.tool_calls) {
      return {
        role: "assistant",
        content: msg.content || "",
        tool_calls: msg.tool_calls,
      };
    }

    // System, user, and plain assistant messages pass through untouched.
    return msg;
  };

  return messages.map(toSdkShape);
}

async function getAIModelClient(provider: string, model: string) {
switch (provider.toLowerCase()) {
case "openai":
Expand Down Expand Up @@ -66,11 +93,18 @@ export async function POST(
{ params }: { params: { openai: string[] } },
) {
const endpoint = params.openai.join("/");
console.log("POST request received:", {
endpoint,
url: request.url,
headers: Object.fromEntries(request.headers),
});

if (endpoint !== "chat/completions" && endpoint !== "v1/chat/completions") {
return NextResponse.json({ error: "Not found", endpoint }, { status: 404 });
}

const body = await request.json();
console.log("Request body:", JSON.stringify(body, null, 2));
const { messages, model: cursorModel, stream = false, ...otherParams } = body;

try {
Expand All @@ -96,7 +130,8 @@ export async function POST(

const aiModel = await getAIModelClient(provider, model);

let modifiedMessages = messages;
// Transform Cursor messages to AI SDK format
let modifiedMessages = transformCursorMessages(messages);

if (provider.toLowerCase() === "anthropiccached") {
const hasPotentialContext = messages.some(
Expand Down Expand Up @@ -237,6 +272,9 @@ export async function POST(
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache",
Connection: "keep-alive",
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "GET, POST, OPTIONS",
"Access-Control-Allow-Headers": "Content-Type, Authorization, x-api-key, ngrok-skip-browser-warning",
},
});
}
Expand Down Expand Up @@ -267,7 +305,13 @@ export async function POST(
};
await insertLog(logEntry);

return NextResponse.json(result);
return NextResponse.json(result, {
headers: {
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "GET, POST, OPTIONS",
"Access-Control-Allow-Headers": "Content-Type, Authorization, x-api-key, ngrok-skip-browser-warning",
},
});
} catch (error) {
console.error("Error in chat completion:", error);
const errorMessage = error instanceof Error ? error.message : String(error);
Expand All @@ -293,28 +337,48 @@ export async function GET(
{ params }: { params: { openai: string[] } },
) {
const endpoint = params.openai.join("/");

// Existing 'models' endpoint
if (endpoint === "models") {
console.log("GET request received:", {
endpoint,
url: request.url,
headers: Object.fromEntries(request.headers),
});

// Handle both 'models' and 'v1/models' endpoints
if (endpoint === "models" || endpoint === "v1/models") {
const logEntry = {
method: "GET",
url: "/api/v1/models",
url: `/api/${endpoint}`,
headers: Object.fromEntries(request.headers),
body: {},
response: {},
timestamp: new Date(),
metadata: {}, // Add empty metadata object to satisfy Prisma schema
};

try {
const models = await openaiClient.models.list();
logEntry.response = models;
await insertLog(logEntry);
return NextResponse.json(models);
return NextResponse.json(models, {
headers: {
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "GET, POST, OPTIONS",
"Access-Control-Allow-Headers": "Content-Type, Authorization, x-api-key, ngrok-skip-browser-warning",
},
});
} catch (error) {
console.error("Error fetching models:", error);
logEntry.response = { error: String(error) };
logEntry.metadata = { error: String(error) }; // Add error to metadata
await insertLog(logEntry);
return NextResponse.json({ error: String(error) }, { status: 500 });
return NextResponse.json({ error: String(error) }, {
status: 500,
headers: {
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "GET, POST, OPTIONS",
"Access-Control-Allow-Headers": "Content-Type, Authorization, x-api-key, ngrok-skip-browser-warning",
},
});
}
}

Expand All @@ -333,7 +397,9 @@ export async function GET(
return testGroq();
}

return NextResponse.json({ error: "Not found" }, { status: 404 });
// Log any unmatched endpoints
console.log("Unmatched GET endpoint:", endpoint);
return NextResponse.json({ error: "Not found", endpoint }, { status: 404 });
}

async function testOpenAI() {
Expand Down Expand Up @@ -436,3 +502,26 @@ async function testGroq() {
return NextResponse.json({ error: String(error) }, { status: 500 });
}
}

// Handle OPTIONS requests for CORS preflight.
export async function OPTIONS(
  request: NextRequest,
  { params }: { params: { openai: string[] } },
) {
  const endpoint = params.openai.join("/");

  // Redact credential-bearing headers before logging so API keys and
  // bearer tokens never land in server logs.
  const sensitiveHeaders = new Set(["authorization", "x-api-key", "cookie"]);
  const safeHeaders = Object.fromEntries(
    Array.from(request.headers).map(([key, value]) => [
      key,
      sensitiveHeaders.has(key.toLowerCase()) ? "[REDACTED]" : value,
    ]),
  );
  console.log("OPTIONS request received:", {
    endpoint,
    url: request.url,
    headers: safeHeaders,
  });

  // 204 preflight response: allow any origin and let browsers cache the
  // preflight result for one day (86400 seconds).
  return new NextResponse(null, {
    status: 204,
    headers: {
      "Access-Control-Allow-Origin": "*",
      "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
      "Access-Control-Allow-Headers": "Content-Type, Authorization, x-api-key, ngrok-skip-browser-warning",
      "Access-Control-Max-Age": "86400",
    },
  });
}
2 changes: 1 addition & 1 deletion src/app/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ export default function Home() {
setLogs(logsData as unknown as Log[]); // Type assertion
setStats(statsData);
setAIConfigurations(configData as AIConfiguration[]); // Type assertion
const defaultConfig = configData.find((config) => config.isDefault);
const defaultConfig = configData?.find((config) => config.isDefault);
setSelectedConfig(defaultConfig ? defaultConfig.name : "");

setLoading(false);
Expand Down
14 changes: 7 additions & 7 deletions src/components/LogsList.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -42,16 +42,16 @@ const LogsListComponent: React.FC<LogsListProps> = ({
return (
<div className="space-y-4">
{logs.map((log) => {
const totalTokens = log.metadata.totalTokens || 0;
const totalCost = log.metadata.totalCost || 0;
const totalTokens = log.metadata?.totalTokens || 0;
const totalCost = log.metadata?.totalCost || 0;
const firstUserMessage =
log.body.messages.find((m) => m.role === "user" && !("name" in m))
log.body?.messages?.find((m) => m.role === "user" && !("name" in m))
?.content || "No message available";
const truncatedMessage =
firstUserMessage.slice(0, 100) +
(firstUserMessage.length > 100 ? "..." : "");
const isSelected = selectedLogId === log.id;
const providerColorClass = getProviderColor(log.metadata.provider);
const providerColorClass = getProviderColor(log.metadata?.provider || "other");

return (
<Card
Expand All @@ -73,12 +73,12 @@ const LogsListComponent: React.FC<LogsListProps> = ({
<div className="flex items-center space-x-2">
<Badge
variant="outline"
className={getProviderColor(log.metadata.provider)}
className={getProviderColor(log.metadata?.provider || "other")}
>
{log.metadata.provider}
{log.metadata?.provider || "unknown"}
</Badge>
<span className="text-sm font-medium">
{log.metadata.model}
{log.metadata?.model || "unknown"}
</span>
</div>
</div>
Expand Down
26 changes: 26 additions & 0 deletions src/middleware.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import { NextResponse } from 'next/server'
import type { NextRequest } from 'next/server'

// Request-tracing middleware. Credential-bearing headers are redacted so
// secrets (API keys, bearer tokens, session cookies) never reach the logs.
export function middleware(request: NextRequest) {
  const sensitiveHeaders = new Set(["authorization", "x-api-key", "cookie"]);
  const safeHeaders = Object.fromEntries(
    Array.from(request.headers).map(([key, value]) => [
      key,
      sensitiveHeaders.has(key.toLowerCase()) ? "[REDACTED]" : value,
    ]),
  );

  console.log("=== INCOMING REQUEST ===");
  console.log("Method:", request.method);
  console.log("URL:", request.url);
  console.log("Path:", new URL(request.url).pathname);
  console.log("Headers:", safeHeaders);
  console.log("=======================");

  // Pass the request through unchanged; this middleware only observes.
  return NextResponse.next();
}

// Configure which paths the middleware runs on.
// NOTE(review): this matcher still includes ALL API and page routes
// (only Next.js static assets and the favicon are excluded), so every
// request to the app is logged by the middleware above.
export const config = {
  matcher: [
    /*
     * Match all request paths except for the ones starting with:
     * - _next/static (static files)
     * - _next/image (image optimization files)
     * - favicon.ico (favicon file)
     */
    '/((?!_next/static|_next/image|favicon.ico).*)',
  ],
}