Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions packages/cli/src/config/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ import {
ShellTool,
EditTool,
WriteFileTool,
Provider,
} from '@google/gemini-cli-core';
import type { Settings } from './settings.js';

Expand All @@ -56,6 +57,11 @@ const logger = {
};

export interface CliArgs {
provider: string | undefined;
'openai-base-url': string | undefined;
'openai-api-key': string | undefined;
'openai-extra-header': string[] | undefined;
'openai-token-cmd': string | undefined;
model: string | undefined;
sandbox: boolean | string | undefined;
sandboxImage: string | undefined;
Expand Down Expand Up @@ -165,6 +171,28 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> {
)
.command('$0 [promptWords...]', 'Launch Gemini CLI', (yargsInstance) =>
yargsInstance
.option('provider', {
type: 'string',
description: 'The provider to use. Can be "gemini" or "openai".',
choices: ['gemini', 'openai'],
})
.option('openai-base-url', {
type: 'string',
description: 'The base URL for the OpenAI API.',
})
.option('openai-api-key', {
type: 'string',
description: 'The API key for the OpenAI API.',
})
.option('openai-extra-header', {
type: 'array',
string: true,
description: 'Extra headers to send to the OpenAI API.',
})
.option('openai-token-cmd', {
type: 'string',
description: 'A command to run to get a token for the OpenAI API.',
})
.option('model', {
alias: 'm',
type: 'string',
Expand Down Expand Up @@ -565,6 +593,11 @@ export async function loadCliConfig(
? argv.screenReader
: (settings.ui?.accessibility?.screenReader ?? false);
return new Config({
provider: argv.provider as Provider,
openaiBaseUrl: argv['openai-base-url'],
openaiApiKey: argv['openai-api-key'],
openaiExtraHeader: argv['openai-extra-header'],
openaiTokenCmd: argv['openai-token-cmd'],
sessionId,
embeddingModel: DEFAULT_GEMINI_EMBEDDING_MODEL,
sandbox: sandboxConfig,
Expand Down
62 changes: 58 additions & 4 deletions packages/core/src/config/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ import { ReadManyFilesTool } from '../tools/read-many-files.js';
import { MemoryTool, setGeminiMdFilename } from '../tools/memoryTool.js';
import { WebSearchTool } from '../tools/web-search.js';
import { GeminiClient } from '../core/client.js';
import { OpenAIClient } from '../core/openaiClient.js';
import { BaseLlmClient } from '../core/baseLlmClient.js';
import { FileDiscoveryService } from '../services/fileDiscoveryService.js';
import { GitService } from '../services/gitService.js';
Expand Down Expand Up @@ -60,6 +61,7 @@ import { RipgrepFallbackEvent } from '../telemetry/types.js';
import type { FallbackModelHandler } from '../fallback/types.js';
import { ModelRouterService } from '../routing/modelRouterService.js';
import { OutputFormat } from '../output/types.js';
import { Provider } from '../provider.js';

// Re-export OAuth config type
export type { MCPOAuthConfig, AnyToolInvocation };
Expand Down Expand Up @@ -184,6 +186,11 @@ export interface SandboxConfig {
}

export interface ConfigParameters {
provider?: Provider;
openaiBaseUrl?: string;
openaiApiKey?: string;
openaiExtraHeader?: string[];
openaiTokenCmd?: string;
sessionId: string;
embeddingModel?: string;
sandbox?: SandboxConfig;
Expand Down Expand Up @@ -253,6 +260,11 @@ export interface ConfigParameters {
export class Config {
private toolRegistry!: ToolRegistry;
private promptRegistry!: PromptRegistry;
private readonly provider: Provider;
private readonly openaiBaseUrl: string | undefined;
private readonly openaiApiKey: string | undefined;
private readonly openaiExtraHeader: string[] | undefined;
private readonly openaiTokenCmd: string | undefined;
private readonly sessionId: string;
private fileSystemService: FileSystemService;
private contentGeneratorConfig!: ContentGeneratorConfig;
Expand All @@ -279,6 +291,7 @@ export class Config {
private readonly telemetrySettings: TelemetrySettings;
private readonly usageStatisticsEnabled: boolean;
private geminiClient!: GeminiClient;
private openAIClient!: OpenAIClient;
private baseLlmClient!: BaseLlmClient;
private modelRouterService: ModelRouterService;
private readonly fileFiltering: {
Expand Down Expand Up @@ -338,6 +351,11 @@ export class Config {
private readonly useModelRouter: boolean;

constructor(params: ConfigParameters) {
this.provider = params.provider ?? Provider.GEMINI;
this.openaiBaseUrl = params.openaiBaseUrl;
this.openaiApiKey = params.openaiApiKey;
this.openaiExtraHeader = params.openaiExtraHeader;
this.openaiTokenCmd = params.openaiTokenCmd;
this.sessionId = params.sessionId;
this.embeddingModel =
params.embeddingModel ?? DEFAULT_GEMINI_EMBEDDING_MODEL;
Expand Down Expand Up @@ -444,7 +462,11 @@ export class Config {
if (this.getProxy()) {
setGlobalDispatcher(new ProxyAgent(this.getProxy() as string));
}
this.geminiClient = new GeminiClient(this);
if (this.provider === Provider.OPENAI) {
this.openAIClient = new OpenAIClient(this);
} else {
this.geminiClient = new GeminiClient(this);
}
this.modelRouterService = new ModelRouterService(this);
}

Expand All @@ -465,7 +487,7 @@ export class Config {
this.promptRegistry = new PromptRegistry();
this.toolRegistry = await this.createToolRegistry();

await this.geminiClient.initialize();
await this.getClient().initialize();
}

getContentGenerator(): ContentGenerator {
Expand All @@ -479,8 +501,10 @@ export class Config {
this.contentGeneratorConfig?.authType === AuthType.USE_GEMINI &&
authMethod === AuthType.LOGIN_WITH_GOOGLE
) {
// Restore the conversation history to the new client
this.geminiClient.stripThoughtsFromHistory();
if (this.provider === Provider.GEMINI) {
// Restore the conversation history to the new client
this.geminiClient.stripThoughtsFromHistory();
}
}

const newContentGeneratorConfig = createContentGeneratorConfig(
Expand Down Expand Up @@ -722,9 +746,39 @@ export class Config {
}

/**
 * Returns the Gemini-specific client.
 *
 * @throws If the active provider is not Gemini. Callers that can work with
 *   either provider should use `getClient()` instead.
 */
getGeminiClient(): GeminiClient {
  if (this.provider !== Provider.GEMINI) {
    throw new Error('Gemini client is not available for the current provider');
  }
  return this.geminiClient;
}

/**
 * Returns the LLM client for the configured provider: the OpenAI client when
 * the provider is OpenAI, otherwise the Gemini client.
 */
getClient(): GeminiClient | OpenAIClient {
  if (this.provider === Provider.OPENAI) {
    return this.openAIClient;
  }
  return this.geminiClient;
}

/** The active LLM provider (defaults to Gemini when none was configured). */
getProvider(): Provider {
  return this.provider;
}

/** Base URL for the OpenAI-compatible API, if one was supplied. */
getOpenAIBaseUrl(): string | undefined {
  return this.openaiBaseUrl;
}

/** API key for the OpenAI-compatible API, if one was supplied. */
getOpenAIApiKey(): string | undefined {
  return this.openaiApiKey;
}

// NOTE(review): backing field is singular (`openaiExtraHeader`, one CLI flag
// repeated into an array) while the getter is plural — intentional? confirm.
/** Extra headers to send with OpenAI API requests, if any were supplied. */
getOpenAIExtraHeaders(): string[] | undefined {
  return this.openaiExtraHeader;
}

/** Shell command that yields an OpenAI API token, if one was supplied. */
getOpenAITokenCmd(): string | undefined {
  return this.openaiTokenCmd;
}

getModelRouterService(): ModelRouterService {
return this.modelRouterService;
}
Expand Down
100 changes: 100 additions & 0 deletions packages/core/src/core/openaiClient.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

import type {
GenerateContentConfig,
PartListUnion,
Content,
GenerateContentResponse,
} from '@google/genai';
import type { ServerGeminiStreamEvent, ChatCompressionInfo } from './turn.js';
import { Turn, CompressionStatus } from './turn.js';
import type { Config } from '../config/config.js';
import type { ChatRecordingService } from '../services/chatRecordingService.js';
import { LoopDetectionService } from '../services/loopDetectionService.js';
import { GeminiChat } from './geminiChat.js';

/**
 * Placeholder client for the OpenAI provider.
 *
 * Mirrors the surface of `GeminiClient` so `Config.getClient()` can return
 * either implementation interchangeably. Most members are stubs: read-only
 * queries return inert defaults, mutators are no-ops, and operations that
 * need a real backend throw `Not implemented`.
 */
export class OpenAIClient {
  constructor(private readonly config: Config) {}

  /** No-op: the stub has no resources to set up. */
  async initialize(): Promise<void> {}

  /** No-op: conversation history is not tracked by the stub. */
  async addHistory(content: Content): Promise<void> {}

  /** @throws Always — chat sessions are not implemented for OpenAI yet. */
  getChat(): GeminiChat {
    throw new Error('Not implemented');
  }

  /** The stub never reaches an initialized state. */
  isInitialized(): boolean {
    return false;
  }

  /** History is not tracked; always empty. */
  getHistory(): Content[] {
    return [];
  }

  /** No-op: there is no history to strip thoughts from. */
  stripThoughtsFromHistory(): void {}

  /** No-op: history is not tracked by the stub. */
  setHistory(history: Content[]): void {}

  /** No-op: tool declarations are not forwarded anywhere yet. */
  async setTools(): Promise<void> {}

  /** No-op: there is no chat session to reset. */
  async resetChat(): Promise<void> {}

  /** Chat recording is not supported by the stub. */
  getChatRecordingService(): ChatRecordingService | undefined {
    return undefined;
  }

  /** @throws Always — loop detection is not implemented for OpenAI yet. */
  getLoopDetectionService(): LoopDetectionService {
    throw new Error('Not implemented');
  }

  /** No-op: directory context is not forwarded anywhere yet. */
  async addDirectoryContext(): Promise<void> {}

  /** @throws Always — chat sessions are not implemented for OpenAI yet. */
  async startChat(extraHistory?: Content[]): Promise<GeminiChat> {
    throw new Error('Not implemented');
  }

  /**
   * Streaming is not implemented; the returned generator rejects on the
   * first `next()` call (generator bodies run lazily).
   */
  async *sendMessageStream(
    request: PartListUnion,
    signal: AbortSignal,
    prompt_id: string,
    turns?: number,
  ): AsyncGenerator<ServerGeminiStreamEvent, Turn> {
    throw new Error('Not implemented');
  }

  /** @throws Always — one-shot generation is not implemented for OpenAI yet. */
  async generateContent(
    contents: Content[],
    generationConfig: GenerateContentConfig,
    abortSignal: AbortSignal,
    model: string,
  ): Promise<GenerateContentResponse> {
    throw new Error('Not implemented');
  }

  /**
   * Compression is a no-op for the stub: reports NOOP status with zero
   * token counts so callers treat the chat as untouched.
   */
  async tryCompressChat(
    prompt_id: string,
    force?: boolean,
  ): Promise<ChatCompressionInfo> {
    return {
      compressionStatus: CompressionStatus.NOOP,
      originalTokenCount: 0,
      newTokenCount: 0,
    };
  }
}
10 changes: 10 additions & 0 deletions packages/core/src/provider.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

/** Identifies which LLM backend the CLI talks to. Values match the CLI's `--provider` choices. */
export enum Provider {
  GEMINI = 'gemini',
  OPENAI = 'openai',
}
Loading