diff --git a/packages/cli/src/config/config.ts b/packages/cli/src/config/config.ts index eaace7b0749..6e919a1c75c 100755 --- a/packages/cli/src/config/config.ts +++ b/packages/cli/src/config/config.ts @@ -32,6 +32,7 @@ import { ShellTool, EditTool, WriteFileTool, + Provider, } from '@google/gemini-cli-core'; import type { Settings } from './settings.js'; @@ -56,6 +57,11 @@ const logger = { }; export interface CliArgs { + provider: string | undefined; + 'openai-base-url': string | undefined; + 'openai-api-key': string | undefined; + 'openai-extra-header': string[] | undefined; + 'openai-token-cmd': string | undefined; model: string | undefined; sandbox: boolean | string | undefined; sandboxImage: string | undefined; @@ -165,6 +171,28 @@ export async function parseArguments(settings: Settings): Promise<CliArgs> { ) .command('$0 [promptWords...]', 'Launch Gemini CLI', (yargsInstance) => yargsInstance + .option('provider', { + type: 'string', + description: 'The provider to use. Can be "gemini" or "openai".', + choices: ['gemini', 'openai'], + }) + .option('openai-base-url', { + type: 'string', + description: 'The base URL for the OpenAI API.', + }) + .option('openai-api-key', { + type: 'string', + description: 'The API key for the OpenAI API.', + }) + .option('openai-extra-header', { + type: 'array', + string: true, + description: 'Extra headers to send to the OpenAI API.', + }) + .option('openai-token-cmd', { + type: 'string', + description: 'A command to run to get a token for the OpenAI API.', + }) .option('model', { alias: 'm', type: 'string', @@ -565,6 +593,11 @@ export async function loadCliConfig( ? argv.screenReader : (settings.ui?.accessibility?.screenReader ?? 
false); return new Config({ + provider: argv.provider as Provider, + openaiBaseUrl: argv['openai-base-url'], + openaiApiKey: argv['openai-api-key'], + openaiExtraHeader: argv['openai-extra-header'], + openaiTokenCmd: argv['openai-token-cmd'], sessionId, embeddingModel: DEFAULT_GEMINI_EMBEDDING_MODEL, sandbox: sandboxConfig, diff --git a/packages/core/src/config/config.ts b/packages/core/src/config/config.ts index 2d1a686016b..1c0292f988d 100644 --- a/packages/core/src/config/config.ts +++ b/packages/core/src/config/config.ts @@ -31,6 +31,7 @@ import { ReadManyFilesTool } from '../tools/read-many-files.js'; import { MemoryTool, setGeminiMdFilename } from '../tools/memoryTool.js'; import { WebSearchTool } from '../tools/web-search.js'; import { GeminiClient } from '../core/client.js'; +import { OpenAIClient } from '../core/openaiClient.js'; import { BaseLlmClient } from '../core/baseLlmClient.js'; import { FileDiscoveryService } from '../services/fileDiscoveryService.js'; import { GitService } from '../services/gitService.js'; @@ -60,6 +61,7 @@ import { RipgrepFallbackEvent } from '../telemetry/types.js'; import type { FallbackModelHandler } from '../fallback/types.js'; import { ModelRouterService } from '../routing/modelRouterService.js'; import { OutputFormat } from '../output/types.js'; +import { Provider } from '../provider.js'; // Re-export OAuth config type export type { MCPOAuthConfig, AnyToolInvocation }; @@ -184,6 +186,11 @@ export interface SandboxConfig { } export interface ConfigParameters { + provider?: Provider; + openaiBaseUrl?: string; + openaiApiKey?: string; + openaiExtraHeader?: string[]; + openaiTokenCmd?: string; sessionId: string; embeddingModel?: string; sandbox?: SandboxConfig; @@ -253,6 +260,11 @@ export interface ConfigParameters { export class Config { private toolRegistry!: ToolRegistry; private promptRegistry!: PromptRegistry; + private readonly provider: Provider; + private readonly openaiBaseUrl: string | undefined; + private readonly 
openaiApiKey: string | undefined; + private readonly openaiExtraHeader: string[] | undefined; + private readonly openaiTokenCmd: string | undefined; private readonly sessionId: string; private fileSystemService: FileSystemService; private contentGeneratorConfig!: ContentGeneratorConfig; @@ -279,6 +291,7 @@ export class Config { private readonly telemetrySettings: TelemetrySettings; private readonly usageStatisticsEnabled: boolean; private geminiClient!: GeminiClient; + private openAIClient!: OpenAIClient; private baseLlmClient!: BaseLlmClient; private modelRouterService: ModelRouterService; private readonly fileFiltering: { @@ -338,6 +351,11 @@ export class Config { private readonly useModelRouter: boolean; constructor(params: ConfigParameters) { + this.provider = params.provider ?? Provider.GEMINI; + this.openaiBaseUrl = params.openaiBaseUrl; + this.openaiApiKey = params.openaiApiKey; + this.openaiExtraHeader = params.openaiExtraHeader; + this.openaiTokenCmd = params.openaiTokenCmd; this.sessionId = params.sessionId; this.embeddingModel = params.embeddingModel ?? 
DEFAULT_GEMINI_EMBEDDING_MODEL; @@ -444,7 +462,11 @@ export class Config { if (this.getProxy()) { setGlobalDispatcher(new ProxyAgent(this.getProxy() as string)); } - this.geminiClient = new GeminiClient(this); + if (this.provider === Provider.OPENAI) { + this.openAIClient = new OpenAIClient(this); + } else { + this.geminiClient = new GeminiClient(this); + } this.modelRouterService = new ModelRouterService(this); } @@ -465,7 +487,7 @@ export class Config { this.promptRegistry = new PromptRegistry(); this.toolRegistry = await this.createToolRegistry(); - await this.geminiClient.initialize(); + await this.getClient().initialize(); } getContentGenerator(): ContentGenerator { @@ -479,8 +501,10 @@ export class Config { this.contentGeneratorConfig?.authType === AuthType.USE_GEMINI && authMethod === AuthType.LOGIN_WITH_GOOGLE ) { - // Restore the conversation history to the new client - this.geminiClient.stripThoughtsFromHistory(); + if (this.provider === Provider.GEMINI) { + // Restore the conversation history to the new client + this.geminiClient.stripThoughtsFromHistory(); + } } const newContentGeneratorConfig = createContentGeneratorConfig( @@ -722,9 +746,39 @@ export class Config { } getGeminiClient(): GeminiClient { + if (this.provider !== Provider.GEMINI) { + throw new Error('Gemini client is not available for the current provider'); + } return this.geminiClient; } + getClient(): GeminiClient | OpenAIClient { + if (this.provider === Provider.OPENAI) { + return this.openAIClient; + } + return this.geminiClient; + } + + getProvider(): Provider { + return this.provider; + } + + getOpenAIBaseUrl(): string | undefined { + return this.openaiBaseUrl; + } + + getOpenAIApiKey(): string | undefined { + return this.openaiApiKey; + } + + getOpenAIExtraHeaders(): string[] | undefined { + return this.openaiExtraHeader; + } + + getOpenAITokenCmd(): string | undefined { + return this.openaiTokenCmd; + } + getModelRouterService(): ModelRouterService { return this.modelRouterService; 
} diff --git a/packages/core/src/core/openaiClient.ts b/packages/core/src/core/openaiClient.ts new file mode 100644 index 00000000000..731ef3ca2fb --- /dev/null +++ b/packages/core/src/core/openaiClient.ts @@ -0,0 +1,100 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import type { + GenerateContentConfig, + PartListUnion, + Content, + GenerateContentResponse, +} from '@google/genai'; +import type { ServerGeminiStreamEvent, ChatCompressionInfo } from './turn.js'; +import { Turn, CompressionStatus } from './turn.js'; +import type { Config } from '../config/config.js'; +import type { ChatRecordingService } from '../services/chatRecordingService.js'; +import { LoopDetectionService } from '../services/loopDetectionService.js'; +import { GeminiChat } from './geminiChat.js'; + +export class OpenAIClient { + constructor(private readonly config: Config) {} + + async initialize() { + return Promise.resolve(); + } + + async addHistory(content: Content) { + return Promise.resolve(); + } + + getChat(): GeminiChat { + throw new Error('Not implemented'); + } + + isInitialized(): boolean { + return false; + } + + getHistory(): Content[] { + return []; + } + + stripThoughtsFromHistory() {} + + setHistory(history: Content[]) {} + + async setTools(): Promise<void> { + return Promise.resolve(); + } + + async resetChat(): Promise<void> { + return Promise.resolve(); + } + + getChatRecordingService(): ChatRecordingService | undefined { + return undefined; + } + + getLoopDetectionService(): LoopDetectionService { + throw new Error('Not implemented'); + } + + async addDirectoryContext(): Promise<void> { + return Promise.resolve(); + } + + async startChat(extraHistory?: Content[]): Promise<GeminiChat> { + throw new Error('Not implemented'); + } + + async *sendMessageStream( + request: PartListUnion, + signal: AbortSignal, + prompt_id: string, + turns?: number, + ): AsyncGenerator<ServerGeminiStreamEvent, Turn> { + yield* (async function* () {})(); + throw new Error('Not implemented'); + } + + async 
generateContent( + contents: Content[], + generationConfig: GenerateContentConfig, + abortSignal: AbortSignal, + model: string, + ): Promise<GenerateContentResponse> { + throw new Error('Not implemented'); + } + + async tryCompressChat( + prompt_id: string, + force?: boolean, + ): Promise<ChatCompressionInfo> { + return { + compressionStatus: CompressionStatus.NOOP, + originalTokenCount: 0, + newTokenCount: 0, + }; + } +} \ No newline at end of file diff --git a/packages/core/src/provider.ts b/packages/core/src/provider.ts new file mode 100644 index 00000000000..3e497bf289d --- /dev/null +++ b/packages/core/src/provider.ts @@ -0,0 +1,10 @@ +/** + * @license + * Copyright 2025 Google LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +export enum Provider { + GEMINI = 'gemini', + OPENAI = 'openai', +} \ No newline at end of file