diff --git a/src/components/ConsoleOAuthFlow.tsx b/src/components/ConsoleOAuthFlow.tsx
index 5ddc67255..44c69025f 100644
--- a/src/components/ConsoleOAuthFlow.tsx
+++ b/src/components/ConsoleOAuthFlow.tsx
@@ -48,6 +48,15 @@ type OAuthStatus =
opusModel: string
activeField: 'base_url' | 'api_key' | 'haiku_model' | 'sonnet_model' | 'opus_model'
} // OpenAI Chat Completions API platform
+ | {
+ state: 'gemini_api'
+ baseUrl: string
+ apiKey: string
+ haikuModel: string
+ sonnetModel: string
+ opusModel: string
+ activeField: 'base_url' | 'api_key' | 'haiku_model' | 'sonnet_model' | 'opus_model'
+ } // Gemini Generate Content API platform
| { state: 'ready_to_start' } // Flow started, waiting for browser to open
| { state: 'waiting_for_login'; url: string } // Browser opened, waiting for user to login
| { state: 'creating_api_key' } // Got access token, creating API key
@@ -60,7 +69,6 @@ type OAuthStatus =
}
const PASTE_HERE_MSG = 'Paste code here if prompted > '
-
export function ConsoleOAuthFlow({
onDone,
startingMessage,
@@ -476,6 +484,16 @@ function OAuthStatusMessage({
),
value: 'openai_chat_api',
},
+ {
+ label: (
+
+ Gemini API ·{' '}
+ Google Gemini native REST/SSE
+ {'\n'}
+
+ ),
+ value: 'gemini_api',
+ },
{
label: (
@@ -543,6 +561,17 @@ function OAuthStatusMessage({
opusModel: process.env.ANTHROPIC_DEFAULT_OPUS_MODEL ?? '',
activeField: 'base_url',
})
+ } else if (value === 'gemini_api') {
+ logEvent('tengu_gemini_api_selected', {})
+ setOAuthStatus({
+ state: 'gemini_api',
+ baseUrl: process.env.GEMINI_BASE_URL ?? '',
+ apiKey: process.env.GEMINI_API_KEY ?? '',
+ haikuModel: process.env.ANTHROPIC_DEFAULT_HAIKU_MODEL ?? '',
+ sonnetModel: process.env.ANTHROPIC_DEFAULT_SONNET_MODEL ?? '',
+ opusModel: process.env.ANTHROPIC_DEFAULT_OPUS_MODEL ?? '',
+ activeField: 'base_url',
+ })
} else if (value === 'platform') {
logEvent('tengu_oauth_platform_selected', {})
setOAuthStatus({ state: 'platform_setup' })
@@ -974,6 +1003,238 @@ function OAuthStatusMessage({
)
}
+ case 'gemini_api':
+ {
+ type GeminiField = 'base_url' | 'api_key' | 'haiku_model' | 'sonnet_model' | 'opus_model'
+ const GEMINI_FIELDS: GeminiField[] = [
+ 'base_url',
+ 'api_key',
+ 'haiku_model',
+ 'sonnet_model',
+ 'opus_model',
+ ]
+ const gp = oauthStatus as {
+ state: 'gemini_api'
+ activeField: GeminiField
+ baseUrl: string
+ apiKey: string
+ haikuModel: string
+ sonnetModel: string
+ opusModel: string
+ }
+ const { activeField, baseUrl, apiKey, haikuModel, sonnetModel, opusModel } = gp
+ const geminiDisplayValues: Record<GeminiField, string> = {
+ base_url: baseUrl,
+ api_key: apiKey,
+ haiku_model: haikuModel,
+ sonnet_model: sonnetModel,
+ opus_model: opusModel,
+ }
+
+ const [geminiInputValue, setGeminiInputValue] = useState(
+ () => geminiDisplayValues[activeField],
+ )
+ const [geminiInputCursorOffset, setGeminiInputCursorOffset] = useState(
+ () => geminiDisplayValues[activeField].length,
+ )
+
+ const buildGeminiState = useCallback(
+ (field: GeminiField, value: string, newActive?: GeminiField) => {
+ const s = {
+ state: 'gemini_api' as const,
+ activeField: newActive ?? activeField,
+ baseUrl,
+ apiKey,
+ haikuModel,
+ sonnetModel,
+ opusModel,
+ }
+ switch (field) {
+ case 'base_url':
+ return { ...s, baseUrl: value }
+ case 'api_key':
+ return { ...s, apiKey: value }
+ case 'haiku_model':
+ return { ...s, haikuModel: value }
+ case 'sonnet_model':
+ return { ...s, sonnetModel: value }
+ case 'opus_model':
+ return { ...s, opusModel: value }
+ }
+ },
+ [activeField, baseUrl, apiKey, haikuModel, sonnetModel, opusModel],
+ )
+
+ const doGeminiSave = useCallback(() => {
+ const finalVals = { ...geminiDisplayValues, [activeField]: geminiInputValue }
+ if (!finalVals.haiku_model || !finalVals.sonnet_model || !finalVals.opus_model) {
+ setOAuthStatus({
+ state: 'error',
+ message: 'Gemini setup requires Haiku, Sonnet, and Opus model names.',
+ toRetry: {
+ state: 'gemini_api',
+ baseUrl: finalVals.base_url,
+ apiKey: finalVals.api_key,
+ haikuModel: finalVals.haiku_model,
+ sonnetModel: finalVals.sonnet_model,
+ opusModel: finalVals.opus_model,
+ activeField,
+ },
+ })
+ return
+ }
+
+ const env: Record<string, string> = {}
+ if (finalVals.base_url) env.GEMINI_BASE_URL = finalVals.base_url
+ if (finalVals.api_key) env.GEMINI_API_KEY = finalVals.api_key
+ if (finalVals.haiku_model) env.ANTHROPIC_DEFAULT_HAIKU_MODEL = finalVals.haiku_model
+ if (finalVals.sonnet_model) env.ANTHROPIC_DEFAULT_SONNET_MODEL = finalVals.sonnet_model
+ if (finalVals.opus_model) env.ANTHROPIC_DEFAULT_OPUS_MODEL = finalVals.opus_model
+ const { error } = updateSettingsForSource('userSettings', {
+ modelType: 'gemini' as any,
+ env,
+ } as any)
+ if (error) {
+ setOAuthStatus({
+ state: 'error',
+ message: `Failed to save: ${error.message}`,
+ toRetry: {
+ state: 'gemini_api',
+ baseUrl: '',
+ apiKey: '',
+ haikuModel: '',
+ sonnetModel: '',
+ opusModel: '',
+ activeField: 'base_url',
+ },
+ })
+ } else {
+ for (const [k, v] of Object.entries(env)) process.env[k] = v
+ setOAuthStatus({ state: 'success' })
+ void onDone()
+ }
+ }, [activeField, geminiInputValue, geminiDisplayValues, onDone, setOAuthStatus])
+
+ const handleGeminiEnter = useCallback(() => {
+ const idx = GEMINI_FIELDS.indexOf(activeField)
+ setOAuthStatus(buildGeminiState(activeField, geminiInputValue))
+ if (idx === GEMINI_FIELDS.length - 1) {
+ doGeminiSave()
+ } else {
+ const next = GEMINI_FIELDS[idx + 1]!
+ setGeminiInputValue(geminiDisplayValues[next] ?? '')
+ setGeminiInputCursorOffset((geminiDisplayValues[next] ?? '').length)
+ }
+ }, [
+ activeField,
+ buildGeminiState,
+ doGeminiSave,
+ geminiDisplayValues,
+ geminiInputValue,
+ setOAuthStatus,
+ ])
+
+ useKeybinding(
+ 'tabs:next',
+ () => {
+ const idx = GEMINI_FIELDS.indexOf(activeField)
+ if (idx < GEMINI_FIELDS.length - 1) {
+ setOAuthStatus(
+ buildGeminiState(activeField, geminiInputValue, GEMINI_FIELDS[idx + 1]),
+ )
+ setGeminiInputValue(geminiDisplayValues[GEMINI_FIELDS[idx + 1]!] ?? '')
+ setGeminiInputCursorOffset(
+ (geminiDisplayValues[GEMINI_FIELDS[idx + 1]!] ?? '').length,
+ )
+ }
+ },
+ { context: 'Tabs' },
+ )
+ useKeybinding(
+ 'tabs:previous',
+ () => {
+ const idx = GEMINI_FIELDS.indexOf(activeField)
+ if (idx > 0) {
+ setOAuthStatus(
+ buildGeminiState(activeField, geminiInputValue, GEMINI_FIELDS[idx - 1]),
+ )
+ setGeminiInputValue(geminiDisplayValues[GEMINI_FIELDS[idx - 1]!] ?? '')
+ setGeminiInputCursorOffset(
+ (geminiDisplayValues[GEMINI_FIELDS[idx - 1]!] ?? '').length,
+ )
+ }
+ },
+ { context: 'Tabs' },
+ )
+ useKeybinding(
+ 'confirm:no',
+ () => {
+ setOAuthStatus({ state: 'idle' })
+ },
+ { context: 'Confirmation' },
+ )
+
+ const geminiColumns = useTerminalSize().columns - 20
+
+ const renderGeminiRow = (
+ field: GeminiField,
+ label: string,
+ opts?: { mask?: boolean },
+ ) => {
+ const active = activeField === field
+ const val = geminiDisplayValues[field]
+ return (
+
+
+ {` ${label} `}
+
+
+ {active ? (
+
+ ) : val ? (
+
+ {opts?.mask
+ ? val.slice(0, 8) + '\u00b7'.repeat(Math.max(0, val.length - 8))
+ : val}
+
+ ) : null}
+
+ )
+ }
+
+ return (
+
+ Gemini API Setup
+
+ Configure a Gemini Generate Content compatible endpoint. Base URL is
+ optional and defaults to Google's v1beta API.
+
+
+ {renderGeminiRow('base_url', 'Base URL ')}
+ {renderGeminiRow('api_key', 'API Key ', { mask: true })}
+ {renderGeminiRow('haiku_model', 'Haiku ')}
+ {renderGeminiRow('sonnet_model', 'Sonnet ')}
+ {renderGeminiRow('opus_model', 'Opus ')}
+
+
+ Tab to switch · Enter on last field to save · Esc to go back
+
+
+ )
+ }
+
case 'platform_setup':
return (
diff --git a/src/services/api/claude.ts b/src/services/api/claude.ts
index bc6f380f2..f2b19d3c3 100644
--- a/src/services/api/claude.ts
+++ b/src/services/api/claude.ts
@@ -640,24 +640,51 @@ export function assistantMessageToMessageParam(
} else {
return {
role: 'assistant',
- content: message.message.content.map((_, i) => ({
- ..._,
- ...(i === message.message.content.length - 1 &&
- _.type !== 'thinking' &&
- _.type !== 'redacted_thinking' &&
- (feature('CONNECTOR_TEXT') ? !isConnectorTextBlock(_) : true)
- ? enablePromptCaching
- ? { cache_control: getCacheControl({ querySource }) }
- : {}
- : {}),
- })),
+ content: message.message.content.map((_, i) => {
+ const contentBlock = stripGeminiProviderMetadata(_)
+ return {
+ ...contentBlock,
+ ...(i === message.message.content.length - 1 &&
+ contentBlock.type !== 'thinking' &&
+ contentBlock.type !== 'redacted_thinking' &&
+ (feature('CONNECTOR_TEXT')
+ ? !isConnectorTextBlock(contentBlock)
+ : true)
+ ? enablePromptCaching
+ ? { cache_control: getCacheControl({ querySource }) }
+ : {}
+ : {}),
+ }
+ }),
}
}
}
return {
role: 'assistant',
- content: message.message.content,
+ content:
+ typeof message.message.content === 'string'
+ ? message.message.content
+ : message.message.content.map(stripGeminiProviderMetadata),
+ }
+}
+
+function stripGeminiProviderMetadata<T extends string | object>(
+ contentBlock: T,
+): T {
+ if (
+ typeof contentBlock === 'string' ||
+ !('_geminiThoughtSignature' in contentBlock)
+ ) {
+ return contentBlock
+ }
+
+ const {
+ _geminiThoughtSignature: _unusedGeminiThoughtSignature,
+ ...rest
+ } = contentBlock as T & {
+ _geminiThoughtSignature?: string
}
+ return rest as T
}
export type Options = {
@@ -1310,6 +1337,19 @@ async function* queryModel(
return
}
+ if (getAPIProvider() === 'gemini') {
+ const { queryModelGemini } = await import('./gemini/index.js')
+ yield* queryModelGemini(
+ messagesForAPI,
+ systemPrompt,
+ filteredTools,
+ signal,
+ options,
+ thinkingConfig,
+ )
+ return
+ }
+
// Instrumentation: Track message count after normalization
logEvent('tengu_api_after_normalize', {
postNormalizedMessageCount: messagesForAPI.length,
diff --git a/src/services/api/gemini/__tests__/convertMessages.test.ts b/src/services/api/gemini/__tests__/convertMessages.test.ts
new file mode 100644
index 000000000..11d49ca37
--- /dev/null
+++ b/src/services/api/gemini/__tests__/convertMessages.test.ts
@@ -0,0 +1,202 @@
+import { describe, expect, test } from 'bun:test'
+import type {
+ AssistantMessage,
+ UserMessage,
+} from '../../../../types/message.js'
+import { anthropicMessagesToGemini } from '../convertMessages.js'
+
+function makeUserMsg(content: string | any[]): UserMessage {
+ return {
+ type: 'user',
+ uuid: '00000000-0000-0000-0000-000000000000',
+ message: { role: 'user', content },
+ } as UserMessage
+}
+
+function makeAssistantMsg(content: string | any[]): AssistantMessage {
+ return {
+ type: 'assistant',
+ uuid: '00000000-0000-0000-0000-000000000001',
+ message: { role: 'assistant', content },
+ } as AssistantMessage
+}
+
+describe('anthropicMessagesToGemini', () => {
+ test('converts system prompt to systemInstruction', () => {
+ const result = anthropicMessagesToGemini(
+ [makeUserMsg('hello')],
+ ['You are helpful.'] as any,
+ )
+
+ expect(result.systemInstruction).toEqual({
+ parts: [{ text: 'You are helpful.' }],
+ })
+ })
+
+ test('converts assistant tool_use to functionCall', () => {
+ const result = anthropicMessagesToGemini(
+ [
+ makeAssistantMsg([
+ {
+ type: 'tool_use',
+ id: 'toolu_123',
+ name: 'bash',
+ input: { command: 'ls' },
+ _geminiThoughtSignature: 'sig-tool',
+ },
+ ]),
+ ],
+ [] as any,
+ )
+
+ expect(result.contents).toEqual([
+ {
+ role: 'model',
+ parts: [
+ {
+ functionCall: {
+ name: 'bash',
+ args: { command: 'ls' },
+ },
+ thoughtSignature: 'sig-tool',
+ },
+ ],
+ },
+ ])
+ })
+
+ test('converts tool_result to functionResponse using prior tool name', () => {
+ const result = anthropicMessagesToGemini(
+ [
+ makeAssistantMsg([
+ {
+ type: 'tool_use',
+ id: 'toolu_123',
+ name: 'bash',
+ input: { command: 'ls' },
+ },
+ ]),
+ makeUserMsg([
+ {
+ type: 'tool_result',
+ tool_use_id: 'toolu_123',
+ content: 'file.txt',
+ },
+ ]),
+ ],
+ [] as any,
+ )
+
+ expect(result.contents[1]).toEqual({
+ role: 'user',
+ parts: [
+ {
+ functionResponse: {
+ name: 'bash',
+ response: {
+ result: 'file.txt',
+ },
+ },
+ },
+ ],
+ })
+ })
+
+ test('converts thinking blocks with signatures', () => {
+ const result = anthropicMessagesToGemini(
+ [
+ makeAssistantMsg([
+ {
+ type: 'thinking',
+ thinking: 'internal reasoning',
+ signature: 'sig-thinking',
+ },
+ {
+ type: 'text',
+ text: 'visible answer',
+ },
+ ]),
+ ],
+ [] as any,
+ )
+
+ expect(result.contents[0]).toEqual({
+ role: 'model',
+ parts: [
+ {
+ text: 'internal reasoning',
+ thought: true,
+ thoughtSignature: 'sig-thinking',
+ },
+ {
+ text: 'visible answer',
+ },
+ ],
+ })
+ })
+
+ test('filters empty assistant text and signature-only thinking parts', () => {
+ const result = anthropicMessagesToGemini(
+ [
+ makeAssistantMsg([
+ {
+ type: 'text',
+ text: '',
+ _geminiThoughtSignature: 'sig-empty-text',
+ },
+ {
+ type: 'thinking',
+ thinking: '',
+ signature: 'sig-empty-thinking',
+ },
+ {
+ type: 'tool_use',
+ id: 'toolu_123',
+ name: 'bash',
+ input: { command: 'pwd' },
+ },
+ ]),
+ ],
+ [] as any,
+ )
+
+ expect(result.contents).toEqual([
+ {
+ role: 'model',
+ parts: [
+ {
+ functionCall: {
+ name: 'bash',
+ args: { command: 'pwd' },
+ },
+ },
+ ],
+ },
+ ])
+ })
+
+ test('filters empty user text blocks', () => {
+ const result = anthropicMessagesToGemini(
+ [
+ makeUserMsg([
+ {
+ type: 'text',
+ text: '',
+ },
+ {
+ type: 'text',
+ text: 'hello',
+ },
+ ]),
+ ],
+ [] as any,
+ )
+
+ expect(result.contents).toEqual([
+ {
+ role: 'user',
+ parts: [{ text: 'hello' }],
+ },
+ ])
+ })
+})
diff --git a/src/services/api/gemini/__tests__/convertTools.test.ts b/src/services/api/gemini/__tests__/convertTools.test.ts
new file mode 100644
index 000000000..999f362cd
--- /dev/null
+++ b/src/services/api/gemini/__tests__/convertTools.test.ts
@@ -0,0 +1,130 @@
+import { describe, expect, test } from 'bun:test'
+import {
+ anthropicToolChoiceToGemini,
+ anthropicToolsToGemini,
+} from '../convertTools.js'
+
+describe('anthropicToolsToGemini', () => {
+ test('converts basic tool to parametersJsonSchema', () => {
+ const tools = [
+ {
+ type: 'custom',
+ name: 'bash',
+ description: 'Run a bash command',
+ input_schema: {
+ type: 'object',
+ properties: { command: { type: 'string' } },
+ required: ['command'],
+ },
+ },
+ ]
+
+ expect(anthropicToolsToGemini(tools as any)).toEqual([
+ {
+ functionDeclarations: [
+ {
+ name: 'bash',
+ description: 'Run a bash command',
+ parametersJsonSchema: {
+ type: 'object',
+ properties: { command: { type: 'string' } },
+ propertyOrdering: ['command'],
+ required: ['command'],
+ },
+ },
+ ],
+ },
+ ])
+ })
+
+ test('sanitizes unsupported JSON Schema fields for Gemini', () => {
+ const tools = [
+ {
+ type: 'custom',
+ name: 'complex',
+ description: 'Complex schema',
+ input_schema: {
+ $schema: 'http://json-schema.org/draft-07/schema#',
+ type: 'object',
+ additionalProperties: false,
+ propertyNames: { pattern: '^[a-z]+$' },
+ properties: {
+ mode: { const: 'strict' },
+ retries: {
+ type: 'integer',
+ exclusiveMinimum: 0,
+ },
+ metadata: {
+ type: 'object',
+ additionalProperties: {
+ type: 'string',
+ propertyNames: { pattern: '^[a-z]+$' },
+ },
+ },
+ },
+ required: ['mode'],
+ },
+ },
+ ]
+
+ expect(anthropicToolsToGemini(tools as any)).toEqual([
+ {
+ functionDeclarations: [
+ {
+ name: 'complex',
+ description: 'Complex schema',
+ parametersJsonSchema: {
+ type: 'object',
+ additionalProperties: false,
+ properties: {
+ mode: {
+ type: 'string',
+ enum: ['strict'],
+ },
+ retries: {
+ type: 'integer',
+ minimum: 0,
+ },
+ metadata: {
+ type: 'object',
+ additionalProperties: {
+ type: 'string',
+ },
+ },
+ },
+ propertyOrdering: ['mode', 'retries', 'metadata'],
+ required: ['mode'],
+ },
+ },
+ ],
+ },
+ ])
+ })
+
+ test('returns empty array when no tools are provided', () => {
+ expect(anthropicToolsToGemini([])).toEqual([])
+ })
+})
+
+describe('anthropicToolChoiceToGemini', () => {
+ test('maps auto', () => {
+ expect(anthropicToolChoiceToGemini({ type: 'auto' })).toEqual({
+ mode: 'AUTO',
+ })
+ })
+
+ test('maps any', () => {
+ expect(anthropicToolChoiceToGemini({ type: 'any' })).toEqual({
+ mode: 'ANY',
+ })
+ })
+
+ test('maps explicit tool choice', () => {
+ expect(
+ anthropicToolChoiceToGemini({ type: 'tool', name: 'bash' }),
+ ).toEqual({
+ mode: 'ANY',
+ allowedFunctionNames: ['bash'],
+ })
+ })
+})
diff --git a/src/services/api/gemini/__tests__/modelMapping.test.ts b/src/services/api/gemini/__tests__/modelMapping.test.ts
new file mode 100644
index 000000000..18846b23c
--- /dev/null
+++ b/src/services/api/gemini/__tests__/modelMapping.test.ts
@@ -0,0 +1,72 @@
+import { afterEach, beforeEach, describe, expect, test } from 'bun:test'
+import { resolveGeminiModel } from '../modelMapping.js'
+
+describe('resolveGeminiModel', () => {
+ const originalEnv = {
+ GEMINI_MODEL: process.env.GEMINI_MODEL,
+ ANTHROPIC_DEFAULT_HAIKU_MODEL: process.env.ANTHROPIC_DEFAULT_HAIKU_MODEL,
+ ANTHROPIC_DEFAULT_SONNET_MODEL: process.env.ANTHROPIC_DEFAULT_SONNET_MODEL,
+ ANTHROPIC_DEFAULT_OPUS_MODEL: process.env.ANTHROPIC_DEFAULT_OPUS_MODEL,
+ }
+
+ beforeEach(() => {
+ delete process.env.GEMINI_MODEL
+ delete process.env.ANTHROPIC_DEFAULT_HAIKU_MODEL
+ delete process.env.ANTHROPIC_DEFAULT_SONNET_MODEL
+ delete process.env.ANTHROPIC_DEFAULT_OPUS_MODEL
+ })
+
+ afterEach(() => {
+ Object.assign(process.env, originalEnv)
+ })
+
+ test('GEMINI_MODEL env var overrides family mappings', () => {
+ process.env.GEMINI_MODEL = 'gemini-2.5-pro'
+ process.env.ANTHROPIC_DEFAULT_SONNET_MODEL = 'gemini-2.5-flash'
+
+ expect(resolveGeminiModel('claude-sonnet-4-6')).toBe('gemini-2.5-pro')
+ })
+
+ test('resolves sonnet model from shared family override', () => {
+ process.env.ANTHROPIC_DEFAULT_SONNET_MODEL = 'gemini-2.5-flash'
+ expect(resolveGeminiModel('claude-sonnet-4-6')).toBe('gemini-2.5-flash')
+ })
+
+ test('resolves haiku model from shared family override', () => {
+ process.env.ANTHROPIC_DEFAULT_HAIKU_MODEL = 'gemini-2.5-flash-lite'
+ expect(resolveGeminiModel('claude-haiku-4-5-20251001')).toBe(
+ 'gemini-2.5-flash-lite',
+ )
+ })
+
+ test('resolves opus model from shared family override', () => {
+ process.env.ANTHROPIC_DEFAULT_OPUS_MODEL = 'gemini-2.5-pro'
+ expect(resolveGeminiModel('claude-opus-4-6')).toBe('gemini-2.5-pro')
+ })
+
+ test('uses shared family override', () => {
+ process.env.ANTHROPIC_DEFAULT_SONNET_MODEL = 'legacy-gemini-sonnet'
+ expect(resolveGeminiModel('claude-sonnet-4-6')).toBe(
+ 'legacy-gemini-sonnet',
+ )
+ })
+
+ test('strips [1m] suffix before resolving', () => {
+ process.env.ANTHROPIC_DEFAULT_SONNET_MODEL = 'gemini-2.5-flash'
+ expect(resolveGeminiModel('claude-sonnet-4-6[1m]')).toBe(
+ 'gemini-2.5-flash',
+ )
+ })
+
+ test('passes through explicit Gemini model names', () => {
+ expect(resolveGeminiModel('gemini-3.1-flash-lite-preview')).toBe(
+ 'gemini-3.1-flash-lite-preview',
+ )
+ })
+
+ test('throws when family mapping is missing', () => {
+ expect(() => resolveGeminiModel('claude-sonnet-4-6')).toThrow(
+ 'Gemini provider requires GEMINI_MODEL or ANTHROPIC_DEFAULT_SONNET_MODEL to be configured.',
+ )
+ })
+})
diff --git a/src/services/api/gemini/__tests__/streamAdapter.test.ts b/src/services/api/gemini/__tests__/streamAdapter.test.ts
new file mode 100644
index 000000000..d7b42229f
--- /dev/null
+++ b/src/services/api/gemini/__tests__/streamAdapter.test.ts
@@ -0,0 +1,175 @@
+import { describe, expect, test } from 'bun:test'
+import { adaptGeminiStreamToAnthropic } from '../streamAdapter.js'
+import type { GeminiStreamChunk } from '../types.js'
+
+function mockStream(
+ chunks: GeminiStreamChunk[],
+): AsyncIterable<GeminiStreamChunk> {
+ return {
+ [Symbol.asyncIterator]() {
+ let index = 0
+ return {
+ async next() {
+ if (index >= chunks.length) {
+ return { done: true, value: undefined }
+ }
+ return { done: false, value: chunks[index++] }
+ },
+ }
+ },
+ }
+}
+
+async function collectEvents(chunks: GeminiStreamChunk[]) {
+ const events: any[] = []
+ for await (const event of adaptGeminiStreamToAnthropic(
+ mockStream(chunks),
+ 'gemini-2.5-flash',
+ )) {
+ events.push(event)
+ }
+ return events
+}
+
+describe('adaptGeminiStreamToAnthropic', () => {
+ test('converts text chunks', async () => {
+ const events = await collectEvents([
+ {
+ candidates: [
+ {
+ content: {
+ parts: [{ text: 'Hello' }],
+ },
+ },
+ ],
+ },
+ {
+ candidates: [
+ {
+ content: {
+ parts: [{ text: ' world' }],
+ },
+ finishReason: 'STOP',
+ },
+ ],
+ },
+ ])
+
+ const textDeltas = events.filter(
+ event =>
+ event.type === 'content_block_delta' && event.delta.type === 'text_delta',
+ )
+
+ expect(events[0].type).toBe('message_start')
+ expect(textDeltas).toHaveLength(2)
+ expect(textDeltas[0].delta.text).toBe('Hello')
+ expect(textDeltas[1].delta.text).toBe(' world')
+
+ const messageDelta = events.find(event => event.type === 'message_delta')
+ expect(messageDelta.delta.stop_reason).toBe('end_turn')
+ })
+
+ test('converts thinking chunks and signatures', async () => {
+ const events = await collectEvents([
+ {
+ candidates: [
+ {
+ content: {
+ parts: [{ text: 'Think', thought: true }],
+ },
+ },
+ ],
+ },
+ {
+ candidates: [
+ {
+ content: {
+ parts: [{ thought: true, thoughtSignature: 'sig-123' }],
+ },
+ finishReason: 'STOP',
+ },
+ ],
+ },
+ ])
+
+ const blockStart = events.find(event => event.type === 'content_block_start')
+ expect(blockStart.content_block.type).toBe('thinking')
+
+ const signatureDelta = events.find(
+ event =>
+ event.type === 'content_block_delta' &&
+ event.delta.type === 'signature_delta',
+ )
+ expect(signatureDelta.delta.signature).toBe('sig-123')
+ })
+
+ test('converts function calls to tool_use blocks', async () => {
+ const events = await collectEvents([
+ {
+ candidates: [
+ {
+ content: {
+ parts: [
+ {
+ functionCall: {
+ name: 'bash',
+ args: { command: 'ls' },
+ },
+ thoughtSignature: 'sig-tool',
+ },
+ ],
+ },
+ finishReason: 'STOP',
+ },
+ ],
+ },
+ ])
+
+ const blockStart = events.find(event => event.type === 'content_block_start')
+ expect(blockStart.content_block.type).toBe('tool_use')
+ expect(blockStart.content_block.name).toBe('bash')
+
+ const signatureDelta = events.find(
+ event =>
+ event.type === 'content_block_delta' &&
+ event.delta.type === 'signature_delta',
+ )
+ expect(signatureDelta.delta.signature).toBe('sig-tool')
+
+ const inputDelta = events.find(
+ event =>
+ event.type === 'content_block_delta' &&
+ event.delta.type === 'input_json_delta',
+ )
+ expect(inputDelta.delta.partial_json).toBe('{"command":"ls"}')
+
+ const messageDelta = events.find(event => event.type === 'message_delta')
+ expect(messageDelta.delta.stop_reason).toBe('tool_use')
+ })
+
+ test('maps usage metadata into output tokens', async () => {
+ const events = await collectEvents([
+ {
+ candidates: [
+ {
+ content: {
+ parts: [{ text: 'Hello' }],
+ },
+ finishReason: 'STOP',
+ },
+ ],
+ usageMetadata: {
+ promptTokenCount: 10,
+ candidatesTokenCount: 5,
+ thoughtsTokenCount: 2,
+ },
+ },
+ ])
+
+ const messageStart = events.find(event => event.type === 'message_start')
+ expect(messageStart.message.usage.input_tokens).toBe(10)
+
+ const messageDelta = events.find(event => event.type === 'message_delta')
+ expect(messageDelta.usage.output_tokens).toBe(7)
+ })
+})
diff --git a/src/services/api/gemini/client.ts b/src/services/api/gemini/client.ts
new file mode 100644
index 000000000..2c8b68f8e
--- /dev/null
+++ b/src/services/api/gemini/client.ts
@@ -0,0 +1,97 @@
+import { parseSSEFrames } from 'src/cli/transports/SSETransport.js'
+import { errorMessage } from 'src/utils/errors.js'
+import { getProxyFetchOptions } from 'src/utils/proxy.js'
+import type {
+ GeminiGenerateContentRequest,
+ GeminiStreamChunk,
+} from './types.js'
+
+const DEFAULT_GEMINI_BASE_URL =
+ 'https://generativelanguage.googleapis.com/v1beta'
+
+const STREAM_DECODE_OPTS: TextDecodeOptions = { stream: true }
+
+function getGeminiBaseUrl(): string {
+ return (process.env.GEMINI_BASE_URL || DEFAULT_GEMINI_BASE_URL).replace(
+ /\/+$/,
+ '',
+ )
+}
+
+function getGeminiModelPath(model: string): string {
+ const normalized = model.replace(/^\/+/, '')
+ return normalized.startsWith('models/') ? normalized : `models/${normalized}`
+}
+
+export async function* streamGeminiGenerateContent(params: {
+ model: string
+ body: GeminiGenerateContentRequest
+ signal: AbortSignal
+ fetchOverride?: typeof fetch
+}): AsyncGenerator<GeminiStreamChunk> {
+ const fetchImpl = params.fetchOverride ?? fetch
+ const url = `${getGeminiBaseUrl()}/${getGeminiModelPath(params.model)}:streamGenerateContent?alt=sse`
+
+ const response = await fetchImpl(url, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'x-goog-api-key': process.env.GEMINI_API_KEY || '',
+ },
+ body: JSON.stringify(params.body),
+ signal: params.signal,
+ ...getProxyFetchOptions({ forAnthropicAPI: false }),
+ })
+
+ if (!response.ok) {
+ const body = await response.text()
+ throw new Error(
+ `Gemini API request failed (${response.status} ${response.statusText}): ${body || 'empty response body'}`,
+ )
+ }
+
+ if (!response.body) {
+ throw new Error('Gemini API returned no response body')
+ }
+
+ const reader = response.body.getReader()
+ const decoder = new TextDecoder()
+ let buffer = ''
+
+ try {
+ while (true) {
+ const { done, value } = await reader.read()
+ if (done) break
+
+ buffer += decoder.decode(value, STREAM_DECODE_OPTS)
+ const { frames, remaining } = parseSSEFrames(buffer)
+ buffer = remaining
+
+ for (const frame of frames) {
+ if (!frame.data || frame.data === '[DONE]') continue
+ try {
+ yield JSON.parse(frame.data) as GeminiStreamChunk
+ } catch (error) {
+ throw new Error(
+ `Failed to parse Gemini SSE payload: ${errorMessage(error)}`,
+ )
+ }
+ }
+ }
+
+ buffer += decoder.decode()
+ const { frames } = parseSSEFrames(buffer)
+ for (const frame of frames) {
+ if (!frame.data || frame.data === '[DONE]') continue
+ try {
+ yield JSON.parse(frame.data) as GeminiStreamChunk
+ } catch (error) {
+ throw new Error(
+ `Failed to parse trailing Gemini SSE payload: ${errorMessage(error)}`,
+ )
+ }
+ }
+ } finally {
+ reader.releaseLock()
+ }
+}
diff --git a/src/services/api/gemini/convertMessages.ts b/src/services/api/gemini/convertMessages.ts
new file mode 100644
index 000000000..4ac3a209d
--- /dev/null
+++ b/src/services/api/gemini/convertMessages.ts
@@ -0,0 +1,278 @@
+import type {
+ BetaToolResultBlockParam,
+ BetaToolUseBlock,
+} from '@anthropic-ai/sdk/resources/beta/messages/messages.mjs'
+import type { AssistantMessage, UserMessage } from '../../../types/message.js'
+import { safeParseJSON } from '../../../utils/json.js'
+import type { SystemPrompt } from '../../../utils/systemPromptType.js'
+import {
+ GEMINI_THOUGHT_SIGNATURE_FIELD,
+ type GeminiContent,
+ type GeminiGenerateContentRequest,
+ type GeminiPart,
+} from './types.js'
+
+export function anthropicMessagesToGemini(
+ messages: (UserMessage | AssistantMessage)[],
+ systemPrompt: SystemPrompt,
+): Pick<GeminiGenerateContentRequest, 'contents' | 'systemInstruction'> {
+ const contents: GeminiContent[] = []
+ const toolNamesById = new Map<string, string>()
+
+ for (const msg of messages) {
+ if (msg.type === 'assistant') {
+ const content = convertInternalAssistantMessage(msg)
+ if (content.parts.length > 0) {
+ contents.push(content)
+ }
+
+ const assistantContent = msg.message.content
+ if (Array.isArray(assistantContent)) {
+ for (const block of assistantContent) {
+ if (typeof block !== 'string' && block.type === 'tool_use') {
+ toolNamesById.set(block.id, block.name)
+ }
+ }
+ }
+ continue
+ }
+
+ if (msg.type === 'user') {
+ const content = convertInternalUserMessage(msg, toolNamesById)
+ if (content.parts.length > 0) {
+ contents.push(content)
+ }
+ }
+ }
+
+ const systemText = systemPromptToText(systemPrompt)
+
+ return {
+ contents,
+ ...(systemText
+ ? {
+ systemInstruction: {
+ parts: [{ text: systemText }],
+ },
+ }
+ : {}),
+ }
+}
+
+function systemPromptToText(systemPrompt: SystemPrompt): string {
+ if (!systemPrompt || systemPrompt.length === 0) return ''
+ return systemPrompt.filter(Boolean).join('\n\n')
+}
+
+function convertInternalUserMessage(
+ msg: UserMessage,
+ toolNamesById: ReadonlyMap<string, string>,
+): GeminiContent {
+ const content = msg.message.content
+
+ if (typeof content === 'string') {
+ return {
+ role: 'user',
+ parts: createTextGeminiParts(content),
+ }
+ }
+
+ if (!Array.isArray(content)) {
+ return { role: 'user', parts: [] }
+ }
+
+ return {
+ role: 'user',
+ parts: content.flatMap(block =>
+ convertUserContentBlockToGeminiParts(block, toolNamesById),
+ ),
+ }
+}
+
+function convertUserContentBlockToGeminiParts(
+ block: string | Record<string, unknown>,
+ toolNamesById: ReadonlyMap,
+): GeminiPart[] {
+ if (typeof block === 'string') {
+ return createTextGeminiParts(block)
+ }
+
+ if (block.type === 'text') {
+ return createTextGeminiParts(block.text)
+ }
+
+ if (block.type === 'tool_result') {
+ const toolResult = block as unknown as BetaToolResultBlockParam
+ return [
+ {
+ functionResponse: {
+ name: toolNamesById.get(toolResult.tool_use_id) ?? toolResult.tool_use_id,
+ response: toolResultToResponseObject(toolResult),
+ },
+ },
+ ]
+ }
+
+ return []
+}
+
+function convertInternalAssistantMessage(msg: AssistantMessage): GeminiContent {
+ const content = msg.message.content
+
+ if (typeof content === 'string') {
+ return {
+ role: 'model',
+ parts: createTextGeminiParts(content),
+ }
+ }
+
+ if (!Array.isArray(content)) {
+ return { role: 'model', parts: [] }
+ }
+
+ const parts: GeminiPart[] = []
+ for (const block of content) {
+ if (typeof block === 'string') {
+ parts.push(...createTextGeminiParts(block))
+ continue
+ }
+
+ if (block.type === 'text') {
+ parts.push(
+ ...createTextGeminiParts(
+ block.text,
+ getGeminiThoughtSignature(block),
+ ),
+ )
+ continue
+ }
+
+ if (block.type === 'thinking') {
+ const thinkingPart = createThinkingGeminiPart(
+ block.thinking,
+ block.signature,
+ )
+ if (thinkingPart) {
+ parts.push(thinkingPart)
+ }
+ continue
+ }
+
+ if (block.type === 'tool_use') {
+ const toolUse = block as unknown as BetaToolUseBlock
+ parts.push({
+ functionCall: {
+ name: toolUse.name,
+ args: normalizeToolUseInput(toolUse.input),
+ },
+ ...(getGeminiThoughtSignature(block) && {
+ thoughtSignature: getGeminiThoughtSignature(block),
+ }),
+ })
+ }
+ }
+
+ return { role: 'model', parts }
+}
+
+function createTextGeminiParts(
+ value: unknown,
+ thoughtSignature?: string,
+): GeminiPart[] {
+ if (typeof value !== 'string' || value.length === 0) {
+ return []
+ }
+
+ return [
+ {
+ text: value,
+ ...(thoughtSignature && { thoughtSignature }),
+ },
+ ]
+}
+
+function createThinkingGeminiPart(
+ value: unknown,
+ thoughtSignature?: string,
+): GeminiPart | undefined {
+ if (typeof value !== 'string' || value.length === 0) {
+ return undefined
+ }
+
+ return {
+ text: value,
+ thought: true,
+ ...(thoughtSignature && { thoughtSignature }),
+ }
+}
+
+function normalizeToolUseInput(input: unknown): Record<string, unknown> {
+ if (typeof input === 'string') {
+ const parsed = safeParseJSON(input)
+ if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) {
+ return parsed as Record<string, unknown>
+ }
+ return parsed === null ? {} : { value: parsed }
+ }
+
+ if (input && typeof input === 'object' && !Array.isArray(input)) {
+ return input as Record<string, unknown>
+ }
+
+ return input === undefined ? {} : { value: input }
+}
+
+function toolResultToResponseObject(
+ block: BetaToolResultBlockParam,
+): Record<string, unknown> {
+ const result = normalizeToolResultContent(block.content)
+ if (
+ result &&
+ typeof result === 'object' &&
+ !Array.isArray(result)
+ ) {
+ return block.is_error ? { ...result, is_error: true } : result
+ }
+
+ return {
+ result,
+ ...(block.is_error ? { is_error: true } : {}),
+ }
+}
+
+function normalizeToolResultContent(content: unknown): unknown {
+ if (typeof content === 'string') {
+ const parsed = safeParseJSON(content)
+ return parsed ?? content
+ }
+
+ if (Array.isArray(content)) {
+ const text = content
+ .map(part => {
+ if (typeof part === 'string') return part
+ if (
+ part &&
+ typeof part === 'object' &&
+ 'text' in part &&
+ typeof part.text === 'string'
+ ) {
+ return part.text
+ }
+ return ''
+ })
+ .filter(Boolean)
+ .join('\n')
+
+ const parsed = safeParseJSON(text)
+ return parsed ?? text
+ }
+
+ return content ?? ''
+}
+
+function getGeminiThoughtSignature(block: Record<string, unknown>): string | undefined {
+ const signature = block[GEMINI_THOUGHT_SIGNATURE_FIELD]
+ return typeof signature === 'string' && signature.length > 0
+ ? signature
+ : undefined
+}
diff --git a/src/services/api/gemini/convertTools.ts b/src/services/api/gemini/convertTools.ts
new file mode 100644
index 000000000..e287fa88d
--- /dev/null
+++ b/src/services/api/gemini/convertTools.ts
@@ -0,0 +1,284 @@
+import type { BetaToolUnion } from '@anthropic-ai/sdk/resources/beta/messages/messages.mjs'
+import type {
+ GeminiFunctionCallingConfig,
+ GeminiTool,
+} from './types.js'
+
+const GEMINI_JSON_SCHEMA_TYPES = new Set([
+ 'string',
+ 'number',
+ 'integer',
+ 'boolean',
+ 'object',
+ 'array',
+ 'null',
+])
+
+function normalizeGeminiJsonSchemaType(
+ value: unknown,
+): string | string[] | undefined {
+ if (typeof value === 'string') {
+ return GEMINI_JSON_SCHEMA_TYPES.has(value) ? value : undefined
+ }
+
+ if (Array.isArray(value)) {
+ const normalized = value.filter(
+ (item): item is string =>
+ typeof item === 'string' && GEMINI_JSON_SCHEMA_TYPES.has(item),
+ )
+ const unique = Array.from(new Set(normalized))
+ if (unique.length === 0) return undefined
+ return unique.length === 1 ? unique[0] : unique
+ }
+
+ return undefined
+}
+
+function inferGeminiJsonSchemaTypeFromValue(value: unknown): string | undefined {
+ if (value === null) return 'null'
+ if (Array.isArray(value)) return 'array'
+ if (typeof value === 'string') return 'string'
+ if (typeof value === 'boolean') return 'boolean'
+ if (typeof value === 'number') {
+ return Number.isInteger(value) ? 'integer' : 'number'
+ }
+ if (typeof value === 'object') return 'object'
+ return undefined
+}
+
+function inferGeminiJsonSchemaTypeFromEnum(
+ values: unknown[],
+): string | string[] | undefined {
+ const inferred = values
+ .map(inferGeminiJsonSchemaTypeFromValue)
+ .filter((value): value is string => value !== undefined)
+ const unique = Array.from(new Set(inferred))
+ if (unique.length === 0) return undefined
+ return unique.length === 1 ? unique[0] : unique
+}
+
+function addNullToGeminiJsonSchemaType(
+ value: string | string[] | undefined,
+): string | string[] | undefined {
+ if (value === undefined) return ['null']
+ if (Array.isArray(value)) {
+ return value.includes('null') ? value : [...value, 'null']
+ }
+ return value === 'null' ? value : [value, 'null']
+}
+
+function sanitizeGeminiJsonSchemaProperties(
+ value: unknown,
+): Record<string, Record<string, unknown>> | undefined {
+ if (!value || typeof value !== 'object' || Array.isArray(value)) {
+ return undefined
+ }
+
+ const sanitizedEntries = Object.entries(value as Record<string, unknown>)
+ .map(([key, schema]) => [key, sanitizeGeminiJsonSchema(schema)] as const)
+ .filter(([, schema]) => Object.keys(schema).length > 0)
+
+ if (sanitizedEntries.length === 0) {
+ return undefined
+ }
+
+ return Object.fromEntries(sanitizedEntries)
+}
+
+function sanitizeGeminiJsonSchemaArray(
+ value: unknown,
+): Record<string, unknown>[] | undefined {
+ if (!Array.isArray(value)) return undefined
+
+ const sanitized = value
+ .map(item => sanitizeGeminiJsonSchema(item))
+ .filter(item => Object.keys(item).length > 0)
+
+ return sanitized.length > 0 ? sanitized : undefined
+}
+
+function sanitizeGeminiJsonSchema(
+ schema: unknown,
+): Record<string, unknown> {
+ if (!schema || typeof schema !== 'object' || Array.isArray(schema)) {
+ return {}
+ }
+
+ const source = schema as Record<string, unknown>
+ const result: Record<string, unknown> = {}
+
+ let type = normalizeGeminiJsonSchemaType(source.type)
+
+ if (source.const !== undefined) {
+ result.enum = [source.const]
+ type = type ?? inferGeminiJsonSchemaTypeFromValue(source.const)
+ } else if (Array.isArray(source.enum) && source.enum.length > 0) {
+ result.enum = source.enum
+ type = type ?? inferGeminiJsonSchemaTypeFromEnum(source.enum)
+ }
+
+ if (!type) {
+ if (source.properties && typeof source.properties === 'object') {
+ type = 'object'
+ } else if (source.items !== undefined || source.prefixItems !== undefined) {
+ type = 'array'
+ }
+ }
+
+ if (source.nullable === true) {
+ type = addNullToGeminiJsonSchemaType(type)
+ }
+
+ if (type) {
+ result.type = type
+ }
+
+ if (typeof source.title === 'string') {
+ result.title = source.title
+ }
+ if (typeof source.description === 'string') {
+ result.description = source.description
+ }
+ if (typeof source.format === 'string') {
+ result.format = source.format
+ }
+ if (typeof source.pattern === 'string') {
+ result.pattern = source.pattern
+ }
+ if (typeof source.minimum === 'number') {
+ result.minimum = source.minimum
+ } else if (typeof source.exclusiveMinimum === 'number') {
+ result.minimum = source.exclusiveMinimum
+ }
+ if (typeof source.maximum === 'number') {
+ result.maximum = source.maximum
+ } else if (typeof source.exclusiveMaximum === 'number') {
+ result.maximum = source.exclusiveMaximum
+ }
+ if (typeof source.minItems === 'number') {
+ result.minItems = source.minItems
+ }
+ if (typeof source.maxItems === 'number') {
+ result.maxItems = source.maxItems
+ }
+ if (typeof source.minLength === 'number') {
+ result.minLength = source.minLength
+ }
+ if (typeof source.maxLength === 'number') {
+ result.maxLength = source.maxLength
+ }
+ if (typeof source.minProperties === 'number') {
+ result.minProperties = source.minProperties
+ }
+ if (typeof source.maxProperties === 'number') {
+ result.maxProperties = source.maxProperties
+ }
+
+ const properties = sanitizeGeminiJsonSchemaProperties(source.properties)
+ if (properties) {
+ result.properties = properties
+ result.propertyOrdering = Object.keys(properties)
+ }
+
+ if (Array.isArray(source.required)) {
+ const required = source.required.filter(
+ (item): item is string => typeof item === 'string',
+ )
+ if (required.length > 0) {
+ result.required = required
+ }
+ }
+
+ if (typeof source.additionalProperties === 'boolean') {
+ result.additionalProperties = source.additionalProperties
+ } else {
+ const additionalProperties = sanitizeGeminiJsonSchema(
+ source.additionalProperties,
+ )
+ if (Object.keys(additionalProperties).length > 0) {
+ result.additionalProperties = additionalProperties
+ }
+ }
+
+ const items = sanitizeGeminiJsonSchema(source.items)
+ if (Object.keys(items).length > 0) {
+ result.items = items
+ }
+
+ const prefixItems = sanitizeGeminiJsonSchemaArray(source.prefixItems)
+ if (prefixItems) {
+ result.prefixItems = prefixItems
+ }
+
+ const anyOf = sanitizeGeminiJsonSchemaArray(source.anyOf ?? source.oneOf)
+ if (anyOf) {
+ result.anyOf = anyOf
+ }
+
+ return result
+}
+
+function sanitizeGeminiFunctionParameters(
+ schema: unknown,
+): Record<string, unknown> {
+ const sanitized = sanitizeGeminiJsonSchema(schema)
+ if (Object.keys(sanitized).length > 0) {
+ return sanitized
+ }
+
+ return {
+ type: 'object',
+ properties: {},
+ }
+}
+
+export function anthropicToolsToGemini(tools: BetaToolUnion[]): GeminiTool[] {
+ const functionDeclarations = tools
+ .filter(tool => {
+ return tool.type === 'custom' || !('type' in tool) || tool.type !== 'server'
+ })
+ .map(tool => {
+ const anyTool = tool as Record<string, unknown>
+ const name = (anyTool.name as string) || ''
+ const description = (anyTool.description as string) || ''
+ const inputSchema =
+ (anyTool.input_schema as Record<string, unknown> | undefined) ?? {
+ type: 'object',
+ properties: {},
+ }
+
+ return {
+ name,
+ description,
+ parametersJsonSchema: sanitizeGeminiFunctionParameters(inputSchema),
+ }
+ })
+
+ return functionDeclarations.length > 0
+ ? [{ functionDeclarations }]
+ : []
+}
+
+export function anthropicToolChoiceToGemini(
+ toolChoice: unknown,
+): GeminiFunctionCallingConfig | undefined {
+ if (!toolChoice || typeof toolChoice !== 'object') return undefined
+
+ const tc = toolChoice as Record<string, unknown>
+ const type = tc.type as string
+
+ switch (type) {
+ case 'auto':
+ return { mode: 'AUTO' }
+ case 'any':
+ return { mode: 'ANY' }
+ case 'tool':
+ return {
+ mode: 'ANY',
+ allowedFunctionNames:
+ typeof tc.name === 'string' ? [tc.name] : undefined,
+ }
+ default:
+ return undefined
+ }
+}
diff --git a/src/services/api/gemini/index.ts b/src/services/api/gemini/index.ts
new file mode 100644
index 000000000..64dff7458
--- /dev/null
+++ b/src/services/api/gemini/index.ts
@@ -0,0 +1,192 @@
+import type { BetaToolUnion } from '@anthropic-ai/sdk/resources/beta/messages/messages.mjs'
+import { randomUUID } from 'crypto'
+import type {
+ AssistantMessage,
+ Message,
+ StreamEvent,
+ SystemAPIErrorMessage,
+} from '../../../types/message.js'
+import { getEmptyToolPermissionContext, type Tools } from '../../../Tool.js'
+import { toolToAPISchema } from '../../../utils/api.js'
+import { logForDebugging } from '../../../utils/debug.js'
+import {
+ createAssistantAPIErrorMessage,
+ normalizeContentFromAPI,
+ normalizeMessagesForAPI,
+} from '../../../utils/messages.js'
+import type { SystemPrompt } from '../../../utils/systemPromptType.js'
+import type { ThinkingConfig } from '../../../utils/thinking.js'
+import type { Options } from '../claude.js'
+import { streamGeminiGenerateContent } from './client.js'
+import { anthropicMessagesToGemini } from './convertMessages.js'
+import {
+ anthropicToolChoiceToGemini,
+ anthropicToolsToGemini,
+} from './convertTools.js'
+import { resolveGeminiModel } from './modelMapping.js'
+import { adaptGeminiStreamToAnthropic } from './streamAdapter.js'
+import { GEMINI_THOUGHT_SIGNATURE_FIELD } from './types.js'
+
+export async function* queryModelGemini(
+ messages: Message[],
+ systemPrompt: SystemPrompt,
+ tools: Tools,
+ signal: AbortSignal,
+ options: Options,
+ thinkingConfig: ThinkingConfig,
+): AsyncGenerator<
+ StreamEvent | AssistantMessage | SystemAPIErrorMessage,
+ void
+> {
+ try {
+ const geminiModel = resolveGeminiModel(options.model)
+ const messagesForAPI = normalizeMessagesForAPI(messages, tools)
+
+ const toolSchemas = await Promise.all(
+ tools.map(tool =>
+ toolToAPISchema(tool, {
+ getToolPermissionContext: options.getToolPermissionContext,
+ tools,
+ agents: options.agents,
+ allowedAgentTypes: options.allowedAgentTypes,
+ model: options.model,
+ }),
+ ),
+ )
+
+ const standardTools = toolSchemas.filter(
+ (t): t is BetaToolUnion & { type: string } => {
+ const anyTool = t as Record<string, unknown>
+ return (
+ anyTool.type !== 'advisor_20260301' &&
+ anyTool.type !== 'computer_20250124'
+ )
+ },
+ )
+
+ const { contents, systemInstruction } = anthropicMessagesToGemini(
+ messagesForAPI,
+ systemPrompt,
+ )
+ const geminiTools = anthropicToolsToGemini(standardTools)
+ const toolChoice = anthropicToolChoiceToGemini(options.toolChoice)
+
+ const stream = streamGeminiGenerateContent({
+ model: geminiModel,
+ signal,
+ fetchOverride: options.fetchOverride as typeof fetch | undefined,
+ body: {
+ contents,
+ ...(systemInstruction && { systemInstruction }),
+ ...(geminiTools.length > 0 && { tools: geminiTools }),
+ ...(toolChoice && {
+ toolConfig: {
+ functionCallingConfig: toolChoice,
+ },
+ }),
+ generationConfig: {
+ ...(options.temperatureOverride !== undefined && {
+ temperature: options.temperatureOverride,
+ }),
+ ...(thinkingConfig.type !== 'disabled' && {
+ thinkingConfig: {
+ includeThoughts: true,
+ ...(thinkingConfig.type === 'enabled' && {
+ thinkingBudget: thinkingConfig.budgetTokens,
+ }),
+ },
+ }),
+ },
+ },
+ })
+
+ logForDebugging(
+ `[Gemini] Calling model=${geminiModel}, messages=${contents.length}, tools=${geminiTools.length}`,
+ )
+
+ const adaptedStream = adaptGeminiStreamToAnthropic(stream, geminiModel)
+ const contentBlocks: Record<number, any> = {}
+ let partialMessage: any = undefined
+ let ttftMs = 0
+ const start = Date.now()
+
+ for await (const event of adaptedStream) {
+ switch (event.type) {
+ case 'message_start':
+ partialMessage = (event as any).message
+ ttftMs = Date.now() - start
+ break
+ case 'content_block_start': {
+ const idx = (event as any).index
+ const cb = (event as any).content_block
+ if (cb.type === 'tool_use') {
+ contentBlocks[idx] = { ...cb, input: '' }
+ } else if (cb.type === 'text') {
+ contentBlocks[idx] = { ...cb, text: '' }
+ } else if (cb.type === 'thinking') {
+ contentBlocks[idx] = { ...cb, thinking: '', signature: '' }
+ } else {
+ contentBlocks[idx] = { ...cb }
+ }
+ break
+ }
+ case 'content_block_delta': {
+ const idx = (event as any).index
+ const delta = (event as any).delta
+ const block = contentBlocks[idx]
+ if (!block) break
+
+ if (delta.type === 'text_delta') {
+ block.text = (block.text || '') + delta.text
+ } else if (delta.type === 'input_json_delta') {
+ block.input = (block.input || '') + delta.partial_json
+ } else if (delta.type === 'thinking_delta') {
+ block.thinking = (block.thinking || '') + delta.thinking
+ } else if (delta.type === 'signature_delta') {
+ if (block.type === 'thinking') {
+ block.signature = delta.signature
+ } else {
+ block[GEMINI_THOUGHT_SIGNATURE_FIELD] = delta.signature
+ }
+ }
+ break
+ }
+ case 'content_block_stop': {
+ const idx = (event as any).index
+ const block = contentBlocks[idx]
+ if (!block || !partialMessage) break
+
+ const message: AssistantMessage = {
+ message: {
+ ...partialMessage,
+ content: normalizeContentFromAPI([block], tools, options.agentId),
+ },
+ requestId: undefined,
+ type: 'assistant',
+ uuid: randomUUID(),
+ timestamp: new Date().toISOString(),
+ }
+ yield message
+ break
+ }
+ case 'message_delta':
+ case 'message_stop':
+ break
+ }
+
+ yield {
+ type: 'stream_event',
+ event,
+ ...(event.type === 'message_start' ? { ttftMs } : undefined),
+ } as StreamEvent
+ }
+ } catch (error) {
+ const errorMessage = error instanceof Error ? error.message : String(error)
+ logForDebugging(`[Gemini] Error: ${errorMessage}`, { level: 'error' })
+ yield createAssistantAPIErrorMessage({
+ content: `API Error: ${errorMessage}`,
+ apiError: 'api_error',
+ error: error instanceof Error ? error : new Error(String(error)),
+ })
+ }
+}
diff --git a/src/services/api/gemini/modelMapping.ts b/src/services/api/gemini/modelMapping.ts
new file mode 100644
index 000000000..a18571174
--- /dev/null
+++ b/src/services/api/gemini/modelMapping.ts
@@ -0,0 +1,30 @@
+function getModelFamily(model: string): 'haiku' | 'sonnet' | 'opus' | null {
+ if (/haiku/i.test(model)) return 'haiku'
+ if (/opus/i.test(model)) return 'opus'
+ if (/sonnet/i.test(model)) return 'sonnet'
+ return null
+}
+
+export function resolveGeminiModel(anthropicModel: string): string {
+ if (process.env.GEMINI_MODEL) {
+ return process.env.GEMINI_MODEL
+ }
+
+ const cleanModel = anthropicModel.replace(/\[1m\]$/i, '')
+ const family = getModelFamily(cleanModel)
+
+ if (!family) {
+ return cleanModel
+ }
+
+ const sharedEnvVar = `ANTHROPIC_DEFAULT_${family.toUpperCase()}_MODEL`
+ const resolvedModel = process.env[sharedEnvVar]
+ if (resolvedModel) {
+ return resolvedModel
+ }
+
+ throw new Error(
+ `Gemini provider requires GEMINI_MODEL or ${sharedEnvVar} to be configured.`,
+ )
+}
+
diff --git a/src/services/api/gemini/streamAdapter.ts b/src/services/api/gemini/streamAdapter.ts
new file mode 100644
index 000000000..56a30c3e7
--- /dev/null
+++ b/src/services/api/gemini/streamAdapter.ts
@@ -0,0 +1,244 @@
+import type { BetaRawMessageStreamEvent } from '@anthropic-ai/sdk/resources/beta/messages/messages.mjs'
+import { randomUUID } from 'crypto'
+import type { GeminiPart, GeminiStreamChunk } from './types.js'
+
+export async function* adaptGeminiStreamToAnthropic(
+ stream: AsyncIterable<GeminiStreamChunk>,
+ model: string,
+): AsyncGenerator<BetaRawMessageStreamEvent> {
+ const messageId = `msg_${randomUUID().replace(/-/g, '').slice(0, 24)}`
+ let started = false
+ let stopped = false
+ let nextContentIndex = 0
+ let openTextLikeBlock:
+ | { index: number; type: 'text' | 'thinking' }
+ | null = null
+ let sawToolUse = false
+ let finishReason: string | undefined
+ let inputTokens = 0
+ let outputTokens = 0
+
+ for await (const chunk of stream) {
+ const usage = chunk.usageMetadata
+ if (usage) {
+ inputTokens = usage.promptTokenCount ?? inputTokens
+ outputTokens =
+ (usage.candidatesTokenCount ?? 0) + (usage.thoughtsTokenCount ?? 0)
+ }
+
+ if (!started) {
+ started = true
+ yield {
+ type: 'message_start',
+ message: {
+ id: messageId,
+ type: 'message',
+ role: 'assistant',
+ content: [],
+ model,
+ stop_reason: null,
+ stop_sequence: null,
+ usage: {
+ input_tokens: inputTokens,
+ output_tokens: 0,
+ cache_creation_input_tokens: 0,
+ cache_read_input_tokens: 0,
+ },
+ },
+ } as BetaRawMessageStreamEvent
+ }
+
+ const candidate = chunk.candidates?.[0]
+ const parts = candidate?.content?.parts ?? []
+
+ for (const part of parts) {
+ if (part.functionCall) {
+ if (openTextLikeBlock) {
+ yield {
+ type: 'content_block_stop',
+ index: openTextLikeBlock.index,
+ } as BetaRawMessageStreamEvent
+ openTextLikeBlock = null
+ }
+
+ sawToolUse = true
+ const toolIndex = nextContentIndex++
+ const toolId = `toolu_${randomUUID().replace(/-/g, '').slice(0, 24)}`
+ yield {
+ type: 'content_block_start',
+ index: toolIndex,
+ content_block: {
+ type: 'tool_use',
+ id: toolId,
+ name: part.functionCall.name || '',
+ input: {},
+ },
+ } as BetaRawMessageStreamEvent
+
+ if (part.thoughtSignature) {
+ yield {
+ type: 'content_block_delta',
+ index: toolIndex,
+ delta: {
+ type: 'signature_delta',
+ signature: part.thoughtSignature,
+ },
+ } as BetaRawMessageStreamEvent
+ }
+
+ if (part.functionCall.args && Object.keys(part.functionCall.args).length > 0) {
+ yield {
+ type: 'content_block_delta',
+ index: toolIndex,
+ delta: {
+ type: 'input_json_delta',
+ partial_json: JSON.stringify(part.functionCall.args),
+ },
+ } as BetaRawMessageStreamEvent
+ }
+
+ yield {
+ type: 'content_block_stop',
+ index: toolIndex,
+ } as BetaRawMessageStreamEvent
+ continue
+ }
+
+ const textLikeType = getTextLikeBlockType(part)
+ if (textLikeType) {
+ if (!openTextLikeBlock || openTextLikeBlock.type !== textLikeType) {
+ if (openTextLikeBlock) {
+ yield {
+ type: 'content_block_stop',
+ index: openTextLikeBlock.index,
+ } as BetaRawMessageStreamEvent
+ }
+
+ openTextLikeBlock = {
+ index: nextContentIndex++,
+ type: textLikeType,
+ }
+
+ yield {
+ type: 'content_block_start',
+ index: openTextLikeBlock.index,
+ content_block:
+ textLikeType === 'thinking'
+ ? {
+ type: 'thinking',
+ thinking: '',
+ signature: '',
+ }
+ : {
+ type: 'text',
+ text: '',
+ },
+ } as BetaRawMessageStreamEvent
+ }
+
+ if (part.text) {
+ yield {
+ type: 'content_block_delta',
+ index: openTextLikeBlock.index,
+ delta:
+ textLikeType === 'thinking'
+ ? {
+ type: 'thinking_delta',
+ thinking: part.text,
+ }
+ : {
+ type: 'text_delta',
+ text: part.text,
+ },
+ } as BetaRawMessageStreamEvent
+ }
+
+ if (part.thoughtSignature) {
+ yield {
+ type: 'content_block_delta',
+ index: openTextLikeBlock.index,
+ delta: {
+ type: 'signature_delta',
+ signature: part.thoughtSignature,
+ },
+ } as BetaRawMessageStreamEvent
+ }
+
+ continue
+ }
+
+ if (part.thoughtSignature && openTextLikeBlock) {
+ yield {
+ type: 'content_block_delta',
+ index: openTextLikeBlock.index,
+ delta: {
+ type: 'signature_delta',
+ signature: part.thoughtSignature,
+ },
+ } as BetaRawMessageStreamEvent
+ }
+ }
+
+ if (candidate?.finishReason) {
+ finishReason = candidate.finishReason
+ }
+ }
+
+ if (!started) {
+ return
+ }
+
+ if (openTextLikeBlock) {
+ yield {
+ type: 'content_block_stop',
+ index: openTextLikeBlock.index,
+ } as BetaRawMessageStreamEvent
+ }
+
+ if (!stopped) {
+ yield {
+ type: 'message_delta',
+ delta: {
+ stop_reason: mapGeminiFinishReason(finishReason, sawToolUse),
+ stop_sequence: null,
+ },
+ usage: {
+ output_tokens: outputTokens,
+ },
+ } as BetaRawMessageStreamEvent
+
+ yield {
+ type: 'message_stop',
+ } as BetaRawMessageStreamEvent
+ stopped = true
+ }
+}
+
+function getTextLikeBlockType(
+ part: GeminiPart,
+): 'text' | 'thinking' | null {
+ if (typeof part.text !== 'string') {
+ return null
+ }
+ return part.thought ? 'thinking' : 'text'
+}
+
+function mapGeminiFinishReason(
+ reason: string | undefined,
+ sawToolUse: boolean,
+): string {
+ switch (reason) {
+ case 'MAX_TOKENS':
+ return 'max_tokens'
+ case 'STOP':
+ case 'FINISH_REASON_UNSPECIFIED':
+ case 'SAFETY':
+ case 'RECITATION':
+ case 'BLOCKLIST':
+ case 'PROHIBITED_CONTENT':
+ case 'SPII':
+ case 'MALFORMED_FUNCTION_CALL':
+ default:
+ return sawToolUse ? 'tool_use' : 'end_turn'
+ }
+}
diff --git a/src/services/api/gemini/types.ts b/src/services/api/gemini/types.ts
new file mode 100644
index 000000000..829a09f13
--- /dev/null
+++ b/src/services/api/gemini/types.ts
@@ -0,0 +1,80 @@
+export const GEMINI_THOUGHT_SIGNATURE_FIELD = '_geminiThoughtSignature'
+
+export type GeminiFunctionCall = {
+ name?: string
+ args?: Record<string, unknown>
+}
+
+export type GeminiFunctionResponse = {
+ name?: string
+ response?: Record<string, unknown>
+}
+
+export type GeminiPart = {
+ text?: string
+ thought?: boolean
+ thoughtSignature?: string
+ functionCall?: GeminiFunctionCall
+ functionResponse?: GeminiFunctionResponse
+}
+
+export type GeminiContent = {
+ role: 'user' | 'model'
+ parts: GeminiPart[]
+}
+
+export type GeminiFunctionDeclaration = {
+ name: string
+ description?: string
+ parameters?: Record<string, unknown>
+ parametersJsonSchema?: Record<string, unknown>
+}
+
+export type GeminiTool = {
+ functionDeclarations: GeminiFunctionDeclaration[]
+}
+
+export type GeminiFunctionCallingConfig = {
+ mode: 'AUTO' | 'ANY' | 'NONE'
+ allowedFunctionNames?: string[]
+}
+
+export type GeminiGenerateContentRequest = {
+ contents: GeminiContent[]
+ systemInstruction?: {
+ parts: Array<{ text: string }>
+ }
+ tools?: GeminiTool[]
+ toolConfig?: {
+ functionCallingConfig: GeminiFunctionCallingConfig
+ }
+ generationConfig?: {
+ temperature?: number
+ thinkingConfig?: {
+ includeThoughts?: boolean
+ thinkingBudget?: number
+ }
+ }
+}
+
+export type GeminiUsageMetadata = {
+ promptTokenCount?: number
+ candidatesTokenCount?: number
+ thoughtsTokenCount?: number
+ totalTokenCount?: number
+}
+
+export type GeminiCandidate = {
+ content?: {
+ role?: string
+ parts?: GeminiPart[]
+ }
+ finishReason?: string
+ index?: number
+}
+
+export type GeminiStreamChunk = {
+ candidates?: GeminiCandidate[]
+ usageMetadata?: GeminiUsageMetadata
+ modelVersion?: string
+}
diff --git a/src/services/tokenEstimation.ts b/src/services/tokenEstimation.ts
index 07a5c9e0b..c59d53a3a 100644
--- a/src/services/tokenEstimation.ts
+++ b/src/services/tokenEstimation.ts
@@ -143,11 +143,16 @@ export async function countMessagesTokensWithAPI(
): Promise<number> {
return withTokenCountVCR(messages, tools, async () => {
try {
+ const provider = getAPIProvider()
+ if (provider === 'gemini') {
+ return roughTokenCountEstimationForAPIRequest(messages, tools)
+ }
+
const model = getMainLoopModel()
const betas = getModelBetas(model)
const containsThinking = hasThinkingBlocks(messages)
- if (getAPIProvider() === 'bedrock') {
+ if (provider === 'bedrock') {
// @anthropic-sdk/bedrock-sdk doesn't support countTokens currently
return countTokensWithBedrock({
model: normalizeModelStringForAPI(model),
@@ -252,6 +257,11 @@ export async function countTokensViaHaikuFallback(
messages: Anthropic.Beta.Messages.BetaMessageParam[],
tools: Anthropic.Beta.Messages.BetaToolUnion[],
): Promise<number> {
+ const provider = getAPIProvider()
+ if (provider === 'gemini') {
+ return roughTokenCountEstimationForAPIRequest(messages, tools)
+ }
+
// Check if messages contain thinking blocks
const containsThinking = hasThinkingBlocks(messages)
@@ -388,6 +398,29 @@ function roughTokenCountEstimationForContent(
return totalTokens
}
+function roughTokenCountEstimationForAPIRequest(
+ messages: Anthropic.Beta.Messages.BetaMessageParam[],
+ tools: Anthropic.Beta.Messages.BetaToolUnion[],
+): number {
+ let totalTokens = 0
+
+ for (const message of messages) {
+ totalTokens += roughTokenCountEstimationForContent(
+ message.content as
+ | string
+ | Array<Anthropic.ContentBlock>
+ | Array<Anthropic.ContentBlockParam>
+ | undefined,
+ )
+ }
+
+ if (tools.length > 0) {
+ totalTokens += roughTokenCountEstimation(jsonStringify(tools))
+ }
+
+ return totalTokens
+}
+
function roughTokenCountEstimationForBlock(
block: string | Anthropic.ContentBlock | Anthropic.ContentBlockParam,
): number {
diff --git a/src/utils/__tests__/messages.test.ts b/src/utils/__tests__/messages.test.ts
index 96d21934b..316b34250 100644
--- a/src/utils/__tests__/messages.test.ts
+++ b/src/utils/__tests__/messages.test.ts
@@ -20,6 +20,7 @@ import {
isNotEmptyMessage,
deriveUUID,
normalizeMessages,
+ normalizeMessagesForAPI,
isClassifierDenial,
buildYoloRejectionMessage,
buildClassifierUnavailableMessage,
@@ -486,3 +487,23 @@ describe("normalizeMessages", () => {
expect(normalized.length).toBe(1);
});
});
+
+describe("normalizeMessagesForAPI", () => {
+ test("preserves Gemini thought signature metadata on tool_use blocks", () => {
+ const assistant = makeAssistantMsg([
+ {
+ type: "tool_use",
+ id: "tool-1",
+ name: "Bash",
+ input: { command: "pwd" },
+ _geminiThoughtSignature: "sig-123",
+ },
+ ]);
+
+ const normalized = normalizeMessagesForAPI([assistant]);
+ const block = (normalized[0] as AssistantMessage).message.content[0] as any;
+
+ expect(block.type).toBe("tool_use");
+ expect(block._geminiThoughtSignature).toBe("sig-123");
+ });
+});
diff --git a/src/utils/auth.ts b/src/utils/auth.ts
index d278c3a4a..aa722cf07 100644
--- a/src/utils/auth.ts
+++ b/src/utils/auth.ts
@@ -118,7 +118,9 @@ export function isAnthropicAuthEnabled(): boolean {
isEnvTruthy(process.env.CLAUDE_CODE_USE_VERTEX) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY) ||
(settings as any).modelType === 'openai' ||
- !!process.env.OPENAI_BASE_URL
+ (settings as any).modelType === 'gemini' ||
+ !!process.env.OPENAI_BASE_URL ||
+ !!process.env.GEMINI_BASE_URL
const apiKeyHelper = settings.apiKeyHelper
const hasExternalAuthToken =
process.env.ANTHROPIC_AUTH_TOKEN ||
diff --git a/src/utils/managedEnvConstants.ts b/src/utils/managedEnvConstants.ts
index 12c565658..5e5f57545 100644
--- a/src/utils/managedEnvConstants.ts
+++ b/src/utils/managedEnvConstants.ts
@@ -18,6 +18,7 @@ const PROVIDER_MANAGED_ENV_VARS = new Set([
'CLAUDE_CODE_USE_BEDROCK',
'CLAUDE_CODE_USE_VERTEX',
'CLAUDE_CODE_USE_FOUNDRY',
+ 'CLAUDE_CODE_USE_GEMINI',
// Endpoint config (base URLs, project/resource identifiers)
'ANTHROPIC_BASE_URL',
'ANTHROPIC_BEDROCK_BASE_URL',
@@ -25,6 +26,7 @@ const PROVIDER_MANAGED_ENV_VARS = new Set([
'ANTHROPIC_FOUNDRY_BASE_URL',
'ANTHROPIC_FOUNDRY_RESOURCE',
'ANTHROPIC_VERTEX_PROJECT_ID',
+ 'GEMINI_BASE_URL',
// Region routing (per-model VERTEX_REGION_CLAUDE_* handled by prefix below)
'CLOUD_ML_REGION',
// Auth
@@ -36,6 +38,7 @@ const PROVIDER_MANAGED_ENV_VARS = new Set([
'CLAUDE_CODE_SKIP_BEDROCK_AUTH',
'CLAUDE_CODE_SKIP_VERTEX_AUTH',
'CLAUDE_CODE_SKIP_FOUNDRY_AUTH',
+ 'GEMINI_API_KEY',
// Model defaults — often set to provider-specific ID formats
'ANTHROPIC_MODEL',
'ANTHROPIC_DEFAULT_HAIKU_MODEL',
@@ -53,6 +56,7 @@ const PROVIDER_MANAGED_ENV_VARS = new Set([
'ANTHROPIC_SMALL_FAST_MODEL',
'ANTHROPIC_SMALL_FAST_MODEL_AWS_REGION',
'CLAUDE_CODE_SUBAGENT_MODEL',
+ 'GEMINI_MODEL',
])
const PROVIDER_MANAGED_ENV_PREFIXES = [
@@ -147,7 +151,9 @@ export const SAFE_ENV_VARS = new Set([
'CLAUDE_CODE_SUBAGENT_MODEL',
'CLAUDE_CODE_USE_BEDROCK',
'CLAUDE_CODE_USE_FOUNDRY',
+ 'CLAUDE_CODE_USE_GEMINI',
'CLAUDE_CODE_USE_VERTEX',
+ 'GEMINI_MODEL',
'DISABLE_AUTOUPDATER',
'DISABLE_BUG_COMMAND',
'DISABLE_COST_WARNINGS',
diff --git a/src/utils/messages.ts b/src/utils/messages.ts
index 50fb61367..607d06d18 100644
--- a/src/utils/messages.ts
+++ b/src/utils/messages.ts
@@ -2249,10 +2249,13 @@ export function normalizeMessagesForAPI(
}
}
- // When tool search is NOT enabled, explicitly construct tool_use
- // block with only standard API fields to avoid sending fields like
- // 'caller' that may be stored in sessions from tool search runs
+ // When tool search is NOT enabled, strip tool-search-only fields
+ // like 'caller', but preserve other provider metadata attached to
+ // the block (for example Gemini thought signatures on tool_use).
+ const { caller: _caller, ...toolUseRest } = block as ToolUseBlock &
+ Record<string, unknown> & { caller?: unknown }
return {
+ ...toolUseRest,
type: 'tool_use' as const,
id: toolUseBlk.id,
name: canonicalName,
diff --git a/src/utils/model/__tests__/providers.test.ts b/src/utils/model/__tests__/providers.test.ts
index b028f1084..80c22faf8 100644
--- a/src/utils/model/__tests__/providers.test.ts
+++ b/src/utils/model/__tests__/providers.test.ts
@@ -1,8 +1,18 @@
-import { describe, expect, test, beforeEach, afterEach } from "bun:test";
-import { getAPIProvider, isFirstPartyAnthropicBaseUrl } from "../providers";
+import { afterEach, beforeEach, describe, expect, mock, test } from "bun:test";
+
+let mockedModelType: "gemini" | undefined;
+
+mock.module("../../settings/settings.js", () => ({
+ getInitialSettings: () =>
+ mockedModelType ? { modelType: mockedModelType } : {},
+}));
+
+const { getAPIProvider, isFirstPartyAnthropicBaseUrl } =
+ await import("../providers");
describe("getAPIProvider", () => {
const envKeys = [
+ "CLAUDE_CODE_USE_GEMINI",
"CLAUDE_CODE_USE_BEDROCK",
"CLAUDE_CODE_USE_VERTEX",
"CLAUDE_CODE_USE_FOUNDRY",
@@ -10,10 +20,15 @@ describe("getAPIProvider", () => {
const savedEnv: Record<string, string | undefined> = {};
beforeEach(() => {
- for (const key of envKeys) savedEnv[key] = process.env[key];
+ mockedModelType = undefined;
+ for (const key of envKeys) {
+ savedEnv[key] = process.env[key];
+ delete process.env[key];
+ }
});
afterEach(() => {
+ mockedModelType = undefined;
for (const key of envKeys) {
if (savedEnv[key] !== undefined) {
process.env[key] = savedEnv[key];
@@ -24,12 +39,25 @@ describe("getAPIProvider", () => {
});
test('returns "firstParty" by default', () => {
- delete process.env.CLAUDE_CODE_USE_BEDROCK;
- delete process.env.CLAUDE_CODE_USE_VERTEX;
- delete process.env.CLAUDE_CODE_USE_FOUNDRY;
expect(getAPIProvider()).toBe("firstParty");
});
+ test('returns "gemini" when modelType is gemini', () => {
+ mockedModelType = "gemini";
+ expect(getAPIProvider()).toBe("gemini");
+ });
+
+ test("modelType takes precedence over environment variables", () => {
+ mockedModelType = "gemini";
+ process.env.CLAUDE_CODE_USE_BEDROCK = "1";
+ expect(getAPIProvider()).toBe("gemini");
+ });
+
+ test('returns "gemini" when CLAUDE_CODE_USE_GEMINI is set', () => {
+ process.env.CLAUDE_CODE_USE_GEMINI = "1";
+ expect(getAPIProvider()).toBe("gemini");
+ });
+
test('returns "bedrock" when CLAUDE_CODE_USE_BEDROCK is set', () => {
process.env.CLAUDE_CODE_USE_BEDROCK = "1";
expect(getAPIProvider()).toBe("bedrock");
@@ -45,6 +73,12 @@ describe("getAPIProvider", () => {
expect(getAPIProvider()).toBe("foundry");
});
+ test("bedrock takes precedence over gemini", () => {
+ process.env.CLAUDE_CODE_USE_BEDROCK = "1";
+ process.env.CLAUDE_CODE_USE_GEMINI = "1";
+ expect(getAPIProvider()).toBe("bedrock");
+ });
+
test("bedrock takes precedence over vertex", () => {
process.env.CLAUDE_CODE_USE_BEDROCK = "1";
process.env.CLAUDE_CODE_USE_VERTEX = "1";
diff --git a/src/utils/model/configs.ts b/src/utils/model/configs.ts
index d9bfae0f9..83f971385 100644
--- a/src/utils/model/configs.ts
+++ b/src/utils/model/configs.ts
@@ -12,6 +12,7 @@ export const CLAUDE_3_7_SONNET_CONFIG = {
vertex: 'claude-3-7-sonnet@20250219',
foundry: 'claude-3-7-sonnet',
openai: 'claude-3-7-sonnet-20250219',
+ gemini: 'claude-3-7-sonnet-20250219',
} as const satisfies ModelConfig
export const CLAUDE_3_5_V2_SONNET_CONFIG = {
@@ -20,6 +21,7 @@ export const CLAUDE_3_5_V2_SONNET_CONFIG = {
vertex: 'claude-3-5-sonnet-v2@20241022',
foundry: 'claude-3-5-sonnet',
openai: 'claude-3-5-sonnet-20241022',
+ gemini: 'claude-3-5-sonnet-20241022',
} as const satisfies ModelConfig
export const CLAUDE_3_5_HAIKU_CONFIG = {
@@ -28,6 +30,7 @@ export const CLAUDE_3_5_HAIKU_CONFIG = {
vertex: 'claude-3-5-haiku@20241022',
foundry: 'claude-3-5-haiku',
openai: 'claude-3-5-haiku-20241022',
+ gemini: 'claude-3-5-haiku-20241022',
} as const satisfies ModelConfig
export const CLAUDE_HAIKU_4_5_CONFIG = {
@@ -36,6 +39,7 @@ export const CLAUDE_HAIKU_4_5_CONFIG = {
vertex: 'claude-haiku-4-5@20251001',
foundry: 'claude-haiku-4-5',
openai: 'claude-haiku-4-5-20251001',
+ gemini: 'claude-haiku-4-5-20251001',
} as const satisfies ModelConfig
export const CLAUDE_SONNET_4_CONFIG = {
@@ -44,6 +48,7 @@ export const CLAUDE_SONNET_4_CONFIG = {
vertex: 'claude-sonnet-4@20250514',
foundry: 'claude-sonnet-4',
openai: 'claude-sonnet-4-20250514',
+ gemini: 'claude-sonnet-4-20250514',
} as const satisfies ModelConfig
export const CLAUDE_SONNET_4_5_CONFIG = {
@@ -52,6 +57,7 @@ export const CLAUDE_SONNET_4_5_CONFIG = {
vertex: 'claude-sonnet-4-5@20250929',
foundry: 'claude-sonnet-4-5',
openai: 'claude-sonnet-4-5-20250929',
+ gemini: 'claude-sonnet-4-5-20250929',
} as const satisfies ModelConfig
export const CLAUDE_OPUS_4_CONFIG = {
@@ -60,6 +66,7 @@ export const CLAUDE_OPUS_4_CONFIG = {
vertex: 'claude-opus-4@20250514',
foundry: 'claude-opus-4',
openai: 'claude-opus-4-20250514',
+ gemini: 'claude-opus-4-20250514',
} as const satisfies ModelConfig
export const CLAUDE_OPUS_4_1_CONFIG = {
@@ -68,6 +75,7 @@ export const CLAUDE_OPUS_4_1_CONFIG = {
vertex: 'claude-opus-4-1@20250805',
foundry: 'claude-opus-4-1',
openai: 'claude-opus-4-1-20250805',
+ gemini: 'claude-opus-4-1-20250805',
} as const satisfies ModelConfig
export const CLAUDE_OPUS_4_5_CONFIG = {
@@ -76,6 +84,7 @@ export const CLAUDE_OPUS_4_5_CONFIG = {
vertex: 'claude-opus-4-5@20251101',
foundry: 'claude-opus-4-5',
openai: 'claude-opus-4-5-20251101',
+ gemini: 'claude-opus-4-5-20251101',
} as const satisfies ModelConfig
export const CLAUDE_OPUS_4_6_CONFIG = {
@@ -84,6 +93,7 @@ export const CLAUDE_OPUS_4_6_CONFIG = {
vertex: 'claude-opus-4-6',
foundry: 'claude-opus-4-6',
openai: 'claude-opus-4-6',
+ gemini: 'claude-opus-4-6',
} as const satisfies ModelConfig
export const CLAUDE_SONNET_4_6_CONFIG = {
@@ -92,6 +102,7 @@ export const CLAUDE_SONNET_4_6_CONFIG = {
vertex: 'claude-sonnet-4-6',
foundry: 'claude-sonnet-4-6',
openai: 'claude-sonnet-4-6',
+ gemini: 'claude-sonnet-4-6',
} as const satisfies ModelConfig
// @[MODEL LAUNCH]: Register the new config here.
diff --git a/src/utils/model/providers.ts b/src/utils/model/providers.ts
index a082cd298..a6f26cad9 100644
--- a/src/utils/model/providers.ts
+++ b/src/utils/model/providers.ts
@@ -2,23 +2,32 @@ import type { AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS } from
import { getInitialSettings } from '../settings/settings.js'
import { isEnvTruthy } from '../envUtils.js'
-export type APIProvider = 'firstParty' | 'bedrock' | 'vertex' | 'foundry' | 'openai'
+export type APIProvider =
+ | 'firstParty'
+ | 'bedrock'
+ | 'vertex'
+ | 'foundry'
+ | 'openai'
+ | 'gemini'
export function getAPIProvider(): APIProvider {
// 1. Check settings.json modelType field (highest priority)
const modelType = getInitialSettings().modelType
if (modelType === 'openai') return 'openai'
+ if (modelType === 'gemini') return 'gemini'
// 2. Check environment variables (backward compatibility)
- return isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
- ? 'openai'
- : isEnvTruthy(process.env.CLAUDE_CODE_USE_BEDROCK)
- ? 'bedrock'
- : isEnvTruthy(process.env.CLAUDE_CODE_USE_VERTEX)
- ? 'vertex'
- : isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY)
- ? 'foundry'
- : 'firstParty'
+ return isEnvTruthy(process.env.CLAUDE_CODE_USE_BEDROCK)
+ ? 'bedrock'
+ : isEnvTruthy(process.env.CLAUDE_CODE_USE_VERTEX)
+ ? 'vertex'
+ : isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY)
+ ? 'foundry'
+ : isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
+ ? 'openai'
+ : isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
+ ? 'gemini'
+ : 'firstParty'
}
export function getAPIProviderForStatsig(): AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS {
diff --git a/src/utils/settings/__tests__/config.test.ts b/src/utils/settings/__tests__/config.test.ts
index f8bf1b6ab..0b527ba32 100644
--- a/src/utils/settings/__tests__/config.test.ts
+++ b/src/utils/settings/__tests__/config.test.ts
@@ -474,3 +474,10 @@ describe("formatZodError", () => {
}
});
});
+
+describe("gemini settings", () => {
+ test("accepts gemini modelType", () => {
+ const result = SettingsSchema().safeParse({ modelType: "gemini" });
+ expect(result.success).toBe(true);
+ });
+});
diff --git a/src/utils/settings/types.ts b/src/utils/settings/types.ts
index 9c55a7f45..7e66623ca 100644
--- a/src/utils/settings/types.ts
+++ b/src/utils/settings/types.ts
@@ -373,11 +373,11 @@ export const SettingsSchema = lazySchema(() =>
.optional()
.describe('Tool usage permissions configuration'),
modelType: z
- .enum(['anthropic', 'openai'])
+ .enum(['anthropic', 'openai', 'gemini'])
.optional()
.describe(
- 'API provider type. "anthropic" uses the Anthropic API (default), "openai" uses the OpenAI Chat Completions API (/v1/chat/completions). ' +
- 'When set to "openai", configure OPENAI_API_KEY, OPENAI_BASE_URL, and OPENAI_MODEL in env.',
+ 'API provider type. "anthropic" uses the Anthropic API (default), "openai" uses the OpenAI Chat Completions API (/v1/chat/completions), and "gemini" uses the Gemini Generate Content API. ' +
+ 'When set to "openai", configure OPENAI_API_KEY, OPENAI_BASE_URL, and OPENAI_MODEL in env. When set to "gemini", configure GEMINI_API_KEY, optional GEMINI_BASE_URL, and either GEMINI_MODEL or ANTHROPIC_DEFAULT_*_MODEL family env vars.',
),
model: z
.string()
diff --git a/src/utils/status.tsx b/src/utils/status.tsx
index 45be82dc8..6d66bc83f 100644
--- a/src/utils/status.tsx
+++ b/src/utils/status.tsx
@@ -339,8 +339,9 @@
bedrock: 'AWS Bedrock',
vertex: 'Google Vertex AI',
foundry: 'Microsoft Foundry',
+ gemini: 'Gemini API',
}[apiProvider]

properties.push({
label: 'API provider',
value: providerLabel,
@@ -423,6 +423,13 @@ export function buildAPIProviderProperties(): Property[] {
value: 'Microsoft Foundry auth skipped',
})
}
+ } else if (apiProvider === 'gemini') {
+ const geminiBaseUrl =
+ process.env.GEMINI_BASE_URL || 'https://generativelanguage.googleapis.com/v1beta'
+ properties.push({
+ label: 'Gemini base URL',
+ value: geminiBaseUrl,
+ })
}
const proxyUrl = getProxyUrl()