diff --git a/package.json b/package.json
index 38816cd955..bf3bef3021 100644
--- a/package.json
+++ b/package.json
@@ -72,5 +72,9 @@
 			"form-data": ">=4.0.4",
 			"bluebird": ">=3.7.2"
 		}
+	},
+	"dependencies": {
+		"@types/node-fetch": "2",
+		"node-fetch": "2"
 	}
 }
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 9d09c6adac..792e48f383 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -14,6 +14,13 @@ overrides:
 importers:
 
   .:
+    dependencies:
+      '@types/node-fetch':
+        specifier: '2'
+        version: 2.6.12
+      node-fetch:
+        specifier: '2'
+        version: 2.7.0
     devDependencies:
       '@changesets/changelog-github':
         specifier: ^0.5.1
@@ -1304,6 +1311,9 @@ importers:
       '@roo-code/types':
        specifier: workspace:^
        version: link:../packages/types
+      '@types/node-fetch':
+        specifier: '2'
+        version: 2.6.12
       '@vscode/codicons':
         specifier: ^0.0.36
         version: 0.0.36
@@ -1397,6 +1407,9 @@ importers:
       node-cache:
         specifier: ^5.1.2
         version: 5.1.2
+      node-fetch:
+        specifier: '2'
+        version: 2.7.0
       node-ipc:
         specifier: ^12.0.0
         version: 12.0.0
@@ -25729,7 +25742,7 @@ snapshots:
       sirv: 3.0.1
       tinyglobby: 0.2.14
      tinyrainbow: 2.0.0
-      vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.2.1)(@vitest/ui@3.2.4)(jiti@2.4.2)(jsdom@26.1.0)(lightningcss@1.30.1)(terser@5.43.1)(tsx@4.19.4)(yaml@2.8.0)
+      vitest: 3.2.4(@types/debug@4.1.12)(@types/node@20.17.50)(@vitest/ui@3.2.4)(jiti@2.4.2)(jsdom@26.1.0)(lightningcss@1.30.1)(terser@5.43.1)(tsx@4.19.4)(yaml@2.8.0)
 
   '@vitest/utils@2.0.5':
     dependencies:
diff --git a/src/package.json b/src/package.json
index 0fecb2ebe4..fc9fde3007 100644
--- a/src/package.json
+++ b/src/package.json
@@ -655,6 +655,7 @@
 		"@roo-code/ipc": "workspace:^",
 		"@roo-code/telemetry": "workspace:^",
 		"@roo-code/types": "workspace:^",
+		"@types/node-fetch": "2",
 		"@vscode/codicons": "^0.0.36",
 		"async-mutex": "^0.5.0",
 		"axios": "^1.12.0",
@@ -678,14 +679,15 @@
 		"ignore": "^7.0.3",
 		"is-wsl": "^3.1.0",
 		"isbinaryfile": "^5.0.2",
-		"json5": "^2.2.3",
 		"jsdom": "^26.0.0",
+		"json5": "^2.2.3",
 		"jwt-decode": "^4.0.0",
 		"lodash.debounce": "^4.0.8",
 		"lru-cache": "^11.1.0",
 		"mammoth": "^1.9.1",
 		"monaco-vscode-textmate-theme-converter": "^0.1.7",
 		"node-cache": "^5.1.2",
+		"node-fetch": "2",
 		"node-ipc": "^12.0.0",
 		"ollama": "^0.5.17",
 		"openai": "^5.12.2",
@@ -729,13 +731,12 @@
 		"@roo-code/config-eslint": "workspace:^",
 		"@roo-code/config-typescript": "workspace:^",
 		"@types/clone-deep": "^4.0.4",
-		"dotenv": "^16.4.7",
 		"@types/debug": "^4.1.12",
 		"@types/diff": "^5.2.3",
 		"@types/diff-match-patch": "^1.0.36",
 		"@types/glob": "^8.1.0",
-		"@types/json5": "^2.2.0",
 		"@types/jsdom": "^21.1.7",
+		"@types/json5": "^2.2.0",
 		"@types/lodash.debounce": "^4.0.9",
 		"@types/mocha": "^10.0.10",
 		"@types/node": "20.x",
@@ -750,6 +751,7 @@
 		"@types/vscode": "^1.84.0",
 		"@vscode/test-electron": "^2.5.2",
 		"@vscode/vsce": "3.3.2",
+		"dotenv": "^16.4.7",
 		"esbuild": "^0.25.0",
 		"execa": "^9.5.2",
 		"glob": "^11.0.1",
diff --git a/src/services/code-index/config-manager.ts b/src/services/code-index/config-manager.ts
index 2c0e8bb5c9..30a2883c23 100644
--- a/src/services/code-index/config-manager.ts
+++ b/src/services/code-index/config-manager.ts
@@ -17,7 +17,7 @@ export class CodeIndexConfigManager {
 	private openAiOptions?: ApiHandlerOptions
 	private ollamaOptions?: ApiHandlerOptions
 	private openAiCompatibleOptions?: { baseUrl: string; apiKey: string }
-	private geminiOptions?: { apiKey: string }
+	private geminiOptions?: { apiKey: string; baseUrl?: string }
 	private mistralOptions?: { apiKey: string }
 	private vercelAiGatewayOptions?: { apiKey: string }
 	private qdrantUrl?: string = "http://localhost:6333"
@@ -126,7 +126,14 @@ export class CodeIndexConfigManager {
 					}
 				: undefined
 
-		this.geminiOptions = geminiApiKey ? { apiKey: geminiApiKey } : undefined
+		// For Gemini provider, use the generic baseUrl field from codebaseIndexEmbedderBaseUrl
+		const geminiBaseUrl = codebaseIndexEmbedderProvider === "gemini" ? codebaseIndexEmbedderBaseUrl : ""
+		this.geminiOptions = geminiApiKey
+			? {
+					apiKey: geminiApiKey,
+					baseUrl: geminiBaseUrl || undefined,
+				}
+			: undefined
 		this.mistralOptions = mistralApiKey ? { apiKey: mistralApiKey } : undefined
 		this.vercelAiGatewayOptions = vercelAiGatewayApiKey ? { apiKey: vercelAiGatewayApiKey } : undefined
 	}
@@ -144,7 +151,7 @@ export class CodeIndexConfigManager {
 		openAiOptions?: ApiHandlerOptions
 		ollamaOptions?: ApiHandlerOptions
 		openAiCompatibleOptions?: { baseUrl: string; apiKey: string }
-		geminiOptions?: { apiKey: string }
+		geminiOptions?: { apiKey: string; baseUrl?: string }
 		mistralOptions?: { apiKey: string }
 		vercelAiGatewayOptions?: { apiKey: string }
 		qdrantUrl?: string
@@ -165,6 +172,7 @@ export class CodeIndexConfigManager {
 			openAiCompatibleBaseUrl: this.openAiCompatibleOptions?.baseUrl ?? "",
 			openAiCompatibleApiKey: this.openAiCompatibleOptions?.apiKey ?? "",
 			geminiApiKey: this.geminiOptions?.apiKey ?? "",
+			geminiBaseUrl: this.geminiOptions?.baseUrl ?? "",
 			mistralApiKey: this.mistralOptions?.apiKey ?? "",
 			vercelAiGatewayApiKey: this.vercelAiGatewayOptions?.apiKey ?? "",
 			qdrantUrl: this.qdrantUrl ?? "",
@@ -267,6 +275,7 @@ export class CodeIndexConfigManager {
 		const prevOpenAiCompatibleApiKey = prev?.openAiCompatibleApiKey ?? ""
 		const prevModelDimension = prev?.modelDimension
 		const prevGeminiApiKey = prev?.geminiApiKey ?? ""
+		const prevGeminiBaseUrl = prev?.geminiBaseUrl ?? ""
 		const prevMistralApiKey = prev?.mistralApiKey ?? ""
 		const prevVercelAiGatewayApiKey = prev?.vercelAiGatewayApiKey ?? ""
 		const prevQdrantUrl = prev?.qdrantUrl ?? ""
@@ -305,6 +314,7 @@ export class CodeIndexConfigManager {
 		const currentOpenAiCompatibleApiKey = this.openAiCompatibleOptions?.apiKey ?? ""
 		const currentModelDimension = this.modelDimension
 		const currentGeminiApiKey = this.geminiOptions?.apiKey ?? ""
+		const currentGeminiBaseUrl = this.geminiOptions?.baseUrl ?? ""
 		const currentMistralApiKey = this.mistralOptions?.apiKey ?? ""
 		const currentVercelAiGatewayApiKey = this.vercelAiGatewayOptions?.apiKey ?? ""
 		const currentQdrantUrl = this.qdrantUrl ?? ""
@@ -329,6 +339,10 @@ export class CodeIndexConfigManager {
 			return true
 		}
 
+		if (prevGeminiBaseUrl !== currentGeminiBaseUrl) {
+			return true
+		}
+
 		if (prevMistralApiKey !== currentMistralApiKey) {
 			return true
 		}
diff --git a/src/services/code-index/embedders/gemini.ts b/src/services/code-index/embedders/gemini.ts
index 23615e570f..7d26690edf 100644
--- a/src/services/code-index/embedders/gemini.ts
+++ b/src/services/code-index/embedders/gemini.ts
@@ -1,3 +1,4 @@
+import fetch from "node-fetch"
 import { OpenAICompatibleEmbedder } from "./openai-compatible"
 import { IEmbedder, EmbeddingResponse, EmbedderInfo } from "../interfaces/embedder"
 import { GEMINI_MAX_ITEM_TOKENS } from "../constants"
@@ -5,87 +6,91 @@ import { t } from "../../../i18n"
 import { TelemetryEventName } from "@roo-code/types"
 import { TelemetryService } from "@roo-code/telemetry"
 
-/**
- * Gemini embedder implementation that wraps the OpenAI Compatible embedder
- * with configuration for Google's Gemini embedding API.
- *
- * Supported models:
- * - text-embedding-004 (dimension: 768)
- * - gemini-embedding-001 (dimension: 2048)
- */
 export class GeminiEmbedder implements IEmbedder {
-	private readonly openAICompatibleEmbedder: OpenAICompatibleEmbedder
-	private static readonly GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
+	private readonly openAICompatibleEmbedder?: OpenAICompatibleEmbedder
+	private static readonly DEFAULT_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
 	private static readonly DEFAULT_MODEL = "gemini-embedding-001"
 	private readonly modelId: string
+	private readonly apiKey: string
+	private readonly baseUrl: string
+	private readonly isCustom: boolean
 
-	/**
-	 * Creates a new Gemini embedder
-	 * @param apiKey The Gemini API key for authentication
-	 * @param modelId The model ID to use (defaults to gemini-embedding-001)
-	 */
-	constructor(apiKey: string, modelId?: string) {
+	constructor(apiKey: string, modelId?: string, baseUrl?: string) {
 		if (!apiKey) {
 			throw new Error(t("embeddings:validation.apiKeyRequired"))
 		}
 
-		// Use provided model or default
+		this.apiKey = apiKey
 		this.modelId = modelId || GeminiEmbedder.DEFAULT_MODEL
+		this.isCustom = !!baseUrl
+		this.baseUrl = baseUrl || GeminiEmbedder.DEFAULT_BASE_URL
 
-		// Create an OpenAI Compatible embedder with Gemini's configuration
-		this.openAICompatibleEmbedder = new OpenAICompatibleEmbedder(
-			GeminiEmbedder.GEMINI_BASE_URL,
-			apiKey,
-			this.modelId,
-			GEMINI_MAX_ITEM_TOKENS,
-		)
+		if (!this.isCustom) {
+			this.openAICompatibleEmbedder = new OpenAICompatibleEmbedder(
+				this.baseUrl,
+				this.apiKey,
+				this.modelId,
+				GEMINI_MAX_ITEM_TOKENS,
+			)
+		}
 	}
 
-	/**
-	 * Creates embeddings for the given texts using Gemini's embedding API
-	 * @param texts Array of text strings to embed
-	 * @param model Optional model identifier (uses constructor model if not provided)
-	 * @returns Promise resolving to embedding response
-	 */
 	async createEmbeddings(texts: string[], model?: string): Promise<EmbeddingResponse> {
+		const modelToUse = model || this.modelId
+
+		if (!this.isCustom && this.openAICompatibleEmbedder) {
+			return this.openAICompatibleEmbedder.createEmbeddings(texts, modelToUse)
+		}
+
 		try {
-			// Use the provided model or fall back to the instance's model
-			const modelToUse = model || this.modelId
-			return await this.openAICompatibleEmbedder.createEmbeddings(texts, modelToUse)
+			const response = await fetch(this.baseUrl, {
+				method: "POST",
+				headers: {
+					"Content-Type": "application/json",
+					Authorization: `Bearer ${this.apiKey}`,
+				},
+				body: JSON.stringify({
+					input: texts,
+					model: modelToUse,
+				}),
+			})
+
+			if (!response.ok) {
+				const errorBody = await response.text()
+				console.error(`Gemini custom endpoint error: ${response.status} ${response.statusText}`, errorBody)
+				throw new Error(`API request failed with status ${response.status}: ${errorBody}`)
+			}
+
+			const data = (await response.json()) as any
+			return {
+				embeddings: data.data.map((d: any) => d.embedding),
+				usage: {
+					promptTokens: 0,
+					totalTokens: data.usage.total_tokens,
+				},
+			}
 		} catch (error) {
 			TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
 				error: error instanceof Error ? error.message : String(error),
 				stack: error instanceof Error ? error.stack : undefined,
-				location: "GeminiEmbedder:createEmbeddings",
+				location: "GeminiEmbedder:createEmbeddings:custom",
 			})
-			console.error("Gemini embedder error in createEmbeddings:", error) // kilocode_change
+			console.error("Gemini custom embedder error in createEmbeddings:", error)
 			throw error
 		}
 	}
 
-	/**
-	 * Validates the Gemini embedder configuration by delegating to the underlying OpenAI-compatible embedder
-	 * @returns Promise resolving to validation result with success status and optional error message
-	 */
 	async validateConfiguration(): Promise<{ valid: boolean; error?: string }> {
-		try {
-			// Delegate validation to the OpenAI-compatible embedder
-			// The error messages will be specific to Gemini since we're using Gemini's base URL
-			return await this.openAICompatibleEmbedder.validateConfiguration()
-		} catch (error) {
-			TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
-				error: error instanceof Error ? error.message : String(error),
-				stack: error instanceof Error ? error.stack : undefined,
-				location: "GeminiEmbedder:validateConfiguration",
-			})
-			console.error("Gemini embedder error in validateConfiguration:", error) // kilocode_change
-			throw error
+		if (!this.isCustom && this.openAICompatibleEmbedder) {
+			return this.openAICompatibleEmbedder.validateConfiguration()
 		}
+
+		// For custom URLs, we perform a lazy validation. We assume the URL is valid
+		// and let any potential errors be caught during the actual embedding process.
+		// This provides more flexibility for users with custom proxy setups.
+		return { valid: true }
 	}
 
-	/**
-	 * Returns information about this embedder
-	 */
 	get embedderInfo(): EmbedderInfo {
 		return {
 			name: "gemini",
diff --git a/src/services/code-index/interfaces/config.ts b/src/services/code-index/interfaces/config.ts
index f168e26869..7f5cda7936 100644
--- a/src/services/code-index/interfaces/config.ts
+++ b/src/services/code-index/interfaces/config.ts
@@ -12,7 +12,7 @@ export interface CodeIndexConfig {
 	openAiOptions?: ApiHandlerOptions
 	ollamaOptions?: ApiHandlerOptions
 	openAiCompatibleOptions?: { baseUrl: string; apiKey: string }
-	geminiOptions?: { apiKey: string }
+	geminiOptions?: { apiKey: string; baseUrl?: string }
 	mistralOptions?: { apiKey: string }
 	vercelAiGatewayOptions?: { apiKey: string }
 	qdrantUrl?: string
@@ -35,6 +35,7 @@ export type PreviousConfigSnapshot = {
 	openAiCompatibleBaseUrl?: string
 	openAiCompatibleApiKey?: string
 	geminiApiKey?: string
+	geminiBaseUrl?: string
 	mistralApiKey?: string
 	vercelAiGatewayApiKey?: string
 	qdrantUrl?: string
diff --git a/src/services/code-index/service-factory.ts b/src/services/code-index/service-factory.ts
index 6d69e1f0b6..d5c871f6a9 100644
--- a/src/services/code-index/service-factory.ts
+++ b/src/services/code-index/service-factory.ts
@@ -68,7 +68,7 @@ export class CodeIndexServiceFactory {
 			if (!config.geminiOptions?.apiKey) {
 				throw new Error(t("embeddings:serviceFactory.geminiConfigMissing"))
 			}
-			return new GeminiEmbedder(config.geminiOptions.apiKey, config.modelId)
+			return new GeminiEmbedder(config.geminiOptions.apiKey, config.modelId, config.geminiOptions.baseUrl)
 		} else if (provider === "mistral") {
 			if (!config.mistralOptions?.apiKey) {
 				throw new Error(t("embeddings:serviceFactory.mistralConfigMissing"))
diff --git a/webview-ui/src/components/chat/CodeIndexPopover.tsx b/webview-ui/src/components/chat/CodeIndexPopover.tsx
index 62164b1460..8f00ece22b 100644
--- a/webview-ui/src/components/chat/CodeIndexPopover.tsx
+++ b/webview-ui/src/components/chat/CodeIndexPopover.tsx
@@ -48,6 +48,7 @@ import { useEscapeKey } from "@src/hooks/useEscapeKey"
 // Default URLs for providers
 const DEFAULT_QDRANT_URL = "http://localhost:6333"
 const DEFAULT_OLLAMA_URL = "http://localhost:11434"
+const DEFAULT_GEMINI_URL = "https://generativelanguage.googleapis.com"
 
 interface CodeIndexPopoverProps {
 	children: React.ReactNode
@@ -125,6 +126,10 @@ const createValidationSchema = (provider: EmbedderProvider, t: any) => {
 
 		case "gemini":
 			return baseSchema.extend({
+				codebaseIndexEmbedderBaseUrl: z
+					.string()
+					.url(t("settings:codeIndex.validation.invalidGeminiUrl"))
+					.optional(),
 				codebaseIndexGeminiApiKey: z.string().min(1, t("settings:codeIndex.validation.geminiApiKeyRequired")),
 				codebaseIndexEmbedderModelId: z
 					.string()
@@ -392,7 +397,10 @@ export const CodeIndexPopover: React.FC<CodeIndexPopoverProps> = ({
 	}, [currentSettings, initialSettings])
 
 	const updateSetting = (key: keyof LocalCodeIndexSettings, value: any) => {
-		setCurrentSettings((prev) => ({ ...prev, [key]: value }))
+		setCurrentSettings((prev) => {
+			const updated = { ...prev, [key]: value }
+			return updated
+		})
 		// Clear validation error for this field when user starts typing
 		if (formErrors[key]) {
 			setFormErrors((prev) => {
@@ -944,6 +952,36 @@ export const CodeIndexPopover: React.FC<CodeIndexPopoverProps> = ({
 
 							{currentSettings.codebaseIndexEmbedderProvider === "gemini" && (
 								<>
+									<div className="space-y-2">
+										<label className="text-sm font-medium">
+											{t("settings:codeIndex.geminiBaseUrlLabel")}
+										</label>
+										<VSCodeTextField
+											value={currentSettings.codebaseIndexEmbedderBaseUrl || ""}
+											onInput={(e: any) =>
+												updateSetting("codebaseIndexEmbedderBaseUrl", e.target.value)
+											}
+											onBlur={(e: any) => {
+												// Set default Gemini URL if field is empty
+												if (!e.target.value.trim()) {
+													updateSetting(
+														"codebaseIndexEmbedderBaseUrl",
+														DEFAULT_GEMINI_URL,
+													)
+												}
+											}}
+											placeholder={t("settings:codeIndex.geminiBaseUrlPlaceholder")}
+											className={cn("w-full", {
+												"border-red-500": formErrors.codebaseIndexEmbedderBaseUrl,
+											})}
+										/>
+										{formErrors.codebaseIndexEmbedderBaseUrl && (
+											<p className="text-vscode-errorForeground text-sm mt-1">
+												{formErrors.codebaseIndexEmbedderBaseUrl}
+											</p>
+										)}
+									</div>
+