diff --git a/sample.config.toml b/sample.config.toml
index 7bc8880..836b5b5 100644
--- a/sample.config.toml
+++ b/sample.config.toml
@@ -4,6 +4,7 @@ SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"
 
 [API_KEYS]
 OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
+OPENAI_BASE_URL = "https://api.openai.com/v1" # OpenAI API base URL
 GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
 
 [API_ENDPOINTS]
diff --git a/src/config.ts b/src/config.ts
index 7c0c7f1..cf3eebf 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -11,6 +11,7 @@ interface Config {
   };
   API_KEYS: {
     OPENAI: string;
+    OPENAI_BASE_URL: string;
     GROQ: string;
   };
   API_ENDPOINTS: {
@@ -35,6 +36,9 @@ export const getSimilarityMeasure = () =>
 
 export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
 
+export const getOpenaiBaseUrl = () =>
+  loadConfig().API_KEYS.OPENAI_BASE_URL || 'https://api.openai.com/v1';
+
 export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
 
 export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
diff --git a/src/lib/providers.ts b/src/lib/providers.ts
index 9b62ce0..6528183 100644
--- a/src/lib/providers.ts
+++ b/src/lib/providers.ts
@@ -5,11 +5,13 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
+  getOpenaiBaseUrl,
 } from '../config';
 import logger from '../utils/logger';
 
 export const getAvailableProviders = async () => {
   const openAIApiKey = getOpenaiApiKey();
+  const openAIBaseUrl = getOpenaiBaseUrl();
   const groqApiKey = getGroqApiKey();
   const ollamaEndpoint = getOllamaApiEndpoint();
 
@@ -18,25 +20,45 @@ export const getAvailableProviders = async () => {
   if (openAIApiKey) {
     try {
       models['openai'] = {
-        'GPT-3.5 turbo': new ChatOpenAI({
-          openAIApiKey,
-          modelName: 'gpt-3.5-turbo',
-          temperature: 0.7,
-        }),
-        'GPT-4': new ChatOpenAI({
-          openAIApiKey,
-          modelName: 'gpt-4',
-          temperature: 0.7,
-        }),
-        'GPT-4 turbo': new ChatOpenAI({
-          openAIApiKey,
-          modelName: 'gpt-4-turbo',
-          temperature: 0.7,
-        }),
-        embeddings: new OpenAIEmbeddings({
-          openAIApiKey,
-          modelName: 'text-embedding-3-large',
-        }),
+        'GPT-3.5 turbo': new ChatOpenAI(
+          {
+            openAIApiKey,
+            modelName: 'gpt-3.5-turbo',
+            temperature: 0.7,
+          },
+          {
+            baseURL: openAIBaseUrl,
+          },
+        ),
+        'GPT-4': new ChatOpenAI(
+          {
+            openAIApiKey,
+            modelName: 'gpt-4',
+            temperature: 0.7,
+          },
+          {
+            baseURL: openAIBaseUrl,
+          },
+        ),
+        'GPT-4 turbo': new ChatOpenAI(
+          {
+            openAIApiKey,
+            modelName: 'gpt-4-turbo',
+            temperature: 0.7,
+          },
+          {
+            baseURL: openAIBaseUrl,
+          },
+        ),
+        embeddings: new OpenAIEmbeddings(
+          {
+            openAIApiKey,
+            modelName: 'text-embedding-3-large',
+          },
+          {
+            baseURL: openAIBaseUrl,
+          },
+        ),
       };
     } catch (err) {
       logger.error(`Error loading OpenAI models: ${err}`);
@@ -86,10 +108,15 @@ export const getAvailableProviders = async () => {
             baseURL: 'https://api.groq.com/openai/v1',
           },
         ),
-        embeddings: new OpenAIEmbeddings({
-          openAIApiKey: openAIApiKey,
-          modelName: 'text-embedding-3-large',
-        }),
+        embeddings: new OpenAIEmbeddings(
+          {
+            openAIApiKey: openAIApiKey,
+            modelName: 'text-embedding-3-large',
+          },
+          {
+            baseURL: openAIBaseUrl,
+          },
+        ),
       };
     } catch (err) {
       logger.error(`Error loading Groq models: ${err}`);
diff --git a/src/routes/config.ts b/src/routes/config.ts
index 1bb9246..f9e3545 100644
--- a/src/routes/config.ts
+++ b/src/routes/config.ts
@@ -4,6 +4,7 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
+  getOpenaiBaseUrl,
   updateConfig,
 } from '../config';
 
@@ -25,6 +26,7 @@ router.get('/', async (_, res) => {
   }
 
   config['openaiApiKey'] = getOpenaiApiKey();
+  config['openaiBaseUrl'] = getOpenaiBaseUrl();
   config['ollamaApiUrl'] = getOllamaApiEndpoint();
   config['groqApiKey'] = getGroqApiKey();
 
@@ -37,6 +39,7 @@ router.post('/', async (req, res) => {
   const updatedConfig = {
     API_KEYS: {
       OPENAI: config.openaiApiKey,
+      OPENAI_BASE_URL: config.openaiBaseUrl,
       GROQ: config.groqApiKey,
     },
     API_ENDPOINTS: {
diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx
index 16e57de..8d20abe 100644
--- a/ui/components/SettingsDialog.tsx
+++ b/ui/components/SettingsDialog.tsx
@@ -7,6 +7,7 @@ interface SettingsType {
     [key: string]: string[];
  };
   openaiApiKey: string;
+  openaiBaseUrl: string;
   groqApiKey: string;
   ollamaApiUrl: string;
 }
@@ -183,6 +184,21 @@ const SettingsDialog = ({
                   className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
                 />
               </div>
+              <div className="flex flex-col space-y-1">
+                <p className="text-white/70 text-sm">OpenAI Base URL</p>
+                <Input
+                  type="text"
+                  placeholder="OpenAI Base URL"
+                  defaultValue={config.openaiBaseUrl}
+                  onChange={(e) =>
+                    setConfig({
+                      ...config,
+                      openaiBaseUrl: e.target.value,
+                    })
+                  }
+                  className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+                />
+              </div>
               <div className="flex flex-col space-y-1">
                 <p className="text-white/70 text-sm">Ollama API URL</p>
                 <Input
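
Usage sketch (not part of the patch; the endpoint and key below are placeholders): with this change, sample.config.toml can point the OpenAI provider at any OpenAI-compatible server, and leaving OPENAI_BASE_URL unset falls back to https://api.openai.com/v1 through getOpenaiBaseUrl().

[API_KEYS]
OPENAI = "sk-placeholder"                     # key expected by the target server
OPENAI_BASE_URL = "http://localhost:8080/v1"  # placeholder OpenAI-compatible endpoint
GROQ = ""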