Add OPENAI_BASE_URL configuration option
parent 408abd24ea
commit 57ee62f3af
5 changed files with 74 additions and 23 deletions
@@ -4,6 +4,7 @@ SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"
 
 [API_KEYS]
 OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
+OPENAI_BASE_URL = "https://api.openai.com/v1" # OpenAI API base URL
 GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
 
 [API_ENDPOINTS]
@@ -11,6 +11,7 @@ interface Config {
   };
   API_KEYS: {
     OPENAI: string;
+    OPENAI_BASE_URL: string;
     GROQ: string;
   };
   API_ENDPOINTS: {
@@ -35,6 +36,9 @@ export const getSimilarityMeasure = () =>
 
 export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
 
+export const getOpenaiBaseUrl = () =>
+  loadConfig().API_KEYS.OPENAI_BASE_URL || 'https://api.openai.com/v1';
+
 export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
 
 export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
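
Note: the fallback in getOpenaiBaseUrl can be illustrated with a minimal sketch. The ApiKeys type and resolveOpenaiBaseUrl helper below are illustrative stand-ins for this commit's config shape, not code from the repository.

// Minimal sketch of the fallback behaviour added above: an empty or missing
// OPENAI_BASE_URL resolves to the official OpenAI endpoint.
type ApiKeys = { OPENAI: string; OPENAI_BASE_URL?: string; GROQ: string };

const resolveOpenaiBaseUrl = (keys: ApiKeys): string =>
  keys.OPENAI_BASE_URL || 'https://api.openai.com/v1';

// Unset -> default endpoint
console.log(resolveOpenaiBaseUrl({ OPENAI: 'sk-...', GROQ: '' }));
// Set -> custom OpenAI-compatible endpoint (URL is illustrative)
console.log(
  resolveOpenaiBaseUrl({
    OPENAI: 'sk-...',
    OPENAI_BASE_URL: 'http://localhost:8080/v1',
    GROQ: '',
  }),
);
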
@@ -5,11 +5,13 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
+  getOpenaiBaseUrl,
 } from '../config';
 import logger from '../utils/logger';
 
 export const getAvailableProviders = async () => {
   const openAIApiKey = getOpenaiApiKey();
+  const openAIBaseUrl = getOpenaiBaseUrl();
   const groqApiKey = getGroqApiKey();
   const ollamaEndpoint = getOllamaApiEndpoint();
 
@@ -18,25 +20,45 @@ export const getAvailableProviders = async () => {
   if (openAIApiKey) {
     try {
       models['openai'] = {
-        'GPT-3.5 turbo': new ChatOpenAI({
-          openAIApiKey,
-          modelName: 'gpt-3.5-turbo',
-          temperature: 0.7,
-        }),
-        'GPT-4': new ChatOpenAI({
-          openAIApiKey,
-          modelName: 'gpt-4',
-          temperature: 0.7,
-        }),
-        'GPT-4 turbo': new ChatOpenAI({
-          openAIApiKey,
-          modelName: 'gpt-4-turbo',
-          temperature: 0.7,
-        }),
-        embeddings: new OpenAIEmbeddings({
-          openAIApiKey,
-          modelName: 'text-embedding-3-large',
-        }),
+        'GPT-3.5 turbo': new ChatOpenAI(
+          {
+            openAIApiKey,
+            modelName: 'gpt-3.5-turbo',
+            temperature: 0.7,
+          },
+          {
+            baseURL: openAIBaseUrl,
+          },
+        ),
+        'GPT-4': new ChatOpenAI(
+          {
+            openAIApiKey,
+            modelName: 'gpt-4',
+            temperature: 0.7,
+          },
+          {
+            baseURL: openAIBaseUrl,
+          },
+        ),
+        'GPT-4 turbo': new ChatOpenAI(
+          {
+            openAIApiKey,
+            modelName: 'gpt-4-turbo',
+            temperature: 0.7,
+          },
+          {
+            baseURL: openAIBaseUrl,
+          },
+        ),
+        embeddings: new OpenAIEmbeddings(
+          {
+            openAIApiKey,
+            modelName: 'text-embedding-3-large',
+          },
+          {
+            baseURL: openAIBaseUrl,
+          },
+        ),
       };
     } catch (err) {
       logger.error(`Error loading OpenAI models: ${err}`);
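
Note: in isolation, the new two-argument construction reads like the sketch below — model options first, client options (including the base URL) second. The import path and environment-variable plumbing are assumptions made to keep the example self-contained; the hunk above does not show them.

// Sketch of the construction pattern used in this hunk.
// Assumption: ChatOpenAI is imported from '@langchain/openai' in this codebase;
// the visible hunk does not include the import line.
import { ChatOpenAI } from '@langchain/openai';

const openAIApiKey = process.env.OPENAI_API_KEY ?? '';
const openAIBaseUrl = process.env.OPENAI_BASE_URL ?? 'https://api.openai.com/v1';

const gpt35 = new ChatOpenAI(
  {
    openAIApiKey,
    modelName: 'gpt-3.5-turbo',
    temperature: 0.7,
  },
  {
    baseURL: openAIBaseUrl, // any OpenAI-compatible endpoint
  },
);

Pointing openAIBaseUrl at a self-hosted OpenAI-compatible server (for example a local proxy) is the use case this option enables.
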
@@ -86,10 +108,15 @@ export const getAvailableProviders = async () => {
             baseURL: 'https://api.groq.com/openai/v1',
           },
         ),
-        embeddings: new OpenAIEmbeddings({
-          openAIApiKey: openAIApiKey,
-          modelName: 'text-embedding-3-large',
-        }),
+        embeddings: new OpenAIEmbeddings(
+          {
+            openAIApiKey: openAIApiKey,
+            modelName: 'text-embedding-3-large',
+          },
+          {
+            baseURL: openAIBaseUrl,
+          },
+        ),
       };
     } catch (err) {
       logger.error(`Error loading Groq models: ${err}`);
@@ -4,6 +4,7 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
+  getOpenaiBaseUrl,
   updateConfig,
 } from '../config';
 
@@ -25,6 +26,7 @@ router.get('/', async (_, res) => {
   }
 
   config['openaiApiKey'] = getOpenaiApiKey();
+  config['openaiBaseUrl'] = getOpenaiBaseUrl();
   config['ollamaApiUrl'] = getOllamaApiEndpoint();
   config['groqApiKey'] = getGroqApiKey();
 
@@ -37,6 +39,7 @@ router.post('/', async (req, res) => {
   const updatedConfig = {
     API_KEYS: {
       OPENAI: config.openaiApiKey,
+      OPENAI_BASE_URL: config.openaiBaseUrl,
       GROQ: config.groqApiKey,
     },
     API_ENDPOINTS: {
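
Note: together with the GET handler above, this lets the base URL be read and updated over the config route. A hedged sketch of that round trip follows; the /api/config mount path and the localhost:3001 address are assumptions, as neither appears in this diff.

// Hedged sketch of the settings round trip enabled by these two hunks.
// Assumptions: the config router is mounted at /api/config and the backend
// listens on localhost:3001 -- neither is shown in this diff.
async function updateOpenaiBaseUrl(newBaseUrl: string): Promise<void> {
  const res = await fetch('http://localhost:3001/api/config');
  const current = await res.json();

  await fetch('http://localhost:3001/api/config', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ ...current, openaiBaseUrl: newBaseUrl }),
  });
}

// Example: point the backend at a local OpenAI-compatible server.
// updateOpenaiBaseUrl('http://localhost:8080/v1');
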
@@ -7,6 +7,7 @@ interface SettingsType {
     [key: string]: string[];
   };
   openaiApiKey: string;
+  openaiBaseUrl: string;
   groqApiKey: string;
   ollamaApiUrl: string;
 }
@@ -183,6 +184,21 @@ const SettingsDialog = ({
                   className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
                 />
               </div>
+              <div className="flex flex-col space-y-1">
+                <p className="text-white/70 text-sm">OpenAI Base URL</p>
+                <input
+                  type="text"
+                  placeholder="OpenAI Base URL"
+                  defaultValue={config.openaiBaseUrl}
+                  onChange={(e) =>
+                    setConfig({
+                      ...config,
+                      openaiBaseUrl: e.target.value,
+                    })
+                  }
+                  className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+                />
+              </div>
               <div className="flex flex-col space-y-1">
                 <p className="text-white/70 text-sm">Ollama API URL</p>
                 <input