add support for openai baseUrl

eric8810 2024-05-07 18:56:55 +08:00
parent 79f6a52b5b
commit 9dbfc69f11
3 changed files with 21 additions and 0 deletions

@@ -14,6 +14,7 @@ interface Config {
     GROQ: string;
   };
   API_ENDPOINTS: {
+    OPENAI_BASE_URL: string;
     SEARXNG: string;
     OLLAMA: string;
   };
@@ -41,6 +42,9 @@ export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
 
 export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
 
+export const getOpenaiApiEndpoint = () =>
+  loadConfig().API_ENDPOINTS.OPENAI_BASE_URL;
+
 export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
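
For reference, a minimal sketch (not part of the diff) of how the new getter might be consumed elsewhere; the fallback to the public OpenAI endpoint is an assumption, not something this commit adds:

import { getOpenaiApiEndpoint } from './config';

// Assumption: an empty OPENAI_BASE_URL means "use the default OpenAI endpoint".
const openaiBaseUrl = getOpenaiApiEndpoint() || 'https://api.openai.com/v1';
console.log(`OpenAI-compatible requests will target ${openaiBaseUrl}`);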

@@ -4,6 +4,7 @@ import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
 import {
   getGroqApiKey,
   getOllamaApiEndpoint,
+  getOpenaiApiEndpoint,
   getOpenaiApiKey,
 } from '../config';
 import logger from '../utils/logger';
@@ -12,6 +13,7 @@ export const getAvailableProviders = async () => {
   const openAIApiKey = getOpenaiApiKey();
   const groqApiKey = getGroqApiKey();
   const ollamaEndpoint = getOllamaApiEndpoint();
+  const openaiEndpoint = getOpenaiApiEndpoint();
 
   const models = {};
@@ -20,21 +22,33 @@ export const getAvailableProviders = async () => {
     models['openai'] = {
       'GPT-3.5 turbo': new ChatOpenAI({
         openAIApiKey,
+        configuration: {
+          baseURL: openaiEndpoint,
+        },
         modelName: 'gpt-3.5-turbo',
         temperature: 0.7,
       }),
       'GPT-4': new ChatOpenAI({
         openAIApiKey,
+        configuration: {
+          baseURL: openaiEndpoint,
+        },
         modelName: 'gpt-4',
         temperature: 0.7,
       }),
       'GPT-4 turbo': new ChatOpenAI({
         openAIApiKey,
+        configuration: {
+          baseURL: openaiEndpoint,
+        },
         modelName: 'gpt-4-turbo',
         temperature: 0.7,
       }),
       embeddings: new OpenAIEmbeddings({
         openAIApiKey,
+        configuration: {
+          baseURL: openaiEndpoint,
+        },
         modelName: 'text-embedding-3-large',
       }),
     };
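
The configuration.baseURL option is what points these ChatOpenAI and OpenAIEmbeddings instances at an alternative OpenAI-compatible server. A standalone sketch, assuming the class is imported from @langchain/openai as elsewhere in the project; the proxy URL is a placeholder, not a value from this commit:

import { ChatOpenAI } from '@langchain/openai';

// baseURL stands in for any OpenAI-compatible endpoint (proxy, gateway,
// or local inference server).
const chat = new ChatOpenAI({
  openAIApiKey: process.env.OPENAI_API_KEY,
  configuration: {
    baseURL: 'http://localhost:8080/v1',
  },
  modelName: 'gpt-3.5-turbo',
  temperature: 0.7,
});

const reply = await chat.invoke('Hello from a custom base URL');
console.log(reply.content);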

@@ -4,6 +4,7 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
+  getOpenaiApiEndpoint,
   updateConfig,
 } from '../config';
@@ -27,6 +28,7 @@ router.get('/', async (_, res) => {
   config['openaiApiKey'] = getOpenaiApiKey();
   config['ollamaApiUrl'] = getOllamaApiEndpoint();
   config['groqApiKey'] = getGroqApiKey();
+  config['openaiApiUrl'] = getOpenaiApiEndpoint();
 
   res.status(200).json(config);
 });
@@ -41,6 +43,7 @@ router.post('/', async (req, res) => {
     },
     API_ENDPOINTS: {
       OLLAMA: config.ollamaApiUrl,
+      OPENAI: config.openaiApiUrl,
     },
   };
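
A hedged end-to-end sketch of the config route with the new field; the host, port, and /api/config path are assumptions about the deployment, not taken from this commit. Note that the POST handler stores the value under API_ENDPOINTS.OPENAI while getOpenaiApiEndpoint reads API_ENDPOINTS.OPENAI_BASE_URL, so the two key names differ.

// Assumed endpoint; adjust host, port, and path to the actual server setup.
const res = await fetch('http://localhost:3001/api/config', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    openaiApiKey: 'sk-...',
    openaiApiUrl: 'http://localhost:8080/v1', // new field forwarded into API_ENDPOINTS
    ollamaApiUrl: 'http://localhost:11434',
    groqApiKey: '',
  }),
});
console.log(res.status); // expected 200 once the config has been written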