Added a feature to switch API service providers, enabling users to easily toggle between providers as needed.

Added the ability to switch API service providers. Introduced an input field for the provider base URL in the settings interface and added the OPENAI_URL parameter to the config file.
This commit is contained in:
Github-CJX 2024-05-02 11:43:24 +08:00
parent f21f5c9611
commit 1ebe69ec94
5 changed files with 35 additions and 2 deletions

View file

@ -7,6 +7,7 @@ CHAT_MODEL = "gpt-3.5-turbo" # Name of the model to use
[API_KEYS]
OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
OPENAI_URL = "https://api.openai.com/v1" # Base URL for the OpenAI-compatible API endpoint; change to use an alternate provider
[API_ENDPOINTS]
SEARXNG = "http://localhost:32768" # SearxNG API URL

View file

@ -14,6 +14,7 @@ interface Config {
API_KEYS: {
OPENAI: string;
GROQ: string;
OPENAI_URL: string;
};
API_ENDPOINTS: {
SEARXNG: string;
@ -40,6 +41,8 @@ export const getChatModelProvider = () =>
// Convenience accessors over the loaded config. Each call re-reads the
// config via loadConfig() so updated values are picked up without a restart.

// Name of the chat model selected under [GENERAL].
export const getChatModel = () => {
  const config = loadConfig();
  return config.GENERAL.CHAT_MODEL;
};

// Base URL of the OpenAI-compatible API (allows switching providers).
export const getOpenaiUrl = () => {
  const config = loadConfig();
  return config.API_KEYS.OPENAI_URL;
};

// OpenAI API key from the [API_KEYS] section.
export const getOpenaiApiKey = () => {
  const config = loadConfig();
  return config.API_KEYS.OPENAI;
};

// Groq API key from the [API_KEYS] section.
export const getGroqApiKey = () => {
  const config = loadConfig();
  return config.API_KEYS.GROQ;
};

View file

@ -1,3 +1,4 @@
import { type ClientOptions } from 'openai';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { ChatOllama } from '@langchain/community/chat_models/ollama';
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
@ -5,6 +6,7 @@ import {
getGroqApiKey,
getOllamaApiEndpoint,
getOpenaiApiKey,
getOpenaiUrl,
} from '../config';
import logger from '../utils/logger';
@ -12,6 +14,10 @@ export const getAvailableProviders = async () => {
const openAIApiKey = getOpenaiApiKey();
const groqApiKey = getGroqApiKey();
const ollamaEndpoint = getOllamaApiEndpoint();
const openaiUrl = getOpenaiUrl();
const configuration: ClientOptions = {
baseURL: openaiUrl
};
const models = {};
@ -20,22 +26,26 @@ export const getAvailableProviders = async () => {
models['openai'] = {
'GPT-3.5 turbo': new ChatOpenAI({
openAIApiKey,
modelName: 'gpt-3.5-turbo',
modelName: 'gpt-3.5-turbo-1106',
temperature: 0.7,
configuration,
}),
'GPT-4': new ChatOpenAI({
openAIApiKey,
modelName: 'gpt-4',
modelName: 'gpt-4-1106-preview',
temperature: 0.7,
configuration,
}),
'GPT-4 turbo': new ChatOpenAI({
openAIApiKey,
modelName: 'gpt-4-turbo',
temperature: 0.7,
configuration,
}),
embeddings: new OpenAIEmbeddings({
openAIApiKey,
modelName: 'text-embedding-3-large',
configuration,
}),
};
} catch (err) {

View file

@ -5,6 +5,7 @@ import {
getChatModelProvider,
getGroqApiKey,
getOllamaApiEndpoint,
getOpenaiUrl,
getOpenaiApiKey,
updateConfig,
} from '../config';
@ -29,6 +30,7 @@ router.get('/', async (_, res) => {
config['selectedProvider'] = getChatModelProvider();
config['selectedChatModel'] = getChatModel();
config['openaiUrl'] = getOpenaiUrl();
config['openeaiApiKey'] = getOpenaiApiKey();
config['ollamaApiUrl'] = getOllamaApiEndpoint();
config['groqApiKey'] = getGroqApiKey();
@ -47,6 +49,7 @@ router.post('/', async (req, res) => {
API_KEYS: {
OPENAI: config.openeaiApiKey,
GROQ: config.groqApiKey,
OPENAI_URL: config.openaiUrl,
},
API_ENDPOINTS: {
OLLAMA: config.ollamaApiUrl,

View file

@ -8,6 +8,7 @@ interface SettingsType {
};
selectedProvider: string;
selectedChatModel: string;
openaiUrl: string;
openeaiApiKey: string;
groqApiKey: string;
ollamaApiUrl: string;
@ -165,6 +166,21 @@ const SettingsDialog = ({
</select>
</div>
)}
<div className="flex flex-col space-y-1">
<p className="text-white/70 text-sm">OpenAI Url</p>
<input
type="text"
placeholder="OpenAI Url"
defaultValue={config.openaiUrl}
onChange={(e) =>
setConfig({
...config,
openaiUrl: e.target.value,
})
}
className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
/>
</div>
<div className="flex flex-col space-y-1">
<p className="text-white/70 text-sm">OpenAI API Key</p>
<input