Added a feature to switch API service providers, enabling users to easily toggle between providers as needed.

Added a feature to the project for switching API service providers. Introduced an input field for the API provider URL in the settings interface and added the OPENAI_URL parameter to the config file.
This commit is contained in:
Github-CJX 2024-05-02 11:43:24 +08:00
parent f21f5c9611
commit 1ebe69ec94
5 changed files with 35 additions and 2 deletions

View file

@ -7,6 +7,7 @@ CHAT_MODEL = "gpt-3.5-turbo" # Name of the model to use
[API_KEYS] [API_KEYS]
OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
OPENAI_URL = "https://api.openai.com/v1"
[API_ENDPOINTS] [API_ENDPOINTS]
SEARXNG = "http://localhost:32768" # SearxNG API URL SEARXNG = "http://localhost:32768" # SearxNG API URL

View file

@ -14,6 +14,7 @@ interface Config {
API_KEYS: { API_KEYS: {
OPENAI: string; OPENAI: string;
GROQ: string; GROQ: string;
OPENAI_URL: string;
}; };
API_ENDPOINTS: { API_ENDPOINTS: {
SEARXNG: string; SEARXNG: string;
@ -40,6 +41,8 @@ export const getChatModelProvider = () =>
export const getChatModel = () => loadConfig().GENERAL.CHAT_MODEL; export const getChatModel = () => loadConfig().GENERAL.CHAT_MODEL;
export const getOpenaiUrl = () => loadConfig().API_KEYS.OPENAI_URL;
export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI; export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ; export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;

View file

@ -1,3 +1,4 @@
import { type ClientOptions } from 'openai';
import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai'; import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
import { ChatOllama } from '@langchain/community/chat_models/ollama'; import { ChatOllama } from '@langchain/community/chat_models/ollama';
import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama'; import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
@ -5,6 +6,7 @@ import {
getGroqApiKey, getGroqApiKey,
getOllamaApiEndpoint, getOllamaApiEndpoint,
getOpenaiApiKey, getOpenaiApiKey,
getOpenaiUrl,
} from '../config'; } from '../config';
import logger from '../utils/logger'; import logger from '../utils/logger';
@ -12,6 +14,10 @@ export const getAvailableProviders = async () => {
const openAIApiKey = getOpenaiApiKey(); const openAIApiKey = getOpenaiApiKey();
const groqApiKey = getGroqApiKey(); const groqApiKey = getGroqApiKey();
const ollamaEndpoint = getOllamaApiEndpoint(); const ollamaEndpoint = getOllamaApiEndpoint();
const openaiUrl = getOpenaiUrl();
const configuration: ClientOptions = {
baseURL: openaiUrl
};
const models = {}; const models = {};
@ -20,22 +26,26 @@ export const getAvailableProviders = async () => {
models['openai'] = { models['openai'] = {
'GPT-3.5 turbo': new ChatOpenAI({ 'GPT-3.5 turbo': new ChatOpenAI({
openAIApiKey, openAIApiKey,
modelName: 'gpt-3.5-turbo', modelName: 'gpt-3.5-turbo-1106',
temperature: 0.7, temperature: 0.7,
configuration,
}), }),
'GPT-4': new ChatOpenAI({ 'GPT-4': new ChatOpenAI({
openAIApiKey, openAIApiKey,
modelName: 'gpt-4', modelName: 'gpt-4-1106-preview',
temperature: 0.7, temperature: 0.7,
configuration,
}), }),
'GPT-4 turbo': new ChatOpenAI({ 'GPT-4 turbo': new ChatOpenAI({
openAIApiKey, openAIApiKey,
modelName: 'gpt-4-turbo', modelName: 'gpt-4-turbo',
temperature: 0.7, temperature: 0.7,
configuration,
}), }),
embeddings: new OpenAIEmbeddings({ embeddings: new OpenAIEmbeddings({
openAIApiKey, openAIApiKey,
modelName: 'text-embedding-3-large', modelName: 'text-embedding-3-large',
configuration,
}), }),
}; };
} catch (err) { } catch (err) {

View file

@ -5,6 +5,7 @@ import {
getChatModelProvider, getChatModelProvider,
getGroqApiKey, getGroqApiKey,
getOllamaApiEndpoint, getOllamaApiEndpoint,
getOpenaiUrl,
getOpenaiApiKey, getOpenaiApiKey,
updateConfig, updateConfig,
} from '../config'; } from '../config';
@ -29,6 +30,7 @@ router.get('/', async (_, res) => {
config['selectedProvider'] = getChatModelProvider(); config['selectedProvider'] = getChatModelProvider();
config['selectedChatModel'] = getChatModel(); config['selectedChatModel'] = getChatModel();
config['openaiUrl'] = getOpenaiUrl();
config['openeaiApiKey'] = getOpenaiApiKey(); config['openeaiApiKey'] = getOpenaiApiKey();
config['ollamaApiUrl'] = getOllamaApiEndpoint(); config['ollamaApiUrl'] = getOllamaApiEndpoint();
config['groqApiKey'] = getGroqApiKey(); config['groqApiKey'] = getGroqApiKey();
@ -47,6 +49,7 @@ router.post('/', async (req, res) => {
API_KEYS: { API_KEYS: {
OPENAI: config.openeaiApiKey, OPENAI: config.openeaiApiKey,
GROQ: config.groqApiKey, GROQ: config.groqApiKey,
OPENAI_URL: config.openaiUrl,
}, },
API_ENDPOINTS: { API_ENDPOINTS: {
OLLAMA: config.ollamaApiUrl, OLLAMA: config.ollamaApiUrl,

View file

@ -8,6 +8,7 @@ interface SettingsType {
}; };
selectedProvider: string; selectedProvider: string;
selectedChatModel: string; selectedChatModel: string;
openaiUrl: string;
openeaiApiKey: string; openeaiApiKey: string;
groqApiKey: string; groqApiKey: string;
ollamaApiUrl: string; ollamaApiUrl: string;
@ -165,6 +166,21 @@ const SettingsDialog = ({
</select> </select>
</div> </div>
)} )}
<div className="flex flex-col space-y-1">
<p className="text-white/70 text-sm">OpenAI Url</p>
<input
type="text"
placeholder="OpenAI Url"
defaultValue={config.openaiUrl}
onChange={(e) =>
setConfig({
...config,
openaiUrl: e.target.value,
})
}
className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
/>
</div>
<div className="flex flex-col space-y-1"> <div className="flex flex-col space-y-1">
<p className="text-white/70 text-sm">OpenAI API Key</p> <p className="text-white/70 text-sm">OpenAI API Key</p>
<input <input