Add OPENAI_CUSTOM_CHAT_MODEL configuration option
parent 57ee62f3af
commit e6365c8ac6
4 changed files with 24 additions and 0 deletions
@@ -5,6 +5,7 @@ SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"
 [API_KEYS]
 OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
 OPENAI_BASE_URL = "https://api.openai.com/v1" # OpenAI API base URL
+OPENAI_CUSTOM_CHAT_MODEL = "" # OpenAI custom chat model ID - gpt-3.5-turbo-1106
 GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
 
 [API_ENDPOINTS]
@@ -12,6 +12,7 @@ interface Config {
   API_KEYS: {
     OPENAI: string;
     OPENAI_BASE_URL: string;
+    OPENAI_CUSTOM_CHAT_MODEL: string;
     GROQ: string;
   };
   API_ENDPOINTS: {
@@ -39,6 +40,9 @@ export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
 export const getOpenaiBaseUrl = () =>
   loadConfig().API_KEYS.OPENAI_BASE_URL || 'https://api.openai.com/v1';
 
+export const getOpenaiCustomChatModel = () =>
+  loadConfig().API_KEYS.OPENAI_CUSTOM_CHAT_MODEL;
+
 export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
 
 export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
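Unlike getOpenaiBaseUrl, the new getter has no fallback: it returns the raw TOML value, so an empty string means no custom model is configured. A minimal consumer sketch, assuming it runs inside this project (the '../config' import path matches the one used by the other changed files):

import { getOpenaiCustomChatModel } from '../config';

// '' when OPENAI_CUSTOM_CHAT_MODEL is left at its default in the config file.
const customChatModel = getOpenaiCustomChatModel();

if (customChatModel.length > 0) {
  // e.g. 'gpt-3.5-turbo-1106', as suggested by the sample config comment
  console.log(`Custom OpenAI chat model configured: ${customChatModel}`);
} else {
  console.log('No custom OpenAI chat model configured');
}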
@@ -6,12 +6,14 @@ import {
   getOllamaApiEndpoint,
   getOpenaiApiKey,
   getOpenaiBaseUrl,
+  getOpenaiCustomChatModel,
 } from '../config';
 import logger from '../utils/logger';
 
 export const getAvailableProviders = async () => {
   const openAIApiKey = getOpenaiApiKey();
   const openAIBaseUrl = getOpenaiBaseUrl();
+  const openAICustomChatModel = getOpenaiCustomChatModel();
   const groqApiKey = getGroqApiKey();
   const ollamaEndpoint = getOllamaApiEndpoint();
 
@@ -50,6 +52,20 @@ export const getAvailableProviders = async () => {
         baseURL: openAIBaseUrl,
       },
     ),
+    ...(openAICustomChatModel.length > 0
+      ? {
+          openAICustomChatModel: new ChatOpenAI(
+            {
+              openAIApiKey,
+              modelName: openAICustomChatModel,
+              temperature: 0.7,
+            },
+            {
+              baseURL: openAIBaseUrl,
+            },
+          ),
+        }
+      : {}),
     embeddings: new OpenAIEmbeddings(
       {
         openAIApiKey,
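The provider map only gains the extra ChatOpenAI entry when a model ID is actually configured; the conditional spread keeps the object literal free of empty keys. A self-contained sketch of that pattern (the names here are illustrative, not taken from the repository):

// Illustrative values: customModel would come from getOpenaiCustomChatModel().
const customModel = 'gpt-3.5-turbo-1106'; // '' when the option is unset

const providers = {
  embeddings: 'always present',
  // Spread an extra property only when a custom model ID was configured.
  ...(customModel.length > 0 ? { openAICustomChatModel: customModel } : {}),
};

console.log(Object.keys(providers)); // ['embeddings', 'openAICustomChatModel']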
@@ -5,6 +5,7 @@ import {
   getOllamaApiEndpoint,
   getOpenaiApiKey,
   getOpenaiBaseUrl,
+  getOpenaiCustomChatModel,
   updateConfig,
 } from '../config';
 
@@ -27,6 +28,7 @@ router.get('/', async (_, res) => {
 
   config['openaiApiKey'] = getOpenaiApiKey();
   config['openaiBaseUrl'] = getOpenaiBaseUrl();
+  config['openaiCustomChatModel'] = getOpenaiCustomChatModel();
   config['ollamaApiUrl'] = getOllamaApiEndpoint();
   config['groqApiKey'] = getGroqApiKey();
 
@@ -40,6 +42,7 @@ router.post('/', async (req, res) => {
     API_KEYS: {
       OPENAI: config.openaiApiKey,
       OPENAI_BASE_URL: config.openaiBaseUrl,
+      OPENAI_CUSTOM_CHAT_MODEL: config.openaiCustomChatModel,
       GROQ: config.groqApiKey,
     },
     API_ENDPOINTS: {
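Together the two route handlers round-trip the new setting: the GET handler exposes it under a camelCase key and the POST handler maps that key back onto the [API_KEYS] table. A hedged sketch of the payload shape implied by the hunks above (field list inferred from the diff, not copied from the full handler):

// Shape of the config payload as implied by the GET/POST handlers above.
type ConfigPayload = {
  openaiApiKey: string;
  openaiBaseUrl: string;
  openaiCustomChatModel: string; // field added by this commit
  ollamaApiUrl: string;
  groqApiKey: string;
};

// What the POST handler writes back into [API_KEYS] for a given payload.
const toApiKeys = (config: ConfigPayload) => ({
  OPENAI: config.openaiApiKey,
  OPENAI_BASE_URL: config.openaiBaseUrl,
  OPENAI_CUSTOM_CHAT_MODEL: config.openaiCustomChatModel,
  GROQ: config.groqApiKey,
});

console.log(
  toApiKeys({
    openaiApiKey: 'sk-...',
    openaiBaseUrl: 'https://api.openai.com/v1',
    openaiCustomChatModel: 'gpt-3.5-turbo-1106',
    ollamaApiUrl: '',
    groqApiKey: '',
  }),
);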