support AzureOpenAI

parent e226645bc7
commit 28e308db01

11 changed files with 256 additions and 8 deletions
@@ -19,6 +19,12 @@ API_KEY = ""
 API_URL = ""
 MODEL_NAME = ""
 
+[MODELS.AZURE_OPENAI]
+API_KEY = ""
+ENDPOINT = ""
+MODEL_NAME = ""
+API_VERSION = ""
+
 [MODELS.OLLAMA]
 API_URL = "" # Ollama API URL - http://host.docker.internal:11434
 
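For reference, a filled-in AZURE_OPENAI section might look like the following; every value below is an illustrative placeholder, not part of the commit:

[MODELS.AZURE_OPENAI]
API_KEY = "<azure-openai-key>"                     # key from the Azure portal (placeholder)
ENDPOINT = "https://my-resource.openai.azure.com"  # resource endpoint (placeholder)
MODEL_NAME = "gpt-4o"                              # the Azure *deployment* name (placeholder)
API_VERSION = "2024-02-01"                         # Azure OpenAI REST API version (placeholder)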
@@ -14,12 +14,18 @@ import { chats, messages as messagesSchema } from '@/lib/db/schema';
 import { and, eq, gt } from 'drizzle-orm';
 import { getFileDetails } from '@/lib/utils/files';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-import { ChatOpenAI } from '@langchain/openai';
+import { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';
 import {
   getCustomOpenaiApiKey,
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '@/lib/config';
+import {
+  getAzureOpenaiApiKey,
+  getAzureOpenaiEndpoint,
+  getAzureOpenaiModelName,
+  getAzureOpenaiApiVersion,
+} from '@/lib/config';
 import { searchHandlers } from '@/lib/search';
 
 export const runtime = 'nodejs';
@@ -186,6 +192,8 @@ export const POST = async (req: Request) => {
     const body = (await req.json()) as Body;
     const { message } = body;
 
+    console.error('An error occurred while processing chat request:', "here");
+
     if (message.content === '') {
       return Response.json(
         {
@@ -222,6 +230,7 @@ export const POST = async (req: Request) => {
     let embedding = embeddingModel.model;
 
     if (body.chatModel?.provider === 'custom_openai') {
+      console.error('An error occurred while processing chat request:', "custom_openai");
       llm = new ChatOpenAI({
         openAIApiKey: getCustomOpenaiApiKey(),
         modelName: getCustomOpenaiModelName(),
@@ -230,6 +239,15 @@ export const POST = async (req: Request) => {
          baseURL: getCustomOpenaiApiUrl(),
        },
      }) as unknown as BaseChatModel;
+    } else if (body.chatModel?.provider == 'azure_openai') {
+      console.error('An error occurred while processing chat request:', "azure_openai");
+      llm = new AzureChatOpenAI({
+        openAIApiKey: getAzureOpenaiApiKey(),
+        deploymentName: getAzureOpenaiModelName(),
+        openAIBasePath: getAzureOpenaiEndpoint(),
+        openAIApiVersion: getAzureOpenaiApiVersion(),
+        temperature: 0.7
+      }) as unknown as BaseChatModel
     } else if (chatModelProvider && chatModel) {
       llm = chatModel.model;
     }
@@ -297,7 +315,7 @@ export const POST = async (req: Request) => {
       },
     });
   } catch (err) {
-    console.error('An error occurred while processing chat request:', err);
+    console.error('An error occurred while processing chat request 123:', err);
     return Response.json(
       { message: 'An error occurred while processing chat request' },
       { status: 500 },
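Note on the constructor options: the new branch passes ChatOpenAI-style names (openAIApiKey, openAIBasePath) to AzureChatOpenAI. Recent @langchain/openai releases also document dedicated azureOpenAI* fields; a minimal sketch of the same construction under that assumption (the "/openai/deployments" suffix reflects how azureOpenAIBasePath is documented and is an assumption about the stored ENDPOINT value):

import { AzureChatOpenAI } from '@langchain/openai';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import {
  getAzureOpenaiApiKey,
  getAzureOpenaiEndpoint,
  getAzureOpenaiModelName,
  getAzureOpenaiApiVersion,
} from '@/lib/config';

// Sketch only: the same model, spelled with the documented azureOpenAI* fields.
const llm = new AzureChatOpenAI({
  azureOpenAIApiKey: getAzureOpenaiApiKey(),
  azureOpenAIApiDeploymentName: getAzureOpenaiModelName(),
  // azureOpenAIBasePath expects the ".../openai/deployments" prefix,
  // not the bare resource endpoint (assumption).
  azureOpenAIBasePath: `${getAzureOpenaiEndpoint()}/openai/deployments`,
  azureOpenAIApiVersion: getAzureOpenaiApiVersion(),
  temperature: 0.7,
}) as unknown as BaseChatModel;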
@@ -3,6 +3,10 @@ import {
   getCustomOpenaiApiKey,
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
+  getAzureOpenaiApiKey,
+  getAzureOpenaiApiVersion,
+  getAzureOpenaiModelName,
+  getAzureOpenaiEndpoint,
   getGeminiApiKey,
   getGroqApiKey,
   getOllamaApiEndpoint,
@@ -58,6 +62,10 @@ export const GET = async (req: Request) => {
     config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
     config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
     config['customOpenaiModelName'] = getCustomOpenaiModelName();
+    config['azureOpenaiApiKey'] = getAzureOpenaiApiKey();
+    config['azureOpenaiApiVersion'] = getAzureOpenaiApiVersion();
+    config['azureOpenaiModelName'] = getAzureOpenaiModelName();
+    config['azureOpenaiEndpoint'] = getAzureOpenaiEndpoint();
 
     return Response.json({ ...config }, { status: 200 });
   } catch (err) {
@@ -98,6 +106,12 @@ export const POST = async (req: Request) => {
         API_KEY: config.customOpenaiApiKey,
         MODEL_NAME: config.customOpenaiModelName,
       },
+      AZURE_OPENAI: {
+        API_KEY: config.azureOpenaiApiKey,
+        MODEL_NAME: config.azureOpenaiModelName,
+        ENDPOINT: config.azureOpenaiEndpoint,
+        API_VERSION: config.azureOpenaiApiVersion,
+      }
     },
   };
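The GET handler exposes the four values under camelCase keys, and the POST handler maps them back into MODELS.AZURE_OPENAI, so a client saves them with a flat JSON body. A hedged usage sketch; the '/api/config' path is inferred from the route layout, and all values are placeholders:

// Sketch: persisting Azure OpenAI settings through the config route.
async function saveAzureConfig(): Promise<void> {
  await fetch('/api/config', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      azureOpenaiApiKey: '<azure-openai-key>',                     // placeholder
      azureOpenaiEndpoint: 'https://my-resource.openai.azure.com', // placeholder
      azureOpenaiModelName: 'gpt-4o',                              // deployment name, placeholder
      azureOpenaiApiVersion: '2024-02-01',                         // placeholder
    }),
  });
}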
@@ -4,10 +4,16 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '@/lib/config';
+import {
+  getAzureOpenaiApiKey,
+  getAzureOpenaiEndpoint,
+  getAzureOpenaiModelName,
+  getAzureOpenaiApiVersion,
+} from '@/lib/config';
 import { getAvailableChatModelProviders } from '@/lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
-import { ChatOpenAI } from '@langchain/openai';
+import { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';
 
 interface ChatModel {
   provider: string;
@@ -56,6 +62,14 @@ export const POST = async (req: Request) => {
         baseURL: getCustomOpenaiApiUrl(),
       },
     }) as unknown as BaseChatModel;
+  } else if (body.chatModel?.provider == 'azure_openai') {
+    llm = new AzureChatOpenAI({
+      openAIApiKey: getAzureOpenaiApiKey(),
+      deploymentName: getAzureOpenaiModelName(),
+      openAIBasePath: getAzureOpenaiEndpoint(),
+      openAIApiVersion: getAzureOpenaiApiVersion(),
+      temperature: 0.7
+    }) as unknown as BaseChatModel
   } else if (chatModelProvider && chatModel) {
     llm = chatModel.model;
   }
@@ -5,11 +5,15 @@ import {
 
 export const GET = async (req: Request) => {
   try {
+    console.error('here ok0');
     const [chatModelProviders, embeddingModelProviders] = await Promise.all([
       getAvailableChatModelProviders(),
       getAvailableEmbeddingModelProviders(),
     ]);
 
+    console.error('here ok1');
+
     Object.keys(chatModelProviders).forEach((provider) => {
       Object.keys(chatModelProviders[provider]).forEach((model) => {
         delete (chatModelProviders[provider][model] as { model?: unknown })
@@ -17,6 +21,8 @@ export const GET = async (req: Request) => {
       });
     });
 
+    console.error('here ok2');
+
     Object.keys(embeddingModelProviders).forEach((provider) => {
       Object.keys(embeddingModelProviders[provider]).forEach((model) => {
         delete (embeddingModelProviders[provider][model] as { model?: unknown })
@@ -1,6 +1,6 @@
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import type { Embeddings } from '@langchain/core/embeddings';
-import { ChatOpenAI } from '@langchain/openai';
+import { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';
 import {
   getAvailableChatModelProviders,
   getAvailableEmbeddingModelProviders,
@@ -12,6 +12,12 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '@/lib/config';
+import {
+  getAzureOpenaiApiKey,
+  getAzureOpenaiEndpoint,
+  getAzureOpenaiModelName,
+  getAzureOpenaiApiVersion,
+} from '@/lib/config';
 import { searchHandlers } from '@/lib/search';
 
 interface chatModel {
@@ -19,6 +25,10 @@ interface chatModel {
   name: string;
   customOpenAIKey?: string;
   customOpenAIBaseURL?: string;
+  azureOpenAIApiVersion?: string;
+  azureOpenAIApiKey?: string;
+  azureOpenAIApiDeploymentName?: string;
+  azureOpenAIEndpoint?: string;
 }
 
 interface embeddingModel {
@@ -89,6 +99,14 @@ export const POST = async (req: Request) => {
           body.chatModel?.customOpenAIBaseURL || getCustomOpenaiApiUrl(),
       },
     }) as unknown as BaseChatModel;
+  } else if (body.chatModel?.provider == 'azure_openai') {
+    llm = new AzureChatOpenAI({
+      openAIApiKey: body.chatModel?.azureOpenAIApiKey || getAzureOpenaiApiKey(),
+      deploymentName: body.chatModel?.azureOpenAIApiDeploymentName || getAzureOpenaiModelName(),
+      openAIBasePath: body.chatModel?.azureOpenAIEndpoint || getAzureOpenaiEndpoint(),
+      openAIApiVersion: body.chatModel?.azureOpenAIApiVersion || getAzureOpenaiApiVersion(),
+      temperature: 0.7
+    }) as unknown as BaseChatModel
   } else if (
     chatModelProviders[chatModelProvider] &&
     chatModelProviders[chatModelProvider][chatModel]
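With the widened chatModel interface, a search request can select Azure OpenAI and optionally inline its credentials, falling back to the server config for any field left out. A hedged sketch of the chatModel fragment (values are placeholders; the rest of the request body is unchanged by this commit):

// Sketch: chatModel fragment of a search request targeting Azure OpenAI.
const chatModel = {
  provider: 'azure_openai',
  name: 'gpt-4o', // deployment name (placeholder)
  // Each override is optional; the route falls back to the getAzureOpenai*() config values.
  azureOpenAIApiKey: '<azure-openai-key>',
  azureOpenAIEndpoint: 'https://my-resource.openai.azure.com',
  azureOpenAIApiDeploymentName: 'gpt-4o',
  azureOpenAIApiVersion: '2024-02-01',
};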
@@ -4,10 +4,16 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '@/lib/config';
+import {
+  getAzureOpenaiApiKey,
+  getAzureOpenaiEndpoint,
+  getAzureOpenaiModelName,
+  getAzureOpenaiApiVersion,
+} from '@/lib/config';
 import { getAvailableChatModelProviders } from '@/lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
-import { ChatOpenAI } from '@langchain/openai';
+import { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';
 
 interface ChatModel {
   provider: string;
@@ -55,6 +61,14 @@ export const POST = async (req: Request) => {
         baseURL: getCustomOpenaiApiUrl(),
       },
     }) as unknown as BaseChatModel;
+  } else if (body.chatModel?.provider == 'azure_openai') {
+    llm = new AzureChatOpenAI({
+      openAIApiKey: getAzureOpenaiApiKey(),
+      deploymentName: getAzureOpenaiModelName(),
+      openAIBasePath: getAzureOpenaiEndpoint(),
+      openAIApiVersion: getAzureOpenaiApiVersion(),
+      temperature: 0.7
+    }) as unknown as BaseChatModel
   } else if (chatModelProvider && chatModel) {
     llm = chatModel.model;
   }
@@ -4,10 +4,16 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '@/lib/config';
+import {
+  getAzureOpenaiApiKey,
+  getAzureOpenaiEndpoint,
+  getAzureOpenaiModelName,
+  getAzureOpenaiApiVersion,
+} from '@/lib/config';
 import { getAvailableChatModelProviders } from '@/lib/providers';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';
-import { ChatOpenAI } from '@langchain/openai';
+import { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';
 
 interface ChatModel {
   provider: string;
@@ -56,6 +62,14 @@ export const POST = async (req: Request) => {
         baseURL: getCustomOpenaiApiUrl(),
       },
     }) as unknown as BaseChatModel;
+  } else if (body.chatModel?.provider == 'azure_openai') {
+    llm = new AzureChatOpenAI({
+      openAIApiKey: getAzureOpenaiApiKey(),
+      deploymentName: getAzureOpenaiModelName(),
+      openAIBasePath: getAzureOpenaiEndpoint(),
+      openAIApiVersion: getAzureOpenaiApiVersion(),
+      temperature: 0.7
+    }) as unknown as BaseChatModel
   } else if (chatModelProvider && chatModel) {
     llm = chatModel.model;
   }
@@ -24,6 +24,10 @@ interface SettingsType {
   customOpenaiApiKey: string;
   customOpenaiApiUrl: string;
   customOpenaiModelName: string;
+  azureOpenaiModelName: string;
+  azureOpenaiEndpoint: string;
+  azureOpenaiApiKey: string;
+  azureOpenaiApiVersion: string;
 }
 
 interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
@@ -556,7 +560,8 @@ const Page = () => {
             </div>
 
             {selectedChatModelProvider &&
-              selectedChatModelProvider != 'custom_openai' && (
+              selectedChatModelProvider != 'custom_openai' &&
+              selectedChatModelProvider != 'azure_openai' && (
                 <div className="flex flex-col space-y-1">
                   <p className="text-black/70 dark:text-white/70 text-sm">
                     Chat Model
@@ -666,6 +671,93 @@ const Page = () => {
               </div>
             </div>
           )}
 
+          {selectedChatModelProvider &&
+            selectedChatModelProvider === 'azure_openai' && (
+              <div className="flex flex-col space-y-4">
+                <div className="flex flex-col space-y-1">
+                  <p className="text-black/70 dark:text-white/70 text-sm">
+                    Model Name
+                  </p>
+                  <Input
+                    type="text"
+                    placeholder="Model name"
+                    value={config.azureOpenaiModelName}
+                    isSaving={savingStates['azureOpenaiModelName']}
+                    onChange={(e: React.ChangeEvent<HTMLInputElement>) => {
+                      setConfig((prev) => ({
+                        ...prev!,
+                        azureOpenaiModelName: e.target.value,
+                      }));
+                    }}
+                    onSave={(value) =>
+                      saveConfig('azureOpenaiModelName', value)
+                    }
+                  />
+                </div>
+                <div className="flex flex-col space-y-1">
+                  <p className="text-black/70 dark:text-white/70 text-sm">
+                    Azure OpenAI API Key
+                  </p>
+                  <Input
+                    type="text"
+                    placeholder="Azure OpenAI API Key"
+                    value={config.azureOpenaiApiKey}
+                    isSaving={savingStates['azureOpenaiApiKey']}
+                    onChange={(e: React.ChangeEvent<HTMLInputElement>) => {
+                      setConfig((prev) => ({
+                        ...prev!,
+                        azureOpenaiApiKey: e.target.value,
+                      }));
+                    }}
+                    onSave={(value) =>
+                      saveConfig('azureOpenaiApiKey', value)
+                    }
+                  />
+                </div>
+                <div className="flex flex-col space-y-1">
+                  <p className="text-black/70 dark:text-white/70 text-sm">
+                    Azure OpenAI Base URL
+                  </p>
+                  <Input
+                    type="text"
+                    placeholder="Azure OpenAI Base URL"
+                    value={config.azureOpenaiEndpoint}
+                    isSaving={savingStates['azureOpenaiEndpoint']}
+                    onChange={(e: React.ChangeEvent<HTMLInputElement>) => {
+                      setConfig((prev) => ({
+                        ...prev!,
+                        azureOpenaiEndpoint: e.target.value,
+                      }));
+                    }}
+                    onSave={(value) =>
+                      saveConfig('azureOpenaiEndpoint', value)
+                    }
+                  />
+                </div>
+                <div className="flex flex-col space-y-1">
+                  <p className="text-black/70 dark:text-white/70 text-sm">
+                    Azure OpenAI Api Version
+                  </p>
+                  <Input
+                    type="text"
+                    placeholder="Azure OpenAI Api Version"
+                    value={config.azureOpenaiApiVersion}
+                    isSaving={savingStates['azureOpenaiApiVersion']}
+                    onChange={(e: React.ChangeEvent<HTMLInputElement>) => {
+                      setConfig((prev) => ({
+                        ...prev!,
+                        azureOpenaiApiVersion: e.target.value,
+                      }));
+                    }}
+                    onSave={(value) =>
+                      saveConfig('azureOpenaiApiVersion', value)
+                    }
+                  />
+                </div>
+              </div>
+            )}
 
         {config.embeddingModelProviders && (
           <div className="flex flex-col space-y-4 mt-4 pt-4 border-t border-light-200 dark:border-dark-200">
@@ -33,6 +33,12 @@ interface Config {
     API_KEY: string;
     MODEL_NAME: string;
   };
+  AZURE_OPENAI: {
+    ENDPOINT: string;
+    API_KEY: string;
+    MODEL_NAME: string;
+    API_VERSION: string;
+  }
 };
 API_ENDPOINTS: {
   SEARXNG: string;
@@ -77,6 +83,18 @@ export const getCustomOpenaiApiUrl = () =>
 export const getCustomOpenaiModelName = () =>
   loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
 
+export const getAzureOpenaiApiKey = () =>
+  loadConfig().MODELS.AZURE_OPENAI.API_KEY;
+
+export const getAzureOpenaiEndpoint = () =>
+  loadConfig().MODELS.AZURE_OPENAI.ENDPOINT;
+
+export const getAzureOpenaiModelName = () =>
+  loadConfig().MODELS.AZURE_OPENAI.MODEL_NAME;
+
+export const getAzureOpenaiApiVersion = () =>
+  loadConfig().MODELS.AZURE_OPENAI.API_VERSION;
+
 const mergeConfigs = (current: any, update: any): any => {
   if (update === null || update === undefined) {
     return current;
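Because the sample config ships every AZURE_OPENAI key as an empty string, "unset" is simply falsy, and the providers module can gate registration on all four values being non-empty. A hypothetical helper capturing that check, for illustration only:

import {
  getAzureOpenaiApiKey,
  getAzureOpenaiEndpoint,
  getAzureOpenaiModelName,
  getAzureOpenaiApiVersion,
} from '@/lib/config';

// '' is falsy, so unset keys disable the provider without extra null checks.
export const isAzureOpenaiConfigured = (): boolean =>
  Boolean(
    getAzureOpenaiApiKey() &&
      getAzureOpenaiEndpoint() &&
      getAzureOpenaiModelName() &&
      getAzureOpenaiApiVersion(),
  );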
@@ -6,13 +6,20 @@ import {
   getCustomOpenaiApiUrl,
   getCustomOpenaiModelName,
 } from '../config';
-import { ChatOpenAI } from '@langchain/openai';
+import {
+  getAzureOpenaiApiKey,
+  getAzureOpenaiEndpoint,
+  getAzureOpenaiModelName,
+  getAzureOpenaiApiVersion,
+} from '../config';
+import { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';
 import { loadOllamaChatModels, loadOllamaEmbeddingModels } from './ollama';
 import { loadGroqChatModels } from './groq';
 import { loadAnthropicChatModels } from './anthropic';
 import { loadGeminiChatModels, loadGeminiEmbeddingModels } from './gemini';
 import { loadTransformersEmbeddingsModels } from './transformers';
 import { loadDeepseekChatModels } from './deepseek';
+import Chat from '@/components/Chat';
 
 export interface ChatModel {
   displayName: string;
@@ -60,6 +67,11 @@ export const getAvailableChatModelProviders = async () => {
   const customOpenAiApiUrl = getCustomOpenaiApiUrl();
   const customOpenAiModelName = getCustomOpenaiModelName();
 
+  const azureOpenAiApiKey = getAzureOpenaiApiKey();
+  const azureOpenAiModelName = getAzureOpenaiModelName();
+  const azureOpenAiApiVersion = getAzureOpenaiApiVersion();
+  const azureOpenAiEndpoint = getAzureOpenaiEndpoint();
+
   models['custom_openai'] = {
     ...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
       ? {
@@ -78,6 +90,28 @@ export const getAvailableChatModelProviders = async () => {
       : {}),
   };
 
+  console.log("here ok1 - start azure_openai");
+  console.log(azureOpenAiApiKey, azureOpenAiEndpoint, azureOpenAiApiVersion, azureOpenAiModelName);
+
+  models['azure_openai'] = {
+    ...(azureOpenAiApiKey && azureOpenAiEndpoint && azureOpenAiApiVersion && azureOpenAiModelName
+      ? {
+          [azureOpenAiModelName]: {
+            displayName: azureOpenAiModelName,
+            model: new AzureChatOpenAI({
+              openAIApiKey: azureOpenAiApiKey,
+              deploymentName: azureOpenAiModelName,
+              openAIBasePath: azureOpenAiEndpoint,
+              openAIApiVersion: azureOpenAiApiVersion,
+              temperature: 0.7
+            }) as unknown as BaseChatModel
+          },
+        }
+      : {}),
+  }
+
+  console.log(models);
+
   return models;
 };
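After this change, a fully configured Azure deployment shows up alongside the other providers, keyed by its deployment name. A short usage sketch of the resulting shape:

const providers = await getAvailableChatModelProviders();
// providers['azure_openai'] is {} if any AZURE_OPENAI key is empty; otherwise:
// { [deploymentName]: { displayName: deploymentName, model: AzureChatOpenAI } }
const azureModels = Object.keys(providers['azure_openai'] ?? {});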