feat(UI): allow system prompts and persona prompts to be saved server-side and individually included in or excluded from messages
parent 8e6934bb64
commit 011d10df29
27 changed files with 1345 additions and 132 deletions
@@ -91,7 +91,14 @@ const outputParser = new LineOutputParser({
   key: 'answer',
 });
 
-const createImageSearchChain = (llm: BaseChatModel) => {
+const createImageSearchChain = (
+  llm: BaseChatModel,
+  systemInstructions?: string,
+) => {
+  const systemPrompt = systemInstructions ? `${systemInstructions}\n\n` : '';
+
+  const fullPrompt = `${systemPrompt}${imageSearchChainPrompt}`;
+
   return RunnableSequence.from([
     RunnableMap.from({
       chat_history: (input: ImageSearchChainInput) => {
@@ -102,7 +109,7 @@ const createImageSearchChain = (llm: BaseChatModel) => {
       },
       date: () => formatDateForLLM(),
     }),
-    PromptTemplate.fromTemplate(imageSearchChainPrompt),
+    PromptTemplate.fromTemplate(fullPrompt),
     llm,
     outputParser,
     RunnableLambda.from(async (searchQuery: string) => {
@@ -130,8 +137,9 @@ const createImageSearchChain = (llm: BaseChatModel) => {
 const handleImageSearch = (
   input: ImageSearchChainInput,
   llm: BaseChatModel,
+  systemInstructions?: string,
 ) => {
-  const imageSearchChain = createImageSearchChain(llm);
+  const imageSearchChain = createImageSearchChain(llm, systemInstructions);
   return imageSearchChain.invoke(input);
 };
 
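On the call side, the new parameter is optional, so existing callers are unaffected, and passing undefined leaves the saved prompt out of the request entirely. Below is a minimal sketch of how a route might thread a server-side saved prompt into handleImageSearch from this diff; the loadSavedSystemPrompt helper, the query field of the input, and the model choice are illustrative assumptions, not part of this commit.

import { ChatOpenAI } from '@langchain/openai';
import type { BaseMessage } from '@langchain/core/messages';

// Hypothetical helper (not in this commit): fetch the user's saved
// system/persona prompt from server-side storage, or undefined when the
// user has excluded it for this message.
const loadSavedSystemPrompt = async (
  userId: string,
): Promise<string | undefined> => {
  return undefined; // stub: would query the prompts table here
};

const searchImages = async (
  userId: string,
  query: string,
  history: BaseMessage[],
) => {
  const llm = new ChatOpenAI({ model: 'gpt-4o-mini' });
  const systemInstructions = await loadSavedSystemPrompt(userId);

  // When systemInstructions is undefined, the chain builds its prompt
  // from the unmodified imageSearchChainPrompt.
  return handleImageSearch(
    { chat_history: history, query },
    llm,
    systemInstructions,
  );
};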
@@ -45,13 +45,20 @@ const outputParser = new ListLineOutputParser({
   key: 'suggestions',
 });
 
-const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
+const createSuggestionGeneratorChain = (
+  llm: BaseChatModel,
+  systemInstructions?: string,
+) => {
+  const systemPrompt = systemInstructions ? `${systemInstructions}\n\n` : '';
+
+  const fullPrompt = `${systemPrompt}${suggestionGeneratorPrompt}`;
+
   return RunnableSequence.from([
     RunnableMap.from({
       chat_history: (input: SuggestionGeneratorInput) =>
         formatChatHistoryAsString(input.chat_history),
     }),
-    PromptTemplate.fromTemplate(suggestionGeneratorPrompt),
+    PromptTemplate.fromTemplate(fullPrompt),
     llm,
     outputParser,
   ]);
@@ -60,9 +67,13 @@ const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
 const generateSuggestions = (
   input: SuggestionGeneratorInput,
   llm: BaseChatModel,
+  systemInstructions?: string,
 ) => {
   (llm as unknown as ChatOpenAI).temperature = 0;
-  const suggestionGeneratorChain = createSuggestionGeneratorChain(llm);
+  const suggestionGeneratorChain = createSuggestionGeneratorChain(
+    llm,
+    systemInstructions,
+  );
   return suggestionGeneratorChain.invoke(input);
 };
 
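The exclusion path is explicit here: with systemInstructions undefined, systemPrompt is the empty string and fullPrompt is exactly the original suggestionGeneratorPrompt, so opted-out requests behave as before. A minimal caller sketch follows; the wrapper name, the includePersona flag, and the personaPrompt value stand in for the UI toggle and are assumptions, not part of this commit.

import type { BaseMessage } from '@langchain/core/messages';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';

// Hypothetical wrapper: the per-message toggle decides whether the saved
// persona prompt rides along with this suggestion request.
const suggestFollowUps = (
  history: BaseMessage[],
  llm: BaseChatModel,
  personaPrompt: string | undefined,
  includePersona: boolean,
) =>
  generateSuggestions(
    { chat_history: history },
    llm,
    includePersona ? personaPrompt : undefined,
  );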
@@ -92,7 +92,14 @@ const answerParser = new LineOutputParser({
   key: 'answer',
 });
 
-const createVideoSearchChain = (llm: BaseChatModel) => {
+const createVideoSearchChain = (
+  llm: BaseChatModel,
+  systemInstructions?: string,
+) => {
+  const systemPrompt = systemInstructions ? `${systemInstructions}\n\n` : '';
+
+  const fullPrompt = `${systemPrompt}${VideoSearchChainPrompt}`;
+
   return RunnableSequence.from([
     RunnableMap.from({
       chat_history: (input: VideoSearchChainInput) => {
@@ -103,7 +110,7 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
       },
       date: () => formatDateForLLM(),
     }),
-    PromptTemplate.fromTemplate(VideoSearchChainPrompt),
+    PromptTemplate.fromTemplate(fullPrompt),
     llm,
     answerParser,
     RunnableLambda.from(async (searchQuery: string) => {
@@ -137,8 +144,9 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
 const handleVideoSearch = (
   input: VideoSearchChainInput,
   llm: BaseChatModel,
+  systemInstructions?: string,
 ) => {
-  const VideoSearchChain = createVideoSearchChain(llm);
+  const VideoSearchChain = createVideoSearchChain(llm, systemInstructions);
   return VideoSearchChain.invoke(input);
 };
 
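One subtlety shared by all three chains: fullPrompt is passed to PromptTemplate.fromTemplate, whose default f-string parsing treats single braces as input variables, so saved instructions containing a literal { or } could fail to parse or inject unexpected variables. Below is a hedged sketch of an escaping step a caller could apply before passing instructions in; this guard is not part of this commit and assumes the f-string convention of doubling braces to make them literal.

// Double each brace so PromptTemplate.fromTemplate treats it as a literal
// character rather than the start of a template variable.
const escapeForPromptTemplate = (s: string): string =>
  s.replace(/{/g, '{{').replace(/}/g, '}}');

// e.g. a persona prompt like 'Answer in JSON: {"tone": "formal"}'
// becomes safe to prepend to the chain prompt.
const safeInstructions = escapeForPromptTemplate(
  'Answer in JSON: {"tone": "formal"}',
);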