feat(UI): allow system prompts and persona prompts to be saved server-side and individually included or excluded from messages

commit 011d10df29
parent 8e6934bb64
Author: Willie Zutz
Date:   2025-05-27 12:53:30 -06:00

27 changed files with 1345 additions and 132 deletions


@@ -91,7 +91,14 @@ const outputParser = new LineOutputParser({
   key: 'answer',
 });
 
-const createImageSearchChain = (llm: BaseChatModel) => {
+const createImageSearchChain = (
+  llm: BaseChatModel,
+  systemInstructions?: string,
+) => {
+  const systemPrompt = systemInstructions ? `${systemInstructions}\n\n` : '';
+  const fullPrompt = `${systemPrompt}${imageSearchChainPrompt}`;
+
   return RunnableSequence.from([
     RunnableMap.from({
       chat_history: (input: ImageSearchChainInput) => {
@@ -102,7 +109,7 @@ const createImageSearchChain = (llm: BaseChatModel) => {
       },
       date: () => formatDateForLLM(),
     }),
-    PromptTemplate.fromTemplate(imageSearchChainPrompt),
+    PromptTemplate.fromTemplate(fullPrompt),
     llm,
     outputParser,
     RunnableLambda.from(async (searchQuery: string) => {
@@ -130,8 +137,9 @@ const createImageSearchChain = (llm: BaseChatModel) => {
 
 const handleImageSearch = (
   input: ImageSearchChainInput,
   llm: BaseChatModel,
+  systemInstructions?: string,
 ) => {
-  const imageSearchChain = createImageSearchChain(llm);
+  const imageSearchChain = createImageSearchChain(llm, systemInstructions);
   return imageSearchChain.invoke(input);
 };
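All three chains in this commit change the same way: an optional systemInstructions string is prepended to the existing chain prompt before it reaches PromptTemplate.fromTemplate. A minimal sketch of that shared pattern, with a hypothetical basePrompt standing in for the real prompt constants:

// Sketch only; `basePrompt` stands in for imageSearchChainPrompt,
// suggestionGeneratorPrompt, or VideoSearchChainPrompt.
const withSystemInstructions = (
  basePrompt: string,
  systemInstructions?: string,
): string => {
  // Undefined or empty instructions leave the prompt untouched,
  // which is what keeps existing callers working unchanged.
  const systemPrompt = systemInstructions ? `${systemInstructions}\n\n` : '';
  return `${systemPrompt}${basePrompt}`;
};

One caveat worth noting: the combined string still goes through PromptTemplate.fromTemplate, so any literal { or } characters in saved instructions would be parsed as template variables rather than passed through verbatim.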


@@ -45,13 +45,20 @@ const outputParser = new ListLineOutputParser({
   key: 'suggestions',
 });
 
-const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
+const createSuggestionGeneratorChain = (
+  llm: BaseChatModel,
+  systemInstructions?: string,
+) => {
+  const systemPrompt = systemInstructions ? `${systemInstructions}\n\n` : '';
+  const fullPrompt = `${systemPrompt}${suggestionGeneratorPrompt}`;
+
   return RunnableSequence.from([
     RunnableMap.from({
       chat_history: (input: SuggestionGeneratorInput) =>
         formatChatHistoryAsString(input.chat_history),
     }),
-    PromptTemplate.fromTemplate(suggestionGeneratorPrompt),
+    PromptTemplate.fromTemplate(fullPrompt),
     llm,
     outputParser,
   ]);
@@ -60,9 +67,13 @@ const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
 
 const generateSuggestions = (
   input: SuggestionGeneratorInput,
   llm: BaseChatModel,
+  systemInstructions?: string,
 ) => {
   (llm as unknown as ChatOpenAI).temperature = 0;
-  const suggestionGeneratorChain = createSuggestionGeneratorChain(llm);
+  const suggestionGeneratorChain = createSuggestionGeneratorChain(
+    llm,
+    systemInstructions,
+  );
   return suggestionGeneratorChain.invoke(input);
 };
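Because the new parameter is optional, existing call sites keep compiling unchanged; only callers that have saved instructions need to pass them. A hypothetical call site (the chatHistory variable, llm setup, and persona text are illustrative, not part of this commit):

// Hypothetical caller; only generateSuggestions itself comes from the diff.
const suggestions = await generateSuggestions(
  { chat_history: chatHistory },
  llm,
  'You are a concise research assistant.', // saved persona prompt, if enabled
);

Note that generateSuggestions also force-casts the model to ChatOpenAI to pin temperature to 0; that cast mutates the llm instance passed in, not a copy.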


@@ -92,7 +92,14 @@ const answerParser = new LineOutputParser({
   key: 'answer',
 });
 
-const createVideoSearchChain = (llm: BaseChatModel) => {
+const createVideoSearchChain = (
+  llm: BaseChatModel,
+  systemInstructions?: string,
+) => {
+  const systemPrompt = systemInstructions ? `${systemInstructions}\n\n` : '';
+  const fullPrompt = `${systemPrompt}${VideoSearchChainPrompt}`;
+
   return RunnableSequence.from([
     RunnableMap.from({
       chat_history: (input: VideoSearchChainInput) => {
@@ -103,7 +110,7 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
       },
       date: () => formatDateForLLM(),
     }),
-    PromptTemplate.fromTemplate(VideoSearchChainPrompt),
+    PromptTemplate.fromTemplate(fullPrompt),
     llm,
     answerParser,
     RunnableLambda.from(async (searchQuery: string) => {
@@ -137,8 +144,9 @@ const createVideoSearchChain = (llm: BaseChatModel) => {
 
 const handleVideoSearch = (
   input: VideoSearchChainInput,
   llm: BaseChatModel,
+  systemInstructions?: string,
 ) => {
-  const VideoSearchChain = createVideoSearchChain(llm);
+  const VideoSearchChain = createVideoSearchChain(llm, systemInstructions);
   return VideoSearchChain.invoke(input);
 };
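The "individually included or excluded" part of the commit title maps directly onto this optional parameter: pass the saved prompt when the user has it enabled, undefined otherwise. A sketch of that glue, assuming a hypothetical settings record for the server-side storage this commit adds:

// `settings`, `chatHistory`, and `userQuery` are hypothetical; the diff
// above only shows the chain side of the feature.
const systemInstructions = settings.includeSystemPrompt
  ? settings.systemPrompt
  : undefined;

const videos = await handleVideoSearch(
  { chat_history: chatHistory, query: userQuery }, // assumed input shape
  llm,
  systemInstructions,
);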