feat(tracing): Integrate Langfuse for LLM call tracing and add documentation

parent 9670003970
commit 7b4a7a531e

17 changed files with 183 additions and 36 deletions
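
Every hunk below applies the same pattern: import getLangfuseCallbacks from '@/lib/tracing/langfuse' and spread its result into the options object of a LangChain invoke / streamEvents / withConfig call. A minimal sketch of that pattern, assuming the LANGFUSE_* environment variables are set; the model and prompt here are placeholders, not code from this commit:

import { ChatOpenAI } from '@langchain/openai';
import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';

// Placeholder model; any BaseChatModel used in these files works the same way.
const llm = new ChatOpenAI({ model: 'gpt-4o-mini' });

async function demo() {
  // When Langfuse is configured this spreads { callbacks: [handler] } into the
  // call options; otherwise it spreads an empty object and tracing is skipped.
  const result = await llm.invoke('Hello', { ...getLangfuseCallbacks() });
  console.log(result.content);
}
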
@@ -16,6 +16,7 @@ import { allTools } from '@/lib/tools';
 import { Source } from '@/lib/types/widget';
 import { WidgetProcessRequest } from '@/lib/types/api';
 import axios from 'axios';
+import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';
 
 // Helper function to fetch content from a single source
 async function fetchSourceContent(
@@ -149,6 +150,7 @@ async function processWithLLM(
     },
     {
       recursionLimit: 15, // Limit recursion depth to prevent infinite loops
+      ...getLangfuseCallbacks(),
     },
   );
 

@@ -17,6 +17,7 @@ import { ChatOpenAI } from '@langchain/openai';
 import { ChatOllama } from '@langchain/ollama';
 import { z } from 'zod';
 import { withStructuredOutput } from '@/lib/utils/structuredOutput';
+import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';
 
 interface FileRes {
   fileName: string;
@@ -71,7 +72,9 @@ Generate topics that describe what this document is about, its domain, and key s
     name: 'generate_topics',
   });
 
-  const result = await structuredLlm.invoke(prompt);
+  const result = await structuredLlm.invoke(prompt, {
+    ...getLangfuseCallbacks(),
+  });
   console.log('Generated topics:', result.topics);
   // Filename is included for context
   return filename + ', ' + result.topics.join(', ');

@@ -25,7 +25,7 @@ export default function RootLayout({
   children: React.ReactNode;
 }>) {
   return (
-    <html className="h-full" lang="en" suppressHydrationWarning>
+    <html className="h-full" lang="en" suppressHydrationWarning>
       <head>
         <link
           rel="search"

@@ -10,6 +10,7 @@ import LineOutputParser from '../outputParsers/lineOutputParser';
 import { searchSearxng } from '../searxng';
 import { formatDateForLLM } from '../utils';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';
 
 const imageSearchChainPrompt = `
 # Instructions
@@ -140,7 +141,7 @@ const handleImageSearch = (
   systemInstructions?: string,
 ) => {
   const imageSearchChain = createImageSearchChain(llm, systemInstructions);
-  return imageSearchChain.invoke(input);
+  return imageSearchChain.invoke(input, { ...getLangfuseCallbacks() });
 };
 
 export default handleImageSearch;

@@ -5,6 +5,7 @@ import formatChatHistoryAsString from '../utils/formatHistory';
 import { BaseMessage } from '@langchain/core/messages';
 import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { ChatOpenAI } from '@langchain/openai';
+import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';
 
 const suggestionGeneratorPrompt = `
 You are an AI suggestion generator for an AI powered search engine.
@@ -74,7 +75,9 @@ const generateSuggestions = (
     llm,
     systemInstructions,
   );
-  return suggestionGeneratorChain.invoke(input);
+  return suggestionGeneratorChain.invoke(input, {
+    ...getLangfuseCallbacks(),
+  });
 };
 
 export default generateSuggestions;

@@ -10,6 +10,7 @@ import LineOutputParser from '../outputParsers/lineOutputParser';
 import { searchSearxng } from '../searxng';
 import { formatDateForLLM } from '../utils';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';
 
 const VideoSearchChainPrompt = `
 # Instructions
@@ -147,7 +148,7 @@ const handleVideoSearch = (
   systemInstructions?: string,
 ) => {
   const VideoSearchChain = createVideoSearchChain(llm, systemInstructions);
-  return VideoSearchChain.invoke(input);
+  return VideoSearchChain.invoke(input, { ...getLangfuseCallbacks() });
 };
 
 export default handleVideoSearch;

@@ -19,6 +19,7 @@ import {
 import { formatDateForLLM } from '../utils';
 import { getModelName } from '../utils/modelUtils';
 import { removeThinkingBlocks } from '../utils/contentUtils';
+import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';
 
 /**
  * Normalize usage metadata from different LLM providers
@@ -511,12 +512,14 @@ Use all available tools strategically to provide comprehensive, well-researched,
       },
       recursionLimit: 25, // Allow sufficient iterations for tool use
       signal: this.signal,
+      ...getLangfuseCallbacks(),
     };
 
     // Use streamEvents to capture both tool calls and token-level streaming
     const eventStream = agent.streamEvents(initialState, {
       ...config,
       version: 'v2',
+      ...getLangfuseCallbacks(),
     });
 
     let finalResult: any = null;

@@ -23,6 +23,7 @@ import { formatDateForLLM } from '../utils';
 import { getDocumentsFromLinks } from '../utils/documents';
 import formatChatHistoryAsString from '../utils/formatHistory';
 import { getModelName } from '../utils/modelUtils';
+import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';
 
 export interface SpeedSearchAgentType {
   searchAndAnswer: (
@@ -103,7 +104,7 @@ class SpeedSearchAgent implements SpeedSearchAgentType {
 
     this.emitProgress(emitter, 10, `Building search query`);
 
-    return RunnableSequence.from([
+    return RunnableSequence.from([
       PromptTemplate.fromTemplate(this.config.queryGeneratorPrompt),
       llm,
       this.strParser,
@@ -235,8 +236,8 @@ class SpeedSearchAgent implements SpeedSearchAgentType {
 </text>
 
 Make sure to answer the query in the summary.
-          `,
-          { signal },
+          `,
+          { signal, ...getLangfuseCallbacks() },
         );
 
         const document = new Document({
@@ -348,7 +349,7 @@ class SpeedSearchAgent implements SpeedSearchAgentType {
         date,
         systemInstructions,
       },
-      { signal: options?.signal },
+      { signal: options?.signal, ...getLangfuseCallbacks() },
     );
 
     query = searchRetrieverResult.query;
@@ -379,6 +380,7 @@ class SpeedSearchAgent implements SpeedSearchAgentType {
           )
             .withConfig({
               runName: 'FinalSourceRetriever',
+              ...getLangfuseCallbacks(),
             })
             .pipe(this.processDocs),
       }),
@@ -391,6 +393,7 @@ class SpeedSearchAgent implements SpeedSearchAgentType {
       this.strParser,
     ]).withConfig({
       runName: 'FinalResponseGenerator',
+      ...getLangfuseCallbacks(),
     });
   }
 
@@ -539,7 +542,7 @@ ${docs[index].metadata?.url.toLowerCase().includes('file') ? '' : '\n<url>' + do
       personaInstructions,
     );
 
-    const stream = answeringChain.streamEvents(
+    const stream = answeringChain.streamEvents(
       {
         chat_history: history,
         query: message,
@@ -547,7 +550,8 @@ ${docs[index].metadata?.url.toLowerCase().includes('file') ? '' : '\n<url>' + do
       {
        version: 'v1',
        // Pass the abort signal to the LLM streaming chain
-        signal,
+        signal,
+        ...getLangfuseCallbacks(),
       },
     );
 

@@ -7,6 +7,7 @@ import { removeThinkingBlocks } from '@/lib/utils/contentUtils';
 import { Command, getCurrentTaskInput } from '@langchain/langgraph';
 import { SimplifiedAgentStateType } from '@/lib/state/chatAgentState';
 import { ToolMessage } from '@langchain/core/messages';
+import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';
 
 // Schema for URL summarization tool input
 const URLSummarizationToolSchema = z.object({
@@ -144,6 +145,7 @@ Provide a comprehensive summary of the above web page content, focusing on infor
 
     const result = await llm.invoke(summarizationPrompt, {
       signal: config?.signal,
+      ...getLangfuseCallbacks(),
     });
 
     finalContent = removeThinkingBlocks(result.content as string);

src/lib/tracing/langfuse.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
+// Centralized Langfuse tracing utility
+// Provides a singleton CallbackHandler and a helper to attach callbacks
+
+import type { Callbacks } from '@langchain/core/callbacks/manager';
+import { CallbackHandler } from 'langfuse-langchain';
+
+let handler: CallbackHandler | null = null;
+
+export function getLangfuseHandler(): CallbackHandler | null {
+  // Only initialize on server
+  if (typeof window !== 'undefined') return null;
+
+  if (handler) return handler;
+
+  try {
+    // The handler reads LANGFUSE_* env vars by default. You can also pass keys here if desired.
+    handler = new CallbackHandler({
+      publicKey: process.env.LANGFUSE_PUBLIC_KEY,
+      secretKey: process.env.LANGFUSE_SECRET_KEY,
+      baseUrl: process.env.LANGFUSE_BASE_URL,
+    });
+  } catch (e) {
+    // If initialization fails (e.g., missing envs), disable tracing gracefully
+    handler = null;
+  }
+
+  return handler;
+}
+
+// Convenience helper to spread into LangChain invoke/config objects
+export function getLangfuseCallbacks(): { callbacks?: Callbacks } {
+  const h = getLangfuseHandler();
+  return h ? { callbacks: [h] } : {};
+}
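
The utility is written so call sites can spread it unconditionally: it is a no-op in the browser, when the LANGFUSE_* variables are missing, or when handler construction throws. A hedged sketch of the two consumption patterns this commit uses, with a placeholder runnable standing in for the real prompt -> llm -> parser chains:

import { RunnableLambda } from '@langchain/core/runnables';
import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';

// Hypothetical chain; real call sites attach the callbacks at construction time
// via withConfig(), as in the FinalResponseGenerator hunk above ...
const chain = RunnableLambda.from(async (q: string) => `echo: ${q}`).withConfig({
  runName: 'FinalResponseGenerator',
  ...getLangfuseCallbacks(),
});

async function demo() {
  // ... or per call, next to options such as an abort signal.
  const out = await chain.invoke('hello', { ...getLangfuseCallbacks() });
  console.log(out);
}
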
@@ -5,6 +5,7 @@ import { formatDateForLLM } from '../utils';
 import { ChatOpenAI, OpenAIClient } from '@langchain/openai';
 import { removeThinkingBlocks } from './contentUtils';
 import { withStructuredOutput } from './structuredOutput';
+import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';
 
 export type PreviewAnalysisResult = {
   isSufficient: boolean;
@@ -81,7 +82,7 @@ Snippet: ${content.snippet}
     name: 'analyze_preview_content',
   });
 
-  const analysisResult = await structuredLLM.invoke(
+  const analysisResult = await structuredLLM.invoke(
     `You are a preview content analyzer, tasked with determining if search result snippets contain sufficient information to answer the Task Query.
 
 # Instructions
@@ -118,7 +119,7 @@ ${taskQuery}
 # Search Result Previews to Analyze:
 ${formattedPreviewContent}
 `,
-    { signal },
+    { signal, ...getLangfuseCallbacks() },
   );
 
   if (!analysisResult) {

@@ -6,6 +6,7 @@ import { getWebContent } from './documents';
 import { removeThinkingBlocks } from './contentUtils';
 import { setTemperature } from './modelUtils';
 import { withStructuredOutput } from './structuredOutput';
+import { getLangfuseCallbacks } from '@/lib/tracing/langfuse';
 
 export type SummarizeResult = {
   document: Document | null;
@@ -95,7 +96,7 @@ Here is the query you need to answer: ${query}
 
 Here is the content to analyze:
 ${contentToAnalyze}`,
-    { signal },
+    { signal, ...getLangfuseCallbacks() },
   );
 
   if (!relevanceResult) {
@@ -168,7 +169,10 @@ Here is the query you need to answer: ${query}
 Here is the content to summarize:
 ${i === 0 ? content.metadata.html : content.pageContent}`;
 
-      const result = await llm.invoke(prompt, { signal });
+      const result = await llm.invoke(prompt, {
+        signal,
+        ...getLangfuseCallbacks(),
+      });
       summary = removeThinkingBlocks(result.content as string);
       break;
     } catch (error) {