Mirror of https://github.com/DayuanJiang/next-ai-draw-io.git, synced 2026-01-02 22:32:27 +08:00
feat: restore Langfuse observability integration (#103)
- Add lib/langfuse.ts with client, trace input/output, and telemetry config helpers
- Add instrumentation.ts for OpenTelemetry setup with the Langfuse span processor
- Add /api/log-save endpoint for logging diagram saves
- Add /api/log-feedback endpoint for thumbs up/down feedback
- Update chat route with sessionId tracking and telemetry
- Add feedback buttons (thumbs up/down) to chat messages
- Add sessionId tracking throughout the app
- Update env.example with Langfuse configuration
- Add @langfuse/client, @langfuse/otel, @langfuse/tracing, and @opentelemetry/sdk-trace-node dependencies
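Note: instrumentation.ts itself isn't shown in the diff below. As rough orientation only, here is a minimal sketch of what an OpenTelemetry setup with the Langfuse span processor might look like, assuming the v4 Langfuse SDK (LangfuseSpanProcessor from @langfuse/otel) and Next.js's instrumentation hook; the actual file in this commit may differ:

// instrumentation.ts (sketch, not the actual file from this commit)
// Next.js calls register() once per server start when instrumentation is enabled.
export async function register() {
    // Only set up tracing in the Node.js runtime, and only if Langfuse is configured.
    if (process.env.NEXT_RUNTIME === 'nodejs' && process.env.LANGFUSE_SECRET_KEY) {
        const { NodeTracerProvider } = await import('@opentelemetry/sdk-trace-node');
        const { LangfuseSpanProcessor } = await import('@langfuse/otel');

        // LangfuseSpanProcessor reads the Langfuse keys and base URL from the
        // environment and exports finished spans to Langfuse.
        const provider = new NodeTracerProvider({
            spanProcessors: [new LangfuseSpanProcessor()],
        });
        provider.register();
    }
}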
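Similarly, /api/log-feedback is not part of the hunks below. A hedged sketch of such a route, assuming the v4 LangfuseClient from @langfuse/client and its score.create API; the payload shape and score name here are illustrative, not the commit's actual code:

// app/api/log-feedback/route.ts (illustrative sketch only)
import { LangfuseClient } from '@langfuse/client';

const langfuse = new LangfuseClient();

export async function POST(req: Request) {
    const { traceId, feedback } = await req.json();
    if (typeof traceId !== 'string' || !['up', 'down'].includes(feedback)) {
        return Response.json({ error: 'Invalid payload' }, { status: 400 });
    }
    // Record thumbs up/down as a numeric score attached to the trace.
    await langfuse.score.create({
        traceId,
        name: 'user-feedback',
        value: feedback === 'up' ? 1 : 0,
    });
    return Response.json({ ok: true });
}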
@@ -1,6 +1,7 @@
 import { streamText, convertToModelMessages, createUIMessageStream, createUIMessageStreamResponse } from 'ai';
 import { getAIModel } from '@/lib/ai-providers';
 import { findCachedResponse } from '@/lib/cached-responses';
+import { setTraceInput, setTraceOutput, getTelemetryConfig, wrapWithObserve } from '@/lib/langfuse';
 import { getSystemPrompt } from '@/lib/system-prompts';
 import { z } from "zod";
 
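The four helpers imported from '@/lib/langfuse' are defined in the new lib/langfuse.ts, which this page doesn't show. A minimal sketch of the two trace helpers, assuming they delegate to updateActiveTrace from @langfuse/tracing (the v4 OTel-based SDK) and no-op when Langfuse isn't configured; the real implementation may differ:

// lib/langfuse.ts (sketch of the trace helpers; assumes @langfuse/tracing v4)
import { updateActiveTrace } from '@langfuse/tracing';

const isConfigured = () =>
    Boolean(process.env.LANGFUSE_PUBLIC_KEY && process.env.LANGFUSE_SECRET_KEY);

// Attach user input, session, and user to the active trace (no-op if unconfigured).
export function setTraceInput(params: {
    input: string;
    sessionId?: string;
    userId?: string;
}) {
    if (!isConfigured()) return;
    updateActiveTrace({
        input: params.input,
        sessionId: params.sessionId,
        userId: params.userId,
    });
}

// Attach the final output (and token usage) to the active trace. Token usage
// normally lives on observations; stashing it in trace metadata is one option.
export function setTraceOutput(
    output: string,
    usage?: { promptTokens?: number; completionTokens?: number },
) {
    if (!isConfigured()) return;
    updateActiveTrace({
        output,
        metadata: usage ? { usage } : undefined,
    });
}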
@@ -61,7 +62,27 @@ function createCachedStreamResponse(xml: string): Response {
 
 // Inner handler function
 async function handleChatRequest(req: Request): Promise<Response> {
-    const { messages, xml } = await req.json();
+    const { messages, xml, sessionId } = await req.json();
+
+    // Get user IP for Langfuse tracking
+    const forwardedFor = req.headers.get('x-forwarded-for');
+    const userId = forwardedFor?.split(',')[0]?.trim() || 'anonymous';
+
+    // Validate sessionId for Langfuse (must be string, max 200 chars)
+    const validSessionId = sessionId && typeof sessionId === 'string' && sessionId.length <= 200
+        ? sessionId
+        : undefined;
+
+    // Extract user input text for Langfuse trace
+    const currentMessage = messages[messages.length - 1];
+    const userInputText = currentMessage?.parts?.find((p: any) => p.type === 'text')?.text || '';
+
+    // Update Langfuse trace with input, session, and user
+    setTraceInput({
+        input: userInputText,
+        sessionId: validSessionId,
+        userId: userId,
+    });
 
     // === FILE VALIDATION START ===
     const fileValidation = validateFileParts(messages);
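The sessionId destructured above comes from the client ("sessionId tracking throughout the app" in the commit message). Purely as an illustration, a hypothetical client helper that generates one id per browser session and sends it with each chat request could look like:

// Hypothetical client helper; the commit's actual implementation isn't shown here.
export function getSessionId(): string {
    const key = 'chat-session-id';
    let id = sessionStorage.getItem(key);
    if (!id) {
        id = crypto.randomUUID();
        sessionStorage.setItem(key, id);
    }
    return id;
}

// Example request body matching what handleChatRequest destructures:
// fetch('/api/chat', { method: 'POST', body: JSON.stringify({ messages, xml, sessionId: getSessionId() }) })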
@@ -191,9 +212,19 @@ ${lastMessageText}
         messages: allMessages,
         ...(providerOptions && { providerOptions }),
         ...(headers && { headers }),
-        onFinish: ({ usage, providerMetadata }) => {
-            console.log('[Cache] providerMetadata:', JSON.stringify(providerMetadata, null, 2));
+        // Langfuse telemetry config (returns undefined if not configured)
+        ...(getTelemetryConfig({ sessionId: validSessionId, userId }) && {
+            experimental_telemetry: getTelemetryConfig({ sessionId: validSessionId, userId }),
+        }),
+        onFinish: ({ text, usage, providerMetadata }) => {
+            console.log('[Cache] Full providerMetadata:', JSON.stringify(providerMetadata, null, 2));
+            console.log('[Cache] Usage:', JSON.stringify(usage, null, 2));
+            // Pass usage to Langfuse (Bedrock streaming doesn't auto-report tokens to telemetry)
+            // AI SDK uses inputTokens/outputTokens, Langfuse expects promptTokens/completionTokens
+            setTraceOutput(text, {
+                promptTokens: usage?.inputTokens,
+                completionTokens: usage?.outputTokens,
+            });
         },
         tools: {
             // Client-side tool that will be executed on the client
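For the conditional spread above to work, getTelemetryConfig must return either undefined or an object the AI SDK accepts as experimental_telemetry. A sketch under the same assumptions as the earlier lib/langfuse.ts excerpt:

// lib/langfuse.ts (continued sketch): telemetry config for the AI SDK.
// Returns undefined when the Langfuse env vars are missing, so callers can
// conditionally spread `experimental_telemetry` as in the diff above.
export function getTelemetryConfig(params: { sessionId?: string; userId?: string }) {
    if (!process.env.LANGFUSE_PUBLIC_KEY || !process.env.LANGFUSE_SECRET_KEY) {
        return undefined;
    }
    return {
        isEnabled: true,
        functionId: 'chat', // span name; illustrative
        metadata: {
            ...(params.sessionId && { sessionId: params.sessionId }),
            ...(params.userId && { userId: params.userId }),
        },
    };
}

One observation on the diff itself: the route calls getTelemetryConfig twice, once as the guard and once as the value; assigning the result to a local variable first would be equivalent and avoid the repeated call.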
@@ -260,7 +291,8 @@ IMPORTANT: Keep edits concise:
     return result.toUIMessageStreamResponse();
 }
 
-export async function POST(req: Request) {
+// Wrap handler with error handling
+async function safeHandler(req: Request): Promise<Response> {
     try {
         return await handleChatRequest(req);
     } catch (error) {
@@ -268,3 +300,10 @@ export async function POST(req: Request) {
         return Response.json({ error: 'Internal server error' }, { status: 500 });
     }
 }
+
+// Wrap with Langfuse observe (if configured)
+const observedHandler = wrapWithObserve(safeHandler);
+
+export async function POST(req: Request) {
+    return observedHandler(req);
+}
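wrapWithObserve is the last lib/langfuse.ts helper this route uses. Assuming it wraps the handler with observe() from @langfuse/tracing, so each request becomes a Langfuse trace, and passes the handler through unchanged when Langfuse isn't configured, a sketch could be:

// lib/langfuse.ts (continued sketch): request-level tracing wrapper.
import { observe } from '@langfuse/tracing';

type Handler = (req: Request) => Promise<Response>;

// Wrap a route handler in a Langfuse-observed span; pass it through
// untouched when Langfuse isn't configured.
export function wrapWithObserve(handler: Handler): Handler {
    if (!process.env.LANGFUSE_PUBLIC_KEY || !process.env.LANGFUSE_SECRET_KEY) {
        return handler;
    }
    return observe(handler, { name: 'chat-request' }); // name is illustrative
}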