feat: integrate Langfuse for LLM observability

- Add instrumentation.ts with Langfuse OpenTelemetry exporter (see the first sketch below)
- Enable experimental telemetry on streamText calls
- Add instrumentationHook to Next.js config (see the second sketch below)
- Install required dependencies (@vercel/otel, langfuse-vercel, etc.)
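For reference, a minimal sketch of what the new instrumentation.ts plausibly contains, following the usual @vercel/otel + langfuse-vercel wiring; the serviceName value is an assumed placeholder, not taken from this commit, and credentials are expected via the standard LANGFUSE_* environment variables:

// instrumentation.ts — sketch only; serviceName below is an assumption
import { registerOTel } from '@vercel/otel';
import { LangfuseExporter } from 'langfuse-vercel';

export function register() {
  registerOTel({
    // Assumed placeholder; the real service name is not shown in this commit.
    serviceName: 'nextjs-app',
    // LangfuseExporter reads LANGFUSE_PUBLIC_KEY, LANGFUSE_SECRET_KEY and
    // LANGFUSE_BASEURL from the environment and forwards AI SDK spans to Langfuse.
    traceExporter: new LangfuseExporter(),
  });
}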
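Likewise, a sketch of the Next.js config change that enables the instrumentation hook; the file name (next.config.ts) and the absence of other options are assumptions, since the commit only states that instrumentationHook was added:

// next.config.ts — sketch; any pre-existing options in the real config are omitted
import type { NextConfig } from 'next';

const nextConfig: NextConfig = {
  experimental: {
    // Needed on Next.js 13/14 so instrumentation.ts runs at startup;
    // Next.js 15+ loads instrumentation.ts without this flag.
    instrumentationHook: true,
  },
};

export default nextConfig;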
Author: dayuan.jiang
Date:   2025-12-03 23:41:20 +09:00
Parent: 39322c2793
Commit: d84edb529c
5 changed files with 257 additions and 1 deletion

@@ -257,6 +257,7 @@ ${lastMessageText}
     messages: [systemMessageWithCache, ...enhancedMessages],
     ...(providerOptions && { providerOptions }),
     ...(headers && { headers }),
+    experimental_telemetry: { isEnabled: true },
     onFinish: ({ usage, providerMetadata }) => {
       console.log('[Cache] Usage:', JSON.stringify({
         inputTokens: usage?.inputTokens,