Compare commits

..

13 Commits

Author SHA1 Message Date
dayuan.jiang
22d0d4039d fix: add confirmation dialog to prevent accidental back navigation
Addresses conflict between right-click drag and browser back gesture in
Chromium-based browsers. Shows browser confirmation dialog when user
tries to navigate away, preventing accidental page exits.

Closes #80
2025-12-05 18:39:49 +09:00
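The mechanism, in brief — a minimal sketch of the beforeunload hook this commit adds (the full change appears in the page component diff below):

import { useEffect } from "react";

export default function Home() {
  // Registering a beforeunload handler makes the browser show its native
  // "Leave site?" confirmation before the page unloads (including via a
  // back/forward gesture), instead of silently discarding the diagram.
  useEffect(() => {
    const handleBeforeUnload = (event: BeforeUnloadEvent) => {
      event.preventDefault(); // required to trigger the confirmation dialog
      return "";              // legacy browsers read the returned string
    };
    window.addEventListener("beforeunload", handleBeforeUnload);
    return () => window.removeEventListener("beforeunload", handleBeforeUnload);
  }, []);
  return null; // page UI omitted in this sketch
}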
Dayuan Jiang
57bfc9cef7 fix: update status indicator to show outage resolved (#98) 2025-12-05 18:07:25 +09:00
Dayuan Jiang
0543f71c43 fix: use console.log instead of console.error for XML validation during streaming (#96) 2025-12-05 16:59:14 +09:00
Dayuan Jiang
970b88612d fix: add service status indicator for ongoing issues (#95) 2025-12-05 16:46:17 +09:00
Dayuan Jiang
c805277a76 fix: enable UI retry when Bedrock returns early 503 error (#94)
- Add error prop to ChatInput to detect error state
- Update isDisabled logic to allow retry when there's an error
- Pass combined error (SDK error + streamingError) to ChatInput

When Bedrock returns 503 ServiceUnavailableException before streaming
starts, AI SDK's onError fires but status may not transition to "ready".
This fix ensures the input is re-enabled when an error occurs, allowing
users to retry their request.
2025-12-05 16:22:38 +09:00
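The crux of the fix, as a sketch (status and error are ChatInput props; the real change is in the ChatInput diff further down):

// Before: the input stayed locked whenever a request was "in flight".
// const isDisabled = status === "streaming" || status === "submitted";

// After: an error short-circuits the lock, so the user can edit and resend
// even when the SDK never transitions status back to "ready".
const isDisabled = (status === "streaming" || status === "submitted") && !error;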
Dayuan Jiang
95160f5a21 fix: handle Bedrock 503 streaming errors with timeout detection (#92)
- Add 15s streaming timeout to detect mid-stream stalls (e.g., Bedrock 503)
- Add stop() call to allow user retry after timeout
- Add streamingError state for timeout-detected errors
- Improve server-side error logging for empty usage detection
- Add user-friendly error messages for ServiceUnavailable and Throttling errors
2025-12-05 14:23:47 +09:00
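The detection logic boils down to the following condensed sketch (status, messages, and stop come from useChat(); streamingError is local state; the full hook in the ChatPanel diff below also tracks counts across effect runs via refs):

useEffect(() => {
  if (status !== "streaming") return;
  // Snapshot how much content has arrived, then re-check after 15s.
  const partsBefore = messages.reduce((n, m) => n + (m.parts?.length || 0), 0);
  const timeoutId = setTimeout(() => {
    const partsNow = messages.reduce((n, m) => n + (m.parts?.length || 0), 0);
    if (partsNow === partsBefore) {
      // Nothing arrived in 15s: treat the stream as stalled.
      setStreamingError(new Error("Connection lost. Please try again."));
      stop(); // lets the user retry
    }
  }, 15000);
  // Any new chunk changes `messages`, re-running the effect and resetting the timer.
  return () => clearTimeout(timeoutId);
}, [status, messages, stop]);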
broBinChen
b206e16c02 fix: clear files when clicking text-only examples (#82)
Fixed an issue where files from previous examples would persist when clicking on "Animated Diagram" or "Creative Drawing" examples that don't require image uploads.
2025-12-05 14:07:14 +09:00
broBinChen
563b18e8ff refactor: replace deprecated addToolResult with addToolOutput (#85)
Replaced the deprecated addToolResult API with the new addToolOutput API from the ai package to ensure compatibility with future versions.
2025-12-05 14:02:45 +09:00
dayuan.jiang
2366255e8f fix: use credential provider chain for bedrock IAM role support 2025-12-05 09:19:26 +09:00
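In essence (condensed from the ai-providers diff below; AI_MODEL is the env var this repo uses for the Bedrock model id):

import { createAmazonBedrock } from "@ai-sdk/amazon-bedrock";
import { fromNodeProviderChain } from "@aws-sdk/credential-providers";

// fromNodeProviderChain() resolves credentials from the standard AWS chain:
// explicit env vars (AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY) for local dev,
// or the attached IAM role when running on Amplify/Lambda/EC2.
const bedrockProvider = createAmazonBedrock({
  region: process.env.AWS_REGION || "us-west-2",
  credentialProvider: fromNodeProviderChain(),
});
const model = bedrockProvider(process.env.AI_MODEL!);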
dayuan.jiang
255308f829 fix: make bedrock credentials optional for IAM role support 2025-12-05 09:11:10 +09:00
dayuan.jiang
a9493c8877 fix: write env vars to .env.production for Amplify SSR runtime 2025-12-05 09:04:54 +09:00
dayuan.jiang
a0c3db100a fix: add favicon.ico to public folder for header logo 2025-12-05 08:56:34 +09:00
dayuan.jiang
ff6f130f8a refactor: remove Langfuse observability integration
- Delete lib/langfuse.ts, instrumentation.ts
- Remove API routes: log-save, log-feedback
- Remove feedback buttons (thumbs up/down) from chat
- Remove sessionId tracking throughout codebase
- Remove @langfuse/*, @opentelemetry dependencies
- Clean up env.example
2025-12-05 01:30:02 +09:00
18 changed files with 1490 additions and 912 deletions

22
amplify.yml Normal file
View File

@@ -0,0 +1,22 @@
version: 1
frontend:
phases:
preBuild:
commands:
- npm ci --cache .npm --prefer-offline
build:
commands:
# Write env vars to .env.production for Next.js SSR runtime
- env | grep -e AI_MODEL >> .env.production
- env | grep -e AI_PROVIDER >> .env.production
- env | grep -e OPENAI_API_KEY >> .env.production
- env | grep -e NEXT_PUBLIC_ >> .env.production
- npm run build
artifacts:
baseDirectory: .next
files:
- '**/*'
cache:
paths:
- .next/cache/**/*
- .npm/**/*

View File

@@ -1,7 +1,6 @@
 import { streamText, convertToModelMessages, createUIMessageStream, createUIMessageStreamResponse } from 'ai';
 import { getAIModel } from '@/lib/ai-providers';
 import { findCachedResponse } from '@/lib/cached-responses';
-import { setTraceInput, setTraceOutput, getTelemetryConfig, wrapWithObserve } from '@/lib/langfuse';
 import { getSystemPrompt } from '@/lib/system-prompts';
 import { z } from "zod";
@@ -32,27 +31,7 @@ function createCachedStreamResponse(xml: string): Response {
 // Inner handler function
 async function handleChatRequest(req: Request): Promise<Response> {
-  const { messages, xml, sessionId } = await req.json();
-
-  // Get user IP for Langfuse tracking
-  const forwardedFor = req.headers.get('x-forwarded-for');
-  const userId = forwardedFor?.split(',')[0]?.trim() || 'anonymous';
-
-  // Validate sessionId for Langfuse (must be string, max 200 chars)
-  const validSessionId = sessionId && typeof sessionId === 'string' && sessionId.length <= 200
-    ? sessionId
-    : undefined;
-
-  // Extract user input text for Langfuse trace
-  const currentMessage = messages[messages.length - 1];
-  const userInputText = currentMessage?.parts?.find((p: any) => p.type === 'text')?.text || '';
-
-  // Update Langfuse trace with input, session, and user
-  setTraceInput({
-    input: userInputText,
-    sessionId: validSessionId,
-    userId: userId,
-  });
+  const { messages, xml } = await req.json();

   // === CACHE CHECK START ===
   const isFirstMessage = messages.length === 1;
@@ -175,19 +154,28 @@ ${lastMessageText}
     messages: allMessages,
     ...(providerOptions && { providerOptions }),
     ...(headers && { headers }),
-    // Langfuse telemetry config (returns undefined if not configured)
-    ...(getTelemetryConfig({ sessionId: validSessionId, userId }) && {
-      experimental_telemetry: getTelemetryConfig({ sessionId: validSessionId, userId }),
-    }),
-    onFinish: ({ text, usage, providerMetadata }) => {
-      console.log('[Cache] Full providerMetadata:', JSON.stringify(providerMetadata, null, 2));
-      console.log('[Cache] Usage:', JSON.stringify(usage, null, 2));
-      // Pass usage to Langfuse (Bedrock streaming doesn't auto-report tokens to telemetry)
-      // AI SDK uses inputTokens/outputTokens, Langfuse expects promptTokens/completionTokens
-      setTraceOutput(text, {
-        promptTokens: usage?.inputTokens,
-        completionTokens: usage?.outputTokens,
-      });
+    onFinish: ({ usage, providerMetadata, finishReason, text, toolCalls }) => {
+      // Detect potential mid-stream failures (e.g., Bedrock 503 ServiceUnavailableException)
+      // When this happens, usage is empty and providerMetadata is undefined
+      const hasUsage = usage && Object.keys(usage).length > 0;
+      if (!hasUsage) {
+        console.error('[Stream Error] Empty usage detected - possible Bedrock 503 or mid-stream failure');
+        console.error('[Stream Error] finishReason:', finishReason);
+        console.error('[Stream Error] text received:', text?.substring(0, 200) || '(none)');
+        console.error('[Stream Error] toolCalls:', toolCalls?.length || 0);
+        // Log the user's last message for debugging
+        const lastUserMsg = enhancedMessages.filter(m => m.role === 'user').pop();
+        if (lastUserMsg) {
+          const content = lastUserMsg.content;
+          const preview = Array.isArray(content)
+            ? (content.find((c) => c.type === 'text') as { type: 'text'; text: string } | undefined)?.text?.substring(0, 100)
+            : String(content).substring(0, 100);
+          console.error('[Stream Error] Last user message preview:', preview);
+        }
+      } else {
+        console.log('[Cache] Full providerMetadata:', JSON.stringify(providerMetadata, null, 2));
+        console.log('[Cache] Usage:', JSON.stringify(usage, null, 2));
+      }
     },
     tools: {
       // Client-side tool that will be executed on the client
@@ -263,6 +251,23 @@ IMPORTANT: Keep edits concise:
       ? error.message
       : JSON.stringify(error);

+    // Check for Bedrock service errors (503, throttling, etc.)
+    if (errorString.includes('ServiceUnavailable') ||
+        errorString.includes('503') ||
+        errorString.includes('temporarily unavailable')) {
+      console.error('[Bedrock Error] ServiceUnavailableException:', errorString);
+      return 'The AI service is temporarily unavailable. Please try again in a few seconds.';
+    }
+
+    // Check for throttling errors
+    if (errorString.includes('ThrottlingException') ||
+        errorString.includes('rate limit') ||
+        errorString.includes('too many requests') ||
+        errorString.includes('429')) {
+      console.error('[Bedrock Error] ThrottlingException:', errorString);
+      return 'Too many requests. Please wait a moment and try again.';
+    }
+
     // Check for image not supported error (e.g., DeepSeek models)
     if (errorString.includes('image_url') ||
         errorString.includes('unknown variant') ||
@@ -278,8 +283,7 @@
   });
 }

-// Wrap handler with error handling
-async function safeHandler(req: Request): Promise<Response> {
+export async function POST(req: Request) {
   try {
     return await handleChatRequest(req);
   } catch (error) {
@@ -287,10 +291,3 @@ async function safeHandler(req: Request): Promise<Response> {
     return Response.json({ error: 'Internal server error' }, { status: 500 });
   }
 }
-
-// Wrap with Langfuse observe (if configured)
-const observedHandler = wrapWithObserve(safeHandler);
-export async function POST(req: Request) {
-  return observedHandler(req);
-}

View File

@@ -1,103 +0,0 @@
import { getLangfuseClient } from '@/lib/langfuse';
import { randomUUID } from 'crypto';
import { z } from 'zod';
const feedbackSchema = z.object({
messageId: z.string().min(1).max(200),
feedback: z.enum(['good', 'bad']),
sessionId: z.string().min(1).max(200).optional(),
});
export async function POST(req: Request) {
const langfuse = getLangfuseClient();
if (!langfuse) {
return Response.json({ success: true, logged: false });
}
// Validate input
let data;
try {
data = feedbackSchema.parse(await req.json());
} catch {
return Response.json({ success: false, error: 'Invalid input' }, { status: 400 });
}
const { messageId, feedback, sessionId } = data;
// Get user IP for tracking
const forwardedFor = req.headers.get('x-forwarded-for');
const userId = forwardedFor?.split(',')[0]?.trim() || 'anonymous';
try {
// Find the most recent chat trace for this session to attach the score to
const tracesResponse = await langfuse.api.trace.list({
sessionId,
limit: 1,
});
const traces = tracesResponse.data || [];
const latestTrace = traces[0];
if (!latestTrace) {
// No trace found for this session - create a standalone feedback trace
const traceId = randomUUID();
const timestamp = new Date().toISOString();
await langfuse.api.ingestion.batch({
batch: [
{
type: 'trace-create',
id: randomUUID(),
timestamp,
body: {
id: traceId,
name: 'user-feedback',
sessionId,
userId,
input: { messageId, feedback },
metadata: { source: 'feedback-button', note: 'standalone - no chat trace found' },
timestamp,
},
},
{
type: 'score-create',
id: randomUUID(),
timestamp,
body: {
id: randomUUID(),
traceId,
name: 'user-feedback',
value: feedback === 'good' ? 1 : 0,
comment: `User gave ${feedback} feedback`,
},
},
],
});
} else {
// Attach score to the existing chat trace
const timestamp = new Date().toISOString();
await langfuse.api.ingestion.batch({
batch: [
{
type: 'score-create',
id: randomUUID(),
timestamp,
body: {
id: randomUUID(),
traceId: latestTrace.id,
name: 'user-feedback',
value: feedback === 'good' ? 1 : 0,
comment: `User gave ${feedback} feedback`,
},
},
],
});
}
return Response.json({ success: true, logged: true });
} catch (error) {
console.error('Langfuse feedback error:', error);
return Response.json({ success: false, error: 'Failed to log feedback' }, { status: 500 });
}
}

View File

@@ -1,65 +0,0 @@
import { getLangfuseClient } from '@/lib/langfuse';
import { randomUUID } from 'crypto';
import { z } from 'zod';
const saveSchema = z.object({
filename: z.string().min(1).max(255),
format: z.enum(['drawio', 'png', 'svg']),
sessionId: z.string().min(1).max(200).optional(),
});
export async function POST(req: Request) {
const langfuse = getLangfuseClient();
if (!langfuse) {
return Response.json({ success: true, logged: false });
}
// Validate input
let data;
try {
data = saveSchema.parse(await req.json());
} catch {
return Response.json({ success: false, error: 'Invalid input' }, { status: 400 });
}
const { filename, format, sessionId } = data;
try {
const timestamp = new Date().toISOString();
// Find the most recent chat trace for this session to attach the save flag
const tracesResponse = await langfuse.api.trace.list({
sessionId,
limit: 1,
});
const traces = tracesResponse.data || [];
const latestTrace = traces[0];
if (latestTrace) {
// Add a score to the existing trace to flag that user saved
await langfuse.api.ingestion.batch({
batch: [
{
type: 'score-create',
id: randomUUID(),
timestamp,
body: {
id: randomUUID(),
traceId: latestTrace.id,
name: 'diagram-saved',
value: 1,
comment: `User saved diagram as ${filename}.${format}`,
},
},
],
});
}
// If no trace found, skip logging (user hasn't chatted yet)
return Response.json({ success: true, logged: !!latestTrace });
} catch (error) {
console.error('Langfuse save error:', error);
return Response.json({ success: false, error: 'Failed to log save' }, { status: 500 });
}
}

View File

@@ -32,6 +32,18 @@ export default function Home() {
     return () => window.removeEventListener('keydown', handleKeyDown);
   }, []);

+  // Show confirmation dialog when user tries to leave the page
+  // This helps prevent accidental navigation from browser back gestures
+  useEffect(() => {
+    const handleBeforeUnload = (event: BeforeUnloadEvent) => {
+      event.preventDefault();
+      return '';
+    };
+    window.addEventListener('beforeunload', handleBeforeUnload);
+    return () => window.removeEventListener('beforeunload', handleBeforeUnload);
+  }, []);
+
   return (
     <div className="flex h-screen bg-background relative overflow-hidden">
       {/* Mobile warning overlay */}

View File

@@ -27,7 +27,7 @@ export function ButtonWithTooltip({
       <TooltipTrigger asChild>
         <Button {...buttonProps}>{children}</Button>
       </TooltipTrigger>
-      <TooltipContent>{tooltipContent}</TooltipContent>
+      <TooltipContent className="max-w-xs text-wrap">{tooltipContent}</TooltipContent>
     </Tooltip>
   </TooltipProvider>
 );

View File

@@ -90,7 +90,10 @@ export default function ExamplePanel({
   icon={<Zap className="w-4 h-4 text-primary" />}
   title="Animated Diagram"
   description="Draw a transformer architecture with animated connectors"
-  onClick={() => setInput("Give me a **animated connector** diagram of transformer's architecture")}
+  onClick={() => {
+    setInput("Give me a **animated connector** diagram of transformer's architecture")
+    setFiles([])
+  }}
 />
 <ExampleCard
@@ -111,7 +114,10 @@
   icon={<Palette className="w-4 h-4 text-primary" />}
   title="Creative Drawing"
   description="Draw something fun and creative"
-  onClick={() => setInput("Draw a cat for me")}
+  onClick={() => {
+    setInput("Draw a cat for me")
+    setFiles([])
+  }}
 />
 </div>

View File

@@ -29,7 +29,7 @@ interface ChatInputProps {
 onFileChange?: (files: File[]) => void;
 showHistory?: boolean;
 onToggleHistory?: (show: boolean) => void;
-sessionId?: string;
+error?: Error | null;
 }
@@ -42,7 +42,7 @@ export function ChatInput({
 onFileChange = () => {},
 showHistory = false,
 onToggleHistory = () => {},
-sessionId,
+error = null,
 }: ChatInputProps) {
 const { diagramHistory, saveDiagramToFile } = useDiagram();
 const textareaRef = useRef<HTMLTextAreaElement>(null);
@@ -51,7 +51,8 @@
 const [showClearDialog, setShowClearDialog] = useState(false);
 const [showSaveDialog, setShowSaveDialog] = useState(false);
-const isDisabled = status === "streaming" || status === "submitted";
+// Allow retry when there's an error (even if status is still "streaming" or "submitted")
+const isDisabled = (status === "streaming" || status === "submitted") && !error;

 useEffect(() => {
   console.log('[ChatInput] Status changed to:', status, '| Input disabled:', isDisabled);
@@ -251,7 +252,7 @@ export function ChatInput({
 <SaveDialog
   open={showSaveDialog}
   onOpenChange={setShowSaveDialog}
-  onSave={(filename, format) => saveDiagramToFile(filename, format, sessionId)}
+  onSave={(filename, format) => saveDiagramToFile(filename, format)}
   defaultFilename={`diagram-${new Date().toISOString().slice(0, 10)}`}
 />

View File

@@ -6,7 +6,7 @@ import { ScrollArea } from "@/components/ui/scroll-area";
 import ExamplePanel from "./chat-example-panel";
 import { UIMessage } from "ai";
 import { convertToLegalXml, replaceNodes, validateMxCellStructure } from "@/lib/utils";
-import { Copy, Check, X, ChevronDown, ChevronUp, Cpu, Minus, Plus, ThumbsUp, ThumbsDown, RotateCcw, Pencil } from "lucide-react";
+import { Copy, Check, X, ChevronDown, ChevronUp, Cpu, Minus, Plus, RotateCcw, Pencil } from "lucide-react";
 import { CodeBlock } from "./code-block";

 interface EditPair {
@@ -67,7 +67,6 @@ interface ChatMessageDisplayProps {
   error?: Error | null;
   setInput: (input: string) => void;
   setFiles: (files: File[]) => void;
-  sessionId?: string;
   onRegenerate?: (messageIndex: number) => void;
   onEditMessage?: (messageIndex: number, newText: string) => void;
 }
@@ -77,7 +76,6 @@ export function ChatMessageDisplay({
   error,
   setInput,
   setFiles,
-  sessionId,
   onRegenerate,
   onEditMessage,
 }: ChatMessageDisplayProps) {
@@ -90,7 +88,6 @@ export function ChatMessageDisplay({
   );
   const [copiedMessageId, setCopiedMessageId] = useState<string | null>(null);
   const [copyFailedMessageId, setCopyFailedMessageId] = useState<string | null>(null);
-  const [feedback, setFeedback] = useState<Record<string, "good" | "bad">>({});
   const [editingMessageId, setEditingMessageId] = useState<string | null>(null);
   const [editText, setEditText] = useState<string>("");
@@ -106,34 +103,6 @@ export function ChatMessageDisplay({
     }
   };

-  const submitFeedback = async (messageId: string, value: "good" | "bad") => {
-    // Toggle off if already selected
-    if (feedback[messageId] === value) {
-      setFeedback((prev) => {
-        const next = { ...prev };
-        delete next[messageId];
-        return next;
-      });
-      return;
-    }
-
-    setFeedback((prev) => ({ ...prev, [messageId]: value }));
-
-    try {
-      await fetch("/api/log-feedback", {
-        method: "POST",
-        headers: { "Content-Type": "application/json" },
-        body: JSON.stringify({
-          messageId,
-          feedback: value,
-          sessionId,
-        }),
-      });
-    } catch (error) {
-      console.warn("Failed to log feedback:", error);
-    }
-  };
-
   const handleDisplayChart = useCallback(
     (xml: string) => {
       const currentXml = xml || "";
@@ -146,7 +115,7 @@
         previousXML.current = convertedXml;
         onDisplayChart(replacedXML);
       } else {
-        console.error("[ChatMessageDisplay] XML validation failed:", validationError);
+        console.log("[ChatMessageDisplay] XML validation failed:", validationError);
       }
     }
   },
@@ -467,32 +436,6 @@ export function ChatMessageDisplay({
     <RotateCcw className="h-3.5 w-3.5" />
   </button>
 )}
-{/* Divider */}
-<div className="w-px h-4 bg-border mx-1" />
-
-{/* Thumbs up */}
-<button
-  onClick={() => submitFeedback(message.id, "good")}
-  className={`p-1.5 rounded-lg transition-colors ${
-    feedback[message.id] === "good"
-      ? "text-green-600 bg-green-100"
-      : "text-muted-foreground/60 hover:text-green-600 hover:bg-green-50"
-  }`}
-  title="Good response"
->
-  <ThumbsUp className="h-3.5 w-3.5" />
-</button>
-
-{/* Thumbs down */}
-<button
-  onClick={() => submitFeedback(message.id, "bad")}
-  className={`p-1.5 rounded-lg transition-colors ${
-    feedback[message.id] === "bad"
-      ? "text-red-600 bg-red-100"
-      : "text-muted-foreground/60 hover:text-red-600 hover:bg-red-50"
-  }`}
-  title="Bad response"
->
-  <ThumbsDown className="h-3.5 w-3.5" />
-</button>
 </div>
 )}
 </div>

View File

@@ -4,7 +4,7 @@ import type React from "react";
 import { useRef, useEffect, useState } from "react";
 import { flushSync } from "react-dom";
 import { FaGithub } from "react-icons/fa";
-import { PanelRightClose, PanelRightOpen } from "lucide-react";
+import { PanelRightClose, PanelRightOpen, CheckCircle } from "lucide-react";
 import Link from "next/link";
 import Image from "next/image";
@@ -61,9 +61,7 @@ export default function ChatPanel({
   const [files, setFiles] = useState<File[]>([]);
   const [showHistory, setShowHistory] = useState(false);
   const [input, setInput] = useState("");
-
-  // Generate a unique session ID for Langfuse tracing
-  const [sessionId, setSessionId] = useState(() => `session-${Date.now()}-${Math.random().toString(36).slice(2, 9)}`);
+  const [streamingError, setStreamingError] = useState<Error | null>(null);

   // Store XML snapshots for each user message (keyed by message index)
   const xmlSnapshotsRef = useRef<Map<number, string>>(new Map());
@@ -74,8 +72,15 @@
     chartXMLRef.current = chartXML;
   }, [chartXML]);

-  const { messages, sendMessage, addToolResult, status, error, setMessages } =
-    useChat({
+  const {
+    messages,
+    sendMessage,
+    addToolResult,
+    status,
+    error,
+    setMessages,
+    stop,
+  } = useChat({
     transport: new DefaultChatTransport({
       api: "/api/chat",
     }),
@@ -112,12 +117,20 @@
         const cachedXML = chartXMLRef.current;
         if (cachedXML) {
           currentXml = cachedXML;
-          console.log("[edit_diagram] Using cached chartXML, length:", currentXml.length);
+          console.log(
+            "[edit_diagram] Using cached chartXML, length:",
+            currentXml.length
+          );
         } else {
           // Fallback to export only if no cached XML
-          console.log("[edit_diagram] No cached XML, fetching from DrawIO...");
+          console.log(
+            "[edit_diagram] No cached XML, fetching from DrawIO..."
+          );
           currentXml = await onFetchChart(false);
-          console.log("[edit_diagram] Got XML from export, length:", currentXml.length);
+          console.log(
+            "[edit_diagram] Got XML from export, length:",
+            currentXml.length
+          );
         }

         const { replaceXMLParts } = await import("@/lib/utils");
@@ -135,9 +148,7 @@
         console.error("[edit_diagram] Failed:", error);
         const errorMessage =
-          error instanceof Error
-            ? error.message
-            : String(error);
+          error instanceof Error ? error.message : String(error);

         addToolResult({
           tool: "edit_diagram",
@@ -156,9 +167,68 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
     },
     onError: (error) => {
       console.error("Chat error:", error);
+      setStreamingError(error);
     },
   });

+  // Streaming timeout detection - detects when stream stalls mid-response (e.g., Bedrock 503)
+  // This catches cases where onError doesn't fire because headers were already sent
+  const lastMessageCountRef = useRef(0);
+  const lastMessagePartsRef = useRef(0);
+
+  useEffect(() => {
+    // Clear streaming error when status changes to ready
+    if (status === "ready") {
+      setStreamingError(null);
+      lastMessageCountRef.current = 0;
+      lastMessagePartsRef.current = 0;
+      return;
+    }
+
+    if (status !== "streaming") return;
+
+    const STALL_TIMEOUT_MS = 15000; // 15 seconds without any update
+
+    // Capture current state BEFORE setting timeout
+    // This way we compare against values at the time timeout was set
+    const currentPartsCount = messages.reduce(
+      (acc, msg) => acc + (msg.parts?.length || 0),
+      0
+    );
+    const capturedMessageCount = messages.length;
+    const capturedPartsCount = currentPartsCount;
+
+    // Update refs immediately so next effect run has fresh values
+    lastMessageCountRef.current = messages.length;
+    lastMessagePartsRef.current = currentPartsCount;
+
+    const timeoutId = setTimeout(() => {
+      // Re-count parts at timeout time
+      const newPartsCount = messages.reduce(
+        (acc, msg) => acc + (msg.parts?.length || 0),
+        0
+      );
+      // If no change since timeout was set, stream has stalled
+      if (
+        messages.length === capturedMessageCount &&
+        newPartsCount === capturedPartsCount
+      ) {
+        console.error(
+          "[Streaming Timeout] No activity for 15s - forcing error state"
+        );
+        setStreamingError(
+          new Error(
+            "Connection lost. The AI service may be temporarily unavailable. Please try again."
+          )
+        );
+        stop(); // Allow user to retry by transitioning status to "ready"
+      }
+    }, STALL_TIMEOUT_MS);
+
+    return () => clearTimeout(timeoutId);
+  }, [status, messages, stop]);
+
   const messagesEndRef = useRef<HTMLDivElement>(null);

   useEffect(() => {
@@ -167,11 +237,15 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
     }
   }, [messages]);

   const onFormSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
     e.preventDefault();
-    const isProcessing = status === "streaming" || status === "submitted";
+    // Allow retry if there's a streaming error (workaround for stop() not transitioning status)
+    const isProcessing =
+      (status === "streaming" || status === "submitted") &&
+      !streamingError;
     if (input.trim() && !isProcessing) {
+      // Clear any previous streaming error before starting new request
+      setStreamingError(null);
       try {
         let chartXml = await onFetchChart();
         chartXml = formatXML(chartXml);
@@ -208,7 +282,6 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
         {
           body: {
             xml: chartXml,
-            sessionId,
           },
         }
       );
@@ -237,7 +310,10 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
     // Find the user message before this assistant message
     let userMessageIndex = messageIndex - 1;
-    while (userMessageIndex >= 0 && messages[userMessageIndex].role !== "user") {
+    while (
+      userMessageIndex >= 0 &&
+      messages[userMessageIndex].role !== "user"
+    ) {
       userMessageIndex--;
     }
@@ -253,7 +329,10 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
     // Get the saved XML snapshot for this user message
     const savedXml = xmlSnapshotsRef.current.get(userMessageIndex);
     if (!savedXml) {
-      console.error("No saved XML snapshot for message index:", userMessageIndex);
+      console.error(
+        "No saved XML snapshot for message index:",
+        userMessageIndex
+      );
       return;
     }
@@ -283,7 +362,6 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
     {
       body: {
         xml: savedXml,
-        sessionId,
       },
     }
   );
@@ -299,7 +377,10 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
   // Get the saved XML snapshot for this user message
   const savedXml = xmlSnapshotsRef.current.get(messageIndex);
   if (!savedXml) {
-    console.error("No saved XML snapshot for message index:", messageIndex);
+    console.error(
+      "No saved XML snapshot for message index:",
+      messageIndex
+    );
     return;
   }
@@ -337,7 +418,6 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
   {
     body: {
       xml: savedXml,
-      sessionId,
     },
   }
 );
@@ -394,6 +474,14 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
   >
     About
   </Link>
+  <ButtonWithTooltip
+    tooltipContent="Recent generation failures were caused by our AI provider's infrastructure issue, not the app code. After extensive debugging, I've switched providers and observed 30+ minutes of stability. If issues persist, please report on GitHub."
+    variant="ghost"
+    size="icon"
+    className="h-6 w-6 text-green-500 hover:text-green-600"
+  >
+    <CheckCircle className="h-4 w-4" />
+  </ButtonWithTooltip>
 </div>
 <div className="flex items-center gap-1">
   <a
@@ -421,10 +509,9 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
 <main className="flex-1 overflow-hidden">
   <ChatMessageDisplay
     messages={messages}
-    error={error}
+    error={error || streamingError}
     setInput={setInput}
     setFiles={handleFileChange}
-    sessionId={sessionId}
     onRegenerate={handleRegenerate}
     onEditMessage={handleEditMessage}
   />
@@ -440,14 +527,13 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
   onClearChat={() => {
     setMessages([]);
     clearDiagram();
-    setSessionId(`session-${Date.now()}-${Math.random().toString(36).slice(2, 9)}`);
     xmlSnapshotsRef.current.clear();
   }}
   files={files}
   onFileChange={handleFileChange}
   showHistory={showHistory}
   onToggleHistory={setShowHistory}
-  sessionId={sessionId}
+  error={error || streamingError}
 />
 </footer>
 </div>

View File

@@ -16,7 +16,7 @@ interface DiagramContextType {
 drawioRef: React.Ref<DrawIoEmbedRef | null>;
 handleDiagramExport: (data: any) => void;
 clearDiagram: () => void;
-saveDiagramToFile: (filename: string, format: ExportFormat, sessionId?: string) => void;
+saveDiagramToFile: (filename: string, format: ExportFormat) => void;
 }

 const DiagramContext = createContext<DiagramContextType | undefined>(undefined);
@@ -107,7 +107,7 @@ export function DiagramProvider({ children }: { children: React.ReactNode }) {
   setDiagramHistory([]);
 };

-const saveDiagramToFile = (filename: string, format: ExportFormat, sessionId?: string) => {
+const saveDiagramToFile = (filename: string, format: ExportFormat) => {
   if (!drawioRef.current) {
     console.warn("Draw.io editor not ready");
     return;
@@ -145,9 +145,6 @@ export function DiagramProvider({ children }: { children: React.ReactNode }) {
     extension = ".svg";
   }

-  // Log save event to Langfuse (flags the trace)
-  logSaveToLangfuse(filename, format, sessionId);
-
   // Handle download
   let url: string;
   if (typeof fileContent === "string" && fileContent.startsWith("data:")) {
@@ -177,19 +174,6 @@ export function DiagramProvider({ children }: { children: React.ReactNode }) {
   drawioRef.current.exportDiagram({ format: drawioFormat });
 };

-// Log save event to Langfuse (just flags the trace, doesn't send content)
-const logSaveToLangfuse = async (filename: string, format: string, sessionId?: string) => {
-  try {
-    await fetch("/api/log-save", {
-      method: "POST",
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({ filename, format, sessionId }),
-    });
-  } catch (error) {
-    console.warn("Failed to log save to Langfuse:", error);
-  }
-};
-
 return (
   <DiagramContext.Provider
     value={{

View File

@@ -41,9 +41,3 @@ AI_MODEL=global.anthropic.claude-sonnet-4-5-20250929-v1:0
 # DeepSeek Configuration
 # DEEPSEEK_API_KEY=sk-...
 # DEEPSEEK_BASE_URL=https://api.deepseek.com/v1  # Optional: Custom endpoint
-
-# Langfuse Observability (Optional)
-# Enable LLM tracing and analytics - https://langfuse.com
-# LANGFUSE_PUBLIC_KEY=pk-lf-...
-# LANGFUSE_SECRET_KEY=sk-lf-...
-# LANGFUSE_BASEURL=https://cloud.langfuse.com  # EU region, use https://us.cloud.langfuse.com for US

View File

@@ -1,35 +0,0 @@
import { LangfuseSpanProcessor } from '@langfuse/otel';
import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node';
export function register() {
// Skip telemetry if Langfuse env vars are not configured
if (!process.env.LANGFUSE_PUBLIC_KEY || !process.env.LANGFUSE_SECRET_KEY) {
console.warn('[Langfuse] Environment variables not configured - telemetry disabled');
return;
}
const langfuseSpanProcessor = new LangfuseSpanProcessor({
publicKey: process.env.LANGFUSE_PUBLIC_KEY,
secretKey: process.env.LANGFUSE_SECRET_KEY,
baseUrl: process.env.LANGFUSE_BASEURL,
// Filter out Next.js HTTP request spans so AI SDK spans become root traces
shouldExportSpan: ({ otelSpan }) => {
const spanName = otelSpan.name;
// Skip Next.js HTTP infrastructure spans
if (spanName.startsWith('POST /') ||
spanName.startsWith('GET /') ||
spanName.includes('BaseServer') ||
spanName.includes('handleRequest')) {
return false;
}
return true;
},
});
const tracerProvider = new NodeTracerProvider({
spanProcessors: [langfuseSpanProcessor],
});
// Register globally so AI SDK's telemetry also uses this processor
tracerProvider.register();
}

View File

@@ -1,4 +1,5 @@
-import { bedrock } from '@ai-sdk/amazon-bedrock';
+import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';
+import { fromNodeProviderChain } from '@aws-sdk/credential-providers';
 import { openai, createOpenAI } from '@ai-sdk/openai';
 import { createAnthropic } from '@ai-sdk/anthropic';
 import { google, createGoogleGenerativeAI } from '@ai-sdk/google';
@@ -38,7 +39,7 @@ const ANTHROPIC_BETA_HEADERS = {
 // Map of provider to required environment variable
 const PROVIDER_ENV_VARS: Record<ProviderName, string | null> = {
-  bedrock: 'AWS_ACCESS_KEY_ID',
+  bedrock: null, // AWS SDK auto-uses IAM role on AWS, or env vars locally
   openai: 'OPENAI_API_KEY',
   anthropic: 'ANTHROPIC_API_KEY',
   google: 'GOOGLE_GENERATIVE_AI_API_KEY',
@@ -159,13 +160,20 @@ export function getAIModel(): ModelConfig {
   let headers: Record<string, string> | undefined = undefined;

   switch (provider) {
-    case 'bedrock':
-      model = bedrock(modelId);
+    case 'bedrock': {
+      // Use credential provider chain for IAM role support (Amplify, Lambda, etc.)
+      // Falls back to env vars (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) for local dev
+      const bedrockProvider = createAmazonBedrock({
+        region: process.env.AWS_REGION || 'us-west-2',
+        credentialProvider: fromNodeProviderChain(),
+      });
+      model = bedrockProvider(modelId);
       // Add Anthropic beta options if using Claude models via Bedrock
       if (modelId.includes('anthropic.claude')) {
         providerOptions = BEDROCK_ANTHROPIC_BETA;
       }
       break;
+    }
     case 'openai':
       if (process.env.OPENAI_BASE_URL) {

View File

@@ -1,95 +0,0 @@
import { observe, updateActiveTrace } from '@langfuse/tracing';
import { LangfuseClient } from '@langfuse/client';
import * as api from '@opentelemetry/api';
// Singleton LangfuseClient instance for direct API calls
let langfuseClient: LangfuseClient | null = null;
export function getLangfuseClient(): LangfuseClient | null {
if (!process.env.LANGFUSE_PUBLIC_KEY || !process.env.LANGFUSE_SECRET_KEY) {
return null;
}
if (!langfuseClient) {
langfuseClient = new LangfuseClient({
publicKey: process.env.LANGFUSE_PUBLIC_KEY,
secretKey: process.env.LANGFUSE_SECRET_KEY,
baseUrl: process.env.LANGFUSE_BASEURL,
});
}
return langfuseClient;
}
// Check if Langfuse is configured
export function isLangfuseEnabled(): boolean {
return !!process.env.LANGFUSE_PUBLIC_KEY;
}
// Update trace with input data at the start of request
export function setTraceInput(params: {
input: string;
sessionId?: string;
userId?: string;
}) {
if (!isLangfuseEnabled()) return;
updateActiveTrace({
name: 'chat',
input: params.input,
sessionId: params.sessionId,
userId: params.userId,
});
}
// Update trace with output and end the span
export function setTraceOutput(output: string, usage?: { promptTokens?: number; completionTokens?: number }) {
if (!isLangfuseEnabled()) return;
updateActiveTrace({ output });
const activeSpan = api.trace.getActiveSpan();
if (activeSpan) {
// Manually set usage attributes since AI SDK Bedrock streaming doesn't provide them
if (usage?.promptTokens) {
activeSpan.setAttribute('ai.usage.promptTokens', usage.promptTokens);
activeSpan.setAttribute('gen_ai.usage.input_tokens', usage.promptTokens);
}
if (usage?.completionTokens) {
activeSpan.setAttribute('ai.usage.completionTokens', usage.completionTokens);
activeSpan.setAttribute('gen_ai.usage.output_tokens', usage.completionTokens);
}
activeSpan.end();
}
}
// Get telemetry config for streamText
export function getTelemetryConfig(params: {
sessionId?: string;
userId?: string;
}) {
if (!isLangfuseEnabled()) return undefined;
return {
isEnabled: true,
// Disable automatic input recording to avoid uploading large base64 images to Langfuse media
// User text input is recorded manually via setTraceInput
recordInputs: false,
recordOutputs: true,
metadata: {
sessionId: params.sessionId,
userId: params.userId,
},
};
}
// Wrap a handler with Langfuse observe
export function wrapWithObserve<T>(
handler: (req: Request) => Promise<T>
): (req: Request) => Promise<T> {
if (!isLangfuseEnabled()) {
return handler;
}
return observe(handler, { name: 'chat', endOnExit: false });
}

1604
package-lock.json generated

File diff suppressed because it is too large

View File

@@ -17,12 +17,9 @@
"@ai-sdk/google": "^2.0.0", "@ai-sdk/google": "^2.0.0",
"@ai-sdk/openai": "^2.0.19", "@ai-sdk/openai": "^2.0.19",
"@ai-sdk/react": "^2.0.22", "@ai-sdk/react": "^2.0.22",
"@langfuse/client": "^4.4.9", "@aws-sdk/credential-providers": "^3.943.0",
"@langfuse/otel": "^4.4.4",
"@langfuse/tracing": "^4.4.9",
"@next/third-parties": "^16.0.6", "@next/third-parties": "^16.0.6",
"@openrouter/ai-sdk-provider": "^1.2.3", "@openrouter/ai-sdk-provider": "^1.2.3",
"@opentelemetry/sdk-trace-node": "^2.2.0",
"@radix-ui/react-dialog": "^1.1.6", "@radix-ui/react-dialog": "^1.1.6",
"@radix-ui/react-scroll-area": "^1.2.3", "@radix-ui/react-scroll-area": "^1.2.3",
"@radix-ui/react-select": "^2.2.6", "@radix-ui/react-select": "^2.2.6",

BIN
public/favicon.ico Normal file

Binary file not shown.

Size: 15 KiB