Mirror of https://github.com/DayuanJiang/next-ai-draw-io.git (synced 2026-01-02 22:32:27 +08:00)
fix: handle Bedrock 503 streaming errors with timeout detection (#92)
- Add 15s streaming timeout to detect mid-stream stalls (e.g., Bedrock 503)
- Add stop() call to allow user retry after timeout
- Add streamingError state for timeout-detected errors
- Improve server-side error logging for empty usage detection
- Add user-friendly error messages for ServiceUnavailable and Throttling errors
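
The last two bullets describe server-side changes that are not part of the ChatPanel hunks shown below. A minimal sketch of what such a mapping could look like, assuming the AWS error names ServiceUnavailableException and ThrottlingException; the file path and helper name are hypothetical and not taken from this commit:

// lib/bedrock-errors.ts (hypothetical helper; the actual route-side code is not in this diff)
export function toUserFacingMessage(error: unknown): string {
    const name = error instanceof Error ? error.name : "";
    const message = error instanceof Error ? error.message : String(error);

    // Bedrock reports capacity problems as ServiceUnavailableException (HTTP 503).
    if (name.includes("ServiceUnavailable") || message.includes("503")) {
        return "The AI service is temporarily unavailable. Please try again in a moment.";
    }
    // Request-rate limits surface as ThrottlingException.
    if (name.includes("Throttling")) {
        return "Too many requests right now. Please wait a few seconds and retry.";
    }
    return "Something went wrong while generating the response. Please try again.";
}
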
@@ -61,6 +61,7 @@ export default function ChatPanel({
     const [files, setFiles] = useState<File[]>([]);
     const [showHistory, setShowHistory] = useState(false);
     const [input, setInput] = useState("");
+    const [streamingError, setStreamingError] = useState<Error | null>(null);

     // Store XML snapshots for each user message (keyed by message index)
     const xmlSnapshotsRef = useRef<Map<number, string>>(new Map());
@@ -71,7 +72,7 @@ export default function ChatPanel({
         chartXMLRef.current = chartXML;
     }, [chartXML]);

-    const { messages, sendMessage, addToolOutput, status, error, setMessages } =
+    const { messages, sendMessage, addToolResult, status, error, setMessages, stop } =
         useChat({
             transport: new DefaultChatTransport({
                 api: "/api/chat",
@@ -83,13 +84,13 @@ export default function ChatPanel({
                 const validationError = validateMxCellStructure(xml);

                 if (validationError) {
-                    addToolOutput({
+                    addToolResult({
                         tool: "display_diagram",
                         toolCallId: toolCall.toolCallId,
                         output: validationError,
                     });
                 } else {
-                    addToolOutput({
+                    addToolResult({
                         tool: "display_diagram",
                         toolCallId: toolCall.toolCallId,
                         output: "Successfully displayed the diagram.",
@@ -122,7 +123,7 @@ export default function ChatPanel({

                 onDisplayChart(editedXml);

-                addToolOutput({
+                addToolResult({
                     tool: "edit_diagram",
                     toolCallId: toolCall.toolCallId,
                     output: `Successfully applied ${edits.length} edit(s) to the diagram.`,
@@ -136,7 +137,7 @@ export default function ChatPanel({
                     ? error.message
                     : String(error);

-                addToolOutput({
+                addToolResult({
                     tool: "edit_diagram",
                     toolCallId: toolCall.toolCallId,
                     output: `Edit failed: ${errorMessage}
@@ -153,9 +154,64 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
             },
             onError: (error) => {
                 console.error("Chat error:", error);
+                setStreamingError(error);
             },
         });

+    // Streaming timeout detection - detects when stream stalls mid-response (e.g., Bedrock 503)
+    // This catches cases where onError doesn't fire because headers were already sent
+    const lastMessageCountRef = useRef(0);
+    const lastMessagePartsRef = useRef(0);
+
+    useEffect(() => {
+        // Clear streaming error and reset refs when status changes to ready
+        if (status === "ready") {
+            setStreamingError(null);
+            lastMessageCountRef.current = 0;
+            lastMessagePartsRef.current = 0;
+            return;
+        }
+
+        if (status !== "streaming") return;
+
+        const STALL_TIMEOUT_MS = 15000; // 15 seconds without any update
+
+        // Capture current state BEFORE setting timeout
+        // This way we compare against values at the time timeout was set
+        const currentPartsCount = messages.reduce(
+            (acc, msg) => acc + (msg.parts?.length || 0),
+            0
+        );
+        const capturedMessageCount = messages.length;
+        const capturedPartsCount = currentPartsCount;
+
+        // Update refs immediately so next effect run has fresh values
+        lastMessageCountRef.current = messages.length;
+        lastMessagePartsRef.current = currentPartsCount;
+
+        const timeoutId = setTimeout(() => {
+            // Re-count parts at timeout time
+            const newPartsCount = messages.reduce(
+                (acc, msg) => acc + (msg.parts?.length || 0),
+                0
+            );
+
+            // If no change since timeout was set, stream has stalled
+            if (
+                messages.length === capturedMessageCount &&
+                newPartsCount === capturedPartsCount
+            ) {
+                console.error("[Streaming Timeout] No activity for 15s - forcing error state");
+                setStreamingError(
+                    new Error("Connection lost. The AI service may be temporarily unavailable. Please try again.")
+                );
+                stop(); // Allow user to retry by transitioning status to "ready"
+            }
+        }, STALL_TIMEOUT_MS);
+
+        return () => clearTimeout(timeoutId);
+    }, [status, messages, stop]);
+
     const messagesEndRef = useRef<HTMLDivElement>(null);

     useEffect(() => {
@@ -167,8 +223,11 @@ Please retry with an adjusted search pattern or use display_diagram if retries a

     const onFormSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
         e.preventDefault();
-        const isProcessing = status === "streaming" || status === "submitted";
+        // Allow retry if there's a streaming error (workaround for stop() not transitioning status)
+        const isProcessing = (status === "streaming" || status === "submitted") && !streamingError;
         if (input.trim() && !isProcessing) {
+            // Clear any previous streaming error before starting new request
+            setStreamingError(null);
             try {
                 let chartXml = await onFetchChart();
                 chartXml = formatXML(chartXml);
@@ -415,7 +474,7 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
             <main className="flex-1 overflow-hidden">
                 <ChatMessageDisplay
                     messages={messages}
-                    error={error}
+                    error={error || streamingError}
                     setInput={setInput}
                     setFiles={handleFileChange}
                     onRegenerate={handleRegenerate}