Mirror of https://github.com/DayuanJiang/next-ai-draw-io.git (synced 2026-01-02 14:22:28 +08:00)
chore: remove complex 503 error handling code (#102)
- Remove 15s streaming timeout detection (too slow, added complexity)
- Remove status indicator (issue resolved by switching model)
- Remove streamingError state and related refs
- Simplify onFinish callback (remove 503 detection logging)
- Remove errorHandler function (use default AI SDK errors)

The real fix was switching from the global.* to the us.* Bedrock model.
This removes ~134 lines of unnecessary complexity.
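For illustration, the model switch mentioned above comes down to the Bedrock model ID handed to the AI SDK provider. A minimal sketch, assuming the `@ai-sdk/amazon-bedrock` provider; the model IDs below are placeholders, not taken from this commit:

    import { bedrock } from "@ai-sdk/amazon-bedrock";

    // Placeholder IDs: substitute the inference profile the app actually uses.
    // Before: a global.* inference profile
    // const model = bedrock("global.anthropic.claude-sonnet-4-20250514-v1:0");

    // After: the US cross-region (us.*) inference profile
    const model = bedrock("us.anthropic.claude-sonnet-4-20250514-v1:0");

The resulting model object is what the chat route passes to streamText.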
@@ -191,28 +191,9 @@ ${lastMessageText}
         messages: allMessages,
         ...(providerOptions && { providerOptions }),
         ...(headers && { headers }),
-        onFinish: ({ usage, providerMetadata, finishReason, text, toolCalls }) => {
-            // Detect potential mid-stream failures (e.g., Bedrock 503 ServiceUnavailableException)
-            // When this happens, usage is empty and providerMetadata is undefined
-            const hasUsage = usage && Object.keys(usage).length > 0;
-            if (!hasUsage) {
-                console.error('[Stream Error] Empty usage detected - possible Bedrock 503 or mid-stream failure');
-                console.error('[Stream Error] finishReason:', finishReason);
-                console.error('[Stream Error] text received:', text?.substring(0, 200) || '(none)');
-                console.error('[Stream Error] toolCalls:', toolCalls?.length || 0);
-                // Log the user's last message for debugging
-                const lastUserMsg = enhancedMessages.filter(m => m.role === 'user').pop();
-                if (lastUserMsg) {
-                    const content = lastUserMsg.content;
-                    const preview = Array.isArray(content)
-                        ? (content.find((c) => c.type === 'text') as { type: 'text'; text: string } | undefined)?.text?.substring(0, 100)
-                        : String(content).substring(0, 100);
-                    console.error('[Stream Error] Last user message preview:', preview);
-                }
-            } else {
-                console.log('[Cache] Full providerMetadata:', JSON.stringify(providerMetadata, null, 2));
-                console.log('[Cache] Usage:', JSON.stringify(usage, null, 2));
-            }
+        onFinish: ({ usage, providerMetadata }) => {
+            console.log('[Cache] providerMetadata:', JSON.stringify(providerMetadata, null, 2));
+            console.log('[Cache] Usage:', JSON.stringify(usage, null, 2));
         },
         tools: {
             // Client-side tool that will be executed on the client
@@ -276,48 +257,7 @@ IMPORTANT: Keep edits concise:
         temperature: 0,
     });
 
-    // Error handler function to provide detailed error messages
-    function errorHandler(error: unknown) {
-        if (error == null) {
-            return 'unknown error';
-        }
-
-        const errorString = typeof error === 'string'
-            ? error
-            : error instanceof Error
-                ? error.message
-                : JSON.stringify(error);
-
-        // Check for Bedrock service errors (503, throttling, etc.)
-        if (errorString.includes('ServiceUnavailable') ||
-            errorString.includes('503') ||
-            errorString.includes('temporarily unavailable')) {
-            console.error('[Bedrock Error] ServiceUnavailableException:', errorString);
-            return 'The AI service is temporarily unavailable. Please try again in a few seconds.';
-        }
-
-        // Check for throttling errors
-        if (errorString.includes('ThrottlingException') ||
-            errorString.includes('rate limit') ||
-            errorString.includes('too many requests') ||
-            errorString.includes('429')) {
-            console.error('[Bedrock Error] ThrottlingException:', errorString);
-            return 'Too many requests. Please wait a moment and try again.';
-        }
-
-        // Check for image not supported error (e.g., DeepSeek models)
-        if (errorString.includes('image_url') ||
-            errorString.includes('unknown variant') ||
-            (errorString.includes('image') && errorString.includes('not supported'))) {
-            return 'This model does not support image inputs. Please remove the image and try again, or switch to a vision-capable model.';
-        }
-
-        return errorString;
-    }
-
-    return result.toUIMessageStreamResponse({
-        onError: errorHandler,
-    });
+    return result.toUIMessageStreamResponse();
 }
 
 export async function POST(req: Request) {
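The route now returns result.toUIMessageStreamResponse() and relies on the AI SDK's default error reporting. If friendlier messages are ever wanted again, the same onError hook the removed errorHandler used can be re-attached; a minimal sketch of such a mapper (the function name and the specific cases are illustrative, not from the codebase):

    // Sketch: an error mapper in the shape the removed errorHandler had; it would
    // be passed back as result.toUIMessageStreamResponse({ onError: mapStreamError }).
    function mapStreamError(error: unknown): string {
        if (error == null) return "unknown error";
        const message = error instanceof Error ? error.message : String(error);
        // Example mapping for throttling; extend with other provider-specific cases as needed.
        if (message.includes("ThrottlingException") || message.includes("429")) {
            return "Too many requests. Please wait a moment and try again.";
        }
        return message;
    }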
@@ -4,7 +4,7 @@ import type React from "react";
 import { useRef, useEffect, useState } from "react";
 import { flushSync } from "react-dom";
 import { FaGithub } from "react-icons/fa";
-import { PanelRightClose, PanelRightOpen, CheckCircle } from "lucide-react";
+import { PanelRightClose, PanelRightOpen } from "lucide-react";
 import Link from "next/link";
 import Image from "next/image";
 
@@ -62,7 +62,6 @@ export default function ChatPanel({
     const [files, setFiles] = useState<File[]>([]);
     const [showHistory, setShowHistory] = useState(false);
    const [input, setInput] = useState("");
-    const [streamingError, setStreamingError] = useState<Error | null>(null);
 
     // Store XML snapshots for each user message (keyed by message index)
     const xmlSnapshotsRef = useRef<Map<number, string>>(new Map());
@@ -80,7 +79,6 @@ export default function ChatPanel({
         status,
         error,
         setMessages,
-        stop,
     } = useChat({
         transport: new DefaultChatTransport({
             api: "/api/chat",
@@ -168,68 +166,9 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
         },
         onError: (error) => {
             console.error("Chat error:", error);
-            setStreamingError(error);
         },
     });
 
-    // Streaming timeout detection - detects when stream stalls mid-response (e.g., Bedrock 503)
-    // This catches cases where onError doesn't fire because headers were already sent
-    const lastMessageCountRef = useRef(0);
-    const lastMessagePartsRef = useRef(0);
-
-    useEffect(() => {
-        // Clear streaming error when status changes to ready
-        if (status === "ready") {
-            setStreamingError(null);
-            lastMessageCountRef.current = 0;
-            lastMessagePartsRef.current = 0;
-            return;
-        }
-
-        if (status !== "streaming") return;
-
-        const STALL_TIMEOUT_MS = 15000; // 15 seconds without any update
-
-        // Capture current state BEFORE setting timeout
-        // This way we compare against values at the time timeout was set
-        const currentPartsCount = messages.reduce(
-            (acc, msg) => acc + (msg.parts?.length || 0),
-            0
-        );
-        const capturedMessageCount = messages.length;
-        const capturedPartsCount = currentPartsCount;
-
-        // Update refs immediately so next effect run has fresh values
-        lastMessageCountRef.current = messages.length;
-        lastMessagePartsRef.current = currentPartsCount;
-
-        const timeoutId = setTimeout(() => {
-            // Re-count parts at timeout time
-            const newPartsCount = messages.reduce(
-                (acc, msg) => acc + (msg.parts?.length || 0),
-                0
-            );
-
-            // If no change since timeout was set, stream has stalled
-            if (
-                messages.length === capturedMessageCount &&
-                newPartsCount === capturedPartsCount
-            ) {
-                console.error(
-                    "[Streaming Timeout] No activity for 15s - forcing error state"
-                );
-                setStreamingError(
-                    new Error(
-                        "Connection lost. The AI service may be temporarily unavailable. Please try again."
-                    )
-                );
-                stop(); // Allow user to retry by transitioning status to "ready"
-            }
-        }, STALL_TIMEOUT_MS);
-
-        return () => clearTimeout(timeoutId);
-    }, [status, messages, stop]);
-
     const messagesEndRef = useRef<HTMLDivElement>(null);
 
     useEffect(() => {
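The removed detector boils down to a re-armed timeout: every render that advances the stream resets the timer, so the timer only fires when nothing has changed for the whole stall window. A condensed sketch of that pattern (hook name and parameters are illustrative, not from the codebase):

    import { useEffect, useState } from "react";

    // `active` is true while streaming; `progress` is any value that grows as parts arrive.
    function useStallWatchdog(active: boolean, progress: number, timeoutMs = 15000) {
        const [stalled, setStalled] = useState(false);

        useEffect(() => {
            if (!active) {
                setStalled(false);
                return;
            }
            // Re-armed whenever progress changes; firing means nothing advanced for timeoutMs.
            const id = setTimeout(() => setStalled(true), timeoutMs);
            return () => clearTimeout(id);
        }, [active, progress, timeoutMs]);

        return stalled;
    }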
@@ -240,13 +179,8 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
 
     const onFormSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
         e.preventDefault();
-        // Allow retry if there's a streaming error (workaround for stop() not transitioning status)
-        const isProcessing =
-            (status === "streaming" || status === "submitted") &&
-            !streamingError;
+        const isProcessing = status === "streaming" || status === "submitted";
         if (input.trim() && !isProcessing) {
-            // Clear any previous streaming error before starting new request
-            setStreamingError(null);
             try {
                 let chartXml = await onFetchChart();
                 chartXml = formatXML(chartXml);
@@ -476,14 +410,6 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
                     >
                         About
                     </Link>
-                    <ButtonWithTooltip
-                        tooltipContent="Recent generation failures were caused by our AI provider's infrastructure issue, not the app code. After extensive debugging, I've switched providers and observed 30+ minutes of stability. If issues persist, please report on GitHub."
-                        variant="ghost"
-                        size="icon"
-                        className="h-6 w-6 text-green-500 hover:text-green-600"
-                    >
-                        <CheckCircle className="h-4 w-4" />
-                    </ButtonWithTooltip>
                 </div>
                 <div className="flex items-center gap-1">
                     <a
@@ -511,7 +437,7 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
             <main className="flex-1 overflow-hidden">
                 <ChatMessageDisplay
                     messages={messages}
-                    error={error || streamingError}
+                    error={error}
                     setInput={setInput}
                     setFiles={handleFileChange}
                     onRegenerate={handleRegenerate}
@@ -535,7 +461,7 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
                     onFileChange={handleFileChange}
                     showHistory={showHistory}
                     onToggleHistory={setShowHistory}
-                    error={error || streamingError}
+                    error={error}
                 />
             </footer>
         </div>