chore: remove complex 503 error handling code (#102)

- Remove 15s streaming timeout detection (too slow, added complexity)
- Remove status indicator (issue resolved by switching model)
- Remove streamingError state and related refs
- Simplify onFinish callback (remove 503 detection logging)
- Remove errorHandler function (use default AI SDK errors)

The real fix was switching from the global.* to the us.* Bedrock model inference profile.
This removes ~134 lines of unnecessary complexity.
This commit is contained in:
Dayuan Jiang
2025-12-05 20:18:19 +09:00
committed by GitHub
parent e0c5d966e3
commit 4cd78dc561
2 changed files with 8 additions and 142 deletions

View File

@@ -191,28 +191,9 @@ ${lastMessageText}
messages: allMessages,
...(providerOptions && { providerOptions }),
...(headers && { headers }),
onFinish: ({ usage, providerMetadata, finishReason, text, toolCalls }) => {
// Detect potential mid-stream failures (e.g., Bedrock 503 ServiceUnavailableException)
// When this happens, usage is empty and providerMetadata is undefined
const hasUsage = usage && Object.keys(usage).length > 0;
if (!hasUsage) {
console.error('[Stream Error] Empty usage detected - possible Bedrock 503 or mid-stream failure');
console.error('[Stream Error] finishReason:', finishReason);
console.error('[Stream Error] text received:', text?.substring(0, 200) || '(none)');
console.error('[Stream Error] toolCalls:', toolCalls?.length || 0);
// Log the user's last message for debugging
const lastUserMsg = enhancedMessages.filter(m => m.role === 'user').pop();
if (lastUserMsg) {
const content = lastUserMsg.content;
const preview = Array.isArray(content)
? (content.find((c) => c.type === 'text') as { type: 'text'; text: string } | undefined)?.text?.substring(0, 100)
: String(content).substring(0, 100);
console.error('[Stream Error] Last user message preview:', preview);
}
} else {
console.log('[Cache] Full providerMetadata:', JSON.stringify(providerMetadata, null, 2));
console.log('[Cache] Usage:', JSON.stringify(usage, null, 2));
}
onFinish: ({ usage, providerMetadata }) => {
console.log('[Cache] providerMetadata:', JSON.stringify(providerMetadata, null, 2));
console.log('[Cache] Usage:', JSON.stringify(usage, null, 2));
},
tools: {
// Client-side tool that will be executed on the client
@@ -276,48 +257,7 @@ IMPORTANT: Keep edits concise:
temperature: 0,
});
// Error handler function to provide detailed error messages
function errorHandler(error: unknown) {
if (error == null) {
return 'unknown error';
}
const errorString = typeof error === 'string'
? error
: error instanceof Error
? error.message
: JSON.stringify(error);
// Check for Bedrock service errors (503, throttling, etc.)
if (errorString.includes('ServiceUnavailable') ||
errorString.includes('503') ||
errorString.includes('temporarily unavailable')) {
console.error('[Bedrock Error] ServiceUnavailableException:', errorString);
return 'The AI service is temporarily unavailable. Please try again in a few seconds.';
}
// Check for throttling errors
if (errorString.includes('ThrottlingException') ||
errorString.includes('rate limit') ||
errorString.includes('too many requests') ||
errorString.includes('429')) {
console.error('[Bedrock Error] ThrottlingException:', errorString);
return 'Too many requests. Please wait a moment and try again.';
}
// Check for image not supported error (e.g., DeepSeek models)
if (errorString.includes('image_url') ||
errorString.includes('unknown variant') ||
(errorString.includes('image') && errorString.includes('not supported'))) {
return 'This model does not support image inputs. Please remove the image and try again, or switch to a vision-capable model.';
}
return errorString;
}
return result.toUIMessageStreamResponse({
onError: errorHandler,
});
return result.toUIMessageStreamResponse();
}
export async function POST(req: Request) {