Mirror of https://github.com/DayuanJiang/next-ai-draw-io.git, synced 2026-01-03 06:42:27 +08:00
Compare commits
12 Commits
feat/messa
...
fix/preven
| Author | SHA1 | Date |
|---|---|---|
|  | 22d0d4039d |  |
|  | 57bfc9cef7 |  |
|  | 0543f71c43 |  |
|  | 970b88612d |  |
|  | c805277a76 |  |
|  | 95160f5a21 |  |
|  | b206e16c02 |  |
|  | 563b18e8ff |  |
|  | 2366255e8f |  |
|  | 255308f829 |  |
|  | a9493c8877 |  |
|  | a0c3db100a |  |
22  amplify.yml  Normal file
@@ -0,0 +1,22 @@
version: 1
frontend:
  phases:
    preBuild:
      commands:
        - npm ci --cache .npm --prefer-offline
    build:
      commands:
        # Write env vars to .env.production for Next.js SSR runtime
        - env | grep -e AI_MODEL >> .env.production
        - env | grep -e AI_PROVIDER >> .env.production
        - env | grep -e OPENAI_API_KEY >> .env.production
        - env | grep -e NEXT_PUBLIC_ >> .env.production
        - npm run build
  artifacts:
    baseDirectory: .next
    files:
      - '**/*'
  cache:
    paths:
      - .next/cache/**/*
      - .npm/**/*
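The build commands above copy selected environment variables into `.env.production` so they are visible to the Next.js SSR runtime (Amplify does not automatically expose console-defined variables to server-side code). A minimal sketch of how the baked values might then be read at runtime; the helper name is illustrative and not part of this commit:

```typescript
// Illustrative helper, not part of this commit: at runtime Next.js loads
// .env.production into process.env, so the values written during the Amplify
// build step are available to server-side code such as the chat route.
export function getServerAIConfig() {
    return {
        provider: process.env.AI_PROVIDER, // e.g. "bedrock" or "openai"
        model: process.env.AI_MODEL,
        openaiApiKey: process.env.OPENAI_API_KEY, // only set when the OpenAI provider is used
    };
}
```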
@@ -154,9 +154,28 @@ ${lastMessageText}
        messages: allMessages,
        ...(providerOptions && { providerOptions }),
        ...(headers && { headers }),
        onFinish: ({ usage, providerMetadata }) => {
            console.log('[Cache] Full providerMetadata:', JSON.stringify(providerMetadata, null, 2));
            console.log('[Cache] Usage:', JSON.stringify(usage, null, 2));
        onFinish: ({ usage, providerMetadata, finishReason, text, toolCalls }) => {
            // Detect potential mid-stream failures (e.g., Bedrock 503 ServiceUnavailableException)
            // When this happens, usage is empty and providerMetadata is undefined
            const hasUsage = usage && Object.keys(usage).length > 0;
            if (!hasUsage) {
                console.error('[Stream Error] Empty usage detected - possible Bedrock 503 or mid-stream failure');
                console.error('[Stream Error] finishReason:', finishReason);
                console.error('[Stream Error] text received:', text?.substring(0, 200) || '(none)');
                console.error('[Stream Error] toolCalls:', toolCalls?.length || 0);
                // Log the user's last message for debugging
                const lastUserMsg = enhancedMessages.filter(m => m.role === 'user').pop();
                if (lastUserMsg) {
                    const content = lastUserMsg.content;
                    const preview = Array.isArray(content)
                        ? (content.find((c) => c.type === 'text') as { type: 'text'; text: string } | undefined)?.text?.substring(0, 100)
                        : String(content).substring(0, 100);
                    console.error('[Stream Error] Last user message preview:', preview);
                }
            } else {
                console.log('[Cache] Full providerMetadata:', JSON.stringify(providerMetadata, null, 2));
                console.log('[Cache] Usage:', JSON.stringify(usage, null, 2));
            }
        },
        tools: {
            // Client-side tool that will be executed on the client
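The new `onFinish` handler treats an empty `usage` object as the signal that the stream died mid-response (for example a Bedrock 503). The same check pulled out as a standalone sketch, assuming the callback payload has the shape shown above:

```typescript
// Sketch only: mirrors the detection logic added in the diff above.
// When a provider drops the stream mid-response, usage arrives empty
// and providerMetadata is undefined.
type FinishInfo = {
    usage?: Record<string, unknown>;
    providerMetadata?: unknown;
};

function isLikelyMidStreamFailure({ usage, providerMetadata }: FinishInfo): boolean {
    const hasUsage = !!usage && Object.keys(usage).length > 0;
    return !hasUsage && providerMetadata === undefined;
}
```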
@@ -232,6 +251,23 @@ IMPORTANT: Keep edits concise:
                ? error.message
                : JSON.stringify(error);

            // Check for Bedrock service errors (503, throttling, etc.)
            if (errorString.includes('ServiceUnavailable') ||
                errorString.includes('503') ||
                errorString.includes('temporarily unavailable')) {
                console.error('[Bedrock Error] ServiceUnavailableException:', errorString);
                return 'The AI service is temporarily unavailable. Please try again in a few seconds.';
            }

            // Check for throttling errors
            if (errorString.includes('ThrottlingException') ||
                errorString.includes('rate limit') ||
                errorString.includes('too many requests') ||
                errorString.includes('429')) {
                console.error('[Bedrock Error] ThrottlingException:', errorString);
                return 'Too many requests. Please wait a moment and try again.';
            }

            // Check for image not supported error (e.g., DeepSeek models)
            if (errorString.includes('image_url') ||
                errorString.includes('unknown variant') ||
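The error handler above translates raw provider errors into user-facing messages by substring matching. A condensed sketch of that mapping (the surrounding handler and the image-not-supported branch are cut off in this view):

```typescript
// Sketch of the substring-matching approach used above; the real handler also
// covers an image-not-supported case (e.g. DeepSeek models) that is truncated here.
function toUserFacingError(error: unknown): string {
    const errorString =
        error instanceof Error ? error.message : JSON.stringify(error);

    // Bedrock service errors (503, temporarily unavailable)
    if (
        errorString.includes("ServiceUnavailable") ||
        errorString.includes("503") ||
        errorString.includes("temporarily unavailable")
    ) {
        return "The AI service is temporarily unavailable. Please try again in a few seconds.";
    }

    // Throttling / rate limiting
    if (
        errorString.includes("ThrottlingException") ||
        errorString.includes("rate limit") ||
        errorString.includes("too many requests") ||
        errorString.includes("429")
    ) {
        return "Too many requests. Please wait a moment and try again.";
    }

    return errorString;
}
```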
12  app/page.tsx
@@ -32,6 +32,18 @@ export default function Home() {
        return () => window.removeEventListener('keydown', handleKeyDown);
    }, []);

    // Show confirmation dialog when user tries to leave the page
    // This helps prevent accidental navigation from browser back gestures
    useEffect(() => {
        const handleBeforeUnload = (event: BeforeUnloadEvent) => {
            event.preventDefault();
            return '';
        };

        window.addEventListener('beforeunload', handleBeforeUnload);
        return () => window.removeEventListener('beforeunload', handleBeforeUnload);
    }, []);

    return (
        <div className="flex h-screen bg-background relative overflow-hidden">
            {/* Mobile warning overlay */}
@@ -27,7 +27,7 @@ export function ButtonWithTooltip({
            <TooltipTrigger asChild>
                <Button {...buttonProps}>{children}</Button>
            </TooltipTrigger>
            <TooltipContent>{tooltipContent}</TooltipContent>
            <TooltipContent className="max-w-xs text-wrap">{tooltipContent}</TooltipContent>
        </Tooltip>
    </TooltipProvider>
);
@@ -90,7 +90,10 @@ export default function ExamplePanel({
                icon={<Zap className="w-4 h-4 text-primary" />}
                title="Animated Diagram"
                description="Draw a transformer architecture with animated connectors"
                onClick={() => setInput("Give me a **animated connector** diagram of transformer's architecture")}
                onClick={() => {
                    setInput("Give me a **animated connector** diagram of transformer's architecture")
                    setFiles([])
                }}
            />

            <ExampleCard
@@ -111,7 +114,10 @@ export default function ExamplePanel({
                icon={<Palette className="w-4 h-4 text-primary" />}
                title="Creative Drawing"
                description="Draw something fun and creative"
                onClick={() => setInput("Draw a cat for me")}
                onClick={() => {
                    setInput("Draw a cat for me")
                    setFiles([])
                }}
            />
        </div>
@@ -29,6 +29,7 @@ interface ChatInputProps {
    onFileChange?: (files: File[]) => void;
    showHistory?: boolean;
    onToggleHistory?: (show: boolean) => void;
    error?: Error | null;
}

export function ChatInput({
@@ -41,6 +42,7 @@ export function ChatInput({
    onFileChange = () => {},
    showHistory = false,
    onToggleHistory = () => {},
    error = null,
}: ChatInputProps) {
    const { diagramHistory, saveDiagramToFile } = useDiagram();
    const textareaRef = useRef<HTMLTextAreaElement>(null);
@@ -49,7 +51,8 @@ export function ChatInput({
    const [showClearDialog, setShowClearDialog] = useState(false);
    const [showSaveDialog, setShowSaveDialog] = useState(false);

    const isDisabled = status === "streaming" || status === "submitted";
    // Allow retry when there's an error (even if status is still "streaming" or "submitted")
    const isDisabled = (status === "streaming" || status === "submitted") && !error;

    useEffect(() => {
        console.log('[ChatInput] Status changed to:', status, '| Input disabled:', isDisabled);
@@ -115,7 +115,7 @@ export function ChatMessageDisplay({
                previousXML.current = convertedXml;
                onDisplayChart(replacedXML);
            } else {
                console.error("[ChatMessageDisplay] XML validation failed:", validationError);
                console.log("[ChatMessageDisplay] XML validation failed:", validationError);
            }
        }
    },
@@ -4,7 +4,7 @@ import type React from "react";
import { useRef, useEffect, useState } from "react";
import { flushSync } from "react-dom";
import { FaGithub } from "react-icons/fa";
import { PanelRightClose, PanelRightOpen } from "lucide-react";
import { PanelRightClose, PanelRightOpen, CheckCircle } from "lucide-react";
import Link from "next/link";
import Image from "next/image";

@@ -61,6 +61,7 @@ export default function ChatPanel({
    const [files, setFiles] = useState<File[]>([]);
    const [showHistory, setShowHistory] = useState(false);
    const [input, setInput] = useState("");
    const [streamingError, setStreamingError] = useState<Error | null>(null);

    // Store XML snapshots for each user message (keyed by message index)
    const xmlSnapshotsRef = useRef<Map<number, string>>(new Map());
@@ -71,75 +72,88 @@ export default function ChatPanel({
        chartXMLRef.current = chartXML;
    }, [chartXML]);

    const { messages, sendMessage, addToolResult, status, error, setMessages } =
        useChat({
            transport: new DefaultChatTransport({
                api: "/api/chat",
            }),
            async onToolCall({ toolCall }) {
                if (toolCall.toolName === "display_diagram") {
                    const { xml } = toolCall.input as { xml: string };
    const {
        messages,
        sendMessage,
        addToolResult,
        status,
        error,
        setMessages,
        stop,
    } = useChat({
        transport: new DefaultChatTransport({
            api: "/api/chat",
        }),
        async onToolCall({ toolCall }) {
            if (toolCall.toolName === "display_diagram") {
                const { xml } = toolCall.input as { xml: string };

                    const validationError = validateMxCellStructure(xml);
                const validationError = validateMxCellStructure(xml);

                    if (validationError) {
                        addToolResult({
                            tool: "display_diagram",
                            toolCallId: toolCall.toolCallId,
                            output: validationError,
                        });
                if (validationError) {
                    addToolResult({
                        tool: "display_diagram",
                        toolCallId: toolCall.toolCallId,
                        output: validationError,
                    });
                } else {
                    addToolResult({
                        tool: "display_diagram",
                        toolCallId: toolCall.toolCallId,
                        output: "Successfully displayed the diagram.",
                    });
                }
            } else if (toolCall.toolName === "edit_diagram") {
                const { edits } = toolCall.input as {
                    edits: Array<{ search: string; replace: string }>;
                };

                let currentXml = "";
                try {
                    console.log("[edit_diagram] Starting...");
                    // Use chartXML from ref directly - more reliable than export
                    // especially on Vercel where DrawIO iframe may have latency issues
                    // Using ref to avoid stale closure in callback
                    const cachedXML = chartXMLRef.current;
                    if (cachedXML) {
                        currentXml = cachedXML;
                        console.log(
                            "[edit_diagram] Using cached chartXML, length:",
                            currentXml.length
                        );
                    } else {
                        addToolResult({
                            tool: "display_diagram",
                            toolCallId: toolCall.toolCallId,
                            output: "Successfully displayed the diagram.",
                        });
                        // Fallback to export only if no cached XML
                        console.log(
                            "[edit_diagram] No cached XML, fetching from DrawIO..."
                        );
                        currentXml = await onFetchChart(false);
                        console.log(
                            "[edit_diagram] Got XML from export, length:",
                            currentXml.length
                        );
                    }
                } else if (toolCall.toolName === "edit_diagram") {
                    const { edits } = toolCall.input as {
                        edits: Array<{ search: string; replace: string }>;
                    };

                    let currentXml = "";
                    try {
                        console.log("[edit_diagram] Starting...");
                        // Use chartXML from ref directly - more reliable than export
                        // especially on Vercel where DrawIO iframe may have latency issues
                        // Using ref to avoid stale closure in callback
                        const cachedXML = chartXMLRef.current;
                        if (cachedXML) {
                            currentXml = cachedXML;
                            console.log("[edit_diagram] Using cached chartXML, length:", currentXml.length);
                        } else {
                            // Fallback to export only if no cached XML
                            console.log("[edit_diagram] No cached XML, fetching from DrawIO...");
                            currentXml = await onFetchChart(false);
                            console.log("[edit_diagram] Got XML from export, length:", currentXml.length);
                        }
                        const { replaceXMLParts } = await import("@/lib/utils");
                        const editedXml = replaceXMLParts(currentXml, edits);

                    const { replaceXMLParts } = await import("@/lib/utils");
                    const editedXml = replaceXMLParts(currentXml, edits);
                    onDisplayChart(editedXml);

                        onDisplayChart(editedXml);
                        addToolResult({
                            tool: "edit_diagram",
                            toolCallId: toolCall.toolCallId,
                            output: `Successfully applied ${edits.length} edit(s) to the diagram.`,
                        });
                        console.log("[edit_diagram] Success");
                    } catch (error) {
                        console.error("[edit_diagram] Failed:", error);

                    addToolResult({
                        tool: "edit_diagram",
                        toolCallId: toolCall.toolCallId,
                        output: `Successfully applied ${edits.length} edit(s) to the diagram.`,
                    });
                    console.log("[edit_diagram] Success");
                } catch (error) {
                    console.error("[edit_diagram] Failed:", error);
                    const errorMessage =
                        error instanceof Error ? error.message : String(error);

                        const errorMessage =
                            error instanceof Error
                                ? error.message
                                : String(error);

                    addToolResult({
                        tool: "edit_diagram",
                        toolCallId: toolCall.toolCallId,
                        output: `Edit failed: ${errorMessage}
                        addToolResult({
                            tool: "edit_diagram",
                            toolCallId: toolCall.toolCallId,
                            output: `Edit failed: ${errorMessage}

Current diagram XML:
\`\`\`xml
@@ -147,14 +161,73 @@ ${currentXml || "No XML available"}
\`\`\`

Please retry with an adjusted search pattern or use display_diagram if retries are exhausted.`,
                        });
                    }
                });
            }
        },
        onError: (error) => {
            console.error("Chat error:", error);
        },
    });
            }
        },
        onError: (error) => {
            console.error("Chat error:", error);
            setStreamingError(error);
        },
    });

    // Streaming timeout detection - detects when stream stalls mid-response (e.g., Bedrock 503)
    // This catches cases where onError doesn't fire because headers were already sent
    const lastMessageCountRef = useRef(0);
    const lastMessagePartsRef = useRef(0);

    useEffect(() => {
        // Clear streaming error when status changes to ready
        if (status === "ready") {
            setStreamingError(null);
            lastMessageCountRef.current = 0;
            lastMessagePartsRef.current = 0;
            return;
        }

        if (status !== "streaming") return;

        const STALL_TIMEOUT_MS = 15000; // 15 seconds without any update

        // Capture current state BEFORE setting timeout
        // This way we compare against values at the time timeout was set
        const currentPartsCount = messages.reduce(
            (acc, msg) => acc + (msg.parts?.length || 0),
            0
        );
        const capturedMessageCount = messages.length;
        const capturedPartsCount = currentPartsCount;

        // Update refs immediately so next effect run has fresh values
        lastMessageCountRef.current = messages.length;
        lastMessagePartsRef.current = currentPartsCount;

        const timeoutId = setTimeout(() => {
            // Re-count parts at timeout time
            const newPartsCount = messages.reduce(
                (acc, msg) => acc + (msg.parts?.length || 0),
                0
            );

            // If no change since timeout was set, stream has stalled
            if (
                messages.length === capturedMessageCount &&
                newPartsCount === capturedPartsCount
            ) {
                console.error(
                    "[Streaming Timeout] No activity for 15s - forcing error state"
                );
                setStreamingError(
                    new Error(
                        "Connection lost. The AI service may be temporarily unavailable. Please try again."
                    )
                );
                stop(); // Allow user to retry by transitioning status to "ready"
            }
        }, STALL_TIMEOUT_MS);

        return () => clearTimeout(timeoutId);
    }, [status, messages, stop]);

    const messagesEndRef = useRef<HTMLDivElement>(null);
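The stall detector above works by re-arming a 15-second timer every time `messages` changes (it is in the effect's dependency array); if nothing new arrives before the timer fires, a synthetic error is set and `stop()` is called so the UI can offer a retry. The same capture-and-compare idea as a generalized hook, for illustration only (the hook name and signature are not part of this commit):

```typescript
import { useEffect, useRef } from "react";

// Illustrative hook, not part of the diff: invokes onStall when `activityCounter`
// has not changed for `timeoutMs` while `active` is true. Any counter change
// re-runs the effect, which clears and re-arms the timer.
function useStallWatchdog(
    active: boolean,
    activityCounter: number,
    timeoutMs: number,
    onStall: () => void
) {
    const onStallRef = useRef(onStall);
    onStallRef.current = onStall;

    useEffect(() => {
        if (!active) return;
        const id = setTimeout(() => onStallRef.current(), timeoutMs);
        return () => clearTimeout(id);
    }, [active, activityCounter, timeoutMs]);
}
```

In ChatPanel terms, `active` would be `status === "streaming"` and `activityCounter` the total number of streamed message parts.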
@@ -164,11 +237,15 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
        }
    }, [messages]);


    const onFormSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
        e.preventDefault();
        const isProcessing = status === "streaming" || status === "submitted";
        // Allow retry if there's a streaming error (workaround for stop() not transitioning status)
        const isProcessing =
            (status === "streaming" || status === "submitted") &&
            !streamingError;
        if (input.trim() && !isProcessing) {
            // Clear any previous streaming error before starting new request
            setStreamingError(null);
            try {
                let chartXml = await onFetchChart();
                chartXml = formatXML(chartXml);
@@ -233,7 +310,10 @@ Please retry with an adjusted search pattern or use display_diagram if retries a

        // Find the user message before this assistant message
        let userMessageIndex = messageIndex - 1;
        while (userMessageIndex >= 0 && messages[userMessageIndex].role !== "user") {
        while (
            userMessageIndex >= 0 &&
            messages[userMessageIndex].role !== "user"
        ) {
            userMessageIndex--;
        }

@@ -249,7 +329,10 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
        // Get the saved XML snapshot for this user message
        const savedXml = xmlSnapshotsRef.current.get(userMessageIndex);
        if (!savedXml) {
            console.error("No saved XML snapshot for message index:", userMessageIndex);
            console.error(
                "No saved XML snapshot for message index:",
                userMessageIndex
            );
            return;
        }

@@ -294,7 +377,10 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
        // Get the saved XML snapshot for this user message
        const savedXml = xmlSnapshotsRef.current.get(messageIndex);
        if (!savedXml) {
            console.error("No saved XML snapshot for message index:", messageIndex);
            console.error(
                "No saved XML snapshot for message index:",
                messageIndex
            );
            return;
        }

@@ -388,6 +474,14 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
                    >
                        About
                    </Link>
                    <ButtonWithTooltip
                        tooltipContent="Recent generation failures were caused by our AI provider's infrastructure issue, not the app code. After extensive debugging, I've switched providers and observed 30+ minutes of stability. If issues persist, please report on GitHub."
                        variant="ghost"
                        size="icon"
                        className="h-6 w-6 text-green-500 hover:text-green-600"
                    >
                        <CheckCircle className="h-4 w-4" />
                    </ButtonWithTooltip>
                </div>
                <div className="flex items-center gap-1">
                    <a
@@ -415,7 +509,7 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
                <main className="flex-1 overflow-hidden">
                    <ChatMessageDisplay
                        messages={messages}
                        error={error}
                        error={error || streamingError}
                        setInput={setInput}
                        setFiles={handleFileChange}
                        onRegenerate={handleRegenerate}
@@ -439,6 +533,7 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
                        onFileChange={handleFileChange}
                        showHistory={showHistory}
                        onToggleHistory={setShowHistory}
                        error={error || streamingError}
                    />
                </footer>
            </div>
@@ -1,4 +1,5 @@
import { bedrock } from '@ai-sdk/amazon-bedrock';
import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';
import { fromNodeProviderChain } from '@aws-sdk/credential-providers';
import { openai, createOpenAI } from '@ai-sdk/openai';
import { createAnthropic } from '@ai-sdk/anthropic';
import { google, createGoogleGenerativeAI } from '@ai-sdk/google';
@@ -38,7 +39,7 @@ const ANTHROPIC_BETA_HEADERS = {

// Map of provider to required environment variable
const PROVIDER_ENV_VARS: Record<ProviderName, string | null> = {
    bedrock: 'AWS_ACCESS_KEY_ID',
    bedrock: null, // AWS SDK auto-uses IAM role on AWS, or env vars locally
    openai: 'OPENAI_API_KEY',
    anthropic: 'ANTHROPIC_API_KEY',
    google: 'GOOGLE_GENERATIVE_AI_API_KEY',
@@ -159,13 +160,20 @@ export function getAIModel(): ModelConfig {
    let headers: Record<string, string> | undefined = undefined;

    switch (provider) {
        case 'bedrock':
            model = bedrock(modelId);
        case 'bedrock': {
            // Use credential provider chain for IAM role support (Amplify, Lambda, etc.)
            // Falls back to env vars (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) for local dev
            const bedrockProvider = createAmazonBedrock({
                region: process.env.AWS_REGION || 'us-west-2',
                credentialProvider: fromNodeProviderChain(),
            });
            model = bedrockProvider(modelId);
            // Add Anthropic beta options if using Claude models via Bedrock
            if (modelId.includes('anthropic.claude')) {
                providerOptions = BEDROCK_ANTHROPIC_BETA;
            }
            break;
        }

        case 'openai':
            if (process.env.OPENAI_BASE_URL) {
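Setting `bedrock: null` in `PROVIDER_ENV_VARS` records that Bedrock needs no explicit API-key variable: `fromNodeProviderChain()` resolves the IAM role on Amplify or Lambda and falls back to `AWS_ACCESS_KEY_ID`/`AWS_SECRET_ACCESS_KEY` for local development. How the map is consumed is not shown in this diff; a hypothetical fail-fast check might look like this (`assertProviderConfigured` is illustrative):

```typescript
// Hypothetical consumer of a PROVIDER_ENV_VARS-style map (the real validation
// code is not part of this diff). A null entry, such as bedrock, skips the check.
function assertProviderConfigured(
    provider: string,
    providerEnvVars: Record<string, string | null>
): void {
    const requiredVar = providerEnvVars[provider];
    if (requiredVar && !process.env[requiredVar]) {
        throw new Error(
            `Provider "${provider}" requires the ${requiredVar} environment variable to be set`
        );
    }
}
```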
1234  package-lock.json  generated
File diff suppressed because it is too large
@@ -17,6 +17,7 @@
        "@ai-sdk/google": "^2.0.0",
        "@ai-sdk/openai": "^2.0.19",
        "@ai-sdk/react": "^2.0.22",
        "@aws-sdk/credential-providers": "^3.943.0",
        "@next/third-parties": "^16.0.6",
        "@openrouter/ai-sdk-provider": "^1.2.3",
        "@radix-ui/react-dialog": "^1.1.6",
BIN  public/favicon.ico  Normal file
Binary file not shown. (After: 15 KiB)