feat: add OpenRouter support and fix input disabling

- Add OpenRouter provider support with @openrouter/ai-sdk-provider
- Fix input not disabling during 'submitted' state for fast providers
- Apply disable logic to all interactive elements (textarea, buttons, handlers)
- Clean up env.example by removing model examples and separator blocks
- Upgrade zod to v4.1.12 for compatibility with ollama-ai-provider-v2
- Add debug logging for status changes in chat components
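
The gist of the input fix, as a minimal sketch (the status union below is an assumption based on the AI SDK's `useChat` statuses; the real flags are the `isDisabled` / `isProcessing` constants in the diffs further down):

    // Assumed useChat status union (not part of this commit).
    type ChatStatus = "ready" | "submitted" | "streaming" | "error";

    // Before: only "streaming" locked the UI, so a fast provider that finished
    // before emitting tokens left a window where the form stayed enabled.
    const wasBusy = (status: ChatStatus) => status === "streaming";

    // After: "submitted" (request sent, no tokens yet) also counts as busy,
    // and every interactive element reads this one derived flag.
    const isBusy = (status: ChatStatus) =>
      status === "streaming" || status === "submitted";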
Author: dayuan.jiang
Date: 2025-11-15 14:29:18 +09:00
Commit: 58dcb3c41a (parent 4a3abc2e39)
6 changed files with 83 additions and 66 deletions


@@ -45,6 +45,12 @@ export function ChatInput({
   const [isDragging, setIsDragging] = useState(false);
   const [showClearDialog, setShowClearDialog] = useState(false);
+  // Debug: Log status changes
+  const isDisabled = status === "streaming" || status === "submitted";
+  useEffect(() => {
+    console.log('[ChatInput] Status changed to:', status, '| Input disabled:', isDisabled);
+  }, [status, isDisabled]);
   // Auto-resize textarea based on content
   const adjustTextareaHeight = useCallback(() => {
     const textarea = textareaRef.current;
@@ -63,7 +69,7 @@ export function ChatInput({
     if ((e.metaKey || e.ctrlKey) && e.key === "Enter") {
       e.preventDefault();
       const form = e.currentTarget.closest("form");
-      if (form && input.trim() && status !== "streaming") {
+      if (form && input.trim() && !isDisabled) {
         form.requestSubmit();
       }
     }
@@ -71,7 +77,7 @@ export function ChatInput({
   // Handle clipboard paste
   const handlePaste = async (e: React.ClipboardEvent) => {
-    if (status === "streaming") return;
+    if (isDisabled) return;
     const items = e.clipboardData.items;
     const imageItems = Array.from(items).filter((item) =>
@@ -140,7 +146,7 @@ export function ChatInput({
     e.stopPropagation();
     setIsDragging(false);
-    if (status === "streaming") return;
+    if (isDisabled) return;
     const droppedFiles = e.dataTransfer.files;
@@ -183,7 +189,7 @@ export function ChatInput({
placeholder="Describe what changes you want to make to the diagram placeholder="Describe what changes you want to make to the diagram
or upload(paste) an image to replicate a diagram. or upload(paste) an image to replicate a diagram.
(Press Cmd/Ctrl + Enter to send)" (Press Cmd/Ctrl + Enter to send)"
disabled={status === "streaming"} disabled={isDisabled}
aria-label="Chat input" aria-label="Chat input"
className="min-h-[80px] resize-none transition-all duration-200 px-1 py-0" className="min-h-[80px] resize-none transition-all duration-200 px-1 py-0"
/> />
@@ -220,7 +226,7 @@ export function ChatInput({
size="icon" size="icon"
onClick={() => onToggleHistory(true)} onClick={() => onToggleHistory(true)}
disabled={ disabled={
status === "streaming" || isDisabled ||
diagramHistory.length === 0 diagramHistory.length === 0
} }
title="Diagram History" title="Diagram History"
@@ -234,7 +240,7 @@ export function ChatInput({
variant="outline" variant="outline"
size="icon" size="icon"
onClick={triggerFileInput} onClick={triggerFileInput}
disabled={status === "streaming"} disabled={isDisabled}
title="Upload image" title="Upload image"
> >
<ImageIcon className="h-4 w-4" /> <ImageIcon className="h-4 w-4" />
@@ -247,21 +253,21 @@ export function ChatInput({
             onChange={handleFileChange}
             accept="image/*"
             multiple
-            disabled={status === "streaming"}
+            disabled={isDisabled}
           />
         </div>
         <Button
           type="submit"
-          disabled={status === "streaming" || !input.trim()}
+          disabled={isDisabled || !input.trim()}
           className="transition-opacity"
           aria-label={
-            status === "streaming"
+            isDisabled
               ? "Sending message..."
               : "Send message"
           }
         >
-          {status === "streaming" ? (
+          {isDisabled ? (
             <Loader2 className="mr-2 h-4 w-4 animate-spin" />
           ) : (
             <Send className="mr-2 h-4 w-4" />


@@ -133,9 +133,15 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
     }
   }, [messages]);
+  // Debug: Log status changes
+  useEffect(() => {
+    console.log('[ChatPanel] Status changed to:', status);
+  }, [status]);
   const onFormSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
     e.preventDefault();
-    if (input.trim() && status !== "streaming") {
+    const isProcessing = status === "streaming" || status === "submitted";
+    if (input.trim() && !isProcessing) {
       try {
         // Fetch chart data before sending message
         let chartXml = await onFetchChart();


@@ -1,83 +1,34 @@
 # AI Provider Configuration
-# Choose your AI provider and model
 
 # AI_PROVIDER: Which provider to use
-# Options: bedrock, openai, anthropic, google, azure, ollama
+# Options: bedrock, openai, anthropic, google, azure, ollama, openrouter
 # Default: bedrock
 AI_PROVIDER=bedrock
 
 # AI_MODEL: The model ID for your chosen provider (REQUIRED)
-# See examples below for each provider
 AI_MODEL=global.anthropic.claude-sonnet-4-5-20250929-v1:0
 
-# ===========================================
 # AWS Bedrock Configuration
-# ===========================================
 # AWS_REGION=us-east-1
 # AWS_ACCESS_KEY_ID=your-access-key-id
 # AWS_SECRET_ACCESS_KEY=your-secret-access-key
-# Popular Bedrock Models (Claude via Bedrock):
-# - global.anthropic.claude-sonnet-4-5-20250929-v1:0 (Latest Claude Sonnet 4.5)
-# - anthropic.claude-sonnet-4-20250514-v1:0 (Claude Sonnet 4)
-# - anthropic.claude-3-7-sonnet-20250219-v3:0 (Claude 3.7 Sonnet)
-# - anthropic.claude-3-5-haiku-20241022-v1:0 (Claude 3.5 Haiku)
 
-# ===========================================
 # OpenAI Configuration
-# ===========================================
 # OPENAI_API_KEY=sk-...
 # OPENAI_ORGANIZATION=org-... # Optional
 # OPENAI_PROJECT=proj_... # Optional
-# Popular OpenAI Models:
-# - gpt-5 (GPT-5)
-# - gpt-4o (GPT-4 Optimized)
-# - gpt-4-turbo (GPT-4 Turbo)
-# - o1 (Reasoning model)
-# - o3 (Reasoning model)
-# - o4 (Reasoning model)
 
-# ===========================================
 # Anthropic (Direct) Configuration
-# ===========================================
 # ANTHROPIC_API_KEY=sk-ant-...
-# Popular Anthropic Models:
-# - claude-sonnet-4-5 (Latest Claude Sonnet 4.5)
-# - claude-haiku-4-5 (Claude Haiku 4.5)
-# - claude-opus-4-1 (Claude Opus 4.1)
-# - claude-3-7-sonnet-latest (Claude 3.7 Sonnet)
-# - claude-3-5-haiku-latest (Claude 3.5 Haiku)
 
-# ===========================================
 # Google Generative AI Configuration
-# ===========================================
 # GOOGLE_GENERATIVE_AI_API_KEY=...
-# Popular Google Models:
-# - gemini-2.5-pro (Gemini 2.5 Pro)
-# - gemini-2.5-flash (Gemini 2.5 Flash)
-# - gemini-2.0-flash (Gemini 2.0 Flash)
-# - gemini-1.5-pro (Gemini 1.5 Pro)
-# - gemini-1.5-flash (Gemini 1.5 Flash)
-# - gemini-1.5-flash-8b (Gemini 1.5 Flash 8B)
 
-# ===========================================
 # Azure OpenAI Configuration
-# ===========================================
 # AZURE_RESOURCE_NAME=your-resource-name
 # AZURE_API_KEY=...
-# Azure Model Configuration:
-# Use your deployment name (not the model ID)
-# Example: if you deployed GPT-4 as "my-gpt4-deployment"
-# AI_MODEL=my-gpt4-deployment
 
-# ===========================================
 # Ollama (Local) Configuration
-# ===========================================
 # OLLAMA_BASE_URL=http://localhost:11434/api # Optional, defaults to localhost
-# IMPORTANT: This app requires models that support tool calling (v2 specification)
-# Note: Models must be installed locally with `ollama pull <model-name>`
+
+# OpenRouter Configuration
+# OPENROUTER_API_KEY=sk-or-v1-...
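
For reference, a hypothetical OpenRouter setup in a local `.env` would pair the new provider option with an OpenRouter model slug (the slug below is only an illustration, not a recommendation from this commit):

    AI_PROVIDER=openrouter
    AI_MODEL=openai/gpt-4o            # example OpenRouter slug; substitute any listed model
    OPENROUTER_API_KEY=sk-or-v1-...   # your OpenRouter API key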


@@ -4,6 +4,7 @@ import { anthropic } from '@ai-sdk/anthropic';
 import { google } from '@ai-sdk/google';
 import { azure } from '@ai-sdk/azure';
 import { ollama } from 'ollama-ai-provider-v2';
+import { createOpenRouter } from '@openrouter/ai-sdk-provider';
 
 export type ProviderName =
   | 'bedrock'
@@ -11,7 +12,8 @@ export type ProviderName =
   | 'anthropic'
   | 'google'
   | 'azure'
-  | 'ollama';
+  | 'ollama'
+  | 'openrouter';
 
 interface ModelConfig {
   model: any;
@@ -38,6 +40,7 @@ function validateProviderCredentials(provider: ProviderName): void {
     google: 'GOOGLE_GENERATIVE_AI_API_KEY',
     azure: 'AZURE_API_KEY',
     ollama: null, // No credentials needed for local Ollama
+    openrouter: 'OPENROUTER_API_KEY',
   };
 
   const requiredVar = requiredEnvVars[provider];
@@ -53,7 +56,7 @@ function validateProviderCredentials(provider: ProviderName): void {
  * Get the AI model based on environment variables
  *
  * Environment variables:
- * - AI_PROVIDER: The provider to use (bedrock, openai, anthropic, google, azure, ollama)
+ * - AI_PROVIDER: The provider to use (bedrock, openai, anthropic, google, azure, ollama, openrouter)
  * - AI_MODEL: The model ID/name for the selected provider
  *
  * Provider-specific env vars:
@@ -63,6 +66,7 @@ function validateProviderCredentials(provider: ProviderName): void {
  * - AZURE_RESOURCE_NAME, AZURE_API_KEY: Azure OpenAI credentials
  * - AWS_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY: AWS Bedrock credentials
  * - OLLAMA_BASE_URL: Ollama server URL (optional, defaults to http://localhost:11434)
+ * - OPENROUTER_API_KEY: OpenRouter API key
  */
 export function getAIModel(): ModelConfig {
   const provider = (process.env.AI_PROVIDER || 'bedrock') as ProviderName;
@@ -114,9 +118,16 @@ export function getAIModel(): ModelConfig {
       model = ollama(modelId);
       break;
 
+    case 'openrouter':
+      const openrouter = createOpenRouter({
+        apiKey: process.env.OPENROUTER_API_KEY,
+      });
+      model = openrouter(modelId);
+      break;
+
     default:
       throw new Error(
-        `Unknown AI provider: ${provider}. Supported providers: bedrock, openai, anthropic, google, azure, ollama`
+        `Unknown AI provider: ${provider}. Supported providers: bedrock, openai, anthropic, google, azure, ollama, openrouter`
       );
   }
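
For context, a rough sketch of how the returned config is typically consumed with the AI SDK (the route handler is not part of this diff, so the import path and message below are assumptions):

    // Hypothetical call site; the real module path for getAIModel is not shown in this commit.
    import { streamText } from 'ai';
    import { getAIModel } from './models';

    const { model } = getAIModel(); // resolves provider + model from AI_PROVIDER / AI_MODEL

    const result = streamText({
      model,
      messages: [{ role: 'user', content: 'Generate a simple flowchart.' }],
    });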

package-lock.json (generated)

@@ -14,6 +14,7 @@
"@ai-sdk/google": "^2.0.0", "@ai-sdk/google": "^2.0.0",
"@ai-sdk/openai": "^2.0.19", "@ai-sdk/openai": "^2.0.19",
"@ai-sdk/react": "^2.0.22", "@ai-sdk/react": "^2.0.22",
"@openrouter/ai-sdk-provider": "^1.2.3",
"@radix-ui/react-dialog": "^1.1.6", "@radix-ui/react-dialog": "^1.1.6",
"@radix-ui/react-scroll-area": "^1.2.3", "@radix-ui/react-scroll-area": "^1.2.3",
"@radix-ui/react-slot": "^1.1.2", "@radix-ui/react-slot": "^1.1.2",
@@ -1113,6 +1114,47 @@
"node": ">= 10" "node": ">= 10"
} }
}, },
"node_modules/@openrouter/ai-sdk-provider": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/@openrouter/ai-sdk-provider/-/ai-sdk-provider-1.2.3.tgz",
"integrity": "sha512-a6Nc8dPRHakRH9966YJ/HZJhLOds7DuPTscNZDoAr+Aw+tEFUlacSJMvb/b3gukn74mgbuaJRji9YOn62ipfVg==",
"license": "Apache-2.0",
"dependencies": {
"@openrouter/sdk": "^0.1.8"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"ai": "^5.0.0",
"zod": "^3.24.1 || ^v4"
}
},
"node_modules/@openrouter/sdk": {
"version": "0.1.11",
"resolved": "https://registry.npmjs.org/@openrouter/sdk/-/sdk-0.1.11.tgz",
"integrity": "sha512-OuPc8qqidL/PUM8+9WgrOfSR9+b6rKIWiezGcUJ54iPTdh+Gye5Qjut6hrLWlOCMZE7Z853gN90r1ft4iChj7Q==",
"license": "Apache-2.0",
"dependencies": {
"zod": "^3.25.0 || ^4.0.0"
},
"peerDependencies": {
"@tanstack/react-query": "^5",
"react": "^18 || ^19",
"react-dom": "^18 || ^19"
},
"peerDependenciesMeta": {
"@tanstack/react-query": {
"optional": true
},
"react": {
"optional": true
},
"react-dom": {
"optional": true
}
}
},
"node_modules/@opentelemetry/api": { "node_modules/@opentelemetry/api": {
"version": "1.9.0", "version": "1.9.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",


@@ -15,6 +15,7 @@
"@ai-sdk/google": "^2.0.0", "@ai-sdk/google": "^2.0.0",
"@ai-sdk/openai": "^2.0.19", "@ai-sdk/openai": "^2.0.19",
"@ai-sdk/react": "^2.0.22", "@ai-sdk/react": "^2.0.22",
"@openrouter/ai-sdk-provider": "^1.2.3",
"@radix-ui/react-dialog": "^1.1.6", "@radix-ui/react-dialog": "^1.1.6",
"@radix-ui/react-scroll-area": "^1.2.3", "@radix-ui/react-scroll-area": "^1.2.3",
"@radix-ui/react-slot": "^1.1.2", "@radix-ui/react-slot": "^1.1.2",