From a047a6ff97800cd119a746c501160958291591ec Mon Sep 17 00:00:00 2001 From: Biki Kalita <86558912+Biki-dev@users.noreply.github.com> Date: Wed, 10 Dec 2025 20:54:43 +0530 Subject: [PATCH] feat: Display AI reasoning/thinking blocks in chat interface (#152) * feat: Add reasoning/thinking blocks display in chat interface * feat: add multi-provider options support and replace custom reasoning UI with AI Elements * resolve conflicting reasoning configs and correct provider-specific reasoning parameters * try to solve conflict * fix: simplify reasoning display and remove unnecessary dependencies - Remove Streamdown dependency (~5MB) - reasoning is plain text only - Fix Bedrock providerOptions merging for Claude reasoning configs - Remove unsupported DeepSeek reasoning configuration - Clean up unused environment variables (REASONING_BUDGET_TOKENS, REASONING_EFFORT, DEEPSEEK_REASONING_*) - Remove dead commented code from route.ts Reasoning blocks contain plain thinking text and don't need markdown/diagram/code rendering. * feat: comprehensive reasoning support improvements Major improvements: - Auto-enable reasoning display for all supported models - Fix provider-specific reasoning configurations - Remove unnecessary Streamdown dependency (~5MB) - Clean up debug logging Provider changes: - OpenAI: Auto-enable reasoningSummary for o1/o3/gpt-5 models - Google: Auto-enable includeThoughts for Gemini 2.5/3 models - Bedrock: Restrict reasoningConfig to only Claude/Nova (fixes MiniMax error) - Ollama: Add thinking support for qwen3-like models Other improvements: - Remove ENABLE_REASONING toggle (always enabled) - Fix Bedrock providerOptions merging for Claude - Simplify reasoning component (plain text rendering) - Clean up unused environment variables * fix: critical bugs and documentation gaps in reasoning support Critical fixes: - Fix Bedrock shallow merge bug (deep merge preserves anthropicBeta + reasoningConfig) - Add parseInt validation with parseIntSafe helper (prevents NaN errors) - Validate all numeric env vars with min/max ranges Documentation improvements: - Add BEDROCK_REASONING_BUDGET_TOKENS and BEDROCK_REASONING_EFFORT to env.example - Add OLLAMA_ENABLE_THINKING to env.example - Update JSDoc with accurate env var list and ranges Code cleanup: - Remove debug console.log statements from route.ts - Refactor duplicate providerOptions assignments --------- Co-authored-by: Dayuan Jiang <34411969+DayuanJiang@users.noreply.github.com> Co-authored-by: Dayuan Jiang --- app/api/chat/route.ts | 17 +- components/ai-elements/reasoning.tsx | 186 +++++++++++++++ components/ai-elements/shimmer.tsx | 64 +++++ components/chat-message-display.tsx | 53 +++++ components/chat-panel.tsx | 1 + components/ui/collapsible.tsx | 33 +++ env.example | 17 ++ lib/ai-providers.ts | 307 +++++++++++++++++++++++- package-lock.json | 339 +++++++++++++++++++++++---- package.json | 3 + 10 files changed, 959 insertions(+), 61 deletions(-) create mode 100644 components/ai-elements/reasoning.tsx create mode 100644 components/ai-elements/shimmer.tsx create mode 100644 components/ui/collapsible.tsx diff --git a/app/api/chat/route.ts b/app/api/chat/route.ts index 605feea..153313b 100644 --- a/app/api/chat/route.ts +++ b/app/api/chat/route.ts @@ -208,13 +208,6 @@ async function handleChatRequest(req: Request): Promise { const isFirstMessage = messages.length === 1 const isEmptyDiagram = !xml || xml.trim() === "" || isMinimalDiagram(xml) - // DEBUG: Log cache check conditions - console.log("[Cache DEBUG] messages.length:", 
messages.length) - console.log("[Cache DEBUG] isFirstMessage:", isFirstMessage) - console.log("[Cache DEBUG] xml length:", xml?.length || 0) - console.log("[Cache DEBUG] xml preview:", xml?.substring(0, 200)) - console.log("[Cache DEBUG] isEmptyDiagram:", isEmptyDiagram) - if (isFirstMessage && isEmptyDiagram) { const lastMessage = messages[0] const textPart = lastMessage.parts?.find((p: any) => p.type === "text") @@ -358,7 +351,7 @@ ${lastMessageText} model, stopWhen: stepCountIs(5), messages: allMessages, - ...(providerOptions && { providerOptions }), + ...(providerOptions && { providerOptions }), // This now includes all reasoning configs ...(headers && { headers }), // Langfuse telemetry config (returns undefined if not configured) ...(getTelemetryConfig({ sessionId: validSessionId, userId }) && { @@ -394,13 +387,6 @@ ${lastMessageText} return null }, onFinish: ({ text, usage }) => { - // Log token usage - if (usage) { - const cachedTokens = (usage as any).cachedInputTokens ?? 0 - console.log( - `[Token Usage] input: ${usage.inputTokens ?? 0}, cached: ${cachedTokens}, output: ${usage.outputTokens ?? 0}, total: ${(usage.inputTokens ?? 0) + cachedTokens + (usage.outputTokens ?? 0)}`, - ) - } // Pass usage to Langfuse (Bedrock streaming doesn't auto-report tokens to telemetry) setTraceOutput(text, { promptTokens: usage?.inputTokens, @@ -488,6 +474,7 @@ IMPORTANT: Keep edits concise: }) return result.toUIMessageStreamResponse({ + sendReasoning: true, messageMetadata: ({ part }) => { if (part.type === "finish") { const usage = (part as any).totalUsage diff --git a/components/ai-elements/reasoning.tsx b/components/ai-elements/reasoning.tsx new file mode 100644 index 0000000..271db5c --- /dev/null +++ b/components/ai-elements/reasoning.tsx @@ -0,0 +1,186 @@ +"use client" + +import { useControllableState } from "@radix-ui/react-use-controllable-state" +import { BrainIcon, ChevronDownIcon } from "lucide-react" +import type { ComponentProps, ReactNode } from "react" +import { createContext, memo, useContext, useEffect, useState } from "react" +import { + Collapsible, + CollapsibleContent, + CollapsibleTrigger, +} from "@/components/ui/collapsible" +import { cn } from "@/lib/utils" +import { Shimmer } from "./shimmer" + +type ReasoningContextValue = { + isStreaming: boolean + isOpen: boolean + setIsOpen: (open: boolean) => void + duration: number | undefined +} + +const ReasoningContext = createContext(null) + +export const useReasoning = () => { + const context = useContext(ReasoningContext) + if (!context) { + throw new Error("Reasoning components must be used within Reasoning") + } + return context +} + +export type ReasoningProps = ComponentProps & { + isStreaming?: boolean + open?: boolean + defaultOpen?: boolean + onOpenChange?: (open: boolean) => void + duration?: number +} + +const AUTO_CLOSE_DELAY = 1000 +const MS_IN_S = 1000 + +export const Reasoning = memo( + ({ + className, + isStreaming = false, + open, + defaultOpen = true, + onOpenChange, + duration: durationProp, + children, + ...props + }: ReasoningProps) => { + const [isOpen, setIsOpen] = useControllableState({ + prop: open, + defaultProp: defaultOpen, + onChange: onOpenChange, + }) + const [duration, setDuration] = useControllableState({ + prop: durationProp, + defaultProp: undefined, + }) + + const [hasAutoClosed, setHasAutoClosed] = useState(false) + const [startTime, setStartTime] = useState(null) + + // Track duration when streaming starts and ends + useEffect(() => { + if (isStreaming) { + if (startTime === null) { + 
setStartTime(Date.now()) + } + } else if (startTime !== null) { + setDuration(Math.ceil((Date.now() - startTime) / MS_IN_S)) + setStartTime(null) + } + }, [isStreaming, startTime, setDuration]) + + // Auto-open when streaming starts, auto-close when streaming ends (once only) + useEffect(() => { + if (defaultOpen && !isStreaming && isOpen && !hasAutoClosed) { + // Add a small delay before closing to allow user to see the content + const timer = setTimeout(() => { + setIsOpen(false) + setHasAutoClosed(true) + }, AUTO_CLOSE_DELAY) + + return () => clearTimeout(timer) + } + }, [isStreaming, isOpen, defaultOpen, setIsOpen, hasAutoClosed]) + + const handleOpenChange = (newOpen: boolean) => { + setIsOpen(newOpen) + } + + return ( + + + {children} + + + ) + }, +) + +export type ReasoningTriggerProps = ComponentProps< + typeof CollapsibleTrigger +> & { + getThinkingMessage?: (isStreaming: boolean, duration?: number) => ReactNode +} + +const defaultGetThinkingMessage = (isStreaming: boolean, duration?: number) => { + if (isStreaming || duration === 0) { + return Thinking... + } + if (duration === undefined) { + return

Thought for a few seconds

+  }
+  return

Thought for {duration} seconds

+} + +export const ReasoningTrigger = memo( + ({ + className, + children, + getThinkingMessage = defaultGetThinkingMessage, + ...props + }: ReasoningTriggerProps) => { + const { isStreaming, isOpen, duration } = useReasoning() + + return ( + + {children ?? ( + <> + + {getThinkingMessage(isStreaming, duration)} + + + )} + + ) + }, +) + +export type ReasoningContentProps = ComponentProps< + typeof CollapsibleContent +> & { + children: string +} + +export const ReasoningContent = memo( + ({ className, children, ...props }: ReasoningContentProps) => ( + +
{children}
+
+ ), +) + +Reasoning.displayName = "Reasoning" +ReasoningTrigger.displayName = "ReasoningTrigger" +ReasoningContent.displayName = "ReasoningContent" diff --git a/components/ai-elements/shimmer.tsx b/components/ai-elements/shimmer.tsx new file mode 100644 index 0000000..274bf85 --- /dev/null +++ b/components/ai-elements/shimmer.tsx @@ -0,0 +1,64 @@ +"use client" + +import { motion } from "motion/react" +import { + type CSSProperties, + type ElementType, + type JSX, + memo, + useMemo, +} from "react" +import { cn } from "@/lib/utils" + +export type TextShimmerProps = { + children: string + as?: ElementType + className?: string + duration?: number + spread?: number +} + +const ShimmerComponent = ({ + children, + as: Component = "p", + className, + duration = 2, + spread = 2, +}: TextShimmerProps) => { + const MotionComponent = motion.create( + Component as keyof JSX.IntrinsicElements, + ) + + const dynamicSpread = useMemo( + () => (children?.length ?? 0) * spread, + [children, spread], + ) + + return ( + + {children} + + ) +} + +export const Shimmer = memo(ShimmerComponent) diff --git a/components/chat-message-display.tsx b/components/chat-message-display.tsx index 954c4fd..9de439b 100644 --- a/components/chat-message-display.tsx +++ b/components/chat-message-display.tsx @@ -21,6 +21,11 @@ import { import Image from "next/image" import { useCallback, useEffect, useRef, useState } from "react" import ReactMarkdown from "react-markdown" +import { + Reasoning, + ReasoningContent, + ReasoningTrigger, +} from "@/components/ai-elements/reasoning" import { ScrollArea } from "@/components/ui/scroll-area" import { convertToLegalXml, @@ -167,6 +172,7 @@ interface ChatMessageDisplayProps { sessionId?: string onRegenerate?: (messageIndex: number) => void onEditMessage?: (messageIndex: number, newText: string) => void + status?: "streaming" | "submitted" | "idle" | "error" | "ready" } export function ChatMessageDisplay({ @@ -176,6 +182,7 @@ export function ChatMessageDisplay({ sessionId, onRegenerate, onEditMessage, + status = "idle", }: ChatMessageDisplayProps) { const { chartXML, loadDiagram: onDisplayChart } = useDiagram() const messagesEndRef = useRef(null) @@ -501,6 +508,52 @@ export function ChatMessageDisplay({ )}
+ {/* Reasoning blocks - displayed first for assistant messages */} + {message.role === "assistant" && + message.parts?.map( + (part, partIndex) => { + if (part.type === "reasoning") { + const reasoningPart = + part as { + type: "reasoning" + text: string + } + const isLastPart = + partIndex === + (message.parts + ?.length ?? 0) - + 1 + const isLastMessage = + message.id === + messages[ + messages.length - 1 + ]?.id + const isStreamingReasoning = + status === + "streaming" && + isLastPart && + isLastMessage + + return ( + + + + { + reasoningPart.text + } + + + ) + } + return null + }, + )} {/* Edit mode for user messages */} {isEditing && message.role === "user" ? (
diff --git a/components/chat-panel.tsx b/components/chat-panel.tsx index 6e2651f..25b2997 100644 --- a/components/chat-panel.tsx +++ b/components/chat-panel.tsx @@ -1320,6 +1320,7 @@ Please retry with an adjusted search pattern or use display_diagram if retries a setFiles={handleFileChange} sessionId={sessionId} onRegenerate={handleRegenerate} + status={status} onEditMessage={handleEditMessage} /> diff --git a/components/ui/collapsible.tsx b/components/ui/collapsible.tsx new file mode 100644 index 0000000..ae9fad0 --- /dev/null +++ b/components/ui/collapsible.tsx @@ -0,0 +1,33 @@ +"use client" + +import * as CollapsiblePrimitive from "@radix-ui/react-collapsible" + +function Collapsible({ + ...props +}: React.ComponentProps) { + return +} + +function CollapsibleTrigger({ + ...props +}: React.ComponentProps) { + return ( + + ) +} + +function CollapsibleContent({ + ...props +}: React.ComponentProps) { + return ( + + ) +} + +export { Collapsible, CollapsibleTrigger, CollapsibleContent } diff --git a/env.example b/env.example index 777ed5b..8ec6d43 100644 --- a/env.example +++ b/env.example @@ -11,28 +11,45 @@ AI_MODEL=global.anthropic.claude-sonnet-4-5-20250929-v1:0 # AWS_REGION=us-east-1 # AWS_ACCESS_KEY_ID=your-access-key-id # AWS_SECRET_ACCESS_KEY=your-secret-access-key +# Note: Claude and Nova models support reasoning/extended thinking +# BEDROCK_REASONING_BUDGET_TOKENS=12000 # Optional: Claude reasoning budget in tokens (1024-64000) +# BEDROCK_REASONING_EFFORT=medium # Optional: Nova reasoning effort (low/medium/high) # OpenAI Configuration # OPENAI_API_KEY=sk-... # OPENAI_BASE_URL=https://api.openai.com/v1 # Optional: Custom OpenAI-compatible endpoint # OPENAI_ORGANIZATION=org-... # Optional # OPENAI_PROJECT=proj_... # Optional +# Note: o1/o3/gpt-5 models automatically enable reasoning summary (default: detailed) +# OPENAI_REASONING_EFFORT=low # Optional: Reasoning effort (minimal/low/medium/high) - for o1/o3/gpt-5 +# OPENAI_REASONING_SUMMARY=detailed # Optional: Override reasoning summary (none/brief/detailed) # Anthropic (Direct) Configuration # ANTHROPIC_API_KEY=sk-ant-... # ANTHROPIC_BASE_URL=https://your-custom-anthropic/v1 +# ANTHROPIC_THINKING_TYPE=enabled # Optional: Anthropic extended thinking (enabled) +# ANTHROPIC_THINKING_BUDGET_TOKENS=12000 # Optional: Budget for extended thinking in tokens # Google Generative AI Configuration # GOOGLE_GENERATIVE_AI_API_KEY=... # GOOGLE_BASE_URL=https://generativelanguage.googleapis.com/v1beta # Optional: Custom endpoint +# GOOGLE_CANDIDATE_COUNT=1 # Optional: Number of candidates to generate +# GOOGLE_TOP_K=40 # Optional: Top K sampling parameter +# GOOGLE_TOP_P=0.95 # Optional: Nucleus sampling parameter +# Note: Gemini 2.5/3 models automatically enable reasoning display (includeThoughts: true) +# GOOGLE_THINKING_BUDGET=8192 # Optional: Gemini 2.5 thinking budget in tokens (for more/less thinking) +# GOOGLE_THINKING_LEVEL=high # Optional: Gemini 3 thinking level (low/high) # Azure OpenAI Configuration # AZURE_RESOURCE_NAME=your-resource-name # AZURE_API_KEY=... 
# AZURE_BASE_URL=https://your-resource.openai.azure.com # Optional: Custom endpoint (overrides resourceName) +# AZURE_REASONING_EFFORT=low # Optional: Azure reasoning effort (low, medium, high) +# AZURE_REASONING_SUMMARY=detailed # Ollama (Local) Configuration # OLLAMA_BASE_URL=http://localhost:11434/api # Optional, defaults to localhost +# OLLAMA_ENABLE_THINKING=true # Optional: Enable thinking for models that support it (e.g., qwen3) # OpenRouter Configuration # OPENROUTER_API_KEY=sk-or-v1-... diff --git a/lib/ai-providers.ts b/lib/ai-providers.ts index d421a15..3fafe94 100644 --- a/lib/ai-providers.ts +++ b/lib/ai-providers.ts @@ -56,6 +56,295 @@ const ANTHROPIC_BETA_HEADERS = { "anthropic-beta": "fine-grained-tool-streaming-2025-05-14", } +/** + * Safely parse integer from environment variable with validation + */ +function parseIntSafe( + value: string | undefined, + varName: string, + min?: number, + max?: number, +): number | undefined { + if (!value) return undefined + const parsed = Number.parseInt(value, 10) + if (Number.isNaN(parsed)) { + throw new Error(`${varName} must be a valid integer, got: ${value}`) + } + if (min !== undefined && parsed < min) { + throw new Error(`${varName} must be >= ${min}, got: ${parsed}`) + } + if (max !== undefined && parsed > max) { + throw new Error(`${varName} must be <= ${max}, got: ${parsed}`) + } + return parsed +} + +/** + * Build provider-specific options from environment variables + * Supports various AI SDK providers with their unique configuration options + * + * Environment variables: + * - OPENAI_REASONING_EFFORT: OpenAI reasoning effort level (minimal/low/medium/high) - for o1/o3/gpt-5 + * - OPENAI_REASONING_SUMMARY: OpenAI reasoning summary (none/brief/detailed) - auto-enabled for o1/o3/gpt-5 + * - ANTHROPIC_THINKING_BUDGET_TOKENS: Anthropic thinking budget in tokens (1024-64000) + * - ANTHROPIC_THINKING_TYPE: Anthropic thinking type (enabled) + * - GOOGLE_THINKING_BUDGET: Google Gemini 2.5 thinking budget in tokens (1024-100000) + * - GOOGLE_THINKING_LEVEL: Google Gemini 3 thinking level (low/high) + * - AZURE_REASONING_EFFORT: Azure/OpenAI reasoning effort (low/medium/high) + * - AZURE_REASONING_SUMMARY: Azure reasoning summary (none/brief/detailed) + * - BEDROCK_REASONING_BUDGET_TOKENS: Bedrock Claude reasoning budget in tokens (1024-64000) + * - BEDROCK_REASONING_EFFORT: Bedrock Nova reasoning effort (low/medium/high) + * - OLLAMA_ENABLE_THINKING: Enable Ollama thinking mode (set to "true") + */ +function buildProviderOptions( + provider: ProviderName, + modelId?: string, +): Record | undefined { + const options: Record = {} + + switch (provider) { + case "openai": { + const reasoningEffort = process.env.OPENAI_REASONING_EFFORT + const reasoningSummary = process.env.OPENAI_REASONING_SUMMARY + + // OpenAI reasoning models (o1, o3, gpt-5) need reasoningSummary to return thoughts + if ( + modelId && + (modelId.includes("o1") || + modelId.includes("o3") || + modelId.includes("gpt-5")) + ) { + options.openai = { + // Auto-enable reasoning summary for reasoning models (default: detailed) + reasoningSummary: + (reasoningSummary as "none" | "brief" | "detailed") || + "detailed", + } + + // Optionally configure reasoning effort + if (reasoningEffort) { + options.openai.reasoningEffort = reasoningEffort as + | "minimal" + | "low" + | "medium" + | "high" + } + } else if (reasoningEffort || reasoningSummary) { + // Non-reasoning models: only apply if explicitly configured + options.openai = {} + if (reasoningEffort) { + 
options.openai.reasoningEffort = reasoningEffort as + | "minimal" + | "low" + | "medium" + | "high" + } + if (reasoningSummary) { + options.openai.reasoningSummary = reasoningSummary as + | "none" + | "brief" + | "detailed" + } + } + break + } + + case "anthropic": { + const thinkingBudget = parseIntSafe( + process.env.ANTHROPIC_THINKING_BUDGET_TOKENS, + "ANTHROPIC_THINKING_BUDGET_TOKENS", + 1024, + 64000, + ) + const thinkingType = + process.env.ANTHROPIC_THINKING_TYPE || "enabled" + + if (thinkingBudget) { + options.anthropic = { + thinking: { + type: thinkingType, + budgetTokens: thinkingBudget, + }, + } + } + break + } + + case "google": { + const reasoningEffort = process.env.GOOGLE_REASONING_EFFORT + const thinkingBudgetVal = parseIntSafe( + process.env.GOOGLE_THINKING_BUDGET, + "GOOGLE_THINKING_BUDGET", + 1024, + 100000, + ) + const thinkingLevel = process.env.GOOGLE_THINKING_LEVEL + + // Google Gemini 2.5/3 models think by default, but need includeThoughts: true + // to return the reasoning in the response + if ( + modelId && + (modelId.includes("gemini-2") || + modelId.includes("gemini-3") || + modelId.includes("gemini2") || + modelId.includes("gemini3")) + ) { + const thinkingConfig: Record = { + includeThoughts: true, + } + + // Optionally configure thinking budget or level + if ( + thinkingBudgetVal && + (modelId.includes("2.5") || modelId.includes("2-5")) + ) { + thinkingConfig.thinkingBudget = thinkingBudgetVal + } else if ( + thinkingLevel && + (modelId.includes("gemini-3") || + modelId.includes("gemini3")) + ) { + thinkingConfig.thinkingLevel = thinkingLevel as + | "low" + | "high" + } + + options.google = { thinkingConfig } + } else if (reasoningEffort) { + options.google = { + reasoningEffort: reasoningEffort as + | "low" + | "medium" + | "high", + } + } + + // Keep existing Google options + const options_obj: Record = {} + const candidateCount = parseIntSafe( + process.env.GOOGLE_CANDIDATE_COUNT, + "GOOGLE_CANDIDATE_COUNT", + 1, + 8, + ) + if (candidateCount) { + options_obj.candidateCount = candidateCount + } + const topK = parseIntSafe( + process.env.GOOGLE_TOP_K, + "GOOGLE_TOP_K", + 1, + 100, + ) + if (topK) { + options_obj.topK = topK + } + if (process.env.GOOGLE_TOP_P) { + const topP = Number.parseFloat(process.env.GOOGLE_TOP_P) + if (Number.isNaN(topP) || topP < 0 || topP > 1) { + throw new Error( + `GOOGLE_TOP_P must be a number between 0 and 1, got: ${process.env.GOOGLE_TOP_P}`, + ) + } + options_obj.topP = topP + } + + if (Object.keys(options_obj).length > 0) { + options.google = { ...options.google, ...options_obj } + } + break + } + + case "azure": { + const reasoningEffort = process.env.AZURE_REASONING_EFFORT + const reasoningSummary = process.env.AZURE_REASONING_SUMMARY + + if (reasoningEffort || reasoningSummary) { + options.azure = {} + if (reasoningEffort) { + options.azure.reasoningEffort = reasoningEffort as + | "low" + | "medium" + | "high" + } + if (reasoningSummary) { + options.azure.reasoningSummary = reasoningSummary as + | "none" + | "brief" + | "detailed" + } + } + break + } + + case "bedrock": { + const budgetTokens = parseIntSafe( + process.env.BEDROCK_REASONING_BUDGET_TOKENS, + "BEDROCK_REASONING_BUDGET_TOKENS", + 1024, + 64000, + ) + const reasoningEffort = process.env.BEDROCK_REASONING_EFFORT + + // Bedrock reasoning ONLY for Claude and Nova models + // Other models (MiniMax, etc.) 
don't support reasoningConfig + if ( + modelId && + (budgetTokens || reasoningEffort) && + (modelId.includes("claude") || + modelId.includes("anthropic") || + modelId.includes("nova") || + modelId.includes("amazon")) + ) { + const reasoningConfig: Record = { type: "enabled" } + + // Claude models: use budgetTokens (1024-64000) + if ( + budgetTokens && + (modelId.includes("claude") || + modelId.includes("anthropic")) + ) { + reasoningConfig.budgetTokens = budgetTokens + } + // Nova models: use maxReasoningEffort (low/medium/high) + else if ( + reasoningEffort && + (modelId.includes("nova") || modelId.includes("amazon")) + ) { + reasoningConfig.maxReasoningEffort = reasoningEffort as + | "low" + | "medium" + | "high" + } + + options.bedrock = { reasoningConfig } + } + break + } + + case "ollama": { + const enableThinking = process.env.OLLAMA_ENABLE_THINKING + // Ollama supports reasoning with think: true for models like qwen3 + if (enableThinking === "true") { + options.ollama = { think: true } + } + break + } + + case "deepseek": + case "openrouter": + case "siliconflow": { + // These providers don't have reasoning configs in AI SDK yet + break + } + + default: + break + } + + return Object.keys(options).length > 0 ? options : undefined +} + // Map of provider to required environment variable const PROVIDER_ENV_VARS: Record = { bedrock: null, // AWS SDK auto-uses IAM role on AWS, or env vars locally @@ -205,6 +494,9 @@ export function getAIModel(overrides?: ClientOverrides): ModelConfig { let providerOptions: any let headers: Record | undefined + // Build provider-specific options from environment variables + const customProviderOptions = buildProviderOptions(provider, modelId) + switch (provider) { case "bedrock": { // Use credential provider chain for IAM role support (Lambda, EC2, etc.) 
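// Annotation (illustration only, not part of the patch): the parseIntSafe contract added in the
// hunk above. Unset vars stay undefined; malformed or out-of-range values now fail fast instead
// of silently becoming NaN, which is the bug the commit message calls out.
//
//   parseIntSafe(undefined, "BEDROCK_REASONING_BUDGET_TOKENS", 1024, 64000)  // undefined
//   parseIntSafe("12000", "BEDROCK_REASONING_BUDGET_TOKENS", 1024, 64000)    // 12000
//   parseIntSafe("abc", "BEDROCK_REASONING_BUDGET_TOKENS", 1024, 64000)      // throws: not a valid integer
//   parseIntSafe("512", "BEDROCK_REASONING_BUDGET_TOKENS", 1024, 64000)      // throws: must be >= 1024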
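// Annotation (illustration only, not part of the patch): what the bedrock path produces at runtime.
// For a Claude model ID (one containing "claude" or "anthropic") with
// BEDROCK_REASONING_BUDGET_TOKENS=12000, buildProviderOptions("bedrock", modelId) above returns
// { bedrock: { reasoningConfig: { type: "enabled", budgetTokens: 12000 } } }. The hunk below then
// deep-merges that object with BEDROCK_ANTHROPIC_BETA.bedrock (defined earlier in this file and
// assumed here to carry the anthropic-beta flags), so the request keeps both settings:
//
//   const providerOptions = {
//       bedrock: {
//           ...BEDROCK_ANTHROPIC_BETA.bedrock,  // assumed: anthropic-beta flags
//           reasoningConfig: { type: "enabled", budgetTokens: 12000 },
//       },
//   }
//
// A shallow top-level spread would instead let one bedrock object overwrite the other, losing
// either the beta flags or the reasoning config.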
@@ -216,7 +508,15 @@ export function getAIModel(overrides?: ClientOverrides): ModelConfig { model = bedrockProvider(modelId) // Add Anthropic beta options if using Claude models via Bedrock if (modelId.includes("anthropic.claude")) { - providerOptions = BEDROCK_ANTHROPIC_BETA + // Deep merge to preserve both anthropicBeta and reasoningConfig + providerOptions = { + bedrock: { + ...BEDROCK_ANTHROPIC_BETA.bedrock, + ...(customProviderOptions?.bedrock || {}), + }, + } + } else if (customProviderOptions) { + providerOptions = customProviderOptions } break } @@ -342,6 +642,11 @@ export function getAIModel(overrides?: ClientOverrides): ModelConfig { ) } + // Apply provider-specific options for all providers except bedrock (which has special handling) + if (customProviderOptions && provider !== "bedrock" && !providerOptions) { + providerOptions = customProviderOptions + } + return { model, providerOptions, headers, modelId } } diff --git a/package-lock.json b/package-lock.json index a2fb1f3..bc8950a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -24,6 +24,7 @@ "@openrouter/ai-sdk-provider": "^1.2.3", "@opentelemetry/exporter-trace-otlp-http": "^0.208.0", "@opentelemetry/sdk-trace-node": "^2.2.0", + "@radix-ui/react-collapsible": "^1.1.12", "@radix-ui/react-dialog": "^1.1.6", "@radix-ui/react-label": "^2.1.8", "@radix-ui/react-scroll-area": "^1.2.3", @@ -31,6 +32,7 @@ "@radix-ui/react-slot": "^1.1.2", "@radix-ui/react-switch": "^1.2.6", "@radix-ui/react-tooltip": "^1.1.8", + "@radix-ui/react-use-controllable-state": "^1.2.2", "@vercel/analytics": "^1.5.0", "@xmldom/xmldom": "^0.9.8", "ai": "^5.0.89", @@ -40,6 +42,7 @@ "js-tiktoken": "^1.0.21", "jsdom": "^26.0.0", "lucide-react": "^0.483.0", + "motion": "^12.23.25", "next": "^16.0.7", "ollama-ai-provider-v2": "^1.5.4", "pako": "^2.1.0", @@ -3026,6 +3029,170 @@ } } }, + "node_modules/@radix-ui/react-collapsible": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collapsible/-/react-collapsible-1.1.12.tgz", + "integrity": "sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/primitive": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", + "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==", + "license": "MIT" + }, + "node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-compose-refs": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz", + "integrity": "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==", + "license": "MIT", + "peerDependencies": { + 
"@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-id": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.1.tgz", + "integrity": "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-presence": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", + "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collapsible/node_modules/@radix-ui/react-use-layout-effect": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz", + "integrity": "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-collection": { "version": "1.1.7", "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.7.tgz", @@ -3189,6 +3356,24 @@ } } }, + "node_modules/@radix-ui/react-dialog/node_modules/@radix-ui/react-use-controllable-state": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.1.0.tgz", + "integrity": "sha512-MtfMVJiSr2NjzS0Aa90NPTnvTSg6C/JLCV7ma0W6+OMV78vd8OyRpID+Ng9LxzsPbLeuBnWBA1Nq30AtBIDChw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-callback-ref": "1.1.0" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-direction": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.0.tgz", @@ -3822,25 +4007,6 @@ } } }, - "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-use-controllable-state": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz", - "integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-use-effect-event": "0.0.2", - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-use-escape-keydown": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.1.tgz", @@ -4063,25 +4229,6 @@ } } }, - "node_modules/@radix-ui/react-switch/node_modules/@radix-ui/react-use-controllable-state": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz", - "integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-use-effect-event": "0.0.2", - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, "node_modules/@radix-ui/react-switch/node_modules/@radix-ui/react-use-layout-effect": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz", @@ -4149,6 +4296,24 @@ } } }, + "node_modules/@radix-ui/react-tooltip/node_modules/@radix-ui/react-use-controllable-state": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.1.0.tgz", + "integrity": "sha512-MtfMVJiSr2NjzS0Aa90NPTnvTSg6C/JLCV7ma0W6+OMV78vd8OyRpID+Ng9LxzsPbLeuBnWBA1Nq30AtBIDChw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-callback-ref": "1.1.0" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-use-callback-ref": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.0.tgz", @@ -4165,12 +4330,13 @@ } }, "node_modules/@radix-ui/react-use-controllable-state": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.1.0.tgz", - "integrity": "sha512-MtfMVJiSr2NjzS0Aa90NPTnvTSg6C/JLCV7ma0W6+OMV78vd8OyRpID+Ng9LxzsPbLeuBnWBA1Nq30AtBIDChw==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz", + "integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==", "license": "MIT", "dependencies": { - "@radix-ui/react-use-callback-ref": "1.1.0" + "@radix-ui/react-use-effect-event": "0.0.2", + "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", @@ -4182,6 +4348,21 @@ } } }, + "node_modules/@radix-ui/react-use-controllable-state/node_modules/@radix-ui/react-use-layout-effect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz", + "integrity": "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-use-effect-event": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/@radix-ui/react-use-effect-event/-/react-use-effect-event-0.0.2.tgz", @@ -7850,6 +8031,33 @@ "node": ">= 6" } }, + "node_modules/framer-motion": { + "version": "12.23.25", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-12.23.25.tgz", + "integrity": "sha512-gUHGl2e4VG66jOcH0JHhuJQr6ZNwrET9g31ZG0xdXzT0CznP7fHX4P8Bcvuc4MiUB90ysNnWX2ukHRIggkl6hQ==", + "license": "MIT", + "dependencies": { + "motion-dom": "^12.23.23", + "motion-utils": "^12.23.6", + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -10357,6 +10565,47 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/motion": { + "version": "12.23.25", + "resolved": "https://registry.npmjs.org/motion/-/motion-12.23.25.tgz", + "integrity": "sha512-Fk5Y1kcgxYiTYOUjmwfXQAP7tP+iGqw/on1UID9WEL/6KpzxPr9jY2169OsjgZvXJdpraKXy0orkjaCVIl5fgQ==", + "license": "MIT", + "dependencies": { + "framer-motion": "^12.23.25", + "tslib": 
"^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, + "node_modules/motion-dom": { + "version": "12.23.23", + "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-12.23.23.tgz", + "integrity": "sha512-n5yolOs0TQQBRUFImrRfs/+6X4p3Q4n1dUEqt/H58Vx7OW6RF+foWEgmTVDhIWJIMXOuNNL0apKH2S16en9eiA==", + "license": "MIT", + "dependencies": { + "motion-utils": "^12.23.6" + } + }, + "node_modules/motion-utils": { + "version": "12.23.6", + "resolved": "https://registry.npmjs.org/motion-utils/-/motion-utils-12.23.6.tgz", + "integrity": "sha512-eAWoPgr4eFEOFfg2WjIsMoqJTW6Z8MTUCgn/GZ3VRpClWBdnbjryiA3ZSNLyxCTmCQx4RmYX6jX1iWHbenUPNQ==", + "license": "MIT" + }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -12047,9 +12296,9 @@ "license": "MIT" }, "node_modules/tailwind-merge": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.1.0.tgz", - "integrity": "sha512-aV27Oj8B7U/tAOMhJsSGdWqelfmudnGMdXIlMnk1JfsjwSjts6o8HyfN7SFH3EztzH4YH8kk6GbLTHzITJO39Q==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.4.0.tgz", + "integrity": "sha512-uSaO4gnW+b3Y2aWoWfFpX62vn2sR3skfhbjsEnaBI81WD1wBLlHZe5sWf0AqjksNdYTbGBEd0UasQMT3SNV15g==", "license": "MIT", "funding": { "type": "github", diff --git a/package.json b/package.json index 1e719ee..f8c2860 100644 --- a/package.json +++ b/package.json @@ -28,6 +28,7 @@ "@openrouter/ai-sdk-provider": "^1.2.3", "@opentelemetry/exporter-trace-otlp-http": "^0.208.0", "@opentelemetry/sdk-trace-node": "^2.2.0", + "@radix-ui/react-collapsible": "^1.1.12", "@radix-ui/react-dialog": "^1.1.6", "@radix-ui/react-label": "^2.1.8", "@radix-ui/react-scroll-area": "^1.2.3", @@ -35,6 +36,7 @@ "@radix-ui/react-slot": "^1.1.2", "@radix-ui/react-switch": "^1.2.6", "@radix-ui/react-tooltip": "^1.1.8", + "@radix-ui/react-use-controllable-state": "^1.2.2", "@vercel/analytics": "^1.5.0", "@xmldom/xmldom": "^0.9.8", "ai": "^5.0.89", @@ -44,6 +46,7 @@ "js-tiktoken": "^1.0.21", "jsdom": "^26.0.0", "lucide-react": "^0.483.0", + "motion": "^12.23.25", "next": "^16.0.7", "ollama-ai-provider-v2": "^1.5.4", "pako": "^2.1.0",