add multiple providers

dayuan.jiang
2025-11-15 13:36:42 +09:00
parent e53f77a2a6
commit 4a3abc2e39
6 changed files with 450 additions and 135 deletions


@@ -26,6 +26,86 @@ The application uses the following technologies:
Diagrams are represented as XML that can be rendered in draw.io. The AI processes your commands and generates or modifies this XML accordingly.
## Multi-Provider Support
This application supports multiple AI providers, making it easy to deploy with your preferred service. Choose from:
### Supported Providers
| Provider | Status | Best For |
|----------|--------|----------|
| **AWS Bedrock** | ✅ Default | Claude models via AWS infrastructure |
| **OpenAI** | ✅ Supported | GPT-4, GPT-5, and reasoning models |
| **Anthropic** | ✅ Supported | Direct access to Claude models |
| **Google AI** | ✅ Supported | Gemini models with multi-modal capabilities |
| **Azure OpenAI** | ✅ Supported | Enterprise OpenAI deployments |
| **Ollama** | ✅ Supported | Local/self-hosted open source models |
### Quick Setup by Provider
#### AWS Bedrock (Default)
```bash
AI_PROVIDER=bedrock
AI_MODEL=global.anthropic.claude-sonnet-4-5-20250929-v1:0
AWS_REGION=us-east-1
AWS_ACCESS_KEY_ID=your-access-key
AWS_SECRET_ACCESS_KEY=your-secret-key
```
#### OpenAI
```bash
AI_PROVIDER=openai
AI_MODEL=gpt-4o
OPENAI_API_KEY=sk-...
```
#### Anthropic
```bash
AI_PROVIDER=anthropic
AI_MODEL=claude-sonnet-4-5
ANTHROPIC_API_KEY=sk-ant-...
```
#### Google Generative AI
```bash
AI_PROVIDER=google
AI_MODEL=gemini-2.5-flash
GOOGLE_GENERATIVE_AI_API_KEY=...
```
#### Azure OpenAI
```bash
AI_PROVIDER=azure
AI_MODEL=your-deployment-name
AZURE_RESOURCE_NAME=your-resource
AZURE_API_KEY=...
```
#### Ollama (Local)
```bash
AI_PROVIDER=ollama
AI_MODEL=phi3
OLLAMA_BASE_URL=http://localhost:11434/api # Optional
```
Note: Install models locally first with `ollama pull <model-name>`. The chosen model must support tool calling for this app to work.
### Recommended Models
**Best Quality:**
- AWS Bedrock: `global.anthropic.claude-sonnet-4-5-20250929-v1:0`
- Anthropic: `claude-sonnet-4-5`
- OpenAI: `gpt-4o` or `gpt-5`
**Best Speed:**
- Google: `gemini-2.5-flash`
- OpenAI: `gpt-4o`
- Anthropic: `claude-haiku-4-5`
**Best Cost:**
- Ollama: Free (local models)
- Google: `gemini-1.5-flash-8b`
- OpenAI: `gpt-4o-mini`
## Getting Started
### Installation
@@ -45,15 +125,20 @@ npm install
yarn install
```
3. Create a `.env.local` file in the root directory. You can use `env.example` as a template:
3. Configure your AI provider:
Create a `.env.local` file in the root directory:
```bash
cp env.example .env.local
cp .env.example .env.local
```
Then update `.env.local` with your actual API keys:
Edit `.env.local` and configure your chosen provider:
- Set `AI_PROVIDER` to your chosen provider (bedrock, openai, anthropic, google, azure, ollama)
- Set `AI_MODEL` to the specific model you want to use
- Add the required API keys for your provider
Note: Not all variables are required. At minimum, you'll need at least one AI provider API key (OpenAI, Google, or OpenRouter).
See the [Multi-Provider Support](#multi-provider-support) section above for provider-specific configuration examples.
4. Run the development server:
@@ -90,7 +175,7 @@ public/ # Static assets including example images
- [x] Allow the LLM to modify the XML instead of generating it from scratch every time.
- [x] Improve the smoothness of shape streaming updates.
- [ ] Add multiple AI provider support (Google PaLM, Anthropic Claude, etc.)
- [x] Add multiple AI provider support (OpenAI, Anthropic, Google, Azure, Ollama)
## License


@@ -1,16 +1,8 @@
import { bedrock } from '@ai-sdk/amazon-bedrock';
import { openai } from '@ai-sdk/openai';
import { google } from '@ai-sdk/google';
import { smoothStream, streamText, convertToModelMessages } from 'ai';
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { createOpenAI } from '@ai-sdk/openai';
import { streamText, convertToModelMessages } from 'ai';
import { getAIModel } from '@/lib/ai-providers';
import { z } from "zod";
import { z } from "zod/v3";
import { replaceXMLParts } from "@/lib/utils";
export const maxDuration = 60
const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });
export const maxDuration = 60;
export async function POST(req: Request) {
try {
@@ -127,34 +119,14 @@ ${lastMessageText}
console.log("Enhanced messages:", enhancedMessages);
// Get AI model from environment configuration
const { model, providerOptions } = getAIModel();
const result = streamText({
// model: google("gemini-2.5-flash-preview-05-20"),
// model: google("gemini-2.5-pro"),
// model: bedrock('anthropic.claude-sonnet-4-20250514-v1:0'),
model,
system: systemMessage,
model: bedrock('global.anthropic.claude-sonnet-4-5-20250929-v1:0'),
// model: openrouter('moonshotai/kimi-k2:free'),
// model: model,
// providerOptions: {
// google: {
// thinkingConfig: {
// thinkingBudget: 128,
// },
// }
// },
// providerOptions: {
// openai: {
// reasoningEffort: "minimal"
// },
// },
providerOptions: {
anthropic: {
additionalModelRequestFields: {
anthropic_beta: ['fine-grained-tool-streaming-2025-05-14']
}
}
},
messages: enhancedMessages,
...(providerOptions && { providerOptions }),
tools: {
// Client-side tool that will be executed on the client
display_diagram: {


@@ -1,9 +1,83 @@
# add the needed api
# AI Provider Configuration
# Choose your AI provider and model
GOOGLE_GENERATIVE_AI_API_KEY="your-google-api-key-here"
OPENAI_API_KEY="your-openai-api-key-here"
PERSONAL_ACCESS_TOKEN="your-github-personal-access-token-here"
AWS_ACCESS_KEY_ID=your-aws-access-key-id
AWS_SECRET_ACCESS_KEY=your-aws-secret-access-key
AWS_REGION=your-region
OPENROUTER_API_KEY="your-openrouter-api-key-here"
# AI_PROVIDER: Which provider to use
# Options: bedrock, openai, anthropic, google, azure, ollama
# Default: bedrock
AI_PROVIDER=bedrock
# AI_MODEL: The model ID for your chosen provider (REQUIRED)
# See examples below for each provider
AI_MODEL=global.anthropic.claude-sonnet-4-5-20250929-v1:0
# ===========================================
# AWS Bedrock Configuration
# ===========================================
# AWS_REGION=us-east-1
# AWS_ACCESS_KEY_ID=your-access-key-id
# AWS_SECRET_ACCESS_KEY=your-secret-access-key
# Popular Bedrock Models (Claude via Bedrock):
# - global.anthropic.claude-sonnet-4-5-20250929-v1:0 (Latest Claude Sonnet 4.5)
# - anthropic.claude-sonnet-4-20250514-v1:0 (Claude Sonnet 4)
# - anthropic.claude-3-7-sonnet-20250219-v3:0 (Claude 3.7 Sonnet)
# - anthropic.claude-3-5-haiku-20241022-v1:0 (Claude 3.5 Haiku)
# ===========================================
# OpenAI Configuration
# ===========================================
# OPENAI_API_KEY=sk-...
# OPENAI_ORGANIZATION=org-... # Optional
# OPENAI_PROJECT=proj_... # Optional
# Popular OpenAI Models:
# - gpt-5 (GPT-5)
# - gpt-4o (GPT-4 Optimized)
# - gpt-4-turbo (GPT-4 Turbo)
# - o1 (Reasoning model)
# - o3 (Reasoning model)
# - o4 (Reasoning model)
# ===========================================
# Anthropic (Direct) Configuration
# ===========================================
# ANTHROPIC_API_KEY=sk-ant-...
# Popular Anthropic Models:
# - claude-sonnet-4-5 (Latest Claude Sonnet 4.5)
# - claude-haiku-4-5 (Claude Haiku 4.5)
# - claude-opus-4-1 (Claude Opus 4.1)
# - claude-3-7-sonnet-latest (Claude 3.7 Sonnet)
# - claude-3-5-haiku-latest (Claude 3.5 Haiku)
# ===========================================
# Google Generative AI Configuration
# ===========================================
# GOOGLE_GENERATIVE_AI_API_KEY=...
# Popular Google Models:
# - gemini-2.5-pro (Gemini 2.5 Pro)
# - gemini-2.5-flash (Gemini 2.5 Flash)
# - gemini-2.0-flash (Gemini 2.0 Flash)
# - gemini-1.5-pro (Gemini 1.5 Pro)
# - gemini-1.5-flash (Gemini 1.5 Flash)
# - gemini-1.5-flash-8b (Gemini 1.5 Flash 8B)
# ===========================================
# Azure OpenAI Configuration
# ===========================================
# AZURE_RESOURCE_NAME=your-resource-name
# AZURE_API_KEY=...
# Azure Model Configuration:
# Use your deployment name (not the model ID)
# Example: if you deployed GPT-4 as "my-gpt4-deployment"
# AI_MODEL=my-gpt4-deployment
# ===========================================
# Ollama (Local) Configuration
# ===========================================
# OLLAMA_BASE_URL=http://localhost:11434/api # Optional, defaults to localhost
# IMPORTANT: This app requires models that support tool calling (v2 specification)
# Note: Models must be installed locally with `ollama pull <model-name>`

lib/ai-providers.ts (new file)

@@ -0,0 +1,129 @@
import { bedrock } from '@ai-sdk/amazon-bedrock';
import { openai } from '@ai-sdk/openai';
import { anthropic } from '@ai-sdk/anthropic';
import { google } from '@ai-sdk/google';
import { azure } from '@ai-sdk/azure';
import { ollama } from 'ollama-ai-provider-v2';
export type ProviderName =
| 'bedrock'
| 'openai'
| 'anthropic'
| 'google'
| 'azure'
| 'ollama';
interface ModelConfig {
model: any;
providerOptions?: any;
}
// Anthropic beta headers for fine-grained tool streaming
const ANTHROPIC_BETA_OPTIONS = {
anthropic: {
additionalModelRequestFields: {
anthropic_beta: ['fine-grained-tool-streaming-2025-05-14']
}
}
};
/**
* Validate that required API keys are present for the selected provider
*/
function validateProviderCredentials(provider: ProviderName): void {
const requiredEnvVars: Record<ProviderName, string | null> = {
bedrock: 'AWS_ACCESS_KEY_ID',
openai: 'OPENAI_API_KEY',
anthropic: 'ANTHROPIC_API_KEY',
google: 'GOOGLE_GENERATIVE_AI_API_KEY',
azure: 'AZURE_API_KEY',
ollama: null, // No credentials needed for local Ollama
};
const requiredVar = requiredEnvVars[provider];
if (requiredVar && !process.env[requiredVar]) {
throw new Error(
`${requiredVar} environment variable is required for ${provider} provider. ` +
`Please set it in your .env.local file.`
);
}
}
/**
* Get the AI model based on environment variables
*
* Environment variables:
* - AI_PROVIDER: The provider to use (bedrock, openai, anthropic, google, azure, ollama)
* - AI_MODEL: The model ID/name for the selected provider
*
* Provider-specific env vars:
* - OPENAI_API_KEY: OpenAI API key
* - ANTHROPIC_API_KEY: Anthropic API key
* - GOOGLE_GENERATIVE_AI_API_KEY: Google API key
* - AZURE_RESOURCE_NAME, AZURE_API_KEY: Azure OpenAI credentials
* - AWS_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY: AWS Bedrock credentials
* - OLLAMA_BASE_URL: Ollama server URL (optional, defaults to http://localhost:11434)
*/
export function getAIModel(): ModelConfig {
const provider = (process.env.AI_PROVIDER || 'bedrock') as ProviderName;
const modelId = process.env.AI_MODEL;
if (!modelId) {
throw new Error(
`AI_MODEL environment variable is required. Example: AI_MODEL=claude-sonnet-4-5`
);
}
// Validate provider credentials
validateProviderCredentials(provider);
// Log initialization for debugging
console.log(`[AI Provider] Initializing ${provider} with model: ${modelId}`);
let model: any;
let providerOptions: any = undefined;
switch (provider) {
case 'bedrock':
model = bedrock(modelId);
// Add Anthropic beta headers if using Claude models via Bedrock
if (modelId.includes('anthropic.claude')) {
providerOptions = ANTHROPIC_BETA_OPTIONS;
}
break;
case 'openai':
model = openai(modelId);
break;
case 'anthropic':
model = anthropic(modelId);
// Add beta headers for fine-grained tool streaming
providerOptions = ANTHROPIC_BETA_OPTIONS;
break;
case 'google':
model = google(modelId);
break;
case 'azure':
model = azure(modelId);
break;
case 'ollama':
model = ollama(modelId);
break;
default:
throw new Error(
`Unknown AI provider: ${provider}. Supported providers: bedrock, openai, anthropic, google, azure, ollama`
);
}
// Log if provider options are being applied
if (providerOptions) {
console.log('[AI Provider] Applying provider-specific options');
}
return { model, providerOptions };
}
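For context, here is a minimal sketch of how the new `getAIModel()` helper is consumed in a route handler, mirroring the route changes above. The system prompt, request shape, and response method shown here are illustrative placeholders, not part of this commit.

```typescript
// Usage sketch (illustrative): a Next.js route handler that resolves the
// provider/model pair from AI_PROVIDER / AI_MODEL once per request.
import { streamText, convertToModelMessages, type UIMessage } from 'ai';
import { getAIModel } from '@/lib/ai-providers';

export async function POST(req: Request) {
  const { messages }: { messages: UIMessage[] } = await req.json();

  // Throws early if AI_MODEL or the provider's credentials are missing.
  const { model, providerOptions } = getAIModel();

  const result = streamText({
    model,
    system: 'You are a helpful assistant.', // placeholder system prompt
    messages: convertToModelMessages(messages),
    // Attach provider-specific options (e.g. Anthropic beta headers) only when present.
    ...(providerOptions && { providerOptions }),
  });

  // Stream the response back to the client (AI SDK v5).
  return result.toUIMessageStreamResponse();
}
```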

package-lock.json (generated)

@@ -9,10 +9,11 @@
"version": "0.1.0",
"dependencies": {
"@ai-sdk/amazon-bedrock": "^3.0.52",
"@ai-sdk/anthropic": "^2.0.44",
"@ai-sdk/azure": "^2.0.69",
"@ai-sdk/google": "^2.0.0",
"@ai-sdk/openai": "^2.0.19",
"@ai-sdk/react": "^2.0.22",
"@openrouter/ai-sdk-provider": "^0.4.6",
"@radix-ui/react-dialog": "^1.1.6",
"@radix-ui/react-scroll-area": "^1.2.3",
"@radix-ui/react-slot": "^1.1.2",
@@ -26,6 +27,7 @@
"jsdom": "^26.0.0",
"lucide-react": "^0.483.0",
"next": "15.2.3",
"ollama-ai-provider-v2": "^1.5.4",
"pako": "^2.1.0",
"react": "^19.0.0",
"react-dom": "^19.0.0",
@@ -34,7 +36,7 @@
"remark-gfm": "^4.0.1",
"tailwind-merge": "^3.0.2",
"tailwindcss-animate": "^1.0.7",
"zod": "^3.25.76"
"zod": "^4.1.12"
},
"devDependencies": {
"@tailwindcss/postcss": "^4",
@@ -66,7 +68,7 @@
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/anthropic": {
"node_modules/@ai-sdk/amazon-bedrock/node_modules/@ai-sdk/anthropic": {
"version": "2.0.43",
"resolved": "https://registry.npmjs.org/@ai-sdk/anthropic/-/anthropic-2.0.43.tgz",
"integrity": "sha512-YQWYdoU6X1E16BS/KYCkage18q8sqj3FguCZ/RQs/wxS1551DVeD5DrWiYXxm5T293HzeAVJssQFEx67kc4LmA==",
@@ -82,6 +84,73 @@
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/anthropic": {
"version": "2.0.44",
"resolved": "https://registry.npmjs.org/@ai-sdk/anthropic/-/anthropic-2.0.44.tgz",
"integrity": "sha512-o8TfNXRzO/KZkBrcx+CL9LQsPhx7PHyqzUGjza3TJaF9WxfH1S5UQLAmEw8F7lQoHNLU0IX03WT8o8R/4JbUxQ==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "3.0.17"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/anthropic/node_modules/@ai-sdk/provider-utils": {
"version": "3.0.17",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.17.tgz",
"integrity": "sha512-TR3Gs4I3Tym4Ll+EPdzRdvo/rc8Js6c4nVhFLuvGLX/Y4V9ZcQMa/HTiYsHEgmYrf1zVi6Q145UEZUfleOwOjw==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@standard-schema/spec": "^1.0.0",
"eventsource-parser": "^3.0.6"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/azure": {
"version": "2.0.69",
"resolved": "https://registry.npmjs.org/@ai-sdk/azure/-/azure-2.0.69.tgz",
"integrity": "sha512-0Y+f0XHviWw9ixB2Dkqyg07V67oczUh8adh4B/t0LgVMVkvOsf/WEzfYx2/LDqdvI/o8IYyJ6JzsCKpBwbS61g==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/openai": "2.0.67",
"@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "3.0.17"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/azure/node_modules/@ai-sdk/provider-utils": {
"version": "3.0.17",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.17.tgz",
"integrity": "sha512-TR3Gs4I3Tym4Ll+EPdzRdvo/rc8Js6c4nVhFLuvGLX/Y4V9ZcQMa/HTiYsHEgmYrf1zVi6Q145UEZUfleOwOjw==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@standard-schema/spec": "^1.0.0",
"eventsource-parser": "^3.0.6"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/gateway": {
"version": "2.0.7",
"resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-2.0.7.tgz",
@@ -133,38 +202,46 @@
"zod": "^3.25.76 || ^4"
}
},
"node_modules/@ai-sdk/google/node_modules/@ai-sdk/provider-utils/node_modules/zod-to-json-schema": {
"version": "3.24.6",
"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz",
"integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==",
"license": "ISC",
"peerDependencies": {
"zod": "^3.24.1"
}
},
"node_modules/@ai-sdk/openai": {
"version": "2.0.19",
"resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-2.0.19.tgz",
"integrity": "sha512-sG3/IVaPvV7Vn6513I1bcJILHpLCXbVif2ht6CyROcB9FzXCJe2K5uRbAg30HWsdCEe7xu4OAWtMK6yWTOcsSA==",
"version": "2.0.67",
"resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-2.0.67.tgz",
"integrity": "sha512-JhB3fUpY+IxAocyJt2PHuhfNwH+e+rDbZ8Q+d0hgSyNycuPRrV0xutLaf7mgDTvjr5FCrVEkXmM73tJprzZMiA==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "3.0.5"
"@ai-sdk/provider-utils": "3.0.17"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4"
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/openai/node_modules/@ai-sdk/provider-utils": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.5.tgz",
"integrity": "sha512-HliwB/yzufw3iwczbFVE2Fiwf1XqROB/I6ng8EKUsPM5+2wnIa8f4VbljZcDx+grhFrPV+PnRZH7zBqi8WZM7Q==",
"version": "3.0.17",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.17.tgz",
"integrity": "sha512-TR3Gs4I3Tym4Ll+EPdzRdvo/rc8Js6c4nVhFLuvGLX/Y4V9ZcQMa/HTiYsHEgmYrf1zVi6Q145UEZUfleOwOjw==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@standard-schema/spec": "^1.0.0",
"eventsource-parser": "^3.0.3",
"zod-to-json-schema": "^3.24.1"
"eventsource-parser": "^3.0.6"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4"
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/provider": {
@@ -254,6 +331,15 @@
"zod": "^3.25.76 || ^4"
}
},
"node_modules/@ai-sdk/react/node_modules/@ai-sdk/provider-utils/node_modules/zod-to-json-schema": {
"version": "3.24.6",
"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz",
"integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==",
"license": "ISC",
"peerDependencies": {
"zod": "^3.24.1"
}
},
"node_modules/@ai-sdk/react/node_modules/ai": {
"version": "5.0.22",
"resolved": "https://registry.npmjs.org/ai/-/ai-5.0.22.tgz",
@@ -1027,57 +1113,6 @@
"node": ">= 10"
}
},
"node_modules/@openrouter/ai-sdk-provider": {
"version": "0.4.6",
"resolved": "https://registry.npmjs.org/@openrouter/ai-sdk-provider/-/ai-sdk-provider-0.4.6.tgz",
"integrity": "sha512-oUa8xtssyUhiKEU/aW662lsZ0HUvIUTRk8vVIF3Ha3KI/DnqX54zmVIuzYnaDpermqhy18CHqblAY4dDt1JW3g==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "1.0.9",
"@ai-sdk/provider-utils": "2.1.10"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.0.0"
}
},
"node_modules/@openrouter/ai-sdk-provider/node_modules/@ai-sdk/provider": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.0.9.tgz",
"integrity": "sha512-jie6ZJT2ZR0uVOVCDc9R2xCX5I/Dum/wEK28lx21PJx6ZnFAN9EzD2WsPhcDWfCgGx3OAZZ0GyM3CEobXpa9LA==",
"license": "Apache-2.0",
"dependencies": {
"json-schema": "^0.4.0"
},
"engines": {
"node": ">=18"
}
},
"node_modules/@openrouter/ai-sdk-provider/node_modules/@ai-sdk/provider-utils": {
"version": "2.1.10",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.1.10.tgz",
"integrity": "sha512-4GZ8GHjOFxePFzkl3q42AU0DQOtTQ5w09vmaWUf/pKFXJPizlnzKSUkF0f+VkapIUfDugyMqPMT1ge8XQzVI7Q==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "1.0.9",
"eventsource-parser": "^3.0.0",
"nanoid": "^3.3.8",
"secure-json-parse": "^2.7.0"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.0.0"
},
"peerDependenciesMeta": {
"zod": {
"optional": true
}
}
},
"node_modules/@opentelemetry/api": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",
@@ -3933,6 +3968,39 @@
"integrity": "sha512-/ieB+mDe4MrrKMT8z+mQL8klXydZWGR5Dowt4RAGKbJ3kIGEx3X4ljUo+6V73IXtUPWgfOlU5B9MlGxFO5T+cA==",
"license": "MIT"
},
"node_modules/ollama-ai-provider-v2": {
"version": "1.5.4",
"resolved": "https://registry.npmjs.org/ollama-ai-provider-v2/-/ollama-ai-provider-v2-1.5.4.tgz",
"integrity": "sha512-OTxzIvxW7GutgkyYe55Y4lJeUbnDjH1jDkAQhjGiynffkDn0wyWbv/dD92A8HX1ni5Ec+i+ksYMXXlVOYPQR4g==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "^2.0.0",
"@ai-sdk/provider-utils": "^3.0.17"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^4.0.16"
}
},
"node_modules/ollama-ai-provider-v2/node_modules/@ai-sdk/provider-utils": {
"version": "3.0.17",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.17.tgz",
"integrity": "sha512-TR3Gs4I3Tym4Ll+EPdzRdvo/rc8Js6c4nVhFLuvGLX/Y4V9ZcQMa/HTiYsHEgmYrf1zVi6Q145UEZUfleOwOjw==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@standard-schema/spec": "^1.0.0",
"eventsource-parser": "^3.0.6"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/pako": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz",
@@ -4182,12 +4250,6 @@
"integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==",
"license": "MIT"
},
"node_modules/secure-json-parse": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz",
"integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==",
"license": "BSD-3-Clause"
},
"node_modules/semver": {
"version": "7.7.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz",
@@ -4682,23 +4744,14 @@
"license": "MIT"
},
"node_modules/zod": {
"version": "3.25.76",
"resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
"version": "4.1.12",
"resolved": "https://registry.npmjs.org/zod/-/zod-4.1.12.tgz",
"integrity": "sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ==",
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}
},
"node_modules/zod-to-json-schema": {
"version": "3.24.6",
"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz",
"integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==",
"license": "ISC",
"peerDependencies": {
"zod": "^3.24.1"
}
},
"node_modules/zwitch": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz",


@@ -10,10 +10,11 @@
},
"dependencies": {
"@ai-sdk/amazon-bedrock": "^3.0.52",
"@ai-sdk/anthropic": "^2.0.44",
"@ai-sdk/azure": "^2.0.69",
"@ai-sdk/google": "^2.0.0",
"@ai-sdk/openai": "^2.0.19",
"@ai-sdk/react": "^2.0.22",
"@openrouter/ai-sdk-provider": "^0.4.6",
"@radix-ui/react-dialog": "^1.1.6",
"@radix-ui/react-scroll-area": "^1.2.3",
"@radix-ui/react-slot": "^1.1.2",
@@ -27,6 +28,7 @@
"jsdom": "^26.0.0",
"lucide-react": "^0.483.0",
"next": "15.2.3",
"ollama-ai-provider-v2": "^1.5.4",
"pako": "^2.1.0",
"react": "^19.0.0",
"react-dom": "^19.0.0",
@@ -35,7 +37,7 @@
"remark-gfm": "^4.0.1",
"tailwind-merge": "^3.0.2",
"tailwindcss-animate": "^1.0.7",
"zod": "^3.25.76"
"zod": "^4.1.12"
},
"devDependencies": {
"@tailwindcss/postcss": "^4",