feat: add Vercel AI Gateway support (#274)

* feat: add Vercel AI Gateway support

- Updated environment configuration to include AI_GATEWAY_API_KEY for unified access to multiple AI providers.
- Added gateway provider to the list of supported AI providers in the codebase.
- Enhanced documentation to explain the usage of Vercel AI Gateway and its model format.

This change simplifies authentication and allows users to switch between providers seamlessly.

* Update package @ai-sdk/gateway to latest version 2.0.21
Author: Ted Cao
Committed: 2025-12-16 22:43:33 -05:00 (via GitHub)
Parent: f039e4a3c8
Commit: 98b890bb06
5 changed files with 83 additions and 10 deletions

@@ -136,6 +136,23 @@ Optional custom URL:
OLLAMA_BASE_URL=http://localhost:11434
```
### Vercel AI Gateway
Vercel AI Gateway provides unified access to multiple AI providers through a single API key. This simplifies authentication and allows you to switch between providers without managing multiple API keys.
```bash
AI_GATEWAY_API_KEY=your_gateway_api_key
AI_MODEL=openai/gpt-4o
```
Model format uses `provider/model` syntax:
- `openai/gpt-4o` - OpenAI GPT-4o
- `anthropic/claude-sonnet-4-5` - Anthropic Claude Sonnet 4.5
- `google/gemini-2.0-flash` - Google Gemini 2.0 Flash
Get your API key from the [Vercel AI Gateway dashboard](https://vercel.com/ai-gateway).
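To make the model format concrete, here is a minimal TypeScript sketch (illustrative only, not part of this change) that calls a gateway model via the AI SDK's `generateText`; it assumes `AI_GATEWAY_API_KEY` is set in the environment:
```ts
import { gateway } from "@ai-sdk/gateway"
import { generateText } from "ai"

// Illustrative sketch: one API key (AI_GATEWAY_API_KEY), and switching providers
// is just a matter of changing the "provider/model" id passed to gateway().
const { text } = await generateText({
  model: gateway("openai/gpt-4o"),
  prompt: "Reply with a one-sentence greeting.",
})
console.log(text)
```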
## Auto-Detection
If you only configure **one** provider's API key, the system will automatically detect and use that provider. No need to set `AI_PROVIDER`.
@@ -143,7 +160,7 @@ If you only configure **one** provider's API key, the system will automatically
If you configure **multiple** API keys, you must explicitly set `AI_PROVIDER`:
```bash
AI_PROVIDER=google # or: openai, anthropic, deepseek, siliconflow, azure, bedrock, openrouter, ollama
AI_PROVIDER=google # or: openai, anthropic, deepseek, siliconflow, azure, bedrock, openrouter, ollama, gateway
```
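For illustration, a simplified sketch of the kind of detection flow described above (hypothetical code, not the project's actual implementation; the env-var names mirror those listed in the provider module below):
```ts
// Hypothetical sketch of single-provider auto-detection; names and messages are illustrative.
const PROVIDER_ENV_VARS: Record<string, string> = {
  openai: "OPENAI_API_KEY",
  anthropic: "ANTHROPIC_API_KEY",
  deepseek: "DEEPSEEK_API_KEY",
  gateway: "AI_GATEWAY_API_KEY",
  // ...remaining providers omitted
}

function resolveProvider(): string {
  // An explicit AI_PROVIDER always wins.
  if (process.env.AI_PROVIDER) return process.env.AI_PROVIDER

  // Otherwise, look at which API keys are present.
  const configured = Object.keys(PROVIDER_ENV_VARS).filter(
    (name) => !!process.env[PROVIDER_ENV_VARS[name]],
  )

  if (configured.length === 1) return configured[0]
  if (configured.length === 0) {
    throw new Error("No AI provider configured: set one of the API keys listed above")
  }
  throw new Error(
    `Multiple providers configured (${configured.join(", ")}); set AI_PROVIDER explicitly`,
  )
}
```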
## Model Capability Requirements

@@ -1,6 +1,6 @@
# AI Provider Configuration
# AI_PROVIDER: Which provider to use
# Options: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow
# Options: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow, gateway
# Default: bedrock
AI_PROVIDER=bedrock
@@ -68,6 +68,11 @@ AI_MODEL=global.anthropic.claude-sonnet-4-5-20250929-v1:0
# SILICONFLOW_API_KEY=sk-...
# SILICONFLOW_BASE_URL=https://api.siliconflow.com/v1 # Optional: switch to https://api.siliconflow.cn/v1 if needed
# Vercel AI Gateway Configuration
# Get your API key from: https://vercel.com/ai-gateway
# Model format: "provider/model" e.g., "openai/gpt-4o", "anthropic/claude-sonnet-4-5"
# AI_GATEWAY_API_KEY=...
# Langfuse Observability (Optional)
# Enable LLM tracing and analytics - https://langfuse.com
# LANGFUSE_PUBLIC_KEY=pk-lf-...
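By default the gateway provider reads `AI_GATEWAY_API_KEY` from the environment, as implied by the provider mapping below. As a rough sketch (assuming `@ai-sdk/gateway` exposes a `createGateway` factory in the usual AI SDK pattern), the key can also be passed explicitly:
```ts
import { createGateway } from "@ai-sdk/gateway"

// Sketch only: the default `gateway` instance picks up AI_GATEWAY_API_KEY from
// the environment; this assumed factory call passes the key explicitly instead.
const gateway = createGateway({
  apiKey: process.env.AI_GATEWAY_API_KEY,
})

const model = gateway("openai/gpt-4o")
```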

@@ -2,6 +2,7 @@ import { createAmazonBedrock } from "@ai-sdk/amazon-bedrock"
import { createAnthropic } from "@ai-sdk/anthropic"
import { azure, createAzure } from "@ai-sdk/azure"
import { createDeepSeek, deepseek } from "@ai-sdk/deepseek"
import { gateway } from "@ai-sdk/gateway"
import { createGoogleGenerativeAI, google } from "@ai-sdk/google"
import { createOpenAI, openai } from "@ai-sdk/openai"
import { fromNodeProviderChain } from "@aws-sdk/credential-providers"
@@ -18,6 +19,7 @@ export type ProviderName =
| "openrouter"
| "deepseek"
| "siliconflow"
| "gateway"
interface ModelConfig {
model: any
@@ -42,6 +44,7 @@ const ALLOWED_CLIENT_PROVIDERS: ProviderName[] = [
"openrouter",
"deepseek",
"siliconflow",
"gateway",
]
// Bedrock provider options for Anthropic beta features
@@ -333,8 +336,10 @@ function buildProviderOptions(
case "deepseek":
case "openrouter":
case "siliconflow": {
case "siliconflow":
case "gateway": {
// These providers don't have reasoning configs in AI SDK yet
// Gateway passes through to underlying providers which handle their own configs
break
}
@@ -356,6 +361,7 @@ const PROVIDER_ENV_VARS: Record<ProviderName, string | null> = {
openrouter: "OPENROUTER_API_KEY",
deepseek: "DEEPSEEK_API_KEY",
siliconflow: "SILICONFLOW_API_KEY",
gateway: "AI_GATEWAY_API_KEY",
}
/**
@@ -495,6 +501,7 @@ export function getAIModel(overrides?: ClientOverrides): ModelConfig {
if (configured.length === 0) {
throw new Error(
`No AI provider configured. Please set one of the following API keys in your .env.local file:\n` +
`- AI_GATEWAY_API_KEY for Vercel AI Gateway\n` +
`- DEEPSEEK_API_KEY for DeepSeek\n` +
`- OPENAI_API_KEY for OpenAI\n` +
`- ANTHROPIC_API_KEY for Anthropic\n` +
@@ -672,9 +679,17 @@ export function getAIModel(overrides?: ClientOverrides): ModelConfig {
break
}
case "gateway": {
// Vercel AI Gateway - unified access to multiple AI providers
// Model format: "provider/model" e.g., "openai/gpt-4o", "anthropic/claude-sonnet-4-5"
// See: https://vercel.com/ai-gateway
model = gateway(modelId)
break
}
default:
throw new Error(
`Unknown AI provider: ${provider}. Supported providers: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow`,
`Unknown AI provider: ${provider}. Supported providers: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow, gateway`,
)
}
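Because the gateway simply forwards requests to the underlying provider, provider-specific settings are not built in the switch above. A rough sketch of how they could instead be supplied at call time (assuming the AI SDK's `providerOptions` pass-through and Anthropic's thinking option are honored by the gateway; illustrative only):
```ts
import { gateway } from "@ai-sdk/gateway"
import { streamText } from "ai"

// Illustrative sketch: the model id names the underlying provider, and any
// provider-specific settings travel via providerOptions (assumed to be
// forwarded by the gateway rather than handled in buildProviderOptions).
const result = streamText({
  model: gateway("anthropic/claude-sonnet-4-5"),
  prompt: "Describe the request path through the AI gateway.",
  providerOptions: {
    anthropic: { thinking: { type: "enabled", budgetTokens: 2048 } },
  },
})

for await (const chunk of result.textStream) {
  process.stdout.write(chunk)
}
```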

package-lock.json (generated)

@@ -1,18 +1,19 @@
{
"name": "next-ai-draw-io",
"version": "0.4.0",
"version": "0.4.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "next-ai-draw-io",
"version": "0.4.0",
"version": "0.4.2",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/amazon-bedrock": "^3.0.70",
"@ai-sdk/anthropic": "^2.0.44",
"@ai-sdk/azure": "^2.0.69",
"@ai-sdk/deepseek": "^1.0.30",
"@ai-sdk/gateway": "^2.0.21",
"@ai-sdk/google": "^2.0.0",
"@ai-sdk/openai": "^2.0.19",
"@ai-sdk/react": "^2.0.107",
@@ -199,13 +200,13 @@
}
},
"node_modules/@ai-sdk/gateway": {
"version": "2.0.18",
"resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-2.0.18.tgz",
"integrity": "sha512-sDQcW+6ck2m0pTIHW6BPHD7S125WD3qNkx/B8sEzJp/hurocmJ5Cni0ybExg6sQMGo+fr/GWOwpHF1cmCdg5rQ==",
"version": "2.0.21",
"resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-2.0.21.tgz",
"integrity": "sha512-BwV7DU/lAm3Xn6iyyvZdWgVxgLu3SNXzl5y57gMvkW4nGhAOV5269IrJzQwGt03bb107sa6H6uJwWxc77zXoGA==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "3.0.18",
"@ai-sdk/provider-utils": "3.0.19",
"@vercel/oidc": "3.0.5"
},
"engines": {
@@ -215,6 +216,23 @@
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/gateway/node_modules/@ai-sdk/provider-utils": {
"version": "3.0.19",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.19.tgz",
"integrity": "sha512-W41Wc9/jbUVXVwCN/7bWa4IKe8MtxO3EyA0Hfhx6grnmiYlCvpI8neSYWFE0zScXJkgA/YK3BRybzgyiXuu6JA==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@standard-schema/spec": "^1.0.0",
"eventsource-parser": "^3.0.6"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/google": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-2.0.0.tgz",
@@ -6130,6 +6148,23 @@
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/ai/node_modules/@ai-sdk/gateway": {
"version": "2.0.18",
"resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-2.0.18.tgz",
"integrity": "sha512-sDQcW+6ck2m0pTIHW6BPHD7S125WD3qNkx/B8sEzJp/hurocmJ5Cni0ybExg6sQMGo+fr/GWOwpHF1cmCdg5rQ==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "3.0.18",
"@vercel/oidc": "3.0.5"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/ajv": {
"version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",

@@ -17,6 +17,7 @@
"@ai-sdk/anthropic": "^2.0.44",
"@ai-sdk/azure": "^2.0.69",
"@ai-sdk/deepseek": "^1.0.30",
"@ai-sdk/gateway": "^2.0.21",
"@ai-sdk/google": "^2.0.0",
"@ai-sdk/openai": "^2.0.19",
"@ai-sdk/react": "^2.0.107",