import { createAmazonBedrock } from "@ai-sdk/amazon-bedrock"
import { createAnthropic } from "@ai-sdk/anthropic"
import { azure, createAzure } from "@ai-sdk/azure"
import { createDeepSeek, deepseek } from "@ai-sdk/deepseek"
import { createGoogleGenerativeAI, google } from "@ai-sdk/google"
import { createOpenAI, openai } from "@ai-sdk/openai"
import { fromNodeProviderChain } from "@aws-sdk/credential-providers"
import { createOpenRouter } from "@openrouter/ai-sdk-provider"
import { createOllama, ollama } from "ollama-ai-provider-v2"

export type ProviderName =
  | "bedrock"
  | "openai"
  | "anthropic"
  | "google"
  | "azure"
  | "ollama"
  | "openrouter"
  | "deepseek"
  | "siliconflow"

interface ModelConfig {
  model: any
  providerOptions?: any
  headers?: Record<string, string>
  modelId: string
}
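// Illustrative only (values assumed, not used anywhere in this module): with
// AI_PROVIDER=anthropic and AI_MODEL=claude-sonnet-4-5, getAIModel() below
// returns roughly:
//   {
//     model: <Anthropic chat model instance>,
//     providerOptions: undefined,
//     headers: { "anthropic-beta": "fine-grained-tool-streaming-2025-05-14" },
//     modelId: "claude-sonnet-4-5",
//   }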
// Bedrock provider options for Anthropic beta features
const BEDROCK_ANTHROPIC_BETA = {
  bedrock: {
    anthropicBeta: ["fine-grained-tool-streaming-2025-05-14"],
  },
}

// Direct Anthropic API headers for beta features
const ANTHROPIC_BETA_HEADERS = {
  "anthropic-beta": "fine-grained-tool-streaming-2025-05-14",
}

// Map of provider to required environment variable
const PROVIDER_ENV_VARS: Record<ProviderName, string | null> = {
  bedrock: null, // AWS SDK auto-uses IAM role on AWS, or env vars locally
  openai: "OPENAI_API_KEY",
  anthropic: "ANTHROPIC_API_KEY",
  google: "GOOGLE_GENERATIVE_AI_API_KEY",
  azure: "AZURE_API_KEY",
  ollama: null, // No credentials needed for local Ollama
  openrouter: "OPENROUTER_API_KEY",
  deepseek: "DEEPSEEK_API_KEY",
  siliconflow: "SILICONFLOW_API_KEY",
}

/**
 * Auto-detect the provider based on available API keys.
 * Returns the provider if exactly one is configured, otherwise null.
 */
function detectProvider(): ProviderName | null {
  const configuredProviders: ProviderName[] = []

  for (const [provider, envVar] of Object.entries(PROVIDER_ENV_VARS)) {
    if (envVar === null) {
      // Skip providers that don't require an API key (bedrock, ollama)
      continue
    }
    if (process.env[envVar]) {
      configuredProviders.push(provider as ProviderName)
    }
  }

  if (configuredProviders.length === 1) {
    return configuredProviders[0]
  }

  return null
}
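// Behavior sketch (assumed example values, not exercised anywhere in this file):
//   only OPENAI_API_KEY set                  -> detectProvider() === "openai"
//   OPENAI_API_KEY and ANTHROPIC_API_KEY set -> detectProvider() === null (caller must set AI_PROVIDER)
//   no API keys set                          -> detectProvider() === null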
/**
 * Validate that required API keys are present for the selected provider
 */
function validateProviderCredentials(provider: ProviderName): void {
  const requiredVar = PROVIDER_ENV_VARS[provider]
  if (requiredVar && !process.env[requiredVar]) {
    throw new Error(
      `${requiredVar} environment variable is required for ${provider} provider. ` +
        `Please set it in your .env.local file.`,
    )
  }
}

/**
 * Get the AI model based on environment variables
 *
 * Environment variables:
 * - AI_PROVIDER: The provider to use (bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow)
 * - AI_MODEL: The model ID/name for the selected provider
 *
 * Provider-specific env vars:
 * - OPENAI_API_KEY: OpenAI API key
 * - OPENAI_BASE_URL: Custom OpenAI-compatible endpoint (optional)
 * - ANTHROPIC_API_KEY: Anthropic API key
 * - ANTHROPIC_BASE_URL: Custom Anthropic endpoint (optional)
 * - GOOGLE_GENERATIVE_AI_API_KEY: Google API key
 * - GOOGLE_BASE_URL: Custom Google endpoint (optional)
 * - AZURE_RESOURCE_NAME, AZURE_API_KEY: Azure OpenAI credentials
 * - AZURE_BASE_URL: Custom Azure endpoint (optional)
 * - AWS_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY: AWS Bedrock credentials
 * - OLLAMA_BASE_URL: Ollama server URL (optional, defaults to http://localhost:11434)
 * - OPENROUTER_API_KEY: OpenRouter API key
 * - OPENROUTER_BASE_URL: Custom OpenRouter endpoint (optional)
 * - DEEPSEEK_API_KEY: DeepSeek API key
 * - DEEPSEEK_BASE_URL: DeepSeek endpoint (optional)
 * - SILICONFLOW_API_KEY: SiliconFlow API key
 * - SILICONFLOW_BASE_URL: SiliconFlow endpoint (optional, defaults to https://api.siliconflow.com/v1)
 */
export function getAIModel(): ModelConfig {
  const modelId = process.env.AI_MODEL

  if (!modelId) {
    throw new Error(
      `AI_MODEL environment variable is required. Example: AI_MODEL=claude-sonnet-4-5`,
    )
  }

  // Determine provider: explicit config > auto-detect > error
  let provider: ProviderName
  if (process.env.AI_PROVIDER) {
    provider = process.env.AI_PROVIDER as ProviderName
  } else {
    const detected = detectProvider()
    if (detected) {
      provider = detected
      console.log(`[AI Provider] Auto-detected provider: ${provider}`)
    } else {
      // List configured providers for a better error message
      const configured = Object.entries(PROVIDER_ENV_VARS)
        .filter(([, envVar]) => envVar && process.env[envVar as string])
        .map(([p]) => p)

      if (configured.length === 0) {
        throw new Error(
          `No AI provider configured. Please set one of the following API keys in your .env.local file:\n` +
            `- DEEPSEEK_API_KEY for DeepSeek\n` +
            `- OPENAI_API_KEY for OpenAI\n` +
            `- ANTHROPIC_API_KEY for Anthropic\n` +
            `- GOOGLE_GENERATIVE_AI_API_KEY for Google\n` +
            `- AWS_ACCESS_KEY_ID (with AI_PROVIDER=bedrock) for Bedrock\n` +
            `- OPENROUTER_API_KEY for OpenRouter\n` +
            `- AZURE_API_KEY for Azure\n` +
            `- SILICONFLOW_API_KEY for SiliconFlow\n` +
            `Or set AI_PROVIDER=ollama for local Ollama.`,
        )
      } else {
        throw new Error(
          `Multiple AI providers configured (${configured.join(", ")}). ` +
            `Please set AI_PROVIDER to specify which one to use.`,
        )
      }
    }
  }
  // Validate provider credentials
  validateProviderCredentials(provider)

  console.log(`[AI Provider] Initializing ${provider} with model: ${modelId}`)

  let model: any
  let providerOptions: any
  let headers: Record<string, string> | undefined

  switch (provider) {
    case "bedrock": {
      // Use credential provider chain for IAM role support (Amplify, Lambda, etc.)
      // Falls back to env vars (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) for local dev
      const bedrockProvider = createAmazonBedrock({
        region: process.env.AWS_REGION || "us-west-2",
        credentialProvider: fromNodeProviderChain(),
      })
      model = bedrockProvider(modelId)
      // Add Anthropic beta options if using Claude models via Bedrock
      if (modelId.includes("anthropic.claude")) {
        providerOptions = BEDROCK_ANTHROPIC_BETA
      }
      break
    }

    case "openai":
      if (process.env.OPENAI_BASE_URL) {
        const customOpenAI = createOpenAI({
          apiKey: process.env.OPENAI_API_KEY,
          baseURL: process.env.OPENAI_BASE_URL,
        })
        model = customOpenAI.chat(modelId)
      } else {
        model = openai(modelId)
      }
      break

    case "anthropic": {
      const customProvider = createAnthropic({
        apiKey: process.env.ANTHROPIC_API_KEY,
        baseURL:
          process.env.ANTHROPIC_BASE_URL ||
          "https://api.anthropic.com/v1",
        headers: ANTHROPIC_BETA_HEADERS,
      })
      model = customProvider(modelId)
      // Add beta headers for fine-grained tool streaming
      headers = ANTHROPIC_BETA_HEADERS
      break
    }

    case "google":
      if (process.env.GOOGLE_BASE_URL) {
        const customGoogle = createGoogleGenerativeAI({
          apiKey: process.env.GOOGLE_GENERATIVE_AI_API_KEY,
          baseURL: process.env.GOOGLE_BASE_URL,
        })
        model = customGoogle(modelId)
      } else {
        model = google(modelId)
      }
      break

    case "azure":
      if (process.env.AZURE_BASE_URL) {
        const customAzure = createAzure({
          apiKey: process.env.AZURE_API_KEY,
          baseURL: process.env.AZURE_BASE_URL,
        })
        model = customAzure(modelId)
      } else {
        model = azure(modelId)
      }
      break

    case "ollama":
      if (process.env.OLLAMA_BASE_URL) {
        const customOllama = createOllama({
          baseURL: process.env.OLLAMA_BASE_URL,
        })
        model = customOllama(modelId)
      } else {
        model = ollama(modelId)
      }
      break

    case "openrouter": {
      const openrouter = createOpenRouter({
        apiKey: process.env.OPENROUTER_API_KEY,
        ...(process.env.OPENROUTER_BASE_URL && {
          baseURL: process.env.OPENROUTER_BASE_URL,
        }),
      })
      model = openrouter(modelId)
      break
    }

    case "deepseek":
      if (process.env.DEEPSEEK_BASE_URL) {
        const customDeepSeek = createDeepSeek({
          apiKey: process.env.DEEPSEEK_API_KEY,
          baseURL: process.env.DEEPSEEK_BASE_URL,
        })
        model = customDeepSeek(modelId)
      } else {
        model = deepseek(modelId)
      }
      break

    case "siliconflow": {
      const siliconflowProvider = createOpenAI({
        apiKey: process.env.SILICONFLOW_API_KEY,
        baseURL:
          process.env.SILICONFLOW_BASE_URL ||
          "https://api.siliconflow.com/v1",
      })
      model = siliconflowProvider.chat(modelId)
      break
    }

    default:
      throw new Error(
        `Unknown AI provider: ${provider}. Supported providers: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow`,
      )
  }

  return { model, providerOptions, headers, modelId }
}
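// Usage sketch (illustrative, not part of this module): a caller such as a
// Next.js route handler might consume ModelConfig with the AI SDK's streamText.
// The import path and the `messages` variable below are assumptions.
//
//   import { streamText } from "ai"
//   import { getAIModel } from "@/lib/ai-provider"
//
//   const { model, providerOptions, headers } = getAIModel()
//   const result = streamText({ model, providerOptions, headers, messages })
//
// Example .env.local (values are placeholders):
//   AI_PROVIDER=anthropic
//   AI_MODEL=claude-sonnet-4-5
//   ANTHROPIC_API_KEY=...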