feat: add bring-your-own-API-key support (#186)

- Add AI provider settings to config panel (provider, model, API key, base URL)
- Support 7 providers: OpenAI, Anthropic, Google, Azure, OpenRouter, DeepSeek, SiliconFlow
- Client API keys stored in localStorage, never stored on server
- Client settings override server env vars when provided
- Skip server credential validation when client provides API key
- Bypass usage limits (request/token/TPM) when using own API key
- Add /api/config endpoint exposing access-code requirement and usage limits
- Add privacy notices to settings dialog, about pages, and quota toast
- Add clear settings button to reset saved API keys
- Update README files (EN/CN/JA) with BYOK documentation

Co-authored-by: dayuan.jiang <jiangdy@amazon.co.jp>
Author: Dayuan Jiang
Date: 2025-12-09 17:50:07 +09:00
Committed by: GitHub
Parent: 77cb10393b
Commit: 97ab82e027
12 changed files with 434 additions and 43 deletions
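At a glance, the diffs below wire the feature together roughly as follows. This is a condensed, non-verbatim sketch for orientation only; the storage keys, header names, and the `getAIModel` signature are taken from the changes in this commit, but the wiring is abbreviated:

```ts
// Condensed sketch of the BYOK flow in this commit (not verbatim code).

// Client (chat panel): read saved settings and attach them as request headers.
function clientOverrideHeaders(): Record<string, string> {
    const provider = localStorage.getItem("next-ai-draw-io-ai-provider") || ""
    const apiKey = localStorage.getItem("next-ai-draw-io-ai-api-key") || ""
    const model = localStorage.getItem("next-ai-draw-io-ai-model") || ""
    const baseUrl = localStorage.getItem("next-ai-draw-io-ai-base-url") || ""
    return {
        ...(provider && { "x-ai-provider": provider }),
        ...(apiKey && { "x-ai-api-key": apiKey }),
        ...(model && { "x-ai-model": model }),
        ...(baseUrl && { "x-ai-base-url": baseUrl }),
    }
}

// Server (chat route): the headers are forwarded to getAIModel(); client values
// take precedence over env vars, server credential validation is skipped, and
// the daily request/token and TPM limits are not enforced.
```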


@@ -96,6 +96,8 @@ No installation needed! Try the app directly on our demo site:
> Note: Due to high traffic, the demo site currently uses minimax-m2. For best results, we recommend self-hosting with Claude Sonnet 4.5 or Claude Opus 4.5.
> **Bring Your Own API Key**: You can use your own API key to bypass usage limits on the demo site. Click the Settings icon in the chat panel to configure your provider and API key. Your key is stored locally in your browser and is never stored on the server.
### Run with Docker (Recommended)
If you just want to run it locally, the best way is to use Docker.


@@ -170,6 +170,27 @@ export default function AboutCN() {
<div className="flex-1 h-px bg-gradient-to-r from-transparent via-amber-300 to-transparent" />
</div>
{/* Bring Your Own Key */}
<div className="text-center mb-5">
<h4 className="text-base font-bold text-gray-900 mb-2">
使用自己的 API Key
</h4>
<p className="text-sm text-gray-600 mb-2 max-w-md mx-auto">
您可以使用自己的 API Key 来绕过这些用量限制。
点击聊天面板中的设置图标,即可配置您的 Provider 和 API Key。
</p>
<p className="text-xs text-gray-500 max-w-md mx-auto">
您的 Key 仅保存在浏览器本地,不会被存储在服务器上。
</p>
</div>
{/* Divider */}
<div className="flex items-center gap-3 mb-5">
<div className="flex-1 h-px bg-gradient-to-r from-transparent via-amber-300 to-transparent" />
</div>
{/* Sponsorship CTA */}
<div className="text-center">
<h4 className="text-base font-bold text-gray-900 mb-2">


@@ -179,6 +179,24 @@ export default function AboutJA() {
<div className="flex-1 h-px bg-gradient-to-r from-transparent via-amber-300 to-transparent" />
</div>
{/* Bring Your Own Key */}
<div className="text-center mb-5">
<h4 className="text-base font-bold text-gray-900 mb-2">
自分のAPIキーを使用
</h4>
<p className="text-sm text-gray-600 mb-2 max-w-md mx-auto">
自分のAPIキーを使用することで、これらの利用制限を回避できます。チャットパネルの設定アイコンをクリックして、プロバイダーとAPIキーを設定してください。
</p>
<p className="text-xs text-gray-500 max-w-md mx-auto">
キーはブラウザのローカルに保存され、サーバーには保存されません。
</p>
</div>
{/* Divider */}
<div className="flex items-center gap-3 mb-5">
<div className="flex-1 h-px bg-gradient-to-r from-transparent via-amber-300 to-transparent" />
</div>
{/* Sponsorship CTA */}
<div className="text-center">
<h4 className="text-base font-bold text-gray-900 mb-2">


@@ -185,6 +185,28 @@ export default function About() {
<div className="flex-1 h-px bg-gradient-to-r from-transparent via-amber-300 to-transparent" />
</div>
{/* Bring Your Own Key */}
<div className="text-center mb-5">
<h4 className="text-base font-bold text-gray-900 mb-2">
Bring Your Own API Key
</h4>
<p className="text-sm text-gray-600 mb-2 max-w-md mx-auto">
You can use your own API key to bypass these
limits. Click the Settings icon in the chat
panel to configure your provider and API
key.
</p>
<p className="text-xs text-gray-500 max-w-md mx-auto">
Your key is stored locally in your browser
and is never stored on the server.
</p>
</div>
{/* Divider */}
<div className="flex items-center gap-3 mb-5">
<div className="flex-1 h-px bg-gradient-to-r from-transparent via-amber-300 to-transparent" />
</div>
{/* Sponsorship CTA */}
<div className="text-center">
<h4 className="text-base font-bold text-gray-900 mb-2">


@@ -199,8 +199,17 @@ async function handleChatRequest(req: Request): Promise<Response> {
}
// === CACHE CHECK END ===
- // Get AI model from environment configuration
- const { model, providerOptions, headers, modelId } = getAIModel()
+ // Read client AI provider overrides from headers
+ const clientOverrides = {
+ provider: req.headers.get("x-ai-provider"),
+ baseUrl: req.headers.get("x-ai-base-url"),
+ apiKey: req.headers.get("x-ai-api-key"),
+ modelId: req.headers.get("x-ai-model"),
+ }
+ // Get AI model with optional client overrides
+ const { model, providerOptions, headers, modelId } =
+ getAIModel(clientOverrides)
// Check if model supports prompt caching
const shouldCache = supportsPromptCaching(modelId)

app/api/config/route.ts (new file)

@@ -0,0 +1,10 @@
import { NextResponse } from "next/server"
export async function GET() {
return NextResponse.json({
accessCodeRequired: !!process.env.ACCESS_CODE_LIST,
dailyRequestLimit: Number(process.env.DAILY_REQUEST_LIMIT) || 0,
dailyTokenLimit: Number(process.env.DAILY_TOKEN_LIMIT) || 0,
tpmLimit: Number(process.env.TPM_LIMIT) || 0,
})
}
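For reference, the response shape above can be consumed client-side roughly as in the sketch below. This is a hypothetical helper, not part of this diff; the chat panel's actual fetch site is outside the hunks shown here:

```ts
// Hypothetical consumer of the new /api/config endpoint.
interface AppConfig {
    accessCodeRequired: boolean
    dailyRequestLimit: number
    dailyTokenLimit: number
    tpmLimit: number
}

async function fetchAppConfig(): Promise<AppConfig> {
    const res = await fetch("/api/config")
    if (!res.ok) throw new Error(`Failed to load config: ${res.status}`)
    return (await res.json()) as AppConfig
}
```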


@@ -24,6 +24,10 @@ import { QuotaLimitToast } from "@/components/quota-limit-toast"
import {
SettingsDialog,
STORAGE_ACCESS_CODE_KEY,
STORAGE_AI_API_KEY_KEY,
STORAGE_AI_BASE_URL_KEY,
STORAGE_AI_MODEL_KEY,
STORAGE_AI_PROVIDER_KEY,
} from "@/components/settings-dialog"
// localStorage keys for persistence
@@ -119,11 +123,20 @@ export default function ChatPanel({
}, [])
// Helper to check daily request limit
// Check if user has their own API key configured (bypass limits)
const hasOwnApiKey = useCallback((): boolean => {
const provider = localStorage.getItem(STORAGE_AI_PROVIDER_KEY)
const apiKey = localStorage.getItem(STORAGE_AI_API_KEY_KEY)
return !!(provider && apiKey)
}, [])
const checkDailyLimit = useCallback((): {
allowed: boolean
remaining: number
used: number
} => {
// Skip limit if user has their own API key
if (hasOwnApiKey()) return { allowed: true, remaining: -1, used: 0 }
if (dailyRequestLimit <= 0)
return { allowed: true, remaining: -1, used: 0 }
@@ -145,7 +158,7 @@ export default function ChatPanel({
remaining: dailyRequestLimit - count,
used: count,
}
- }, [dailyRequestLimit])
+ }, [dailyRequestLimit, hasOwnApiKey])
// Helper to increment request count
const incrementRequestCount = useCallback((): void => {
@@ -168,7 +181,7 @@ export default function ChatPanel({
),
{ duration: 15000 },
)
- }, [dailyRequestLimit])
+ }, [dailyRequestLimit, hasOwnApiKey])
// Helper to check daily token limit (checks if already over limit)
const checkTokenLimit = useCallback((): {
@@ -176,6 +189,8 @@ export default function ChatPanel({
remaining: number
used: number
} => {
// Skip limit if user has their own API key
if (hasOwnApiKey()) return { allowed: true, remaining: -1, used: 0 }
if (dailyTokenLimit <= 0)
return { allowed: true, remaining: -1, used: 0 }
@@ -200,7 +215,7 @@ export default function ChatPanel({
remaining: dailyTokenLimit - count,
used: count,
}
- }, [dailyTokenLimit])
+ }, [dailyTokenLimit, hasOwnApiKey])
// Helper to increment token count
const incrementTokenCount = useCallback((tokens: number): void => {
@@ -242,6 +257,8 @@ export default function ChatPanel({
remaining: number
used: number
} => {
// Skip limit if user has their own API key
if (hasOwnApiKey()) return { allowed: true, remaining: -1, used: 0 }
if (tpmLimit <= 0) return { allowed: true, remaining: -1, used: 0 }
const currentMinute = Math.floor(Date.now() / 60000).toString()
@@ -264,7 +281,7 @@ export default function ChatPanel({
remaining: tpmLimit - count,
used: count,
}
- }, [tpmLimit])
+ }, [tpmLimit, hasOwnApiKey])
// Helper to increment TPM count
const incrementTPMCount = useCallback((tokens: number): void => {
@@ -777,6 +794,14 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
const accessCode =
localStorage.getItem(STORAGE_ACCESS_CODE_KEY) || ""
const aiProvider =
localStorage.getItem(STORAGE_AI_PROVIDER_KEY) || ""
const aiBaseUrl =
localStorage.getItem(STORAGE_AI_BASE_URL_KEY) || ""
const aiApiKey =
localStorage.getItem(STORAGE_AI_API_KEY_KEY) || ""
const aiModel = localStorage.getItem(STORAGE_AI_MODEL_KEY) || ""
sendMessage(
{ parts },
{
@@ -786,6 +811,10 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
},
headers: {
"x-access-code": accessCode,
...(aiProvider && { "x-ai-provider": aiProvider }),
...(aiBaseUrl && { "x-ai-base-url": aiBaseUrl }),
...(aiApiKey && { "x-ai-api-key": aiApiKey }),
...(aiModel && { "x-ai-model": aiModel }),
},
},
)
@@ -886,6 +915,11 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
// Now send the message after state is guaranteed to be updated
const accessCode = localStorage.getItem(STORAGE_ACCESS_CODE_KEY) || ""
const aiProvider = localStorage.getItem(STORAGE_AI_PROVIDER_KEY) || ""
const aiBaseUrl = localStorage.getItem(STORAGE_AI_BASE_URL_KEY) || ""
const aiApiKey = localStorage.getItem(STORAGE_AI_API_KEY_KEY) || ""
const aiModel = localStorage.getItem(STORAGE_AI_MODEL_KEY) || ""
sendMessage(
{ parts: userParts },
{
@@ -895,6 +929,10 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
},
headers: {
"x-access-code": accessCode,
...(aiProvider && { "x-ai-provider": aiProvider }),
...(aiBaseUrl && { "x-ai-base-url": aiBaseUrl }),
...(aiApiKey && { "x-ai-api-key": aiApiKey }),
...(aiModel && { "x-ai-model": aiModel }),
},
},
)
@@ -972,6 +1010,11 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
// Now send the edited message after state is guaranteed to be updated
const accessCode = localStorage.getItem(STORAGE_ACCESS_CODE_KEY) || ""
const aiProvider = localStorage.getItem(STORAGE_AI_PROVIDER_KEY) || ""
const aiBaseUrl = localStorage.getItem(STORAGE_AI_BASE_URL_KEY) || ""
const aiApiKey = localStorage.getItem(STORAGE_AI_API_KEY_KEY) || ""
const aiModel = localStorage.getItem(STORAGE_AI_MODEL_KEY) || ""
sendMessage(
{ parts: newParts },
{
@@ -981,6 +1024,10 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
},
headers: {
"x-access-code": accessCode,
...(aiProvider && { "x-ai-provider": aiProvider }),
...(aiBaseUrl && { "x-ai-base-url": aiBaseUrl }),
...(aiApiKey && { "x-ai-api-key": aiApiKey }),
...(aiModel && { "x-ai-model": aiModel }),
},
},
)


@@ -82,9 +82,9 @@ export function QuotaLimitToast({
</Link>
</p>
<p>
- The good news is that you can self-host the project in
- seconds on Vercel (it's fully open-source), or if you love
- it, consider sponsoring to help keep the lights on!
+ <strong>Tip:</strong> You can use your own API key (click
+ the Settings icon) or self-host the project to bypass these
+ limits.
</p>
<p>Your limit resets tomorrow. Thanks for understanding!</p>
</div>


@@ -11,6 +11,13 @@ import {
} from "@/components/ui/dialog"
import { Input } from "@/components/ui/input"
import { Label } from "@/components/ui/label"
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select"
import { Switch } from "@/components/ui/switch"
interface SettingsDialogProps {
@@ -22,6 +29,10 @@ interface SettingsDialogProps {
export const STORAGE_ACCESS_CODE_KEY = "next-ai-draw-io-access-code"
export const STORAGE_CLOSE_PROTECTION_KEY = "next-ai-draw-io-close-protection"
const STORAGE_ACCESS_CODE_REQUIRED_KEY = "next-ai-draw-io-access-code-required"
export const STORAGE_AI_PROVIDER_KEY = "next-ai-draw-io-ai-provider"
export const STORAGE_AI_BASE_URL_KEY = "next-ai-draw-io-ai-base-url"
export const STORAGE_AI_API_KEY_KEY = "next-ai-draw-io-ai-api-key"
export const STORAGE_AI_MODEL_KEY = "next-ai-draw-io-ai-model"
function getStoredAccessCodeRequired(): boolean | null {
if (typeof window === "undefined") return null
@@ -42,6 +53,10 @@ export function SettingsDialog({
const [accessCodeRequired, setAccessCodeRequired] = useState(
() => getStoredAccessCodeRequired() ?? false,
)
const [provider, setProvider] = useState("")
const [baseUrl, setBaseUrl] = useState("")
const [apiKey, setApiKey] = useState("")
const [modelId, setModelId] = useState("")
useEffect(() => {
// Only fetch if not cached in localStorage
@@ -77,6 +92,13 @@ export function SettingsDialog({
)
// Default to true if not set
setCloseProtection(storedCloseProtection !== "false")
// Load AI provider settings
setProvider(localStorage.getItem(STORAGE_AI_PROVIDER_KEY) || "")
setBaseUrl(localStorage.getItem(STORAGE_AI_BASE_URL_KEY) || "")
setApiKey(localStorage.getItem(STORAGE_AI_API_KEY_KEY) || "")
setModelId(localStorage.getItem(STORAGE_AI_MODEL_KEY) || "")
setError("")
}
}, [open])
@@ -160,6 +182,181 @@ export function SettingsDialog({
)}
</div>
)}
<div className="space-y-2">
<Label>AI Provider Settings</Label>
<p className="text-[0.8rem] text-muted-foreground">
Use your own API key to bypass usage limits. Your
key is stored locally in your browser and is never
stored on the server.
</p>
<div className="space-y-3 pt-2">
<div className="space-y-2">
<Label htmlFor="ai-provider">Provider</Label>
<Select
value={provider || "default"}
onValueChange={(value) => {
const actualValue =
value === "default" ? "" : value
setProvider(actualValue)
localStorage.setItem(
STORAGE_AI_PROVIDER_KEY,
actualValue,
)
}}
>
<SelectTrigger id="ai-provider">
<SelectValue placeholder="Use Server Default" />
</SelectTrigger>
<SelectContent>
<SelectItem value="default">
Use Server Default
</SelectItem>
<SelectItem value="openai">
OpenAI
</SelectItem>
<SelectItem value="anthropic">
Anthropic
</SelectItem>
<SelectItem value="google">
Google
</SelectItem>
<SelectItem value="azure">
Azure OpenAI
</SelectItem>
<SelectItem value="openrouter">
OpenRouter
</SelectItem>
<SelectItem value="deepseek">
DeepSeek
</SelectItem>
<SelectItem value="siliconflow">
SiliconFlow
</SelectItem>
</SelectContent>
</Select>
</div>
{provider && provider !== "default" && (
<>
<div className="space-y-2">
<Label htmlFor="ai-model">
Model ID
</Label>
<Input
id="ai-model"
value={modelId}
onChange={(e) => {
setModelId(e.target.value)
localStorage.setItem(
STORAGE_AI_MODEL_KEY,
e.target.value,
)
}}
placeholder={
provider === "openai"
? "e.g., gpt-4o"
: provider === "anthropic"
? "e.g., claude-sonnet-4-5"
: provider === "google"
? "e.g., gemini-2.0-flash-exp"
: provider ===
"deepseek"
? "e.g., deepseek-chat"
: "Model ID"
}
/>
</div>
<div className="space-y-2">
<Label htmlFor="ai-api-key">
API Key
</Label>
<Input
id="ai-api-key"
type="password"
value={apiKey}
onChange={(e) => {
setApiKey(e.target.value)
localStorage.setItem(
STORAGE_AI_API_KEY_KEY,
e.target.value,
)
}}
placeholder="Your API key"
autoComplete="off"
/>
<p className="text-[0.8rem] text-muted-foreground">
Overrides{" "}
{provider === "openai"
? "OPENAI_API_KEY"
: provider === "anthropic"
? "ANTHROPIC_API_KEY"
: provider === "google"
? "GOOGLE_GENERATIVE_AI_API_KEY"
: provider === "azure"
? "AZURE_API_KEY"
: provider ===
"openrouter"
? "OPENROUTER_API_KEY"
: provider ===
"deepseek"
? "DEEPSEEK_API_KEY"
: provider ===
"siliconflow"
? "SILICONFLOW_API_KEY"
: "server API key"}
</p>
</div>
<div className="space-y-2">
<Label htmlFor="ai-base-url">
Base URL (optional)
</Label>
<Input
id="ai-base-url"
value={baseUrl}
onChange={(e) => {
setBaseUrl(e.target.value)
localStorage.setItem(
STORAGE_AI_BASE_URL_KEY,
e.target.value,
)
}}
placeholder={
provider === "anthropic"
? "https://api.anthropic.com/v1"
: provider === "siliconflow"
? "https://api.siliconflow.com/v1"
: "Custom endpoint URL"
}
/>
</div>
<Button
variant="outline"
size="sm"
className="w-full"
onClick={() => {
localStorage.removeItem(
STORAGE_AI_PROVIDER_KEY,
)
localStorage.removeItem(
STORAGE_AI_BASE_URL_KEY,
)
localStorage.removeItem(
STORAGE_AI_API_KEY_KEY,
)
localStorage.removeItem(
STORAGE_AI_MODEL_KEY,
)
setProvider("")
setBaseUrl("")
setApiKey("")
setModelId("")
}}
>
Clear Settings
</Button>
</>
)}
</div>
</div>
<div className="flex items-center justify-between">
<div className="space-y-0.5">
<Label htmlFor="close-protection">


@@ -15,6 +15,8 @@
> 注意:由于访问量较大,演示站点目前使用 minimax-m2 模型。如需获得最佳效果,建议使用 Claude Sonnet 4.5 或 Claude Opus 4.5 自行部署。
> **使用自己的 API Key**:您可以使用自己的 API Key 来绕过演示站点的用量限制。点击聊天面板中的设置图标即可配置您的 Provider 和 API Key。您的 Key 仅保存在浏览器本地,不会被存储在服务器上。
</div>
一个集成了AI功能的Next.js网页应用与draw.io图表无缝结合。通过自然语言命令和AI辅助可视化来创建、修改和增强图表。


@@ -15,6 +15,8 @@
> 注意:アクセス数が多いため、デモサイトでは現在 minimax-m2 モデルを使用しています。最高の結果を得るには、Claude Sonnet 4.5 または Claude Opus 4.5 でのセルフホスティングをお勧めします。
> **自分のAPIキーを使用**:自分のAPIキーを使用することで、デモサイトの利用制限を回避できます。チャットパネルの設定アイコンをクリックして、プロバイダーとAPIキーを設定してください。キーはブラウザのローカルに保存され、サーバーには保存されません。
</div>
AI機能とdraw.ioダイアグラムを統合したNext.jsウェブアプリケーションです。自然言語コマンドとAI支援の可視化により、ダイアグラムを作成、修正、強化できます。


@@ -26,6 +26,24 @@ interface ModelConfig {
modelId: string
}
export interface ClientOverrides {
provider?: string | null
baseUrl?: string | null
apiKey?: string | null
modelId?: string | null
}
// Providers that can be used with client-provided API keys
const ALLOWED_CLIENT_PROVIDERS: ProviderName[] = [
"openai",
"anthropic",
"google",
"azure",
"openrouter",
"deepseek",
"siliconflow",
]
// Bedrock provider options for Anthropic beta features
const BEDROCK_ANTHROPIC_BETA = {
bedrock: {
@@ -109,18 +127,39 @@ function validateProviderCredentials(provider: ProviderName): void {
* - SILICONFLOW_API_KEY: SiliconFlow API key
* - SILICONFLOW_BASE_URL: SiliconFlow endpoint (optional, defaults to https://api.siliconflow.com/v1)
*/
- export function getAIModel(): ModelConfig {
- const modelId = process.env.AI_MODEL
+ export function getAIModel(overrides?: ClientOverrides): ModelConfig {
+ // Check if client is providing their own provider override
+ const isClientOverride = !!(overrides?.provider && overrides?.apiKey)
+ // Use client override if provided, otherwise fall back to env vars
+ const modelId = overrides?.modelId || process.env.AI_MODEL
if (!modelId) {
if (isClientOverride) {
throw new Error(
`Model ID is required when using custom AI provider. Please specify a model in Settings.`,
)
}
throw new Error(
`AI_MODEL environment variable is required. Example: AI_MODEL=claude-sonnet-4-5`,
)
}
- // Determine provider: explicit config > auto-detect > error
+ // Determine provider: client override > explicit config > auto-detect > error
let provider: ProviderName
- if (process.env.AI_PROVIDER) {
+ if (overrides?.provider) {
// Validate client-provided provider
if (
!ALLOWED_CLIENT_PROVIDERS.includes(
overrides.provider as ProviderName,
)
) {
throw new Error(
`Invalid provider: ${overrides.provider}. Allowed providers: ${ALLOWED_CLIENT_PROVIDERS.join(", ")}`,
)
}
provider = overrides.provider as ProviderName
} else if (process.env.AI_PROVIDER) {
provider = process.env.AI_PROVIDER as ProviderName
} else {
const detected = detectProvider()
@@ -155,8 +194,10 @@ export function getAIModel(): ModelConfig {
}
}
// Validate provider credentials
// Only validate server credentials if client isn't providing their own API key
if (!isClientOverride) {
validateProviderCredentials(provider)
}
console.log(`[AI Provider] Initializing ${provider} with model: ${modelId}`)
@@ -180,24 +221,30 @@ export function getAIModel(): ModelConfig {
break
}
case "openai":
if (process.env.OPENAI_BASE_URL) {
case "openai": {
const apiKey = overrides?.apiKey || process.env.OPENAI_API_KEY
const baseURL = overrides?.baseUrl || process.env.OPENAI_BASE_URL
if (baseURL || overrides?.apiKey) {
const customOpenAI = createOpenAI({
apiKey: process.env.OPENAI_API_KEY,
baseURL: process.env.OPENAI_BASE_URL,
apiKey,
...(baseURL && { baseURL }),
})
model = customOpenAI.chat(modelId)
} else {
model = openai(modelId)
}
break
}
case "anthropic": {
- const customProvider = createAnthropic({
- apiKey: process.env.ANTHROPIC_API_KEY,
- baseURL:
+ const apiKey = overrides?.apiKey || process.env.ANTHROPIC_API_KEY
+ const baseURL =
+ overrides?.baseUrl ||
process.env.ANTHROPIC_BASE_URL ||
- "https://api.anthropic.com/v1",
+ "https://api.anthropic.com/v1"
+ const customProvider = createAnthropic({
+ apiKey,
+ baseURL,
headers: ANTHROPIC_BETA_HEADERS,
})
model = customProvider(modelId)
@@ -206,29 +253,36 @@ export function getAIModel(): ModelConfig {
break
}
case "google":
if (process.env.GOOGLE_BASE_URL) {
case "google": {
const apiKey =
overrides?.apiKey || process.env.GOOGLE_GENERATIVE_AI_API_KEY
const baseURL = overrides?.baseUrl || process.env.GOOGLE_BASE_URL
if (baseURL || overrides?.apiKey) {
const customGoogle = createGoogleGenerativeAI({
apiKey: process.env.GOOGLE_GENERATIVE_AI_API_KEY,
baseURL: process.env.GOOGLE_BASE_URL,
apiKey,
...(baseURL && { baseURL }),
})
model = customGoogle(modelId)
} else {
model = google(modelId)
}
break
}
case "azure":
if (process.env.AZURE_BASE_URL) {
case "azure": {
const apiKey = overrides?.apiKey || process.env.AZURE_API_KEY
const baseURL = overrides?.baseUrl || process.env.AZURE_BASE_URL
if (baseURL || overrides?.apiKey) {
const customAzure = createAzure({
apiKey: process.env.AZURE_API_KEY,
baseURL: process.env.AZURE_BASE_URL,
apiKey,
...(baseURL && { baseURL }),
})
model = customAzure(modelId)
} else {
model = azure(modelId)
}
break
}
case "ollama":
if (process.env.OLLAMA_BASE_URL) {
@@ -242,34 +296,41 @@ export function getAIModel(): ModelConfig {
break
case "openrouter": {
+ const apiKey = overrides?.apiKey || process.env.OPENROUTER_API_KEY
+ const baseURL =
+ overrides?.baseUrl || process.env.OPENROUTER_BASE_URL
const openrouter = createOpenRouter({
- apiKey: process.env.OPENROUTER_API_KEY,
- ...(process.env.OPENROUTER_BASE_URL && {
- baseURL: process.env.OPENROUTER_BASE_URL,
- }),
+ apiKey,
+ ...(baseURL && { baseURL }),
})
model = openrouter(modelId)
break
}
case "deepseek":
if (process.env.DEEPSEEK_BASE_URL) {
case "deepseek": {
const apiKey = overrides?.apiKey || process.env.DEEPSEEK_API_KEY
const baseURL = overrides?.baseUrl || process.env.DEEPSEEK_BASE_URL
if (baseURL || overrides?.apiKey) {
const customDeepSeek = createDeepSeek({
apiKey: process.env.DEEPSEEK_API_KEY,
baseURL: process.env.DEEPSEEK_BASE_URL,
apiKey,
...(baseURL && { baseURL }),
})
model = customDeepSeek(modelId)
} else {
model = deepseek(modelId)
}
break
}
case "siliconflow": {
- const siliconflowProvider = createOpenAI({
- apiKey: process.env.SILICONFLOW_API_KEY,
- baseURL:
+ const apiKey = overrides?.apiKey || process.env.SILICONFLOW_API_KEY
+ const baseURL =
+ overrides?.baseUrl ||
process.env.SILICONFLOW_BASE_URL ||
- "https://api.siliconflow.com/v1",
+ "https://api.siliconflow.com/v1"
+ const siliconflowProvider = createOpenAI({
+ apiKey,
+ baseURL,
})
model = siliconflowProvider.chat(modelId)
break
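To make the intended precedence concrete, a hypothetical check along these lines could verify the new behavior. This assumes the module lives at lib/ai-providers and a Jest-style test runner; it is not part of this commit:

```ts
import { getAIModel } from "@/lib/ai-providers" // assumed module path

test("client overrides win over env vars and skip server credential validation", () => {
    process.env.AI_PROVIDER = "anthropic"
    process.env.AI_MODEL = "claude-sonnet-4-5"
    delete process.env.OPENAI_API_KEY // no server-side key for the overridden provider

    const { modelId } = getAIModel({
        provider: "openai",
        apiKey: "sk-user-provided", // client-supplied key from the x-ai-api-key header
        modelId: "gpt-4o",
    })

    // The client-supplied model ID takes precedence over AI_MODEL.
    expect(modelId).toBe("gpt-4o")
})
```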