feat: add multi-provider model configuration

- Add model config dialog for managing multiple AI providers
- Support for OpenAI, Anthropic, Google, Azure, Bedrock, OpenRouter, DeepSeek, SiliconFlow, Ollama, and AI Gateway
- Add model selector dropdown in chat panel header
- Add API key validation endpoint
- Add custom model ID input with keyboard navigation
- Fix hover highlight in Command component
- Add suggested models for each provider including latest Claude 4.5 series
- Store configuration locally in browser
This commit is contained in:
dayuan.jiang
2025-12-22 17:58:05 +09:00
parent b088a0653e
commit 1e916aa86e
16 changed files with 2791 additions and 276 deletions

View File

@@ -180,5 +180,24 @@
"seekingSponsorship": "Call for Sponsorship",
"contactMe": "Contact Me",
"usageNotice": "Due to high usage, I have changed the model from Claude to minimax-m2 and added some usage limits. See About page for details."
},
"modelConfig": {
"title": "AI Model Configuration",
"description": "Configure multiple AI providers and models",
"configure": "Configure",
"addProvider": "Add Provider",
"addModel": "Add Model",
"modelId": "Model ID",
"modelLabel": "Display Label",
"streaming": "Enable Streaming",
"deleteProvider": "Delete Provider",
"deleteModel": "Delete Model",
"noModels": "No models configured. Add a model to get started.",
"selectProvider": "Select a provider or add a new one",
"configureMultiple": "Configure multiple AI providers and switch between them easily",
"apiKeyStored": "API keys are stored locally in your browser",
"test": "Test",
"validationError": "Validation failed",
"addModelFirst": "Add at least one model to validate"
}
}

View File

@@ -180,5 +180,24 @@
"seekingSponsorship": "スポンサー募集",
"contactMe": "お問い合わせ",
"usageNotice": "利用量の増加に伴い、コスト削減のためモデルを Claude から minimax-m2 に変更し、いくつかの利用制限を設けました。詳細は概要ページをご覧ください。"
},
"modelConfig": {
"title": "AIモデル設定",
"description": "複数のAIプロバイダーとモデルを設定",
"configure": "設定",
"addProvider": "プロバイダーを追加",
"addModel": "モデルを追加",
"modelId": "モデルID",
"modelLabel": "表示名",
"streaming": "ストリーミングを有効",
"deleteProvider": "プロバイダーを削除",
"deleteModel": "モデルを削除",
"noModels": "モデルが設定されていません。モデルを追加してください。",
"selectProvider": "プロバイダーを選択または追加してください",
"configureMultiple": "複数のAIプロバイダーを設定して簡単に切り替え",
"apiKeyStored": "APIキーはブラウザにローカル保存されます",
"test": "テスト",
"validationError": "検証に失敗しました",
"addModelFirst": "検証するには少なくとも1つのモデルを追加してください"
}
}

View File

@@ -180,5 +180,24 @@
"seekingSponsorship": "寻求赞助(求大佬捞一把)",
"contactMe": "联系我",
"usageNotice": "由于使用量过高,我已将模型从 Claude 更换为 minimax-m2,并设置了一些用量限制。详情请查看关于页面。"
},
"modelConfig": {
"title": "AI 模型配置",
"description": "配置多个 AI 提供商和模型",
"configure": "配置",
"addProvider": "添加提供商",
"addModel": "添加模型",
"modelId": "模型 ID",
"modelLabel": "显示名称",
"streaming": "启用流式输出",
"deleteProvider": "删除提供商",
"deleteModel": "删除模型",
"noModels": "尚未配置模型。添加模型以开始使用。",
"selectProvider": "选择一个提供商或添加新的",
"configureMultiple": "配置多个 AI 提供商并轻松切换",
"apiKeyStored": "API 密钥存储在您的浏览器本地",
"test": "测试",
"validationError": "验证失败",
"addModelFirst": "请先添加至少一个模型以进行验证"
}
}

View File

@@ -24,4 +24,8 @@ export const STORAGE_KEYS = {
aiBaseUrl: "next-ai-draw-io-ai-base-url",
aiApiKey: "next-ai-draw-io-ai-api-key",
aiModel: "next-ai-draw-io-ai-model",
// Multi-model configuration
modelConfigs: "next-ai-draw-io-model-configs",
selectedModelId: "next-ai-draw-io-selected-model-id",
} as const

290
lib/types/model-config.ts Normal file
View File

@@ -0,0 +1,290 @@
// Types for multi-provider model configuration

// Union of every AI provider this app knows how to configure.
// Each value keys into PROVIDER_INFO and SUGGESTED_MODELS below.
export type ProviderName =
    | "openai"
    | "anthropic"
    | "google"
    | "azure"
    | "bedrock"
    | "openrouter"
    | "deepseek"
    | "siliconflow"
    | "ollama"
    | "gateway"
// Individual model configuration — one selectable model under a provider.
export interface ModelConfig {
    id: string // Unique key for this entry (see generateId)
    modelId: string // Provider-facing model name, e.g. "gpt-4o", "claude-sonnet-4-5"
    streaming?: boolean // Enable streaming responses; createModelConfig sets this to true
}
// Provider configuration — one configured provider account plus its models.
export interface ProviderConfig {
    id: string // Unique key for this provider config (see generateId)
    provider: ProviderName // Which provider this entry targets
    name?: string // Custom display name (e.g., "OpenAI Production"); falls back to PROVIDER_INFO label
    apiKey: string // Stored as entered; empty string until the user fills it in
    baseUrl?: string // API endpoint; pre-filled from PROVIDER_INFO.defaultBaseUrl when available
    models: ModelConfig[] // Models the user has added under this provider
    validated?: boolean // Whether the API key has passed validation
}
// The complete multi-model configuration — the top-level persisted object.
export interface MultiModelConfig {
    version: 1 // Schema version literal, for future migrations
    providers: ProviderConfig[] // All configured providers
    selectedModelId?: string // Currently selected model's unique id (ModelConfig.id)
}
// Flattened model for dropdown display — a (provider, model) pair merged
// into one record so the UI can render and select it without re-joining.
export interface FlattenedModel {
    id: string // ModelConfig.id of the underlying model entry
    modelId: string // Provider-facing model name
    provider: ProviderName // Owning provider
    providerLabel: string // Custom provider name, or the PROVIDER_INFO label
    apiKey: string // Copied from the owning ProviderConfig
    baseUrl?: string // Copied from the owning ProviderConfig
    streaming?: boolean // Copied from the underlying ModelConfig
}
// Provider metadata: display label for the UI and, where one is known, a
// default API base URL used to pre-fill new configs (see createProviderConfig).
export const PROVIDER_INFO: Record<
    ProviderName,
    { label: string; defaultBaseUrl?: string }
> = {
    openai: { label: "OpenAI" },
    anthropic: {
        label: "Anthropic",
        defaultBaseUrl: "https://api.anthropic.com/v1",
    },
    google: { label: "Google" },
    azure: { label: "Azure OpenAI" },
    bedrock: { label: "Amazon Bedrock" },
    openrouter: { label: "OpenRouter" },
    deepseek: { label: "DeepSeek" },
    siliconflow: {
        label: "SiliconFlow",
        // NOTE(review): SiliconFlow also operates api.siliconflow.cn for
        // mainland China — confirm .com is the intended default endpoint.
        defaultBaseUrl: "https://api.siliconflow.com/v1",
    },
    // Local Ollama daemon's default port
    ollama: { label: "Ollama", defaultBaseUrl: "http://localhost:11434" },
    gateway: { label: "AI Gateway" },
}
// Suggested models per provider for quick add. Purely advisory: the UI
// offers these as shortcuts, and users can still type any custom model ID.
// NOTE(review): model IDs (especially dated Claude/Bedrock suffixes) go
// stale quickly — verify against each provider's current model list.
export const SUGGESTED_MODELS: Record<ProviderName, string[]> = {
    openai: [
        // GPT-4o series
        "gpt-4o",
        "gpt-4o-mini",
        "gpt-4o-audio-preview",
        // GPT-4.1 series
        "gpt-4.1",
        "gpt-4.1-mini",
        "gpt-4.1-nano",
        // GPT-4 Turbo
        "gpt-4-turbo",
        "gpt-4-turbo-preview",
        // Reasoning models
        "o1",
        "o1-mini",
        "o1-preview",
        "o3",
        "o3-mini",
        "o4-mini",
        // Legacy
        "gpt-4",
        "gpt-3.5-turbo",
    ],
    anthropic: [
        // Claude 4.5 series (latest)
        // NOTE(review): confirm these 4.5 date suffixes against Anthropic's
        // published model IDs — the 20250514 date matches the 4.0 releases.
        "claude-opus-4-5-20250514",
        "claude-sonnet-4-5-20250514",
        // Claude 4 series
        "claude-opus-4-20250514",
        "claude-sonnet-4-20250514",
        // Claude 3.7 series
        "claude-3-7-sonnet-20250219",
        // Claude 3.5 series
        "claude-3-5-sonnet-20241022",
        "claude-3-5-haiku-20241022",
        // Claude 3 series
        "claude-3-opus-20240229",
        "claude-3-sonnet-20240229",
        "claude-3-haiku-20240307",
    ],
    google: [
        // Gemini 2.5 series
        "gemini-2.5-pro",
        "gemini-2.5-flash",
        "gemini-2.5-flash-preview-05-20",
        // Gemini 2.0 series
        "gemini-2.0-flash",
        "gemini-2.0-flash-exp",
        "gemini-2.0-flash-lite",
        // Gemini 1.5 series
        "gemini-1.5-pro",
        "gemini-1.5-flash",
        // Legacy
        "gemini-pro",
    ],
    // Azure deployment names commonly mirror OpenAI model names
    // (note Azure's "gpt-35-turbo" spelling, without the dot)
    azure: ["gpt-4o", "gpt-4o-mini", "gpt-4-turbo", "gpt-4", "gpt-35-turbo"],
    bedrock: [
        // Anthropic Claude
        "anthropic.claude-opus-4-5-20250514-v1:0",
        "anthropic.claude-sonnet-4-5-20250514-v1:0",
        "anthropic.claude-opus-4-20250514-v1:0",
        "anthropic.claude-sonnet-4-20250514-v1:0",
        "anthropic.claude-3-7-sonnet-20250219-v1:0",
        "anthropic.claude-3-5-sonnet-20241022-v2:0",
        "anthropic.claude-3-5-haiku-20241022-v1:0",
        "anthropic.claude-3-opus-20240229-v1:0",
        "anthropic.claude-3-sonnet-20240229-v1:0",
        "anthropic.claude-3-haiku-20240307-v1:0",
        // Amazon Nova
        "amazon.nova-pro-v1:0",
        "amazon.nova-lite-v1:0",
        "amazon.nova-micro-v1:0",
        // Meta Llama
        "meta.llama3-3-70b-instruct-v1:0",
        "meta.llama3-1-405b-instruct-v1:0",
        "meta.llama3-1-70b-instruct-v1:0",
        // Mistral
        "mistral.mistral-large-2411-v1:0",
        "mistral.mistral-small-2503-v1:0",
    ],
    openrouter: [
        // Anthropic
        "anthropic/claude-sonnet-4",
        "anthropic/claude-opus-4",
        "anthropic/claude-3.5-sonnet",
        "anthropic/claude-3.5-haiku",
        // OpenAI
        "openai/gpt-4o",
        "openai/gpt-4o-mini",
        "openai/o1",
        "openai/o3-mini",
        // Google
        "google/gemini-2.5-pro",
        "google/gemini-2.5-flash",
        "google/gemini-2.0-flash-exp:free",
        // Meta Llama
        "meta-llama/llama-3.3-70b-instruct",
        "meta-llama/llama-3.1-405b-instruct",
        "meta-llama/llama-3.1-70b-instruct",
        // DeepSeek
        "deepseek/deepseek-chat",
        "deepseek/deepseek-r1",
        // Qwen
        "qwen/qwen-2.5-72b-instruct",
    ],
    deepseek: ["deepseek-chat", "deepseek-reasoner", "deepseek-coder"],
    siliconflow: [
        // DeepSeek
        "deepseek-ai/DeepSeek-V3",
        "deepseek-ai/DeepSeek-R1",
        "deepseek-ai/DeepSeek-V2.5",
        // Qwen
        "Qwen/Qwen2.5-72B-Instruct",
        "Qwen/Qwen2.5-32B-Instruct",
        "Qwen/Qwen2.5-Coder-32B-Instruct",
        "Qwen/Qwen2.5-7B-Instruct",
        "Qwen/Qwen2-VL-72B-Instruct",
    ],
    // Ollama uses bare tags; the daemon resolves them to local pulls
    ollama: [
        "llama3.3",
        "llama3.2",
        "llama3.1",
        "qwen2.5",
        "qwen2.5-coder",
        "deepseek-r1",
        "deepseek-coder-v2",
        "gemma2",
        "phi4",
        "mistral",
        "codellama",
    ],
    // AI Gateway routes "provider/model" style IDs
    gateway: [
        "openai/gpt-4o",
        "openai/gpt-4o-mini",
        "anthropic/claude-sonnet-4-5",
        "anthropic/claude-3-5-sonnet",
        "google/gemini-2.0-flash",
    ],
}
// Helper to generate a unique ID for provider/model config entries.
//
// Prefers crypto.randomUUID() (available in modern browsers and Node 19+),
// which gives a real v4 UUID with negligible collision probability. The
// previous timestamp + Math.random() form is kept as a fallback for older
// runtimes; existing stored IDs in either format remain valid since callers
// only compare IDs for equality.
export function generateId(): string {
    const cryptoObj = globalThis.crypto
    if (cryptoObj && typeof cryptoObj.randomUUID === "function") {
        return cryptoObj.randomUUID()
    }
    return `${Date.now()}-${Math.random().toString(36).slice(2, 9)}`
}
// Build a brand-new configuration: schema version 1, no providers,
// and nothing selected yet.
export function createEmptyConfig(): MultiModelConfig {
    const empty: MultiModelConfig = {
        version: 1,
        providers: [],
        selectedModelId: undefined,
    }
    return empty
}
// Build a fresh, not-yet-validated provider entry: empty API key, no
// models, and the base URL pre-filled from PROVIDER_INFO when one exists.
export function createProviderConfig(provider: ProviderName): ProviderConfig {
    const { defaultBaseUrl } = PROVIDER_INFO[provider]
    return {
        id: generateId(),
        provider,
        apiKey: "",
        baseUrl: defaultBaseUrl,
        models: [],
        validated: false,
    }
}
// Wrap a raw model ID in a new ModelConfig entry with streaming on.
export function createModelConfig(modelId: string): ModelConfig {
    const entry: ModelConfig = {
        id: generateId(),
        modelId,
        streaming: true,
    }
    return entry
}
// Get all models as a flattened list for dropdown display: every
// (provider, model) pair becomes one FlattenedModel carrying the
// provider's label, API key, and base URL alongside the model fields.
export function flattenModels(config: MultiModelConfig): FlattenedModel[] {
    return config.providers.flatMap((providerCfg) => {
        // Custom name wins; otherwise fall back to the default provider label
        const providerLabel =
            providerCfg.name || PROVIDER_INFO[providerCfg.provider].label
        return providerCfg.models.map((modelCfg) => ({
            id: modelCfg.id,
            modelId: modelCfg.modelId,
            provider: providerCfg.provider,
            providerLabel,
            apiKey: providerCfg.apiKey,
            baseUrl: providerCfg.baseUrl,
            streaming: modelCfg.streaming,
        }))
    })
}
// Look up a flattened model by its ModelConfig UUID; undefined if absent.
export function findModelById(
    config: MultiModelConfig,
    modelId: string,
): FlattenedModel | undefined {
    for (const model of flattenModels(config)) {
        if (model.id === modelId) {
            return model
        }
    }
    return undefined
}
// Resolve the currently selected model, or undefined when no selection
// is stored (or the stored ID no longer matches any configured model).
export function getSelectedModel(
    config: MultiModelConfig,
): FlattenedModel | undefined {
    const { selectedModelId } = config
    return selectedModelId
        ? findModelById(config, selectedModelId)
        : undefined
}