From 54fd48506daf8fffc2013992cca0369cbc6a5847 Mon Sep 17 00:00:00 2001
From: yrk111222 <2493404415@qq.com>
Date: Tue, 6 Jan 2026 18:41:25 +0800
Subject: [PATCH] Feat/add modelscope support (#521)
* add ModelScope API support
* update some documentation
* modify some details
---
README.md | 1 +
app/[lang]/about/cn/page.tsx | 1 +
app/[lang]/about/ja/page.tsx | 1 +
app/[lang]/about/page.tsx | 1 +
app/api/validate-model/route.ts | 69 ++++++++++++++++++++++++++++++
components/model-config-dialog.tsx | 1 +
components/model-selector.tsx | 1 +
docs/cn/README_CN.md | 1 +
docs/cn/ai-providers.md | 13 ++++++
docs/en/ai-providers.md | 15 ++++++-
docs/ja/README_JA.md | 1 +
docs/ja/ai-providers.md | 13 ++++++
electron/main/config-manager.ts | 4 ++
electron/settings/index.html | 1 +
electron/settings/settings.js | 1 +
env.example | 4 ++
lib/ai-providers.ts | 25 ++++++++++-
lib/i18n/dictionaries/en.json | 3 +-
lib/i18n/dictionaries/ja.json | 3 +-
lib/i18n/dictionaries/zh.json | 3 +-
lib/types/model-config.ts | 16 +++++++
21 files changed, 172 insertions(+), 6 deletions(-)
diff --git a/README.md b/README.md
index c88d2a2..5b9e5b3 100644
--- a/README.md
+++ b/README.md
@@ -211,6 +211,7 @@ See the [Next.js deployment documentation](https://nextjs.org/docs/app/building-
- OpenRouter
- DeepSeek
- SiliconFlow
+- ModelScope
- SGLang
- Vercel AI Gateway
diff --git a/app/[lang]/about/cn/page.tsx b/app/[lang]/about/cn/page.tsx
index 359945c..e88a8ad 100644
--- a/app/[lang]/about/cn/page.tsx
+++ b/app/[lang]/about/cn/page.tsx
@@ -297,6 +297,7 @@ export default function AboutCN() {
OpenRouter
DeepSeek
SiliconFlow
+ ModelScope
注意:claude-sonnet-4-5{" "}
diff --git a/app/[lang]/about/ja/page.tsx b/app/[lang]/about/ja/page.tsx
index 3453395..dbbabaf 100644
--- a/app/[lang]/about/ja/page.tsx
+++ b/app/[lang]/about/ja/page.tsx
@@ -312,6 +312,7 @@ export default function AboutJA() {
OpenRouter
DeepSeek
SiliconFlow
+ ModelScope
注:claude-sonnet-4-5
diff --git a/app/[lang]/about/page.tsx b/app/[lang]/about/page.tsx
index c805bec..2750b51 100644
--- a/app/[lang]/about/page.tsx
+++ b/app/[lang]/about/page.tsx
@@ -331,6 +331,7 @@ export default function About() {
OpenRouter
DeepSeek
SiliconFlow
+ ModelScope
Note that claude-sonnet-4-5 has trained on
diff --git a/app/api/validate-model/route.ts b/app/api/validate-model/route.ts
index b00e734..79e9369 100644
--- a/app/api/validate-model/route.ts
+++ b/app/api/validate-model/route.ts
@@ -274,6 +274,75 @@ export async function POST(req: Request) {
break
}
+ case "modelscope": {
+ const baseURL =
+ baseUrl || "https://api-inference.modelscope.cn/v1"
+ const startTime = Date.now()
+
+ try {
+ // Initiate a streaming request (required for QwQ-32B and certain Qwen3 models)
+ const response = await fetch(
+ `${baseURL}/chat/completions`,
+ {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${apiKey}`,
+ },
+ body: JSON.stringify({
+ model: modelId,
+ messages: [
+ { role: "user", content: "Say 'OK'" },
+ ],
+ max_tokens: 20,
+ stream: true,
+ enable_thinking: false,
+ }),
+ },
+ )
+
+ if (!response.ok) {
+ const errorText = await response.text()
+ throw new Error(
+ `ModelScope API error (${response.status}): ${errorText}`,
+ )
+ }
+
+ const contentType =
+ response.headers.get("content-type") || ""
+ const isValidStreamingResponse =
+ response.status === 200 &&
+ (contentType.includes("text/event-stream") ||
+ contentType.includes("application/json"))
+
+ if (!isValidStreamingResponse) {
+ throw new Error(
+ `Unexpected response format: ${contentType}`,
+ )
+ }
+
+ const responseTime = Date.now() - startTime
+
+ if (response.body) {
+ response.body.cancel().catch(() => {
+ /* Ignore cancellation errors */
+ })
+ }
+
+ return NextResponse.json({
+ valid: true,
+ responseTime,
+ note: "ModelScope model validated (using streaming API)",
+ })
+ } catch (error) {
+ console.error(
+ "[validate-model] ModelScope validation failed:",
+ error,
+ )
+ throw error
+ }
+ }
+
default:
return NextResponse.json(
{ valid: false, error: `Unknown provider: ${provider}` },
diff --git a/components/model-config-dialog.tsx b/components/model-config-dialog.tsx
index a576e2e..0c3aae3 100644
--- a/components/model-config-dialog.tsx
+++ b/components/model-config-dialog.tsx
@@ -79,6 +79,7 @@ const PROVIDER_LOGO_MAP: Record = {
gateway: "vercel",
edgeone: "tencent-cloud",
doubao: "bytedance",
+ modelscope: "modelscope",
}
// Provider logo component
diff --git a/components/model-selector.tsx b/components/model-selector.tsx
index 027c699..49e8b95 100644
--- a/components/model-selector.tsx
+++ b/components/model-selector.tsx
@@ -50,6 +50,7 @@ const PROVIDER_LOGO_MAP: Record = {
gateway: "vercel",
edgeone: "tencent-cloud",
doubao: "bytedance",
+ modelscope: "modelscope",
}
// Group models by providerLabel (handles duplicate providers)
diff --git a/docs/cn/README_CN.md b/docs/cn/README_CN.md
index 6d61bd8..67ab4c0 100644
--- a/docs/cn/README_CN.md
+++ b/docs/cn/README_CN.md
@@ -204,6 +204,7 @@ npm run dev
- OpenRouter
- DeepSeek
- SiliconFlow
+- ModelScope
- SGLang
- Vercel AI Gateway
diff --git a/docs/cn/ai-providers.md b/docs/cn/ai-providers.md
index 25e5e31..fe2de2a 100644
--- a/docs/cn/ai-providers.md
+++ b/docs/cn/ai-providers.md
@@ -152,6 +152,19 @@ AI_PROVIDER=ollama
AI_MODEL=llama3.2
```
+### ModelScope
+
+```bash
+MODELSCOPE_API_KEY=your_api_key
+AI_MODEL=Qwen/Qwen3-235B-A22B-Instruct-2507
+```
+
+可选的自定义端点:
+
+```bash
+MODELSCOPE_BASE_URL=https://your-custom-endpoint
+```
+
可选的自定义 URL:
```bash
diff --git a/docs/en/ai-providers.md b/docs/en/ai-providers.md
index 0f0b0d1..0df82bf 100644
--- a/docs/en/ai-providers.md
+++ b/docs/en/ai-providers.md
@@ -158,6 +158,19 @@ Optional custom URL:
OLLAMA_BASE_URL=http://localhost:11434
```
+### ModelScope
+
+```bash
+MODELSCOPE_API_KEY=your_api_key
+AI_MODEL=Qwen/Qwen3-235B-A22B-Instruct-2507
+```
+
+Optional custom endpoint:
+
+```bash
+MODELSCOPE_BASE_URL=https://your-custom-endpoint
+```
+
### Vercel AI Gateway
Vercel AI Gateway provides unified access to multiple AI providers through a single API key. This simplifies authentication and allows you to switch between providers without managing multiple API keys.
@@ -201,7 +214,7 @@ If you only configure **one** provider's API key, the system will automatically
If you configure **multiple** API keys, you must explicitly set `AI_PROVIDER`:
```bash
-AI_PROVIDER=google # or: openai, anthropic, deepseek, siliconflow, doubao, azure, bedrock, openrouter, ollama, gateway, sglang
+AI_PROVIDER=google # or: openai, anthropic, deepseek, siliconflow, doubao, azure, bedrock, openrouter, ollama, gateway, sglang, modelscope
```
## Model Capability Requirements
diff --git a/docs/ja/README_JA.md b/docs/ja/README_JA.md
index 84f7f7e..db2267a 100644
--- a/docs/ja/README_JA.md
+++ b/docs/ja/README_JA.md
@@ -205,6 +205,7 @@ Next.jsアプリをデプロイする最も簡単な方法は、Next.jsの作成
- OpenRouter
- DeepSeek
- SiliconFlow
+- ModelScope
- SGLang
- Vercel AI Gateway
diff --git a/docs/ja/ai-providers.md b/docs/ja/ai-providers.md
index 63d23d6..a7ad931 100644
--- a/docs/ja/ai-providers.md
+++ b/docs/ja/ai-providers.md
@@ -158,6 +158,19 @@ AI_MODEL=llama3.2
OLLAMA_BASE_URL=http://localhost:11434
```
+### ModelScope
+
+```bash
+MODELSCOPE_API_KEY=your_api_key
+AI_MODEL=Qwen/Qwen3-235B-A22B-Instruct-2507
+```
+
+任意のカスタムエンドポイント:
+
+```bash
+MODELSCOPE_BASE_URL=https://your-custom-endpoint
+```
+
### Vercel AI Gateway
Vercel AI Gateway は、単一の API キーで複数の AI プロバイダーへの統合アクセスを提供します。これにより認証が簡素化され、複数の API キーを管理することなくプロバイダーを切り替えることができます。
diff --git a/electron/main/config-manager.ts b/electron/main/config-manager.ts
index 2a66d35..b1489e0 100644
--- a/electron/main/config-manager.ts
+++ b/electron/main/config-manager.ts
@@ -351,6 +351,10 @@ const PROVIDER_ENV_MAP: Record = {
apiKey: "SILICONFLOW_API_KEY",
baseUrl: "SILICONFLOW_BASE_URL",
},
+ modelscope: {
+ apiKey: "MODELSCOPE_API_KEY",
+ baseUrl: "MODELSCOPE_BASE_URL",
+ },
gateway: { apiKey: "AI_GATEWAY_API_KEY", baseUrl: "AI_GATEWAY_BASE_URL" },
// bedrock and ollama don't use API keys in the same way
bedrock: { apiKey: "", baseUrl: "" },
diff --git a/electron/settings/index.html b/electron/settings/index.html
index 81ad8ff..9f5fed1 100644
--- a/electron/settings/index.html
+++ b/electron/settings/index.html
@@ -55,6 +55,7 @@
+
diff --git a/electron/settings/settings.js b/electron/settings/settings.js
index 3e550c9..235b2fe 100644
--- a/electron/settings/settings.js
+++ b/electron/settings/settings.js
@@ -288,6 +288,7 @@ function getProviderLabel(provider) {
openrouter: "OpenRouter",
deepseek: "DeepSeek",
siliconflow: "SiliconFlow",
+ modelscope: "ModelScope",
ollama: "Ollama",
}
return labels[provider] || provider
diff --git a/env.example b/env.example
index a1b5269..fcd63f0 100644
--- a/env.example
+++ b/env.example
@@ -72,6 +72,10 @@ AI_MODEL=global.anthropic.claude-sonnet-4-5-20250929-v1:0
# SGLANG_API_KEY=your-sglang-api-key
# SGLANG_BASE_URL=http://127.0.0.1:8000/v1 # Your SGLang endpoint
+# ModelScope Configuration
+# MODELSCOPE_API_KEY=ms-...
+# MODELSCOPE_BASE_URL=https://api-inference.modelscope.cn/v1 # Optional: Custom endpoint
+
# ByteDance Doubao Configuration (via Volcengine)
# DOUBAO_API_KEY=your-doubao-api-key
# DOUBAO_BASE_URL=https://ark.cn-beijing.volces.com/api/v3 # ByteDance Volcengine endpoint
diff --git a/lib/ai-providers.ts b/lib/ai-providers.ts
index c4c8574..3513159 100644
--- a/lib/ai-providers.ts
+++ b/lib/ai-providers.ts
@@ -23,6 +23,7 @@ export type ProviderName =
| "gateway"
| "edgeone"
| "doubao"
+ | "modelscope"
interface ModelConfig {
model: any
@@ -59,6 +60,7 @@ const ALLOWED_CLIENT_PROVIDERS: ProviderName[] = [
"gateway",
"edgeone",
"doubao",
+ "modelscope",
]
// Bedrock provider options for Anthropic beta features
@@ -353,6 +355,7 @@ function buildProviderOptions(
case "siliconflow":
case "sglang":
case "gateway":
+ case "modelscope":
case "doubao": {
// These providers don't have reasoning configs in AI SDK yet
// Gateway passes through to underlying providers which handle their own configs
@@ -381,6 +384,7 @@ const PROVIDER_ENV_VARS: Record = {
gateway: "AI_GATEWAY_API_KEY",
edgeone: null, // No credentials needed - uses EdgeOne Edge AI
doubao: "DOUBAO_API_KEY",
+ modelscope: "MODELSCOPE_API_KEY",
}
/**
@@ -445,7 +449,7 @@ function validateProviderCredentials(provider: ProviderName): void {
* Get the AI model based on environment variables
*
* Environment variables:
- * - AI_PROVIDER: The provider to use (bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow, sglang, gateway)
+ * - AI_PROVIDER: The provider to use (bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow, sglang, gateway, edgeone, doubao, modelscope)
* - AI_MODEL: The model ID/name for the selected provider
*
* Provider-specific env vars:
@@ -463,6 +467,8 @@ function validateProviderCredentials(provider: ProviderName): void {
* - SILICONFLOW_BASE_URL: SiliconFlow endpoint (optional, defaults to https://api.siliconflow.com/v1)
* - SGLANG_API_KEY: SGLang API key
* - SGLANG_BASE_URL: SGLang endpoint (optional)
+ * - MODELSCOPE_API_KEY: ModelScope API key
+ * - MODELSCOPE_BASE_URL: ModelScope endpoint (optional)
*/
export function getAIModel(overrides?: ClientOverrides): ModelConfig {
// SECURITY: Prevent SSRF attacks (GHSA-9qf7-mprq-9qgm)
@@ -537,6 +543,7 @@ export function getAIModel(overrides?: ClientOverrides): ModelConfig {
`- AZURE_API_KEY for Azure\n` +
`- SILICONFLOW_API_KEY for SiliconFlow\n` +
`- SGLANG_API_KEY for SGLang\n` +
+ `- MODELSCOPE_API_KEY for ModelScope\n` +
`Or set AI_PROVIDER=ollama for local Ollama.`,
)
} else {
@@ -892,9 +899,23 @@ export function getAIModel(overrides?: ClientOverrides): ModelConfig {
break
}
+ case "modelscope": {
+ const apiKey = overrides?.apiKey || process.env.MODELSCOPE_API_KEY
+ const baseURL =
+ overrides?.baseUrl ||
+ process.env.MODELSCOPE_BASE_URL ||
+ "https://api-inference.modelscope.cn/v1"
+ const modelscopeProvider = createOpenAI({
+ apiKey,
+ baseURL,
+ })
+ model = modelscopeProvider.chat(modelId)
+ break
+ }
+
default:
throw new Error(
- `Unknown AI provider: ${provider}. Supported providers: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow, sglang, gateway, edgeone, doubao`,
+ `Unknown AI provider: ${provider}. Supported providers: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow, sglang, gateway, edgeone, doubao, modelscope`,
)
}
diff --git a/lib/i18n/dictionaries/en.json b/lib/i18n/dictionaries/en.json
index 1d845c7..027690a 100644
--- a/lib/i18n/dictionaries/en.json
+++ b/lib/i18n/dictionaries/en.json
@@ -28,7 +28,8 @@
"azure": "Azure OpenAI",
"openrouter": "OpenRouter",
"deepseek": "DeepSeek",
- "siliconflow": "SiliconFlow"
+ "siliconflow": "SiliconFlow",
+ "modelscope": "ModelScope"
},
"chat": {
"placeholder": "Describe your diagram or upload a file...",
diff --git a/lib/i18n/dictionaries/ja.json b/lib/i18n/dictionaries/ja.json
index f515f0c..522d979 100644
--- a/lib/i18n/dictionaries/ja.json
+++ b/lib/i18n/dictionaries/ja.json
@@ -28,7 +28,8 @@
"azure": "Azure OpenAI",
"openrouter": "OpenRouter",
"deepseek": "DeepSeek",
- "siliconflow": "SiliconFlow"
+ "siliconflow": "SiliconFlow",
+ "modelscope": "ModelScope"
},
"chat": {
"placeholder": "ダイアグラムを説明するか、ファイルをアップロード...",
diff --git a/lib/i18n/dictionaries/zh.json b/lib/i18n/dictionaries/zh.json
index adcbfa1..026e1f5 100644
--- a/lib/i18n/dictionaries/zh.json
+++ b/lib/i18n/dictionaries/zh.json
@@ -28,7 +28,8 @@
"azure": "Azure OpenAI",
"openrouter": "OpenRouter",
"deepseek": "DeepSeek",
- "siliconflow": "SiliconFlow"
+ "siliconflow": "SiliconFlow",
+ "modelscope": "ModelScope"
},
"chat": {
"placeholder": "描述您的图表或上传文件...",
diff --git a/lib/types/model-config.ts b/lib/types/model-config.ts
index 6ea73da..1699953 100644
--- a/lib/types/model-config.ts
+++ b/lib/types/model-config.ts
@@ -13,6 +13,7 @@ export type ProviderName =
| "gateway"
| "edgeone"
| "doubao"
+ | "modelscope"
// Individual model configuration
export interface ModelConfig {
@@ -91,6 +92,10 @@ export const PROVIDER_INFO: Record<
label: "Doubao (ByteDance)",
defaultBaseUrl: "https://ark.cn-beijing.volces.com/api/v3",
},
+ modelscope: {
+ label: "ModelScope",
+ defaultBaseUrl: "https://api-inference.modelscope.cn/v1",
+ },
}
// Suggested models per provider for quick add
@@ -231,6 +236,17 @@ export const SUGGESTED_MODELS: Record = {
"doubao-pro-32k-241215",
"doubao-pro-256k-241215",
],
+ modelscope: [
+ // Qwen
+ "Qwen/Qwen2.5-72B-Instruct",
+ "Qwen/Qwen2.5-32B-Instruct",
+ "Qwen/Qwen3-235B-A22B-Instruct-2507",
+ "Qwen/Qwen3-VL-235B-A22B-Instruct",
+ "Qwen/Qwen3-32B",
+ // DeepSeek
+ "deepseek-ai/DeepSeek-R1-0528",
+ "deepseek-ai/DeepSeek-V3.2",
+ ],
}
// Helper to generate UUID