mirror of https://github.com/DayuanJiang/next-ai-draw-io.git
synced 2026-01-02 22:32:27 +08:00
Compare commits
2 Commits
fix/image-...
7bdc1fe612

| Author | SHA1 | Date |
|---|---|---|
| | 7bdc1fe612 | |
| | 03ac9a79de | |
```diff
@@ -12,7 +12,11 @@ import fs from "fs/promises"
 import { jsonrepair } from "jsonrepair"
 import path from "path"
 import { z } from "zod"
-import { getAIModel, supportsPromptCaching } from "@/lib/ai-providers"
+import {
+    getAIModel,
+    supportsImageInput,
+    supportsPromptCaching,
+} from "@/lib/ai-providers"
 import { findCachedResponse } from "@/lib/cached-responses"
 import {
     checkAndIncrementRequest,
```
```diff
@@ -295,6 +299,17 @@ async function handleChatRequest(req: Request): Promise<Response> {
         lastUserMessage?.parts?.filter((part: any) => part.type === "file") ||
         []

+    // Check if user is sending images to a model that doesn't support them
+    // AI SDK silently drops unsupported parts, so we need to catch this early
+    if (fileParts.length > 0 && !supportsImageInput(modelId)) {
+        return Response.json(
+            {
+                error: `The model "${modelId}" does not support image input. Please use a vision-capable model (e.g., GPT-4o, Claude, Gemini) or remove the image.`,
+            },
+            { status: 400 },
+        )
+    }
+
     // User input only - XML is now in a separate cached system message
     const formattedUserInput = `User input:
 """md
```
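From the caller's side, the effect of this guard is an immediate HTTP 400 instead of a silently dropped image. A rough sketch under assumptions: the endpoint path (`/api/chat` here) and the exact request body shape are not shown in this diff; only the `parts` / `type: "file"` structure and the 400 + `{ error }` response are.

```ts
// Assumptions: the route is mounted at /api/chat and accepts a JSON body with
// `modelId` and AI SDK-style `messages`; only the `type: "file"` parts and the
// 400 + { error } response come from the diff itself.
const res = await fetch("/api/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
        modelId: "deepseek-chat", // text-only per supportsImageInput()
        messages: [
            {
                role: "user",
                parts: [
                    { type: "text", text: "Turn this sketch into a flowchart" },
                    { type: "file", mediaType: "image/png", url: "data:image/png;base64,..." },
                ],
            },
        ],
    }),
})

if (res.status === 400) {
    const { error } = await res.json()
    console.warn(error) // 'The model "deepseek-chat" does not support image input. ...'
}
```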
```diff
@@ -906,3 +906,34 @@ export function supportsPromptCaching(modelId: string): boolean {
         modelId.startsWith("eu.anthropic")
     )
 }
+
+/**
+ * Check if a model supports image/vision input.
+ * Some models silently drop image parts without error (AI SDK warning only).
+ */
+export function supportsImageInput(modelId: string): boolean {
+    const lowerModelId = modelId.toLowerCase()
+
+    // Helper to check if model has vision capability indicator
+    const hasVisionIndicator =
+        lowerModelId.includes("vision") || lowerModelId.includes("vl")
+
+    // Models that DON'T support image/vision input (unless vision variant)
+    // Kimi K2 models don't support images
+    if (lowerModelId.includes("kimi") && !hasVisionIndicator) {
+        return false
+    }
+
+    // DeepSeek text models (not vision variants)
+    if (lowerModelId.includes("deepseek") && !hasVisionIndicator) {
+        return false
+    }
+
+    // Qwen text models (not vision variants like qwen-vl)
+    if (lowerModelId.includes("qwen") && !hasVisionIndicator) {
+        return false
+    }
+
+    // Default: assume model supports images
+    return true
+}
```
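A quick usage sketch of the new helper; the model IDs below are illustrative, but the expected results follow directly from the substring checks in the function:

```ts
import { supportsImageInput } from "@/lib/ai-providers"

console.log(supportsImageInput("deepseek-chat"))    // false (text-only family)
console.log(supportsImageInput("qwen-max"))         // false
console.log(supportsImageInput("qwen-vl-plus"))     // true  ("vl" counts as a vision indicator)
console.log(supportsImageInput("kimi-k2-instruct")) // false
console.log(supportsImageInput("gpt-4o"))           // true  (default: assume support)
```

Note that the vision check is a plain substring match, so any model ID containing `vl` or `vision` is treated as vision-capable.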
```diff
@@ -1,6 +1,6 @@
 {
     "name": "@next-ai-drawio/mcp-server",
-    "version": "0.1.10",
+    "version": "0.1.11",
     "description": "MCP server for Next AI Draw.io - AI-powered diagram generation with real-time browser preview",
     "type": "module",
     "main": "dist/index.js",
```
```diff
@@ -127,7 +127,12 @@ function cleanupExpiredSessions(): void {
     }
 }

-setInterval(cleanupExpiredSessions, 5 * 60 * 1000)
+const cleanupIntervalId = setInterval(cleanupExpiredSessions, 5 * 60 * 1000)
+
+export function shutdown(): void {
+    clearInterval(cleanupIntervalId)
+    stopHttpServer()
+}

 export function getServerPort(): number {
     return serverPort
```
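Why capture the interval handle: an active `setInterval` keeps the Node.js event loop alive, so a long-lived timer left running would stop the process from winding down on its own; `clearInterval` plus `stopHttpServer()` gives the graceful-shutdown path added in the later hunks a clean exit. A standalone sketch of the same pattern, with generic names that are not from the repo:

```ts
// Standalone illustration: a periodic task whose handle is kept so it can be
// cleared later, mirroring the cleanupIntervalId / shutdown() change above.
function startJanitor(task: () => void, everyMs: number) {
    const id = setInterval(task, everyMs)
    return {
        stop() {
            // Without this, the live timer keeps the event loop (and process) alive.
            clearInterval(id)
        },
    }
}

const janitor = startJanitor(() => console.log("cleanup tick"), 5 * 60 * 1000)
// ...later, on shutdown:
janitor.stop()
```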
```diff
@@ -39,6 +39,7 @@ import {
     getState,
     requestSync,
     setState,
+    shutdown,
     startHttpServer,
     waitForSync,
 } from "./http-server.js"
```
```diff
@@ -618,6 +619,31 @@ server.registerTool(
     },
 )

+// Graceful shutdown handler
+let isShuttingDown = false
+function gracefulShutdown(reason: string) {
+    if (isShuttingDown) return
+    isShuttingDown = true
+    log.info(`Shutting down: ${reason}`)
+    shutdown()
+    process.exit(0)
+}
+
+// Handle stdin close (primary method - works on all platforms including Windows)
+process.stdin.on("close", () => gracefulShutdown("stdin closed"))
+process.stdin.on("end", () => gracefulShutdown("stdin ended"))
+
+// Handle signals (may not work reliably on Windows)
+process.on("SIGINT", () => gracefulShutdown("SIGINT"))
+process.on("SIGTERM", () => gracefulShutdown("SIGTERM"))
+
+// Handle broken pipe (writing to closed stdout)
+process.stdout.on("error", (err) => {
+    if (err.code === "EPIPE" || err.code === "ERR_STREAM_DESTROYED") {
+        gracefulShutdown("stdout error")
+    }
+})
+
 // Start the MCP server
 async function main() {
     log.info("Starting MCP server for Next AI Draw.io (embedded mode)...")
```
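The stdin handlers are the portable piece: when the MCP client that spawned this server exits, the child's stdin reaches end-of-stream, and that fires even where SIGINT/SIGTERM delivery is unreliable (notably Windows). A hedged sketch of a parent process exercising that path; the `dist/index.js` entry point comes from the package.json hunk above, everything else is illustrative:

```ts
// Hypothetical parent (e.g. an MCP client) spawning the server over stdio.
// Closing the write end of the pipe should drive gracefulShutdown("stdin ended").
import { spawn } from "node:child_process"

const child = spawn("node", ["dist/index.js"], {
    stdio: ["pipe", "pipe", "inherit"],
})

child.on("exit", (code) => console.log(`mcp server exited with code ${code}`))

// Simulate the client going away:
setTimeout(() => child.stdin?.end(), 5_000)
```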