Compare commits


2 Commits

Author        SHA1        Message                                                              Date
dayuan.jiang  2175e8e646  fix: update qs to fix high severity security vulnerability          2025-12-31 11:16:11 +09:00
E66Crisp      661a871d96  style(chat-panel): Improve aiChat label display in collapsed panel  2025-12-31 09:47:27 +08:00
12 changed files with 845 additions and 890 deletions

View File

@@ -28,8 +28,6 @@ export default function Home() {
   } = useDiagram()
   const router = useRouter()
   const pathname = usePathname()
-  // Extract current language from pathname (e.g., "/zh/about" → "zh")
-  const currentLang = (pathname.split("/")[1] || i18n.defaultLocale) as Locale
   const [isMobile, setIsMobile] = useState(false)
   const [isChatVisible, setIsChatVisible] = useState(true)
   const [drawioUi, setDrawioUi] = useState<"min" | "sketch">("min")
@@ -209,7 +207,7 @@ export default function Home() {
   <div className="h-full rounded-xl overflow-hidden shadow-soft-lg border border-border/30">
     {isLoaded ? (
       <DrawIoEmbed
-        key={`${drawioUi}-${darkMode}-${currentLang}`}
+        key={`${drawioUi}-${darkMode}`}
         ref={drawioRef}
         onExport={handleDiagramExport}
         onLoad={onDrawioLoad}
@@ -222,7 +220,6 @@ export default function Home() {
           saveAndExit: false,
           noExitBtn: true,
          dark: darkMode,
-          lang: currentLang,
         }}
       />
     ) : (

View File

@@ -12,11 +12,7 @@ import fs from "fs/promises"
 import { jsonrepair } from "jsonrepair"
 import path from "path"
 import { z } from "zod"
-import {
-  getAIModel,
-  supportsImageInput,
-  supportsPromptCaching,
-} from "@/lib/ai-providers"
+import { getAIModel, supportsPromptCaching } from "@/lib/ai-providers"
 import { findCachedResponse } from "@/lib/cached-responses"
 import {
   checkAndIncrementRequest,
@@ -299,17 +295,6 @@ async function handleChatRequest(req: Request): Promise<Response> {
     lastUserMessage?.parts?.filter((part: any) => part.type === "file") ||
     []
 
-  // Check if user is sending images to a model that doesn't support them
-  // AI SDK silently drops unsupported parts, so we need to catch this early
-  if (fileParts.length > 0 && !supportsImageInput(modelId)) {
-    return Response.json(
-      {
-        error: `The model "${modelId}" does not support image input. Please use a vision-capable model (e.g., GPT-4o, Claude, Gemini) or remove the image.`,
-      },
-      { status: 400 },
-    )
-  }
-
   // User input only - XML is now in a separate cached system message
   const formattedUserInput = `User input:
   """md

View File

@@ -1,5 +1,5 @@
 {
-  "$schema": "https://biomejs.dev/schemas/2.3.10/schema.json",
+  "$schema": "https://biomejs.dev/schemas/2.3.8/schema.json",
   "vcs": {
     "enabled": true,
     "clientKind": "git",

View File

@@ -283,7 +283,7 @@ export function ChatMessageDisplay({
   try {
     await navigator.clipboard.writeText(text)
     setCopyState(messageId, isToolCall, true)
-  } catch (_err) {
+  } catch (err) {
     // Fallback for non-secure contexts (HTTP) or permission denied
     const textarea = document.createElement("textarea")
     textarea.value = text

File diff suppressed because it is too large

View File

@@ -11,6 +11,7 @@ import {
   flattenModels,
   type ModelConfig,
   type MultiModelConfig,
+  PROVIDER_INFO,
   type ProviderConfig,
   type ProviderName,
 } from "@/lib/types/model-config"

View File

@@ -786,7 +786,7 @@ export function getAIModel(overrides?: ClientOverrides): ModelConfig {
             `data: ${JSON.stringify(data)}\n\n`,
           ),
         )
-      } catch (_e) {
+      } catch (e) {
         // If parsing fails, forward the original message to avoid breaking the stream.
         controller.enqueue(
           new TextEncoder().encode(
@@ -906,34 +906,3 @@ export function supportsPromptCaching(modelId: string): boolean {
     modelId.startsWith("eu.anthropic")
   )
 }
-
-/**
- * Check if a model supports image/vision input.
- * Some models silently drop image parts without error (AI SDK warning only).
- */
-export function supportsImageInput(modelId: string): boolean {
-  const lowerModelId = modelId.toLowerCase()
-
-  // Helper to check if model has vision capability indicator
-  const hasVisionIndicator =
-    lowerModelId.includes("vision") || lowerModelId.includes("vl")
-
-  // Models that DON'T support image/vision input (unless vision variant)
-  // Kimi K2 models don't support images
-  if (lowerModelId.includes("kimi") && !hasVisionIndicator) {
-    return false
-  }
-
-  // DeepSeek text models (not vision variants)
-  if (lowerModelId.includes("deepseek") && !hasVisionIndicator) {
-    return false
-  }
-
-  // Qwen text models (not vision variants like qwen-vl)
-  if (lowerModelId.includes("qwen") && !hasVisionIndicator) {
-    return false
-  }
-
-  // Default: assume model supports images
-  return true
-}

View File

@@ -1,18 +1,18 @@
 {
   "name": "@next-ai-drawio/mcp-server",
-  "version": "0.1.11",
+  "version": "0.1.6",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@next-ai-drawio/mcp-server",
-      "version": "0.1.11",
+      "version": "0.1.6",
       "license": "Apache-2.0",
       "dependencies": {
         "@modelcontextprotocol/sdk": "^1.0.4",
         "linkedom": "^0.18.0",
         "open": "^11.0.0",
-        "zod": "^4.0.0"
+        "zod": "^3.24.0"
       },
       "bin": {
         "next-ai-drawio-mcp": "dist/index.js"
@@ -2051,9 +2051,9 @@
       }
     },
     "node_modules/zod": {
-      "version": "4.3.4",
-      "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.4.tgz",
-      "integrity": "sha512-Zw/uYiiyF6pUT1qmKbZziChgNPRu+ZRneAsMUDU6IwmXdWt5JwcUfy2bvLOCUtz5UniaN/Zx5aFttZYbYc7O/A==",
+      "version": "3.25.76",
+      "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
+      "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
       "license": "MIT",
       "peer": true,
       "funding": {

View File

@@ -1,6 +1,6 @@
 {
   "name": "@next-ai-drawio/mcp-server",
-  "version": "0.1.11",
+  "version": "0.1.10",
   "description": "MCP server for Next AI Draw.io - AI-powered diagram generation with real-time browser preview",
   "type": "module",
   "main": "dist/index.js",
@@ -39,7 +39,7 @@
     "@modelcontextprotocol/sdk": "^1.0.4",
     "linkedom": "^0.18.0",
     "open": "^11.0.0",
-    "zod": "^4.0.0"
+    "zod": "^3.24.0"
   },
   "devDependencies": {
     "@types/node": "^24.0.0",

View File

@@ -127,12 +127,7 @@ function cleanupExpiredSessions(): void {
   }
 }
 
-const cleanupIntervalId = setInterval(cleanupExpiredSessions, 5 * 60 * 1000)
-
-export function shutdown(): void {
-  clearInterval(cleanupIntervalId)
-  stopHttpServer()
-}
+setInterval(cleanupExpiredSessions, 5 * 60 * 1000)
 
 export function getServerPort(): number {
   return serverPort

View File

@@ -39,7 +39,6 @@ import {
   getState,
   requestSync,
   setState,
-  shutdown,
   startHttpServer,
   waitForSync,
 } from "./http-server.js"
@@ -48,7 +47,7 @@ import { validateAndFixXml } from "./xml-validation.js"
 
 // Server configuration
 const config = {
-  port: parseInt(process.env.PORT || "6002", 10),
+  port: parseInt(process.env.PORT || "6002"),
 }
 
 // Session state (single session for simplicity)
@@ -619,31 +618,6 @@
   },
 )
 
-// Graceful shutdown handler
-let isShuttingDown = false
-function gracefulShutdown(reason: string) {
-  if (isShuttingDown) return
-  isShuttingDown = true
-  log.info(`Shutting down: ${reason}`)
-  shutdown()
-  process.exit(0)
-}
-
-// Handle stdin close (primary method - works on all platforms including Windows)
-process.stdin.on("close", () => gracefulShutdown("stdin closed"))
-process.stdin.on("end", () => gracefulShutdown("stdin ended"))
-
-// Handle signals (may not work reliably on Windows)
-process.on("SIGINT", () => gracefulShutdown("SIGINT"))
-process.on("SIGTERM", () => gracefulShutdown("SIGTERM"))
-
-// Handle broken pipe (writing to closed stdout)
-process.stdout.on("error", (err) => {
-  if (err.code === "EPIPE" || err.code === "ERR_STREAM_DESTROYED") {
-    gracefulShutdown("stdout error")
-  }
-})
-
 // Start the MCP server
 async function main() {
   log.info("Starting MCP server for Next AI Draw.io (embedded mode)...")

View File

@@ -253,7 +253,7 @@ async function main() {
     },
   )
   console.log("👀 Watching for preset configuration changes...")
-} catch (_err) {
+} catch (err) {
   // File might not exist yet, that's ok
   setTimeout(setupConfigWatcher, 5000)
 }
} }