Mirror of https://github.com/DayuanJiang/next-ai-draw-io.git (synced 2026-01-02 22:32:27 +08:00)

Compare commits: 3e053bc904...fix-aichat (2 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 2175e8e646 | |
| | 661a871d96 | |

@@ -28,8 +28,6 @@ export default function Home() {
  } = useDiagram()
  const router = useRouter()
  const pathname = usePathname()
- // Extract current language from pathname (e.g., "/zh/about" → "zh")
- const currentLang = (pathname.split("/")[1] || i18n.defaultLocale) as Locale
  const [isMobile, setIsMobile] = useState(false)
  const [isChatVisible, setIsChatVisible] = useState(true)
  const [drawioUi, setDrawioUi] = useState<"min" | "sketch">("min")

@@ -209,7 +207,7 @@ export default function Home() {
  <div className="h-full rounded-xl overflow-hidden shadow-soft-lg border border-border/30">
  {isLoaded ? (
  <DrawIoEmbed
- key={`${drawioUi}-${darkMode}-${currentLang}`}
+ key={`${drawioUi}-${darkMode}`}
  ref={drawioRef}
  onExport={handleDiagramExport}
  onLoad={onDrawioLoad}

@@ -222,7 +220,6 @@ export default function Home() {
  saveAndExit: false,
  noExitBtn: true,
  dark: darkMode,
- lang: currentLang,
  }}
  />
  ) : (

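Context for the hunks above: the `key` on `<DrawIoEmbed>` is what forces the embedded editor to be torn down and recreated whenever the UI mode or theme changes, and dropping `currentLang` from it (together with the removed `lang` option) means a language switch no longer triggers a reload. A minimal sketch of the React key-remount pattern, with illustrative names rather than the repo's:

```tsx
import { useState } from "react"

// Stands in for DrawIoEmbed: any stateful child that must be rebuilt on config changes.
function EmbeddedEditor({ theme }: { theme: "dark" | "light" }) {
    return <iframe title={`editor-${theme}`} src="https://embed.diagrams.net" />
}

export function EditorHost() {
    const [darkMode, setDarkMode] = useState(false)
    const theme = darkMode ? "dark" : "light"
    return (
        <div>
            {/* Changing `key` unmounts the old editor and mounts a fresh one,
                so the embed reloads with the new theme applied. */}
            <EmbeddedEditor key={theme} theme={theme} />
            <button onClick={() => setDarkMode((d) => !d)}>Toggle theme</button>
        </div>
    )
}
```
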
@@ -12,11 +12,7 @@ import fs from "fs/promises"
  import { jsonrepair } from "jsonrepair"
  import path from "path"
  import { z } from "zod"
- import {
- getAIModel,
- supportsImageInput,
- supportsPromptCaching,
- } from "@/lib/ai-providers"
+ import { getAIModel, supportsPromptCaching } from "@/lib/ai-providers"
  import { findCachedResponse } from "@/lib/cached-responses"
  import {
  checkAndIncrementRequest,

@@ -299,17 +295,6 @@ async function handleChatRequest(req: Request): Promise<Response> {
  lastUserMessage?.parts?.filter((part: any) => part.type === "file") ||
  []

- // Check if user is sending images to a model that doesn't support them
- // AI SDK silently drops unsupported parts, so we need to catch this early
- if (fileParts.length > 0 && !supportsImageInput(modelId)) {
- return Response.json(
- {
- error: `The model "${modelId}" does not support image input. Please use a vision-capable model (e.g., GPT-4o, Claude, Gemini) or remove the image.`,
- },
- { status: 400 },
- )
- }
-
  // User input only - XML is now in a separate cached system message
  const formattedUserInput = `User input:
  """md

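The guard removed in this hunk rejected image attachments sent to text-only models with an explicit 400 instead of letting the AI SDK drop the parts silently. A sketch of how a caller could surface that error; the `/api/chat` path and request shape are assumptions for illustration, not taken from the diff:

```ts
// Hypothetical client helper; the endpoint path and body shape are assumed.
async function sendChat(messages: unknown[], modelId: string): Promise<Response> {
    const res = await fetch("/api/chat", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ messages, modelId }),
    })
    if (res.status === 400) {
        // The removed guard responded with { error: "The model ... does not support image input ..." }
        const { error } = await res.json()
        throw new Error(error)
    }
    return res
}
```
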
@@ -1,5 +1,5 @@
  {
- "$schema": "https://biomejs.dev/schemas/2.3.10/schema.json",
+ "$schema": "https://biomejs.dev/schemas/2.3.8/schema.json",
  "vcs": {
  "enabled": true,
  "clientKind": "git",

@@ -283,7 +283,7 @@ export function ChatMessageDisplay({
  try {
  await navigator.clipboard.writeText(text)
  setCopyState(messageId, isToolCall, true)
- } catch (_err) {
+ } catch (err) {
  // Fallback for non-secure contexts (HTTP) or permission denied
  const textarea = document.createElement("textarea")
  textarea.value = text

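`navigator.clipboard.writeText` only exists in secure contexts, so the `catch` branch above falls back to a hidden `<textarea>`. The hunk stops at `textarea.value = text`; the usual completion of that fallback looks roughly like this (a sketch of the common pattern, not necessarily the repo's exact code):

```ts
// Sketch: returns true when the legacy copy path succeeds.
function legacyCopy(text: string): boolean {
    const textarea = document.createElement("textarea")
    textarea.value = text
    // Keep the element out of view but still selectable.
    textarea.style.position = "fixed"
    textarea.style.opacity = "0"
    document.body.appendChild(textarea)
    textarea.select()
    try {
        // Deprecated, but still the standard fallback for HTTP contexts.
        return document.execCommand("copy")
    } finally {
        document.body.removeChild(textarea)
    }
}
```
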
File diff suppressed because it is too large

@@ -11,6 +11,7 @@ import {
  flattenModels,
  type ModelConfig,
  type MultiModelConfig,
  PROVIDER_INFO,
  type ProviderConfig,
  type ProviderName,
  } from "@/lib/types/model-config"

@@ -786,7 +786,7 @@ export function getAIModel(overrides?: ClientOverrides): ModelConfig {
  `data: ${JSON.stringify(data)}\n\n`,
  ),
  )
- } catch (_e) {
+ } catch (e) {
  // If parsing fails, forward the original message to avoid breaking the stream.
  controller.enqueue(
  new TextEncoder().encode(

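The code around this hunk re-emits parsed events as `data: ...` SSE frames and, per the comment, forwards the original bytes whenever parsing fails so the stream never breaks mid-response. A minimal sketch of that parse-or-forward shape using a web `TransformStream`; the framing and names are illustrative, not lifted from the file:

```ts
const encoder = new TextEncoder()
const decoder = new TextDecoder()

// Rewrites chunks that parse as JSON, passes anything else through untouched.
const parseOrForward = new TransformStream<Uint8Array, Uint8Array>({
    transform(chunk, controller) {
        const text = decoder.decode(chunk, { stream: true })
        try {
            const data = JSON.parse(text.replace(/^data: /, "").trim())
            controller.enqueue(encoder.encode(`data: ${JSON.stringify(data)}\n\n`))
        } catch {
            // If parsing fails, forward the original message to avoid breaking the stream.
            controller.enqueue(chunk)
        }
    },
})
```
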
@@ -906,34 +906,3 @@ export function supportsPromptCaching(modelId: string): boolean {
  modelId.startsWith("eu.anthropic")
  )
  }
-
- /**
-  * Check if a model supports image/vision input.
-  * Some models silently drop image parts without error (AI SDK warning only).
-  */
- export function supportsImageInput(modelId: string): boolean {
- const lowerModelId = modelId.toLowerCase()
-
- // Helper to check if model has vision capability indicator
- const hasVisionIndicator =
- lowerModelId.includes("vision") || lowerModelId.includes("vl")
-
- // Models that DON'T support image/vision input (unless vision variant)
- // Kimi K2 models don't support images
- if (lowerModelId.includes("kimi") && !hasVisionIndicator) {
- return false
- }
-
- // DeepSeek text models (not vision variants)
- if (lowerModelId.includes("deepseek") && !hasVisionIndicator) {
- return false
- }
-
- // Qwen text models (not vision variants like qwen-vl)
- if (lowerModelId.includes("qwen") && !hasVisionIndicator) {
- return false
- }
-
- // Default: assume model supports images
- return true
- }

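`supportsImageInput` is removed here along with its only call site in the chat route. The function is a pure name-based heuristic, so its behaviour follows directly from the checks above; for example (model IDs below are illustrative):

```ts
supportsImageInput("kimi-k2-instruct") // false: contains "kimi", no vision marker
supportsImageInput("deepseek-chat")    // false: contains "deepseek", no vision marker
supportsImageInput("qwen-max")         // false: contains "qwen", no vision marker
supportsImageInput("qwen-vl-plus")     // true: "vl" counts as a vision indicator
supportsImageInput("gpt-4o")           // true: default is to assume image support
```
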
packages/mcp-server/package-lock.json (generated, 12 lines changed)

@@ -1,18 +1,18 @@
  {
  "name": "@next-ai-drawio/mcp-server",
- "version": "0.1.11",
+ "version": "0.1.6",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
  "": {
  "name": "@next-ai-drawio/mcp-server",
- "version": "0.1.11",
+ "version": "0.1.6",
  "license": "Apache-2.0",
  "dependencies": {
  "@modelcontextprotocol/sdk": "^1.0.4",
  "linkedom": "^0.18.0",
  "open": "^11.0.0",
- "zod": "^4.0.0"
+ "zod": "^3.24.0"
  },
  "bin": {
  "next-ai-drawio-mcp": "dist/index.js"

@@ -2051,9 +2051,9 @@
  }
  },
  "node_modules/zod": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.4.tgz",
- "integrity": "sha512-Zw/uYiiyF6pUT1qmKbZziChgNPRu+ZRneAsMUDU6IwmXdWt5JwcUfy2bvLOCUtz5UniaN/Zx5aFttZYbYc7O/A==",
+ "version": "3.25.76",
+ "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
+ "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
  "license": "MIT",
  "peer": true,
  "funding": {

@@ -1,6 +1,6 @@
  {
  "name": "@next-ai-drawio/mcp-server",
- "version": "0.1.11",
+ "version": "0.1.10",
  "description": "MCP server for Next AI Draw.io - AI-powered diagram generation with real-time browser preview",
  "type": "module",
  "main": "dist/index.js",

@@ -39,7 +39,7 @@
  "@modelcontextprotocol/sdk": "^1.0.4",
  "linkedom": "^0.18.0",
  "open": "^11.0.0",
- "zod": "^4.0.0"
+ "zod": "^3.24.0"
  },
  "devDependencies": {
  "@types/node": "^24.0.0",

@@ -127,12 +127,7 @@ function cleanupExpiredSessions(): void {
  }
  }

- const cleanupIntervalId = setInterval(cleanupExpiredSessions, 5 * 60 * 1000)
-
- export function shutdown(): void {
- clearInterval(cleanupIntervalId)
- stopHttpServer()
- }
+ setInterval(cleanupExpiredSessions, 5 * 60 * 1000)

  export function getServerPort(): number {
  return serverPort

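The left side keeps the interval handle so `shutdown()` can clear it; the right side discards it. In Node, an active interval keeps the event loop alive, so a process whose only remaining work is a forgotten timer may not exit cleanly on its own. A small sketch of the two standard ways to avoid that, using plain Node timers:

```ts
function cleanupExpiredSessions(): void {
    // placeholder for the real session sweep
}

// Option 1: keep the handle and clear it during shutdown (what the left side does).
const cleanupIntervalId = setInterval(cleanupExpiredSessions, 5 * 60 * 1000)
export function shutdown(): void {
    clearInterval(cleanupIntervalId)
}

// Option 2: unref() the timer so it never blocks process exit by itself.
setInterval(cleanupExpiredSessions, 5 * 60 * 1000).unref()
```
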
@@ -39,7 +39,6 @@ import {
  getState,
  requestSync,
  setState,
- shutdown,
  startHttpServer,
  waitForSync,
  } from "./http-server.js"

@@ -48,7 +47,7 @@ import { validateAndFixXml } from "./xml-validation.js"

  // Server configuration
  const config = {
- port: parseInt(process.env.PORT || "6002", 10),
+ port: parseInt(process.env.PORT || "6002"),
  }

  // Session state (single session for simplicity)

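The dropped radix argument only matters when the input is not a plain decimal string; for a value like `PORT=6002` both forms give the same result, while the explicit `10` protects against hex-looking input:

```ts
parseInt("6002")       // 6002
parseInt("6002", 10)   // 6002
parseInt("0x1F40")     // 8000: hex is auto-detected when no radix is given
parseInt("0x1F40", 10) // 0: base-10 parsing stops at the "x"
```
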
@@ -619,31 +618,6 @@ server.registerTool(
  },
  )

- // Graceful shutdown handler
- let isShuttingDown = false
- function gracefulShutdown(reason: string) {
- if (isShuttingDown) return
- isShuttingDown = true
- log.info(`Shutting down: ${reason}`)
- shutdown()
- process.exit(0)
- }
-
- // Handle stdin close (primary method - works on all platforms including Windows)
- process.stdin.on("close", () => gracefulShutdown("stdin closed"))
- process.stdin.on("end", () => gracefulShutdown("stdin ended"))
-
- // Handle signals (may not work reliably on Windows)
- process.on("SIGINT", () => gracefulShutdown("SIGINT"))
- process.on("SIGTERM", () => gracefulShutdown("SIGTERM"))
-
- // Handle broken pipe (writing to closed stdout)
- process.stdout.on("error", (err) => {
- if (err.code === "EPIPE" || err.code === "ERR_STREAM_DESTROYED") {
- gracefulShutdown("stdout error")
- }
- })
-
  // Start the MCP server
  async function main() {
  log.info("Starting MCP server for Next AI Draw.io (embedded mode)...")

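The deleted block wired shutdown to stdin closing because, for a stdio-spawned MCP server, the parent closing the pipe is the one termination signal that arrives reliably on every platform, including Windows where SIGINT/SIGTERM handling is less dependable. A quick way a parent process could exercise that path (a test sketch; the built entrypoint path is an assumption):

```ts
import { spawn } from "node:child_process"

// Assumed entrypoint; adjust to wherever the server is built.
const child = spawn("node", ["dist/index.js"], { stdio: ["pipe", "pipe", "inherit"] })

child.on("exit", (code) => {
    console.log(`server exited with code ${code}`)
})

// Closing the child's stdin should trigger the "stdin closed" graceful shutdown.
setTimeout(() => child.stdin?.end(), 1000)
```
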
@@ -253,7 +253,7 @@ async function main() {
  },
  )
  console.log("👀 Watching for preset configuration changes...")
- } catch (_err) {
+ } catch (err) {
  // File might not exist yet, that's ok
  setTimeout(setupConfigWatcher, 5000)
  }

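The retry in the `catch` branch exists because the preset file may not be on disk yet when the watcher is first set up. The general retry-until-watchable shape looks like this (a sketch with an assumed file path, not the repo's implementation):

```ts
import fs from "node:fs"

const CONFIG_PATH = "./presets.json" // assumed path, for illustration only

function setupConfigWatcher(): void {
    try {
        fs.watch(CONFIG_PATH, () => {
            // reload presets here
        })
        console.log("👀 Watching for preset configuration changes...")
    } catch {
        // File might not exist yet, that's ok: try again shortly.
        setTimeout(setupConfigWatcher, 5000)
    }
}

setupConfigWatcher()
```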