feat: support minimax model (#185)

* feat: support minimax model with XML wrapping fix

- Add wrapWithMxFile utility to properly wrap XML for draw.io
- Fix 'Not a diagram file' error when model generates raw <root> XML
- Add supportsPromptCaching check for conditional caching
- Only enable Bedrock prompt caching for Claude models

* docs: update model mention to minimax-m2 across About pages and READMEs

- Update tooltip in chat-panel.tsx to mention minimax-m2 model change
- Update English, Chinese, and Japanese About pages with model change info
- Update English, Chinese, and Japanese READMEs with demo site model note

---------

Co-authored-by: dayuan.jiang <jiangdy@amazon.co.jp>
Authored by Dayuan Jiang on 2025-12-09 15:53:59 +09:00, committed by GitHub
parent 914e914423
commit 967d63c57e
10 changed files with 98 additions and 18 deletions

View File

@@ -94,7 +94,7 @@ No installation needed! Try the app directly on our demo site:
[![Live Demo](./public/live-demo-button.svg)](https://next-ai-drawio.jiang.jp/)
> Note: Due to high traffic, the demo site currently uses Claude Haiku 4.5. For best results, we recommend self-hosting with Claude Opus 4.5.
> Note: Due to high traffic, the demo site currently uses minimax-m2. For best results, we recommend self-hosting with Claude Sonnet 4.5 or Claude Opus 4.5.
### Run with Docker (Recommended)

View File

@@ -102,7 +102,7 @@ export default function AboutCN() {
[Chinese About page: the section heading is updated to mention the model change, mirroring the English page below; the CJK text was not preserved in this capture.]
@@ -116,13 +116,20 @@ export default function AboutCN() {
[Chinese About page: after the paragraph on AI API rate limits (TPS/TPM), a new paragraph is added stating that, due to high usage, the model was changed from Claude to minimax-m2; the existing paragraph about the developer covering the API bill follows. The CJK text was not preserved in this capture.]

View File

@@ -110,7 +110,7 @@ export default function AboutJA() {
[Japanese About page: the section heading is updated to mention the model change, mirroring the English page below; the Japanese text was not preserved in this capture.]
@@ -124,13 +124,21 @@ export default function AboutJA() {
[Japanese About page: after the paragraph on AI API rate limits (TPS/TPM), a new paragraph is added stating that the model was changed from Claude to minimax-m2 due to high usage; the existing paragraph about the developer covering the API bill follows. The Japanese text was not preserved in this capture.]

View File

@@ -110,7 +110,7 @@ export default function About() {
{/* Header */}
<div className="mb-4">
<h3 className="text-lg font-bold text-gray-900 tracking-tight">
Usage Limits & Scaling{" "}
Model Change & Usage Limits{" "}
<span className="text-sm text-amber-600 font-medium italic font-normal">
(Or: Why My Wallet is Crying)
</span>
@@ -127,6 +127,14 @@ export default function About() {
(TPS/TPM). When this happens, the system
pauses, leading to failed requests.
</p>
<p>
Due to the high usage, I have changed the
model from Claude to{" "}
<span className="font-semibold text-amber-700">
minimax-m2
</span>
, which is more cost-effective.
</p>
<p>
As an{" "}
<span className="font-semibold text-amber-700">
@@ -135,7 +143,7 @@ export default function About() {
, I am currently footing the entire API
bill. To keep the lights on and ensure the
service remains available to everyone
without sending me into debt, I have
without sending me into debt, I have also
implemented the following temporary caps:
</p>
</div>

View File

@@ -8,7 +8,7 @@ import {
streamText,
} from "ai"
import { z } from "zod"
import { getAIModel } from "@/lib/ai-providers"
import { getAIModel, supportsPromptCaching } from "@/lib/ai-providers"
import { findCachedResponse } from "@/lib/cached-responses"
import {
getTelemetryConfig,
@@ -202,6 +202,12 @@ async function handleChatRequest(req: Request): Promise<Response> {
// Get AI model from environment configuration
const { model, providerOptions, headers, modelId } = getAIModel()
// Check if model supports prompt caching
const shouldCache = supportsPromptCaching(modelId)
console.log(
`[Prompt Caching] ${shouldCache ? "ENABLED" : "DISABLED"} for model: ${modelId}`,
)
// Get the appropriate system prompt based on model (extended for Opus/Haiku 4.5)
const systemMessage = getSystemPrompt(modelId)
@@ -262,7 +268,7 @@ ${lastMessageText}
// Add cache point to the last assistant message in conversation history
// This caches the entire conversation prefix for subsequent requests
// Strategy: system (cached) + history with last assistant (cached) + new user message
if (enhancedMessages.length >= 2) {
if (shouldCache && enhancedMessages.length >= 2) {
// Find the last assistant message (should be second-to-last, before current user message)
for (let i = enhancedMessages.length - 2; i >= 0; i--) {
if (enhancedMessages[i].role === "assistant") {
@@ -287,17 +293,21 @@ ${lastMessageText}
{
role: "system" as const,
content: systemMessage,
...(shouldCache && {
providerOptions: {
bedrock: { cachePoint: { type: "default" } },
},
}),
},
// Cache breakpoint 2: Current diagram XML context
{
role: "system" as const,
content: `Current diagram XML:\n"""xml\n${xml || ""}\n"""\nWhen using edit_diagram, COPY search patterns exactly from this XML - attribute order matters!`,
...(shouldCache && {
providerOptions: {
bedrock: { cachePoint: { type: "default" } },
},
}),
},
]
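
The conditional spread above works because spreading a falsy value into an object literal adds no properties, so non-caching models simply get a plain system message. A minimal TypeScript sketch (the values below are hypothetical, not taken from this repo):

// Spreading `false` is a no-op, so the cachePoint only appears when the model supports caching.
const shouldCache = false // e.g. supportsPromptCaching("minimax-m2") would return false
const systemEntry = {
  role: "system" as const,
  content: "You are a diagram assistant.",
  ...(shouldCache && {
    providerOptions: { bedrock: { cachePoint: { type: "default" } } },
  }),
}
// shouldCache === false -> { role, content }
// shouldCache === true  -> { role, content, providerOptions: { bedrock: { cachePoint: { type: "default" } } } }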

View File

@@ -40,7 +40,7 @@ const STORAGE_TPM_MINUTE_KEY = "next-ai-draw-io-tpm-minute"
import { useDiagram } from "@/contexts/diagram-context"
import { findCachedResponse } from "@/lib/cached-responses"
import { formatXML } from "@/lib/utils"
import { formatXML, wrapWithMxFile } from "@/lib/utils"
import { ChatMessageDisplay } from "./chat-message-display"
interface ChatPanelProps {
@@ -340,8 +340,11 @@ export default function ChatPanel({
if (toolCall.toolName === "display_diagram") {
const { xml } = toolCall.input as { xml: string }
// Wrap raw XML with full mxfile structure for draw.io
const fullXml = wrapWithMxFile(xml)
// loadDiagram validates and returns error if invalid
const validationError = onDisplayChart(xml)
const validationError = onDisplayChart(fullXml)
if (validationError) {
console.warn(
@@ -1058,7 +1061,7 @@ Please retry with an adjusted search pattern or use display_diagram if retries a
rel="noopener noreferrer"
>
<ButtonWithTooltip
tooltipContent="Due to high usage, I have added usage limits. See About page for details."
tooltipContent="Due to high usage, I have changed the model to minimax-m2 and added some usage limits. See About page for details."
variant="ghost"
size="icon"
className="h-6 w-6 text-amber-500 hover:text-amber-600"

View File

@@ -13,6 +13,8 @@
[🚀 在线演示](https://next-ai-drawio.jiang.jp/)
> 注意:由于访问量较大,演示站点目前使用 minimax-m2 模型。如需获得最佳效果,建议使用 Claude Sonnet 4.5 或 Claude Opus 4.5 自行部署。
</div>
一个集成了AI功能的Next.js网页应用与draw.io图表无缝结合。通过自然语言命令和AI辅助可视化来创建、修改和增强图表。

View File

@@ -13,6 +13,8 @@
[🚀 ライブデモ](https://next-ai-drawio.jiang.jp/)
> 注意:アクセス数が多いため、デモサイトでは現在 minimax-m2 モデルを使用しています。最高の結果を得るには、Claude Sonnet 4.5 または Claude Opus 4.5 でのセルフホスティングをお勧めします。
</div>
AI機能とdraw.ioダイアグラムを統合したNext.jsウェブアプリケーションです。自然言語コマンドとAI支援の可視化により、ダイアグラムを作成、修正、強化できます。

View File

@@ -283,3 +283,17 @@ export function getAIModel(): ModelConfig {
return { model, providerOptions, headers, modelId }
}
/**
* Check if a model supports prompt caching.
* Currently only Claude models on Bedrock support prompt caching.
*/
export function supportsPromptCaching(modelId: string): boolean {
// Bedrock prompt caching is supported for Claude models
return (
modelId.includes("claude") ||
modelId.includes("anthropic") ||
modelId.startsWith("us.anthropic") ||
modelId.startsWith("eu.anthropic")
)
}
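
For illustration, a few calls with hypothetical model IDs (not values taken from this repo) and the results the check would give:

// Hypothetical IDs, shown only to illustrate the check:
supportsPromptCaching("us.anthropic.claude-sonnet-4-5-20250929-v1:0") // true  - contains "anthropic"/"claude"
supportsPromptCaching("anthropic.claude-3-haiku-20240307-v1:0")       // true
supportsPromptCaching("minimax-m2")                                   // false - Bedrock cachePoint blocks are skipped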

View File

@@ -106,6 +106,32 @@ export function convertToLegalXml(xmlString: string): string {
return result
}
/**
* Wrap XML content with the full mxfile structure required by draw.io.
* Handles cases where XML is just <root>, <mxGraphModel>, or already has <mxfile>.
* @param xml - The XML string (may be partial or complete)
* @returns Full mxfile-wrapped XML string
*/
export function wrapWithMxFile(xml: string): string {
if (!xml) {
return `<mxfile><diagram name="Page-1" id="page-1"><mxGraphModel><root><mxCell id="0"/><mxCell id="1" parent="0"/></root></mxGraphModel></diagram></mxfile>`
}
// Already has full structure
if (xml.includes("<mxfile")) {
return xml
}
// Has mxGraphModel but not mxfile
if (xml.includes("<mxGraphModel")) {
return `<mxfile><diagram name="Page-1" id="page-1">${xml}</diagram></mxfile>`
}
// Just <root> content - extract inner content and wrap fully
const rootContent = xml.replace(/<\/?root>/g, "").trim()
return `<mxfile><diagram name="Page-1" id="page-1"><mxGraphModel><root>${rootContent}</root></mxGraphModel></diagram></mxfile>`
}
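// Illustrative usage (example XML, not from the repo): bare <root> output from the
// model gets wrapped into the full structure draw.io expects, which is what fixes
// the "Not a diagram file" error.
// const raw = '<root><mxCell id="0"/><mxCell id="1" parent="0"/></root>'
// wrapWithMxFile(raw)
//   -> '<mxfile><diagram name="Page-1" id="page-1"><mxGraphModel><root><mxCell id="0"/><mxCell id="1" parent="0"/></root></mxGraphModel></diagram></mxfile>'
// Input that already contains <mxfile> is returned as-is; <mxGraphModel> input is
// wrapped in <mxfile><diagram> only.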
/**
* Replace nodes in a Draw.io XML diagram
* @param currentXML - The original Draw.io XML string