Compare commits

..

9 Commits

Author | SHA1 | Message | Date

Dayuan Jiang | ecf716125e | feat: add MCP server notice to example panel | 2025-12-17 14:48:36 +09:00
Dayuan Jiang | 0a2e31b18c | chore: remove release notes | 2025-12-17 13:12:35 +09:00
Dayuan Jiang | 78a1f978fc | chore: bump version to 0.4.3 and add release notes | 2025-12-17 13:10:40 +09:00
Dayuan Jiang | 5ab751c986 | docs: use @latest instead of -y flag for npx (match Playwright MCP style) | 2025-12-17 13:00:26 +09:00
Dayuan Jiang | dad1480d8c | fix: exclude packages from Next.js build | 2025-12-17 12:56:43 +09:00
Dayuan Jiang | 0c95e829f0 | docs: add multi-client installation instructions for MCP server | 2025-12-17 12:36:45 +09:00
Dayuan Jiang | f6eeeb0d5b | docs: add MCP server section to README (preview feature) | 2025-12-17 12:31:42 +09:00
Dayuan Jiang | c0952d6170 | chore: bump version to 0.1.2 | 2025-12-17 12:19:27 +09:00
Dayuan Jiang | 79aa8734f1 | feat: add MCP server package for npx distribution | 2025-12-17 12:15:16 +09:00
    - Self-contained MCP server with embedded HTTP server
    - Real-time browser preview via draw.io iframe
    - Tools: start_session, display_diagram, edit_diagram, get_diagram, export_diagram
    - Port retry limit (6002-6020) and session TTL cleanup (1 hour)
    - Published as @next-ai-drawio/mcp-server on npm
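The last commit's description covers the server surface (its tools, the 6002-6020 port range, the 1-hour session TTL) and its npx distribution. As a rough sketch of how a client could consume that package, the snippet below spawns it over stdio and lists its tools. The package name and the `@latest` usage come from the commit messages above; the client code itself assumes the standard `@modelcontextprotocol/sdk` and is not taken from this repository.

```ts
// Minimal sketch (not from this repo): connect to the published MCP server
// over stdio and enumerate its tools. Assumes Node 18+ and the official
// @modelcontextprotocol/sdk client package.
import { Client } from "@modelcontextprotocol/sdk/client/index.js"
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js"

async function main() {
    // Launch the server the way the docs commits suggest: npx + @latest.
    const transport = new StdioClientTransport({
        command: "npx",
        args: ["@next-ai-drawio/mcp-server@latest"],
    })
    const client = new Client({ name: "tool-lister", version: "0.0.1" })
    await client.connect(transport)

    const { tools } = await client.listTools()
    console.log(tools.map((t) => t.name)) // expect start_session, display_diagram, ...

    await client.close()
}

main().catch(console.error)
```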
12 changed files with 27 additions and 161 deletions

View File

@@ -80,11 +80,8 @@ jobs:
       - name: Push to ECR (triggers App Runner auto-deploy)
         if: github.event_name != 'pull_request' && github.ref == 'refs/heads/main'
-        env:
-          REPO_LOWER: ${{ github.repository }}
         run: |
-          REPO_LOWER=$(echo "$REPO_LOWER" | tr '[:upper:]' '[:lower:]')
-          docker pull ghcr.io/${REPO_LOWER}:latest
-          docker tag ghcr.io/${REPO_LOWER}:latest ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.ap-northeast-1.amazonaws.com/next-ai-draw-io:latest
+          docker pull ghcr.io/${{ github.repository }}:latest
+          docker tag ghcr.io/${{ github.repository }}:latest ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.ap-northeast-1.amazonaws.com/next-ai-draw-io:latest
           docker push ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.ap-northeast-1.amazonaws.com/next-ai-draw-io:latest

View File

@@ -95,7 +95,7 @@ Here are some example prompts and their generated diagrams:
 ## MCP Server (Preview)
-> **Preview Feature**: This feature is experimental and may not stable.
+> **Preview Feature**: This feature is experimental and may change.
 Use Next AI Draw.io with AI agents like Claude Desktop, Cursor, and VS Code via MCP (Model Context Protocol).
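The README section added here points agent clients (Claude Desktop, Cursor, VS Code) at the server. As a purely illustrative continuation of the earlier sketch, and with the same caveats, a host could call one of the listed tools directly. The `xml` argument key and payload below are assumptions about the tool's input schema, not code from this repository.

```ts
// Illustrative only: the tool name "display_diagram" comes from commit
// 79aa8734f1; the "xml" argument key and payload are assumptions.
import { Client } from "@modelcontextprotocol/sdk/client/index.js"
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js"

async function main() {
    const client = new Client({ name: "diagram-demo", version: "0.0.1" })
    await client.connect(
        new StdioClientTransport({
            command: "npx",
            args: ["@next-ai-drawio/mcp-server@latest"],
        }),
    )

    // Hypothetical call: push a trivial draw.io XML document to the preview.
    const result = await client.callTool({
        name: "display_diagram",
        arguments: { xml: "<mxGraphModel><root/></mxGraphModel>" },
    })
    console.log(result)

    await client.close()
}

main().catch(console.error)
```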

View File

@@ -1,27 +0,0 @@
-import type { MetadataRoute } from "next";
-export default function manifest(): MetadataRoute.Manifest {
-    return {
-        name: 'Next AI Draw.io',
-        short_name: 'AIDraw.io',
-        description: 'Create AWS architecture diagrams, flowcharts, and technical diagrams using AI. Free online tool integrating draw.io with AI assistance for professional diagram creation.',
-        start_url: '/',
-        display: 'standalone',
-        background_color: '#f9fafb',
-        theme_color: '#171d26',
-        icons: [
-            {
-                src: '/favicon-192x192.png',
-                sizes: '192x192',
-                type: 'image/png',
-                purpose: 'any',
-            },
-            {
-                src: '/favicon-512x512.png',
-                sizes: '512x512',
-                type: 'image/png',
-                purpose: 'any',
-            },
-        ],
-    }
-}

View File

@@ -15,13 +15,8 @@ const drawioBaseUrl =
     process.env.NEXT_PUBLIC_DRAWIO_BASE_URL || "https://embed.diagrams.net"
 export default function Home() {
-    const {
-        drawioRef,
-        handleDiagramExport,
-        onDrawioLoad,
-        resetDrawioReady,
-        saveDiagramToStorage,
-    } = useDiagram()
+    const { drawioRef, handleDiagramExport, onDrawioLoad, resetDrawioReady } =
+        useDiagram()
     const [isMobile, setIsMobile] = useState(false)
     const [isChatVisible, setIsChatVisible] = useState(true)
     const [drawioUi, setDrawioUi] = useState<"min" | "sketch">("min")
@@ -40,10 +35,12 @@ export default function Home() {
         const savedDarkMode = localStorage.getItem("next-ai-draw-io-dark-mode")
         if (savedDarkMode !== null) {
+            // Use saved preference
             const isDark = savedDarkMode === "true"
             setDarkMode(isDark)
             document.documentElement.classList.toggle("dark", isDark)
         } else {
+            // First visit: match browser preference
             const prefersDark = window.matchMedia(
                 "(prefers-color-scheme: dark)",
             ).matches
@@ -61,20 +58,12 @@ export default function Home() {
         setIsLoaded(true)
     }, [])
-    const handleDarkModeChange = async () => {
-        await saveDiagramToStorage()
+    const toggleDarkMode = () => {
         const newValue = !darkMode
         setDarkMode(newValue)
         localStorage.setItem("next-ai-draw-io-dark-mode", String(newValue))
         document.documentElement.classList.toggle("dark", newValue)
-        resetDrawioReady() // Reset so onDrawioLoad fires again after remount
-    }
-    const handleDrawioUiChange = async () => {
-        await saveDiagramToStorage()
-        const newUi = drawioUi === "min" ? "sketch" : "min"
-        localStorage.setItem("drawio-theme", newUi)
-        setDrawioUi(newUi)
         resetDrawioReady()
     }
@@ -193,9 +182,15 @@ export default function Home() {
                             isVisible={isChatVisible}
                             onToggleVisibility={toggleChatPanel}
                             drawioUi={drawioUi}
-                            onToggleDrawioUi={handleDrawioUiChange}
+                            onToggleDrawioUi={() => {
+                                const newUi =
+                                    drawioUi === "min" ? "sketch" : "min"
+                                localStorage.setItem("drawio-theme", newUi)
+                                setDrawioUi(newUi)
+                                resetDrawioReady()
+                            }}
                             darkMode={darkMode}
-                            onToggleDarkMode={handleDarkModeChange}
+                            onToggleDarkMode={toggleDarkMode}
                             isMobile={isMobile}
                             onCloseProtectionChange={setCloseProtection}
                         />

View File

@@ -23,7 +23,6 @@ interface DiagramContextType {
         format: ExportFormat,
         sessionId?: string,
     ) => void
-    saveDiagramToStorage: () => Promise<void>
     isDrawioReady: boolean
     onDrawioLoad: () => void
     resetDrawioReady: () => void
@@ -83,30 +82,6 @@ export function DiagramProvider({ children }: { children: React.ReactNode }) {
         }
     }
-    // Save current diagram to localStorage (used before theme/UI changes)
-    const saveDiagramToStorage = async (): Promise<void> => {
-        if (!drawioRef.current) return
-        try {
-            const currentXml = await Promise.race([
-                new Promise<string>((resolve) => {
-                    resolverRef.current = resolve
-                    drawioRef.current?.exportDiagram({ format: "xmlsvg" })
-                }),
-                new Promise<string>((_, reject) =>
-                    setTimeout(() => reject(new Error("Export timeout")), 2000),
-                ),
-            ])
-            // Only save if diagram has meaningful content (not empty template)
-            if (currentXml && currentXml.length > 300) {
-                localStorage.setItem(STORAGE_DIAGRAM_XML_KEY, currentXml)
-            }
-        } catch (error) {
-            console.error("Failed to save diagram to storage:", error)
-        }
-    }
     const loadDiagram = (
         chart: string,
         skipValidation?: boolean,
@@ -305,7 +280,6 @@ export function DiagramProvider({ children }: { children: React.ReactNode }) {
         handleDiagramExport,
         clearDiagram,
         saveDiagramToFile,
-        saveDiagramToStorage,
         isDrawioReady,
         onDrawioLoad,
         resetDrawioReady,

View File

@@ -136,23 +136,6 @@ Optional custom URL:
 OLLAMA_BASE_URL=http://localhost:11434
 ```
-### Vercel AI Gateway
-Vercel AI Gateway provides unified access to multiple AI providers through a single API key. This simplifies authentication and allows you to switch between providers without managing multiple API keys.
-```bash
-AI_GATEWAY_API_KEY=your_gateway_api_key
-AI_MODEL=openai/gpt-4o
-```
-Model format uses `provider/model` syntax:
-- `openai/gpt-4o` - OpenAI GPT-4o
-- `anthropic/claude-sonnet-4-5` - Anthropic Claude Sonnet 4.5
-- `google/gemini-2.0-flash` - Google Gemini 2.0 Flash
-Get your API key from the [Vercel AI Gateway dashboard](https://vercel.com/ai-gateway).
 ## Auto-Detection
 If you only configure **one** provider's API key, the system will automatically detect and use that provider. No need to set `AI_PROVIDER`.
@@ -160,7 +143,7 @@ If you only configure **one** provider's API key, the system will automatically
 If you configure **multiple** API keys, you must explicitly set `AI_PROVIDER`:
 ```bash
-AI_PROVIDER=google # or: openai, anthropic, deepseek, siliconflow, azure, bedrock, openrouter, ollama, gateway
+AI_PROVIDER=google # or: openai, anthropic, deepseek, siliconflow, azure, bedrock, openrouter, ollama
 ```
 ## Model Capability Requirements

View File

@@ -1,6 +1,6 @@
 # AI Provider Configuration
 # AI_PROVIDER: Which provider to use
-# Options: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow, gateway
+# Options: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow
 # Default: bedrock
 AI_PROVIDER=bedrock
@@ -68,11 +68,6 @@ AI_MODEL=global.anthropic.claude-sonnet-4-5-20250929-v1:0
 # SILICONFLOW_API_KEY=sk-...
 # SILICONFLOW_BASE_URL=https://api.siliconflow.com/v1 # Optional: switch to https://api.siliconflow.cn/v1 if needed
-# Vercel AI Gateway Configuration
-# Get your API key from: https://vercel.com/ai-gateway
-# Model format: "provider/model" e.g., "openai/gpt-4o", "anthropic/claude-sonnet-4-5"
-# AI_GATEWAY_API_KEY=...
 # Langfuse Observability (Optional)
 # Enable LLM tracing and analytics - https://langfuse.com
 # LANGFUSE_PUBLIC_KEY=pk-lf-...

View File

@@ -2,7 +2,6 @@ import { createAmazonBedrock } from "@ai-sdk/amazon-bedrock"
 import { createAnthropic } from "@ai-sdk/anthropic"
 import { azure, createAzure } from "@ai-sdk/azure"
 import { createDeepSeek, deepseek } from "@ai-sdk/deepseek"
-import { gateway } from "@ai-sdk/gateway"
 import { createGoogleGenerativeAI, google } from "@ai-sdk/google"
 import { createOpenAI, openai } from "@ai-sdk/openai"
 import { fromNodeProviderChain } from "@aws-sdk/credential-providers"
@@ -19,7 +18,6 @@ export type ProviderName =
| "openrouter" | "openrouter"
| "deepseek" | "deepseek"
| "siliconflow" | "siliconflow"
| "gateway"
interface ModelConfig { interface ModelConfig {
model: any model: any
@@ -44,7 +42,6 @@ const ALLOWED_CLIENT_PROVIDERS: ProviderName[] = [
"openrouter", "openrouter",
"deepseek", "deepseek",
"siliconflow", "siliconflow",
"gateway",
] ]
// Bedrock provider options for Anthropic beta features // Bedrock provider options for Anthropic beta features
@@ -336,10 +333,8 @@ function buildProviderOptions(
case "deepseek": case "deepseek":
case "openrouter": case "openrouter":
case "siliconflow": case "siliconflow": {
case "gateway": {
// These providers don't have reasoning configs in AI SDK yet // These providers don't have reasoning configs in AI SDK yet
// Gateway passes through to underlying providers which handle their own configs
break break
} }
@@ -361,7 +356,6 @@ const PROVIDER_ENV_VARS: Record<ProviderName, string | null> = {
     openrouter: "OPENROUTER_API_KEY",
     deepseek: "DEEPSEEK_API_KEY",
     siliconflow: "SILICONFLOW_API_KEY",
-    gateway: "AI_GATEWAY_API_KEY",
 }
 /**
@@ -501,7 +495,6 @@ export function getAIModel(overrides?: ClientOverrides): ModelConfig {
     if (configured.length === 0) {
         throw new Error(
             `No AI provider configured. Please set one of the following API keys in your .env.local file:\n` +
-                `- AI_GATEWAY_API_KEY for Vercel AI Gateway\n` +
                 `- DEEPSEEK_API_KEY for DeepSeek\n` +
                 `- OPENAI_API_KEY for OpenAI\n` +
                 `- ANTHROPIC_API_KEY for Anthropic\n` +
@@ -679,17 +672,9 @@ export function getAIModel(overrides?: ClientOverrides): ModelConfig {
             break
         }
-        case "gateway": {
-            // Vercel AI Gateway - unified access to multiple AI providers
-            // Model format: "provider/model" e.g., "openai/gpt-4o", "anthropic/claude-sonnet-4-5"
-            // See: https://vercel.com/ai-gateway
-            model = gateway(modelId)
-            break
-        }
         default:
             throw new Error(
-                `Unknown AI provider: ${provider}. Supported providers: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow, gateway`,
+                `Unknown AI provider: ${provider}. Supported providers: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek, siliconflow`,
             )
     }

package-lock.json (generated) · 47 changed lines
View File

@@ -1,19 +1,18 @@
 {
   "name": "next-ai-draw-io",
-  "version": "0.4.2",
+  "version": "0.4.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "next-ai-draw-io",
-      "version": "0.4.2",
+      "version": "0.4.0",
       "license": "Apache-2.0",
       "dependencies": {
         "@ai-sdk/amazon-bedrock": "^3.0.70",
         "@ai-sdk/anthropic": "^2.0.44",
         "@ai-sdk/azure": "^2.0.69",
         "@ai-sdk/deepseek": "^1.0.30",
-        "@ai-sdk/gateway": "^2.0.21",
         "@ai-sdk/google": "^2.0.0",
         "@ai-sdk/openai": "^2.0.19",
         "@ai-sdk/react": "^2.0.107",
@@ -200,13 +199,13 @@
       }
     },
     "node_modules/@ai-sdk/gateway": {
-      "version": "2.0.21",
-      "resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-2.0.21.tgz",
-      "integrity": "sha512-BwV7DU/lAm3Xn6iyyvZdWgVxgLu3SNXzl5y57gMvkW4nGhAOV5269IrJzQwGt03bb107sa6H6uJwWxc77zXoGA==",
+      "version": "2.0.18",
+      "resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-2.0.18.tgz",
+      "integrity": "sha512-sDQcW+6ck2m0pTIHW6BPHD7S125WD3qNkx/B8sEzJp/hurocmJ5Cni0ybExg6sQMGo+fr/GWOwpHF1cmCdg5rQ==",
       "license": "Apache-2.0",
       "dependencies": {
         "@ai-sdk/provider": "2.0.0",
-        "@ai-sdk/provider-utils": "3.0.19",
+        "@ai-sdk/provider-utils": "3.0.18",
         "@vercel/oidc": "3.0.5"
       },
       "engines": {
@@ -216,23 +215,6 @@
"zod": "^3.25.76 || ^4.1.8" "zod": "^3.25.76 || ^4.1.8"
} }
}, },
"node_modules/@ai-sdk/gateway/node_modules/@ai-sdk/provider-utils": {
"version": "3.0.19",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.19.tgz",
"integrity": "sha512-W41Wc9/jbUVXVwCN/7bWa4IKe8MtxO3EyA0Hfhx6grnmiYlCvpI8neSYWFE0zScXJkgA/YK3BRybzgyiXuu6JA==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@standard-schema/spec": "^1.0.0",
"eventsource-parser": "^3.0.6"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/@ai-sdk/google": { "node_modules/@ai-sdk/google": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-2.0.0.tgz", "resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-2.0.0.tgz",
@@ -6148,23 +6130,6 @@
"zod": "^3.25.76 || ^4.1.8" "zod": "^3.25.76 || ^4.1.8"
} }
}, },
"node_modules/ai/node_modules/@ai-sdk/gateway": {
"version": "2.0.18",
"resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-2.0.18.tgz",
"integrity": "sha512-sDQcW+6ck2m0pTIHW6BPHD7S125WD3qNkx/B8sEzJp/hurocmJ5Cni0ybExg6sQMGo+fr/GWOwpHF1cmCdg5rQ==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "3.0.18",
"@vercel/oidc": "3.0.5"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
}
},
"node_modules/ajv": { "node_modules/ajv": {
"version": "6.12.6", "version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",

View File

@@ -17,7 +17,6 @@
"@ai-sdk/anthropic": "^2.0.44", "@ai-sdk/anthropic": "^2.0.44",
"@ai-sdk/azure": "^2.0.69", "@ai-sdk/azure": "^2.0.69",
"@ai-sdk/deepseek": "^1.0.30", "@ai-sdk/deepseek": "^1.0.30",
"@ai-sdk/gateway": "^2.0.21",
"@ai-sdk/google": "^2.0.0", "@ai-sdk/google": "^2.0.0",
"@ai-sdk/openai": "^2.0.19", "@ai-sdk/openai": "^2.0.19",
"@ai-sdk/react": "^2.0.107", "@ai-sdk/react": "^2.0.107",

Binary file not shown (removed; was 14 KiB).

Binary file not shown (removed; was 54 KiB).