Mirror of https://github.com/czlonkowski/n8n-mcp.git (synced 2026-01-30 14:32:04 +00:00)

Compare commits

45 commits
| Author | SHA1 | Date |
|---|---|---|
|  | c8c76e435d |  |
|  | fad3437977 |  |
|  | 0f15b82f1e |  |
|  | 974a9fb349 |  |
|  | a6dcbd2473 |  |
|  | ec5340c7e4 |  |
|  | a9c4400a92 |  |
|  | 533b105f03 |  |
|  | 28667736cd |  |
|  | 211ae72f96 |  |
|  | ce2c94c1a5 |  |
|  | 861005eeed |  |
|  | 7b0ff990ec |  |
|  | 25cb8bb455 |  |
|  | 2713db6d10 |  |
|  | f10772a9d2 |  |
|  | 808088f25e |  |
|  | 20663dad0d |  |
|  | 705d31c35e |  |
|  | d60182eeb8 |  |
|  | a40f6a5077 |  |
|  | fa216e4d13 |  |
|  | 562f4b0c4e |  |
|  | 0f13e7aeee |  |
|  | 551445bcd5 |  |
|  | c6f3733fbd |  |
|  | 6e85c68d62 |  |
|  | fa7d0b420e |  |
|  | 47510ef6da |  |
|  | b92e511463 |  |
|  | 130dd44ea5 |  |
|  | 560e0c5b86 |  |
|  | cdaa29e7a2 |  |
|  | 60479e0eb4 |  |
|  | 90407f845d |  |
|  | b5759c4146 |  |
|  | 1524fd5a08 |  |
|  | 5057481e70 |  |
|  | a70d96a373 |  |
|  | 934124fa7b |  |
|  | c2dc7c9c51 |  |
|  | 527e9874ab |  |
|  | ef9b6f6341 |  |
|  | 3188d209b7 |  |
|  | 33690c5650 |  |
@@ -1,7 +1,7 @@
---
name: n8n-mcp-tester
description: Use this agent when you need to test n8n-mcp functionality after implementing fixes or new features. This agent should be invoked proactively after code changes to validate that the MCP tools work correctly with real n8n workflows. The agent tests the actual MCP server tools, not just unit tests. Examples:\n\n<example>\nContext: Developer has just fixed a validation bug in the n8n-mcp server\nuser: "I've fixed the validation issue for HTTP Request nodes"\nassistant: "I've completed the fix. Now let me build the project and then test it."\n<builds project>\nassistant: "Project built successfully. Please reload the MCP server in Claude Desktop to apply the changes."\nuser: "Reloaded"\nassistant: "Great! Now let me invoke the n8n-mcp-tester agent to verify the fix works correctly."\n<commentary>\nSince a fix has been implemented and the MCP server reloaded, use the n8n-mcp-tester agent to validate the fix works in real-world usage.\n</commentary>\n</example>\n\n<example>\nContext: Developer has added a new MCP tool for workflow validation\nuser: "I've implemented the new workflow validation tool"\nassistant: "Implementation complete. Let me build and prepare for testing."\n<builds project>\nassistant: "Build successful. Please reload the MCP server to load the new tool."\nuser: "Done"\nassistant: "Perfect! I'll now use the n8n-mcp-tester agent to test the new workflow validation tool."\n<commentary>\nAfter implementing new MCP functionality and reloading the server, invoke n8n-mcp-tester to verify it works correctly.\n</commentary>\n</example>
tools: Glob, Grep, LS, Read, WebFetch, TodoWrite, WebSearch, mcp__puppeteer__puppeteer_navigate, mcp__puppeteer__puppeteer_screenshot, mcp__puppeteer__puppeteer_click, mcp__puppeteer__puppeteer_fill, mcp__puppeteer__puppeteer_select, mcp__puppeteer__puppeteer_hover, mcp__puppeteer__puppeteer_evaluate, ListMcpResourcesTool, ReadMcpResourceTool, mcp__supabase__list_organizations, mcp__supabase__get_organization, mcp__supabase__list_projects, mcp__supabase__get_project, mcp__supabase__get_cost, mcp__supabase__confirm_cost, mcp__supabase__create_project, mcp__supabase__pause_project, mcp__supabase__restore_project, mcp__supabase__create_branch, mcp__supabase__list_branches, mcp__supabase__delete_branch, mcp__supabase__merge_branch, mcp__supabase__reset_branch, mcp__supabase__rebase_branch, mcp__supabase__list_tables, mcp__supabase__list_extensions, mcp__supabase__list_migrations, mcp__supabase__apply_migration, mcp__supabase__execute_sql, mcp__supabase__get_logs, mcp__supabase__get_advisors, mcp__supabase__get_project_url, mcp__supabase__get_anon_key, mcp__supabase__generate_typescript_types, mcp__supabase__search_docs, mcp__supabase__list_edge_functions, mcp__supabase__deploy_edge_function, mcp__n8n-mcp__tools_documentation, mcp__n8n-mcp__list_nodes, mcp__n8n-mcp__get_node_info, mcp__n8n-mcp__search_nodes, mcp__n8n-mcp__list_ai_tools, mcp__n8n-mcp__get_node_documentation, mcp__n8n-mcp__get_database_statistics, mcp__n8n-mcp__get_node_essentials, mcp__n8n-mcp__search_node_properties, mcp__n8n-mcp__get_node_for_task, mcp__n8n-mcp__list_tasks, mcp__n8n-mcp__validate_node_operation, mcp__n8n-mcp__validate_node_minimal, mcp__n8n-mcp__get_property_dependencies, mcp__n8n-mcp__get_node_as_tool_info, mcp__n8n-mcp__list_node_templates, mcp__n8n-mcp__get_template, mcp__n8n-mcp__search_templates, mcp__n8n-mcp__get_templates_for_task, mcp__n8n-mcp__validate_workflow, mcp__n8n-mcp__validate_workflow_connections, mcp__n8n-mcp__validate_workflow_expressions, mcp__n8n-mcp__n8n_create_workflow, mcp__n8n-mcp__n8n_get_workflow, mcp__n8n-mcp__n8n_get_workflow_details, mcp__n8n-mcp__n8n_get_workflow_structure, mcp__n8n-mcp__n8n_get_workflow_minimal, mcp__n8n-mcp__n8n_update_full_workflow, mcp__n8n-mcp__n8n_update_partial_workflow, mcp__n8n-mcp__n8n_delete_workflow, mcp__n8n-mcp__n8n_list_workflows, mcp__n8n-mcp__n8n_validate_workflow, mcp__n8n-mcp__n8n_trigger_webhook_workflow, mcp__n8n-mcp__n8n_get_execution, mcp__n8n-mcp__n8n_list_executions, mcp__n8n-mcp__n8n_delete_execution, mcp__n8n-mcp__n8n_health_check, mcp__n8n-mcp__n8n_list_available_tools, mcp__n8n-mcp__n8n_diagnostic
tools: Glob, Grep, Read, WebFetch, TodoWrite, WebSearch, mcp__supabase__create_branch, mcp__supabase__list_branches, mcp__supabase__delete_branch, mcp__supabase__merge_branch, mcp__supabase__reset_branch, mcp__supabase__rebase_branch, mcp__supabase__list_tables, mcp__supabase__list_extensions, mcp__supabase__list_migrations, mcp__supabase__apply_migration, mcp__supabase__execute_sql, mcp__supabase__get_logs, mcp__supabase__get_advisors, mcp__supabase__get_project_url, mcp__supabase__generate_typescript_types, mcp__supabase__search_docs, mcp__supabase__list_edge_functions, mcp__supabase__deploy_edge_function, mcp__n8n-mcp__tools_documentation, mcp__n8n-mcp__search_nodes, mcp__n8n-mcp__get_template, mcp__n8n-mcp__search_templates, mcp__n8n-mcp__validate_workflow, mcp__n8n-mcp__n8n_create_workflow, mcp__n8n-mcp__n8n_get_workflow, mcp__n8n-mcp__n8n_update_full_workflow, mcp__n8n-mcp__n8n_update_partial_workflow, mcp__n8n-mcp__n8n_delete_workflow, mcp__n8n-mcp__n8n_list_workflows, mcp__n8n-mcp__n8n_validate_workflow, mcp__n8n-mcp__n8n_trigger_webhook_workflow, mcp__n8n-mcp__n8n_health_check, mcp__brightdata-mcp__search_engine, mcp__brightdata-mcp__scrape_as_markdown, mcp__brightdata-mcp__search_engine_batch, mcp__brightdata-mcp__scrape_batch, mcp__supabase__get_publishable_keys, mcp__supabase__get_edge_function, mcp__n8n-mcp__get_node, mcp__n8n-mcp__validate_node, mcp__n8n-mcp__n8n_autofix_workflow, mcp__n8n-mcp__n8n_executions, mcp__n8n-mcp__n8n_workflow_versions, mcp__n8n-mcp__n8n_deploy_template, mcp__ide__getDiagnostics, mcp__ide__executeCode
model: sonnet
---
@@ -37,9 +37,11 @@ MCP_SERVER_HOST=localhost
 # Server mode: stdio (local) or http (remote)
 MCP_MODE=stdio

-# Use fixed HTTP implementation (recommended for stability)
-# Set to true to bypass StreamableHTTPServerTransport issues
-USE_FIXED_HTTP=true
+# DEPRECATED: USE_FIXED_HTTP is deprecated as of v2.31.8
+# The fixed HTTP implementation does not support SSE streaming required by
+# clients like OpenAI Codex. Use the default SingleSessionHTTPServer instead.
+# See: https://github.com/czlonkowski/n8n-mcp/issues/524
+# USE_FIXED_HTTP=true # DO NOT USE - deprecated

 # HTTP Server Configuration (only used when MCP_MODE=http)
 PORT=3000
3 .github/workflows/docker-build-n8n.yml vendored
@@ -52,6 +52,9 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v4

+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
30 .github/workflows/docker-build.yml vendored
@@ -53,13 +53,24 @@ jobs:
     permissions:
       contents: read
       packages: write

     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
         with:
           lfs: true

+      - name: Sync runtime version
+        run: |
+          VERSION=$(node -p "require('./package.json').version")
+          node -e "
+            const fs = require('fs');
+            const pkg = JSON.parse(fs.readFileSync('package.runtime.json'));
+            pkg.version = '$VERSION';
+            fs.writeFileSync('package.runtime.json', JSON.stringify(pkg, null, 2) + '\n');
+          "
+          echo "✅ Synced package.runtime.json to version $VERSION"
+
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v3
@@ -144,13 +155,24 @@ jobs:
     permissions:
       contents: read
       packages: write

     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
         with:
           lfs: true

+      - name: Sync runtime version
+        run: |
+          VERSION=$(node -p "require('./package.json').version")
+          node -e "
+            const fs = require('fs');
+            const pkg = JSON.parse(fs.readFileSync('package.runtime.json'));
+            pkg.version = '$VERSION';
+            fs.writeFileSync('package.runtime.json', JSON.stringify(pkg, null, 2) + '\n');
+          "
+          echo "✅ Synced package.runtime.json to version $VERSION"
+
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v3
19 .github/workflows/release.yml vendored
@@ -311,14 +311,14 @@ jobs:
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4

       - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
           node-version: 20
           cache: 'npm'
           registry-url: 'https://registry.npmjs.org'

       - name: Install dependencies
         run: npm ci
@@ -396,7 +396,7 @@ jobs:
           npm publish --access public
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

       - name: Clean up
         if: always()
         run: rm -rf npm-publish-temp
@@ -427,7 +427,18 @@ jobs:
             exit 1
           fi
           echo "✅ Sufficient disk space: ${AVAILABLE_GB}GB available"

+      - name: Sync runtime version for Docker
+        run: |
+          VERSION=$(node -p "require('./package.json').version")
+          node -e "
+            const fs = require('fs');
+            const pkg = JSON.parse(fs.readFileSync('package.runtime.json'));
+            pkg.version = '$VERSION';
+            fs.writeFileSync('package.runtime.json', JSON.stringify(pkg, null, 2) + '\n');
+          "
+          echo "✅ Synced package.runtime.json to version $VERSION"
+
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v3
2 .github/workflows/test.yml vendored
@@ -40,7 +40,7 @@ permissions:
 jobs:
   test:
     runs-on: ubuntu-latest
-    timeout-minutes: 10 # Add a 10-minute timeout to prevent hanging
+    timeout-minutes: 15 # Increased from 10 to accommodate larger database with community nodes
     steps:
       - uses: actions/checkout@v4
6671 CHANGELOG.md
File diff suppressed because it is too large
@@ -209,7 +209,7 @@ The MCP server exposes tools in several categories:
 - **Security-first**: API keys exported as plaintext - downstream MUST encrypt
 - **Dormant sessions**: Restored sessions recreate transports on first request
 - **Automatic expiration**: Respects `sessionTimeout` setting (default 30 min)
-- **MAX_SESSIONS limit**: Caps at 100 concurrent sessions
+- **MAX_SESSIONS limit**: Caps at 100 concurrent sessions (configurable via N8N_MCP_MAX_SESSIONS env var)

 **Important Implementation Notes:**
 - Only exports sessions with valid n8nApiUrl and n8nApiKey in context
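A minimal sketch of how a configurable cap like this can be read at startup; the variable name N8N_MCP_MAX_SESSIONS and the default of 100 come from the changed line above, while the parsing details are assumptions rather than the project's actual code:

```js
// Sketch only: reads N8N_MCP_MAX_SESSIONS and falls back to the documented default of 100.
const DEFAULT_MAX_SESSIONS = 100;

function getMaxSessions(env = process.env) {
  const parsed = Number.parseInt(env.N8N_MCP_MAX_SESSIONS ?? '', 10);
  // Use the default when the variable is unset or not a positive integer.
  return Number.isInteger(parsed) && parsed > 0 ? parsed : DEFAULT_MAX_SESSIONS;
}

console.log(getMaxSessions()); // 100 unless N8N_MCP_MAX_SESSIONS is set
```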
@@ -14,7 +14,7 @@ RUN --mount=type=cache,target=/root/.npm \
     echo '{}' > package.json && \
     npm install --no-save typescript@^5.8.3 @types/node@^22.15.30 @types/express@^5.0.3 \
       @modelcontextprotocol/sdk@1.20.1 dotenv@^16.5.0 express@^5.1.0 axios@^1.10.0 \
-      n8n-workflow@^1.96.0 uuid@^11.0.5 @types/uuid@^10.0.0 \
+      n8n-workflow@^2.4.2 uuid@^11.0.5 @types/uuid@^10.0.0 \
       openai@^4.77.0 zod@3.24.1 lru-cache@^11.2.1 @supabase/supabase-js@^2.57.4

 # Copy source and build
@@ -74,7 +74,8 @@ ENV AUTH_TOKEN="REPLACE_THIS_AUTH_TOKEN_32_CHARS_MIN_abcdefgh"
 ENV NODE_ENV=production
 ENV IS_DOCKER=true
 ENV MCP_MODE=http
-ENV USE_FIXED_HTTP=true
+# NOTE: USE_FIXED_HTTP is deprecated. SingleSessionHTTPServer is now the default.
+# See: https://github.com/czlonkowski/n8n-mcp/issues/524
 ENV LOG_LEVEL=info
 ENV TRUST_PROXY=1
 ENV HOST=0.0.0.0
43 README.md
@@ -5,23 +5,24 @@
 [](https://www.npmjs.com/package/n8n-mcp)
 [](https://codecov.io/gh/czlonkowski/n8n-mcp)
 [](https://github.com/czlonkowski/n8n-mcp/actions)
-[](https://github.com/n8n-io/n8n)
+[](https://github.com/n8n-io/n8n)
 [](https://github.com/czlonkowski/n8n-mcp/pkgs/container/n8n-mcp)
 [](https://railway.com/deploy/n8n-mcp?referralCode=n8n-mcp)

-A Model Context Protocol (MCP) server that provides AI assistants with comprehensive access to n8n node documentation, properties, and operations. Deploy in minutes to give Claude and other AI assistants deep knowledge about n8n's 545 workflow automation nodes.
+A Model Context Protocol (MCP) server that provides AI assistants with comprehensive access to n8n node documentation, properties, and operations. Deploy in minutes to give Claude and other AI assistants deep knowledge about n8n's 1,084 workflow automation nodes (537 core + 547 community).

 ## Overview

 n8n-MCP serves as a bridge between n8n's workflow automation platform and AI models, enabling them to understand and work with n8n nodes effectively. It provides structured access to:

-- 📚 **543 n8n nodes** from both n8n-nodes-base and @n8n/n8n-nodes-langchain
+- 📚 **1,084 n8n nodes** - 537 core nodes + 547 community nodes (301 verified)
 - 🔧 **Node properties** - 99% coverage with detailed schemas
 - ⚡ **Node operations** - 63.6% coverage of available actions
 - 📄 **Documentation** - 87% coverage from official n8n docs (including AI nodes)
-- 🤖 **AI tools** - 271 AI-capable nodes detected with full documentation
+- 🤖 **AI tools** - 265 AI-capable tool variants detected with full documentation
 - 💡 **Real-world examples** - 2,646 pre-extracted configurations from popular templates
 - 🎯 **Template library** - 2,709 workflow templates with 100% metadata coverage
+- 🌐 **Community nodes** - Search verified community integrations with `source` filter (NEW!)

 ## ⚠️ Important Safety Warning
@@ -598,7 +599,7 @@ ALWAYS explicitly configure ALL parameters that control node behavior.
 - `n8n_create_workflow(workflow)` - Deploy
 - `n8n_validate_workflow({id})` - Post-deployment check
 - `n8n_update_partial_workflow({id, operations: [...]})` - Batch updates
-- `n8n_trigger_webhook_workflow()` - Test webhooks
+- `n8n_test_workflow({workflowId})` - Test workflow execution

 ## Critical Warnings
@@ -940,7 +941,7 @@ Once connected, Claude can use these powerful tools:

 ### Core Tools (7 tools)
 - **`tools_documentation`** - Get documentation for any MCP tool (START HERE!)
-- **`search_nodes`** - Full-text search across all nodes. Use `includeExamples: true` for real-world configurations
+- **`search_nodes`** - Full-text search across all nodes. Use `source: 'community'|'verified'` for community nodes, `includeExamples: true` for configs
 - **`get_node`** - Unified node information tool with multiple modes (v2.26.0):
   - **Info mode** (default): `detail: 'minimal'|'standard'|'full'`, `includeExamples: true`
   - **Docs mode**: `mode: 'docs'` - Human-readable markdown documentation
@@ -977,7 +978,10 @@ These tools require `N8N_API_URL` and `N8N_API_KEY` in your configuration.
 - **`n8n_deploy_template`** - Deploy templates from n8n.io directly to your instance with auto-fix

 #### Execution Management
 - **`n8n_trigger_webhook_workflow`** - Trigger workflows via webhook URL
 - **`n8n_test_workflow`** - Test/trigger workflow execution:
   - Auto-detects trigger type (webhook, form, chat) from workflow
   - Supports custom data, headers, and HTTP methods for webhooks
   - Chat triggers support message and sessionId for conversations
 - **`n8n_executions`** - Unified execution management (v2.26.0):
   - `action: 'list'` - List executions with status filtering
   - `action: 'get'` - Get execution details by ID
@@ -1021,6 +1025,18 @@ search_nodes({
   includeExamples: true // Returns top 2 configs per node
 })

+// Search community nodes only
+search_nodes({
+  query: "scraping",
+  source: "community" // Options: all, core, community, verified
+})
+
+// Search verified community nodes
+search_nodes({
+  query: "pdf",
+  source: "verified" // Only verified community integrations
+})
+
 // Validate node configuration
 validate_node({
   nodeType: "nodes-base.httpRequest",
@@ -1118,17 +1134,18 @@ npm run dev:http # HTTP dev mode

 ## 📊 Metrics & Coverage

-Current database coverage (n8n v1.117.2):
+Current database coverage (n8n v2.2.3):

-- ✅ **541/541** nodes loaded (100%)
-- ✅ **541** nodes with properties (100%)
-- ✅ **470** nodes with documentation (87%)
-- ✅ **271** AI-capable tools detected
+- ✅ **1,084 total nodes** - 537 core + 547 community
+- ✅ **301 verified** community nodes from n8n Strapi API
+- ✅ **246 popular** npm community packages indexed
+- ✅ **470** nodes with documentation (87% core coverage)
+- ✅ **265** AI-capable tool variants detected
 - ✅ **2,646** pre-extracted template configurations
 - ✅ **2,709** workflow templates available (100% metadata coverage)
 - ✅ **AI Agent & LangChain nodes** fully documented
 - ⚡ **Average response time**: ~12ms
-- 💾 **Database size**: ~68MB (includes templates with metadata)
+- 💾 **Database size**: ~70MB (includes templates and community nodes)

 ## 🔄 Recent Updates
BIN data/nodes.db
Binary file not shown.
15 dist/config/n8n-api.d.ts vendored Normal file
@@ -0,0 +1,15 @@
export declare function getN8nApiConfig(): {
    baseUrl: string;
    apiKey: string;
    timeout: number;
    maxRetries: number;
} | null;
export declare function isN8nApiConfigured(): boolean;
export declare function getN8nApiConfigFromContext(context: {
    n8nApiUrl?: string;
    n8nApiKey?: string;
    n8nApiTimeout?: number;
    n8nApiMaxRetries?: number;
}): N8nApiConfig | null;
export type N8nApiConfig = NonNullable<ReturnType<typeof getN8nApiConfig>>;
//# sourceMappingURL=n8n-api.d.ts.map
1 dist/config/n8n-api.d.ts.map vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"n8n-api.d.ts","sourceRoot":"","sources":["../../src/config/n8n-api.ts"],"names":[],"mappings":"AAgBA,wBAAgB,eAAe;;;;;SA0B9B;AAGD,wBAAgB,kBAAkB,IAAI,OAAO,CAG5C;AAMD,wBAAgB,0BAA0B,CAAC,OAAO,EAAE;IAClD,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B,GAAG,YAAY,GAAG,IAAI,CAWtB;AAGD,MAAM,MAAM,YAAY,GAAG,WAAW,CAAC,UAAU,CAAC,OAAO,eAAe,CAAC,CAAC,CAAC"}
53 dist/config/n8n-api.js vendored Normal file
@@ -0,0 +1,53 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getN8nApiConfig = getN8nApiConfig;
exports.isN8nApiConfigured = isN8nApiConfigured;
exports.getN8nApiConfigFromContext = getN8nApiConfigFromContext;
const zod_1 = require("zod");
const dotenv_1 = __importDefault(require("dotenv"));
const n8nApiConfigSchema = zod_1.z.object({
    N8N_API_URL: zod_1.z.string().url().optional(),
    N8N_API_KEY: zod_1.z.string().min(1).optional(),
    N8N_API_TIMEOUT: zod_1.z.coerce.number().positive().default(30000),
    N8N_API_MAX_RETRIES: zod_1.z.coerce.number().positive().default(3),
});
let envLoaded = false;
function getN8nApiConfig() {
    if (!envLoaded) {
        dotenv_1.default.config();
        envLoaded = true;
    }
    const result = n8nApiConfigSchema.safeParse(process.env);
    if (!result.success) {
        return null;
    }
    const config = result.data;
    if (!config.N8N_API_URL || !config.N8N_API_KEY) {
        return null;
    }
    return {
        baseUrl: config.N8N_API_URL,
        apiKey: config.N8N_API_KEY,
        timeout: config.N8N_API_TIMEOUT,
        maxRetries: config.N8N_API_MAX_RETRIES,
    };
}
function isN8nApiConfigured() {
    const config = getN8nApiConfig();
    return config !== null;
}
function getN8nApiConfigFromContext(context) {
    if (!context.n8nApiUrl || !context.n8nApiKey) {
        return null;
    }
    return {
        baseUrl: context.n8nApiUrl,
        apiKey: context.n8nApiKey,
        timeout: context.n8nApiTimeout ?? 30000,
        maxRetries: context.n8nApiMaxRetries ?? 3,
    };
}
//# sourceMappingURL=n8n-api.js.map
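A hedged usage sketch for the module above; the require path and exported helpers come from the compiled file shown, while the surrounding script, URL, and key are illustrative placeholders:

```js
// Illustrative only: exercises the exported helpers from dist/config/n8n-api.js.
const { getN8nApiConfig, isN8nApiConfigured, getN8nApiConfigFromContext } = require('./dist/config/n8n-api');

// Returns null unless both N8N_API_URL and N8N_API_KEY are set in the environment.
console.log(isN8nApiConfigured(), getN8nApiConfig());

// Per-session context uses the same shape; timeout/maxRetries fall back to 30000 ms / 3.
console.log(getN8nApiConfigFromContext({
  n8nApiUrl: 'https://n8n.example.com', // hypothetical instance URL
  n8nApiKey: 'api-key-placeholder',     // hypothetical key
}));
```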
1 dist/config/n8n-api.js.map vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"n8n-api.js","sourceRoot":"","sources":["../../src/config/n8n-api.ts"],"names":[],"mappings":";;;;;AAgBA,0CA0BC;AAGD,gDAGC;AAMD,gEAgBC;AAtED,6BAAwB;AACxB,oDAA4B;AAI5B,MAAM,kBAAkB,GAAG,OAAC,CAAC,MAAM,CAAC;IAClC,WAAW,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,CAAC,QAAQ,EAAE;IACxC,WAAW,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE;IACzC,eAAe,EAAE,OAAC,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC;IAC5D,mBAAmB,EAAE,OAAC,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC;CAC7D,CAAC,CAAC;AAGH,IAAI,SAAS,GAAG,KAAK,CAAC;AAGtB,SAAgB,eAAe;IAE7B,IAAI,CAAC,SAAS,EAAE,CAAC;QACf,gBAAM,CAAC,MAAM,EAAE,CAAC;QAChB,SAAS,GAAG,IAAI,CAAC;IACnB,CAAC;IAED,MAAM,MAAM,GAAG,kBAAkB,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAEzD,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;QACpB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC;IAG3B,IAAI,CAAC,MAAM,CAAC,WAAW,IAAI,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC;QAC/C,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO;QACL,OAAO,EAAE,MAAM,CAAC,WAAW;QAC3B,MAAM,EAAE,MAAM,CAAC,WAAW;QAC1B,OAAO,EAAE,MAAM,CAAC,eAAe;QAC/B,UAAU,EAAE,MAAM,CAAC,mBAAmB;KACvC,CAAC;AACJ,CAAC;AAGD,SAAgB,kBAAkB;IAChC,MAAM,MAAM,GAAG,eAAe,EAAE,CAAC;IACjC,OAAO,MAAM,KAAK,IAAI,CAAC;AACzB,CAAC;AAMD,SAAgB,0BAA0B,CAAC,OAK1C;IACC,IAAI,CAAC,OAAO,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,CAAC;QAC7C,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO;QACL,OAAO,EAAE,OAAO,CAAC,SAAS;QAC1B,MAAM,EAAE,OAAO,CAAC,SAAS;QACzB,OAAO,EAAE,OAAO,CAAC,aAAa,IAAI,KAAK;QACvC,UAAU,EAAE,OAAO,CAAC,gBAAgB,IAAI,CAAC;KAC1C,CAAC;AACJ,CAAC"}
123 dist/constants/type-structures.d.ts vendored Normal file
@@ -0,0 +1,123 @@
|
||||
import type { NodePropertyTypes } from 'n8n-workflow';
|
||||
import type { TypeStructure } from '../types/type-structures';
|
||||
export declare const TYPE_STRUCTURES: Record<NodePropertyTypes, TypeStructure>;
|
||||
export declare const COMPLEX_TYPE_EXAMPLES: {
|
||||
collection: {
|
||||
basic: {
|
||||
name: string;
|
||||
email: string;
|
||||
};
|
||||
nested: {
|
||||
user: {
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
};
|
||||
preferences: {
|
||||
theme: string;
|
||||
notifications: boolean;
|
||||
};
|
||||
};
|
||||
withExpressions: {
|
||||
id: string;
|
||||
timestamp: string;
|
||||
data: string;
|
||||
};
|
||||
};
|
||||
fixedCollection: {
|
||||
httpHeaders: {
|
||||
headers: {
|
||||
name: string;
|
||||
value: string;
|
||||
}[];
|
||||
};
|
||||
queryParameters: {
|
||||
queryParameters: {
|
||||
name: string;
|
||||
value: string;
|
||||
}[];
|
||||
};
|
||||
multipleCollections: {
|
||||
headers: {
|
||||
name: string;
|
||||
value: string;
|
||||
}[];
|
||||
queryParameters: {
|
||||
name: string;
|
||||
value: string;
|
||||
}[];
|
||||
};
|
||||
};
|
||||
filter: {
|
||||
simple: {
|
||||
conditions: {
|
||||
id: string;
|
||||
leftValue: string;
|
||||
operator: {
|
||||
type: string;
|
||||
operation: string;
|
||||
};
|
||||
rightValue: string;
|
||||
}[];
|
||||
combinator: string;
|
||||
};
|
||||
complex: {
|
||||
conditions: ({
|
||||
id: string;
|
||||
leftValue: string;
|
||||
operator: {
|
||||
type: string;
|
||||
operation: string;
|
||||
};
|
||||
rightValue: number;
|
||||
} | {
|
||||
id: string;
|
||||
leftValue: string;
|
||||
operator: {
|
||||
type: string;
|
||||
operation: string;
|
||||
};
|
||||
rightValue: string;
|
||||
})[];
|
||||
combinator: string;
|
||||
};
|
||||
};
|
||||
resourceMapper: {
|
||||
autoMap: {
|
||||
mappingMode: string;
|
||||
value: {};
|
||||
};
|
||||
manual: {
|
||||
mappingMode: string;
|
||||
value: {
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
email: string;
|
||||
status: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
assignmentCollection: {
|
||||
basic: {
|
||||
assignments: {
|
||||
id: string;
|
||||
name: string;
|
||||
value: string;
|
||||
type: string;
|
||||
}[];
|
||||
};
|
||||
multiple: {
|
||||
assignments: ({
|
||||
id: string;
|
||||
name: string;
|
||||
value: string;
|
||||
type: string;
|
||||
} | {
|
||||
id: string;
|
||||
name: string;
|
||||
value: boolean;
|
||||
type: string;
|
||||
})[];
|
||||
};
|
||||
};
|
||||
};
|
||||
//# sourceMappingURL=type-structures.d.ts.map
1 dist/constants/type-structures.d.ts.map vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"type-structures.d.ts","sourceRoot":"","sources":["../../src/constants/type-structures.ts"],"names":[],"mappings":"AAaA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAC;AACtD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAe9D,eAAO,MAAM,eAAe,EAAE,MAAM,CAAC,iBAAiB,EAAE,aAAa,CAkmBpE,CAAC;AAUF,eAAO,MAAM,qBAAqB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA4GjC,CAAC"}
670 dist/constants/type-structures.js vendored Normal file
@@ -0,0 +1,670 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.COMPLEX_TYPE_EXAMPLES = exports.TYPE_STRUCTURES = void 0;
|
||||
exports.TYPE_STRUCTURES = {
|
||||
string: {
|
||||
type: 'primitive',
|
||||
jsType: 'string',
|
||||
description: 'A text value that can contain any characters',
|
||||
example: 'Hello World',
|
||||
examples: ['', 'A simple text', '{{ $json.name }}', 'https://example.com'],
|
||||
validation: {
|
||||
allowEmpty: true,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: ['Most common property type', 'Supports n8n expressions'],
|
||||
},
|
||||
number: {
|
||||
type: 'primitive',
|
||||
jsType: 'number',
|
||||
description: 'A numeric value (integer or decimal)',
|
||||
example: 42,
|
||||
examples: [0, -10, 3.14, 100],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: ['Can be constrained with min/max in typeOptions'],
|
||||
},
|
||||
boolean: {
|
||||
type: 'primitive',
|
||||
jsType: 'boolean',
|
||||
description: 'A true/false toggle value',
|
||||
example: true,
|
||||
examples: [true, false],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: false,
|
||||
},
|
||||
notes: ['Rendered as checkbox in n8n UI'],
|
||||
},
|
||||
dateTime: {
|
||||
type: 'primitive',
|
||||
jsType: 'string',
|
||||
description: 'A date and time value in ISO 8601 format',
|
||||
example: '2024-01-20T10:30:00Z',
|
||||
examples: [
|
||||
'2024-01-20T10:30:00Z',
|
||||
'2024-01-20',
|
||||
'{{ $now }}',
|
||||
],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: true,
|
||||
pattern: '^\\d{4}-\\d{2}-\\d{2}(T\\d{2}:\\d{2}:\\d{2}(\\.\\d{3})?Z?)?$',
|
||||
},
|
||||
notes: ['Accepts ISO 8601 format', 'Can use n8n date expressions'],
|
||||
},
|
||||
color: {
|
||||
type: 'primitive',
|
||||
jsType: 'string',
|
||||
description: 'A color value in hex format',
|
||||
example: '#FF5733',
|
||||
examples: ['#FF5733', '#000000', '#FFFFFF', '{{ $json.color }}'],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: true,
|
||||
pattern: '^#[0-9A-Fa-f]{6}$',
|
||||
},
|
||||
notes: ['Must be 6-digit hex color', 'Rendered with color picker in UI'],
|
||||
},
|
||||
json: {
|
||||
type: 'primitive',
|
||||
jsType: 'string',
|
||||
description: 'A JSON string that can be parsed into any structure',
|
||||
example: '{"key": "value", "nested": {"data": 123}}',
|
||||
examples: [
|
||||
'{}',
|
||||
'{"name": "John", "age": 30}',
|
||||
'[1, 2, 3]',
|
||||
'{{ $json }}',
|
||||
],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: ['Must be valid JSON when parsed', 'Often used for custom payloads'],
|
||||
},
|
||||
options: {
|
||||
type: 'primitive',
|
||||
jsType: 'string',
|
||||
description: 'Single selection from a list of predefined options',
|
||||
example: 'option1',
|
||||
examples: ['GET', 'POST', 'channelMessage', 'update'],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: false,
|
||||
},
|
||||
notes: [
|
||||
'Value must match one of the defined option values',
|
||||
'Rendered as dropdown in UI',
|
||||
'Options defined in property.options array',
|
||||
],
|
||||
},
|
||||
multiOptions: {
|
||||
type: 'array',
|
||||
jsType: 'array',
|
||||
description: 'Multiple selections from a list of predefined options',
|
||||
structure: {
|
||||
items: {
|
||||
type: 'string',
|
||||
description: 'Selected option value',
|
||||
},
|
||||
},
|
||||
example: ['option1', 'option2'],
|
||||
examples: [[], ['GET', 'POST'], ['read', 'write', 'delete']],
|
||||
validation: {
|
||||
allowEmpty: true,
|
||||
allowExpressions: false,
|
||||
},
|
||||
notes: [
|
||||
'Array of option values',
|
||||
'Each value must exist in property.options',
|
||||
'Rendered as multi-select dropdown',
|
||||
],
|
||||
},
|
||||
collection: {
|
||||
type: 'collection',
|
||||
jsType: 'object',
|
||||
description: 'A group of related properties with dynamic values',
|
||||
structure: {
|
||||
properties: {
|
||||
'<propertyName>': {
|
||||
type: 'any',
|
||||
description: 'Any nested property from the collection definition',
|
||||
},
|
||||
},
|
||||
flexible: true,
|
||||
},
|
||||
example: {
|
||||
name: 'John Doe',
|
||||
email: 'john@example.com',
|
||||
age: 30,
|
||||
},
|
||||
examples: [
|
||||
{},
|
||||
{ key1: 'value1', key2: 123 },
|
||||
{ nested: { deep: { value: true } } },
|
||||
],
|
||||
validation: {
|
||||
allowEmpty: true,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: [
|
||||
'Properties defined in property.values array',
|
||||
'Each property can be any type',
|
||||
'UI renders as expandable section',
|
||||
],
|
||||
},
|
||||
fixedCollection: {
|
||||
type: 'collection',
|
||||
jsType: 'object',
|
||||
description: 'A collection with predefined groups of properties',
|
||||
structure: {
|
||||
properties: {
|
||||
'<collectionName>': {
|
||||
type: 'array',
|
||||
description: 'Array of collection items',
|
||||
items: {
|
||||
type: 'object',
|
||||
description: 'Collection item with defined properties',
|
||||
},
|
||||
},
|
||||
},
|
||||
required: [],
|
||||
},
|
||||
example: {
|
||||
headers: [
|
||||
{ name: 'Content-Type', value: 'application/json' },
|
||||
{ name: 'Authorization', value: 'Bearer token' },
|
||||
],
|
||||
},
|
||||
examples: [
|
||||
{},
|
||||
{ queryParameters: [{ name: 'id', value: '123' }] },
|
||||
{
|
||||
headers: [{ name: 'Accept', value: '*/*' }],
|
||||
queryParameters: [{ name: 'limit', value: '10' }],
|
||||
},
|
||||
],
|
||||
validation: {
|
||||
allowEmpty: true,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: [
|
||||
'Each collection has predefined structure',
|
||||
'Often used for headers, parameters, etc.',
|
||||
'Supports multiple values per collection',
|
||||
],
|
||||
},
|
||||
resourceLocator: {
|
||||
type: 'special',
|
||||
jsType: 'object',
|
||||
description: 'A flexible way to specify a resource by ID, name, URL, or list',
|
||||
structure: {
|
||||
properties: {
|
||||
mode: {
|
||||
type: 'string',
|
||||
description: 'How the resource is specified',
|
||||
enum: ['id', 'url', 'list'],
|
||||
required: true,
|
||||
},
|
||||
value: {
|
||||
type: 'string',
|
||||
description: 'The resource identifier',
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
required: ['mode', 'value'],
|
||||
},
|
||||
example: {
|
||||
mode: 'id',
|
||||
value: 'abc123',
|
||||
},
|
||||
examples: [
|
||||
{ mode: 'url', value: 'https://example.com/resource/123' },
|
||||
{ mode: 'list', value: 'item-from-dropdown' },
|
||||
{ mode: 'id', value: '{{ $json.resourceId }}' },
|
||||
],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: [
|
||||
'Provides flexible resource selection',
|
||||
'Mode determines how value is interpreted',
|
||||
'UI adapts based on selected mode',
|
||||
],
|
||||
},
|
||||
resourceMapper: {
|
||||
type: 'special',
|
||||
jsType: 'object',
|
||||
description: 'Maps input data fields to resource fields with transformation options',
|
||||
structure: {
|
||||
properties: {
|
||||
mappingMode: {
|
||||
type: 'string',
|
||||
description: 'How fields are mapped',
|
||||
enum: ['defineBelow', 'autoMapInputData'],
|
||||
},
|
||||
value: {
|
||||
type: 'object',
|
||||
description: 'Field mappings',
|
||||
properties: {
|
||||
'<fieldName>': {
|
||||
type: 'string',
|
||||
description: 'Expression or value for this field',
|
||||
},
|
||||
},
|
||||
flexible: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
example: {
|
||||
mappingMode: 'defineBelow',
|
||||
value: {
|
||||
name: '{{ $json.fullName }}',
|
||||
email: '{{ $json.emailAddress }}',
|
||||
status: 'active',
|
||||
},
|
||||
},
|
||||
examples: [
|
||||
{ mappingMode: 'autoMapInputData', value: {} },
|
||||
{
|
||||
mappingMode: 'defineBelow',
|
||||
value: { id: '{{ $json.userId }}', name: '{{ $json.name }}' },
|
||||
},
|
||||
],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: [
|
||||
'Complex mapping with UI assistance',
|
||||
'Can auto-map or manually define',
|
||||
'Supports field transformations',
|
||||
],
|
||||
},
|
||||
filter: {
|
||||
type: 'special',
|
||||
jsType: 'object',
|
||||
description: 'Defines conditions for filtering data with boolean logic',
|
||||
structure: {
|
||||
properties: {
|
||||
conditions: {
|
||||
type: 'array',
|
||||
description: 'Array of filter conditions',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: {
|
||||
type: 'string',
|
||||
description: 'Unique condition identifier',
|
||||
required: true,
|
||||
},
|
||||
leftValue: {
|
||||
type: 'any',
|
||||
description: 'Left side of comparison',
|
||||
},
|
||||
operator: {
|
||||
type: 'object',
|
||||
description: 'Comparison operator',
|
||||
required: true,
|
||||
properties: {
|
||||
type: {
|
||||
type: 'string',
|
||||
enum: ['string', 'number', 'boolean', 'dateTime', 'array', 'object'],
|
||||
required: true,
|
||||
},
|
||||
operation: {
|
||||
type: 'string',
|
||||
description: 'Operation to perform',
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
rightValue: {
|
||||
type: 'any',
|
||||
description: 'Right side of comparison',
|
||||
},
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
combinator: {
|
||||
type: 'string',
|
||||
description: 'How to combine conditions',
|
||||
enum: ['and', 'or'],
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
required: ['conditions', 'combinator'],
|
||||
},
|
||||
example: {
|
||||
conditions: [
|
||||
{
|
||||
id: 'abc-123',
|
||||
leftValue: '{{ $json.status }}',
|
||||
operator: { type: 'string', operation: 'equals' },
|
||||
rightValue: 'active',
|
||||
},
|
||||
],
|
||||
combinator: 'and',
|
||||
},
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: [
|
||||
'Advanced filtering UI in n8n',
|
||||
'Supports complex boolean logic',
|
||||
'Operations vary by data type',
|
||||
],
|
||||
},
|
||||
assignmentCollection: {
|
||||
type: 'special',
|
||||
jsType: 'object',
|
||||
description: 'Defines variable assignments with expressions',
|
||||
structure: {
|
||||
properties: {
|
||||
assignments: {
|
||||
type: 'array',
|
||||
description: 'Array of variable assignments',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: {
|
||||
type: 'string',
|
||||
description: 'Unique assignment identifier',
|
||||
required: true,
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
description: 'Variable name',
|
||||
required: true,
|
||||
},
|
||||
value: {
|
||||
type: 'any',
|
||||
description: 'Value to assign',
|
||||
required: true,
|
||||
},
|
||||
type: {
|
||||
type: 'string',
|
||||
description: 'Data type of the value',
|
||||
enum: ['string', 'number', 'boolean', 'array', 'object'],
|
||||
},
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
required: ['assignments'],
|
||||
},
|
||||
example: {
|
||||
assignments: [
|
||||
{
|
||||
id: 'abc-123',
|
||||
name: 'userName',
|
||||
value: '{{ $json.name }}',
|
||||
type: 'string',
|
||||
},
|
||||
{
|
||||
id: 'def-456',
|
||||
name: 'userAge',
|
||||
value: 30,
|
||||
type: 'number',
|
||||
},
|
||||
],
|
||||
},
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: [
|
||||
'Used in Set node and similar',
|
||||
'Each assignment can use expressions',
|
||||
'Type helps with validation',
|
||||
],
|
||||
},
|
||||
credentials: {
|
||||
type: 'special',
|
||||
jsType: 'string',
|
||||
description: 'Reference to credential configuration',
|
||||
example: 'googleSheetsOAuth2Api',
|
||||
examples: ['httpBasicAuth', 'slackOAuth2Api', 'postgresApi'],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: false,
|
||||
},
|
||||
notes: [
|
||||
'References credential type name',
|
||||
'Credential must be configured in n8n',
|
||||
'Type name matches credential definition',
|
||||
],
|
||||
},
|
||||
credentialsSelect: {
|
||||
type: 'special',
|
||||
jsType: 'string',
|
||||
description: 'Dropdown to select from available credentials',
|
||||
example: 'credential-id-123',
|
||||
examples: ['cred-abc', 'cred-def', '{{ $credentials.id }}'],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: [
|
||||
'User selects from configured credentials',
|
||||
'Returns credential ID',
|
||||
'Used when multiple credential instances exist',
|
||||
],
|
||||
},
|
||||
hidden: {
|
||||
type: 'special',
|
||||
jsType: 'string',
|
||||
description: 'Hidden property not shown in UI (used for internal logic)',
|
||||
example: '',
|
||||
validation: {
|
||||
allowEmpty: true,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: [
|
||||
'Not rendered in UI',
|
||||
'Can store metadata or computed values',
|
||||
'Often used for version tracking',
|
||||
],
|
||||
},
|
||||
button: {
|
||||
type: 'special',
|
||||
jsType: 'string',
|
||||
description: 'Clickable button that triggers an action',
|
||||
example: '',
|
||||
validation: {
|
||||
allowEmpty: true,
|
||||
allowExpressions: false,
|
||||
},
|
||||
notes: [
|
||||
'Triggers action when clicked',
|
||||
'Does not store a value',
|
||||
'Action defined in routing property',
|
||||
],
|
||||
},
|
||||
callout: {
|
||||
type: 'special',
|
||||
jsType: 'string',
|
||||
description: 'Informational message box (warning, info, success, error)',
|
||||
example: '',
|
||||
validation: {
|
||||
allowEmpty: true,
|
||||
allowExpressions: false,
|
||||
},
|
||||
notes: [
|
||||
'Display-only, no value stored',
|
||||
'Used for warnings and hints',
|
||||
'Style controlled by typeOptions',
|
||||
],
|
||||
},
|
||||
notice: {
|
||||
type: 'special',
|
||||
jsType: 'string',
|
||||
description: 'Notice message displayed to user',
|
||||
example: '',
|
||||
validation: {
|
||||
allowEmpty: true,
|
||||
allowExpressions: false,
|
||||
},
|
||||
notes: ['Similar to callout', 'Display-only element', 'Provides contextual information'],
|
||||
},
|
||||
workflowSelector: {
|
||||
type: 'special',
|
||||
jsType: 'string',
|
||||
description: 'Dropdown to select another workflow',
|
||||
example: 'workflow-123',
|
||||
examples: ['wf-abc', '{{ $json.workflowId }}'],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: true,
|
||||
},
|
||||
notes: [
|
||||
'Selects from available workflows',
|
||||
'Returns workflow ID',
|
||||
'Used in Execute Workflow node',
|
||||
],
|
||||
},
|
||||
curlImport: {
|
||||
type: 'special',
|
||||
jsType: 'string',
|
||||
description: 'Import configuration from cURL command',
|
||||
example: 'curl -X GET https://api.example.com/data',
|
||||
validation: {
|
||||
allowEmpty: true,
|
||||
allowExpressions: false,
|
||||
},
|
||||
notes: [
|
||||
'Parses cURL command to populate fields',
|
||||
'Used in HTTP Request node',
|
||||
'One-time import feature',
|
||||
],
|
||||
},
|
||||
icon: {
|
||||
type: 'primitive',
|
||||
jsType: 'string',
|
||||
description: 'Icon identifier for visual representation',
|
||||
example: 'fa:envelope',
|
||||
examples: ['fa:envelope', 'fa:user', 'fa:cog', 'file:slack.svg'],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: false,
|
||||
},
|
||||
notes: [
|
||||
'References icon by name or file path',
|
||||
'Supports Font Awesome icons (fa:) and file paths (file:)',
|
||||
'Used for visual customization in UI',
|
||||
],
|
||||
},
|
||||
};
|
||||
exports.COMPLEX_TYPE_EXAMPLES = {
|
||||
collection: {
|
||||
basic: {
|
||||
name: 'John Doe',
|
||||
email: 'john@example.com',
|
||||
},
|
||||
nested: {
|
||||
user: {
|
||||
firstName: 'Jane',
|
||||
lastName: 'Smith',
|
||||
},
|
||||
preferences: {
|
||||
theme: 'dark',
|
||||
notifications: true,
|
||||
},
|
||||
},
|
||||
withExpressions: {
|
||||
id: '{{ $json.userId }}',
|
||||
timestamp: '{{ $now }}',
|
||||
data: '{{ $json.payload }}',
|
||||
},
|
||||
},
|
||||
fixedCollection: {
|
||||
httpHeaders: {
|
||||
headers: [
|
||||
{ name: 'Content-Type', value: 'application/json' },
|
||||
{ name: 'Authorization', value: 'Bearer {{ $credentials.token }}' },
|
||||
],
|
||||
},
|
||||
queryParameters: {
|
||||
queryParameters: [
|
||||
{ name: 'page', value: '1' },
|
||||
{ name: 'limit', value: '100' },
|
||||
],
|
||||
},
|
||||
multipleCollections: {
|
||||
headers: [{ name: 'Accept', value: 'application/json' }],
|
||||
queryParameters: [{ name: 'filter', value: 'active' }],
|
||||
},
|
||||
},
|
||||
filter: {
|
||||
simple: {
|
||||
conditions: [
|
||||
{
|
||||
id: '1',
|
||||
leftValue: '{{ $json.status }}',
|
||||
operator: { type: 'string', operation: 'equals' },
|
||||
rightValue: 'active',
|
||||
},
|
||||
],
|
||||
combinator: 'and',
|
||||
},
|
||||
complex: {
|
||||
conditions: [
|
||||
{
|
||||
id: '1',
|
||||
leftValue: '{{ $json.age }}',
|
||||
operator: { type: 'number', operation: 'gt' },
|
||||
rightValue: 18,
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
leftValue: '{{ $json.country }}',
|
||||
operator: { type: 'string', operation: 'equals' },
|
||||
rightValue: 'US',
|
||||
},
|
||||
],
|
||||
combinator: 'and',
|
||||
},
|
||||
},
|
||||
resourceMapper: {
|
||||
autoMap: {
|
||||
mappingMode: 'autoMapInputData',
|
||||
value: {},
|
||||
},
|
||||
manual: {
|
||||
mappingMode: 'defineBelow',
|
||||
value: {
|
||||
firstName: '{{ $json.first_name }}',
|
||||
lastName: '{{ $json.last_name }}',
|
||||
email: '{{ $json.email_address }}',
|
||||
status: 'active',
|
||||
},
|
||||
},
|
||||
},
|
||||
assignmentCollection: {
|
||||
basic: {
|
||||
assignments: [
|
||||
{
|
||||
id: '1',
|
||||
name: 'fullName',
|
||||
value: '{{ $json.firstName }} {{ $json.lastName }}',
|
||||
type: 'string',
|
||||
},
|
||||
],
|
||||
},
|
||||
multiple: {
|
||||
assignments: [
|
||||
{ id: '1', name: 'userName', value: '{{ $json.name }}', type: 'string' },
|
||||
{ id: '2', name: 'userAge', value: '{{ $json.age }}', type: 'number' },
|
||||
{ id: '3', name: 'isActive', value: true, type: 'boolean' },
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
//# sourceMappingURL=type-structures.js.map
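To make the data above easier to navigate, a small hedged sketch that reads entries from the exported maps; the keys, fields, and values are taken from the compiled file above, while the lookup script itself is illustrative:

```js
// Illustrative only: inspects entries of TYPE_STRUCTURES from dist/constants/type-structures.js.
const { TYPE_STRUCTURES, COMPLEX_TYPE_EXAMPLES } = require('./dist/constants/type-structures');

// Each entry describes one n8n property type: its JS type, an example value, and validation hints.
const fixedCollection = TYPE_STRUCTURES.fixedCollection;
console.log(fixedCollection.description);        // 'A collection with predefined groups of properties'
console.log(fixedCollection.example.headers[0]); // { name: 'Content-Type', value: 'application/json' }

// COMPLEX_TYPE_EXAMPLES holds ready-made sample payloads for the harder types.
console.log(COMPLEX_TYPE_EXAMPLES.filter.simple.combinator); // 'and'
```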
1 dist/constants/type-structures.js.map vendored Normal file
File diff suppressed because one or more lines are too long
33 dist/database/database-adapter.d.ts vendored Normal file
@@ -0,0 +1,33 @@
export interface DatabaseAdapter {
    prepare(sql: string): PreparedStatement;
    exec(sql: string): void;
    close(): void;
    pragma(key: string, value?: any): any;
    readonly inTransaction: boolean;
    transaction<T>(fn: () => T): T;
    checkFTS5Support(): boolean;
}
export interface PreparedStatement {
    run(...params: any[]): RunResult;
    get(...params: any[]): any;
    all(...params: any[]): any[];
    iterate(...params: any[]): IterableIterator<any>;
    pluck(toggle?: boolean): this;
    expand(toggle?: boolean): this;
    raw(toggle?: boolean): this;
    columns(): ColumnDefinition[];
    bind(...params: any[]): this;
}
export interface RunResult {
    changes: number;
    lastInsertRowid: number | bigint;
}
export interface ColumnDefinition {
    name: string;
    column: string | null;
    table: string | null;
    database: string | null;
    type: string | null;
}
export declare function createDatabaseAdapter(dbPath: string): Promise<DatabaseAdapter>;
//# sourceMappingURL=database-adapter.d.ts.map
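A hedged usage sketch for the adapter factory declared above; the function name, method signatures, and the data/nodes.db path appear elsewhere in this compare view, while the query and table name are made-up examples:

```js
// Illustrative only: uses createDatabaseAdapter from dist/database/database-adapter.js.
const { createDatabaseAdapter } = require('./dist/database/database-adapter');

async function main() {
  // The factory prefers better-sqlite3 and falls back to sql.js when the native module fails to load.
  const db = await createDatabaseAdapter('./data/nodes.db');

  // PreparedStatement#get returns a single row; #all returns every row.
  const stmt = db.prepare('SELECT COUNT(*) AS n FROM nodes'); // table name assumed for illustration
  console.log(stmt.get());

  db.close();
}

main().catch(console.error);
```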
1 dist/database/database-adapter.d.ts.map vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"database-adapter.d.ts","sourceRoot":"","sources":["../../src/database/database-adapter.ts"],"names":[],"mappings":"AAQA,MAAM,WAAW,eAAe;IAC9B,OAAO,CAAC,GAAG,EAAE,MAAM,GAAG,iBAAiB,CAAC;IACxC,IAAI,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,KAAK,IAAI,IAAI,CAAC;IACd,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,GAAG,GAAG,GAAG,CAAC;IACtC,QAAQ,CAAC,aAAa,EAAE,OAAO,CAAC;IAChC,WAAW,CAAC,CAAC,EAAE,EAAE,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC;IAC/B,gBAAgB,IAAI,OAAO,CAAC;CAC7B;AAED,MAAM,WAAW,iBAAiB;IAChC,GAAG,CAAC,GAAG,MAAM,EAAE,GAAG,EAAE,GAAG,SAAS,CAAC;IACjC,GAAG,CAAC,GAAG,MAAM,EAAE,GAAG,EAAE,GAAG,GAAG,CAAC;IAC3B,GAAG,CAAC,GAAG,MAAM,EAAE,GAAG,EAAE,GAAG,GAAG,EAAE,CAAC;IAC7B,OAAO,CAAC,GAAG,MAAM,EAAE,GAAG,EAAE,GAAG,gBAAgB,CAAC,GAAG,CAAC,CAAC;IACjD,KAAK,CAAC,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAC9B,MAAM,CAAC,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAC/B,GAAG,CAAC,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;IAC5B,OAAO,IAAI,gBAAgB,EAAE,CAAC;IAC9B,IAAI,CAAC,GAAG,MAAM,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC;CAC9B;AAED,MAAM,WAAW,SAAS;IACxB,OAAO,EAAE,MAAM,CAAC;IAChB,eAAe,EAAE,MAAM,GAAG,MAAM,CAAC;CAClC;AAED,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IACtB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;CACrB;AAMD,wBAAsB,qBAAqB,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC,CAoDpF"}
420 dist/database/database-adapter.js vendored Normal file
@@ -0,0 +1,420 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createDatabaseAdapter = createDatabaseAdapter;
|
||||
const fs_1 = require("fs");
|
||||
const fsSync = __importStar(require("fs"));
|
||||
const path_1 = __importDefault(require("path"));
|
||||
const logger_1 = require("../utils/logger");
|
||||
async function createDatabaseAdapter(dbPath) {
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
logger_1.logger.info(`Node.js version: ${process.version}`);
|
||||
}
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
logger_1.logger.info(`Platform: ${process.platform} ${process.arch}`);
|
||||
}
|
||||
try {
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
logger_1.logger.info('Attempting to use better-sqlite3...');
|
||||
}
|
||||
const adapter = await createBetterSQLiteAdapter(dbPath);
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
logger_1.logger.info('Successfully initialized better-sqlite3 adapter');
|
||||
}
|
||||
return adapter;
|
||||
}
|
||||
catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
if (errorMessage.includes('NODE_MODULE_VERSION') || errorMessage.includes('was compiled against a different Node.js version')) {
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
logger_1.logger.warn(`Node.js version mismatch detected. Better-sqlite3 was compiled for a different Node.js version.`);
|
||||
}
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
logger_1.logger.warn(`Current Node.js version: ${process.version}`);
|
||||
}
|
||||
}
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
logger_1.logger.warn('Failed to initialize better-sqlite3, falling back to sql.js', error);
|
||||
}
|
||||
try {
|
||||
const adapter = await createSQLJSAdapter(dbPath);
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
logger_1.logger.info('Successfully initialized sql.js adapter (pure JavaScript, no native dependencies)');
|
||||
}
|
||||
return adapter;
|
||||
}
|
||||
catch (sqlJsError) {
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
logger_1.logger.error('Failed to initialize sql.js adapter', sqlJsError);
|
||||
}
|
||||
throw new Error('Failed to initialize any database adapter');
|
||||
}
|
||||
}
|
||||
}
|
||||
async function createBetterSQLiteAdapter(dbPath) {
|
||||
try {
|
||||
const Database = require('better-sqlite3');
|
||||
const db = new Database(dbPath);
|
||||
return new BetterSQLiteAdapter(db);
|
||||
}
|
||||
catch (error) {
|
||||
throw new Error(`Failed to create better-sqlite3 adapter: ${error}`);
|
||||
}
|
||||
}
|
||||
async function createSQLJSAdapter(dbPath) {
|
||||
let initSqlJs;
|
||||
try {
|
||||
initSqlJs = require('sql.js');
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.error('Failed to load sql.js module:', error);
|
||||
throw new Error('sql.js module not found. This might be an issue with npm package installation.');
|
||||
}
|
||||
const SQL = await initSqlJs({
|
||||
locateFile: (file) => {
|
||||
if (file.endsWith('.wasm')) {
|
||||
const possiblePaths = [
|
||||
path_1.default.join(__dirname, '../../node_modules/sql.js/dist/', file),
|
||||
path_1.default.join(__dirname, '../../../sql.js/dist/', file),
|
||||
path_1.default.join(process.cwd(), 'node_modules/sql.js/dist/', file),
|
||||
path_1.default.join(path_1.default.dirname(require.resolve('sql.js')), '../dist/', file)
|
||||
];
|
||||
for (const tryPath of possiblePaths) {
|
||||
if (fsSync.existsSync(tryPath)) {
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
logger_1.logger.debug(`Found WASM file at: ${tryPath}`);
|
||||
}
|
||||
return tryPath;
|
||||
}
|
||||
}
|
||||
try {
|
||||
const wasmPath = require.resolve('sql.js/dist/sql-wasm.wasm');
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
logger_1.logger.debug(`Found WASM file via require.resolve: ${wasmPath}`);
|
||||
}
|
||||
return wasmPath;
|
||||
}
|
||||
catch (e) {
|
||||
logger_1.logger.warn(`Could not find WASM file, using default path: ${file}`);
|
||||
return file;
|
||||
}
|
||||
}
|
||||
return file;
|
||||
}
|
||||
});
|
||||
let db;
|
||||
try {
|
||||
const data = await fs_1.promises.readFile(dbPath);
|
||||
db = new SQL.Database(new Uint8Array(data));
|
||||
logger_1.logger.info(`Loaded existing database from ${dbPath}`);
|
||||
}
|
||||
catch (error) {
|
||||
db = new SQL.Database();
|
||||
logger_1.logger.info(`Created new database at ${dbPath}`);
|
||||
}
|
||||
return new SQLJSAdapter(db, dbPath);
|
||||
}
|
||||
class BetterSQLiteAdapter {
|
||||
constructor(db) {
|
||||
this.db = db;
|
||||
}
|
||||
prepare(sql) {
|
||||
const stmt = this.db.prepare(sql);
|
||||
return new BetterSQLiteStatement(stmt);
|
||||
}
|
||||
exec(sql) {
|
||||
this.db.exec(sql);
|
||||
}
|
||||
close() {
|
||||
this.db.close();
|
||||
}
|
||||
pragma(key, value) {
|
||||
return this.db.pragma(key, value);
|
||||
}
|
||||
get inTransaction() {
|
||||
return this.db.inTransaction;
|
||||
}
|
||||
transaction(fn) {
|
||||
return this.db.transaction(fn)();
|
||||
}
|
||||
checkFTS5Support() {
|
||||
try {
|
||||
this.exec("CREATE VIRTUAL TABLE IF NOT EXISTS test_fts5 USING fts5(content);");
|
||||
this.exec("DROP TABLE IF EXISTS test_fts5;");
|
||||
return true;
|
||||
}
|
||||
catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
class SQLJSAdapter {
|
||||
constructor(db, dbPath) {
|
||||
this.db = db;
|
||||
this.dbPath = dbPath;
|
||||
this.saveTimer = null;
|
||||
this.closed = false;
|
||||
const envInterval = process.env.SQLJS_SAVE_INTERVAL_MS;
|
||||
this.saveIntervalMs = envInterval ? parseInt(envInterval, 10) : SQLJSAdapter.DEFAULT_SAVE_INTERVAL_MS;
|
||||
if (isNaN(this.saveIntervalMs) || this.saveIntervalMs < 100 || this.saveIntervalMs > 60000) {
|
||||
logger_1.logger.warn(`Invalid SQLJS_SAVE_INTERVAL_MS value: ${envInterval} (must be 100-60000ms), ` +
|
||||
`using default ${SQLJSAdapter.DEFAULT_SAVE_INTERVAL_MS}ms`);
|
||||
this.saveIntervalMs = SQLJSAdapter.DEFAULT_SAVE_INTERVAL_MS;
|
||||
}
|
||||
logger_1.logger.debug(`SQLJSAdapter initialized with save interval: ${this.saveIntervalMs}ms`);
|
||||
}
|
||||
prepare(sql) {
|
||||
const stmt = this.db.prepare(sql);
|
||||
return new SQLJSStatement(stmt, () => this.scheduleSave());
|
||||
}
|
||||
exec(sql) {
|
||||
this.db.exec(sql);
|
||||
this.scheduleSave();
|
||||
}
|
||||
close() {
|
||||
if (this.closed) {
|
||||
logger_1.logger.debug('SQLJSAdapter already closed, skipping');
|
||||
return;
|
||||
}
|
||||
this.saveToFile();
|
||||
if (this.saveTimer) {
|
||||
clearTimeout(this.saveTimer);
|
||||
this.saveTimer = null;
|
||||
}
|
||||
this.db.close();
|
||||
this.closed = true;
|
||||
}
|
||||
pragma(key, value) {
|
||||
if (key === 'journal_mode' && value === 'WAL') {
|
||||
return 'memory';
|
||||
}
|
||||
return null;
|
||||
}
|
||||
get inTransaction() {
|
||||
return false;
|
||||
}
|
||||
transaction(fn) {
|
||||
try {
|
||||
this.exec('BEGIN');
|
||||
const result = fn();
|
||||
this.exec('COMMIT');
|
||||
return result;
|
||||
}
|
||||
catch (error) {
|
||||
this.exec('ROLLBACK');
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
checkFTS5Support() {
|
||||
try {
|
||||
this.exec("CREATE VIRTUAL TABLE IF NOT EXISTS test_fts5 USING fts5(content);");
|
||||
this.exec("DROP TABLE IF EXISTS test_fts5;");
|
||||
return true;
|
||||
}
|
||||
catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
scheduleSave() {
|
||||
if (this.saveTimer) {
|
||||
clearTimeout(this.saveTimer);
|
||||
}
|
||||
this.saveTimer = setTimeout(() => {
|
||||
this.saveToFile();
|
||||
}, this.saveIntervalMs);
|
||||
}
|
||||
saveToFile() {
|
||||
try {
|
||||
const data = this.db.export();
|
||||
fsSync.writeFileSync(this.dbPath, data);
|
||||
logger_1.logger.debug(`Database saved to ${this.dbPath}`);
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.error('Failed to save database', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
SQLJSAdapter.DEFAULT_SAVE_INTERVAL_MS = 5000;
|
||||
class BetterSQLiteStatement {
|
||||
constructor(stmt) {
|
||||
this.stmt = stmt;
|
||||
}
|
||||
run(...params) {
|
||||
return this.stmt.run(...params);
|
||||
}
|
||||
get(...params) {
|
||||
return this.stmt.get(...params);
|
||||
}
|
||||
all(...params) {
|
||||
return this.stmt.all(...params);
|
||||
}
|
||||
iterate(...params) {
|
||||
return this.stmt.iterate(...params);
|
||||
}
|
||||
pluck(toggle) {
|
||||
this.stmt.pluck(toggle);
|
||||
return this;
|
||||
}
|
||||
expand(toggle) {
|
||||
this.stmt.expand(toggle);
|
||||
return this;
|
||||
}
|
||||
raw(toggle) {
|
||||
this.stmt.raw(toggle);
|
||||
return this;
|
||||
}
|
||||
columns() {
|
||||
return this.stmt.columns();
|
||||
}
|
||||
bind(...params) {
|
||||
this.stmt.bind(...params);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
class SQLJSStatement {
|
||||
constructor(stmt, onModify) {
|
||||
this.stmt = stmt;
|
||||
this.onModify = onModify;
|
||||
this.boundParams = null;
|
||||
}
|
||||
run(...params) {
|
||||
try {
|
||||
if (params.length > 0) {
|
||||
this.bindParams(params);
|
||||
if (this.boundParams) {
|
||||
this.stmt.bind(this.boundParams);
|
||||
}
|
||||
}
|
||||
this.stmt.run();
|
||||
this.onModify();
|
||||
return {
|
||||
changes: 1,
|
||||
lastInsertRowid: 0
|
||||
};
|
||||
}
|
||||
catch (error) {
|
||||
this.stmt.reset();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
get(...params) {
|
||||
try {
|
||||
if (params.length > 0) {
|
||||
this.bindParams(params);
|
||||
if (this.boundParams) {
|
||||
this.stmt.bind(this.boundParams);
|
||||
}
|
||||
}
|
||||
if (this.stmt.step()) {
|
||||
const result = this.stmt.getAsObject();
|
||||
this.stmt.reset();
|
||||
return this.convertIntegerColumns(result);
|
||||
}
|
||||
this.stmt.reset();
|
||||
return undefined;
|
||||
}
|
||||
catch (error) {
|
||||
this.stmt.reset();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
all(...params) {
|
||||
try {
|
||||
if (params.length > 0) {
|
||||
this.bindParams(params);
|
||||
if (this.boundParams) {
|
||||
this.stmt.bind(this.boundParams);
|
||||
}
|
||||
}
|
||||
const results = [];
|
||||
while (this.stmt.step()) {
|
||||
results.push(this.convertIntegerColumns(this.stmt.getAsObject()));
|
||||
}
|
||||
this.stmt.reset();
|
||||
return results;
|
||||
}
|
||||
catch (error) {
|
||||
this.stmt.reset();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
iterate(...params) {
|
||||
return this.all(...params)[Symbol.iterator]();
|
||||
}
|
||||
pluck(toggle) {
|
||||
return this;
|
||||
}
|
||||
expand(toggle) {
|
||||
return this;
|
||||
}
|
||||
raw(toggle) {
|
||||
return this;
|
||||
}
|
||||
columns() {
|
||||
return [];
|
||||
}
|
||||
bind(...params) {
|
||||
this.bindParams(params);
|
||||
return this;
|
||||
}
|
||||
bindParams(params) {
|
||||
if (params.length === 0) {
|
||||
this.boundParams = null;
|
||||
return;
|
||||
}
|
||||
if (params.length === 1 && typeof params[0] === 'object' && !Array.isArray(params[0]) && params[0] !== null) {
|
||||
this.boundParams = params[0];
|
||||
}
|
||||
else {
|
||||
this.boundParams = params.map(p => p === undefined ? null : p);
|
||||
}
|
||||
}
|
||||
convertIntegerColumns(row) {
|
||||
if (!row)
|
||||
return row;
|
||||
const integerColumns = ['is_ai_tool', 'is_trigger', 'is_webhook', 'is_versioned'];
|
||||
const converted = { ...row };
|
||||
for (const col of integerColumns) {
|
||||
if (col in converted && typeof converted[col] === 'string') {
|
||||
converted[col] = parseInt(converted[col], 10);
|
||||
}
|
||||
}
|
||||
return converted;
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=database-adapter.js.map
|
||||
1
dist/database/database-adapter.js.map
vendored
Normal file
1
dist/database/database-adapter.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
129
dist/database/node-repository.d.ts
vendored
Normal file
129
dist/database/node-repository.d.ts
vendored
Normal file
@@ -0,0 +1,129 @@
|
||||
import { DatabaseAdapter } from './database-adapter';
|
||||
import { ParsedNode } from '../parsers/node-parser';
|
||||
import { SQLiteStorageService } from '../services/sqlite-storage-service';
|
||||
export interface CommunityNodeFields {
|
||||
isCommunity: boolean;
|
||||
isVerified: boolean;
|
||||
authorName?: string;
|
||||
authorGithubUrl?: string;
|
||||
npmPackageName?: string;
|
||||
npmVersion?: string;
|
||||
npmDownloads?: number;
|
||||
communityFetchedAt?: string;
|
||||
}
|
||||
export declare class NodeRepository {
|
||||
private db;
|
||||
constructor(dbOrService: DatabaseAdapter | SQLiteStorageService);
|
||||
saveNode(node: ParsedNode & Partial<CommunityNodeFields>): void;
|
||||
getNode(nodeType: string): any;
|
||||
getAITools(): any[];
|
||||
private safeJsonParse;
|
||||
upsertNode(node: ParsedNode): void;
|
||||
getNodeByType(nodeType: string): any;
|
||||
getNodesByCategory(category: string): any[];
|
||||
searchNodes(query: string, mode?: 'OR' | 'AND' | 'FUZZY', limit?: number): any[];
|
||||
getAllNodes(limit?: number): any[];
|
||||
getNodeCount(): number;
|
||||
getAIToolNodes(): any[];
|
||||
getToolVariant(baseNodeType: string): any | null;
|
||||
getBaseNodeForToolVariant(toolNodeType: string): any | null;
|
||||
getToolVariants(): any[];
|
||||
getToolVariantCount(): number;
|
||||
getNodesByPackage(packageName: string): any[];
|
||||
searchNodeProperties(nodeType: string, query: string, maxResults?: number): any[];
|
||||
private parseNodeRow;
|
||||
getNodeOperations(nodeType: string, resource?: string): any[];
|
||||
getNodeResources(nodeType: string): any[];
|
||||
getOperationsForResource(nodeType: string, resource: string): any[];
|
||||
getAllOperations(): Map<string, any[]>;
|
||||
getAllResources(): Map<string, any[]>;
|
||||
getNodePropertyDefaults(nodeType: string): Record<string, any>;
|
||||
getDefaultOperationForResource(nodeType: string, resource?: string): string | undefined;
|
||||
getCommunityNodes(options?: {
|
||||
verified?: boolean;
|
||||
limit?: number;
|
||||
orderBy?: 'downloads' | 'name' | 'updated';
|
||||
}): any[];
|
||||
getCommunityStats(): {
|
||||
total: number;
|
||||
verified: number;
|
||||
unverified: number;
|
||||
};
|
||||
hasNodeByNpmPackage(npmPackageName: string): boolean;
|
||||
getNodeByNpmPackage(npmPackageName: string): any | null;
|
||||
deleteCommunityNodes(): number;
|
||||
updateNodeReadme(nodeType: string, readme: string): void;
|
||||
updateNodeAISummary(nodeType: string, summary: object): void;
|
||||
getCommunityNodesWithoutReadme(): any[];
|
||||
getCommunityNodesWithoutAISummary(): any[];
|
||||
getDocumentationStats(): {
|
||||
total: number;
|
||||
withReadme: number;
|
||||
withAISummary: number;
|
||||
needingReadme: number;
|
||||
needingAISummary: number;
|
||||
};
|
||||
saveNodeVersion(versionData: {
|
||||
nodeType: string;
|
||||
version: string;
|
||||
packageName: string;
|
||||
displayName: string;
|
||||
description?: string;
|
||||
category?: string;
|
||||
isCurrentMax?: boolean;
|
||||
propertiesSchema?: any;
|
||||
operations?: any;
|
||||
credentialsRequired?: any;
|
||||
outputs?: any;
|
||||
minimumN8nVersion?: string;
|
||||
breakingChanges?: any[];
|
||||
deprecatedProperties?: string[];
|
||||
addedProperties?: string[];
|
||||
releasedAt?: Date;
|
||||
}): void;
|
||||
getNodeVersions(nodeType: string): any[];
|
||||
getLatestNodeVersion(nodeType: string): any | null;
|
||||
getNodeVersion(nodeType: string, version: string): any | null;
|
||||
savePropertyChange(changeData: {
|
||||
nodeType: string;
|
||||
fromVersion: string;
|
||||
toVersion: string;
|
||||
propertyName: string;
|
||||
changeType: 'added' | 'removed' | 'renamed' | 'type_changed' | 'requirement_changed' | 'default_changed';
|
||||
isBreaking?: boolean;
|
||||
oldValue?: string;
|
||||
newValue?: string;
|
||||
migrationHint?: string;
|
||||
autoMigratable?: boolean;
|
||||
migrationStrategy?: any;
|
||||
severity?: 'LOW' | 'MEDIUM' | 'HIGH';
|
||||
}): void;
|
||||
getPropertyChanges(nodeType: string, fromVersion: string, toVersion: string): any[];
|
||||
getBreakingChanges(nodeType: string, fromVersion: string, toVersion?: string): any[];
|
||||
getAutoMigratableChanges(nodeType: string, fromVersion: string, toVersion: string): any[];
|
||||
hasVersionUpgradePath(nodeType: string, fromVersion: string, toVersion: string): boolean;
|
||||
getVersionedNodesCount(): number;
|
||||
private parseNodeVersionRow;
|
||||
private parsePropertyChangeRow;
|
||||
createWorkflowVersion(data: {
|
||||
workflowId: string;
|
||||
versionNumber: number;
|
||||
workflowName: string;
|
||||
workflowSnapshot: any;
|
||||
trigger: 'partial_update' | 'full_update' | 'autofix';
|
||||
operations?: any[];
|
||||
fixTypes?: string[];
|
||||
metadata?: any;
|
||||
}): number;
|
||||
getWorkflowVersions(workflowId: string, limit?: number): any[];
|
||||
getWorkflowVersion(versionId: number): any | null;
|
||||
getLatestWorkflowVersion(workflowId: string): any | null;
|
||||
deleteWorkflowVersion(versionId: number): void;
|
||||
deleteWorkflowVersionsByWorkflowId(workflowId: string): number;
|
||||
pruneWorkflowVersions(workflowId: string, keepCount: number): number;
|
||||
truncateWorkflowVersions(): number;
|
||||
getWorkflowVersionCount(workflowId: string): number;
|
||||
getVersionStorageStats(): any;
|
||||
private parseWorkflowVersionRow;
|
||||
}
|
||||
//# sourceMappingURL=node-repository.d.ts.map
|
||||
1
dist/database/node-repository.d.ts.map
vendored
Normal file
1
dist/database/node-repository.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"node-repository.d.ts","sourceRoot":"","sources":["../../src/database/node-repository.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AACpD,OAAO,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAM1E,MAAM,WAAW,mBAAmB;IAClC,WAAW,EAAE,OAAO,CAAC;IACrB,UAAU,EAAE,OAAO,CAAC;IACpB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,kBAAkB,CAAC,EAAE,MAAM,CAAC;CAC7B;AAED,qBAAa,cAAc;IACzB,OAAO,CAAC,EAAE,CAAkB;gBAEhB,WAAW,EAAE,eAAe,GAAG,oBAAoB;IAa/D,QAAQ,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,mBAAmB,CAAC,GAAG,IAAI;IAmD/D,OAAO,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG;IAuC9B,UAAU,IAAI,GAAG,EAAE;IAgBnB,OAAO,CAAC,aAAa;IASrB,UAAU,CAAC,IAAI,EAAE,UAAU,GAAG,IAAI;IAIlC,aAAa,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG;IAIpC,kBAAkB,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAqB3C,WAAW,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,GAAE,IAAI,GAAG,KAAK,GAAG,OAAc,EAAE,KAAK,GAAE,MAAW,GAAG,GAAG,EAAE;IAwC1F,WAAW,CAAC,KAAK,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IAUlC,YAAY,IAAI,MAAM;IAKtB,cAAc,IAAI,GAAG,EAAE;IAOvB,cAAc,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAYhD,yBAAyB,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAY3D,eAAe,IAAI,GAAG,EAAE;IAoBxB,mBAAmB,IAAI,MAAM;IAK7B,iBAAiB,CAAC,WAAW,EAAE,MAAM,GAAG,GAAG,EAAE;IAS7C,oBAAoB,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,UAAU,GAAE,MAAW,GAAG,GAAG,EAAE;IAmCrF,OAAO,CAAC,YAAY;IA2CpB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IAmD7D,gBAAgB,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAmBzC,wBAAwB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAyBnE,gBAAgB,IAAI,GAAG,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC;IAiBtC,eAAe,IAAI,GAAG,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC;IAiBrC,uBAAuB,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC;IAwB9D,8BAA8B,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAsDvF,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC1B,QAAQ,CAAC,EAAE,OAAO,CAAC;QACnB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,OAAO,CAAC,EAAE,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;KAC5C,GAAG,GAAG,EAAE;IAkCT,iBAAiB,IAAI;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE;IAmB5E,mBAAmB,CAAC,cAAc,EAAE,MAAM,GAAG,OAAO;IAUpD,mBAAmB,CAAC,cAAc,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAYvD,oBAAoB,IAAI,MAAM;IAc9B,gBAAgB,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,IAAI;IAUxD,mBAAmB,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;IAY5D,8BAA8B,IAAI,GAAG,EAAE;IAYvC,iCAAiC,IAAI,GAAG,EAAE;IAc1C,qBAAqB,IAAI;QACvB,KAAK,EAAE,MAAM,CAAC;QACd,UAAU,EAAE,MAAM,CAAC;QACnB,aAAa,EAAE,MAAM,CAAC;QACtB,aAAa,EAAE,MAAM,CAAC;QACtB,gBAAgB,EAAE,MAAM,CAAC;KAC1B;IA8BD,eAAe,CAAC,WAAW,EAAE;QAC3B,QAAQ,EAAE,MAAM,CAAC;QACjB,OAAO,EAAE,MAAM,CAAC;QAChB,WAAW,EAAE,MAAM,CAAC;QACpB,WAAW,EAAE,MAAM,CAAC;QACpB,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,YAAY,CAAC,EAAE,OAAO,CAAC;QACvB,gBAAgB,CAAC,EAAE,GAAG,CAAC;QACvB,UAAU,CAAC,EAAE,GAAG,CAAC;QACjB,mBAAmB,CAAC,EAAE,GAAG,CAAC;QAC1B,OAAO,CAAC,EAAE,GAAG,CAAC;QACd,iBAAiB,CAAC,EAAE,MAAM,CAAC;QAC3B,eAAe,CAAC,EAAE,GAAG,EAAE,CAAC;QACxB,oBAAoB,CAAC,EAAE,MAAM,EAAE,CAAC;QAChC,eAAe,CAAC,EAAE,MAAM,EAAE,CAAC;QAC3B,UAAU,CAAC,EAAE,IAAI,CAAC;KACnB,GAAG,IAAI;IAkCR,eAAe,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAexC,oBAAoB,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAgBlD,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAe7D,kBAAkB,CAAC,UAAU,EAAE;QAC7B,QAAQ,EAAE,MAAM,CAAC;QACjB,WAAW,EAAE,MAAM,CAAC;QACpB,SAAS,EAAE,MAAM,CAAC;QAClB,YAAY,EAAE,MAAM,CAAC;QACrB,UAAU,EAAE,OAAO,GAAG,SAAS,GAAG,SAAS,GAAG,cAAc,GAAG,qBAAqB,GAAG,iBAAiB,CAAC;QACzG,UAAU,CAAC,EAAE,OAAO,CAAC;QACrB,QAAQ,CAAC,
EAAE,MAAM,CAAC;QAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,cAAc,CAAC,EAAE,OAAO,CAAC;QACzB,iBAAiB,CAAC,EAAE,GAAG,CAAC;QACxB,QAAQ,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;KACtC,GAAG,IAAI;IA4BR,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,GAAG,EAAE;IAgBnF,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IA4BpF,wBAAwB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,GAAG,EAAE;IAkBzF,qBAAqB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO;IAcxF,sBAAsB,IAAI,MAAM;IAWhC,OAAO,CAAC,mBAAmB;IA0B3B,OAAO,CAAC,sBAAsB;IA0B9B,qBAAqB,CAAC,IAAI,EAAE;QAC1B,UAAU,EAAE,MAAM,CAAC;QACnB,aAAa,EAAE,MAAM,CAAC;QACtB,YAAY,EAAE,MAAM,CAAC;QACrB,gBAAgB,EAAE,GAAG,CAAC;QACtB,OAAO,EAAE,gBAAgB,GAAG,aAAa,GAAG,SAAS,CAAC;QACtD,UAAU,CAAC,EAAE,GAAG,EAAE,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;QACpB,QAAQ,CAAC,EAAE,GAAG,CAAC;KAChB,GAAG,MAAM;IAyBV,mBAAmB,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IAoB9D,kBAAkB,CAAC,SAAS,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAYjD,wBAAwB,CAAC,UAAU,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAexD,qBAAqB,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI;IAS9C,kCAAkC,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM;IAY9D,qBAAqB,CAAC,UAAU,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM;IAiCpE,wBAAwB,IAAI,MAAM;IAWlC,uBAAuB,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM;IAWnD,sBAAsB,IAAI,GAAG;IAwC7B,OAAO,CAAC,uBAAuB;CAchC"}
|
||||
756
dist/database/node-repository.js
vendored
Normal file
756
dist/database/node-repository.js
vendored
Normal file
@@ -0,0 +1,756 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.NodeRepository = void 0;
|
||||
const sqlite_storage_service_1 = require("../services/sqlite-storage-service");
|
||||
const node_type_normalizer_1 = require("../utils/node-type-normalizer");
|
||||
class NodeRepository {
|
||||
constructor(dbOrService) {
|
||||
if (dbOrService instanceof sqlite_storage_service_1.SQLiteStorageService) {
|
||||
this.db = dbOrService.db;
|
||||
return;
|
||||
}
|
||||
this.db = dbOrService;
|
||||
}
|
||||
saveNode(node) {
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT OR REPLACE INTO nodes (
|
||||
node_type, package_name, display_name, description,
|
||||
category, development_style, is_ai_tool, is_trigger,
|
||||
is_webhook, is_versioned, is_tool_variant, tool_variant_of,
|
||||
has_tool_variant, version, documentation,
|
||||
properties_schema, operations, credentials_required,
|
||||
outputs, output_names,
|
||||
is_community, is_verified, author_name, author_github_url,
|
||||
npm_package_name, npm_version, npm_downloads, community_fetched_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
stmt.run(node.nodeType, node.packageName, node.displayName, node.description, node.category, node.style, node.isAITool ? 1 : 0, node.isTrigger ? 1 : 0, node.isWebhook ? 1 : 0, node.isVersioned ? 1 : 0, node.isToolVariant ? 1 : 0, node.toolVariantOf || null, node.hasToolVariant ? 1 : 0, node.version, node.documentation || null, JSON.stringify(node.properties, null, 2), JSON.stringify(node.operations, null, 2), JSON.stringify(node.credentials, null, 2), node.outputs ? JSON.stringify(node.outputs, null, 2) : null, node.outputNames ? JSON.stringify(node.outputNames, null, 2) : null, node.isCommunity ? 1 : 0, node.isVerified ? 1 : 0, node.authorName || null, node.authorGithubUrl || null, node.npmPackageName || null, node.npmVersion || null, node.npmDownloads || 0, node.communityFetchedAt || null);
|
||||
}
|
||||
getNode(nodeType) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
const row = this.db.prepare(`
|
||||
SELECT * FROM nodes WHERE node_type = ?
|
||||
`).get(normalizedType);
|
||||
if (!row && normalizedType !== nodeType) {
|
||||
const originalRow = this.db.prepare(`
|
||||
SELECT * FROM nodes WHERE node_type = ?
|
||||
`).get(nodeType);
|
||||
if (originalRow) {
|
||||
return this.parseNodeRow(originalRow);
|
||||
}
|
||||
}
|
||||
if (!row) {
|
||||
const caseInsensitiveRow = this.db.prepare(`
|
||||
SELECT * FROM nodes WHERE LOWER(node_type) = LOWER(?)
|
||||
`).get(nodeType);
|
||||
if (caseInsensitiveRow) {
|
||||
return this.parseNodeRow(caseInsensitiveRow);
|
||||
}
|
||||
}
|
||||
if (!row)
|
||||
return null;
|
||||
return this.parseNodeRow(row);
|
||||
}
|
||||
getAITools() {
|
||||
const rows = this.db.prepare(`
|
||||
SELECT node_type, display_name, description, package_name
|
||||
FROM nodes
|
||||
WHERE is_ai_tool = 1
|
||||
ORDER BY display_name
|
||||
`).all();
|
||||
return rows.map(row => ({
|
||||
nodeType: row.node_type,
|
||||
displayName: row.display_name,
|
||||
description: row.description,
|
||||
package: row.package_name
|
||||
}));
|
||||
}
|
||||
safeJsonParse(json, defaultValue) {
|
||||
try {
|
||||
return JSON.parse(json);
|
||||
}
|
||||
catch {
|
||||
return defaultValue;
|
||||
}
|
||||
}
|
||||
upsertNode(node) {
|
||||
this.saveNode(node);
|
||||
}
|
||||
getNodeByType(nodeType) {
|
||||
return this.getNode(nodeType);
|
||||
}
|
||||
getNodesByCategory(category) {
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM nodes WHERE category = ?
|
||||
ORDER BY display_name
|
||||
`).all(category);
|
||||
return rows.map(row => this.parseNodeRow(row));
|
||||
}
|
||||
searchNodes(query, mode = 'OR', limit = 20) {
|
||||
let sql = '';
|
||||
const params = [];
|
||||
if (mode === 'FUZZY') {
|
||||
sql = `
|
||||
SELECT * FROM nodes
|
||||
WHERE node_type LIKE ? OR display_name LIKE ? OR description LIKE ?
|
||||
ORDER BY display_name
|
||||
LIMIT ?
|
||||
`;
|
||||
const fuzzyQuery = `%${query}%`;
|
||||
params.push(fuzzyQuery, fuzzyQuery, fuzzyQuery, limit);
|
||||
}
|
||||
else {
|
||||
const words = query.split(/\s+/).filter(w => w.length > 0);
|
||||
const conditions = words.map(() => '(node_type LIKE ? OR display_name LIKE ? OR description LIKE ?)');
|
||||
const operator = mode === 'AND' ? ' AND ' : ' OR ';
|
||||
sql = `
|
||||
SELECT * FROM nodes
|
||||
WHERE ${conditions.join(operator)}
|
||||
ORDER BY display_name
|
||||
LIMIT ?
|
||||
`;
|
||||
for (const word of words) {
|
||||
const searchTerm = `%${word}%`;
|
||||
params.push(searchTerm, searchTerm, searchTerm);
|
||||
}
|
||||
params.push(limit);
|
||||
}
|
||||
const rows = this.db.prepare(sql).all(...params);
|
||||
return rows.map(row => this.parseNodeRow(row));
|
||||
}
|
||||
getAllNodes(limit) {
|
||||
let sql = 'SELECT * FROM nodes ORDER BY display_name';
|
||||
if (limit) {
|
||||
sql += ` LIMIT ${limit}`;
|
||||
}
|
||||
const rows = this.db.prepare(sql).all();
|
||||
return rows.map(row => this.parseNodeRow(row));
|
||||
}
|
||||
getNodeCount() {
|
||||
const result = this.db.prepare('SELECT COUNT(*) as count FROM nodes').get();
|
||||
return result.count;
|
||||
}
|
||||
getAIToolNodes() {
|
||||
return this.getAITools();
|
||||
}
|
||||
getToolVariant(baseNodeType) {
|
||||
if (!baseNodeType || typeof baseNodeType !== 'string' || !baseNodeType.includes('.')) {
|
||||
return null;
|
||||
}
|
||||
const toolNodeType = `${baseNodeType}Tool`;
|
||||
return this.getNode(toolNodeType);
|
||||
}
|
||||
getBaseNodeForToolVariant(toolNodeType) {
|
||||
const row = this.db.prepare(`
|
||||
SELECT tool_variant_of FROM nodes WHERE node_type = ?
|
||||
`).get(toolNodeType);
|
||||
if (!row?.tool_variant_of)
|
||||
return null;
|
||||
return this.getNode(row.tool_variant_of);
|
||||
}
|
||||
getToolVariants() {
|
||||
const rows = this.db.prepare(`
|
||||
SELECT node_type, display_name, description, package_name, tool_variant_of
|
||||
FROM nodes
|
||||
WHERE is_tool_variant = 1
|
||||
ORDER BY display_name
|
||||
`).all();
|
||||
return rows.map(row => ({
|
||||
nodeType: row.node_type,
|
||||
displayName: row.display_name,
|
||||
description: row.description,
|
||||
package: row.package_name,
|
||||
toolVariantOf: row.tool_variant_of
|
||||
}));
|
||||
}
|
||||
getToolVariantCount() {
|
||||
const result = this.db.prepare('SELECT COUNT(*) as count FROM nodes WHERE is_tool_variant = 1').get();
|
||||
return result.count;
|
||||
}
|
||||
getNodesByPackage(packageName) {
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM nodes WHERE package_name = ?
|
||||
ORDER BY display_name
|
||||
`).all(packageName);
|
||||
return rows.map(row => this.parseNodeRow(row));
|
||||
}
|
||||
searchNodeProperties(nodeType, query, maxResults = 20) {
|
||||
const node = this.getNode(nodeType);
|
||||
if (!node || !node.properties)
|
||||
return [];
|
||||
const results = [];
|
||||
const searchLower = query.toLowerCase();
|
||||
function searchProperties(properties, path = []) {
|
||||
for (const prop of properties) {
|
||||
if (results.length >= maxResults)
|
||||
break;
|
||||
const currentPath = [...path, prop.name || prop.displayName];
|
||||
const pathString = currentPath.join('.');
|
||||
if (prop.name?.toLowerCase().includes(searchLower) ||
|
||||
prop.displayName?.toLowerCase().includes(searchLower) ||
|
||||
prop.description?.toLowerCase().includes(searchLower)) {
|
||||
results.push({
|
||||
path: pathString,
|
||||
property: prop,
|
||||
description: prop.description
|
||||
});
|
||||
}
|
||||
if (prop.options) {
|
||||
searchProperties(prop.options, currentPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
searchProperties(node.properties);
|
||||
return results;
|
||||
}
|
||||
parseNodeRow(row) {
|
||||
return {
|
||||
nodeType: row.node_type,
|
||||
displayName: row.display_name,
|
||||
description: row.description,
|
||||
category: row.category,
|
||||
developmentStyle: row.development_style,
|
||||
package: row.package_name,
|
||||
isAITool: Number(row.is_ai_tool) === 1,
|
||||
isTrigger: Number(row.is_trigger) === 1,
|
||||
isWebhook: Number(row.is_webhook) === 1,
|
||||
isVersioned: Number(row.is_versioned) === 1,
|
||||
isToolVariant: Number(row.is_tool_variant) === 1,
|
||||
toolVariantOf: row.tool_variant_of || null,
|
||||
hasToolVariant: Number(row.has_tool_variant) === 1,
|
||||
version: row.version,
|
||||
properties: this.safeJsonParse(row.properties_schema, []),
|
||||
operations: this.safeJsonParse(row.operations, []),
|
||||
credentials: this.safeJsonParse(row.credentials_required, []),
|
||||
hasDocumentation: !!row.documentation,
|
||||
outputs: row.outputs ? this.safeJsonParse(row.outputs, null) : null,
|
||||
outputNames: row.output_names ? this.safeJsonParse(row.output_names, null) : null,
|
||||
isCommunity: Number(row.is_community) === 1,
|
||||
isVerified: Number(row.is_verified) === 1,
|
||||
authorName: row.author_name || null,
|
||||
authorGithubUrl: row.author_github_url || null,
|
||||
npmPackageName: row.npm_package_name || null,
|
||||
npmVersion: row.npm_version || null,
|
||||
npmDownloads: row.npm_downloads || 0,
|
||||
communityFetchedAt: row.community_fetched_at || null,
|
||||
npmReadme: row.npm_readme || null,
|
||||
aiDocumentationSummary: row.ai_documentation_summary
|
||||
? this.safeJsonParse(row.ai_documentation_summary, null)
|
||||
: null,
|
||||
aiSummaryGeneratedAt: row.ai_summary_generated_at || null,
|
||||
};
|
||||
}
|
||||
getNodeOperations(nodeType, resource) {
|
||||
const node = this.getNode(nodeType);
|
||||
if (!node)
|
||||
return [];
|
||||
const operations = [];
|
||||
if (node.operations) {
|
||||
if (Array.isArray(node.operations)) {
|
||||
operations.push(...node.operations);
|
||||
}
|
||||
else if (typeof node.operations === 'object') {
|
||||
if (resource && node.operations[resource]) {
|
||||
return node.operations[resource];
|
||||
}
|
||||
else {
|
||||
Object.values(node.operations).forEach(ops => {
|
||||
if (Array.isArray(ops)) {
|
||||
operations.push(...ops);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
if (node.properties && Array.isArray(node.properties)) {
|
||||
for (const prop of node.properties) {
|
||||
if (prop.name === 'operation' && prop.options) {
|
||||
if (resource && prop.displayOptions?.show?.resource) {
|
||||
const allowedResources = Array.isArray(prop.displayOptions.show.resource)
|
||||
? prop.displayOptions.show.resource
|
||||
: [prop.displayOptions.show.resource];
|
||||
if (!allowedResources.includes(resource)) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
operations.push(...prop.options);
|
||||
}
|
||||
}
|
||||
}
|
||||
return operations;
|
||||
}
|
||||
getNodeResources(nodeType) {
|
||||
const node = this.getNode(nodeType);
|
||||
if (!node || !node.properties)
|
||||
return [];
|
||||
const resources = [];
|
||||
for (const prop of node.properties) {
|
||||
if (prop.name === 'resource' && prop.options) {
|
||||
resources.push(...prop.options);
|
||||
}
|
||||
}
|
||||
return resources;
|
||||
}
|
||||
getOperationsForResource(nodeType, resource) {
|
||||
const node = this.getNode(nodeType);
|
||||
if (!node || !node.properties)
|
||||
return [];
|
||||
const operations = [];
|
||||
for (const prop of node.properties) {
|
||||
if (prop.name === 'operation' && prop.displayOptions?.show?.resource) {
|
||||
const allowedResources = Array.isArray(prop.displayOptions.show.resource)
|
||||
? prop.displayOptions.show.resource
|
||||
: [prop.displayOptions.show.resource];
|
||||
if (allowedResources.includes(resource) && prop.options) {
|
||||
operations.push(...prop.options);
|
||||
}
|
||||
}
|
||||
}
|
||||
return operations;
|
||||
}
|
||||
getAllOperations() {
|
||||
const allOperations = new Map();
|
||||
const nodes = this.getAllNodes();
|
||||
for (const node of nodes) {
|
||||
const operations = this.getNodeOperations(node.nodeType);
|
||||
if (operations.length > 0) {
|
||||
allOperations.set(node.nodeType, operations);
|
||||
}
|
||||
}
|
||||
return allOperations;
|
||||
}
|
||||
getAllResources() {
|
||||
const allResources = new Map();
|
||||
const nodes = this.getAllNodes();
|
||||
for (const node of nodes) {
|
||||
const resources = this.getNodeResources(node.nodeType);
|
||||
if (resources.length > 0) {
|
||||
allResources.set(node.nodeType, resources);
|
||||
}
|
||||
}
|
||||
return allResources;
|
||||
}
|
||||
getNodePropertyDefaults(nodeType) {
|
||||
try {
|
||||
const node = this.getNode(nodeType);
|
||||
if (!node || !node.properties)
|
||||
return {};
|
||||
const defaults = {};
|
||||
for (const prop of node.properties) {
|
||||
if (prop.name && prop.default !== undefined) {
|
||||
defaults[prop.name] = prop.default;
|
||||
}
|
||||
}
|
||||
return defaults;
|
||||
}
|
||||
catch (error) {
|
||||
console.error(`Error getting property defaults for ${nodeType}:`, error);
|
||||
return {};
|
||||
}
|
||||
}
|
||||
getDefaultOperationForResource(nodeType, resource) {
|
||||
try {
|
||||
const node = this.getNode(nodeType);
|
||||
if (!node || !node.properties)
|
||||
return undefined;
|
||||
for (const prop of node.properties) {
|
||||
if (prop.name === 'operation') {
|
||||
if (resource && prop.displayOptions?.show?.resource) {
|
||||
const resourceDep = prop.displayOptions.show.resource;
|
||||
if (!Array.isArray(resourceDep) && typeof resourceDep !== 'string') {
|
||||
continue;
|
||||
}
|
||||
const allowedResources = Array.isArray(resourceDep)
|
||||
? resourceDep
|
||||
: [resourceDep];
|
||||
if (!allowedResources.includes(resource)) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if (prop.default !== undefined) {
|
||||
return prop.default;
|
||||
}
|
||||
if (prop.options && Array.isArray(prop.options) && prop.options.length > 0) {
|
||||
const firstOption = prop.options[0];
|
||||
return typeof firstOption === 'string' ? firstOption : firstOption.value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
console.error(`Error getting default operation for ${nodeType}:`, error);
|
||||
return undefined;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
getCommunityNodes(options) {
|
||||
let sql = 'SELECT * FROM nodes WHERE is_community = 1';
|
||||
const params = [];
|
||||
if (options?.verified !== undefined) {
|
||||
sql += ' AND is_verified = ?';
|
||||
params.push(options.verified ? 1 : 0);
|
||||
}
|
||||
switch (options?.orderBy) {
|
||||
case 'downloads':
|
||||
sql += ' ORDER BY npm_downloads DESC';
|
||||
break;
|
||||
case 'updated':
|
||||
sql += ' ORDER BY community_fetched_at DESC';
|
||||
break;
|
||||
case 'name':
|
||||
default:
|
||||
sql += ' ORDER BY display_name';
|
||||
}
|
||||
if (options?.limit) {
|
||||
sql += ' LIMIT ?';
|
||||
params.push(options.limit);
|
||||
}
|
||||
const rows = this.db.prepare(sql).all(...params);
|
||||
return rows.map(row => this.parseNodeRow(row));
|
||||
}
|
||||
getCommunityStats() {
|
||||
const totalResult = this.db.prepare('SELECT COUNT(*) as count FROM nodes WHERE is_community = 1').get();
|
||||
const verifiedResult = this.db.prepare('SELECT COUNT(*) as count FROM nodes WHERE is_community = 1 AND is_verified = 1').get();
|
||||
return {
|
||||
total: totalResult.count,
|
||||
verified: verifiedResult.count,
|
||||
unverified: totalResult.count - verifiedResult.count
|
||||
};
|
||||
}
|
||||
hasNodeByNpmPackage(npmPackageName) {
|
||||
const result = this.db.prepare('SELECT 1 FROM nodes WHERE npm_package_name = ? LIMIT 1').get(npmPackageName);
|
||||
return !!result;
|
||||
}
|
||||
getNodeByNpmPackage(npmPackageName) {
|
||||
const row = this.db.prepare('SELECT * FROM nodes WHERE npm_package_name = ?').get(npmPackageName);
|
||||
if (!row)
|
||||
return null;
|
||||
return this.parseNodeRow(row);
|
||||
}
|
||||
deleteCommunityNodes() {
|
||||
const result = this.db.prepare('DELETE FROM nodes WHERE is_community = 1').run();
|
||||
return result.changes;
|
||||
}
|
||||
updateNodeReadme(nodeType, readme) {
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE nodes SET npm_readme = ? WHERE node_type = ?
|
||||
`);
|
||||
stmt.run(readme, nodeType);
|
||||
}
|
||||
updateNodeAISummary(nodeType, summary) {
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE nodes
|
||||
SET ai_documentation_summary = ?, ai_summary_generated_at = datetime('now')
|
||||
WHERE node_type = ?
|
||||
`);
|
||||
stmt.run(JSON.stringify(summary), nodeType);
|
||||
}
|
||||
getCommunityNodesWithoutReadme() {
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM nodes
|
||||
WHERE is_community = 1 AND (npm_readme IS NULL OR npm_readme = '')
|
||||
ORDER BY npm_downloads DESC
|
||||
`).all();
|
||||
return rows.map(row => this.parseNodeRow(row));
|
||||
}
|
||||
getCommunityNodesWithoutAISummary() {
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM nodes
|
||||
WHERE is_community = 1
|
||||
AND npm_readme IS NOT NULL AND npm_readme != ''
|
||||
AND (ai_documentation_summary IS NULL OR ai_documentation_summary = '')
|
||||
ORDER BY npm_downloads DESC
|
||||
`).all();
|
||||
return rows.map(row => this.parseNodeRow(row));
|
||||
}
|
||||
getDocumentationStats() {
|
||||
const total = this.db.prepare('SELECT COUNT(*) as count FROM nodes WHERE is_community = 1').get().count;
|
||||
const withReadme = this.db.prepare("SELECT COUNT(*) as count FROM nodes WHERE is_community = 1 AND npm_readme IS NOT NULL AND npm_readme != ''").get().count;
|
||||
const withAISummary = this.db.prepare("SELECT COUNT(*) as count FROM nodes WHERE is_community = 1 AND ai_documentation_summary IS NOT NULL AND ai_documentation_summary != ''").get().count;
|
||||
return {
|
||||
total,
|
||||
withReadme,
|
||||
withAISummary,
|
||||
needingReadme: total - withReadme,
|
||||
needingAISummary: withReadme - withAISummary
|
||||
};
|
||||
}
|
||||
saveNodeVersion(versionData) {
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT OR REPLACE INTO node_versions (
|
||||
node_type, version, package_name, display_name, description,
|
||||
category, is_current_max, properties_schema, operations,
|
||||
credentials_required, outputs, minimum_n8n_version,
|
||||
breaking_changes, deprecated_properties, added_properties,
|
||||
released_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
stmt.run(versionData.nodeType, versionData.version, versionData.packageName, versionData.displayName, versionData.description || null, versionData.category || null, versionData.isCurrentMax ? 1 : 0, versionData.propertiesSchema ? JSON.stringify(versionData.propertiesSchema) : null, versionData.operations ? JSON.stringify(versionData.operations) : null, versionData.credentialsRequired ? JSON.stringify(versionData.credentialsRequired) : null, versionData.outputs ? JSON.stringify(versionData.outputs) : null, versionData.minimumN8nVersion || null, versionData.breakingChanges ? JSON.stringify(versionData.breakingChanges) : null, versionData.deprecatedProperties ? JSON.stringify(versionData.deprecatedProperties) : null, versionData.addedProperties ? JSON.stringify(versionData.addedProperties) : null, versionData.releasedAt || null);
|
||||
}
|
||||
getNodeVersions(nodeType) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM node_versions
|
||||
WHERE node_type = ?
|
||||
ORDER BY version DESC
|
||||
`).all(normalizedType);
|
||||
return rows.map(row => this.parseNodeVersionRow(row));
|
||||
}
|
||||
getLatestNodeVersion(nodeType) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
const row = this.db.prepare(`
|
||||
SELECT * FROM node_versions
|
||||
WHERE node_type = ? AND is_current_max = 1
|
||||
LIMIT 1
|
||||
`).get(normalizedType);
|
||||
if (!row)
|
||||
return null;
|
||||
return this.parseNodeVersionRow(row);
|
||||
}
|
||||
getNodeVersion(nodeType, version) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
const row = this.db.prepare(`
|
||||
SELECT * FROM node_versions
|
||||
WHERE node_type = ? AND version = ?
|
||||
`).get(normalizedType, version);
|
||||
if (!row)
|
||||
return null;
|
||||
return this.parseNodeVersionRow(row);
|
||||
}
|
||||
savePropertyChange(changeData) {
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT INTO version_property_changes (
|
||||
node_type, from_version, to_version, property_name, change_type,
|
||||
is_breaking, old_value, new_value, migration_hint, auto_migratable,
|
||||
migration_strategy, severity
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
stmt.run(changeData.nodeType, changeData.fromVersion, changeData.toVersion, changeData.propertyName, changeData.changeType, changeData.isBreaking ? 1 : 0, changeData.oldValue || null, changeData.newValue || null, changeData.migrationHint || null, changeData.autoMigratable ? 1 : 0, changeData.migrationStrategy ? JSON.stringify(changeData.migrationStrategy) : null, changeData.severity || 'MEDIUM');
|
||||
}
|
||||
getPropertyChanges(nodeType, fromVersion, toVersion) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM version_property_changes
|
||||
WHERE node_type = ? AND from_version = ? AND to_version = ?
|
||||
ORDER BY severity DESC, property_name
|
||||
`).all(normalizedType, fromVersion, toVersion);
|
||||
return rows.map(row => this.parsePropertyChangeRow(row));
|
||||
}
|
||||
getBreakingChanges(nodeType, fromVersion, toVersion) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
let sql = `
|
||||
SELECT * FROM version_property_changes
|
||||
WHERE node_type = ? AND is_breaking = 1
|
||||
`;
|
||||
const params = [normalizedType];
|
||||
if (toVersion) {
|
||||
sql += ` AND from_version >= ? AND to_version <= ?`;
|
||||
params.push(fromVersion, toVersion);
|
||||
}
|
||||
else {
|
||||
sql += ` AND from_version >= ?`;
|
||||
params.push(fromVersion);
|
||||
}
|
||||
sql += ` ORDER BY from_version, to_version, severity DESC`;
|
||||
const rows = this.db.prepare(sql).all(...params);
|
||||
return rows.map(row => this.parsePropertyChangeRow(row));
|
||||
}
|
||||
getAutoMigratableChanges(nodeType, fromVersion, toVersion) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM version_property_changes
|
||||
WHERE node_type = ?
|
||||
AND from_version = ?
|
||||
AND to_version = ?
|
||||
AND auto_migratable = 1
|
||||
ORDER BY severity DESC
|
||||
`).all(normalizedType, fromVersion, toVersion);
|
||||
return rows.map(row => this.parsePropertyChangeRow(row));
|
||||
}
|
||||
hasVersionUpgradePath(nodeType, fromVersion, toVersion) {
|
||||
const versions = this.getNodeVersions(nodeType);
|
||||
if (versions.length === 0)
|
||||
return false;
|
||||
const fromExists = versions.some(v => v.version === fromVersion);
|
||||
const toExists = versions.some(v => v.version === toVersion);
|
||||
return fromExists && toExists;
|
||||
}
|
||||
getVersionedNodesCount() {
|
||||
const result = this.db.prepare(`
|
||||
SELECT COUNT(DISTINCT node_type) as count
|
||||
FROM node_versions
|
||||
`).get();
|
||||
return result.count;
|
||||
}
|
||||
parseNodeVersionRow(row) {
|
||||
return {
|
||||
id: row.id,
|
||||
nodeType: row.node_type,
|
||||
version: row.version,
|
||||
packageName: row.package_name,
|
||||
displayName: row.display_name,
|
||||
description: row.description,
|
||||
category: row.category,
|
||||
isCurrentMax: Number(row.is_current_max) === 1,
|
||||
propertiesSchema: row.properties_schema ? this.safeJsonParse(row.properties_schema, []) : null,
|
||||
operations: row.operations ? this.safeJsonParse(row.operations, []) : null,
|
||||
credentialsRequired: row.credentials_required ? this.safeJsonParse(row.credentials_required, []) : null,
|
||||
outputs: row.outputs ? this.safeJsonParse(row.outputs, null) : null,
|
||||
minimumN8nVersion: row.minimum_n8n_version,
|
||||
breakingChanges: row.breaking_changes ? this.safeJsonParse(row.breaking_changes, []) : [],
|
||||
deprecatedProperties: row.deprecated_properties ? this.safeJsonParse(row.deprecated_properties, []) : [],
|
||||
addedProperties: row.added_properties ? this.safeJsonParse(row.added_properties, []) : [],
|
||||
releasedAt: row.released_at,
|
||||
createdAt: row.created_at
|
||||
};
|
||||
}
|
||||
parsePropertyChangeRow(row) {
|
||||
return {
|
||||
id: row.id,
|
||||
nodeType: row.node_type,
|
||||
fromVersion: row.from_version,
|
||||
toVersion: row.to_version,
|
||||
propertyName: row.property_name,
|
||||
changeType: row.change_type,
|
||||
isBreaking: Number(row.is_breaking) === 1,
|
||||
oldValue: row.old_value,
|
||||
newValue: row.new_value,
|
||||
migrationHint: row.migration_hint,
|
||||
autoMigratable: Number(row.auto_migratable) === 1,
|
||||
migrationStrategy: row.migration_strategy ? this.safeJsonParse(row.migration_strategy, null) : null,
|
||||
severity: row.severity,
|
||||
createdAt: row.created_at
|
||||
};
|
||||
}
|
||||
createWorkflowVersion(data) {
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT INTO workflow_versions (
|
||||
workflow_id, version_number, workflow_name, workflow_snapshot,
|
||||
trigger, operations, fix_types, metadata
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
const result = stmt.run(data.workflowId, data.versionNumber, data.workflowName, JSON.stringify(data.workflowSnapshot), data.trigger, data.operations ? JSON.stringify(data.operations) : null, data.fixTypes ? JSON.stringify(data.fixTypes) : null, data.metadata ? JSON.stringify(data.metadata) : null);
|
||||
return result.lastInsertRowid;
|
||||
}
|
||||
getWorkflowVersions(workflowId, limit) {
|
||||
let sql = `
|
||||
SELECT * FROM workflow_versions
|
||||
WHERE workflow_id = ?
|
||||
ORDER BY version_number DESC
|
||||
`;
|
||||
if (limit) {
|
||||
sql += ` LIMIT ?`;
|
||||
const rows = this.db.prepare(sql).all(workflowId, limit);
|
||||
return rows.map(row => this.parseWorkflowVersionRow(row));
|
||||
}
|
||||
const rows = this.db.prepare(sql).all(workflowId);
|
||||
return rows.map(row => this.parseWorkflowVersionRow(row));
|
||||
}
|
||||
getWorkflowVersion(versionId) {
|
||||
const row = this.db.prepare(`
|
||||
SELECT * FROM workflow_versions WHERE id = ?
|
||||
`).get(versionId);
|
||||
if (!row)
|
||||
return null;
|
||||
return this.parseWorkflowVersionRow(row);
|
||||
}
|
||||
getLatestWorkflowVersion(workflowId) {
|
||||
const row = this.db.prepare(`
|
||||
SELECT * FROM workflow_versions
|
||||
WHERE workflow_id = ?
|
||||
ORDER BY version_number DESC
|
||||
LIMIT 1
|
||||
`).get(workflowId);
|
||||
if (!row)
|
||||
return null;
|
||||
return this.parseWorkflowVersionRow(row);
|
||||
}
|
||||
deleteWorkflowVersion(versionId) {
|
||||
this.db.prepare(`
|
||||
DELETE FROM workflow_versions WHERE id = ?
|
||||
`).run(versionId);
|
||||
}
|
||||
deleteWorkflowVersionsByWorkflowId(workflowId) {
|
||||
const result = this.db.prepare(`
|
||||
DELETE FROM workflow_versions WHERE workflow_id = ?
|
||||
`).run(workflowId);
|
||||
return result.changes;
|
||||
}
|
||||
pruneWorkflowVersions(workflowId, keepCount) {
|
||||
const versions = this.db.prepare(`
|
||||
SELECT id FROM workflow_versions
|
||||
WHERE workflow_id = ?
|
||||
ORDER BY version_number DESC
|
||||
`).all(workflowId);
|
||||
if (versions.length <= keepCount) {
|
||||
return 0;
|
||||
}
|
||||
const idsToDelete = versions.slice(keepCount).map(v => v.id);
|
||||
if (idsToDelete.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
const placeholders = idsToDelete.map(() => '?').join(',');
|
||||
const result = this.db.prepare(`
|
||||
DELETE FROM workflow_versions WHERE id IN (${placeholders})
|
||||
`).run(...idsToDelete);
|
||||
return result.changes;
|
||||
}
|
||||
truncateWorkflowVersions() {
|
||||
const result = this.db.prepare(`
|
||||
DELETE FROM workflow_versions
|
||||
`).run();
|
||||
return result.changes;
|
||||
}
|
||||
getWorkflowVersionCount(workflowId) {
|
||||
const result = this.db.prepare(`
|
||||
SELECT COUNT(*) as count FROM workflow_versions WHERE workflow_id = ?
|
||||
`).get(workflowId);
|
||||
return result.count;
|
||||
}
|
||||
getVersionStorageStats() {
|
||||
const totalResult = this.db.prepare(`
|
||||
SELECT COUNT(*) as count FROM workflow_versions
|
||||
`).get();
|
||||
const sizeResult = this.db.prepare(`
|
||||
SELECT SUM(LENGTH(workflow_snapshot)) as total_size FROM workflow_versions
|
||||
`).get();
|
||||
const byWorkflow = this.db.prepare(`
|
||||
SELECT
|
||||
workflow_id,
|
||||
workflow_name,
|
||||
COUNT(*) as version_count,
|
||||
SUM(LENGTH(workflow_snapshot)) as total_size,
|
||||
MAX(created_at) as last_backup
|
||||
FROM workflow_versions
|
||||
GROUP BY workflow_id
|
||||
ORDER BY version_count DESC
|
||||
`).all();
|
||||
return {
|
||||
totalVersions: totalResult.count,
|
||||
totalSize: sizeResult.total_size || 0,
|
||||
byWorkflow: byWorkflow.map(row => ({
|
||||
workflowId: row.workflow_id,
|
||||
workflowName: row.workflow_name,
|
||||
versionCount: row.version_count,
|
||||
totalSize: row.total_size,
|
||||
lastBackup: row.last_backup
|
||||
}))
|
||||
};
|
||||
}
|
||||
parseWorkflowVersionRow(row) {
|
||||
return {
|
||||
id: row.id,
|
||||
workflowId: row.workflow_id,
|
||||
versionNumber: row.version_number,
|
||||
workflowName: row.workflow_name,
|
||||
workflowSnapshot: this.safeJsonParse(row.workflow_snapshot, null),
|
||||
trigger: row.trigger,
|
||||
operations: row.operations ? this.safeJsonParse(row.operations, null) : null,
|
||||
fixTypes: row.fix_types ? this.safeJsonParse(row.fix_types, null) : null,
|
||||
metadata: row.metadata ? this.safeJsonParse(row.metadata, null) : null,
|
||||
createdAt: row.created_at
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.NodeRepository = NodeRepository;
|
||||
//# sourceMappingURL=node-repository.js.map
|
||||
1
dist/database/node-repository.js.map
vendored
Normal file
1
dist/database/node-repository.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
10
dist/errors/validation-service-error.d.ts
vendored
Normal file
10
dist/errors/validation-service-error.d.ts
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
export declare class ValidationServiceError extends Error {
|
||||
readonly nodeType?: string | undefined;
|
||||
readonly property?: string | undefined;
|
||||
readonly cause?: Error | undefined;
|
||||
constructor(message: string, nodeType?: string | undefined, property?: string | undefined, cause?: Error | undefined);
|
||||
static jsonParseError(nodeType: string, cause: Error): ValidationServiceError;
|
||||
static nodeNotFound(nodeType: string): ValidationServiceError;
|
||||
static dataExtractionError(nodeType: string, dataType: string, cause?: Error): ValidationServiceError;
|
||||
}
|
||||
//# sourceMappingURL=validation-service-error.d.ts.map
|
||||
1
dist/errors/validation-service-error.d.ts.map
vendored
Normal file
1
dist/errors/validation-service-error.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"validation-service-error.d.ts","sourceRoot":"","sources":["../../src/errors/validation-service-error.ts"],"names":[],"mappings":"AAGA,qBAAa,sBAAuB,SAAQ,KAAK;aAG7B,QAAQ,CAAC,EAAE,MAAM;aACjB,QAAQ,CAAC,EAAE,MAAM;aACjB,KAAK,CAAC,EAAE,KAAK;gBAH7B,OAAO,EAAE,MAAM,EACC,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,KAAK,CAAC,EAAE,KAAK,YAAA;IAc/B,MAAM,CAAC,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,GAAG,sBAAsB;IAY7E,MAAM,CAAC,YAAY,CAAC,QAAQ,EAAE,MAAM,GAAG,sBAAsB;IAU7D,MAAM,CAAC,mBAAmB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,KAAK,GAAG,sBAAsB;CAQtG"}
|
||||
26
dist/errors/validation-service-error.js
vendored
Normal file
26
dist/errors/validation-service-error.js
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ValidationServiceError = void 0;
|
||||
class ValidationServiceError extends Error {
|
||||
constructor(message, nodeType, property, cause) {
|
||||
super(message);
|
||||
this.nodeType = nodeType;
|
||||
this.property = property;
|
||||
this.cause = cause;
|
||||
this.name = 'ValidationServiceError';
|
||||
if (Error.captureStackTrace) {
|
||||
Error.captureStackTrace(this, ValidationServiceError);
|
||||
}
|
||||
}
|
||||
static jsonParseError(nodeType, cause) {
|
||||
return new ValidationServiceError(`Failed to parse JSON data for node ${nodeType}`, nodeType, undefined, cause);
|
||||
}
|
||||
static nodeNotFound(nodeType) {
|
||||
return new ValidationServiceError(`Node type ${nodeType} not found in repository`, nodeType);
|
||||
}
|
||||
static dataExtractionError(nodeType, dataType, cause) {
|
||||
return new ValidationServiceError(`Failed to extract ${dataType} for node ${nodeType}`, nodeType, dataType, cause);
|
||||
}
|
||||
}
|
||||
exports.ValidationServiceError = ValidationServiceError;
|
||||
//# sourceMappingURL=validation-service-error.js.map
|
||||
1
dist/errors/validation-service-error.js.map
vendored
Normal file
1
dist/errors/validation-service-error.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"validation-service-error.js","sourceRoot":"","sources":["../../src/errors/validation-service-error.ts"],"names":[],"mappings":";;;AAGA,MAAa,sBAAuB,SAAQ,KAAK;IAC/C,YACE,OAAe,EACC,QAAiB,EACjB,QAAiB,EACjB,KAAa;QAE7B,KAAK,CAAC,OAAO,CAAC,CAAC;QAJC,aAAQ,GAAR,QAAQ,CAAS;QACjB,aAAQ,GAAR,QAAQ,CAAS;QACjB,UAAK,GAAL,KAAK,CAAQ;QAG7B,IAAI,CAAC,IAAI,GAAG,wBAAwB,CAAC;QAGrC,IAAI,KAAK,CAAC,iBAAiB,EAAE,CAAC;YAC5B,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,sBAAsB,CAAC,CAAC;QACxD,CAAC;IACH,CAAC;IAKD,MAAM,CAAC,cAAc,CAAC,QAAgB,EAAE,KAAY;QAClD,OAAO,IAAI,sBAAsB,CAC/B,sCAAsC,QAAQ,EAAE,EAChD,QAAQ,EACR,SAAS,EACT,KAAK,CACN,CAAC;IACJ,CAAC;IAKD,MAAM,CAAC,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,sBAAsB,CAC/B,aAAa,QAAQ,0BAA0B,EAC/C,QAAQ,CACT,CAAC;IACJ,CAAC;IAKD,MAAM,CAAC,mBAAmB,CAAC,QAAgB,EAAE,QAAgB,EAAE,KAAa;QAC1E,OAAO,IAAI,sBAAsB,CAC/B,qBAAqB,QAAQ,aAAa,QAAQ,EAAE,EACpD,QAAQ,EACR,QAAQ,EACR,KAAK,CACN,CAAC;IACJ,CAAC;CACF;AAjDD,wDAiDC"}
|
||||
52
dist/http-server-single-session.d.ts
vendored
Normal file
52
dist/http-server-single-session.d.ts
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env node
|
||||
import express from 'express';
|
||||
import { InstanceContext } from './types/instance-context';
|
||||
import { SessionState } from './types/session-state';
|
||||
export declare class SingleSessionHTTPServer {
|
||||
private transports;
|
||||
private servers;
|
||||
private sessionMetadata;
|
||||
private sessionContexts;
|
||||
private contextSwitchLocks;
|
||||
private session;
|
||||
private consoleManager;
|
||||
private expressServer;
|
||||
private sessionTimeout;
|
||||
private authToken;
|
||||
private cleanupTimer;
|
||||
constructor();
|
||||
private startSessionCleanup;
|
||||
private cleanupExpiredSessions;
|
||||
private removeSession;
|
||||
private getActiveSessionCount;
|
||||
private canCreateSession;
|
||||
private isValidSessionId;
|
||||
private sanitizeErrorForClient;
|
||||
private updateSessionAccess;
|
||||
private switchSessionContext;
|
||||
private performContextSwitch;
|
||||
private getSessionMetrics;
|
||||
private loadAuthToken;
|
||||
private validateEnvironment;
|
||||
handleRequest(req: express.Request, res: express.Response, instanceContext?: InstanceContext): Promise<void>;
|
||||
private resetSessionSSE;
|
||||
private isExpired;
|
||||
private isSessionExpired;
|
||||
start(): Promise<void>;
|
||||
shutdown(): Promise<void>;
|
||||
getSessionInfo(): {
|
||||
active: boolean;
|
||||
sessionId?: string;
|
||||
age?: number;
|
||||
sessions?: {
|
||||
total: number;
|
||||
active: number;
|
||||
expired: number;
|
||||
max: number;
|
||||
sessionIds: string[];
|
||||
};
|
||||
};
|
||||
exportSessionState(): SessionState[];
|
||||
restoreSessionState(sessions: SessionState[]): number;
|
||||
}
|
||||
//# sourceMappingURL=http-server-single-session.d.ts.map
|
||||
1
dist/http-server-single-session.d.ts.map
vendored
Normal file
1
dist/http-server-single-session.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"http-server-single-session.d.ts","sourceRoot":"","sources":["../src/http-server-single-session.ts"],"names":[],"mappings":";AAMA,OAAO,OAAO,MAAM,SAAS,CAAC;AAoB9B,OAAO,EAAE,eAAe,EAA2B,MAAM,0BAA0B,CAAC;AACpF,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAuErD,qBAAa,uBAAuB;IAElC,OAAO,CAAC,UAAU,CAA8D;IAChF,OAAO,CAAC,OAAO,CAA0D;IACzE,OAAO,CAAC,eAAe,CAAsE;IAC7F,OAAO,CAAC,eAAe,CAA4D;IACnF,OAAO,CAAC,kBAAkB,CAAyC;IACnE,OAAO,CAAC,OAAO,CAAwB;IACvC,OAAO,CAAC,cAAc,CAAwB;IAC9C,OAAO,CAAC,aAAa,CAAM;IAC3B,OAAO,CAAC,cAAc,CAAkB;IACxC,OAAO,CAAC,SAAS,CAAuB;IACxC,OAAO,CAAC,YAAY,CAA+B;;IAcnD,OAAO,CAAC,mBAAmB;IAmB3B,OAAO,CAAC,sBAAsB;YAqChB,aAAa;IAuC3B,OAAO,CAAC,qBAAqB;IAO7B,OAAO,CAAC,gBAAgB;IAkBxB,OAAO,CAAC,gBAAgB;IASxB,OAAO,CAAC,sBAAsB;IAkC9B,OAAO,CAAC,mBAAmB;YASb,oBAAoB;YAwBpB,oBAAoB;IAwBlC,OAAO,CAAC,iBAAiB;IAsBzB,OAAO,CAAC,aAAa;IA2BrB,OAAO,CAAC,mBAAmB;IAoDrB,aAAa,CACjB,GAAG,EAAE,OAAO,CAAC,OAAO,EACpB,GAAG,EAAE,OAAO,CAAC,QAAQ,EACrB,eAAe,CAAC,EAAE,eAAe,GAChC,OAAO,CAAC,IAAI,CAAC;YAmOF,eAAe;IA8C7B,OAAO,CAAC,SAAS;IAYjB,OAAO,CAAC,gBAAgB;IASlB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAgnBtB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAkD/B,cAAc,IAAI;QAChB,MAAM,EAAE,OAAO,CAAC;QAChB,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,GAAG,CAAC,EAAE,MAAM,CAAC;QACb,QAAQ,CAAC,EAAE;YACT,KAAK,EAAE,MAAM,CAAC;YACd,MAAM,EAAE,MAAM,CAAC;YACf,OAAO,EAAE,MAAM,CAAC;YAChB,GAAG,EAAE,MAAM,CAAC;YACZ,UAAU,EAAE,MAAM,EAAE,CAAC;SACtB,CAAC;KACH;IAmDM,kBAAkB,IAAI,YAAY,EAAE;IAoEpC,mBAAmB,CAAC,QAAQ,EAAE,YAAY,EAAE,GAAG,MAAM;CAsG7D"}
|
||||
1180
dist/http-server-single-session.js
vendored
Normal file
1180
dist/http-server-single-session.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
dist/http-server-single-session.js.map
vendored
Normal file
1
dist/http-server-single-session.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
9
dist/http-server.d.ts
vendored
Normal file
9
dist/http-server.d.ts
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
#!/usr/bin/env node
|
||||
export declare function loadAuthToken(): string | null;
|
||||
export declare function startFixedHTTPServer(): Promise<void>;
|
||||
declare module './mcp/server' {
|
||||
interface N8NDocumentationMCPServer {
|
||||
executeTool(name: string, args: any): Promise<any>;
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=http-server.d.ts.map
|
||||
1
dist/http-server.d.ts.map
vendored
Normal file
1
dist/http-server.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"http-server.d.ts","sourceRoot":"","sources":["../src/http-server.ts"],"names":[],"mappings":";AAiDA,wBAAgB,aAAa,IAAI,MAAM,GAAG,IAAI,CAsB7C;AAmED,wBAAsB,oBAAoB,kBAsezC;AAGD,OAAO,QAAQ,cAAc,CAAC;IAC5B,UAAU,yBAAyB;QACjC,WAAW,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;KACpD;CACF"}
|
||||
481
dist/http-server.js
vendored
Normal file
481
dist/http-server.js
vendored
Normal file
@@ -0,0 +1,481 @@
|
||||
#!/usr/bin/env node
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.loadAuthToken = loadAuthToken;
exports.startFixedHTTPServer = startFixedHTTPServer;
const express_1 = __importDefault(require("express"));
const tools_1 = require("./mcp/tools");
const tools_n8n_manager_1 = require("./mcp/tools-n8n-manager");
const server_1 = require("./mcp/server");
const logger_1 = require("./utils/logger");
const auth_1 = require("./utils/auth");
const version_1 = require("./utils/version");
const n8n_api_1 = require("./config/n8n-api");
const dotenv_1 = __importDefault(require("dotenv"));
const fs_1 = require("fs");
const url_detector_1 = require("./utils/url-detector");
const protocol_version_1 = require("./utils/protocol-version");
dotenv_1.default.config();
let expressServer;
let authToken = null;
function loadAuthToken() {
    if (process.env.AUTH_TOKEN) {
        logger_1.logger.info('Using AUTH_TOKEN from environment variable');
        return process.env.AUTH_TOKEN;
    }
    if (process.env.AUTH_TOKEN_FILE) {
        try {
            const token = (0, fs_1.readFileSync)(process.env.AUTH_TOKEN_FILE, 'utf-8').trim();
            logger_1.logger.info(`Loaded AUTH_TOKEN from file: ${process.env.AUTH_TOKEN_FILE}`);
            return token;
        }
        catch (error) {
            logger_1.logger.error(`Failed to read AUTH_TOKEN_FILE: ${process.env.AUTH_TOKEN_FILE}`, error);
            console.error(`ERROR: Failed to read AUTH_TOKEN_FILE: ${process.env.AUTH_TOKEN_FILE}`);
            console.error(error instanceof Error ? error.message : 'Unknown error');
            return null;
        }
    }
    return null;
}
function validateEnvironment() {
    authToken = loadAuthToken();
    if (!authToken || authToken.trim() === '') {
        logger_1.logger.error('No authentication token found or token is empty');
        console.error('ERROR: AUTH_TOKEN is required for HTTP mode and cannot be empty');
        console.error('Set AUTH_TOKEN environment variable or AUTH_TOKEN_FILE pointing to a file containing the token');
        console.error('Generate AUTH_TOKEN with: openssl rand -base64 32');
        process.exit(1);
    }
    authToken = authToken.trim();
    if (authToken.length < 32) {
        logger_1.logger.warn('AUTH_TOKEN should be at least 32 characters for security');
        console.warn('WARNING: AUTH_TOKEN should be at least 32 characters for security');
    }
    if (authToken === 'REPLACE_THIS_AUTH_TOKEN_32_CHARS_MIN_abcdefgh') {
        logger_1.logger.warn('⚠️ SECURITY WARNING: Using default AUTH_TOKEN - CHANGE IMMEDIATELY!');
        logger_1.logger.warn('Generate secure token with: openssl rand -base64 32');
        if (process.env.MCP_MODE === 'http') {
            console.warn('\n⚠️ SECURITY WARNING ⚠️');
            console.warn('Using default AUTH_TOKEN - CHANGE IMMEDIATELY!');
            console.warn('Generate secure token: openssl rand -base64 32');
            console.warn('Update via Railway dashboard environment variables\n');
        }
    }
}
async function shutdown() {
    logger_1.logger.info('Shutting down HTTP server...');
    console.log('Shutting down HTTP server...');
    if (expressServer) {
        expressServer.close(() => {
            logger_1.logger.info('HTTP server closed');
            console.log('HTTP server closed');
            process.exit(0);
        });
        setTimeout(() => {
            logger_1.logger.error('Forced shutdown after timeout');
            process.exit(1);
        }, 10000);
    }
    else {
        process.exit(0);
    }
}
async function startFixedHTTPServer() {
    logger_1.logger.warn('DEPRECATION: startFixedHTTPServer() is deprecated as of v2.31.8. ' +
        'Use SingleSessionHTTPServer which supports SSE streaming. ' +
        'See: https://github.com/czlonkowski/n8n-mcp/issues/524');
    validateEnvironment();
    const app = (0, express_1.default)();
    const trustProxy = process.env.TRUST_PROXY ? Number(process.env.TRUST_PROXY) : 0;
    if (trustProxy > 0) {
        app.set('trust proxy', trustProxy);
        logger_1.logger.info(`Trust proxy enabled with ${trustProxy} hop(s)`);
    }
    app.use((req, res, next) => {
        res.setHeader('X-Content-Type-Options', 'nosniff');
        res.setHeader('X-Frame-Options', 'DENY');
        res.setHeader('X-XSS-Protection', '1; mode=block');
        res.setHeader('Strict-Transport-Security', 'max-age=31536000; includeSubDomains');
        next();
    });
    app.use((req, res, next) => {
        const allowedOrigin = process.env.CORS_ORIGIN || '*';
        res.setHeader('Access-Control-Allow-Origin', allowedOrigin);
        res.setHeader('Access-Control-Allow-Methods', 'POST, GET, OPTIONS');
        res.setHeader('Access-Control-Allow-Headers', 'Content-Type, Authorization, Accept');
        res.setHeader('Access-Control-Max-Age', '86400');
        if (req.method === 'OPTIONS') {
            res.sendStatus(204);
            return;
        }
        next();
    });
    app.use((req, res, next) => {
        logger_1.logger.info(`${req.method} ${req.path}`, {
            ip: req.ip,
            userAgent: req.get('user-agent'),
            contentLength: req.get('content-length')
        });
        next();
    });
    const mcpServer = new server_1.N8NDocumentationMCPServer();
    logger_1.logger.info('Created persistent MCP server instance');
    app.get('/', (req, res) => {
        const port = parseInt(process.env.PORT || '3000');
        const host = process.env.HOST || '0.0.0.0';
        const baseUrl = (0, url_detector_1.detectBaseUrl)(req, host, port);
        const endpoints = (0, url_detector_1.formatEndpointUrls)(baseUrl);
        res.json({
            name: 'n8n Documentation MCP Server',
            version: version_1.PROJECT_VERSION,
            description: 'Model Context Protocol server providing comprehensive n8n node documentation and workflow management',
            endpoints: {
                health: {
                    url: endpoints.health,
                    method: 'GET',
                    description: 'Health check and status information'
                },
                mcp: {
                    url: endpoints.mcp,
                    method: 'GET/POST',
                    description: 'MCP endpoint - GET for info, POST for JSON-RPC'
                }
            },
            authentication: {
                type: 'Bearer Token',
                header: 'Authorization: Bearer <token>',
                required_for: ['POST /mcp']
            },
            documentation: 'https://github.com/czlonkowski/n8n-mcp'
        });
    });
    app.get('/health', (req, res) => {
        res.json({
            status: 'ok',
            mode: 'http-fixed',
            version: version_1.PROJECT_VERSION,
            uptime: Math.floor(process.uptime()),
            memory: {
                used: Math.round(process.memoryUsage().heapUsed / 1024 / 1024),
                total: Math.round(process.memoryUsage().heapTotal / 1024 / 1024),
                unit: 'MB'
            },
            timestamp: new Date().toISOString()
        });
    });
    app.get('/version', (req, res) => {
        res.json({
            version: version_1.PROJECT_VERSION,
            buildTime: new Date().toISOString(),
            tools: tools_1.n8nDocumentationToolsFinal.map(t => t.name),
            commit: process.env.GIT_COMMIT || 'unknown'
        });
    });
    app.get('/test-tools', async (req, res) => {
        try {
            const result = await mcpServer.executeTool('get_node_essentials', { nodeType: 'nodes-base.httpRequest' });
            res.json({ status: 'ok', hasData: !!result, toolCount: tools_1.n8nDocumentationToolsFinal.length });
        }
        catch (error) {
            res.json({ status: 'error', message: error instanceof Error ? error.message : 'Unknown error' });
        }
    });
    app.get('/mcp', (req, res) => {
        res.json({
            description: 'n8n Documentation MCP Server',
            version: version_1.PROJECT_VERSION,
            endpoints: {
                mcp: {
                    method: 'POST',
                    path: '/mcp',
                    description: 'Main MCP JSON-RPC endpoint',
                    authentication: 'Bearer token required'
                },
                health: {
                    method: 'GET',
                    path: '/health',
                    description: 'Health check endpoint',
                    authentication: 'None'
                },
                root: {
                    method: 'GET',
                    path: '/',
                    description: 'API information',
                    authentication: 'None'
                }
            },
            documentation: 'https://github.com/czlonkowski/n8n-mcp'
        });
    });
    app.post('/mcp', async (req, res) => {
        const startTime = Date.now();
        const authHeader = req.headers.authorization;
        if (!authHeader) {
            logger_1.logger.warn('Authentication failed: Missing Authorization header', {
                ip: req.ip,
                userAgent: req.get('user-agent'),
                reason: 'no_auth_header'
            });
            res.status(401).json({
                jsonrpc: '2.0',
                error: {
                    code: -32001,
                    message: 'Unauthorized'
                },
                id: null
            });
            return;
        }
        if (!authHeader.startsWith('Bearer ')) {
            logger_1.logger.warn('Authentication failed: Invalid Authorization header format (expected Bearer token)', {
                ip: req.ip,
                userAgent: req.get('user-agent'),
                reason: 'invalid_auth_format',
                headerPrefix: authHeader.substring(0, Math.min(authHeader.length, 10)) + '...'
            });
            res.status(401).json({
                jsonrpc: '2.0',
                error: {
                    code: -32001,
                    message: 'Unauthorized'
                },
                id: null
            });
            return;
        }
        const token = authHeader.slice(7).trim();
        const isValidToken = authToken &&
            auth_1.AuthManager.timingSafeCompare(token, authToken);
        if (!isValidToken) {
            logger_1.logger.warn('Authentication failed: Invalid token', {
                ip: req.ip,
                userAgent: req.get('user-agent'),
                reason: 'invalid_token'
            });
            res.status(401).json({
                jsonrpc: '2.0',
                error: {
                    code: -32001,
                    message: 'Unauthorized'
                },
                id: null
            });
            return;
        }
        try {
            let body = '';
            req.on('data', chunk => {
                body += chunk.toString();
            });
            req.on('end', async () => {
                try {
                    const jsonRpcRequest = JSON.parse(body);
                    logger_1.logger.debug('Received JSON-RPC request:', { method: jsonRpcRequest.method });
                    let response;
                    switch (jsonRpcRequest.method) {
                        case 'initialize':
                            const negotiationResult = (0, protocol_version_1.negotiateProtocolVersion)(jsonRpcRequest.params?.protocolVersion, jsonRpcRequest.params?.clientInfo, req.get('user-agent'), req.headers);
                            (0, protocol_version_1.logProtocolNegotiation)(negotiationResult, logger_1.logger, 'HTTP_SERVER_INITIALIZE');
                            response = {
                                jsonrpc: '2.0',
                                result: {
                                    protocolVersion: negotiationResult.version,
                                    capabilities: {
                                        tools: {},
                                        resources: {}
                                    },
                                    serverInfo: {
                                        name: 'n8n-documentation-mcp',
                                        version: version_1.PROJECT_VERSION
                                    }
                                },
                                id: jsonRpcRequest.id
                            };
                            break;
                        case 'tools/list':
                            const tools = [...tools_1.n8nDocumentationToolsFinal];
                            if ((0, n8n_api_1.isN8nApiConfigured)()) {
                                tools.push(...tools_n8n_manager_1.n8nManagementTools);
                            }
                            response = {
                                jsonrpc: '2.0',
                                result: {
                                    tools
                                },
                                id: jsonRpcRequest.id
                            };
                            break;
                        case 'tools/call':
                            const toolName = jsonRpcRequest.params?.name;
                            const toolArgs = jsonRpcRequest.params?.arguments || {};
                            try {
                                const result = await mcpServer.executeTool(toolName, toolArgs);
                                let responseText = JSON.stringify(result, null, 2);
                                const mcpResult = {
                                    content: [
                                        {
                                            type: 'text',
                                            text: responseText
                                        }
                                    ]
                                };
                                if (toolName.startsWith('validate_')) {
                                    const resultSize = responseText.length;
                                    if (resultSize > 1000000) {
                                        logger_1.logger.warn(`Validation tool ${toolName} response is very large (${resultSize} chars). ` +
                                            `Truncating for HTTP transport safety.`);
                                        mcpResult.content[0].text = responseText.substring(0, 999000) +
                                            '\n\n[Response truncated due to size limits]';
                                    }
                                    else {
                                        mcpResult.structuredContent = result;
                                    }
                                }
                                response = {
                                    jsonrpc: '2.0',
                                    result: mcpResult,
                                    id: jsonRpcRequest.id
                                };
                            }
                            catch (error) {
                                response = {
                                    jsonrpc: '2.0',
                                    error: {
                                        code: -32603,
                                        message: `Error executing tool ${toolName}: ${error instanceof Error ? error.message : 'Unknown error'}`
                                    },
                                    id: jsonRpcRequest.id
                                };
                            }
                            break;
                        default:
                            response = {
                                jsonrpc: '2.0',
                                error: {
                                    code: -32601,
                                    message: `Method not found: ${jsonRpcRequest.method}`
                                },
                                id: jsonRpcRequest.id
                            };
                    }
                    res.setHeader('Content-Type', 'application/json');
                    res.json(response);
                    const duration = Date.now() - startTime;
                    logger_1.logger.info('MCP request completed', {
                        duration,
                        method: jsonRpcRequest.method
                    });
                }
                catch (error) {
                    logger_1.logger.error('Error processing request:', error);
                    res.status(400).json({
                        jsonrpc: '2.0',
                        error: {
                            code: -32700,
                            message: 'Parse error',
                            data: error instanceof Error ? error.message : 'Unknown error'
                        },
                        id: null
                    });
                }
            });
        }
        catch (error) {
            logger_1.logger.error('MCP request error:', error);
            if (!res.headersSent) {
                res.status(500).json({
                    jsonrpc: '2.0',
                    error: {
                        code: -32603,
                        message: 'Internal server error',
                        data: process.env.NODE_ENV === 'development'
                            ? error.message
                            : undefined
                    },
                    id: null
                });
            }
        }
    });
    app.use((req, res) => {
        res.status(404).json({
            error: 'Not found',
            message: `Cannot ${req.method} ${req.path}`
        });
    });
    app.use((err, req, res, next) => {
        logger_1.logger.error('Express error handler:', err);
        if (!res.headersSent) {
            res.status(500).json({
                jsonrpc: '2.0',
                error: {
                    code: -32603,
                    message: 'Internal server error',
                    data: process.env.NODE_ENV === 'development' ? err.message : undefined
                },
                id: null
            });
        }
    });
    const port = parseInt(process.env.PORT || '3000');
    const host = process.env.HOST || '0.0.0.0';
    expressServer = app.listen(port, host, () => {
        logger_1.logger.info(`n8n MCP Fixed HTTP Server started`, { port, host });
        const baseUrl = (0, url_detector_1.getStartupBaseUrl)(host, port);
        const endpoints = (0, url_detector_1.formatEndpointUrls)(baseUrl);
        console.log(`n8n MCP Fixed HTTP Server running on ${host}:${port}`);
        console.log(`Health check: ${endpoints.health}`);
        console.log(`MCP endpoint: ${endpoints.mcp}`);
        console.log('\nPress Ctrl+C to stop the server');
        if (authToken === 'REPLACE_THIS_AUTH_TOKEN_32_CHARS_MIN_abcdefgh') {
            setInterval(() => {
                logger_1.logger.warn('⚠️ Still using default AUTH_TOKEN - security risk!');
                if (process.env.MCP_MODE === 'http') {
                    console.warn('⚠️ REMINDER: Still using default AUTH_TOKEN - please change it!');
                }
            }, 300000);
        }
        if (process.env.BASE_URL || process.env.PUBLIC_URL) {
            console.log(`\nPublic URL configured: ${baseUrl}`);
        }
        else if (process.env.TRUST_PROXY && Number(process.env.TRUST_PROXY) > 0) {
            console.log(`\nNote: TRUST_PROXY is enabled. URLs will be auto-detected from proxy headers.`);
        }
    });
    expressServer.on('error', (error) => {
        if (error.code === 'EADDRINUSE') {
            logger_1.logger.error(`Port ${port} is already in use`);
            console.error(`ERROR: Port ${port} is already in use`);
            process.exit(1);
        }
        else {
            logger_1.logger.error('Server error:', error);
            console.error('Server error:', error);
            process.exit(1);
        }
    });
    process.on('SIGTERM', shutdown);
    process.on('SIGINT', shutdown);
    process.on('uncaughtException', (error) => {
        logger_1.logger.error('Uncaught exception:', error);
        console.error('Uncaught exception:', error);
        shutdown();
    });
    process.on('unhandledRejection', (reason, promise) => {
        logger_1.logger.error('Unhandled rejection:', reason);
        console.error('Unhandled rejection at:', promise, 'reason:', reason);
        shutdown();
    });
}
if (typeof require !== 'undefined' && require.main === module) {
    startFixedHTTPServer().catch(error => {
        logger_1.logger.error('Failed to start Fixed HTTP server:', error);
        console.error('Failed to start Fixed HTTP server:', error);
        process.exit(1);
    });
}
//# sourceMappingURL=http-server.js.map
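The fixed HTTP server above accepts JSON-RPC over POST /mcp guarded by a Bearer token. A minimal client sketch follows; the local URL, port, and the idea of reading the token from the environment are illustrative assumptions, not taken from this diff:

```typescript
// Hypothetical client for the POST /mcp endpoint implemented above.
const baseUrl = process.env.MCP_URL ?? 'http://localhost:3000'; // assumed deployment URL
const token = process.env.AUTH_TOKEN ?? '';                     // same token the server validates

async function callTool(name: string, args: Record<string, unknown>) {
  const res = await fetch(`${baseUrl}/mcp`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}`,          // compared with AuthManager.timingSafeCompare
    },
    body: JSON.stringify({
      jsonrpc: '2.0',
      method: 'tools/call',                      // server also handles initialize and tools/list
      params: { name, arguments: args },
      id: 1,
    }),
  });
  return res.json();                             // JSON-RPC envelope with result or error
}

callTool('get_node_essentials', { nodeType: 'nodes-base.httpRequest' }).then(console.log);
```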
1
dist/http-server.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
11
dist/index.d.ts
vendored
Normal file
@@ -0,0 +1,11 @@
export { N8NMCPEngine, EngineHealth, EngineOptions } from './mcp-engine';
export { SingleSessionHTTPServer } from './http-server-single-session';
export { ConsoleManager } from './utils/console-manager';
export { N8NDocumentationMCPServer } from './mcp/server';
export type { InstanceContext } from './types/instance-context';
export { validateInstanceContext, isInstanceContext } from './types/instance-context';
export type { SessionState } from './types/session-state';
export type { Tool, CallToolResult, ListToolsResult } from '@modelcontextprotocol/sdk/types.js';
import N8NMCPEngine from './mcp-engine';
export default N8NMCPEngine;
//# sourceMappingURL=index.d.ts.map
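This entry point re-exports the engine, the single-session HTTP server, and the MCP server class. A minimal consumer sketch, assuming the package is imported as `n8n-mcp` (the import specifier is an assumption):

```typescript
// Hypothetical consumer of the exports declared in dist/index.d.ts.
import N8NMCPEngine from 'n8n-mcp';               // default export is the engine class

async function main(): Promise<void> {
  const engine = new N8NMCPEngine({ logLevel: 'info' });
  await engine.start();                           // boots the single-session HTTP server
  console.log(engine.getSessionInfo());           // { active: false } until a client connects
  await engine.shutdown();
}

main().catch(console.error);
```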
1
dist/index.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,YAAY,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AACzE,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAC;AACvE,OAAO,EAAE,cAAc,EAAE,MAAM,yBAAyB,CAAC;AACzD,OAAO,EAAE,yBAAyB,EAAE,MAAM,cAAc,CAAC;AAGzD,YAAY,EACV,eAAe,EAChB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EACL,uBAAuB,EACvB,iBAAiB,EAClB,MAAM,0BAA0B,CAAC;AAClC,YAAY,EACV,YAAY,EACb,MAAM,uBAAuB,CAAC;AAG/B,YAAY,EACV,IAAI,EACJ,cAAc,EACd,eAAe,EAChB,MAAM,oCAAoC,CAAC;AAG5C,OAAO,YAAY,MAAM,cAAc,CAAC;AACxC,eAAe,YAAY,CAAC"}
20
dist/index.js
vendored
Normal file
@@ -0,0 +1,20 @@
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.isInstanceContext = exports.validateInstanceContext = exports.N8NDocumentationMCPServer = exports.ConsoleManager = exports.SingleSessionHTTPServer = exports.N8NMCPEngine = void 0;
|
||||
var mcp_engine_1 = require("./mcp-engine");
|
||||
Object.defineProperty(exports, "N8NMCPEngine", { enumerable: true, get: function () { return mcp_engine_1.N8NMCPEngine; } });
|
||||
var http_server_single_session_1 = require("./http-server-single-session");
|
||||
Object.defineProperty(exports, "SingleSessionHTTPServer", { enumerable: true, get: function () { return http_server_single_session_1.SingleSessionHTTPServer; } });
|
||||
var console_manager_1 = require("./utils/console-manager");
|
||||
Object.defineProperty(exports, "ConsoleManager", { enumerable: true, get: function () { return console_manager_1.ConsoleManager; } });
|
||||
var server_1 = require("./mcp/server");
|
||||
Object.defineProperty(exports, "N8NDocumentationMCPServer", { enumerable: true, get: function () { return server_1.N8NDocumentationMCPServer; } });
|
||||
var instance_context_1 = require("./types/instance-context");
|
||||
Object.defineProperty(exports, "validateInstanceContext", { enumerable: true, get: function () { return instance_context_1.validateInstanceContext; } });
|
||||
Object.defineProperty(exports, "isInstanceContext", { enumerable: true, get: function () { return instance_context_1.isInstanceContext; } });
|
||||
const mcp_engine_2 = __importDefault(require("./mcp-engine"));
|
||||
exports.default = mcp_engine_2.default;
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
dist/index.js.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;AAOA,2CAAyE;AAAhE,0GAAA,YAAY,OAAA;AACrB,2EAAuE;AAA9D,qIAAA,uBAAuB,OAAA;AAChC,2DAAyD;AAAhD,iHAAA,cAAc,OAAA;AACvB,uCAAyD;AAAhD,mHAAA,yBAAyB,OAAA;AAMlC,6DAGkC;AAFhC,2HAAA,uBAAuB,OAAA;AACvB,qHAAA,iBAAiB,OAAA;AAcnB,8DAAwC;AACxC,kBAAe,oBAAY,CAAC"}
11
dist/loaders/node-loader.d.ts
vendored
Normal file
@@ -0,0 +1,11 @@
export interface LoadedNode {
    packageName: string;
    nodeName: string;
    NodeClass: any;
}
export declare class N8nNodeLoader {
    private readonly CORE_PACKAGES;
    loadAllNodes(): Promise<LoadedNode[]>;
    private loadPackageNodes;
}
//# sourceMappingURL=node-loader.d.ts.map
1
dist/loaders/node-loader.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"node-loader.d.ts","sourceRoot":"","sources":["../../src/loaders/node-loader.ts"],"names":[],"mappings":"AAEA,MAAM,WAAW,UAAU;IACzB,WAAW,EAAE,MAAM,CAAC;IACpB,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,GAAG,CAAC;CAChB;AAED,qBAAa,aAAa;IACxB,OAAO,CAAC,QAAQ,CAAC,aAAa,CAG5B;IAEI,YAAY,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC;YAmB7B,gBAAgB;CAqD/B"}
79
dist/loaders/node-loader.js
vendored
Normal file
@@ -0,0 +1,79 @@
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.N8nNodeLoader = void 0;
|
||||
const path_1 = __importDefault(require("path"));
|
||||
class N8nNodeLoader {
|
||||
constructor() {
|
||||
this.CORE_PACKAGES = [
|
||||
{ name: 'n8n-nodes-base', path: 'n8n-nodes-base' },
|
||||
{ name: '@n8n/n8n-nodes-langchain', path: '@n8n/n8n-nodes-langchain' }
|
||||
];
|
||||
}
|
||||
async loadAllNodes() {
|
||||
const results = [];
|
||||
for (const pkg of this.CORE_PACKAGES) {
|
||||
try {
|
||||
console.log(`\n📦 Loading package: ${pkg.name} from ${pkg.path}`);
|
||||
const packageJson = require(`${pkg.path}/package.json`);
|
||||
console.log(` Found ${Object.keys(packageJson.n8n?.nodes || {}).length} nodes in package.json`);
|
||||
const nodes = await this.loadPackageNodes(pkg.name, pkg.path, packageJson);
|
||||
results.push(...nodes);
|
||||
}
|
||||
catch (error) {
|
||||
console.error(`Failed to load ${pkg.name}:`, error);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
async loadPackageNodes(packageName, packagePath, packageJson) {
|
||||
const n8nConfig = packageJson.n8n || {};
|
||||
const nodes = [];
|
||||
const nodesList = n8nConfig.nodes || [];
|
||||
if (Array.isArray(nodesList)) {
|
||||
for (const nodePath of nodesList) {
|
||||
try {
|
||||
const fullPath = require.resolve(`${packagePath}/${nodePath}`);
|
||||
const nodeModule = require(fullPath);
|
||||
const nodeNameMatch = nodePath.match(/\/([^\/]+)\.node\.(js|ts)$/);
|
||||
const nodeName = nodeNameMatch ? nodeNameMatch[1] : path_1.default.basename(nodePath, '.node.js');
|
||||
const NodeClass = nodeModule.default || nodeModule[nodeName] || Object.values(nodeModule)[0];
|
||||
if (NodeClass) {
|
||||
nodes.push({ packageName, nodeName, NodeClass });
|
||||
console.log(` ✓ Loaded ${nodeName} from ${packageName}`);
|
||||
}
|
||||
else {
|
||||
console.warn(` ⚠ No valid export found for ${nodeName} in ${packageName}`);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
console.error(` ✗ Failed to load node from ${packageName}/${nodePath}:`, error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
for (const [nodeName, nodePath] of Object.entries(nodesList)) {
|
||||
try {
|
||||
const fullPath = require.resolve(`${packagePath}/${nodePath}`);
|
||||
const nodeModule = require(fullPath);
|
||||
const NodeClass = nodeModule.default || nodeModule[nodeName] || Object.values(nodeModule)[0];
|
||||
if (NodeClass) {
|
||||
nodes.push({ packageName, nodeName, NodeClass });
|
||||
console.log(` ✓ Loaded ${nodeName} from ${packageName}`);
|
||||
}
|
||||
else {
|
||||
console.warn(` ⚠ No valid export found for ${nodeName} in ${packageName}`);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
console.error(` ✗ Failed to load node ${nodeName} from ${packageName}:`, error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
return nodes;
|
||||
}
|
||||
}
|
||||
exports.N8nNodeLoader = N8nNodeLoader;
|
||||
//# sourceMappingURL=node-loader.js.map
|
||||
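N8nNodeLoader reads the `n8n.nodes` list from each core package's package.json and requires every node file it finds. A short sketch of how a rebuild script might consume it; the script shape and logging are illustrative assumptions:

```typescript
// Hypothetical rebuild step using the loader compiled above.
import { N8nNodeLoader, LoadedNode } from './loaders/node-loader';

async function rebuild(): Promise<void> {
  const loader = new N8nNodeLoader();
  const nodes: LoadedNode[] = await loader.loadAllNodes();  // n8n-nodes-base + langchain nodes
  console.log(`Loaded ${nodes.length} node classes`);
  for (const { packageName, nodeName } of nodes.slice(0, 3)) {
    console.log(`${packageName} -> ${nodeName}`);           // e.g. n8n-nodes-base -> HttpRequest
  }
}

rebuild().catch(console.error);
```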
1
dist/loaders/node-loader.js.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"node-loader.js","sourceRoot":"","sources":["../../src/loaders/node-loader.ts"],"names":[],"mappings":";;;;;;AAAA,gDAAwB;AAQxB,MAAa,aAAa;IAA1B;QACmB,kBAAa,GAAG;YAC/B,EAAE,IAAI,EAAE,gBAAgB,EAAE,IAAI,EAAE,gBAAgB,EAAE;YAClD,EAAE,IAAI,EAAE,0BAA0B,EAAE,IAAI,EAAE,0BAA0B,EAAE;SACvE,CAAC;IA0EJ,CAAC;IAxEC,KAAK,CAAC,YAAY;QAChB,MAAM,OAAO,GAAiB,EAAE,CAAC;QAEjC,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,aAAa,EAAE,CAAC;YACrC,IAAI,CAAC;gBACH,OAAO,CAAC,GAAG,CAAC,yBAAyB,GAAG,CAAC,IAAI,SAAS,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC;gBAElE,MAAM,WAAW,GAAG,OAAO,CAAC,GAAG,GAAG,CAAC,IAAI,eAAe,CAAC,CAAC;gBACxD,OAAO,CAAC,GAAG,CAAC,WAAW,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,KAAK,IAAI,EAAE,CAAC,CAAC,MAAM,wBAAwB,CAAC,CAAC;gBACjG,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;gBAC3E,OAAO,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,CAAC;YACzB,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,OAAO,CAAC,KAAK,CAAC,kBAAkB,GAAG,CAAC,IAAI,GAAG,EAAE,KAAK,CAAC,CAAC;YACtD,CAAC;QACH,CAAC;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IAEO,KAAK,CAAC,gBAAgB,CAAC,WAAmB,EAAE,WAAmB,EAAE,WAAgB;QACvF,MAAM,SAAS,GAAG,WAAW,CAAC,GAAG,IAAI,EAAE,CAAC;QACxC,MAAM,KAAK,GAAiB,EAAE,CAAC;QAG/B,MAAM,SAAS,GAAG,SAAS,CAAC,KAAK,IAAI,EAAE,CAAC;QAExC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,CAAC;YAE7B,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;gBACjC,IAAI,CAAC;oBACH,MAAM,QAAQ,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,WAAW,IAAI,QAAQ,EAAE,CAAC,CAAC;oBAC/D,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;oBAGrC,MAAM,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,4BAA4B,CAAC,CAAC;oBACnE,MAAM,QAAQ,GAAG,aAAa,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAI,CAAC,QAAQ,CAAC,QAAQ,EAAE,UAAU,CAAC,CAAC;oBAGxF,MAAM,SAAS,GAAG,UAAU,CAAC,OAAO,IAAI,UAAU,CAAC,QAAQ,CAAC,IAAI,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC7F,IAAI,SAAS,EAAE,CAAC;wBACd,KAAK,CAAC,IAAI,CAAC,EAAE,WAAW,EAAE,QAAQ,EAAE,SAAS,EAAE,CAAC,CAAC;wBACjD,OAAO,CAAC,GAAG,CAAC,cAAc,QAAQ,SAAS,WAAW,EAAE,CAAC,CAAC;oBAC5D,CAAC;yBAAM,CAAC;wBACN,OAAO,CAAC,IAAI,CAAC,iCAAiC,QAAQ,OAAO,WAAW,EAAE,CAAC,CAAC;oBAC9E,CAAC;gBACH,CAAC;gBAAC,OAAO,KAAK,EAAE,CAAC;oBACf,OAAO,CAAC,KAAK,CAAC,gCAAgC,WAAW,IAAI,QAAQ,GAAG,EAAG,KAAe,CAAC,OAAO,CAAC,CAAC;gBACtG,CAAC;YACH,CAAC;QACH,CAAC;aAAM,CAAC;YAEN,KAAK,MAAM,CAAC,QAAQ,EAAE,QAAQ,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,CAAC;gBAC7D,IAAI,CAAC;oBACH,MAAM,QAAQ,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,WAAW,IAAI,QAAkB,EAAE,CAAC,CAAC;oBACzE,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;oBAGrC,MAAM,SAAS,GAAG,UAAU,CAAC,OAAO,IAAI,UAAU,CAAC,QAAQ,CAAC,IAAI,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC7F,IAAI,SAAS,EAAE,CAAC;wBACd,KAAK,CAAC,IAAI,CAAC,EAAE,WAAW,EAAE,QAAQ,EAAE,SAAS,EAAE,CAAC,CAAC;wBACjD,OAAO,CAAC,GAAG,CAAC,cAAc,QAAQ,SAAS,WAAW,EAAE,CAAC,CAAC;oBAC5D,CAAC;yBAAM,CAAC;wBACN,OAAO,CAAC,IAAI,CAAC,iCAAiC,QAAQ,OAAO,WAAW,EAAE,CAAC,CAAC;oBAC9E,CAAC;gBACH,CAAC;gBAAC,OAAO,KAAK,EAAE,CAAC;oBACf,OAAO,CAAC,KAAK,CAAC,2BAA2B,QAAQ,SAAS,WAAW,GAAG,EAAG,KAAe,CAAC,OAAO,CAAC,CAAC;gBACtG,CAAC;YACH,CAAC;QACH,CAAC;QAED,OAAO,KAAK,CAAC;IACf,CAAC;CACF;AA9ED,sCA8EC"}
7
dist/mappers/docs-mapper.d.ts
vendored
Normal file
@@ -0,0 +1,7 @@
export declare class DocsMapper {
    private docsPath;
    private readonly KNOWN_FIXES;
    fetchDocumentation(nodeType: string): Promise<string | null>;
    private enhanceLoopNodeDocumentation;
}
//# sourceMappingURL=docs-mapper.d.ts.map
1
dist/mappers/docs-mapper.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"docs-mapper.d.ts","sourceRoot":"","sources":["../../src/mappers/docs-mapper.ts"],"names":[],"mappings":"AAGA,qBAAa,UAAU;IACrB,OAAO,CAAC,QAAQ,CAAwC;IAGxD,OAAO,CAAC,QAAQ,CAAC,WAAW,CAU1B;IAEI,kBAAkB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;IAkDlE,OAAO,CAAC,4BAA4B;CAmDrC"}
106
dist/mappers/docs-mapper.js
vendored
Normal file
@@ -0,0 +1,106 @@
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DocsMapper = void 0;
|
||||
const fs_1 = require("fs");
|
||||
const path_1 = __importDefault(require("path"));
|
||||
class DocsMapper {
|
||||
constructor() {
|
||||
this.docsPath = path_1.default.join(process.cwd(), 'n8n-docs');
|
||||
this.KNOWN_FIXES = {
|
||||
'httpRequest': 'httprequest',
|
||||
'code': 'code',
|
||||
'webhook': 'webhook',
|
||||
'respondToWebhook': 'respondtowebhook',
|
||||
'n8n-nodes-base.httpRequest': 'httprequest',
|
||||
'n8n-nodes-base.code': 'code',
|
||||
'n8n-nodes-base.webhook': 'webhook',
|
||||
'n8n-nodes-base.respondToWebhook': 'respondtowebhook'
|
||||
};
|
||||
}
|
||||
async fetchDocumentation(nodeType) {
|
||||
const fixedType = this.KNOWN_FIXES[nodeType] || nodeType;
|
||||
const nodeName = fixedType.split('.').pop()?.toLowerCase();
|
||||
if (!nodeName) {
|
||||
console.log(`⚠️ Could not extract node name from: ${nodeType}`);
|
||||
return null;
|
||||
}
|
||||
console.log(`📄 Looking for docs for: ${nodeType} -> ${nodeName}`);
|
||||
const possiblePaths = [
|
||||
`docs/integrations/builtin/core-nodes/n8n-nodes-base.${nodeName}.md`,
|
||||
`docs/integrations/builtin/app-nodes/n8n-nodes-base.${nodeName}.md`,
|
||||
`docs/integrations/builtin/trigger-nodes/n8n-nodes-base.${nodeName}.md`,
|
||||
`docs/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.${nodeName}.md`,
|
||||
`docs/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.${nodeName}.md`,
|
||||
`docs/integrations/builtin/core-nodes/n8n-nodes-base.${nodeName}/index.md`,
|
||||
`docs/integrations/builtin/app-nodes/n8n-nodes-base.${nodeName}/index.md`,
|
||||
`docs/integrations/builtin/trigger-nodes/n8n-nodes-base.${nodeName}/index.md`,
|
||||
`docs/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.${nodeName}/index.md`,
|
||||
`docs/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.${nodeName}/index.md`
|
||||
];
|
||||
for (const relativePath of possiblePaths) {
|
||||
try {
|
||||
const fullPath = path_1.default.join(this.docsPath, relativePath);
|
||||
let content = await fs_1.promises.readFile(fullPath, 'utf-8');
|
||||
console.log(` ✓ Found docs at: ${relativePath}`);
|
||||
content = this.enhanceLoopNodeDocumentation(nodeType, content);
|
||||
return content;
|
||||
}
|
||||
catch (error) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
console.log(` ✗ No docs found for ${nodeName}`);
|
||||
return null;
|
||||
}
|
||||
enhanceLoopNodeDocumentation(nodeType, content) {
|
||||
if (nodeType.includes('splitInBatches')) {
|
||||
const outputGuidance = `
|
||||
|
||||
## CRITICAL OUTPUT CONNECTION INFORMATION
|
||||
|
||||
**⚠️ OUTPUT INDICES ARE COUNTERINTUITIVE ⚠️**
|
||||
|
||||
The SplitInBatches node has TWO outputs with specific indices:
|
||||
- **Output 0 (index 0) = "done"**: Receives final processed data when loop completes
|
||||
- **Output 1 (index 1) = "loop"**: Receives current batch data during iteration
|
||||
|
||||
### Correct Connection Pattern:
|
||||
1. Connect nodes that PROCESS items inside the loop to **Output 1 ("loop")**
|
||||
2. Connect nodes that run AFTER the loop completes to **Output 0 ("done")**
|
||||
3. The last processing node in the loop must connect back to the SplitInBatches node
|
||||
|
||||
### Common Mistake:
|
||||
AI assistants often connect these backwards because the logical flow (loop first, then done) doesn't match the technical indices (done=0, loop=1).
|
||||
|
||||
`;
|
||||
const insertPoint = content.indexOf('## When to use');
|
||||
if (insertPoint > -1) {
|
||||
content = content.slice(0, insertPoint) + outputGuidance + content.slice(insertPoint);
|
||||
}
|
||||
else {
|
||||
content = outputGuidance + '\n' + content;
|
||||
}
|
||||
}
|
||||
if (nodeType.includes('.if')) {
|
||||
const outputGuidance = `
|
||||
|
||||
## Output Connection Information
|
||||
|
||||
The IF node has TWO outputs:
|
||||
- **Output 0 (index 0) = "true"**: Items that match the condition
|
||||
- **Output 1 (index 1) = "false"**: Items that do not match the condition
|
||||
|
||||
`;
|
||||
const insertPoint = content.indexOf('## Node parameters');
|
||||
if (insertPoint > -1) {
|
||||
content = content.slice(0, insertPoint) + outputGuidance + content.slice(insertPoint);
|
||||
}
|
||||
}
|
||||
return content;
|
||||
}
|
||||
}
|
||||
exports.DocsMapper = DocsMapper;
|
||||
//# sourceMappingURL=docs-mapper.js.map
|
||||
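The guidance injected above stresses that SplitInBatches exposes "done" on output index 0 and "loop" on output index 1. A sketch of what correctly wired connections look like in workflow JSON; the node names here are made up for illustration:

```typescript
// Hypothetical n8n connections object wired the way the injected docs describe.
const connections = {
  'Loop Over Items': {
    main: [
      [{ node: 'After Loop', type: 'main', index: 0 }],      // output 0 = "done" -> runs after the loop
      [{ node: 'Process Item', type: 'main', index: 0 }],    // output 1 = "loop" -> per-batch processing
    ],
  },
  'Process Item': {
    main: [
      [{ node: 'Loop Over Items', type: 'main', index: 0 }], // last loop node feeds back into the splitter
    ],
  },
};
```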
1
dist/mappers/docs-mapper.js.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"docs-mapper.js","sourceRoot":"","sources":["../../src/mappers/docs-mapper.ts"],"names":[],"mappings":";;;;;;AAAA,2BAAoC;AACpC,gDAAwB;AAExB,MAAa,UAAU;IAAvB;QACU,aAAQ,GAAG,cAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,UAAU,CAAC,CAAC;QAGvC,gBAAW,GAA2B;YACrD,aAAa,EAAE,aAAa;YAC5B,MAAM,EAAE,MAAM;YACd,SAAS,EAAE,SAAS;YACpB,kBAAkB,EAAE,kBAAkB;YAEtC,4BAA4B,EAAE,aAAa;YAC3C,qBAAqB,EAAE,MAAM;YAC7B,wBAAwB,EAAE,SAAS;YACnC,iCAAiC,EAAE,kBAAkB;SACtD,CAAC;IAuGJ,CAAC;IArGC,KAAK,CAAC,kBAAkB,CAAC,QAAgB;QAEvC,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,IAAI,QAAQ,CAAC;QAGzD,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,WAAW,EAAE,CAAC;QAC3D,IAAI,CAAC,QAAQ,EAAE,CAAC;YACd,OAAO,CAAC,GAAG,CAAC,yCAAyC,QAAQ,EAAE,CAAC,CAAC;YACjE,OAAO,IAAI,CAAC;QACd,CAAC;QAED,OAAO,CAAC,GAAG,CAAC,4BAA4B,QAAQ,OAAO,QAAQ,EAAE,CAAC,CAAC;QAGnE,MAAM,aAAa,GAAG;YAEpB,uDAAuD,QAAQ,KAAK;YACpE,sDAAsD,QAAQ,KAAK;YACnE,0DAA0D,QAAQ,KAAK;YACvE,0EAA0E,QAAQ,KAAK;YACvF,yEAAyE,QAAQ,KAAK;YAEtF,uDAAuD,QAAQ,WAAW;YAC1E,sDAAsD,QAAQ,WAAW;YACzE,0DAA0D,QAAQ,WAAW;YAC7E,0EAA0E,QAAQ,WAAW;YAC7F,yEAAyE,QAAQ,WAAW;SAC7F,CAAC;QAGF,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE,CAAC;YACzC,IAAI,CAAC;gBACH,MAAM,QAAQ,GAAG,cAAI,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;gBACxD,IAAI,OAAO,GAAG,MAAM,aAAE,CAAC,QAAQ,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;gBACnD,OAAO,CAAC,GAAG,CAAC,sBAAsB,YAAY,EAAE,CAAC,CAAC;gBAGlD,OAAO,GAAG,IAAI,CAAC,4BAA4B,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;gBAE/D,OAAO,OAAO,CAAC;YACjB,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBAEf,SAAS;YACX,CAAC;QACH,CAAC;QAED,OAAO,CAAC,GAAG,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAC;QACjD,OAAO,IAAI,CAAC;IACd,CAAC;IAEO,4BAA4B,CAAC,QAAgB,EAAE,OAAe;QAEpE,IAAI,QAAQ,CAAC,QAAQ,CAAC,gBAAgB,CAAC,EAAE,CAAC;YACxC,MAAM,cAAc,GAAG;;;;;;;;;;;;;;;;;;CAkB5B,CAAC;YAEI,MAAM,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC;YACtD,IAAI,WAAW,GAAG,CAAC,CAAC,EAAE,CAAC;gBACrB,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,GAAG,cAAc,GAAG,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YACxF,CAAC;iBAAM,CAAC;gBAEN,OAAO,GAAG,cAAc,GAAG,IAAI,GAAG,OAAO,CAAC;YAC5C,CAAC;QACH,CAAC;QAGD,IAAI,QAAQ,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;YAC7B,MAAM,cAAc,GAAG;;;;;;;;CAQ5B,CAAC;YACI,MAAM,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC;YAC1D,IAAI,WAAW,GAAG,CAAC,CAAC,EAAE,CAAC;gBACrB,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,GAAG,cAAc,GAAG,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YACxF,CAAC;QACH,CAAC;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;CACF;AArHD,gCAqHC"}
36
dist/mcp-engine.d.ts
vendored
Normal file
@@ -0,0 +1,36 @@
import { Request, Response } from 'express';
import { InstanceContext } from './types/instance-context';
import { SessionState } from './types/session-state';
export interface EngineHealth {
    status: 'healthy' | 'unhealthy';
    uptime: number;
    sessionActive: boolean;
    memoryUsage: {
        used: number;
        total: number;
        unit: string;
    };
    version: string;
}
export interface EngineOptions {
    sessionTimeout?: number;
    logLevel?: 'error' | 'warn' | 'info' | 'debug';
}
export declare class N8NMCPEngine {
    private server;
    private startTime;
    constructor(options?: EngineOptions);
    processRequest(req: Request, res: Response, instanceContext?: InstanceContext): Promise<void>;
    healthCheck(): Promise<EngineHealth>;
    getSessionInfo(): {
        active: boolean;
        sessionId?: string;
        age?: number;
    };
    exportSessionState(): SessionState[];
    restoreSessionState(sessions: SessionState[]): number;
    shutdown(): Promise<void>;
    start(): Promise<void>;
}
export default N8NMCPEngine;
//# sourceMappingURL=mcp-engine.d.ts.map
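N8NMCPEngine is the embeddable surface: processRequest plugs into any Express route and healthCheck reports session and memory state. A hedged embedding sketch; the package import path and route names are assumptions:

```typescript
// Hypothetical Express host embedding the engine declared above.
import express from 'express';
import { N8NMCPEngine } from 'n8n-mcp';

const app = express();
const engine = new N8NMCPEngine({ logLevel: 'warn' });

app.post('/mcp', (req, res) => engine.processRequest(req, res));               // MCP traffic
app.get('/healthz', async (_req, res) => res.json(await engine.healthCheck())); // liveness data

app.listen(3000, () => console.log('engine mounted on /mcp'));
```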
1
dist/mcp-engine.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"mcp-engine.d.ts","sourceRoot":"","sources":["../src/mcp-engine.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,SAAS,CAAC;AAG5C,OAAO,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAC3D,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAErD,MAAM,WAAW,YAAY;IAC3B,MAAM,EAAE,SAAS,GAAG,WAAW,CAAC;IAChC,MAAM,EAAE,MAAM,CAAC;IACf,aAAa,EAAE,OAAO,CAAC;IACvB,WAAW,EAAE;QACX,IAAI,EAAE,MAAM,CAAC;QACb,KAAK,EAAE,MAAM,CAAC;QACd,IAAI,EAAE,MAAM,CAAC;KACd,CAAC;IACF,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,aAAa;IAC5B,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,QAAQ,CAAC,EAAE,OAAO,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;CAChD;AAED,qBAAa,YAAY;IACvB,OAAO,CAAC,MAAM,CAA0B;IACxC,OAAO,CAAC,SAAS,CAAO;gBAEZ,OAAO,GAAE,aAAkB;IA8BjC,cAAc,CAClB,GAAG,EAAE,OAAO,EACZ,GAAG,EAAE,QAAQ,EACb,eAAe,CAAC,EAAE,eAAe,GAChC,OAAO,CAAC,IAAI,CAAC;IAkBV,WAAW,IAAI,OAAO,CAAC,YAAY,CAAC;IAgC1C,cAAc,IAAI;QAAE,MAAM,EAAE,OAAO,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAC;QAAC,GAAG,CAAC,EAAE,MAAM,CAAA;KAAE;IAoBvE,kBAAkB,IAAI,YAAY,EAAE;IAwBpC,mBAAmB,CAAC,QAAQ,EAAE,YAAY,EAAE,GAAG,MAAM;IAiB/C,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IASzB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAG7B;AA2CD,eAAe,YAAY,CAAC"}
77
dist/mcp-engine.js
vendored
Normal file
@@ -0,0 +1,77 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.N8NMCPEngine = void 0;
|
||||
const http_server_single_session_1 = require("./http-server-single-session");
|
||||
const logger_1 = require("./utils/logger");
|
||||
class N8NMCPEngine {
|
||||
constructor(options = {}) {
|
||||
this.server = new http_server_single_session_1.SingleSessionHTTPServer();
|
||||
this.startTime = new Date();
|
||||
if (options.logLevel) {
|
||||
process.env.LOG_LEVEL = options.logLevel;
|
||||
}
|
||||
}
|
||||
async processRequest(req, res, instanceContext) {
|
||||
try {
|
||||
await this.server.handleRequest(req, res, instanceContext);
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.error('Engine processRequest error:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
async healthCheck() {
|
||||
try {
|
||||
const sessionInfo = this.server.getSessionInfo();
|
||||
const memoryUsage = process.memoryUsage();
|
||||
return {
|
||||
status: 'healthy',
|
||||
uptime: Math.floor((Date.now() - this.startTime.getTime()) / 1000),
|
||||
sessionActive: sessionInfo.active,
|
||||
memoryUsage: {
|
||||
used: Math.round(memoryUsage.heapUsed / 1024 / 1024),
|
||||
total: Math.round(memoryUsage.heapTotal / 1024 / 1024),
|
||||
unit: 'MB'
|
||||
},
|
||||
version: '2.24.1'
|
||||
};
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.error('Health check failed:', error);
|
||||
return {
|
||||
status: 'unhealthy',
|
||||
uptime: 0,
|
||||
sessionActive: false,
|
||||
memoryUsage: { used: 0, total: 0, unit: 'MB' },
|
||||
version: '2.24.1'
|
||||
};
|
||||
}
|
||||
}
|
||||
getSessionInfo() {
|
||||
return this.server.getSessionInfo();
|
||||
}
|
||||
exportSessionState() {
|
||||
if (!this.server) {
|
||||
logger_1.logger.warn('Cannot export sessions: server not initialized');
|
||||
return [];
|
||||
}
|
||||
return this.server.exportSessionState();
|
||||
}
|
||||
restoreSessionState(sessions) {
|
||||
if (!this.server) {
|
||||
logger_1.logger.warn('Cannot restore sessions: server not initialized');
|
||||
return 0;
|
||||
}
|
||||
return this.server.restoreSessionState(sessions);
|
||||
}
|
||||
async shutdown() {
|
||||
logger_1.logger.info('Shutting down N8N MCP Engine...');
|
||||
await this.server.shutdown();
|
||||
}
|
||||
async start() {
|
||||
await this.server.start();
|
||||
}
|
||||
}
|
||||
exports.N8NMCPEngine = N8NMCPEngine;
|
||||
exports.default = N8NMCPEngine;
|
||||
//# sourceMappingURL=mcp-engine.js.map
|
||||
1
dist/mcp-engine.js.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"mcp-engine.js","sourceRoot":"","sources":["../src/mcp-engine.ts"],"names":[],"mappings":";;;AAQA,6EAAuE;AACvE,2CAAwC;AAqBxC,MAAa,YAAY;IAIvB,YAAY,UAAyB,EAAE;QACrC,IAAI,CAAC,MAAM,GAAG,IAAI,oDAAuB,EAAE,CAAC;QAC5C,IAAI,CAAC,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;QAE5B,IAAI,OAAO,CAAC,QAAQ,EAAE,CAAC;YACrB,OAAO,CAAC,GAAG,CAAC,SAAS,GAAG,OAAO,CAAC,QAAQ,CAAC;QAC3C,CAAC;IACH,CAAC;IAuBD,KAAK,CAAC,cAAc,CAClB,GAAY,EACZ,GAAa,EACb,eAAiC;QAEjC,IAAI,CAAC;YACH,MAAM,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,eAAe,CAAC,CAAC;QAC7D,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,eAAM,CAAC,KAAK,CAAC,8BAA8B,EAAE,KAAK,CAAC,CAAC;YACpD,MAAM,KAAK,CAAC;QACd,CAAC;IACH,CAAC;IAWD,KAAK,CAAC,WAAW;QACf,IAAI,CAAC;YACH,MAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc,EAAE,CAAC;YACjD,MAAM,WAAW,GAAG,OAAO,CAAC,WAAW,EAAE,CAAC;YAE1C,OAAO;gBACL,MAAM,EAAE,SAAS;gBACjB,MAAM,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,CAAC,GAAG,IAAI,CAAC;gBAClE,aAAa,EAAE,WAAW,CAAC,MAAM;gBACjC,WAAW,EAAE;oBACX,IAAI,EAAE,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC;oBACpD,KAAK,EAAE,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,SAAS,GAAG,IAAI,GAAG,IAAI,CAAC;oBACtD,IAAI,EAAE,IAAI;iBACX;gBACD,OAAO,EAAE,QAAQ;aAClB,CAAC;QACJ,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,eAAM,CAAC,KAAK,CAAC,sBAAsB,EAAE,KAAK,CAAC,CAAC;YAC5C,OAAO;gBACL,MAAM,EAAE,WAAW;gBACnB,MAAM,EAAE,CAAC;gBACT,aAAa,EAAE,KAAK;gBACpB,WAAW,EAAE,EAAE,IAAI,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE;gBAC9C,OAAO,EAAE,QAAQ;aAClB,CAAC;QACJ,CAAC;IACH,CAAC;IAMD,cAAc;QACZ,OAAO,IAAI,CAAC,MAAM,CAAC,cAAc,EAAE,CAAC;IACtC,CAAC;IAkBD,kBAAkB;QAChB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;YACjB,eAAM,CAAC,IAAI,CAAC,gDAAgD,CAAC,CAAC;YAC9D,OAAO,EAAE,CAAC;QACZ,CAAC;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,kBAAkB,EAAE,CAAC;IAC1C,CAAC;IAkBD,mBAAmB,CAAC,QAAwB;QAC1C,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;YACjB,eAAM,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;YAC/D,OAAO,CAAC,CAAC;QACX,CAAC;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAC;IACnD,CAAC;IAWD,KAAK,CAAC,QAAQ;QACZ,eAAM,CAAC,IAAI,CAAC,iCAAiC,CAAC,CAAC;QAC/C,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC;IAC/B,CAAC;IAMD,KAAK,CAAC,KAAK;QACT,MAAM,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;IAC5B,CAAC;CACF;AAjKD,oCAiKC;AA2CD,kBAAe,YAAY,CAAC"}
47
dist/mcp-tools-engine.d.ts
vendored
Normal file
@@ -0,0 +1,47 @@
import { NodeRepository } from './database/node-repository';
import { WorkflowValidationResult } from './services/workflow-validator';
export declare class MCPEngine {
    private repository;
    private workflowValidator;
    constructor(repository: NodeRepository);
    listNodes(args?: any): Promise<any[]>;
    searchNodes(args: any): Promise<any[]>;
    getNodeInfo(args: any): Promise<any>;
    getNodeEssentials(args: any): Promise<{
        nodeType: any;
        displayName: any;
        description: any;
        category: any;
        required: import("./services/property-filter").SimplifiedProperty[];
        common: import("./services/property-filter").SimplifiedProperty[];
    } | null>;
    getNodeDocumentation(args: any): Promise<any>;
    validateNodeOperation(args: any): Promise<import("./services/config-validator").ValidationResult | {
        valid: boolean;
        errors: {
            type: string;
            property: string;
            message: string;
        }[];
        warnings: never[];
        suggestions: never[];
        visibleProperties: never[];
        hiddenProperties: never[];
    }>;
    validateNodeMinimal(args: any): Promise<{
        missingFields: never[];
        error: string;
    } | {
        missingFields: string[];
        error?: undefined;
    }>;
    searchNodeProperties(args: any): Promise<any[]>;
    listAITools(args: any): Promise<any[]>;
    getDatabaseStatistics(args: any): Promise<{
        totalNodes: number;
        aiToolsCount: number;
        categories: string[];
    }>;
    validateWorkflow(args: any): Promise<WorkflowValidationResult>;
}
//# sourceMappingURL=mcp-tools-engine.d.ts.map
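MCPEngine wraps a NodeRepository with the documentation and validation tools. A usage sketch, assuming a repository instance is constructed elsewhere (its setup is not shown in this diff):

```typescript
// Hypothetical direct use of the MCPEngine tool facade.
import { MCPEngine } from './mcp-tools-engine';
import { NodeRepository } from './database/node-repository';

async function inspect(repository: NodeRepository): Promise<void> {
  const engine = new MCPEngine(repository);

  const hits = await engine.searchNodes({ query: 'webhook', limit: 5 });
  console.log(hits.map((n: any) => n.nodeType));

  // Minimal validation: which required fields are still missing for this config?
  const check = await engine.validateNodeMinimal({
    nodeType: 'nodes-base.httpRequest',
    config: { url: 'https://example.com' },
  });
  console.log(check.missingFields);
}
```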
1
dist/mcp-tools-engine.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"mcp-tools-engine.d.ts","sourceRoot":"","sources":["../src/mcp-tools-engine.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,cAAc,EAAE,MAAM,4BAA4B,CAAC;AAK5D,OAAO,EAAqB,wBAAwB,EAAE,MAAM,+BAA+B,CAAC;AAE5F,qBAAa,SAAS;IAGR,OAAO,CAAC,UAAU;IAF9B,OAAO,CAAC,iBAAiB,CAAoB;gBAEzB,UAAU,EAAE,cAAc;IAIxC,SAAS,CAAC,IAAI,GAAE,GAAQ;IAIxB,WAAW,CAAC,IAAI,EAAE,GAAG;IAIrB,WAAW,CAAC,IAAI,EAAE,GAAG;IAIrB,iBAAiB,CAAC,IAAI,EAAE,GAAG;;;;;;;;IAgB3B,oBAAoB,CAAC,IAAI,EAAE,GAAG;IAK9B,qBAAqB,CAAC,IAAI,EAAE,GAAG;;;;;;;;;;;;IAqB/B,mBAAmB,CAAC,IAAI,EAAE,GAAG;;;;;;;IAmB7B,oBAAoB,CAAC,IAAI,EAAE,GAAG;IAI9B,WAAW,CAAC,IAAI,EAAE,GAAG;IAIrB,qBAAqB,CAAC,IAAI,EAAE,GAAG;;;;;IAU/B,gBAAgB,CAAC,IAAI,EAAE,GAAG,GAAG,OAAO,CAAC,wBAAwB,CAAC;CAGrE"}
89
dist/mcp-tools-engine.js
vendored
Normal file
@@ -0,0 +1,89 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.MCPEngine = void 0;
|
||||
const property_filter_1 = require("./services/property-filter");
|
||||
const config_validator_1 = require("./services/config-validator");
|
||||
const enhanced_config_validator_1 = require("./services/enhanced-config-validator");
|
||||
const workflow_validator_1 = require("./services/workflow-validator");
|
||||
class MCPEngine {
|
||||
constructor(repository) {
|
||||
this.repository = repository;
|
||||
this.workflowValidator = new workflow_validator_1.WorkflowValidator(repository, enhanced_config_validator_1.EnhancedConfigValidator);
|
||||
}
|
||||
async listNodes(args = {}) {
|
||||
return this.repository.getAllNodes(args.limit);
|
||||
}
|
||||
async searchNodes(args) {
|
||||
return this.repository.searchNodes(args.query, args.mode || 'OR', args.limit || 20);
|
||||
}
|
||||
async getNodeInfo(args) {
|
||||
return this.repository.getNodeByType(args.nodeType);
|
||||
}
|
||||
async getNodeEssentials(args) {
|
||||
const node = await this.repository.getNodeByType(args.nodeType);
|
||||
if (!node)
|
||||
return null;
|
||||
const essentials = property_filter_1.PropertyFilter.getEssentials(node.properties || [], args.nodeType);
|
||||
return {
|
||||
nodeType: node.nodeType,
|
||||
displayName: node.displayName,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
required: essentials.required,
|
||||
common: essentials.common
|
||||
};
|
||||
}
|
||||
async getNodeDocumentation(args) {
|
||||
const node = await this.repository.getNodeByType(args.nodeType);
|
||||
return node?.documentation || null;
|
||||
}
|
||||
async validateNodeOperation(args) {
|
||||
const node = await this.repository.getNodeByType(args.nodeType);
|
||||
if (!node) {
|
||||
return {
|
||||
valid: false,
|
||||
errors: [{ type: 'invalid_configuration', property: '', message: 'Node type not found' }],
|
||||
warnings: [],
|
||||
suggestions: [],
|
||||
visibleProperties: [],
|
||||
hiddenProperties: []
|
||||
};
|
||||
}
|
||||
const userProvidedKeys = new Set(Object.keys(args.config || {}));
|
||||
return config_validator_1.ConfigValidator.validate(args.nodeType, args.config, node.properties || [], userProvidedKeys);
|
||||
}
|
||||
async validateNodeMinimal(args) {
|
||||
const node = await this.repository.getNodeByType(args.nodeType);
|
||||
if (!node) {
|
||||
return { missingFields: [], error: 'Node type not found' };
|
||||
}
|
||||
const missingFields = [];
|
||||
const requiredFields = property_filter_1.PropertyFilter.getEssentials(node.properties || [], args.nodeType).required;
|
||||
for (const field of requiredFields) {
|
||||
if (!args.config[field.name]) {
|
||||
missingFields.push(field.name);
|
||||
}
|
||||
}
|
||||
return { missingFields };
|
||||
}
|
||||
async searchNodeProperties(args) {
|
||||
return this.repository.searchNodeProperties(args.nodeType, args.query, args.maxResults || 20);
|
||||
}
|
||||
async listAITools(args) {
|
||||
return this.repository.getAIToolNodes();
|
||||
}
|
||||
async getDatabaseStatistics(args) {
|
||||
const count = await this.repository.getNodeCount();
|
||||
const aiTools = await this.repository.getAIToolNodes();
|
||||
return {
|
||||
totalNodes: count,
|
||||
aiToolsCount: aiTools.length,
|
||||
categories: ['trigger', 'transform', 'output', 'input']
|
||||
};
|
||||
}
|
||||
async validateWorkflow(args) {
|
||||
return this.workflowValidator.validateWorkflow(args.workflow, args.options);
|
||||
}
|
||||
}
|
||||
exports.MCPEngine = MCPEngine;
|
||||
//# sourceMappingURL=mcp-tools-engine.js.map
|
||||
1
dist/mcp-tools-engine.js.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"mcp-tools-engine.js","sourceRoot":"","sources":["../src/mcp-tools-engine.ts"],"names":[],"mappings":";;;AAKA,gEAA4D;AAE5D,kEAA8D;AAC9D,oFAA+E;AAC/E,sEAA4F;AAE5F,MAAa,SAAS;IAGpB,YAAoB,UAA0B;QAA1B,eAAU,GAAV,UAAU,CAAgB;QAC5C,IAAI,CAAC,iBAAiB,GAAG,IAAI,sCAAiB,CAAC,UAAU,EAAE,mDAAuB,CAAC,CAAC;IACtF,CAAC;IAED,KAAK,CAAC,SAAS,CAAC,OAAY,EAAE;QAC5B,OAAO,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACjD,CAAC;IAED,KAAK,CAAC,WAAW,CAAC,IAAS;QACzB,OAAO,IAAI,CAAC,UAAU,CAAC,WAAW,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE,IAAI,CAAC,KAAK,IAAI,EAAE,CAAC,CAAC;IACtF,CAAC;IAED,KAAK,CAAC,WAAW,CAAC,IAAS;QACzB,OAAO,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED,KAAK,CAAC,iBAAiB,CAAC,IAAS;QAC/B,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAChE,IAAI,CAAC,IAAI;YAAE,OAAO,IAAI,CAAC;QAGvB,MAAM,UAAU,GAAG,gCAAc,CAAC,aAAa,CAAC,IAAI,CAAC,UAAU,IAAI,EAAE,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;QACtF,OAAO;YACL,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,WAAW,EAAE,IAAI,CAAC,WAAW;YAC7B,WAAW,EAAE,IAAI,CAAC,WAAW;YAC7B,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,QAAQ,EAAE,UAAU,CAAC,QAAQ;YAC7B,MAAM,EAAE,UAAU,CAAC,MAAM;SAC1B,CAAC;IACJ,CAAC;IAED,KAAK,CAAC,oBAAoB,CAAC,IAAS;QAClC,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAChE,OAAO,IAAI,EAAE,aAAa,IAAI,IAAI,CAAC;IACrC,CAAC;IAED,KAAK,CAAC,qBAAqB,CAAC,IAAS;QAEnC,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAChE,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,OAAO;gBACL,KAAK,EAAE,KAAK;gBACZ,MAAM,EAAE,CAAC,EAAE,IAAI,EAAE,uBAAuB,EAAE,QAAQ,EAAE,EAAE,EAAE,OAAO,EAAE,qBAAqB,EAAE,CAAC;gBACzF,QAAQ,EAAE,EAAE;gBACZ,WAAW,EAAE,EAAE;gBACf,iBAAiB,EAAE,EAAE;gBACrB,gBAAgB,EAAE,EAAE;aACrB,CAAC;QACJ,CAAC;QAID,MAAM,gBAAgB,GAAG,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,CAAC;QAEjE,OAAO,kCAAe,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,IAAI,EAAE,EAAE,gBAAgB,CAAC,CAAC;IACvG,CAAC;IAED,KAAK,CAAC,mBAAmB,CAAC,IAAS;QAEjC,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAChE,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,OAAO,EAAE,aAAa,EAAE,EAAE,EAAE,KAAK,EAAE,qBAAqB,EAAE,CAAC;QAC7D,CAAC;QAED,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,MAAM,cAAc,GAAG,gCAAc,CAAC,aAAa,CAAC,IAAI,CAAC,UAAU,IAAI,EAAE,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC;QAEnG,KAAK,MAAM,KAAK,IAAI,cAAc,EAAE,CAAC;YACnC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC;gBAC7B,aAAa,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;YACjC,CAAC;QACH,CAAC;QAED,OAAO,EAAE,aAAa,EAAE,CAAC;IAC3B,CAAC;IAED,KAAK,CAAC,oBAAoB,CAAC,IAAS;QAClC,OAAO,IAAI,CAAC,UAAU,CAAC,oBAAoB,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,UAAU,IAAI,EAAE,CAAC,CAAC;IAChG,CAAC;IAED,KAAK,CAAC,WAAW,CAAC,IAAS;QACzB,OAAO,IAAI,CAAC,UAAU,CAAC,cAAc,EAAE,CAAC;IAC1C,CAAC;IAED,KAAK,CAAC,qBAAqB,CAAC,IAAS;QACnC,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,YAAY,EAAE,CAAC;QACnD,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,cAAc,EAAE,CAAC;QACvD,OAAO;YACL,UAAU,EAAE,KAAK;YACjB,YAAY,EAAE,OAAO,CAAC,MAAM;YAC5B,UAAU,EAAE,CAAC,SAAS,EAAE,WAAW,EAAE,QAAQ,EAAE,OAAO,CAAC;SACxD,CAAC;IACJ,CAAC;IAED,KAAK,CAAC,gBAAgB,CAAC,IAAS;QAC9B,OAAO,IAAI,CAAC,iBAAiB,CAAC,gBAAgB,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;IAC9E,CAAC;CACF;AArGD,8BAqGC"}
29
dist/mcp/handlers-n8n-manager.d.ts
vendored
Normal file
@@ -0,0 +1,29 @@
import { N8nApiClient } from '../services/n8n-api-client';
import { McpToolResponse } from '../types/n8n-api';
import { NodeRepository } from '../database/node-repository';
import { InstanceContext } from '../types/instance-context';
import { TemplateService } from '../templates/template-service';
export declare function getInstanceCacheStatistics(): string;
export declare function getInstanceCacheMetrics(): import("../utils/cache-utils").CacheMetrics;
export declare function clearInstanceCache(): void;
export declare function getN8nApiClient(context?: InstanceContext): N8nApiClient | null;
export declare function handleCreateWorkflow(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleGetWorkflow(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleGetWorkflowDetails(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleGetWorkflowStructure(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleGetWorkflowMinimal(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleUpdateWorkflow(args: unknown, repository: NodeRepository, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleDeleteWorkflow(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleListWorkflows(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleValidateWorkflow(args: unknown, repository: NodeRepository, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleAutofixWorkflow(args: unknown, repository: NodeRepository, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleTestWorkflow(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleGetExecution(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleListExecutions(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleDeleteExecution(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleHealthCheck(context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleDiagnostic(request: any, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleWorkflowVersions(args: unknown, repository: NodeRepository, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleDeployTemplate(args: unknown, templateService: TemplateService, repository: NodeRepository, context?: InstanceContext): Promise<McpToolResponse>;
export declare function handleTriggerWebhookWorkflow(args: unknown, context?: InstanceContext): Promise<McpToolResponse>;
//# sourceMappingURL=handlers-n8n-manager.d.ts.map
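Each handler above returns a McpToolResponse and takes an optional InstanceContext so a single server can talk to several n8n instances. A hedged sketch of invoking two of them directly; the InstanceContext field names and values are assumptions for illustration, not taken from this diff:

```typescript
// Hypothetical direct invocation of the management handlers.
import { handleHealthCheck, handleListWorkflows } from './mcp/handlers-n8n-manager';
import { InstanceContext } from './types/instance-context';

// Assumed shape: the context is expected to identify which n8n instance to call.
const context: InstanceContext = {
  n8nApiUrl: 'https://n8n.example.com/api/v1',   // placeholder URL (field name assumed)
  n8nApiKey: 'REDACTED',                          // placeholder key (field name assumed)
} as InstanceContext;

async function demo(): Promise<void> {
  console.log(await handleHealthCheck(context));
  console.log(await handleListWorkflows({ limit: 10 }, context));
}

demo().catch(console.error);
```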
1
dist/mcp/handlers-n8n-manager.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"handlers-n8n-manager.d.ts","sourceRoot":"","sources":["../../src/mcp/handlers-n8n-manager.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAE1D,OAAO,EAML,eAAe,EAGhB,MAAM,kBAAkB,CAAC;AAkB1B,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,eAAe,EAA2B,MAAM,2BAA2B,CAAC;AAOrF,OAAO,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAqNhE,wBAAgB,0BAA0B,IAAI,MAAM,CAEnD;AAMD,wBAAgB,uBAAuB,gDAEtC;AAKD,wBAAgB,kBAAkB,IAAI,IAAI,CAIzC;AAED,wBAAgB,eAAe,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,YAAY,GAAG,IAAI,CAgF9E;AAqHD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmF7G;AAED,wBAAsB,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC1G;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAoDjH;AAED,wBAAsB,0BAA0B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmDnH;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyCjH;AAED,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA8H1B;AAeD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAsC7G;AAED,wBAAsB,mBAAmB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiE5G;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA0F1B;AAED,wBAAsB,qBAAqB,CACzC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoK1B;AAQD,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAwJ3G;AAED,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CA8H3G;AAED,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAgD7G;AAED,wBAAsB,qBAAqB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC9G;AAID,wBAAsB,iBAAiB,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAwG3F;AAkLD,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAkQxG;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAsL1B;AA+BD,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,eAAe,EAAE,eAAe,EAChC,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoM1B;AAQD,wBAAsB,4BAA4B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyErH"}
2026  dist/mcp/handlers-n8n-manager.js  vendored  Normal file
File diff suppressed because it is too large
1  dist/mcp/handlers-n8n-manager.js.map  vendored  Normal file
File diff suppressed because one or more lines are too long
5  dist/mcp/handlers-workflow-diff.d.ts  vendored  Normal file
@@ -0,0 +1,5 @@
import { McpToolResponse } from '../types/n8n-api';
import { InstanceContext } from '../types/instance-context';
import { NodeRepository } from '../database/node-repository';
export declare function handleUpdatePartialWorkflow(args: unknown, repository: NodeRepository, context?: InstanceContext): Promise<McpToolResponse>;
//# sourceMappingURL=handlers-workflow-diff.d.ts.map
1  dist/mcp/handlers-workflow-diff.d.ts.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"handlers-workflow-diff.d.ts","sourceRoot":"","sources":["../../src/mcp/handlers-workflow-diff.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAMnD,OAAO,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AAE5D,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AA0D7D,wBAAsB,2BAA2B,CAC/C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA6V1B"}
461  dist/mcp/handlers-workflow-diff.js  vendored  Normal file
@@ -0,0 +1,461 @@
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.handleUpdatePartialWorkflow = handleUpdatePartialWorkflow;
|
||||
const zod_1 = require("zod");
|
||||
const workflow_diff_engine_1 = require("../services/workflow-diff-engine");
|
||||
const handlers_n8n_manager_1 = require("./handlers-n8n-manager");
|
||||
const n8n_errors_1 = require("../utils/n8n-errors");
|
||||
const logger_1 = require("../utils/logger");
|
||||
const n8n_validation_1 = require("../services/n8n-validation");
|
||||
const workflow_versioning_service_1 = require("../services/workflow-versioning-service");
|
||||
const workflow_validator_1 = require("../services/workflow-validator");
|
||||
const enhanced_config_validator_1 = require("../services/enhanced-config-validator");
|
||||
let cachedValidator = null;
|
||||
function getValidator(repository) {
|
||||
if (!cachedValidator) {
|
||||
cachedValidator = new workflow_validator_1.WorkflowValidator(repository, enhanced_config_validator_1.EnhancedConfigValidator);
|
||||
}
|
||||
return cachedValidator;
|
||||
}
|
||||
const workflowDiffSchema = zod_1.z.object({
|
||||
id: zod_1.z.string(),
|
||||
operations: zod_1.z.array(zod_1.z.object({
|
||||
type: zod_1.z.string(),
|
||||
description: zod_1.z.string().optional(),
|
||||
node: zod_1.z.any().optional(),
|
||||
nodeId: zod_1.z.string().optional(),
|
||||
nodeName: zod_1.z.string().optional(),
|
||||
updates: zod_1.z.any().optional(),
|
||||
position: zod_1.z.tuple([zod_1.z.number(), zod_1.z.number()]).optional(),
|
||||
source: zod_1.z.string().optional(),
|
||||
target: zod_1.z.string().optional(),
|
||||
from: zod_1.z.string().optional(),
|
||||
to: zod_1.z.string().optional(),
|
||||
sourceOutput: zod_1.z.string().optional(),
|
||||
targetInput: zod_1.z.string().optional(),
|
||||
sourceIndex: zod_1.z.number().optional(),
|
||||
targetIndex: zod_1.z.number().optional(),
|
||||
branch: zod_1.z.enum(['true', 'false']).optional(),
|
||||
case: zod_1.z.number().optional(),
|
||||
ignoreErrors: zod_1.z.boolean().optional(),
|
||||
dryRun: zod_1.z.boolean().optional(),
|
||||
connections: zod_1.z.any().optional(),
|
||||
settings: zod_1.z.any().optional(),
|
||||
name: zod_1.z.string().optional(),
|
||||
tag: zod_1.z.string().optional(),
|
||||
})),
|
||||
validateOnly: zod_1.z.boolean().optional(),
|
||||
continueOnError: zod_1.z.boolean().optional(),
|
||||
createBackup: zod_1.z.boolean().optional(),
|
||||
intent: zod_1.z.string().optional(),
|
||||
});
|
||||
async function handleUpdatePartialWorkflow(args, repository, context) {
|
||||
const startTime = Date.now();
|
||||
const sessionId = `mutation_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
|
||||
let workflowBefore = null;
|
||||
let validationBefore = null;
|
||||
let validationAfter = null;
|
||||
try {
|
||||
if (process.env.DEBUG_MCP === 'true') {
|
||||
logger_1.logger.debug('Workflow diff request received', {
|
||||
argsType: typeof args,
|
||||
hasWorkflowId: args && typeof args === 'object' && 'workflowId' in args,
|
||||
operationCount: args && typeof args === 'object' && 'operations' in args ?
|
||||
args.operations?.length : 0
|
||||
});
|
||||
}
|
||||
const input = workflowDiffSchema.parse(args);
|
||||
const client = (0, handlers_n8n_manager_1.getN8nApiClient)(context);
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'n8n API not configured. Please set N8N_API_URL and N8N_API_KEY environment variables.'
|
||||
};
|
||||
}
|
||||
let workflow;
|
||||
try {
|
||||
workflow = await client.getWorkflow(input.id);
|
||||
workflowBefore = JSON.parse(JSON.stringify(workflow));
|
||||
try {
|
||||
const validator = getValidator(repository);
|
||||
validationBefore = await validator.validateWorkflow(workflowBefore, {
|
||||
validateNodes: true,
|
||||
validateConnections: true,
|
||||
validateExpressions: true,
|
||||
profile: 'runtime'
|
||||
});
|
||||
}
|
||||
catch (validationError) {
|
||||
logger_1.logger.debug('Pre-mutation validation failed (non-blocking):', validationError);
|
||||
validationBefore = {
|
||||
valid: false,
|
||||
errors: [{ type: 'validation_error', message: 'Validation failed' }]
|
||||
};
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
if (error instanceof n8n_errors_1.N8nApiError) {
|
||||
return {
|
||||
success: false,
|
||||
error: (0, n8n_errors_1.getUserFriendlyErrorMessage)(error),
|
||||
code: error.code
|
||||
};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
if (input.createBackup !== false && !input.validateOnly) {
|
||||
try {
|
||||
const versioningService = new workflow_versioning_service_1.WorkflowVersioningService(repository, client);
|
||||
const backupResult = await versioningService.createBackup(input.id, workflow, {
|
||||
trigger: 'partial_update',
|
||||
operations: input.operations
|
||||
});
|
||||
logger_1.logger.info('Workflow backup created', {
|
||||
workflowId: input.id,
|
||||
versionId: backupResult.versionId,
|
||||
versionNumber: backupResult.versionNumber,
|
||||
pruned: backupResult.pruned
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.warn('Failed to create workflow backup', {
|
||||
workflowId: input.id,
|
||||
error: error.message
|
||||
});
|
||||
}
|
||||
}
|
||||
const diffEngine = new workflow_diff_engine_1.WorkflowDiffEngine();
|
||||
const diffRequest = input;
|
||||
const diffResult = await diffEngine.applyDiff(workflow, diffRequest);
|
||||
if (!diffResult.success) {
|
||||
if (diffRequest.continueOnError && diffResult.workflow && diffResult.operationsApplied && diffResult.operationsApplied > 0) {
|
||||
logger_1.logger.info(`continueOnError mode: Applying ${diffResult.operationsApplied} successful operations despite ${diffResult.failed?.length || 0} failures`);
|
||||
}
|
||||
else {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Failed to apply diff operations',
|
||||
details: {
|
||||
errors: diffResult.errors,
|
||||
warnings: diffResult.warnings,
|
||||
operationsApplied: diffResult.operationsApplied,
|
||||
applied: diffResult.applied,
|
||||
failed: diffResult.failed
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
if (input.validateOnly) {
|
||||
return {
|
||||
success: true,
|
||||
message: diffResult.message,
|
||||
data: {
|
||||
valid: true,
|
||||
operationsToApply: input.operations.length
|
||||
},
|
||||
details: {
|
||||
warnings: diffResult.warnings
|
||||
}
|
||||
};
|
||||
}
|
||||
if (diffResult.workflow) {
|
||||
const structureErrors = (0, n8n_validation_1.validateWorkflowStructure)(diffResult.workflow);
|
||||
if (structureErrors.length > 0) {
|
||||
const skipValidation = process.env.SKIP_WORKFLOW_VALIDATION === 'true';
|
||||
logger_1.logger.warn('Workflow structure validation failed after applying diff operations', {
|
||||
workflowId: input.id,
|
||||
errors: structureErrors,
|
||||
blocking: !skipValidation
|
||||
});
|
||||
const errorTypes = new Set();
|
||||
structureErrors.forEach(err => {
|
||||
if (err.includes('operator') || err.includes('singleValue'))
|
||||
errorTypes.add('operator_issues');
|
||||
if (err.includes('connection') || err.includes('referenced'))
|
||||
errorTypes.add('connection_issues');
|
||||
if (err.includes('Missing') || err.includes('missing'))
|
||||
errorTypes.add('missing_metadata');
|
||||
if (err.includes('branch') || err.includes('output'))
|
||||
errorTypes.add('branch_mismatch');
|
||||
});
|
||||
const recoverySteps = [];
|
||||
if (errorTypes.has('operator_issues')) {
|
||||
recoverySteps.push('Operator structure issue detected. Use validate_node_operation to check specific nodes.');
|
||||
recoverySteps.push('Binary operators (equals, contains, greaterThan, etc.) must NOT have singleValue:true');
|
||||
recoverySteps.push('Unary operators (isEmpty, isNotEmpty, true, false) REQUIRE singleValue:true');
|
||||
}
|
||||
if (errorTypes.has('connection_issues')) {
|
||||
recoverySteps.push('Connection validation failed. Check all node connections reference existing nodes.');
|
||||
recoverySteps.push('Use cleanStaleConnections operation to remove connections to non-existent nodes.');
|
||||
}
|
||||
if (errorTypes.has('missing_metadata')) {
|
||||
recoverySteps.push('Missing metadata detected. Ensure filter-based nodes (IF v2.2+, Switch v3.2+) have complete conditions.options.');
|
||||
recoverySteps.push('Required options: {version: 2, leftValue: "", caseSensitive: true, typeValidation: "strict"}');
|
||||
}
|
||||
if (errorTypes.has('branch_mismatch')) {
|
||||
recoverySteps.push('Branch count mismatch. Ensure Switch nodes have outputs for all rules (e.g., 3 rules = 3 output branches).');
|
||||
}
|
||||
if (recoverySteps.length === 0) {
|
||||
recoverySteps.push('Review the validation errors listed above');
|
||||
recoverySteps.push('Fix issues using updateNode or cleanStaleConnections operations');
|
||||
recoverySteps.push('Run validate_workflow again to verify fixes');
|
||||
}
|
||||
const errorMessage = structureErrors.length === 1
|
||||
? `Workflow validation failed: ${structureErrors[0]}`
|
||||
: `Workflow validation failed with ${structureErrors.length} structural issues`;
|
||||
if (!skipValidation) {
|
||||
return {
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
details: {
|
||||
errors: structureErrors,
|
||||
errorCount: structureErrors.length,
|
||||
operationsApplied: diffResult.operationsApplied,
|
||||
applied: diffResult.applied,
|
||||
recoveryGuidance: recoverySteps,
|
||||
note: 'Operations were applied but created an invalid workflow structure. The workflow was NOT saved to n8n to prevent UI rendering errors.',
|
||||
autoSanitizationNote: 'Auto-sanitization runs on all nodes during updates to fix operator structures and add missing metadata. However, it cannot fix all issues (e.g., broken connections, branch mismatches). Use the recovery guidance above to resolve remaining issues.'
|
||||
}
|
||||
};
|
||||
}
|
||||
logger_1.logger.info('Workflow validation skipped (SKIP_WORKFLOW_VALIDATION=true): Allowing workflow with validation warnings to proceed', {
|
||||
workflowId: input.id,
|
||||
warningCount: structureErrors.length
|
||||
});
|
||||
}
|
||||
}
|
||||
try {
|
||||
const updatedWorkflow = await client.updateWorkflow(input.id, diffResult.workflow);
|
||||
let finalWorkflow = updatedWorkflow;
|
||||
let activationMessage = '';
|
||||
try {
|
||||
const validator = getValidator(repository);
|
||||
validationAfter = await validator.validateWorkflow(finalWorkflow, {
|
||||
validateNodes: true,
|
||||
validateConnections: true,
|
||||
validateExpressions: true,
|
||||
profile: 'runtime'
|
||||
});
|
||||
}
|
||||
catch (validationError) {
|
||||
logger_1.logger.debug('Post-mutation validation failed (non-blocking):', validationError);
|
||||
validationAfter = {
|
||||
valid: false,
|
||||
errors: [{ type: 'validation_error', message: 'Validation failed' }]
|
||||
};
|
||||
}
|
||||
if (diffResult.shouldActivate) {
|
||||
try {
|
||||
finalWorkflow = await client.activateWorkflow(input.id);
|
||||
activationMessage = ' Workflow activated.';
|
||||
}
|
||||
catch (activationError) {
|
||||
logger_1.logger.error('Failed to activate workflow after update', activationError);
|
||||
return {
|
||||
success: false,
|
||||
error: 'Workflow updated successfully but activation failed',
|
||||
details: {
|
||||
workflowUpdated: true,
|
||||
activationError: activationError instanceof Error ? activationError.message : 'Unknown error'
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
else if (diffResult.shouldDeactivate) {
|
||||
try {
|
||||
finalWorkflow = await client.deactivateWorkflow(input.id);
|
||||
activationMessage = ' Workflow deactivated.';
|
||||
}
|
||||
catch (deactivationError) {
|
||||
logger_1.logger.error('Failed to deactivate workflow after update', deactivationError);
|
||||
return {
|
||||
success: false,
|
||||
error: 'Workflow updated successfully but deactivation failed',
|
||||
details: {
|
||||
workflowUpdated: true,
|
||||
deactivationError: deactivationError instanceof Error ? deactivationError.message : 'Unknown error'
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
if (workflowBefore && !input.validateOnly) {
|
||||
trackWorkflowMutation({
|
||||
sessionId,
|
||||
toolName: 'n8n_update_partial_workflow',
|
||||
userIntent: input.intent || 'Partial workflow update',
|
||||
operations: input.operations,
|
||||
workflowBefore,
|
||||
workflowAfter: finalWorkflow,
|
||||
validationBefore,
|
||||
validationAfter,
|
||||
mutationSuccess: true,
|
||||
durationMs: Date.now() - startTime,
|
||||
}).catch(err => {
|
||||
logger_1.logger.debug('Failed to track mutation telemetry:', err);
|
||||
});
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
id: finalWorkflow.id,
|
||||
name: finalWorkflow.name,
|
||||
active: finalWorkflow.active,
|
||||
nodeCount: finalWorkflow.nodes?.length || 0,
|
||||
operationsApplied: diffResult.operationsApplied
|
||||
},
|
||||
message: `Workflow "${finalWorkflow.name}" updated successfully. Applied ${diffResult.operationsApplied} operations.${activationMessage} Use n8n_get_workflow with mode 'structure' to verify current state.`,
|
||||
details: {
|
||||
applied: diffResult.applied,
|
||||
failed: diffResult.failed,
|
||||
errors: diffResult.errors,
|
||||
warnings: diffResult.warnings
|
||||
}
|
||||
};
|
||||
}
|
||||
catch (error) {
|
||||
if (workflowBefore && !input.validateOnly) {
|
||||
trackWorkflowMutation({
|
||||
sessionId,
|
||||
toolName: 'n8n_update_partial_workflow',
|
||||
userIntent: input.intent || 'Partial workflow update',
|
||||
operations: input.operations,
|
||||
workflowBefore,
|
||||
workflowAfter: workflowBefore,
|
||||
validationBefore,
|
||||
validationAfter: validationBefore,
|
||||
mutationSuccess: false,
|
||||
mutationError: error instanceof Error ? error.message : 'Unknown error',
|
||||
durationMs: Date.now() - startTime,
|
||||
}).catch(err => {
|
||||
logger_1.logger.warn('Failed to track mutation telemetry for failed operation:', err);
|
||||
});
|
||||
}
|
||||
if (error instanceof n8n_errors_1.N8nApiError) {
|
||||
return {
|
||||
success: false,
|
||||
error: (0, n8n_errors_1.getUserFriendlyErrorMessage)(error),
|
||||
code: error.code,
|
||||
details: error.details
|
||||
};
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
if (error instanceof zod_1.z.ZodError) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Invalid input',
|
||||
details: { errors: error.errors }
|
||||
};
|
||||
}
|
||||
logger_1.logger.error('Failed to update partial workflow', error);
|
||||
return {
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
};
|
||||
}
|
||||
}
|
||||
function inferIntentFromOperations(operations) {
|
||||
if (!operations || operations.length === 0) {
|
||||
return 'Partial workflow update';
|
||||
}
|
||||
const opTypes = operations.map((op) => op.type);
|
||||
const opCount = operations.length;
|
||||
if (opCount === 1) {
|
||||
const op = operations[0];
|
||||
switch (op.type) {
|
||||
case 'addNode':
|
||||
return `Add ${op.node?.type || 'node'}`;
|
||||
case 'removeNode':
|
||||
return `Remove node ${op.nodeName || op.nodeId || ''}`.trim();
|
||||
case 'updateNode':
|
||||
return `Update node ${op.nodeName || op.nodeId || ''}`.trim();
|
||||
case 'addConnection':
|
||||
return `Connect ${op.source || 'node'} to ${op.target || 'node'}`;
|
||||
case 'removeConnection':
|
||||
return `Disconnect ${op.source || 'node'} from ${op.target || 'node'}`;
|
||||
case 'rewireConnection':
|
||||
return `Rewire ${op.source || 'node'} from ${op.from || ''} to ${op.to || ''}`.trim();
|
||||
case 'updateName':
|
||||
return `Rename workflow to "${op.name || ''}"`;
|
||||
case 'activateWorkflow':
|
||||
return 'Activate workflow';
|
||||
case 'deactivateWorkflow':
|
||||
return 'Deactivate workflow';
|
||||
default:
|
||||
return `Workflow ${op.type}`;
|
||||
}
|
||||
}
|
||||
const typeSet = new Set(opTypes);
|
||||
const summary = [];
|
||||
if (typeSet.has('addNode')) {
|
||||
const count = opTypes.filter((t) => t === 'addNode').length;
|
||||
summary.push(`add ${count} node${count > 1 ? 's' : ''}`);
|
||||
}
|
||||
if (typeSet.has('removeNode')) {
|
||||
const count = opTypes.filter((t) => t === 'removeNode').length;
|
||||
summary.push(`remove ${count} node${count > 1 ? 's' : ''}`);
|
||||
}
|
||||
if (typeSet.has('updateNode')) {
|
||||
const count = opTypes.filter((t) => t === 'updateNode').length;
|
||||
summary.push(`update ${count} node${count > 1 ? 's' : ''}`);
|
||||
}
|
||||
if (typeSet.has('addConnection') || typeSet.has('rewireConnection')) {
|
||||
summary.push('modify connections');
|
||||
}
|
||||
if (typeSet.has('updateName') || typeSet.has('updateSettings')) {
|
||||
summary.push('update metadata');
|
||||
}
|
||||
return summary.length > 0
|
||||
? `Workflow update: ${summary.join(', ')}`
|
||||
: `Workflow update: ${opCount} operations`;
|
||||
}
|
||||
async function trackWorkflowMutation(data) {
|
||||
try {
|
||||
if (!data.userIntent ||
|
||||
data.userIntent === 'Partial workflow update' ||
|
||||
data.userIntent.length < 10) {
|
||||
data.userIntent = inferIntentFromOperations(data.operations);
|
||||
}
|
||||
const { telemetry } = await Promise.resolve().then(() => __importStar(require('../telemetry/telemetry-manager.js')));
|
||||
await telemetry.trackWorkflowMutation(data);
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.debug('Telemetry tracking failed:', error);
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=handlers-workflow-diff.js.map
1  dist/mcp/handlers-workflow-diff.js.map  vendored  Normal file
File diff suppressed because one or more lines are too long
3  dist/mcp/index.d.ts  vendored  Normal file
@@ -0,0 +1,3 @@
#!/usr/bin/env node
export {};
//# sourceMappingURL=index.d.ts.map
1  dist/mcp/index.d.ts.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/mcp/index.ts"],"names":[],"mappings":""}
228  dist/mcp/index.js  vendored  Normal file
@@ -0,0 +1,228 @@
#!/usr/bin/env node
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const server_1 = require("./server");
|
||||
const logger_1 = require("../utils/logger");
|
||||
const config_manager_1 = require("../telemetry/config-manager");
|
||||
const early_error_logger_1 = require("../telemetry/early-error-logger");
|
||||
const startup_checkpoints_1 = require("../telemetry/startup-checkpoints");
|
||||
const fs_1 = require("fs");
|
||||
process.on('uncaughtException', (error) => {
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
console.error('Uncaught Exception:', error);
|
||||
}
|
||||
logger_1.logger.error('Uncaught Exception:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
process.on('unhandledRejection', (reason, promise) => {
|
||||
if (process.env.MCP_MODE !== 'stdio') {
|
||||
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
|
||||
}
|
||||
logger_1.logger.error('Unhandled Rejection:', reason);
|
||||
process.exit(1);
|
||||
});
|
||||
function isContainerEnvironment() {
|
||||
const dockerEnv = (process.env.IS_DOCKER || '').toLowerCase();
|
||||
const containerEnv = (process.env.IS_CONTAINER || '').toLowerCase();
|
||||
if (['true', '1', 'yes'].includes(dockerEnv)) {
|
||||
return true;
|
||||
}
|
||||
if (['true', '1', 'yes'].includes(containerEnv)) {
|
||||
return true;
|
||||
}
|
||||
try {
|
||||
return (0, fs_1.existsSync)('/.dockerenv') || (0, fs_1.existsSync)('/run/.containerenv');
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.debug('Container detection filesystem check failed:', error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
async function main() {
|
||||
const startTime = Date.now();
|
||||
const earlyLogger = early_error_logger_1.EarlyErrorLogger.getInstance();
|
||||
const checkpoints = [];
|
||||
try {
|
||||
earlyLogger.logCheckpoint(startup_checkpoints_1.STARTUP_CHECKPOINTS.PROCESS_STARTED);
|
||||
checkpoints.push(startup_checkpoints_1.STARTUP_CHECKPOINTS.PROCESS_STARTED);
|
||||
const args = process.argv.slice(2);
|
||||
if (args.length > 0 && args[0] === 'telemetry') {
|
||||
const telemetryConfig = config_manager_1.TelemetryConfigManager.getInstance();
|
||||
const action = args[1];
|
||||
switch (action) {
|
||||
case 'enable':
|
||||
telemetryConfig.enable();
|
||||
process.exit(0);
|
||||
break;
|
||||
case 'disable':
|
||||
telemetryConfig.disable();
|
||||
process.exit(0);
|
||||
break;
|
||||
case 'status':
|
||||
console.log(telemetryConfig.getStatus());
|
||||
process.exit(0);
|
||||
break;
|
||||
default:
|
||||
console.log(`
|
||||
Usage: n8n-mcp telemetry [command]
|
||||
|
||||
Commands:
|
||||
enable Enable anonymous telemetry
|
||||
disable Disable anonymous telemetry
|
||||
status Show current telemetry status
|
||||
|
||||
Learn more: https://github.com/czlonkowski/n8n-mcp/blob/main/PRIVACY.md
|
||||
`);
|
||||
process.exit(args[1] ? 1 : 0);
|
||||
}
|
||||
}
|
||||
const mode = process.env.MCP_MODE || 'stdio';
|
||||
earlyLogger.logCheckpoint(startup_checkpoints_1.STARTUP_CHECKPOINTS.TELEMETRY_INITIALIZING);
|
||||
checkpoints.push(startup_checkpoints_1.STARTUP_CHECKPOINTS.TELEMETRY_INITIALIZING);
|
||||
earlyLogger.logCheckpoint(startup_checkpoints_1.STARTUP_CHECKPOINTS.TELEMETRY_READY);
|
||||
checkpoints.push(startup_checkpoints_1.STARTUP_CHECKPOINTS.TELEMETRY_READY);
|
||||
try {
|
||||
if (mode === 'http') {
|
||||
console.error(`Starting n8n Documentation MCP Server in ${mode} mode...`);
|
||||
console.error('Current directory:', process.cwd());
|
||||
console.error('Node version:', process.version);
|
||||
}
|
||||
earlyLogger.logCheckpoint(startup_checkpoints_1.STARTUP_CHECKPOINTS.MCP_HANDSHAKE_STARTING);
|
||||
checkpoints.push(startup_checkpoints_1.STARTUP_CHECKPOINTS.MCP_HANDSHAKE_STARTING);
|
||||
if (mode === 'http') {
|
||||
if (process.env.USE_FIXED_HTTP === 'true') {
|
||||
logger_1.logger.warn('DEPRECATION WARNING: USE_FIXED_HTTP=true is deprecated as of v2.31.8. ' +
|
||||
'The fixed HTTP implementation does not support SSE streaming required by clients like OpenAI Codex. ' +
|
||||
'Please unset USE_FIXED_HTTP to use the modern SingleSessionHTTPServer which supports both JSON-RPC and SSE. ' +
|
||||
'This option will be removed in a future version. See: https://github.com/czlonkowski/n8n-mcp/issues/524');
|
||||
console.warn('\n⚠️ DEPRECATION WARNING ⚠️');
|
||||
console.warn('USE_FIXED_HTTP=true is deprecated as of v2.31.8.');
|
||||
console.warn('The fixed HTTP implementation does not support SSE streaming.');
|
||||
console.warn('Please unset USE_FIXED_HTTP to use SingleSessionHTTPServer.');
|
||||
console.warn('See: https://github.com/czlonkowski/n8n-mcp/issues/524\n');
|
||||
const { startFixedHTTPServer } = await Promise.resolve().then(() => __importStar(require('../http-server')));
|
||||
await startFixedHTTPServer();
|
||||
}
|
||||
else {
|
||||
const { SingleSessionHTTPServer } = await Promise.resolve().then(() => __importStar(require('../http-server-single-session')));
|
||||
const server = new SingleSessionHTTPServer();
|
||||
const shutdown = async () => {
|
||||
await server.shutdown();
|
||||
process.exit(0);
|
||||
};
|
||||
process.on('SIGTERM', shutdown);
|
||||
process.on('SIGINT', shutdown);
|
||||
await server.start();
|
||||
}
|
||||
}
|
||||
else {
|
||||
const server = new server_1.N8NDocumentationMCPServer(undefined, earlyLogger);
|
||||
let isShuttingDown = false;
|
||||
const shutdown = async (signal = 'UNKNOWN') => {
|
||||
if (isShuttingDown)
|
||||
return;
|
||||
isShuttingDown = true;
|
||||
try {
|
||||
logger_1.logger.info(`Shutdown initiated by: ${signal}`);
|
||||
await server.shutdown();
|
||||
if (process.stdin && !process.stdin.destroyed) {
|
||||
process.stdin.pause();
|
||||
process.stdin.destroy();
|
||||
}
|
||||
setTimeout(() => {
|
||||
logger_1.logger.warn('Shutdown timeout exceeded, forcing exit');
|
||||
process.exit(0);
|
||||
}, 1000).unref();
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.error('Error during shutdown:', error);
|
||||
process.exit(1);
|
||||
}
|
||||
};
|
||||
process.on('SIGTERM', () => shutdown('SIGTERM'));
|
||||
process.on('SIGINT', () => shutdown('SIGINT'));
|
||||
process.on('SIGHUP', () => shutdown('SIGHUP'));
|
||||
const isContainer = isContainerEnvironment();
|
||||
if (!isContainer && process.stdin.readable && !process.stdin.destroyed) {
|
||||
try {
|
||||
process.stdin.on('end', () => shutdown('STDIN_END'));
|
||||
process.stdin.on('close', () => shutdown('STDIN_CLOSE'));
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.error('Failed to register stdin handlers, using signal handlers only:', error);
|
||||
}
|
||||
}
|
||||
await server.run();
|
||||
}
|
||||
earlyLogger.logCheckpoint(startup_checkpoints_1.STARTUP_CHECKPOINTS.MCP_HANDSHAKE_COMPLETE);
|
||||
checkpoints.push(startup_checkpoints_1.STARTUP_CHECKPOINTS.MCP_HANDSHAKE_COMPLETE);
|
||||
earlyLogger.logCheckpoint(startup_checkpoints_1.STARTUP_CHECKPOINTS.SERVER_READY);
|
||||
checkpoints.push(startup_checkpoints_1.STARTUP_CHECKPOINTS.SERVER_READY);
|
||||
const startupDuration = Date.now() - startTime;
|
||||
earlyLogger.logStartupSuccess(checkpoints, startupDuration);
|
||||
logger_1.logger.info(`Server startup completed in ${startupDuration}ms (${checkpoints.length} checkpoints passed)`);
|
||||
}
|
||||
catch (error) {
|
||||
const failedCheckpoint = (0, startup_checkpoints_1.findFailedCheckpoint)(checkpoints);
|
||||
earlyLogger.logStartupError(failedCheckpoint, error);
|
||||
if (mode !== 'stdio') {
|
||||
console.error('Failed to start MCP server:', error);
|
||||
logger_1.logger.error('Failed to start MCP server', error);
|
||||
if (error instanceof Error && error.message.includes('nodes.db not found')) {
|
||||
console.error('\nTo fix this issue:');
|
||||
console.error('1. cd to the n8n-mcp directory');
|
||||
console.error('2. Run: npm run build');
|
||||
console.error('3. Run: npm run rebuild');
|
||||
}
|
||||
else if (error instanceof Error && error.message.includes('NODE_MODULE_VERSION')) {
|
||||
console.error('\nTo fix this Node.js version mismatch:');
|
||||
console.error('1. cd to the n8n-mcp directory');
|
||||
console.error('2. Run: npm rebuild better-sqlite3');
|
||||
console.error('3. If that doesn\'t work, try: rm -rf node_modules && npm install');
|
||||
}
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
catch (outerError) {
|
||||
logger_1.logger.error('Critical startup error:', outerError);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
if (require.main === module) {
|
||||
main().catch(console.error);
|
||||
}
|
||||
//# sourceMappingURL=index.js.map
1  dist/mcp/index.js.map  vendored  Normal file
File diff suppressed because one or more lines are too long
82  dist/mcp/server.d.ts  vendored  Normal file
@@ -0,0 +1,82 @@
import { InstanceContext } from '../types/instance-context';
import { EarlyErrorLogger } from '../telemetry/early-error-logger';
export declare class N8NDocumentationMCPServer {
private server;
private db;
private repository;
private templateService;
private initialized;
private cache;
private clientInfo;
private instanceContext?;
private previousTool;
private previousToolTimestamp;
private earlyLogger;
private disabledToolsCache;
constructor(instanceContext?: InstanceContext, earlyLogger?: EarlyErrorLogger);
close(): Promise<void>;
private initializeDatabase;
private initializeInMemorySchema;
private parseSQLStatements;
private ensureInitialized;
private dbHealthChecked;
private validateDatabaseHealth;
private getDisabledTools;
private setupHandlers;
private sanitizeValidationResult;
private validateToolParams;
private validateToolParamsBasic;
private validateExtractedArgs;
private listNodes;
private getNodeInfo;
private searchNodes;
private searchNodesFTS;
private searchNodesFuzzy;
private calculateFuzzyScore;
private getEditDistance;
private searchNodesLIKE;
private calculateRelevance;
private calculateRelevanceScore;
private rankSearchResults;
private listAITools;
private getNodeDocumentation;
private safeJsonParse;
private getDatabaseStatistics;
private getNodeEssentials;
private getNode;
private handleInfoMode;
private handleVersionMode;
private getVersionSummary;
private getVersionHistory;
private compareVersions;
private getBreakingChanges;
private getMigrations;
private enrichPropertyWithTypeInfo;
private enrichPropertiesWithTypeInfo;
private searchNodeProperties;
private getPropertyValue;
private listTasks;
private validateNodeConfig;
private getPropertyDependencies;
private getNodeAsToolInfo;
private getOutputDescriptions;
private getCommonAIToolUseCases;
private buildToolVariantGuidance;
private getAIToolExamples;
private validateNodeMinimal;
private getToolsDocumentation;
connect(transport: any): Promise<void>;
private listTemplates;
private listNodeTemplates;
private getTemplate;
private searchTemplates;
private getTemplatesForTask;
private searchTemplatesByMetadata;
private getTaskDescription;
private validateWorkflow;
private validateWorkflowConnections;
private validateWorkflowExpressions;
run(): Promise<void>;
shutdown(): Promise<void>;
}
//# sourceMappingURL=server.d.ts.map
1  dist/mcp/server.d.ts.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../../src/mcp/server.ts"],"names":[],"mappings":"AAsCA,OAAO,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AAE5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,iCAAiC,CAAC;AAmGnE,qBAAa,yBAAyB;IACpC,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,EAAE,CAAgC;IAC1C,OAAO,CAAC,UAAU,CAA+B;IACjD,OAAO,CAAC,eAAe,CAAgC;IACvD,OAAO,CAAC,WAAW,CAAgB;IACnC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,UAAU,CAAa;IAC/B,OAAO,CAAC,eAAe,CAAC,CAAkB;IAC1C,OAAO,CAAC,YAAY,CAAuB;IAC3C,OAAO,CAAC,qBAAqB,CAAsB;IACnD,OAAO,CAAC,WAAW,CAAiC;IACpD,OAAO,CAAC,kBAAkB,CAA4B;gBAE1C,eAAe,CAAC,EAAE,eAAe,EAAE,WAAW,CAAC,EAAE,gBAAgB;IAiGvE,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;YA6Bd,kBAAkB;YAwClB,wBAAwB;IA0BtC,OAAO,CAAC,kBAAkB;YA6CZ,iBAAiB;IAa/B,OAAO,CAAC,eAAe,CAAkB;YAE3B,sBAAsB;IAgDpC,OAAO,CAAC,gBAAgB;IAqCxB,OAAO,CAAC,aAAa;IAoTrB,OAAO,CAAC,wBAAwB;IAoFhC,OAAO,CAAC,kBAAkB;IAqE1B,OAAO,CAAC,uBAAuB;IAwB/B,OAAO,CAAC,qBAAqB;YAoTf,SAAS;YA2DT,WAAW;YAkFX,WAAW;YA0CX,cAAc;YA8Md,gBAAgB;IAqD9B,OAAO,CAAC,mBAAmB;IAwE3B,OAAO,CAAC,eAAe;YAsBT,eAAe;IA2L7B,OAAO,CAAC,kBAAkB;IAQ1B,OAAO,CAAC,uBAAuB;IA0D/B,OAAO,CAAC,iBAAiB;YAqFX,WAAW;YAgCX,oBAAoB;IAuFlC,OAAO,CAAC,aAAa;YAQP,qBAAqB;YAwDrB,iBAAiB;YAiKjB,OAAO;YAgDP,cAAc;YAwFd,iBAAiB;IAqC/B,OAAO,CAAC,iBAAiB;IA0BzB,OAAO,CAAC,iBAAiB;IA0BzB,OAAO,CAAC,eAAe;IAwCvB,OAAO,CAAC,kBAAkB;IAiC1B,OAAO,CAAC,aAAa;IAoCrB,OAAO,CAAC,0BAA0B;IAgClC,OAAO,CAAC,4BAA4B;YAKtB,oBAAoB;IAsDlC,OAAO,CAAC,gBAAgB;YAiBV,SAAS;YA6CT,kBAAkB;YAqElB,uBAAuB;YAsDvB,iBAAiB;IAqE/B,OAAO,CAAC,qBAAqB;IA8C7B,OAAO,CAAC,uBAAuB;IA4D/B,OAAO,CAAC,wBAAwB;IAkChC,OAAO,CAAC,iBAAiB;YAoDX,mBAAmB;YAoEnB,qBAAqB;IAS7B,OAAO,CAAC,SAAS,EAAE,GAAG,GAAG,OAAO,CAAC,IAAI,CAAC;YAS9B,aAAa;YAcb,iBAAiB;YAoBjB,WAAW;YAwBX,eAAe;YAqBf,mBAAmB;YAwBnB,yBAAyB;IA4CvC,OAAO,CAAC,kBAAkB;YAiBZ,gBAAgB;YA6HhB,2BAA2B;YAiE3B,2BAA2B;IAyEnC,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;IA0BpB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAuBhC"}
2914  dist/mcp/server.js  vendored  Normal file
File diff suppressed because it is too large
1  dist/mcp/server.js.map  vendored  Normal file
File diff suppressed because one or more lines are too long
3  dist/mcp/stdio-wrapper.d.ts  vendored  Normal file
@@ -0,0 +1,3 @@
#!/usr/bin/env node
export {};
//# sourceMappingURL=stdio-wrapper.d.ts.map
1  dist/mcp/stdio-wrapper.d.ts.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"stdio-wrapper.d.ts","sourceRoot":"","sources":["../../src/mcp/stdio-wrapper.ts"],"names":[],"mappings":""}
81  dist/mcp/stdio-wrapper.js  vendored  Normal file
@@ -0,0 +1,81 @@
#!/usr/bin/env node
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
process.env.MCP_MODE = 'stdio';
process.env.DISABLE_CONSOLE_OUTPUT = 'true';
process.env.LOG_LEVEL = 'error';
const originalConsoleLog = console.log;
const originalConsoleError = console.error;
const originalConsoleWarn = console.warn;
const originalConsoleInfo = console.info;
const originalConsoleDebug = console.debug;
const originalConsoleTrace = console.trace;
const originalConsoleDir = console.dir;
const originalConsoleTime = console.time;
const originalConsoleTimeEnd = console.timeEnd;
console.log = () => { };
console.error = () => { };
console.warn = () => { };
console.info = () => { };
console.debug = () => { };
console.trace = () => { };
console.dir = () => { };
console.time = () => { };
console.timeEnd = () => { };
console.timeLog = () => { };
console.group = () => { };
console.groupEnd = () => { };
console.table = () => { };
console.clear = () => { };
console.count = () => { };
console.countReset = () => { };
const server_1 = require("./server");
let server = null;
async function main() {
try {
server = new server_1.N8NDocumentationMCPServer();
await server.run();
}
catch (error) {
originalConsoleError('Fatal error:', error);
process.exit(1);
}
}
process.on('uncaughtException', (error) => {
originalConsoleError('Uncaught exception:', error);
process.exit(1);
});
process.on('unhandledRejection', (reason) => {
originalConsoleError('Unhandled rejection:', reason);
process.exit(1);
});
let isShuttingDown = false;
async function shutdown(signal) {
if (isShuttingDown)
return;
isShuttingDown = true;
originalConsoleError(`Received ${signal}, shutting down gracefully...`);
try {
if (server) {
await server.shutdown();
}
}
catch (error) {
originalConsoleError('Error during shutdown:', error);
}
process.stdin.pause();
process.stdin.destroy();
setTimeout(() => {
process.exit(0);
}, 500).unref();
process.exit(0);
}
process.on('SIGTERM', () => void shutdown('SIGTERM'));
process.on('SIGINT', () => void shutdown('SIGINT'));
process.on('SIGHUP', () => void shutdown('SIGHUP'));
process.stdin.on('end', () => {
originalConsoleError('stdin closed, shutting down...');
void shutdown('STDIN_CLOSE');
});
main();
//# sourceMappingURL=stdio-wrapper.js.map
1  dist/mcp/stdio-wrapper.js.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"stdio-wrapper.js","sourceRoot":"","sources":["../../src/mcp/stdio-wrapper.ts"],"names":[],"mappings":";;;AAQA,OAAO,CAAC,GAAG,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC/B,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,MAAM,CAAC;AAC5C,OAAO,CAAC,GAAG,CAAC,SAAS,GAAG,OAAO,CAAC;AAGhC,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC;AACvC,MAAM,oBAAoB,GAAG,OAAO,CAAC,KAAK,CAAC;AAC3C,MAAM,mBAAmB,GAAG,OAAO,CAAC,IAAI,CAAC;AACzC,MAAM,mBAAmB,GAAG,OAAO,CAAC,IAAI,CAAC;AACzC,MAAM,oBAAoB,GAAG,OAAO,CAAC,KAAK,CAAC;AAC3C,MAAM,oBAAoB,GAAG,OAAO,CAAC,KAAK,CAAC;AAC3C,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC;AACvC,MAAM,mBAAmB,GAAG,OAAO,CAAC,IAAI,CAAC;AACzC,MAAM,sBAAsB,GAAG,OAAO,CAAC,OAAO,CAAC;AAG/C,OAAO,CAAC,GAAG,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACvB,OAAO,CAAC,KAAK,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACzB,OAAO,CAAC,IAAI,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACxB,OAAO,CAAC,IAAI,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACxB,OAAO,CAAC,KAAK,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACzB,OAAO,CAAC,KAAK,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACzB,OAAO,CAAC,GAAG,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACvB,OAAO,CAAC,IAAI,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACxB,OAAO,CAAC,OAAO,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AAC3B,OAAO,CAAC,OAAO,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AAC3B,OAAO,CAAC,KAAK,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACzB,OAAO,CAAC,QAAQ,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AAC5B,OAAO,CAAC,KAAK,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACzB,OAAO,CAAC,KAAK,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACzB,OAAO,CAAC,KAAK,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AACzB,OAAO,CAAC,UAAU,GAAG,GAAG,EAAE,GAAE,CAAC,CAAC;AAG9B,qCAAqD;AAErD,IAAI,MAAM,GAAqC,IAAI,CAAC;AAEpD,KAAK,UAAU,IAAI;IACjB,IAAI,CAAC;QACH,MAAM,GAAG,IAAI,kCAAyB,EAAE,CAAC;QACzC,MAAM,MAAM,CAAC,GAAG,EAAE,CAAC;IACrB,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QAEf,oBAAoB,CAAC,cAAc,EAAE,KAAK,CAAC,CAAC;QAC5C,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;AACH,CAAC;AAGD,OAAO,CAAC,EAAE,CAAC,mBAAmB,EAAE,CAAC,KAAK,EAAE,EAAE;IACxC,oBAAoB,CAAC,qBAAqB,EAAE,KAAK,CAAC,CAAC;IACnD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC,CAAC,CAAC;AAEH,OAAO,CAAC,EAAE,CAAC,oBAAoB,EAAE,CAAC,MAAM,EAAE,EAAE;IAC1C,oBAAoB,CAAC,sBAAsB,EAAE,MAAM,CAAC,CAAC;IACrD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC,CAAC,CAAC;AAGH,IAAI,cAAc,GAAG,KAAK,CAAC;AAE3B,KAAK,UAAU,QAAQ,CAAC,MAAc;IACpC,IAAI,cAAc;QAAE,OAAO;IAC3B,cAAc,GAAG,IAAI,CAAC;IAGtB,oBAAoB,CAAC,YAAY,MAAM,+BAA+B,CAAC,CAAC;IAExE,IAAI,CAAC;QAEH,IAAI,MAAM,EAAE,CAAC;YACX,MAAM,MAAM,CAAC,QAAQ,EAAE,CAAC;QAC1B,CAAC;IACH,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,oBAAoB,CAAC,wBAAwB,EAAE,KAAK,CAAC,CAAC;IACxD,CAAC;IAGD,OAAO,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC;IACtB,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC;IAGxB,UAAU,CAAC,GAAG,EAAE;QACd,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC,EAAE,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC;IAGhB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC;AAGD,OAAO,CAAC,EAAE,CAAC,SAAS,EAAE,GAAG,EAAE,CAAC,KAAK,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC;AACtD,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,KAAK,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AACpD,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,KAAK,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;AAGpD,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;IAC3B,oBAAoB,CAAC,gCAAgC,CAAC,CAAC;IACvD,KAAK,QAAQ,CAAC,aAAa,CAAC,CAAC;AAC/B,CAAC,CAAC,CAAC;AAEH,IAAI,EAAE,CAAC"}
3  dist/mcp/tool-docs/configuration/get-node.d.ts  vendored  Normal file
@@ -0,0 +1,3 @@
import { ToolDocumentation } from '../types';
export declare const getNodeDoc: ToolDocumentation;
//# sourceMappingURL=get-node.d.ts.map
1  dist/mcp/tool-docs/configuration/get-node.d.ts.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"get-node.d.ts","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/configuration/get-node.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAE7C,eAAO,MAAM,UAAU,EAAE,iBAqFxB,CAAC"}
90  dist/mcp/tool-docs/configuration/get-node.js  vendored  Normal file
@@ -0,0 +1,90 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getNodeDoc = void 0;
|
||||
exports.getNodeDoc = {
|
||||
name: 'get_node',
|
||||
category: 'configuration',
|
||||
essentials: {
|
||||
description: 'Unified node information tool with progressive detail levels and multiple modes. Get node schema, docs, search properties, or version info.',
|
||||
keyParameters: ['nodeType', 'detail', 'mode', 'includeTypeInfo', 'includeExamples'],
|
||||
example: 'get_node({nodeType: "nodes-base.httpRequest", detail: "standard"})',
|
||||
performance: 'Instant (<10ms) for minimal/standard, moderate for full',
|
||||
tips: [
|
||||
'Use detail="standard" (default) for most tasks - shows required fields',
|
||||
'Use mode="docs" for readable markdown documentation',
|
||||
'Use mode="search_properties" with propertyQuery to find specific fields',
|
||||
'Use mode="versions" to check version history and breaking changes',
|
||||
'Add includeExamples=true to get real-world configuration examples'
|
||||
]
|
||||
},
|
||||
full: {
|
||||
description: `**Detail Levels (mode="info", default):**
|
||||
- minimal (~200 tokens): Basic metadata only - nodeType, displayName, description, category
|
||||
- standard (~1-2K tokens): Essential properties + operations - recommended for most tasks
|
||||
- full (~3-8K tokens): Complete node schema - use only when standard insufficient
|
||||
|
||||
**Operation Modes:**
|
||||
- info (default): Node schema with configurable detail level
|
||||
- docs: Readable markdown documentation with examples and patterns
|
||||
- search_properties: Find specific properties within a node
|
||||
- versions: List all available versions with breaking changes summary
|
||||
- compare: Compare two versions with property-level changes
|
||||
- breaking: Show only breaking changes between versions
|
||||
- migrations: Show auto-migratable changes between versions`,
|
||||
parameters: {
|
||||
nodeType: { type: 'string', required: true, description: 'Full node type with prefix: "nodes-base.httpRequest" or "nodes-langchain.agent"' },
|
||||
detail: { type: 'string', required: false, description: 'Detail level for mode=info: "minimal", "standard" (default), "full"' },
|
||||
mode: { type: 'string', required: false, description: 'Operation mode: "info" (default), "docs", "search_properties", "versions", "compare", "breaking", "migrations"' },
|
||||
includeTypeInfo: { type: 'boolean', required: false, description: 'Include type structure metadata (validation rules, JS types). Adds ~80-120 tokens per property' },
|
||||
includeExamples: { type: 'boolean', required: false, description: 'Include real-world configuration examples from templates. Adds ~200-400 tokens per example' },
|
||||
propertyQuery: { type: 'string', required: false, description: 'For mode=search_properties: search term to find properties (e.g., "auth", "header", "body")' },
|
||||
maxPropertyResults: { type: 'number', required: false, description: 'For mode=search_properties: max results (default 20)' },
|
||||
fromVersion: { type: 'string', required: false, description: 'For compare/breaking/migrations modes: source version (e.g., "1.0")' },
|
||||
toVersion: { type: 'string', required: false, description: 'For compare mode: target version (e.g., "2.0"). Defaults to latest' }
|
||||
},
|
||||
returns: `Depends on mode:
|
||||
- info: Node schema with properties based on detail level
|
||||
- docs: Markdown documentation string
|
||||
- search_properties: Array of matching property paths with descriptions
|
||||
- versions: Version history with breaking changes flags
|
||||
- compare/breaking/migrations: Version comparison details`,
|
||||
examples: [
|
||||
'// Standard detail (recommended for AI agents)\nget_node({nodeType: "nodes-base.httpRequest"})',
|
||||
'// Minimal for quick metadata check\nget_node({nodeType: "nodes-base.slack", detail: "minimal"})',
|
||||
'// Full detail with examples\nget_node({nodeType: "nodes-base.googleSheets", detail: "full", includeExamples: true})',
|
||||
'// Get readable documentation\nget_node({nodeType: "nodes-base.webhook", mode: "docs"})',
|
||||
'// Search for authentication properties\nget_node({nodeType: "nodes-base.httpRequest", mode: "search_properties", propertyQuery: "auth"})',
|
||||
'// Check version history\nget_node({nodeType: "nodes-base.executeWorkflow", mode: "versions"})',
|
||||
'// Compare specific versions\nget_node({nodeType: "nodes-base.httpRequest", mode: "compare", fromVersion: "3.0", toVersion: "4.1"})'
|
||||
],
|
||||
useCases: [
|
||||
'Configure nodes for workflow building (use detail=standard)',
|
||||
'Find specific configuration options (use mode=search_properties)',
|
||||
'Get human-readable node documentation (use mode=docs)',
|
||||
'Check for breaking changes before version upgrades (use mode=breaking)',
|
||||
'Understand complex types with includeTypeInfo=true'
|
||||
],
|
||||
performance: `Token costs by detail level:
|
||||
- minimal: ~200 tokens
|
||||
- standard: ~1000-2000 tokens (default)
|
||||
- full: ~3000-8000 tokens
|
||||
- includeTypeInfo: +80-120 tokens per property
|
||||
- includeExamples: +200-400 tokens per example
|
||||
- Version modes: ~400-1200 tokens`,
|
||||
bestPractices: [
|
||||
'Start with detail="standard" - it covers 95% of use cases',
|
||||
'Only use detail="full" if standard is missing required properties',
|
||||
'Use mode="docs" when explaining nodes to users',
|
||||
'Combine includeTypeInfo=true for complex nodes (filter, resourceMapper)',
|
||||
'Check version history before configuring versioned nodes'
|
||||
],
|
||||
pitfalls: [
|
||||
'detail="full" returns large responses (~100KB) - use sparingly',
|
||||
'Node type must include prefix (nodes-base. or nodes-langchain.)',
|
||||
'includeExamples only works with mode=info and detail=standard',
|
||||
'Version modes require nodes with multiple versions in database'
|
||||
],
|
||||
relatedTools: ['search_nodes', 'validate_node', 'validate_workflow']
|
||||
}
|
||||
};
|
||||
//# sourceMappingURL=get-node.js.map
1  dist/mcp/tool-docs/configuration/get-node.js.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"get-node.js","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/configuration/get-node.ts"],"names":[],"mappings":";;;AAEa,QAAA,UAAU,GAAsB;IAC3C,IAAI,EAAE,UAAU;IAChB,QAAQ,EAAE,eAAe;IACzB,UAAU,EAAE;QACV,WAAW,EAAE,6IAA6I;QAC1J,aAAa,EAAE,CAAC,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,iBAAiB,EAAE,iBAAiB,CAAC;QACnF,OAAO,EAAE,oEAAoE;QAC7E,WAAW,EAAE,yDAAyD;QACtE,IAAI,EAAE;YACJ,wEAAwE;YACxE,qDAAqD;YACrD,yEAAyE;YACzE,mEAAmE;YACnE,mEAAmE;SACpE;KACF;IACD,IAAI,EAAE;QACJ,WAAW,EAAE;;;;;;;;;;;;4DAY2C;QACxD,UAAU,EAAE;YACV,QAAQ,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,EAAE,WAAW,EAAE,iFAAiF,EAAE;YAC5I,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,qEAAqE,EAAE;YAC/H,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,gHAAgH,EAAE;YACxK,eAAe,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,gGAAgG,EAAE;YACpK,eAAe,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,4FAA4F,EAAE;YAChK,aAAa,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,6FAA6F,EAAE;YAC9J,kBAAkB,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,sDAAsD,EAAE;YAC5H,WAAW,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,qEAAqE,EAAE;YACpI,SAAS,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,oEAAoE,EAAE;SAClI;QACD,OAAO,EAAE;;;;;0DAK6C;QACtD,QAAQ,EAAE;YACR,gGAAgG;YAChG,kGAAkG;YAClG,sHAAsH;YACtH,yFAAyF;YACzF,2IAA2I;YAC3I,gGAAgG;YAChG,qIAAqI;SACtI;QACD,QAAQ,EAAE;YACR,6DAA6D;YAC7D,kEAAkE;YAClE,uDAAuD;YACvD,wEAAwE;YACxE,oDAAoD;SACrD;QACD,WAAW,EAAE;;;;;;kCAMiB;QAC9B,aAAa,EAAE;YACb,2DAA2D;YAC3D,mEAAmE;YACnE,gDAAgD;YAChD,yEAAyE;YACzE,0DAA0D;SAC3D;QACD,QAAQ,EAAE;YACR,gEAAgE;YAChE,iEAAiE;YACjE,+DAA+D;YAC/D,gEAAgE;SACjE;QACD,YAAY,EAAE,CAAC,cAAc,EAAE,eAAe,EAAE,mBAAmB,CAAC;KACrE;CACF,CAAC"}
2  dist/mcp/tool-docs/configuration/index.d.ts  vendored  Normal file
@@ -0,0 +1,2 @@
export { getNodeDoc } from './get-node';
//# sourceMappingURL=index.d.ts.map
1  dist/mcp/tool-docs/configuration/index.d.ts.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/configuration/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,YAAY,CAAC"}
6  dist/mcp/tool-docs/configuration/index.js  vendored  Normal file
@@ -0,0 +1,6 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getNodeDoc = void 0;
|
||||
var get_node_1 = require("./get-node");
|
||||
Object.defineProperty(exports, "getNodeDoc", { enumerable: true, get: function () { return get_node_1.getNodeDoc; } });
|
||||
//# sourceMappingURL=index.js.map
|
||||
1  dist/mcp/tool-docs/configuration/index.js.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/configuration/index.ts"],"names":[],"mappings":";;;AAAA,uCAAwC;AAA/B,sGAAA,UAAU,OAAA"}
2  dist/mcp/tool-docs/discovery/index.d.ts  vendored  Normal file
@@ -0,0 +1,2 @@
export { searchNodesDoc } from './search-nodes';
//# sourceMappingURL=index.d.ts.map
1  dist/mcp/tool-docs/discovery/index.d.ts.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/discovery/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAC"}
6  dist/mcp/tool-docs/discovery/index.js  vendored  Normal file
@@ -0,0 +1,6 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.searchNodesDoc = void 0;
|
||||
var search_nodes_1 = require("./search-nodes");
|
||||
Object.defineProperty(exports, "searchNodesDoc", { enumerable: true, get: function () { return search_nodes_1.searchNodesDoc; } });
|
||||
//# sourceMappingURL=index.js.map
|
||||
1  dist/mcp/tool-docs/discovery/index.js.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/discovery/index.ts"],"names":[],"mappings":";;;AAAA,+CAAgD;AAAvC,8GAAA,cAAc,OAAA"}
3  dist/mcp/tool-docs/discovery/search-nodes.d.ts  vendored  Normal file
@@ -0,0 +1,3 @@
import { ToolDocumentation } from '../types';
export declare const searchNodesDoc: ToolDocumentation;
//# sourceMappingURL=search-nodes.d.ts.map
1  dist/mcp/tool-docs/discovery/search-nodes.d.ts.map  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"search-nodes.d.ts","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/discovery/search-nodes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAE7C,eAAO,MAAM,cAAc,EAAE,iBAiE5B,CAAC"}
70  dist/mcp/tool-docs/discovery/search-nodes.js  vendored  Normal file
@@ -0,0 +1,70 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.searchNodesDoc = void 0;
exports.searchNodesDoc = {
    name: 'search_nodes',
    category: 'discovery',
    essentials: {
        description: 'Text search across node names and descriptions. Returns most relevant nodes first, with frequently-used nodes (HTTP Request, Webhook, Set, Code, Slack) prioritized in results. Searches all 800+ nodes including 300+ verified community nodes.',
        keyParameters: ['query', 'mode', 'limit', 'source', 'includeExamples'],
        example: 'search_nodes({query: "webhook"})',
        performance: '<20ms even for complex queries',
        tips: [
            'OR mode (default): Matches any search word',
            'AND mode: Requires all words present',
            'FUZZY mode: Handles typos and spelling errors',
            'Use quotes for exact phrases: "google sheets"',
            'Use source="community" to search only community nodes',
            'Use source="verified" for verified community nodes only'
        ]
    },
    full: {
        description: 'Full-text search engine for n8n nodes using SQLite FTS5. Searches across node names, descriptions, and aliases. Results are ranked by relevance with commonly-used nodes given priority. Includes 500+ core nodes and 300+ community nodes. Common core nodes include: HTTP Request, Webhook, Set, Code, IF, Switch, Merge, SplitInBatches, Slack, Google Sheets. Community nodes include verified integrations like BrightData, ScrapingBee, CraftMyPDF, and more.',
        parameters: {
            query: { type: 'string', description: 'Search keywords. Use quotes for exact phrases like "google sheets"', required: true },
            limit: { type: 'number', description: 'Maximum results to return. Default: 20, Max: 100', required: false },
            mode: { type: 'string', description: 'Search mode: "OR" (any word matches, default), "AND" (all words required), "FUZZY" (typo-tolerant)', required: false },
            source: { type: 'string', description: 'Filter by node source: "all" (default, everything), "core" (n8n base nodes only), "community" (community nodes only), "verified" (verified community nodes only)', required: false },
            includeExamples: { type: 'boolean', description: 'Include top 2 real-world configuration examples from popular templates for each node. Default: false. Adds ~200-400 tokens per node.', required: false }
        },
        returns: 'Array of node objects sorted by relevance score. Each object contains: nodeType, displayName, description, category, relevance score. For community nodes, also includes: isCommunity (boolean), isVerified (boolean), authorName (string), npmDownloads (number). Common nodes appear first when relevance is similar.',
        examples: [
            'search_nodes({query: "webhook"}) - Returns Webhook node as top result',
            'search_nodes({query: "database"}) - Returns MySQL, Postgres, MongoDB, Redis, etc.',
            'search_nodes({query: "google sheets", mode: "AND"}) - Requires both words',
            'search_nodes({query: "slak", mode: "FUZZY"}) - Finds Slack despite typo',
            'search_nodes({query: "http api"}) - Finds HTTP Request, GraphQL, REST nodes',
            'search_nodes({query: "transform data"}) - Finds Set, Code, Function, Item Lists nodes',
            'search_nodes({query: "scraping", source: "community"}) - Find community scraping nodes',
            'search_nodes({query: "pdf", source: "verified"}) - Find verified community PDF nodes',
            'search_nodes({query: "brightdata"}) - Find BrightData community node',
            'search_nodes({query: "slack", includeExamples: true}) - Get Slack with template examples'
        ],
        useCases: [
            'Finding nodes when you know partial names',
            'Discovering nodes by functionality (e.g., "email", "database", "transform")',
            'Handling user typos in node names',
            'Finding all nodes related to a service (e.g., "google", "aws", "microsoft")',
            'Discovering community integrations for specific services',
            'Finding verified community nodes for enhanced trust'
        ],
        performance: '<20ms for simple queries, <50ms for complex FUZZY searches. Uses FTS5 index for speed',
        bestPractices: [
            'Start with single keywords for broadest results',
            'Use FUZZY mode when users might misspell node names',
            'AND mode works best for 2-3 word searches',
            'Combine with get_node after finding the right node',
            'Use source="verified" when recommending community nodes for production',
            'Check isVerified flag to ensure community node quality'
        ],
        pitfalls: [
            'AND mode searches all fields (name, description) not just node names',
            'FUZZY mode with very short queries (1-2 chars) may return unexpected results',
            'Exact matches in quotes are case-sensitive',
            'Community nodes require npm installation (n8n npm install <package-name>)',
            'Unverified community nodes (isVerified: false) may have limited support'
        ],
        relatedTools: ['get_node to configure found nodes', 'search_templates to find workflow examples', 'validate_node to check configurations']
    }
};
//# sourceMappingURL=search-nodes.js.map
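
For orientation, a minimal TypeScript sketch of how a client might shape arguments for the search_nodes tool documented above. The CallTool signature is a hypothetical stand-in for whatever MCP transport is actually in use (Claude Desktop, the MCP SDK client, etc.); only the argument names come from the documentation itself.

```typescript
// Hypothetical transport: the real MCP client supplies the tool-call mechanism.
type CallTool = (name: string, args: object) => Promise<unknown>;

// Mirrors the parameters documented for search_nodes above.
interface SearchNodesArgs {
  query: string;                                      // required; use quotes for exact phrases
  mode?: "OR" | "AND" | "FUZZY";                      // default: OR
  limit?: number;                                     // default: 20, max: 100
  source?: "all" | "core" | "community" | "verified"; // default: all
  includeExamples?: boolean;                          // adds ~200-400 tokens per node
}

// Typo-tolerant lookup restricted to verified community nodes.
async function findVerifiedPdfNodes(callTool: CallTool): Promise<unknown> {
  const args: SearchNodesArgs = { query: "pdf", mode: "FUZZY", source: "verified", limit: 10 };
  return callTool("search_nodes", args);
}
```

Following up with get_node on the chosen result, as the bestPractices entries above suggest, keeps discovery and configuration in a single pass.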

dist/mcp/tool-docs/discovery/search-nodes.js.map (vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"search-nodes.js","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/discovery/search-nodes.ts"],"names":[],"mappings":";;;AAEa,QAAA,cAAc,GAAsB;IAC/C,IAAI,EAAE,cAAc;IACpB,QAAQ,EAAE,WAAW;IACrB,UAAU,EAAE;QACV,WAAW,EAAE,kPAAkP;QAC/P,aAAa,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,iBAAiB,CAAC;QACtE,OAAO,EAAE,kCAAkC;QAC3C,WAAW,EAAE,gCAAgC;QAC7C,IAAI,EAAE;YACJ,4CAA4C;YAC5C,sCAAsC;YACtC,+CAA+C;YAC/C,+CAA+C;YAC/C,uDAAuD;YACvD,yDAAyD;SAC1D;KACF;IACD,IAAI,EAAE;QACJ,WAAW,EAAE,qcAAqc;QACld,UAAU,EAAE;YACV,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oEAAoE,EAAE,QAAQ,EAAE,IAAI,EAAE;YAC5H,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,kDAAkD,EAAE,QAAQ,EAAE,KAAK,EAAE;YAC3G,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oGAAoG,EAAE,QAAQ,EAAE,KAAK,EAAE;YAC5J,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,kKAAkK,EAAE,QAAQ,EAAE,KAAK,EAAE;YAC5N,eAAe,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,WAAW,EAAE,sIAAsI,EAAE,QAAQ,EAAE,KAAK,EAAE;SAC3M;QACD,OAAO,EAAE,yTAAyT;QAClU,QAAQ,EAAE;YACR,uEAAuE;YACvE,mFAAmF;YACnF,2EAA2E;YAC3E,yEAAyE;YACzE,6EAA6E;YAC7E,uFAAuF;YACvF,wFAAwF;YACxF,sFAAsF;YACtF,sEAAsE;YACtE,0FAA0F;SAC3F;QACD,QAAQ,EAAE;YACR,2CAA2C;YAC3C,6EAA6E;YAC7E,mCAAmC;YACnC,6EAA6E;YAC7E,0DAA0D;YAC1D,qDAAqD;SACtD;QACD,WAAW,EAAE,uFAAuF;QACpG,aAAa,EAAE;YACb,iDAAiD;YACjD,qDAAqD;YACrD,2CAA2C;YAC3C,oDAAoD;YACpD,wEAAwE;YACxE,wDAAwD;SACzD;QACD,QAAQ,EAAE;YACR,sEAAsE;YACtE,8EAA8E;YAC9E,4CAA4C;YAC5C,2EAA2E;YAC3E,yEAAyE;SAC1E;QACD,YAAY,EAAE,CAAC,mCAAmC,EAAE,4CAA4C,EAAE,uCAAuC,CAAC;KAC3I;CACF,CAAC"}

dist/mcp/tool-docs/guides/ai-agents-guide.d.ts (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
import { ToolDocumentation } from '../types';
export declare const aiAgentsGuide: ToolDocumentation;
//# sourceMappingURL=ai-agents-guide.d.ts.map

dist/mcp/tool-docs/guides/ai-agents-guide.d.ts.map (vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"ai-agents-guide.d.ts","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/guides/ai-agents-guide.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAE7C,eAAO,MAAM,aAAa,EAAE,iBA8tB3B,CAAC"}

dist/mcp/tool-docs/guides/ai-agents-guide.js (vendored, new file, 739 lines)
@@ -0,0 +1,739 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.aiAgentsGuide = void 0;
|
||||
exports.aiAgentsGuide = {
|
||||
name: 'ai_agents_guide',
|
||||
category: 'guides',
|
||||
essentials: {
|
||||
description: 'Comprehensive guide to building AI Agent workflows in n8n. Covers architecture, connections, tools, validation, and best practices for production AI systems.',
|
||||
keyParameters: [],
|
||||
example: 'Use tools_documentation({topic: "ai_agents_guide"}) to access this guide',
|
||||
performance: 'N/A - Documentation only',
|
||||
tips: [
|
||||
'Start with Chat Trigger → AI Agent → Language Model pattern',
|
||||
'Always connect language model BEFORE enabling AI Agent',
|
||||
'Use proper toolDescription for all AI tools (15+ characters)',
|
||||
'Validate workflows with n8n_validate_workflow before deployment',
|
||||
'Use includeExamples=true when searching for AI nodes',
|
||||
'Check FINAL_AI_VALIDATION_SPEC.md for detailed requirements'
|
||||
]
|
||||
},
|
||||
full: {
|
||||
description: `# Complete Guide to AI Agents in n8n
|
||||
|
||||
This comprehensive guide covers everything you need to build production-ready AI Agent workflows in n8n.
|
||||
|
||||
## Table of Contents
|
||||
1. [AI Agent Architecture](#architecture)
|
||||
2. [Essential Connection Types](#connections)
|
||||
3. [Building Your First AI Agent](#first-agent)
|
||||
4. [AI Tools Deep Dive](#tools)
|
||||
5. [Advanced Patterns](#advanced)
|
||||
6. [Validation & Best Practices](#validation)
|
||||
7. [Troubleshooting](#troubleshooting)
|
||||
|
||||
---
|
||||
|
||||
## 1. AI Agent Architecture {#architecture}
|
||||
|
||||
### Core Components
|
||||
|
||||
An n8n AI Agent workflow typically consists of:
|
||||
|
||||
1. **Chat Trigger**: Entry point for user interactions
|
||||
- Webhook-based or manual trigger
|
||||
- Supports streaming responses (responseMode)
|
||||
- Passes user message to AI Agent
|
||||
|
||||
2. **AI Agent**: The orchestrator
|
||||
- Manages conversation flow
|
||||
- Decides when to use tools
|
||||
- Iterates until task is complete
|
||||
- Supports fallback models for reliability
|
||||
|
||||
3. **Language Model**: The AI brain
|
||||
- OpenAI GPT-4, Claude, Gemini, etc.
|
||||
- Connected via ai_languageModel port
|
||||
- Can have primary + fallback for reliability
|
||||
|
||||
4. **Tools**: AI Agent's capabilities
|
||||
- HTTP Request, Code, Vector Store, etc.
|
||||
- Connected via ai_tool port
|
||||
- Each tool needs clear toolDescription
|
||||
|
||||
5. **Optional Components**:
|
||||
- Memory (conversation history)
|
||||
- Output Parser (structured responses)
|
||||
- Vector Store (knowledge retrieval)
|
||||
|
||||
### Connection Flow
|
||||
|
||||
**CRITICAL**: AI connections flow TO the consumer (reversed from standard n8n):
|
||||
|
||||
\`\`\`
|
||||
Standard n8n: [Source] --main--> [Target]
|
||||
AI pattern: [Language Model] --ai_languageModel--> [AI Agent]
|
||||
[HTTP Tool] --ai_tool--> [AI Agent]
|
||||
\`\`\`
|
||||
|
||||
This is why you use \`sourceOutput: "ai_languageModel"\` when connecting components.
|
||||
|
||||
---
|
||||
|
||||
## 2. Essential Connection Types {#connections}
|
||||
|
||||
### The 8 AI Connection Types
|
||||
|
||||
1. **ai_languageModel**
|
||||
- FROM: OpenAI Chat Model, Anthropic, Google Gemini, etc.
|
||||
- TO: AI Agent, Basic LLM Chain
|
||||
- REQUIRED: Every AI Agent needs 1-2 language models
|
||||
- Example: \`{type: "addConnection", source: "OpenAI", target: "AI Agent", sourceOutput: "ai_languageModel"}\`
|
||||
|
||||
2. **ai_tool**
|
||||
- FROM: Any tool node (HTTP Request Tool, Code Tool, etc.)
|
||||
- TO: AI Agent
|
||||
- REQUIRED: At least 1 tool recommended
|
||||
- Example: \`{type: "addConnection", source: "HTTP Request Tool", target: "AI Agent", sourceOutput: "ai_tool"}\`
|
||||
|
||||
3. **ai_memory**
|
||||
- FROM: Window Buffer Memory, Conversation Summary, etc.
|
||||
- TO: AI Agent
|
||||
- OPTIONAL: 0-1 memory system
|
||||
- Enables conversation history tracking
|
||||
|
||||
4. **ai_outputParser**
|
||||
- FROM: Structured Output Parser, JSON Parser, etc.
|
||||
- TO: AI Agent
|
||||
- OPTIONAL: For structured responses
|
||||
- Must set hasOutputParser=true on AI Agent
|
||||
|
||||
5. **ai_embedding**
|
||||
- FROM: Embeddings OpenAI, Embeddings Google, etc.
|
||||
- TO: Vector Store (Pinecone, In-Memory, etc.)
|
||||
- REQUIRED: For vector-based retrieval
|
||||
|
||||
6. **ai_vectorStore**
|
||||
- FROM: Vector Store node
|
||||
- TO: Vector Store Tool
|
||||
- REQUIRED: For retrieval-augmented generation (RAG)
|
||||
|
||||
7. **ai_document**
|
||||
- FROM: Document Loader, Default Data Loader
|
||||
- TO: Vector Store
|
||||
- REQUIRED: Provides data for vector storage
|
||||
|
||||
8. **ai_textSplitter**
|
||||
- FROM: Text Splitter nodes
|
||||
- TO: Document processing chains
|
||||
- OPTIONAL: Chunk large documents
|
||||
|
||||
### Connection Examples
|
||||
|
||||
\`\`\`typescript
|
||||
// Basic AI Agent setup
|
||||
n8n_update_partial_workflow({
|
||||
id: "workflow_id",
|
||||
operations: [
|
||||
// Connect language model (REQUIRED)
|
||||
{
|
||||
type: "addConnection",
|
||||
source: "OpenAI Chat Model",
|
||||
target: "AI Agent",
|
||||
sourceOutput: "ai_languageModel"
|
||||
},
|
||||
// Connect tools
|
||||
{
|
||||
type: "addConnection",
|
||||
source: "HTTP Request Tool",
|
||||
target: "AI Agent",
|
||||
sourceOutput: "ai_tool"
|
||||
},
|
||||
{
|
||||
type: "addConnection",
|
||||
source: "Code Tool",
|
||||
target: "AI Agent",
|
||||
sourceOutput: "ai_tool"
|
||||
},
|
||||
// Add memory (optional)
|
||||
{
|
||||
type: "addConnection",
|
||||
source: "Window Buffer Memory",
|
||||
target: "AI Agent",
|
||||
sourceOutput: "ai_memory"
|
||||
}
|
||||
]
|
||||
})
|
||||
\`\`\`
|
||||
|
||||
---
|
||||
|
||||
## 3. Building Your First AI Agent {#first-agent}
|
||||
|
||||
### Step-by-Step Tutorial
|
||||
|
||||
#### Step 1: Create Chat Trigger
|
||||
|
||||
Use \`n8n_create_workflow\` or manually create a workflow with:
|
||||
|
||||
\`\`\`typescript
|
||||
{
|
||||
name: "My First AI Agent",
|
||||
nodes: [
|
||||
{
|
||||
id: "chat_trigger",
|
||||
name: "Chat Trigger",
|
||||
type: "@n8n/n8n-nodes-langchain.chatTrigger",
|
||||
position: [100, 100],
|
||||
parameters: {
|
||||
options: {
|
||||
responseMode: "lastNode" // or "streaming" for real-time
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
connections: {}
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
#### Step 2: Add Language Model
|
||||
|
||||
\`\`\`typescript
|
||||
n8n_update_partial_workflow({
|
||||
id: "workflow_id",
|
||||
operations: [
|
||||
{
|
||||
type: "addNode",
|
||||
node: {
|
||||
name: "OpenAI Chat Model",
|
||||
type: "@n8n/n8n-nodes-langchain.lmChatOpenAi",
|
||||
position: [300, 50],
|
||||
parameters: {
|
||||
model: "gpt-4",
|
||||
temperature: 0.7
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
})
|
||||
\`\`\`
|
||||
|
||||
#### Step 3: Add AI Agent
|
||||
|
||||
\`\`\`typescript
|
||||
n8n_update_partial_workflow({
|
||||
id: "workflow_id",
|
||||
operations: [
|
||||
{
|
||||
type: "addNode",
|
||||
node: {
|
||||
name: "AI Agent",
|
||||
type: "@n8n/n8n-nodes-langchain.agent",
|
||||
position: [300, 150],
|
||||
parameters: {
|
||||
promptType: "auto",
|
||||
systemMessage: "You are a helpful assistant. Be concise and accurate."
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
})
|
||||
\`\`\`
|
||||
|
||||
#### Step 4: Connect Components
|
||||
|
||||
\`\`\`typescript
|
||||
n8n_update_partial_workflow({
|
||||
id: "workflow_id",
|
||||
operations: [
|
||||
// Chat Trigger → AI Agent (main connection)
|
||||
{
|
||||
type: "addConnection",
|
||||
source: "Chat Trigger",
|
||||
target: "AI Agent"
|
||||
},
|
||||
// Language Model → AI Agent (AI connection)
|
||||
{
|
||||
type: "addConnection",
|
||||
source: "OpenAI Chat Model",
|
||||
target: "AI Agent",
|
||||
sourceOutput: "ai_languageModel"
|
||||
}
|
||||
]
|
||||
})
|
||||
\`\`\`
|
||||
|
||||
#### Step 5: Validate
|
||||
|
||||
\`\`\`typescript
|
||||
n8n_validate_workflow({id: "workflow_id"})
|
||||
\`\`\`
|
||||
|
||||
---
|
||||
|
||||
## 4. AI Tools Deep Dive {#tools}
|
||||
|
||||
### Tool Types and When to Use Them
|
||||
|
||||
#### 1. HTTP Request Tool
|
||||
**Use when**: AI needs to call external APIs
|
||||
|
||||
**Critical Requirements**:
|
||||
- \`toolDescription\`: Clear, 15+ character description
|
||||
- \`url\`: API endpoint (can include placeholders)
|
||||
- \`placeholderDefinitions\`: Define all {placeholders}
|
||||
- Proper authentication if needed
|
||||
|
||||
**Example**:
|
||||
\`\`\`typescript
|
||||
{
|
||||
type: "addNode",
|
||||
node: {
|
||||
name: "GitHub Issues Tool",
|
||||
type: "@n8n/n8n-nodes-langchain.toolHttpRequest",
|
||||
position: [500, 100],
|
||||
parameters: {
|
||||
method: "POST",
|
||||
url: "https://api.github.com/repos/{owner}/{repo}/issues",
|
||||
toolDescription: "Create GitHub issues. Requires owner (username), repo (repository name), title, and body.",
|
||||
placeholderDefinitions: {
|
||||
values: [
|
||||
{name: "owner", description: "Repository owner username"},
|
||||
{name: "repo", description: "Repository name"},
|
||||
{name: "title", description: "Issue title"},
|
||||
{name: "body", description: "Issue description"}
|
||||
]
|
||||
},
|
||||
sendBody: true,
|
||||
jsonBody: "={{ { title: $json.title, body: $json.body } }}"
|
||||
}
|
||||
}
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
#### 2. Code Tool
|
||||
**Use when**: AI needs to run custom logic
|
||||
|
||||
**Critical Requirements**:
|
||||
- \`name\`: Function name (alphanumeric + underscore)
|
||||
- \`description\`: 10+ character explanation
|
||||
- \`code\`: JavaScript or Python code
|
||||
- \`inputSchema\`: Define expected inputs (recommended)
|
||||
|
||||
**Example**:
|
||||
\`\`\`typescript
|
||||
{
|
||||
type: "addNode",
|
||||
node: {
|
||||
name: "Calculate Shipping",
|
||||
type: "@n8n/n8n-nodes-langchain.toolCode",
|
||||
position: [500, 200],
|
||||
parameters: {
|
||||
name: "calculate_shipping",
|
||||
description: "Calculate shipping cost based on weight (kg) and distance (km)",
|
||||
language: "javaScript",
|
||||
code: "const cost = 5 + ($input.weight * 2) + ($input.distance * 0.1); return { cost };",
|
||||
specifyInputSchema: true,
|
||||
inputSchema: "{ \\"type\\": \\"object\\", \\"properties\\": { \\"weight\\": { \\"type\\": \\"number\\" }, \\"distance\\": { \\"type\\": \\"number\\" } } }"
|
||||
}
|
||||
}
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
#### 3. Vector Store Tool
|
||||
**Use when**: AI needs to search knowledge base
|
||||
|
||||
**Setup**: Requires Vector Store + Embeddings + Documents
|
||||
|
||||
**Example**:
|
||||
\`\`\`typescript
|
||||
// Step 1: Create Vector Store with embeddings and documents
|
||||
n8n_update_partial_workflow({
|
||||
operations: [
|
||||
{type: "addConnection", source: "Embeddings OpenAI", target: "Pinecone", sourceOutput: "ai_embedding"},
|
||||
{type: "addConnection", source: "Document Loader", target: "Pinecone", sourceOutput: "ai_document"}
|
||||
]
|
||||
})
|
||||
|
||||
// Step 2: Connect Vector Store to Vector Store Tool
|
||||
n8n_update_partial_workflow({
|
||||
operations: [
|
||||
{type: "addConnection", source: "Pinecone", target: "Vector Store Tool", sourceOutput: "ai_vectorStore"}
|
||||
]
|
||||
})
|
||||
|
||||
// Step 3: Connect tool to AI Agent
|
||||
n8n_update_partial_workflow({
|
||||
operations: [
|
||||
{type: "addConnection", source: "Vector Store Tool", target: "AI Agent", sourceOutput: "ai_tool"}
|
||||
]
|
||||
})
|
||||
\`\`\`
|
||||
|
||||
#### 4. AI Agent Tool (Sub-Agents)
|
||||
**Use when**: Need specialized expertise
|
||||
|
||||
**Example**: Research specialist sub-agent
|
||||
\`\`\`typescript
|
||||
{
|
||||
type: "addNode",
|
||||
node: {
|
||||
name: "Research Specialist",
|
||||
type: "@n8n/n8n-nodes-langchain.agentTool",
|
||||
position: [500, 300],
|
||||
parameters: {
|
||||
name: "research_specialist",
|
||||
description: "Expert researcher that searches multiple sources and synthesizes information. Use for detailed research tasks.",
|
||||
systemMessage: "You are a research specialist. Search thoroughly, cite sources, and provide comprehensive analysis."
|
||||
}
|
||||
}
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
#### 5. MCP Client Tool
|
||||
**Use when**: Need to use Model Context Protocol servers
|
||||
|
||||
**Example**: Filesystem access
|
||||
\`\`\`typescript
|
||||
{
|
||||
type: "addNode",
|
||||
node: {
|
||||
name: "Filesystem Tool",
|
||||
type: "@n8n/n8n-nodes-langchain.mcpClientTool",
|
||||
position: [500, 400],
|
||||
parameters: {
|
||||
description: "Access file system to read files, list directories, and search content",
|
||||
mcpServer: {
|
||||
transport: "stdio",
|
||||
command: "npx",
|
||||
args: ["-y", "@modelcontextprotocol/server-filesystem", "/allowed/path"]
|
||||
},
|
||||
tool: "read_file"
|
||||
}
|
||||
}
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
---
|
||||
|
||||
## 5. Advanced Patterns {#advanced}
|
||||
|
||||
### Pattern 1: Streaming Responses
|
||||
|
||||
For real-time user experience:
|
||||
|
||||
\`\`\`typescript
|
||||
// Set Chat Trigger to streaming mode
|
||||
{
|
||||
parameters: {
|
||||
options: {
|
||||
responseMode: "streaming"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// CRITICAL: AI Agent must NOT have main output connections in streaming mode
|
||||
// Responses stream back through Chat Trigger automatically
|
||||
\`\`\`
|
||||
|
||||
**Validation will fail if**:
|
||||
- Chat Trigger has streaming but target is not AI Agent
|
||||
- AI Agent in streaming mode has main output connections
|
||||
|
||||
### Pattern 2: Fallback Language Models
|
||||
|
||||
For production reliability, configure a primary model plus a fallback:
|
||||
|
||||
\`\`\`typescript
|
||||
n8n_update_partial_workflow({
|
||||
operations: [
|
||||
// Primary model
|
||||
{
|
||||
type: "addConnection",
|
||||
source: "OpenAI GPT-4",
|
||||
target: "AI Agent",
|
||||
sourceOutput: "ai_languageModel",
|
||||
targetIndex: 0
|
||||
},
|
||||
// Fallback model
|
||||
{
|
||||
type: "addConnection",
|
||||
source: "Anthropic Claude",
|
||||
target: "AI Agent",
|
||||
sourceOutput: "ai_languageModel",
|
||||
targetIndex: 1
|
||||
}
|
||||
]
|
||||
})
|
||||
|
||||
// Enable fallback on AI Agent
|
||||
{
|
||||
type: "updateNode",
|
||||
nodeName: "AI Agent",
|
||||
updates: {
|
||||
"parameters.needsFallback": true
|
||||
}
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
### Pattern 3: RAG (Retrieval-Augmented Generation)
|
||||
|
||||
Complete knowledge base setup:
|
||||
|
||||
\`\`\`typescript
|
||||
// 1. Load documents
|
||||
{type: "addConnection", source: "PDF Loader", target: "Text Splitter", sourceOutput: "ai_document"}
|
||||
|
||||
// 2. Split and embed
|
||||
{type: "addConnection", source: "Text Splitter", target: "Vector Store"}
|
||||
{type: "addConnection", source: "Embeddings", target: "Vector Store", sourceOutput: "ai_embedding"}
|
||||
|
||||
// 3. Create search tool
|
||||
{type: "addConnection", source: "Vector Store", target: "Vector Store Tool", sourceOutput: "ai_vectorStore"}
|
||||
|
||||
// 4. Give tool to agent
|
||||
{type: "addConnection", source: "Vector Store Tool", target: "AI Agent", sourceOutput: "ai_tool"}
|
||||
\`\`\`
|
||||
|
||||
### Pattern 4: Multi-Agent Systems
|
||||
|
||||
Specialized sub-agents for complex tasks:
|
||||
|
||||
\`\`\`typescript
// Create sub-agents with specific expertise
const subAgents = [
  {name: "research_agent", description: "Deep research specialist"},
  {name: "data_analyst", description: "Data analysis expert"},
  {name: "writer_agent", description: "Content writing specialist"}
];

// Attach each sub-agent as an AI Agent Tool on the main coordinator agent
n8n_update_partial_workflow({
  id: "workflow_id",
  operations: subAgents.map(agent => ({
    type: "addConnection",
    source: agent.name,
    target: "Coordinator Agent",
    sourceOutput: "ai_tool"
  }))
})
\`\`\`
|
||||
|
||||
---
|
||||
|
||||
## 6. Validation & Best Practices {#validation}
|
||||
|
||||
### Always Validate Before Deployment
|
||||
|
||||
\`\`\`typescript
|
||||
const result = n8n_validate_workflow({id: "workflow_id"})
|
||||
|
||||
if (!result.valid) {
|
||||
console.log("Errors:", result.errors)
|
||||
console.log("Warnings:", result.warnings)
|
||||
console.log("Suggestions:", result.suggestions)
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
### Common Validation Errors
|
||||
|
||||
1. **MISSING_LANGUAGE_MODEL**
|
||||
- Problem: AI Agent has no ai_languageModel connection
|
||||
- Fix: Connect a language model before creating AI Agent
|
||||
|
||||
2. **MISSING_TOOL_DESCRIPTION**
|
||||
- Problem: HTTP Request Tool has no toolDescription
|
||||
- Fix: Add clear description (15+ characters)
|
||||
|
||||
3. **STREAMING_WITH_MAIN_OUTPUT**
|
||||
- Problem: AI Agent in streaming mode has outgoing main connections
|
||||
- Fix: Remove main connections when using streaming
|
||||
|
||||
4. **FALLBACK_MISSING_SECOND_MODEL**
|
||||
- Problem: needsFallback=true but only 1 language model
|
||||
- Fix: Add second language model or disable needsFallback
|
||||
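
The error codes above can also be checked programmatically after running n8n_validate_workflow. A minimal sketch, assuming each entry in result.errors carries a machine-readable code field (the exact result shape may differ between n8n-mcp versions):

\`\`\`typescript
const result = n8n_validate_workflow({id: "workflow_id"})

// Assumption: error entries expose a code string matching the list above.
for (const error of result.errors ?? []) {
  switch (error.code) {
    case "MISSING_LANGUAGE_MODEL":
      // Fix: addConnection with sourceOutput: "ai_languageModel"
      break;
    case "MISSING_TOOL_DESCRIPTION":
      // Fix: updateNode setting a toolDescription of 15+ characters
      break;
    case "STREAMING_WITH_MAIN_OUTPUT":
      // Fix: removeConnection for the AI Agent's main outputs
      break;
    case "FALLBACK_MISSING_SECOND_MODEL":
      // Fix: add a second language model or set needsFallback to false
      break;
  }
}
\`\`\`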
|
||||
### Best Practices Checklist
|
||||
|
||||
✅ **Before Creating AI Agent**:
|
||||
- [ ] Language model is connected first
|
||||
- [ ] At least one tool is prepared (or will be added)
|
||||
- [ ] System message is thoughtful and specific
|
||||
|
||||
✅ **For Each Tool**:
|
||||
- [ ] Has toolDescription/description (15+ characters)
|
||||
- [ ] toolDescription explains WHEN to use the tool
|
||||
- [ ] All required parameters are configured
|
||||
- [ ] Credentials are set up if needed
|
||||
|
||||
✅ **For Production**:
|
||||
- [ ] Workflow validated with n8n_validate_workflow
|
||||
- [ ] Tested with real user queries
|
||||
- [ ] Fallback model configured for reliability
|
||||
- [ ] Error handling in place
|
||||
- [ ] maxIterations set appropriately (default 10, max 50)
|
||||
|
||||
---
|
||||
|
||||
## 7. Troubleshooting {#troubleshooting}
|
||||
|
||||
### Problem: "AI Agent has no language model"
|
||||
|
||||
**Cause**: Connection created AFTER AI Agent or using wrong sourceOutput
|
||||
|
||||
**Solution**:
|
||||
\`\`\`typescript
|
||||
n8n_update_partial_workflow({
|
||||
operations: [
|
||||
{
|
||||
type: "addConnection",
|
||||
source: "OpenAI Chat Model",
|
||||
target: "AI Agent",
|
||||
sourceOutput: "ai_languageModel" // ← CRITICAL
|
||||
}
|
||||
]
|
||||
})
|
||||
\`\`\`
|
||||
|
||||
### Problem: "Tool has no description"
|
||||
|
||||
**Cause**: HTTP Request Tool or Code Tool missing toolDescription/description
|
||||
|
||||
**Solution**:
|
||||
\`\`\`typescript
|
||||
{
|
||||
type: "updateNode",
|
||||
nodeName: "HTTP Request Tool",
|
||||
updates: {
|
||||
"parameters.toolDescription": "Call weather API to get current conditions for a city"
|
||||
}
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
### Problem: "Streaming mode not working"
|
||||
|
||||
**Causes**:
|
||||
1. Chat Trigger not set to streaming
|
||||
2. AI Agent has main output connections
|
||||
3. Target of Chat Trigger is not AI Agent
|
||||
|
||||
**Solution**:
|
||||
\`\`\`typescript
|
||||
// 1. Set Chat Trigger to streaming
|
||||
{
|
||||
type: "updateNode",
|
||||
nodeName: "Chat Trigger",
|
||||
updates: {
|
||||
"parameters.options.responseMode": "streaming"
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Remove AI Agent main outputs
|
||||
{
|
||||
type: "removeConnection",
|
||||
source: "AI Agent",
|
||||
target: "Any Output Node"
|
||||
}
|
||||
\`\`\`
|
||||
|
||||
### Problem: "Agent keeps looping"
|
||||
|
||||
**Cause**: Tool not returning proper response or agent stuck in reasoning loop
|
||||
|
||||
**Solutions**:
|
||||
1. Set maxIterations lower: \`"parameters.maxIterations": 5\`
|
||||
2. Improve tool descriptions to be more specific
|
||||
3. Add system message guidance: "Use tools efficiently, don't repeat actions"
|
||||
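
A minimal sketch of applying the first and third fixes with the same partial-update operations used throughout this guide (the node name "AI Agent" and the wording of the system message are placeholders):

\`\`\`typescript
n8n_update_partial_workflow({
  id: "workflow_id",
  operations: [
    // Cap the reasoning loop
    {
      type: "updateNode",
      nodeName: "AI Agent",
      updates: {
        "parameters.maxIterations": 5
      }
    },
    // Nudge the agent away from repeated tool calls
    {
      type: "updateNode",
      nodeName: "AI Agent",
      updates: {
        "parameters.systemMessage": "You are a helpful assistant. Use tools efficiently and do not repeat actions."
      }
    }
  ]
})
\`\`\`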
|
||||
---
|
||||
|
||||
## Quick Reference
|
||||
|
||||
### Essential Tools
|
||||
|
||||
| Tool | Purpose | Key Parameters |
|
||||
|------|---------|----------------|
|
||||
| HTTP Request Tool | API calls | toolDescription, url, placeholders |
|
||||
| Code Tool | Custom logic | name, description, code, inputSchema |
|
||||
| Vector Store Tool | Knowledge search | description, topK |
|
||||
| AI Agent Tool | Sub-agents | name, description, systemMessage |
|
||||
| MCP Client Tool | MCP protocol | description, mcpServer, tool |
|
||||
|
||||
### Connection Quick Codes
|
||||
|
||||
\`\`\`typescript
|
||||
// Language Model → AI Agent
|
||||
sourceOutput: "ai_languageModel"
|
||||
|
||||
// Tool → AI Agent
|
||||
sourceOutput: "ai_tool"
|
||||
|
||||
// Memory → AI Agent
|
||||
sourceOutput: "ai_memory"
|
||||
|
||||
// Parser → AI Agent
|
||||
sourceOutput: "ai_outputParser"
|
||||
|
||||
// Embeddings → Vector Store
|
||||
sourceOutput: "ai_embedding"
|
||||
|
||||
// Vector Store → Vector Store Tool
|
||||
sourceOutput: "ai_vectorStore"
|
||||
\`\`\`
|
||||
|
||||
### Validation Command
|
||||
|
||||
\`\`\`typescript
|
||||
n8n_validate_workflow({id: "workflow_id"})
|
||||
\`\`\`
|
||||
|
||||
---
|
||||
|
||||
## Related Resources
|
||||
|
||||
- **FINAL_AI_VALIDATION_SPEC.md**: Complete validation rules
|
||||
- **n8n_update_partial_workflow**: Workflow modification tool
|
||||
- **search_nodes({query: "AI", includeExamples: true})**: Find AI nodes with examples
|
||||
- **get_node({nodeType: "...", detail: "standard", includeExamples: true})**: Node details with examples
|
||||
|
||||
---
|
||||
|
||||
*This guide is part of the n8n-mcp documentation system. For questions or issues, refer to the validation spec or use tools_documentation() for specific topics.*`,
|
||||
parameters: {},
|
||||
returns: 'Complete AI Agents guide with architecture, patterns, validation, and troubleshooting',
|
||||
examples: [
|
||||
'tools_documentation({topic: "ai_agents_guide"}) - Full guide',
|
||||
'tools_documentation({topic: "ai_agents_guide", depth: "essentials"}) - Quick reference',
|
||||
'When user asks about AI Agents, Chat Trigger, or building AI workflows → Point to this guide'
|
||||
],
|
||||
useCases: [
|
||||
'Learning AI Agent architecture in n8n',
|
||||
'Understanding AI connection types and patterns',
|
||||
'Building first AI Agent workflow step-by-step',
|
||||
'Implementing advanced patterns (streaming, fallback, RAG, multi-agent)',
|
||||
'Troubleshooting AI workflow issues',
|
||||
'Validating AI workflows before deployment',
|
||||
'Quick reference for connection types and tools'
|
||||
],
|
||||
performance: 'N/A - Static documentation',
|
||||
bestPractices: [
|
||||
'Reference this guide when users ask about AI Agents',
|
||||
'Point to specific sections based on user needs',
|
||||
'Combine with search_nodes(includeExamples=true) for working examples',
|
||||
'Validate workflows after following guide instructions',
|
||||
'Use FINAL_AI_VALIDATION_SPEC.md for detailed requirements'
|
||||
],
|
||||
pitfalls: [
|
||||
'This is a guide, not an executable tool',
|
||||
'Always validate workflows after making changes',
|
||||
'AI connections require sourceOutput parameter',
|
||||
'Streaming mode has specific constraints',
|
||||
'Fallback models require AI Agent node with fallback support'
|
||||
],
|
||||
relatedTools: [
|
||||
'n8n_create_workflow',
|
||||
'n8n_update_partial_workflow',
|
||||
'n8n_validate_workflow',
|
||||
'search_nodes',
|
||||
'get_node'
|
||||
]
|
||||
}
|
||||
};
|
||||
//# sourceMappingURL=ai-agents-guide.js.map

dist/mcp/tool-docs/guides/ai-agents-guide.js.map (vendored, new file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"ai-agents-guide.js","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/guides/ai-agents-guide.ts"],"names":[],"mappings":";;;AAEa,QAAA,aAAa,GAAsB;IAC9C,IAAI,EAAE,iBAAiB;IACvB,QAAQ,EAAE,QAAQ;IAClB,UAAU,EAAE;QACV,WAAW,EAAE,+JAA+J;QAC5K,aAAa,EAAE,EAAE;QACjB,OAAO,EAAE,0EAA0E;QACnF,WAAW,EAAE,0BAA0B;QACvC,IAAI,EAAE;YACJ,6DAA6D;YAC7D,wDAAwD;YACxD,8DAA8D;YAC9D,iEAAiE;YACjE,sDAAsD;YACtD,6DAA6D;SAC9D;KACF;IACD,IAAI,EAAE;QACJ,WAAW,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;kKAoqBiJ;QAC9J,UAAU,EAAE,EAAE;QACd,OAAO,EAAE,uFAAuF;QAChG,QAAQ,EAAE;YACR,8DAA8D;YAC9D,wFAAwF;YACxF,8FAA8F;SAC/F;QACD,QAAQ,EAAE;YACR,uCAAuC;YACvC,gDAAgD;YAChD,+CAA+C;YAC/C,wEAAwE;YACxE,oCAAoC;YACpC,2CAA2C;YAC3C,gDAAgD;SACjD;QACD,WAAW,EAAE,4BAA4B;QACzC,aAAa,EAAE;YACb,qDAAqD;YACrD,gDAAgD;YAChD,sEAAsE;YACtE,uDAAuD;YACvD,2DAA2D;SAC5D;QACD,QAAQ,EAAE;YACR,yCAAyC;YACzC,gDAAgD;YAChD,+CAA+C;YAC/C,yCAAyC;YACzC,6DAA6D;SAC9D;QACD,YAAY,EAAE;YACZ,qBAAqB;YACrB,6BAA6B;YAC7B,uBAAuB;YACvB,cAAc;YACd,UAAU;SACX;KACF;CACF,CAAC"}

Some files were not shown because too many files have changed in this diff.