Mirror of https://github.com/czlonkowski/n8n-mcp.git, synced 2026-02-06 13:33:11 +00:00

Compare commits: v2.33.6...feat/cnd-o (4 commits)

| Author | SHA1 | Date |
|---|---|---|
| | a8a5145b15 | |
| | 5459107dcf | |
| | fa89d2f18e | |
| | 68bc5744dc | |
@@ -37,11 +37,9 @@ MCP_SERVER_HOST=localhost
# Server mode: stdio (local) or http (remote)
MCP_MODE=stdio

# DEPRECATED: USE_FIXED_HTTP is deprecated as of v2.31.8
# The fixed HTTP implementation does not support SSE streaming required by
# clients like OpenAI Codex. Use the default SingleSessionHTTPServer instead.
# See: https://github.com/czlonkowski/n8n-mcp/issues/524
# USE_FIXED_HTTP=true # DO NOT USE - deprecated
# Use fixed HTTP implementation (recommended for stability)
# Set to true to bypass StreamableHTTPServerTransport issues
USE_FIXED_HTTP=true

# HTTP Server Configuration (only used when MCP_MODE=http)
PORT=3000
30 .github/workflows/docker-build.yml (vendored)
@@ -53,24 +53,13 @@ jobs:
permissions:
contents: read
packages: write

steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
lfs: true

- name: Sync runtime version
run: |
VERSION=$(node -p "require('./package.json').version")
node -e "
const fs = require('fs');
const pkg = JSON.parse(fs.readFileSync('package.runtime.json'));
pkg.version = '$VERSION';
fs.writeFileSync('package.runtime.json', JSON.stringify(pkg, null, 2) + '\n');
"
echo "✅ Synced package.runtime.json to version $VERSION"

- name: Set up QEMU
uses: docker/setup-qemu-action@v3
@@ -155,24 +144,13 @@ jobs:
permissions:
contents: read
packages: write

steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
lfs: true

- name: Sync runtime version
run: |
VERSION=$(node -p "require('./package.json').version")
node -e "
const fs = require('fs');
const pkg = JSON.parse(fs.readFileSync('package.runtime.json'));
pkg.version = '$VERSION';
fs.writeFileSync('package.runtime.json', JSON.stringify(pkg, null, 2) + '\n');
"
echo "✅ Synced package.runtime.json to version $VERSION"

- name: Set up QEMU
uses: docker/setup-qemu-action@v3
13 .github/workflows/release.yml (vendored)
@@ -427,18 +427,7 @@ jobs:
exit 1
fi
echo "✅ Sufficient disk space: ${AVAILABLE_GB}GB available"

- name: Sync runtime version for Docker
run: |
VERSION=$(node -p "require('./package.json').version")
node -e "
const fs = require('fs');
const pkg = JSON.parse(fs.readFileSync('package.runtime.json'));
pkg.version = '$VERSION';
fs.writeFileSync('package.runtime.json', JSON.stringify(pkg, null, 2) + '\n');
"
echo "✅ Synced package.runtime.json to version $VERSION"

- name: Set up QEMU
uses: docker/setup-qemu-action@v3
2 .github/workflows/test.yml (vendored)
@@ -40,7 +40,7 @@ permissions:
jobs:
test:
runs-on: ubuntu-latest
timeout-minutes: 15 # Increased from 10 to accommodate larger database with community nodes
timeout-minutes: 10 # Add a 10-minute timeout to prevent hanging
steps:
- uses: actions/checkout@v4
7117 CHANGELOG.md
File diff suppressed because it is too large
@@ -14,7 +14,7 @@ RUN --mount=type=cache,target=/root/.npm \
echo '{}' > package.json && \
npm install --no-save typescript@^5.8.3 @types/node@^22.15.30 @types/express@^5.0.3 \
@modelcontextprotocol/sdk@1.20.1 dotenv@^16.5.0 express@^5.1.0 axios@^1.10.0 \
n8n-workflow@^2.4.2 uuid@^11.0.5 @types/uuid@^10.0.0 \
n8n-workflow@^1.96.0 uuid@^11.0.5 @types/uuid@^10.0.0 \
openai@^4.77.0 zod@3.24.1 lru-cache@^11.2.1 @supabase/supabase-js@^2.57.4

# Copy source and build
@@ -74,8 +74,7 @@ ENV AUTH_TOKEN="REPLACE_THIS_AUTH_TOKEN_32_CHARS_MIN_abcdefgh"
ENV NODE_ENV=production
ENV IS_DOCKER=true
ENV MCP_MODE=http
# NOTE: USE_FIXED_HTTP is deprecated. SingleSessionHTTPServer is now the default.
# See: https://github.com/czlonkowski/n8n-mcp/issues/524
ENV USE_FIXED_HTTP=true
ENV LOG_LEVEL=info
ENV TRUST_PROXY=1
ENV HOST=0.0.0.0
36 README.md
@@ -5,24 +5,23 @@
[npm](https://www.npmjs.com/package/n8n-mcp)
[codecov](https://codecov.io/gh/czlonkowski/n8n-mcp)
[CI](https://github.com/czlonkowski/n8n-mcp/actions)
[n8n version](https://github.com/n8n-io/n8n)
[n8n version](https://github.com/n8n-io/n8n)
[Docker](https://github.com/czlonkowski/n8n-mcp/pkgs/container/n8n-mcp)
[Deploy on Railway](https://railway.com/deploy/n8n-mcp?referralCode=n8n-mcp)

A Model Context Protocol (MCP) server that provides AI assistants with comprehensive access to n8n node documentation, properties, and operations. Deploy in minutes to give Claude and other AI assistants deep knowledge about n8n's 1,084 workflow automation nodes (537 core + 547 community).
A Model Context Protocol (MCP) server that provides AI assistants with comprehensive access to n8n node documentation, properties, and operations. Deploy in minutes to give Claude and other AI assistants deep knowledge about n8n's 545 workflow automation nodes.

## Overview

n8n-MCP serves as a bridge between n8n's workflow automation platform and AI models, enabling them to understand and work with n8n nodes effectively. It provides structured access to:

- 📚 **1,084 n8n nodes** - 537 core nodes + 547 community nodes (301 verified)
- 📚 **543 n8n nodes** from both n8n-nodes-base and @n8n/n8n-nodes-langchain
- 🔧 **Node properties** - 99% coverage with detailed schemas
- ⚡ **Node operations** - 63.6% coverage of available actions
- 📄 **Documentation** - 87% coverage from official n8n docs (including AI nodes)
- 🤖 **AI tools** - 265 AI-capable tool variants detected with full documentation
- 🤖 **AI tools** - 271 AI-capable nodes detected with full documentation
- 💡 **Real-world examples** - 2,646 pre-extracted configurations from popular templates
- 🎯 **Template library** - 2,709 workflow templates with 100% metadata coverage
- 🌐 **Community nodes** - Search verified community integrations with `source` filter (NEW!)

## ⚠️ Important Safety Warning
@@ -941,7 +940,7 @@ Once connected, Claude can use these powerful tools:

### Core Tools (7 tools)
- **`tools_documentation`** - Get documentation for any MCP tool (START HERE!)
- **`search_nodes`** - Full-text search across all nodes. Use `source: 'community'|'verified'` for community nodes, `includeExamples: true` for configs
- **`search_nodes`** - Full-text search across all nodes. Use `includeExamples: true` for real-world configurations
- **`get_node`** - Unified node information tool with multiple modes (v2.26.0):
  - **Info mode** (default): `detail: 'minimal'|'standard'|'full'`, `includeExamples: true`
  - **Docs mode**: `mode: 'docs'` - Human-readable markdown documentation
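For orientation, two hypothetical `get_node` calls that match the modes listed above (a sketch assembled from the bullet descriptions, not copied from the upstream README):

// Info mode (default): pick a detail level, optionally include real-world examples
get_node({ nodeType: "nodes-base.httpRequest", detail: "standard", includeExamples: true })

// Docs mode: human-readable markdown documentation for the node
get_node({ nodeType: "nodes-base.httpRequest", mode: "docs" })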
@@ -1025,18 +1024,6 @@ search_nodes({
  includeExamples: true // Returns top 2 configs per node
})

// Search community nodes only
search_nodes({
  query: "scraping",
  source: "community" // Options: all, core, community, verified
})

// Search verified community nodes
search_nodes({
  query: "pdf",
  source: "verified" // Only verified community integrations
})

// Validate node configuration
validate_node({
  nodeType: "nodes-base.httpRequest",
@@ -1134,18 +1121,17 @@ npm run dev:http # HTTP dev mode

## 📊 Metrics & Coverage

Current database coverage (n8n v2.2.3):
Current database coverage (n8n v1.117.2):

- ✅ **1,084 total nodes** - 537 core + 547 community
- ✅ **301 verified** community nodes from n8n Strapi API
- ✅ **246 popular** npm community packages indexed
- ✅ **470** nodes with documentation (87% core coverage)
- ✅ **265** AI-capable tool variants detected
- ✅ **541/541** nodes loaded (100%)
- ✅ **541** nodes with properties (100%)
- ✅ **470** nodes with documentation (87%)
- ✅ **271** AI-capable tools detected
- ✅ **2,646** pre-extracted template configurations
- ✅ **2,709** workflow templates available (100% metadata coverage)
- ✅ **AI Agent & LangChain nodes** fully documented
- ⚡ **Average response time**: ~12ms
- 💾 **Database size**: ~70MB (includes templates and community nodes)
- 💾 **Database size**: ~68MB (includes templates with metadata)

## 🔄 Recent Updates
BIN data/nodes.db
Binary file not shown.
2 dist/constants/type-structures.d.ts.map (vendored)

@@ -1 +1 @@
{"version":3,"file":"type-structures.d.ts","sourceRoot":"","sources":["../../src/constants/type-structures.ts"],"names":[],"mappings":"AAaA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAC;AACtD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAe9D,eAAO,MAAM,eAAe,EAAE,MAAM,CAAC,iBAAiB,EAAE,aAAa,CAkmBpE,CAAC;AAUF,eAAO,MAAM,qBAAqB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA4GjC,CAAC"}
{"version":3,"file":"type-structures.d.ts","sourceRoot":"","sources":["../../src/constants/type-structures.ts"],"names":[],"mappings":"AAaA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAC;AACtD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAe9D,eAAO,MAAM,eAAe,EAAE,MAAM,CAAC,iBAAiB,EAAE,aAAa,CAilBpE,CAAC;AAUF,eAAO,MAAM,qBAAqB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA4GjC,CAAC"}
16 dist/constants/type-structures.js (vendored)
@@ -545,22 +545,6 @@ exports.TYPE_STRUCTURES = {
'One-time import feature',
],
},
icon: {
type: 'primitive',
jsType: 'string',
description: 'Icon identifier for visual representation',
example: 'fa:envelope',
examples: ['fa:envelope', 'fa:user', 'fa:cog', 'file:slack.svg'],
validation: {
allowEmpty: false,
allowExpressions: false,
},
notes: [
'References icon by name or file path',
'Supports Font Awesome icons (fa:) and file paths (file:)',
'Used for visual customization in UI',
],
},
};
exports.COMPLEX_TYPE_EXAMPLES = {
collection: {
2 dist/constants/type-structures.js.map (vendored)
File diff suppressed because one or more lines are too long
20 dist/database/database-adapter.js (vendored)
@@ -311,17 +311,6 @@ class SQLJSStatement {
this.stmt = stmt;
this.onModify = onModify;
this.boundParams = null;
this.freed = false;
}
freeStatement() {
if (!this.freed && this.stmt) {
try {
this.stmt.free();
this.freed = true;
}
catch (e) {
}
}
}
run(...params) {
try {
@@ -342,9 +331,6 @@ class SQLJSStatement {
this.stmt.reset();
throw error;
}
finally {
this.freeStatement();
}
}
get(...params) {
try {
@@ -366,9 +352,6 @@ class SQLJSStatement {
this.stmt.reset();
throw error;
}
finally {
this.freeStatement();
}
}
all(...params) {
try {
@@ -389,9 +372,6 @@ class SQLJSStatement {
this.stmt.reset();
throw error;
}
finally {
this.freeStatement();
}
}
iterate(...params) {
return this.all(...params)[Symbol.iterator]();
2 dist/database/database-adapter.js.map (vendored)
File diff suppressed because one or more lines are too long
36 dist/database/node-repository.d.ts (vendored)
@@ -1,20 +1,10 @@
import { DatabaseAdapter } from './database-adapter';
import { ParsedNode } from '../parsers/node-parser';
import { SQLiteStorageService } from '../services/sqlite-storage-service';
export interface CommunityNodeFields {
isCommunity: boolean;
isVerified: boolean;
authorName?: string;
authorGithubUrl?: string;
npmPackageName?: string;
npmVersion?: string;
npmDownloads?: number;
communityFetchedAt?: string;
}
export declare class NodeRepository {
private db;
constructor(dbOrService: DatabaseAdapter | SQLiteStorageService);
saveNode(node: ParsedNode & Partial<CommunityNodeFields>): void;
saveNode(node: ParsedNode): void;
getNode(nodeType: string): any;
getAITools(): any[];
private safeJsonParse;
@@ -39,30 +29,6 @@ export declare class NodeRepository {
getAllResources(): Map<string, any[]>;
getNodePropertyDefaults(nodeType: string): Record<string, any>;
getDefaultOperationForResource(nodeType: string, resource?: string): string | undefined;
getCommunityNodes(options?: {
verified?: boolean;
limit?: number;
orderBy?: 'downloads' | 'name' | 'updated';
}): any[];
getCommunityStats(): {
total: number;
verified: number;
unverified: number;
};
hasNodeByNpmPackage(npmPackageName: string): boolean;
getNodeByNpmPackage(npmPackageName: string): any | null;
deleteCommunityNodes(): number;
updateNodeReadme(nodeType: string, readme: string): void;
updateNodeAISummary(nodeType: string, summary: object): void;
getCommunityNodesWithoutReadme(): any[];
getCommunityNodesWithoutAISummary(): any[];
getDocumentationStats(): {
total: number;
withReadme: number;
withAISummary: number;
needingReadme: number;
needingAISummary: number;
};
saveNodeVersion(versionData: {
nodeType: string;
version: string;
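A rough usage sketch of the community-node helpers declared above (hypothetical calls against the new API; `db` stands for an existing DatabaseAdapter instance and the npm package name is a placeholder):

// Sketch only: exercises the methods added to NodeRepository in this change.
const repo = new NodeRepository(db);
const topVerified = repo.getCommunityNodes({ verified: true, orderBy: 'downloads', limit: 10 });
const stats = repo.getCommunityStats(); // { total, verified, unverified }
const known = repo.hasNodeByNpmPackage('n8n-nodes-example'); // placeholder package name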
2 dist/database/node-repository.d.ts.map (vendored)

@@ -1 +1 @@
{"version":3,"file":"node-repository.d.ts","sourceRoot":"","sources":["../../src/database/node-repository.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AACpD,OAAO,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAM1E,MAAM,WAAW,mBAAmB;IAClC,WAAW,EAAE,OAAO,CAAC;IACrB,UAAU,EAAE,OAAO,CAAC;IACpB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,kBAAkB,CAAC,EAAE,MAAM,CAAC;CAC7B;AAED,qBAAa,cAAc;IACzB,OAAO,CAAC,EAAE,CAAkB;gBAEhB,WAAW,EAAE,eAAe,GAAG,oBAAoB;IAa/D,QAAQ,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,mBAAmB,CAAC,GAAG,IAAI;IAmD/D,OAAO,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG;IAuC9B,UAAU,IAAI,GAAG,EAAE;IAgBnB,OAAO,CAAC,aAAa;IASrB,UAAU,CAAC,IAAI,EAAE,UAAU,GAAG,IAAI;IAIlC,aAAa,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG;IAIpC,kBAAkB,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAqB3C,WAAW,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,GAAE,IAAI,GAAG,KAAK,GAAG,OAAc,EAAE,KAAK,GAAE,MAAW,GAAG,GAAG,EAAE;IAwC1F,WAAW,CAAC,KAAK,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IAUlC,YAAY,IAAI,MAAM;IAKtB,cAAc,IAAI,GAAG,EAAE;IAOvB,cAAc,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAYhD,yBAAyB,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAY3D,eAAe,IAAI,GAAG,EAAE;IAoBxB,mBAAmB,IAAI,MAAM;IAK7B,iBAAiB,CAAC,WAAW,EAAE,MAAM,GAAG,GAAG,EAAE;IAS7C,oBAAoB,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,UAAU,GAAE,MAAW,GAAG,GAAG,EAAE;IAmCrF,OAAO,CAAC,YAAY;IA2CpB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IAmD7D,gBAAgB,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAmBzC,wBAAwB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAyBnE,gBAAgB,IAAI,GAAG,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC;IAiBtC,eAAe,IAAI,GAAG,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC;IAiBrC,uBAAuB,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC;IAwB9D,8BAA8B,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAsDvF,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC1B,QAAQ,CAAC,EAAE,OAAO,CAAC;QACnB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,OAAO,CAAC,EAAE,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;KAC5C,GAAG,GAAG,EAAE;IAkCT,iBAAiB,IAAI;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE;IAmB5E,mBAAmB,CAAC,cAAc,EAAE,MAAM,GAAG,OAAO;IAUpD,mBAAmB,CAAC,cAAc,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAYvD,oBAAoB,IAAI,MAAM;IAc9B,gBAAgB,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,IAAI;IAUxD,mBAAmB,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;IAY5D,8BAA8B,IAAI,GAAG,EAAE;IAYvC,iCAAiC,IAAI,GAAG,EAAE;IAc1C,qBAAqB,IAAI;QACvB,KAAK,EAAE,MAAM,CAAC;QACd,UAAU,EAAE,MAAM,CAAC;QACnB,aAAa,EAAE,MAAM,CAAC;QACtB,aAAa,EAAE,MAAM,CAAC;QACtB,gBAAgB,EAAE,MAAM,CAAC;KAC1B;IA8BD,eAAe,CAAC,WAAW,EAAE;QAC3B,QAAQ,EAAE,MAAM,CAAC;QACjB,OAAO,EAAE,MAAM,CAAC;QAChB,WAAW,EAAE,MAAM,CAAC;QACpB,WAAW,EAAE,MAAM,CAAC;QACpB,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,YAAY,CAAC,EAAE,OAAO,CAAC;QACvB,gBAAgB,CAAC,EAAE,GAAG,CAAC;QACvB,UAAU,CAAC,EAAE,GAAG,CAAC;QACjB,mBAAmB,CAAC,EAAE,GAAG,CAAC;QAC1B,OAAO,CAAC,EAAE,GAAG,CAAC;QACd,iBAAiB,CAAC,EAAE,MAAM,CAAC;QAC3B,eAAe,CAAC,EAAE,GAAG,EAAE,CAAC;QACxB,oBAAoB,CAAC,EAAE,MAAM,EAAE,CAAC;QAChC,eAAe,CAAC,EAAE,MAAM,EAAE,CAAC;QAC3B,UAAU,CAAC,EAAE,IAAI,CAAC;KACnB,GAAG,IAAI;IAkCR,eAAe,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAexC,oBAAoB,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAgBlD,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAe7D,kBAAkB,CAAC,UAAU,EAAE;QAC7B,QAAQ,EAAE,MAAM,CAAC;QACjB,WAAW,EAAE,MAAM,CAAC;QACpB,SAAS,EAAE,MAAM,CAAC;QAClB,YAAY,EAAE,MAAM,CAAC;QACrB,UAAU,EAAE,OAAO,GAAG,SAAS,GAAG,SAAS,GAAG,cAAc,GAAG,qBAAqB,GAAG,iBAAiB,CAAC;QACzG,UAAU,CAAC,EAAE,OAAO,CAAC;QACrB,QAAQ,CAAC,
EAAE,MAAM,CAAC;QAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,cAAc,CAAC,EAAE,OAAO,CAAC;QACzB,iBAAiB,CAAC,EAAE,GAAG,CAAC;QACxB,QAAQ,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;KACtC,GAAG,IAAI;IA4BR,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,GAAG,EAAE;IAgBnF,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IA4BpF,wBAAwB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,GAAG,EAAE;IAkBzF,qBAAqB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO;IAcxF,sBAAsB,IAAI,MAAM;IAWhC,OAAO,CAAC,mBAAmB;IA0B3B,OAAO,CAAC,sBAAsB;IA0B9B,qBAAqB,CAAC,IAAI,EAAE;QAC1B,UAAU,EAAE,MAAM,CAAC;QACnB,aAAa,EAAE,MAAM,CAAC;QACtB,YAAY,EAAE,MAAM,CAAC;QACrB,gBAAgB,EAAE,GAAG,CAAC;QACtB,OAAO,EAAE,gBAAgB,GAAG,aAAa,GAAG,SAAS,CAAC;QACtD,UAAU,CAAC,EAAE,GAAG,EAAE,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;QACpB,QAAQ,CAAC,EAAE,GAAG,CAAC;KAChB,GAAG,MAAM;IAyBV,mBAAmB,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IAoB9D,kBAAkB,CAAC,SAAS,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAYjD,wBAAwB,CAAC,UAAU,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAexD,qBAAqB,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI;IAS9C,kCAAkC,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM;IAY9D,qBAAqB,CAAC,UAAU,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM;IAiCpE,wBAAwB,IAAI,MAAM;IAWlC,uBAAuB,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM;IAWnD,sBAAsB,IAAI,GAAG;IAwC7B,OAAO,CAAC,uBAAuB;CAchC"}
{"version":3,"file":"node-repository.d.ts","sourceRoot":"","sources":["../../src/database/node-repository.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AACpD,OAAO,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAG1E,qBAAa,cAAc;IACzB,OAAO,CAAC,EAAE,CAAkB;gBAEhB,WAAW,EAAE,eAAe,GAAG,oBAAoB;IAY/D,QAAQ,CAAC,IAAI,EAAE,UAAU,GAAG,IAAI;IAwChC,OAAO,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG;IA2B9B,UAAU,IAAI,GAAG,EAAE;IAgBnB,OAAO,CAAC,aAAa;IASrB,UAAU,CAAC,IAAI,EAAE,UAAU,GAAG,IAAI;IAIlC,aAAa,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG;IAIpC,kBAAkB,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAqB3C,WAAW,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,GAAE,IAAI,GAAG,KAAK,GAAG,OAAc,EAAE,KAAK,GAAE,MAAW,GAAG,GAAG,EAAE;IAwC1F,WAAW,CAAC,KAAK,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IAUlC,YAAY,IAAI,MAAM;IAKtB,cAAc,IAAI,GAAG,EAAE;IAOvB,cAAc,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAYhD,yBAAyB,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAY3D,eAAe,IAAI,GAAG,EAAE;IAoBxB,mBAAmB,IAAI,MAAM;IAK7B,iBAAiB,CAAC,WAAW,EAAE,MAAM,GAAG,GAAG,EAAE;IAS7C,oBAAoB,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,UAAU,GAAE,MAAW,GAAG,GAAG,EAAE;IAmCrF,OAAO,CAAC,YAAY;IA4BpB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IAmD7D,gBAAgB,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAmBzC,wBAAwB,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAyBnE,gBAAgB,IAAI,GAAG,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC;IAiBtC,eAAe,IAAI,GAAG,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC;IAiBrC,uBAAuB,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC;IAwB9D,8BAA8B,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAuDvF,eAAe,CAAC,WAAW,EAAE;QAC3B,QAAQ,EAAE,MAAM,CAAC;QACjB,OAAO,EAAE,MAAM,CAAC;QAChB,WAAW,EAAE,MAAM,CAAC;QACpB,WAAW,EAAE,MAAM,CAAC;QACpB,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,YAAY,CAAC,EAAE,OAAO,CAAC;QACvB,gBAAgB,CAAC,EAAE,GAAG,CAAC;QACvB,UAAU,CAAC,EAAE,GAAG,CAAC;QACjB,mBAAmB,CAAC,EAAE,GAAG,CAAC;QAC1B,OAAO,CAAC,EAAE,GAAG,CAAC;QACd,iBAAiB,CAAC,EAAE,MAAM,CAAC;QAC3B,eAAe,CAAC,EAAE,GAAG,EAAE,CAAC;QACxB,oBAAoB,CAAC,EAAE,MAAM,EAAE,CAAC;QAChC,eAAe,CAAC,EAAE,MAAM,EAAE,CAAC;QAC3B,UAAU,CAAC,EAAE,IAAI,CAAC;KACnB,GAAG,IAAI;IAkCR,eAAe,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,EAAE;IAexC,oBAAoB,CAAC,QAAQ,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAgBlD,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAe7D,kBAAkB,CAAC,UAAU,EAAE;QAC7B,QAAQ,EAAE,MAAM,CAAC;QACjB,WAAW,EAAE,MAAM,CAAC;QACpB,SAAS,EAAE,MAAM,CAAC;QAClB,YAAY,EAAE,MAAM,CAAC;QACrB,UAAU,EAAE,OAAO,GAAG,SAAS,GAAG,SAAS,GAAG,cAAc,GAAG,qBAAqB,GAAG,iBAAiB,CAAC;QACzG,UAAU,CAAC,EAAE,OAAO,CAAC;QACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,cAAc,CAAC,EAAE,OAAO,CAAC;QACzB,iBAAiB,CAAC,EAAE,GAAG,CAAC;QACxB,QAAQ,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;KACtC,GAAG,IAAI;IA4BR,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,GAAG,EAAE;IAgBnF,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IA4BpF,wBAAwB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,GAAG,EAAE;IAkBzF,qBAAqB,CAAC,QAAQ,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO;IAcxF,sBAAsB,IAAI,MAAM;IAWhC,OAAO,CAAC,mBAAmB;IA0B3B,OAAO,CAAC,sBAAsB;IA0B9B,qBAAqB,CAAC,IAAI,EAAE;QAC1B,UAAU,EAAE,MAAM,CAAC;QACnB,aAAa,EAAE,MAAM,CAAC;QACtB,YAAY,EAAE,MAAM,CAAC;QACrB,gBAAgB,EAAE,GAAG,CAAC;QACtB,OAAO,EAAE,gBAAgB,GAAG,aAAa,GAAG,SAAS,CAAC;QACtD,UAAU,CAAC,EAAE,GAAG,EAAE,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;QACpB,QAAQ,CAAC,EAAE,GAAG,CAAC;KAChB,GAAG,MAAM;IAyBV,mBAAmB,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE;IAoB9D,k
BAAkB,CAAC,SAAS,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAYjD,wBAAwB,CAAC,UAAU,EAAE,MAAM,GAAG,GAAG,GAAG,IAAI;IAexD,qBAAqB,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI;IAS9C,kCAAkC,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM;IAY9D,qBAAqB,CAAC,UAAU,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM;IAiCpE,wBAAwB,IAAI,MAAM;IAWlC,uBAAuB,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM;IAWnD,sBAAsB,IAAI,GAAG;IAwC7B,OAAO,CAAC,uBAAuB;CAchC"}
123 dist/database/node-repository.js (vendored)
@@ -19,12 +19,10 @@ class NodeRepository {
is_webhook, is_versioned, is_tool_variant, tool_variant_of,
has_tool_variant, version, documentation,
properties_schema, operations, credentials_required,
outputs, output_names,
is_community, is_verified, author_name, author_github_url,
npm_package_name, npm_version, npm_downloads, community_fetched_at
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
outputs, output_names
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`);
stmt.run(node.nodeType, node.packageName, node.displayName, node.description, node.category, node.style, node.isAITool ? 1 : 0, node.isTrigger ? 1 : 0, node.isWebhook ? 1 : 0, node.isVersioned ? 1 : 0, node.isToolVariant ? 1 : 0, node.toolVariantOf || null, node.hasToolVariant ? 1 : 0, node.version, node.documentation || null, JSON.stringify(node.properties, null, 2), JSON.stringify(node.operations, null, 2), JSON.stringify(node.credentials, null, 2), node.outputs ? JSON.stringify(node.outputs, null, 2) : null, node.outputNames ? JSON.stringify(node.outputNames, null, 2) : null, node.isCommunity ? 1 : 0, node.isVerified ? 1 : 0, node.authorName || null, node.authorGithubUrl || null, node.npmPackageName || null, node.npmVersion || null, node.npmDownloads || 0, node.communityFetchedAt || null);
stmt.run(node.nodeType, node.packageName, node.displayName, node.description, node.category, node.style, node.isAITool ? 1 : 0, node.isTrigger ? 1 : 0, node.isWebhook ? 1 : 0, node.isVersioned ? 1 : 0, node.isToolVariant ? 1 : 0, node.toolVariantOf || null, node.hasToolVariant ? 1 : 0, node.version, node.documentation || null, JSON.stringify(node.properties, null, 2), JSON.stringify(node.operations, null, 2), JSON.stringify(node.credentials, null, 2), node.outputs ? JSON.stringify(node.outputs, null, 2) : null, node.outputNames ? JSON.stringify(node.outputNames, null, 2) : null);
}
getNode(nodeType) {
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(nodeType);
@@ -39,14 +37,6 @@ class NodeRepository {
return this.parseNodeRow(originalRow);
}
}
if (!row) {
const caseInsensitiveRow = this.db.prepare(`
SELECT * FROM nodes WHERE LOWER(node_type) = LOWER(?)
`).get(nodeType);
if (caseInsensitiveRow) {
return this.parseNodeRow(caseInsensitiveRow);
}
}
if (!row)
return null;
return this.parseNodeRow(row);
@@ -224,20 +214,7 @@ class NodeRepository {
credentials: this.safeJsonParse(row.credentials_required, []),
hasDocumentation: !!row.documentation,
outputs: row.outputs ? this.safeJsonParse(row.outputs, null) : null,
outputNames: row.output_names ? this.safeJsonParse(row.output_names, null) : null,
isCommunity: Number(row.is_community) === 1,
isVerified: Number(row.is_verified) === 1,
authorName: row.author_name || null,
authorGithubUrl: row.author_github_url || null,
npmPackageName: row.npm_package_name || null,
npmVersion: row.npm_version || null,
npmDownloads: row.npm_downloads || 0,
communityFetchedAt: row.community_fetched_at || null,
npmReadme: row.npm_readme || null,
aiDocumentationSummary: row.ai_documentation_summary
? this.safeJsonParse(row.ai_documentation_summary, null)
: null,
aiSummaryGeneratedAt: row.ai_summary_generated_at || null,
outputNames: row.output_names ? this.safeJsonParse(row.output_names, null) : null
};
}
getNodeOperations(nodeType, resource) {
@@ -383,98 +360,6 @@ class NodeRepository {
}
return undefined;
}
getCommunityNodes(options) {
let sql = 'SELECT * FROM nodes WHERE is_community = 1';
const params = [];
if (options?.verified !== undefined) {
sql += ' AND is_verified = ?';
params.push(options.verified ? 1 : 0);
}
switch (options?.orderBy) {
case 'downloads':
sql += ' ORDER BY npm_downloads DESC';
break;
case 'updated':
sql += ' ORDER BY community_fetched_at DESC';
break;
case 'name':
default:
sql += ' ORDER BY display_name';
}
if (options?.limit) {
sql += ' LIMIT ?';
params.push(options.limit);
}
const rows = this.db.prepare(sql).all(...params);
return rows.map(row => this.parseNodeRow(row));
}
getCommunityStats() {
const totalResult = this.db.prepare('SELECT COUNT(*) as count FROM nodes WHERE is_community = 1').get();
const verifiedResult = this.db.prepare('SELECT COUNT(*) as count FROM nodes WHERE is_community = 1 AND is_verified = 1').get();
return {
total: totalResult.count,
verified: verifiedResult.count,
unverified: totalResult.count - verifiedResult.count
};
}
hasNodeByNpmPackage(npmPackageName) {
const result = this.db.prepare('SELECT 1 FROM nodes WHERE npm_package_name = ? LIMIT 1').get(npmPackageName);
return !!result;
}
getNodeByNpmPackage(npmPackageName) {
const row = this.db.prepare('SELECT * FROM nodes WHERE npm_package_name = ?').get(npmPackageName);
if (!row)
return null;
return this.parseNodeRow(row);
}
deleteCommunityNodes() {
const result = this.db.prepare('DELETE FROM nodes WHERE is_community = 1').run();
return result.changes;
}
updateNodeReadme(nodeType, readme) {
const stmt = this.db.prepare(`
UPDATE nodes SET npm_readme = ? WHERE node_type = ?
`);
stmt.run(readme, nodeType);
}
updateNodeAISummary(nodeType, summary) {
const stmt = this.db.prepare(`
UPDATE nodes
SET ai_documentation_summary = ?, ai_summary_generated_at = datetime('now')
WHERE node_type = ?
`);
stmt.run(JSON.stringify(summary), nodeType);
}
getCommunityNodesWithoutReadme() {
const rows = this.db.prepare(`
SELECT * FROM nodes
WHERE is_community = 1 AND (npm_readme IS NULL OR npm_readme = '')
ORDER BY npm_downloads DESC
`).all();
return rows.map(row => this.parseNodeRow(row));
}
getCommunityNodesWithoutAISummary() {
const rows = this.db.prepare(`
SELECT * FROM nodes
WHERE is_community = 1
AND npm_readme IS NOT NULL AND npm_readme != ''
AND (ai_documentation_summary IS NULL OR ai_documentation_summary = '')
ORDER BY npm_downloads DESC
`).all();
return rows.map(row => this.parseNodeRow(row));
}
getDocumentationStats() {
const total = this.db.prepare('SELECT COUNT(*) as count FROM nodes WHERE is_community = 1').get().count;
const withReadme = this.db.prepare("SELECT COUNT(*) as count FROM nodes WHERE is_community = 1 AND npm_readme IS NOT NULL AND npm_readme != ''").get().count;
const withAISummary = this.db.prepare("SELECT COUNT(*) as count FROM nodes WHERE is_community = 1 AND ai_documentation_summary IS NOT NULL AND ai_documentation_summary != ''").get().count;
return {
total,
withReadme,
withAISummary,
needingReadme: total - withReadme,
needingAISummary: withReadme - withAISummary
};
}
saveNodeVersion(versionData) {
const stmt = this.db.prepare(`
INSERT OR REPLACE INTO node_versions (
2 dist/database/node-repository.js.map (vendored)
File diff suppressed because one or more lines are too long
2 dist/http-server-single-session.d.ts.map (vendored)

@@ -1 +1 @@
{"version":3,"file":"http-server-single-session.d.ts","sourceRoot":"","sources":["../src/http-server-single-session.ts"],"names":[],"mappings":";AAMA,OAAO,OAAO,MAAM,SAAS,CAAC;AAoB9B,OAAO,EAAE,eAAe,EAA2B,MAAM,0BAA0B,CAAC;AACpF,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAwErD,qBAAa,uBAAuB;IAElC,OAAO,CAAC,UAAU,CAA8D;IAChF,OAAO,CAAC,OAAO,CAA0D;IACzE,OAAO,CAAC,eAAe,CAAsE;IAC7F,OAAO,CAAC,eAAe,CAA4D;IACnF,OAAO,CAAC,kBAAkB,CAAyC;IACnE,OAAO,CAAC,OAAO,CAAwB;IACvC,OAAO,CAAC,cAAc,CAAwB;IAC9C,OAAO,CAAC,aAAa,CAAM;IAI3B,OAAO,CAAC,cAAc,CAER;IACd,OAAO,CAAC,SAAS,CAAuB;IACxC,OAAO,CAAC,YAAY,CAA+B;;IAcnD,OAAO,CAAC,mBAAmB;IAmB3B,OAAO,CAAC,sBAAsB;YAqChB,aAAa;IAuC3B,OAAO,CAAC,qBAAqB;IAO7B,OAAO,CAAC,gBAAgB;IAkBxB,OAAO,CAAC,gBAAgB;IASxB,OAAO,CAAC,sBAAsB;IAkC9B,OAAO,CAAC,mBAAmB;YASb,oBAAoB;YAwBpB,oBAAoB;IAwBlC,OAAO,CAAC,iBAAiB;IAsBzB,OAAO,CAAC,aAAa;IA2BrB,OAAO,CAAC,mBAAmB;IAoDrB,aAAa,CACjB,GAAG,EAAE,OAAO,CAAC,OAAO,EACpB,GAAG,EAAE,OAAO,CAAC,QAAQ,EACrB,eAAe,CAAC,EAAE,eAAe,GAChC,OAAO,CAAC,IAAI,CAAC;YA0PF,eAAe;IA4D7B,OAAO,CAAC,SAAS;IAYjB,OAAO,CAAC,gBAAgB;IASlB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAgnBtB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IA2D/B,cAAc,IAAI;QAChB,MAAM,EAAE,OAAO,CAAC;QAChB,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,GAAG,CAAC,EAAE,MAAM,CAAC;QACb,QAAQ,CAAC,EAAE;YACT,KAAK,EAAE,MAAM,CAAC;YACd,MAAM,EAAE,MAAM,CAAC;YACf,OAAO,EAAE,MAAM,CAAC;YAChB,GAAG,EAAE,MAAM,CAAC;YACZ,UAAU,EAAE,MAAM,EAAE,CAAC;SACtB,CAAC;KACH;IAmDM,kBAAkB,IAAI,YAAY,EAAE;IAoEpC,mBAAmB,CAAC,QAAQ,EAAE,YAAY,EAAE,GAAG,MAAM;CAsG7D"}
{"version":3,"file":"http-server-single-session.d.ts","sourceRoot":"","sources":["../src/http-server-single-session.ts"],"names":[],"mappings":";AAMA,OAAO,OAAO,MAAM,SAAS,CAAC;AAoB9B,OAAO,EAAE,eAAe,EAA2B,MAAM,0BAA0B,CAAC;AACpF,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAuErD,qBAAa,uBAAuB;IAElC,OAAO,CAAC,UAAU,CAA8D;IAChF,OAAO,CAAC,OAAO,CAA0D;IACzE,OAAO,CAAC,eAAe,CAAsE;IAC7F,OAAO,CAAC,eAAe,CAA4D;IACnF,OAAO,CAAC,kBAAkB,CAAyC;IACnE,OAAO,CAAC,OAAO,CAAwB;IACvC,OAAO,CAAC,cAAc,CAAwB;IAC9C,OAAO,CAAC,aAAa,CAAM;IAC3B,OAAO,CAAC,cAAc,CAAkB;IACxC,OAAO,CAAC,SAAS,CAAuB;IACxC,OAAO,CAAC,YAAY,CAA+B;;IAcnD,OAAO,CAAC,mBAAmB;IAmB3B,OAAO,CAAC,sBAAsB;YAqChB,aAAa;IAuC3B,OAAO,CAAC,qBAAqB;IAO7B,OAAO,CAAC,gBAAgB;IAkBxB,OAAO,CAAC,gBAAgB;IASxB,OAAO,CAAC,sBAAsB;IAkC9B,OAAO,CAAC,mBAAmB;YASb,oBAAoB;YAwBpB,oBAAoB;IAwBlC,OAAO,CAAC,iBAAiB;IAsBzB,OAAO,CAAC,aAAa;IA2BrB,OAAO,CAAC,mBAAmB;IAoDrB,aAAa,CACjB,GAAG,EAAE,OAAO,CAAC,OAAO,EACpB,GAAG,EAAE,OAAO,CAAC,QAAQ,EACrB,eAAe,CAAC,EAAE,eAAe,GAChC,OAAO,CAAC,IAAI,CAAC;YAmOF,eAAe;IA8C7B,OAAO,CAAC,SAAS;IAYjB,OAAO,CAAC,gBAAgB;IASlB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAgnBtB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAkD/B,cAAc,IAAI;QAChB,MAAM,EAAE,OAAO,CAAC;QAChB,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,GAAG,CAAC,EAAE,MAAM,CAAC;QACb,QAAQ,CAAC,EAAE;YACT,KAAK,EAAE,MAAM,CAAC;YACd,MAAM,EAAE,MAAM,CAAC;YACf,OAAO,EAAE,MAAM,CAAC;YAChB,GAAG,EAAE,MAAM,CAAC;YACZ,UAAU,EAAE,MAAM,EAAE,CAAC;SACtB,CAAC;KACH;IAmDM,kBAAkB,IAAI,YAAY,EAAE;IAoEpC,mBAAmB,CAAC,QAAQ,EAAE,YAAY,EAAE,GAAG,MAAM;CAsG7D"}
44 dist/http-server-single-session.js (vendored)
@@ -22,7 +22,6 @@ const crypto_1 = require("crypto");
const types_js_1 = require("@modelcontextprotocol/sdk/types.js");
const protocol_version_1 = require("./utils/protocol-version");
const instance_context_1 = require("./types/instance-context");
const shared_database_1 = require("./database/shared-database");
dotenv_1.default.config();
const DEFAULT_PROTOCOL_VERSION = protocol_version_1.STANDARD_PROTOCOL_VERSION;
const MAX_SESSIONS = Math.max(1, parseInt(process.env.N8N_MCP_MAX_SESSIONS || '100', 10));
@@ -53,7 +52,7 @@ class SingleSessionHTTPServer {
this.contextSwitchLocks = new Map();
this.session = null;
this.consoleManager = new console_manager_1.ConsoleManager();
this.sessionTimeout = parseInt(process.env.SESSION_TIMEOUT_MINUTES || '5', 10) * 60 * 1000;
this.sessionTimeout = 30 * 60 * 1000;
this.authToken = null;
this.cleanupTimer = null;
this.validateEnvironment();
@@ -291,25 +290,6 @@ class SingleSessionHTTPServer {
return;
}
logger_1.logger.info('handleRequest: Creating new transport for initialize request');
if (instanceContext?.instanceId) {
const sessionsToRemove = [];
for (const [existingSessionId, context] of Object.entries(this.sessionContexts)) {
if (context?.instanceId === instanceContext.instanceId) {
sessionsToRemove.push(existingSessionId);
}
}
for (const oldSessionId of sessionsToRemove) {
if (!this.transports[oldSessionId]) {
continue;
}
logger_1.logger.info('Cleaning up previous session for instance', {
instanceId: instanceContext.instanceId,
oldSession: oldSessionId,
reason: 'instance_reconnect'
});
await this.removeSession(oldSessionId, 'instance_reconnect');
}
}
let sessionIdToUse;
const isMultiTenantEnabled = process.env.ENABLE_MULTI_TENANT === 'true';
const sessionStrategy = process.env.MULTI_TENANT_SESSION_STRATEGY || 'instance';
@@ -454,21 +434,12 @@ class SingleSessionHTTPServer {
}
async resetSessionSSE(res) {
if (this.session) {
const sessionId = this.session.sessionId;
logger_1.logger.info('Closing previous session for SSE', { sessionId });
if (this.session.server && typeof this.session.server.close === 'function') {
try {
await this.session.server.close();
}
catch (serverError) {
logger_1.logger.warn('Error closing server for SSE session', { sessionId, error: serverError });
}
}
try {
logger_1.logger.info('Closing previous session for SSE', { sessionId: this.session.sessionId });
await this.session.transport.close();
}
catch (transportError) {
logger_1.logger.warn('Error closing transport for SSE session', { sessionId, error: transportError });
catch (error) {
logger_1.logger.warn('Error closing previous session:', error);
}
}
try {
@@ -1043,13 +1014,6 @@ class SingleSessionHTTPServer {
});
});
}
try {
await (0, shared_database_1.closeSharedDatabase)();
logger_1.logger.info('Shared database closed');
}
catch (error) {
logger_1.logger.warn('Error closing shared database:', error);
}
logger_1.logger.info('Single-Session HTTP server shutdown completed');
}
getSessionInfo() {
2 dist/http-server-single-session.js.map (vendored)
File diff suppressed because one or more lines are too long
2 dist/http-server.d.ts.map (vendored)

@@ -1 +1 @@
{"version":3,"file":"http-server.d.ts","sourceRoot":"","sources":["../src/http-server.ts"],"names":[],"mappings":";AAiDA,wBAAgB,aAAa,IAAI,MAAM,GAAG,IAAI,CAsB7C;AAmED,wBAAsB,oBAAoB,kBAsezC;AAGD,OAAO,QAAQ,cAAc,CAAC;IAC5B,UAAU,yBAAyB;QACjC,WAAW,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;KACpD;CACF"}
{"version":3,"file":"http-server.d.ts","sourceRoot":"","sources":["../src/http-server.ts"],"names":[],"mappings":";AA0CA,wBAAgB,aAAa,IAAI,MAAM,GAAG,IAAI,CAsB7C;AA+DD,wBAAsB,oBAAoB,kBA+dzC;AAGD,OAAO,QAAQ,cAAc,CAAC;IAC5B,UAAU,yBAAyB;QACjC,WAAW,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;KACpD;CACF"}
3 dist/http-server.js (vendored)
@@ -85,9 +85,6 @@ async function shutdown() {
}
}
async function startFixedHTTPServer() {
logger_1.logger.warn('DEPRECATION: startFixedHTTPServer() is deprecated as of v2.31.8. ' +
'Use SingleSessionHTTPServer which supports SSE streaming. ' +
'See: https://github.com/czlonkowski/n8n-mcp/issues/524');
validateEnvironment();
const app = (0, express_1.default)();
const trustProxy = process.env.TRUST_PROXY ? Number(process.env.TRUST_PROXY) : 0;
2 dist/http-server.js.map (vendored)
File diff suppressed because one or more lines are too long
2 dist/mcp/handlers-n8n-manager.d.ts.map (vendored)

@@ -1 +1 @@
{"version":3,"file":"handlers-n8n-manager.d.ts","sourceRoot":"","sources":["../../src/mcp/handlers-n8n-manager.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAE1D,OAAO,EAML,eAAe,EAGhB,MAAM,kBAAkB,CAAC;AAkB1B,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,eAAe,EAA2B,MAAM,2BAA2B,CAAC;AAOrF,OAAO,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAqNhE,wBAAgB,0BAA0B,IAAI,MAAM,CAEnD;AAMD,wBAAgB,uBAAuB,gDAEtC;AAKD,wBAAgB,kBAAkB,IAAI,IAAI,CAIzC;AAED,wBAAgB,eAAe,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,YAAY,GAAG,IAAI,CAgF9E;AAqHD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmF7G;AAED,wBAAsB,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC1G;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAoDjH;AAED,wBAAsB,0BAA0B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmDnH;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyCjH;AAED,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA8H1B;AAeD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAsC7G;AAED,wBAAsB,mBAAmB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiE5G;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA0F1B;AAED,wBAAsB,qBAAqB,CACzC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoK1B;AAQD,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAwJ3G;AAED,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CA8H3G;AAED,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAgD7G;AAED,wBAAsB,qBAAqB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC9G;AAID,wBAAsB,iBAAiB,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAwG3F;AAkLD,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAkQxG;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAsL1B;AA+BD,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,eAAe,EAAE,eAAe,EAChC,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoM1B;AAQD,wBAAsB,4BAA4B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyErH"}
{"version":3,"file":"handlers-n8n-manager.d.ts","sourceRoot":"","sources":["../../src/mcp/handlers-n8n-manager.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAE1D,OAAO,EAML,eAAe,EAGhB,MAAM,kBAAkB,CAAC;AAkB1B,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,eAAe,EAA2B,MAAM,2BAA2B,CAAC;AAOrF,OAAO,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAqNhE,wBAAgB,0BAA0B,IAAI,MAAM,CAEnD;AAMD,wBAAgB,uBAAuB,gDAEtC;AAKD,wBAAgB,kBAAkB,IAAI,IAAI,CAIzC;AAED,wBAAgB,eAAe,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,YAAY,GAAG,IAAI,CAgF9E;AAqHD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmF7G;AAED,wBAAsB,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC1G;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAoDjH;AAED,wBAAsB,0BAA0B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmDnH;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyCjH;AAED,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA8H1B;AAeD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAsC7G;AAED,wBAAsB,mBAAmB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiE5G;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA0F1B;AAED,wBAAsB,qBAAqB,CACzC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoK1B;AAQD,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAwJ3G;AAED,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CA4F3G;AAED,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAgD7G;AAED,wBAAsB,qBAAqB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC9G;AAID,wBAAsB,iBAAiB,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAwG3F;AAkLD,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAkQxG;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAsL1B;AA+BD,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,eAAe,EAAE,eAAe,EAChC,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoM1B;AAQD,wBAAsB,4BAA4B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyErH"}
29 dist/mcp/handlers-n8n-manager.js (vendored)
@@ -1024,18 +1024,14 @@ async function handleGetExecution(args, context) {
const client = ensureApiConfigured(context);
const schema = zod_1.z.object({
id: zod_1.z.string(),
mode: zod_1.z.enum(['preview', 'summary', 'filtered', 'full', 'error']).optional(),
mode: zod_1.z.enum(['preview', 'summary', 'filtered', 'full']).optional(),
nodeNames: zod_1.z.array(zod_1.z.string()).optional(),
itemsLimit: zod_1.z.number().optional(),
includeInputData: zod_1.z.boolean().optional(),
includeData: zod_1.z.boolean().optional(),
errorItemsLimit: zod_1.z.number().min(0).max(100).optional(),
includeStackTrace: zod_1.z.boolean().optional(),
includeExecutionPath: zod_1.z.boolean().optional(),
fetchWorkflow: zod_1.z.boolean().optional()
includeData: zod_1.z.boolean().optional()
});
const params = schema.parse(args);
const { id, mode, nodeNames, itemsLimit, includeInputData, includeData, errorItemsLimit, includeStackTrace, includeExecutionPath, fetchWorkflow } = params;
const { id, mode, nodeNames, itemsLimit, includeInputData, includeData } = params;
let effectiveMode = mode;
if (!effectiveMode && includeData !== undefined) {
effectiveMode = includeData ? 'summary' : undefined;
@@ -1048,28 +1044,13 @@ async function handleGetExecution(args, context) {
data: execution
};
}
let workflow;
if (effectiveMode === 'error' && fetchWorkflow !== false && execution.workflowId) {
try {
workflow = await client.getWorkflow(execution.workflowId);
}
catch (e) {
logger_1.logger.debug('Could not fetch workflow for error analysis', {
workflowId: execution.workflowId,
error: e instanceof Error ? e.message : 'Unknown error'
});
}
}
const filterOptions = {
mode: effectiveMode,
nodeNames,
itemsLimit,
includeInputData,
errorItemsLimit,
includeStackTrace,
includeExecutionPath
includeInputData
};
const processedExecution = (0, execution_processor_1.processExecution)(execution, filterOptions, workflow);
const processedExecution = (0, execution_processor_1.processExecution)(execution, filterOptions);
return {
success: true,
data: processedExecution
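A hypothetical call shape for the expanded execution handler above (argument names come from the added zod schema; `context` stands for the instance context passed to the surrounding handlers; this is a sketch, not code from the diff):

// Sketch only: exercises the new 'error' mode and workflow fetch added above.
await handleGetExecution({
  id: "12345",              // execution id (placeholder)
  mode: "error",            // new mode accepted by the schema
  errorItemsLimit: 10,
  includeStackTrace: true,
  fetchWorkflow: true       // lets the handler fetch the workflow for error analysis
}, context);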
2 dist/mcp/handlers-n8n-manager.js.map (vendored)
File diff suppressed because one or more lines are too long
9 dist/mcp/index.js (vendored)
@@ -124,15 +124,6 @@ Learn more: https://github.com/czlonkowski/n8n-mcp/blob/main/PRIVACY.md
checkpoints.push(startup_checkpoints_1.STARTUP_CHECKPOINTS.MCP_HANDSHAKE_STARTING);
if (mode === 'http') {
if (process.env.USE_FIXED_HTTP === 'true') {
logger_1.logger.warn('DEPRECATION WARNING: USE_FIXED_HTTP=true is deprecated as of v2.31.8. ' +
'The fixed HTTP implementation does not support SSE streaming required by clients like OpenAI Codex. ' +
'Please unset USE_FIXED_HTTP to use the modern SingleSessionHTTPServer which supports both JSON-RPC and SSE. ' +
'This option will be removed in a future version. See: https://github.com/czlonkowski/n8n-mcp/issues/524');
console.warn('\n⚠️ DEPRECATION WARNING ⚠️');
console.warn('USE_FIXED_HTTP=true is deprecated as of v2.31.8.');
console.warn('The fixed HTTP implementation does not support SSE streaming.');
console.warn('Please unset USE_FIXED_HTTP to use SingleSessionHTTPServer.');
console.warn('See: https://github.com/czlonkowski/n8n-mcp/issues/524\n');
const { startFixedHTTPServer } = await Promise.resolve().then(() => __importStar(require('../http-server')));
await startFixedHTTPServer();
}
2 dist/mcp/index.js.map (vendored)
File diff suppressed because one or more lines are too long
4 dist/mcp/server.d.ts (vendored)
@@ -13,9 +13,6 @@ export declare class N8NDocumentationMCPServer {
private previousToolTimestamp;
private earlyLogger;
private disabledToolsCache;
private useSharedDatabase;
private sharedDbState;
private isShutdown;
constructor(instanceContext?: InstanceContext, earlyLogger?: EarlyErrorLogger);
close(): Promise<void>;
private initializeDatabase;
@@ -43,7 +40,6 @@ export declare class N8NDocumentationMCPServer {
private rankSearchResults;
private listAITools;
private getNodeDocumentation;
private safeJsonParse;
private getDatabaseStatistics;
private getNodeEssentials;
private getNode;
2 dist/mcp/server.d.ts.map (vendored)

@@ -1 +1 @@
{"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../../src/mcp/server.ts"],"names":[],"mappings":"AAuCA,OAAO,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AAE5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,iCAAiC,CAAC;AAmGnE,qBAAa,yBAAyB;IACpC,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,EAAE,CAAgC;IAC1C,OAAO,CAAC,UAAU,CAA+B;IACjD,OAAO,CAAC,eAAe,CAAgC;IACvD,OAAO,CAAC,WAAW,CAAgB;IACnC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,UAAU,CAAa;IAC/B,OAAO,CAAC,eAAe,CAAC,CAAkB;IAC1C,OAAO,CAAC,YAAY,CAAuB;IAC3C,OAAO,CAAC,qBAAqB,CAAsB;IACnD,OAAO,CAAC,WAAW,CAAiC;IACpD,OAAO,CAAC,kBAAkB,CAA4B;IACtD,OAAO,CAAC,iBAAiB,CAAkB;IAC3C,OAAO,CAAC,aAAa,CAAoC;IACzD,OAAO,CAAC,UAAU,CAAkB;gBAExB,eAAe,CAAC,EAAE,eAAe,EAAE,WAAW,CAAC,EAAE,gBAAgB;IAqGvE,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;YA+Cd,kBAAkB;YAiDlB,wBAAwB;IA0BtC,OAAO,CAAC,kBAAkB;YA6CZ,iBAAiB;IAa/B,OAAO,CAAC,eAAe,CAAkB;YAE3B,sBAAsB;IAgDpC,OAAO,CAAC,gBAAgB;IAqCxB,OAAO,CAAC,aAAa;IAoTrB,OAAO,CAAC,wBAAwB;IAoFhC,OAAO,CAAC,kBAAkB;IAqE1B,OAAO,CAAC,uBAAuB;IAwB/B,OAAO,CAAC,qBAAqB;YAoTf,SAAS;YA2DT,WAAW;YAkFX,WAAW;YA0CX,cAAc;YA8Md,gBAAgB;IAqD9B,OAAO,CAAC,mBAAmB;IAwE3B,OAAO,CAAC,eAAe;YAsBT,eAAe;IA2L7B,OAAO,CAAC,kBAAkB;IAQ1B,OAAO,CAAC,uBAAuB;IA0D/B,OAAO,CAAC,iBAAiB;YAqFX,WAAW;YAgCX,oBAAoB;IAuFlC,OAAO,CAAC,aAAa;YAQP,qBAAqB;YAwDrB,iBAAiB;YAiKjB,OAAO;YAgDP,cAAc;YAwFd,iBAAiB;IAqC/B,OAAO,CAAC,iBAAiB;IA0BzB,OAAO,CAAC,iBAAiB;IA0BzB,OAAO,CAAC,eAAe;IAwCvB,OAAO,CAAC,kBAAkB;IAiC1B,OAAO,CAAC,aAAa;IAoCrB,OAAO,CAAC,0BAA0B;IAgClC,OAAO,CAAC,4BAA4B;YAKtB,oBAAoB;IAsDlC,OAAO,CAAC,gBAAgB;YAiBV,SAAS;YA6CT,kBAAkB;YAqElB,uBAAuB;YAsDvB,iBAAiB;IAqE/B,OAAO,CAAC,qBAAqB;IA8C7B,OAAO,CAAC,uBAAuB;IA4D/B,OAAO,CAAC,wBAAwB;IAkChC,OAAO,CAAC,iBAAiB;YAoDX,mBAAmB;YAoEnB,qBAAqB;IAS7B,OAAO,CAAC,SAAS,EAAE,GAAG,GAAG,OAAO,CAAC,IAAI,CAAC;YAS9B,aAAa;YAcb,iBAAiB;YAoBjB,WAAW;YAwBX,eAAe;YAqBf,mBAAmB;YAwBnB,yBAAyB;IA4CvC,OAAO,CAAC,kBAAkB;YAiBZ,gBAAgB;YA6HhB,2BAA2B;YAiE3B,2BAA2B;IAyEnC,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;IA0BpB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAgEhC"}
{"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../../src/mcp/server.ts"],"names":[],"mappings":"AAsCA,OAAO,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AAE5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,iCAAiC,CAAC;AAgGnE,qBAAa,yBAAyB;IACpC,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,EAAE,CAAgC;IAC1C,OAAO,CAAC,UAAU,CAA+B;IACjD,OAAO,CAAC,eAAe,CAAgC;IACvD,OAAO,CAAC,WAAW,CAAgB;IACnC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,UAAU,CAAa;IAC/B,OAAO,CAAC,eAAe,CAAC,CAAkB;IAC1C,OAAO,CAAC,YAAY,CAAuB;IAC3C,OAAO,CAAC,qBAAqB,CAAsB;IACnD,OAAO,CAAC,WAAW,CAAiC;IACpD,OAAO,CAAC,kBAAkB,CAA4B;gBAE1C,eAAe,CAAC,EAAE,eAAe,EAAE,WAAW,CAAC,EAAE,gBAAgB;IAiGvE,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;YA6Bd,kBAAkB;YAwClB,wBAAwB;IA0BtC,OAAO,CAAC,kBAAkB;YA6CZ,iBAAiB;IAa/B,OAAO,CAAC,eAAe,CAAkB;YAE3B,sBAAsB;IAgDpC,OAAO,CAAC,gBAAgB;IAqCxB,OAAO,CAAC,aAAa;IAoTrB,OAAO,CAAC,wBAAwB;IAoFhC,OAAO,CAAC,kBAAkB;IAqE1B,OAAO,CAAC,uBAAuB;IAwB/B,OAAO,CAAC,qBAAqB;YAgTf,SAAS;YA2DT,WAAW;YAkFX,WAAW;YAyCX,cAAc;YAyKd,gBAAgB;IAqD9B,OAAO,CAAC,mBAAmB;IAwE3B,OAAO,CAAC,eAAe;YAsBT,eAAe;IAqI7B,OAAO,CAAC,kBAAkB;IAQ1B,OAAO,CAAC,uBAAuB;IA0D/B,OAAO,CAAC,iBAAiB;YAqFX,WAAW;YAgCX,oBAAoB;YA2EpB,qBAAqB;YAwDrB,iBAAiB;YAiKjB,OAAO;YAgDP,cAAc;YAwFd,iBAAiB;IAqC/B,OAAO,CAAC,iBAAiB;IA0BzB,OAAO,CAAC,iBAAiB;IA0BzB,OAAO,CAAC,eAAe;IAwCvB,OAAO,CAAC,kBAAkB;IAiC1B,OAAO,CAAC,aAAa;IAoCrB,OAAO,CAAC,0BAA0B;IAgClC,OAAO,CAAC,4BAA4B;YAKtB,oBAAoB;IAsDlC,OAAO,CAAC,gBAAgB;YAiBV,SAAS;YA6CT,kBAAkB;YAqElB,uBAAuB;YAsDvB,iBAAiB;IAqE/B,OAAO,CAAC,qBAAqB;IA8C7B,OAAO,CAAC,uBAAuB;IA4D/B,OAAO,CAAC,wBAAwB;IAkChC,OAAO,CAAC,iBAAiB;YAoDX,mBAAmB;YAoEnB,qBAAqB;IAS7B,OAAO,CAAC,SAAS,EAAE,GAAG,GAAG,OAAO,CAAC,IAAI,CAAC;YAS9B,aAAa;YAcb,iBAAiB;YAoBjB,WAAW;YAwBX,eAAe;YAqBf,mBAAmB;YAwBnB,yBAAyB;IA4CvC,OAAO,CAAC,kBAAkB;YAiBZ,gBAAgB;YA6HhB,2BAA2B;YAiE3B,2BAA2B;IAyEnC,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;IA0BpB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAuBhC"}
238 dist/mcp/server.js (vendored)
@@ -49,7 +49,6 @@ const workflow_examples_1 = require("./workflow-examples");
const logger_1 = require("../utils/logger");
const node_repository_1 = require("../database/node-repository");
const database_adapter_1 = require("../database/database-adapter");
const shared_database_1 = require("../database/shared-database");
const property_filter_1 = require("../services/property-filter");
const task_templates_1 = require("../services/task-templates");
const config_validator_1 = require("../services/config-validator");
@@ -81,9 +80,6 @@ class N8NDocumentationMCPServer {
this.previousToolTimestamp = Date.now();
this.earlyLogger = null;
this.disabledToolsCache = null;
this.useSharedDatabase = false;
this.sharedDbState = null;
this.isShutdown = false;
this.dbHealthChecked = false;
this.instanceContext = instanceContext;
this.earlyLogger = earlyLogger || null;
@@ -153,22 +149,10 @@ class N8NDocumentationMCPServer {
this.setupHandlers();
}
async close() {
try {
await this.initialized;
}
catch (error) {
logger_1.logger.debug('Initialization had failed, proceeding with cleanup', {
error: error instanceof Error ? error.message : String(error)
});
}
try {
await this.server.close();
this.cache.destroy();
if (this.useSharedDatabase && this.sharedDbState) {
(0, shared_database_1.releaseSharedDatabase)(this.sharedDbState);
logger_1.logger.debug('Released shared database reference');
}
else if (this.db) {
if (this.db) {
try {
this.db.close();
}
@@ -182,7 +166,6 @@ class N8NDocumentationMCPServer {
this.repository = null;
this.templateService = null;
this.earlyLogger = null;
this.sharedDbState = null;
}
catch (error) {
logger_1.logger.warn('Error closing MCP server', { error: error instanceof Error ? error.message : String(error) });
@@ -194,27 +177,17 @@ class N8NDocumentationMCPServer {
this.earlyLogger.logCheckpoint(startup_checkpoints_1.STARTUP_CHECKPOINTS.DATABASE_CONNECTING);
}
logger_1.logger.debug('Database initialization starting...', { dbPath });
this.db = await (0, database_adapter_1.createDatabaseAdapter)(dbPath);
logger_1.logger.debug('Database adapter created');
if (dbPath === ':memory:') {
this.db = await (0, database_adapter_1.createDatabaseAdapter)(dbPath);
logger_1.logger.debug('Database adapter created (in-memory mode)');
await this.initializeInMemorySchema();
logger_1.logger.debug('In-memory schema initialized');
this.repository = new node_repository_1.NodeRepository(this.db);
this.templateService = new template_service_1.TemplateService(this.db);
enhanced_config_validator_1.EnhancedConfigValidator.initializeSimilarityServices(this.repository);
this.useSharedDatabase = false;
}
else {
const sharedState = await (0, shared_database_1.getSharedDatabase)(dbPath);
this.db = sharedState.db;
this.repository = sharedState.repository;
this.templateService = sharedState.templateService;
this.sharedDbState = sharedState;
this.useSharedDatabase = true;
logger_1.logger.debug('Using shared database connection');
}
this.repository = new node_repository_1.NodeRepository(this.db);
logger_1.logger.debug('Node repository initialized');
this.templateService = new template_service_1.TemplateService(this.db);
logger_1.logger.debug('Template service initialized');
enhanced_config_validator_1.EnhancedConfigValidator.initializeSimilarityServices(this.repository);
logger_1.logger.debug('Similarity services initialized');
if (this.earlyLogger) {
this.earlyLogger.logCheckpoint(startup_checkpoints_1.STARTUP_CHECKPOINTS.DATABASE_CONNECTED);
@@ -777,11 +750,7 @@ class N8NDocumentationMCPServer {
|
||||
case 'search_nodes':
|
||||
this.validateToolParams(name, args, ['query']);
|
||||
const limit = args.limit !== undefined ? Number(args.limit) || 20 : 20;
|
||||
return this.searchNodes(args.query, limit, {
|
||||
mode: args.mode,
|
||||
includeExamples: args.includeExamples,
|
||||
source: args.source
|
||||
});
|
||||
return this.searchNodes(args.query, limit, { mode: args.mode, includeExamples: args.includeExamples });
|
||||
case 'get_node':
|
||||
this.validateToolParams(name, args, ['nodeType']);
|
||||
if (args.mode === 'docs') {
|
||||
@@ -1120,19 +1089,6 @@ class N8NDocumentationMCPServer {
|
||||
}
|
||||
}
|
||||
try {
|
||||
let sourceFilter = '';
|
||||
const sourceValue = options?.source || 'all';
|
||||
switch (sourceValue) {
|
||||
case 'core':
|
||||
sourceFilter = 'AND n.is_community = 0';
|
||||
break;
|
||||
case 'community':
|
||||
sourceFilter = 'AND n.is_community = 1';
|
||||
break;
|
||||
case 'verified':
|
||||
sourceFilter = 'AND n.is_community = 1 AND n.is_verified = 1';
|
||||
break;
|
||||
}
|
||||
const nodes = this.db.prepare(`
|
||||
SELECT
|
||||
n.*,
|
||||
@@ -1140,7 +1096,6 @@ class N8NDocumentationMCPServer {
|
||||
FROM nodes n
|
||||
JOIN nodes_fts ON n.rowid = nodes_fts.rowid
|
||||
WHERE nodes_fts MATCH ?
|
||||
${sourceFilter}
|
||||
ORDER BY
|
||||
CASE
|
||||
WHEN LOWER(n.display_name) = LOWER(?) THEN 0
|
||||
@@ -1173,28 +1128,15 @@ class N8NDocumentationMCPServer {
|
||||
}
|
||||
const result = {
|
||||
query,
|
||||
results: scoredNodes.map(node => {
|
||||
const nodeResult = {
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: (0, node_utils_1.getWorkflowNodeType)(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name,
|
||||
relevance: this.calculateRelevance(node, cleanedQuery)
|
||||
};
|
||||
if (node.is_community === 1) {
|
||||
nodeResult.isCommunity = true;
|
||||
nodeResult.isVerified = node.is_verified === 1;
|
||||
if (node.author_name) {
|
||||
nodeResult.authorName = node.author_name;
|
||||
}
|
||||
if (node.npm_downloads) {
|
||||
nodeResult.npmDownloads = node.npm_downloads;
|
||||
}
|
||||
}
|
||||
return nodeResult;
|
||||
}),
|
||||
results: scoredNodes.map(node => ({
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: (0, node_utils_1.getWorkflowNodeType)(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name,
|
||||
relevance: this.calculateRelevance(node, cleanedQuery)
|
||||
})),
|
||||
totalCount: scoredNodes.length
|
||||
};
|
||||
if (mode !== 'OR') {
|
||||
@@ -1356,51 +1298,24 @@ class N8NDocumentationMCPServer {
|
||||
async searchNodesLIKE(query, limit, options) {
|
||||
if (!this.db)
|
||||
throw new Error('Database not initialized');
|
||||
let sourceFilter = '';
|
||||
const sourceValue = options?.source || 'all';
|
||||
switch (sourceValue) {
|
||||
case 'core':
|
||||
sourceFilter = 'AND is_community = 0';
|
||||
break;
|
||||
case 'community':
|
||||
sourceFilter = 'AND is_community = 1';
|
||||
break;
|
||||
case 'verified':
|
||||
sourceFilter = 'AND is_community = 1 AND is_verified = 1';
|
||||
break;
|
||||
}
|
||||
if (query.startsWith('"') && query.endsWith('"')) {
|
||||
const exactPhrase = query.slice(1, -1);
|
||||
const nodes = this.db.prepare(`
|
||||
SELECT * FROM nodes
|
||||
WHERE (node_type LIKE ? OR display_name LIKE ? OR description LIKE ?)
|
||||
${sourceFilter}
|
||||
WHERE node_type LIKE ? OR display_name LIKE ? OR description LIKE ?
|
||||
LIMIT ?
|
||||
`).all(`%${exactPhrase}%`, `%${exactPhrase}%`, `%${exactPhrase}%`, limit * 3);
|
||||
const rankedNodes = this.rankSearchResults(nodes, exactPhrase, limit);
|
||||
const result = {
|
||||
query,
|
||||
results: rankedNodes.map(node => {
|
||||
const nodeResult = {
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: (0, node_utils_1.getWorkflowNodeType)(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name
|
||||
};
|
||||
if (node.is_community === 1) {
|
||||
nodeResult.isCommunity = true;
|
||||
nodeResult.isVerified = node.is_verified === 1;
|
||||
if (node.author_name) {
|
||||
nodeResult.authorName = node.author_name;
|
||||
}
|
||||
if (node.npm_downloads) {
|
||||
nodeResult.npmDownloads = node.npm_downloads;
|
||||
}
|
||||
}
|
||||
return nodeResult;
|
||||
}),
|
||||
results: rankedNodes.map(node => ({
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: (0, node_utils_1.getWorkflowNodeType)(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name
|
||||
})),
|
||||
totalCount: rankedNodes.length
|
||||
};
|
||||
if (options?.includeExamples) {
|
||||
@@ -1439,35 +1354,21 @@ class N8NDocumentationMCPServer {
|
||||
const params = words.flatMap(w => [`%${w}%`, `%${w}%`, `%${w}%`]);
|
||||
params.push(limit * 3);
|
||||
const nodes = this.db.prepare(`
|
||||
SELECT DISTINCT * FROM nodes
|
||||
WHERE (${conditions})
|
||||
${sourceFilter}
|
||||
SELECT DISTINCT * FROM nodes
|
||||
WHERE ${conditions}
|
||||
LIMIT ?
|
||||
`).all(...params);
|
||||
const rankedNodes = this.rankSearchResults(nodes, query, limit);
|
||||
const result = {
|
||||
query,
|
||||
results: rankedNodes.map(node => {
|
||||
const nodeResult = {
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: (0, node_utils_1.getWorkflowNodeType)(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name
|
||||
};
|
||||
if (node.is_community === 1) {
|
||||
nodeResult.isCommunity = true;
|
||||
nodeResult.isVerified = node.is_verified === 1;
|
||||
if (node.author_name) {
|
||||
nodeResult.authorName = node.author_name;
|
||||
}
|
||||
if (node.npm_downloads) {
|
||||
nodeResult.npmDownloads = node.npm_downloads;
|
||||
}
|
||||
}
|
||||
return nodeResult;
|
||||
}),
|
||||
results: rankedNodes.map(node => ({
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: (0, node_utils_1.getWorkflowNodeType)(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name
|
||||
})),
|
||||
totalCount: rankedNodes.length
|
||||
};
|
||||
if (options?.includeExamples) {
|
||||
@@ -1644,16 +1545,14 @@ class N8NDocumentationMCPServer {
|
||||
throw new Error('Database not initialized');
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
let node = this.db.prepare(`
|
||||
SELECT node_type, display_name, documentation, description,
|
||||
ai_documentation_summary, ai_summary_generated_at
|
||||
FROM nodes
|
||||
SELECT node_type, display_name, documentation, description
|
||||
FROM nodes
|
||||
WHERE node_type = ?
|
||||
`).get(normalizedType);
|
||||
if (!node && normalizedType !== nodeType) {
|
||||
node = this.db.prepare(`
|
||||
SELECT node_type, display_name, documentation, description,
|
||||
ai_documentation_summary, ai_summary_generated_at
|
||||
FROM nodes
|
||||
SELECT node_type, display_name, documentation, description
|
||||
FROM nodes
|
||||
WHERE node_type = ?
|
||||
`).get(nodeType);
|
||||
}
|
||||
@@ -1661,9 +1560,8 @@ class N8NDocumentationMCPServer {
|
||||
const alternatives = (0, node_utils_1.getNodeTypeAlternatives)(normalizedType);
|
||||
for (const alt of alternatives) {
|
||||
node = this.db.prepare(`
|
||||
SELECT node_type, display_name, documentation, description,
|
||||
ai_documentation_summary, ai_summary_generated_at
|
||||
FROM nodes
|
||||
SELECT node_type, display_name, documentation, description
|
||||
FROM nodes
|
||||
WHERE node_type = ?
|
||||
`).get(alt);
|
||||
if (node)
|
||||
@@ -1673,9 +1571,6 @@ class N8NDocumentationMCPServer {
|
||||
if (!node) {
|
||||
throw new Error(`Node ${nodeType} not found`);
|
||||
}
|
||||
const aiDocSummary = node.ai_documentation_summary
|
||||
? this.safeJsonParse(node.ai_documentation_summary, null)
|
||||
: null;
|
||||
if (!node.documentation) {
|
||||
const essentials = await this.getNodeEssentials(nodeType);
|
||||
return {
|
||||
@@ -1695,9 +1590,7 @@ ${essentials?.commonProperties?.length > 0 ?
|
||||
## Note
|
||||
Full documentation is being prepared. For now, use get_node_essentials for configuration help.
|
||||
`,
|
||||
hasDocumentation: false,
|
||||
aiDocumentationSummary: aiDocSummary,
|
||||
aiSummaryGeneratedAt: node.ai_summary_generated_at || null,
|
||||
hasDocumentation: false
|
||||
};
|
||||
}
|
||||
return {
|
||||
@@ -1705,18 +1598,8 @@ Full documentation is being prepared. For now, use get_node_essentials for confi
|
||||
displayName: node.display_name || 'Unknown Node',
|
||||
documentation: node.documentation,
|
||||
hasDocumentation: true,
|
||||
aiDocumentationSummary: aiDocSummary,
|
||||
aiSummaryGeneratedAt: node.ai_summary_generated_at || null,
|
||||
};
|
||||
}
|
||||
safeJsonParse(json, defaultValue = null) {
|
||||
try {
|
||||
return JSON.parse(json);
|
||||
}
|
||||
catch {
|
||||
return defaultValue;
|
||||
}
|
||||
}
|
||||
async getDatabaseStatistics() {
|
||||
await this.ensureInitialized();
|
||||
if (!this.db)
|
||||
@@ -2916,26 +2799,7 @@ Full documentation is being prepared. For now, use get_node_essentials for confi
|
||||
process.stdin.resume();
|
||||
}
|
||||
async shutdown() {
|
||||
if (this.isShutdown) {
|
||||
logger_1.logger.debug('Shutdown already called, skipping');
|
||||
return;
|
||||
}
|
||||
this.isShutdown = true;
|
||||
logger_1.logger.info('Shutting down MCP server...');
|
||||
try {
|
||||
await this.initialized;
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.debug('Initialization had failed, proceeding with cleanup', {
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
try {
|
||||
await this.server.close();
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.error('Error closing MCP server:', error);
|
||||
}
|
||||
if (this.cache) {
|
||||
try {
|
||||
this.cache.destroy();
|
||||
@@ -2945,29 +2809,15 @@ Full documentation is being prepared. For now, use get_node_essentials for confi
|
||||
logger_1.logger.error('Error cleaning up cache:', error);
|
||||
}
|
||||
}
|
||||
if (this.useSharedDatabase && this.sharedDbState) {
|
||||
if (this.db) {
|
||||
try {
|
||||
(0, shared_database_1.releaseSharedDatabase)(this.sharedDbState);
|
||||
logger_1.logger.info('Released shared database reference');
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.error('Error releasing shared database:', error);
|
||||
}
|
||||
}
|
||||
else if (this.db) {
|
||||
try {
|
||||
this.db.close();
|
||||
await this.db.close();
|
||||
logger_1.logger.info('Database connection closed');
|
||||
}
|
||||
catch (error) {
|
||||
logger_1.logger.error('Error closing database:', error);
|
||||
}
|
||||
}
|
||||
this.db = null;
|
||||
this.repository = null;
|
||||
this.templateService = null;
|
||||
this.earlyLogger = null;
|
||||
this.sharedDbState = null;
|
||||
}
|
||||
}
|
||||
exports.N8NDocumentationMCPServer = N8NDocumentationMCPServer;
|
||||
|
||||
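The hunks above splice an optional source filter into both the FTS5 and LIKE search paths. A minimal sketch of that pattern, written against an assumed better-sqlite3-style adapter (the function and type names below are illustrative, not the project's actual API):

type NodeSource = 'all' | 'core' | 'community' | 'verified';

// Builds the extra SQL condition appended after an existing WHERE clause.
function buildSourceFilter(source: NodeSource = 'all'): string {
  switch (source) {
    case 'core': return 'AND is_community = 0';
    case 'community': return 'AND is_community = 1';
    case 'verified': return 'AND is_community = 1 AND is_verified = 1';
    default: return ''; // 'all' adds no constraint
  }
}

interface PreparedStatement { all(...params: unknown[]): unknown[]; }
interface SqliteLikeDb { prepare(sql: string): PreparedStatement; }

// LIKE-based fallback search with the source filter applied, mirroring the searchNodesLIKE change above.
function searchNodesByKeyword(db: SqliteLikeDb, query: string, limit: number, source: NodeSource): unknown[] {
  const like = `%${query}%`;
  // Parentheses around the OR group keep the appended AND condition scoped correctly.
  return db.prepare(`
    SELECT * FROM nodes
    WHERE (node_type LIKE ? OR display_name LIKE ? OR description LIKE ?)
    ${buildSourceFilter(source)}
    LIMIT ?
  `).all(like, like, like, limit);
}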
2
dist/mcp/server.js.map
vendored
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
||||
{"version":3,"file":"search-nodes.d.ts","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/discovery/search-nodes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAE7C,eAAO,MAAM,cAAc,EAAE,iBAiE5B,CAAC"}
|
||||
{"version":3,"file":"search-nodes.d.ts","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/discovery/search-nodes.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAE7C,eAAO,MAAM,cAAc,EAAE,iBAmD5B,CAAC"}
|
||||
34
dist/mcp/tool-docs/discovery/search-nodes.js
vendored
@@ -5,64 +5,50 @@ exports.searchNodesDoc = {
|
||||
name: 'search_nodes',
|
||||
category: 'discovery',
|
||||
essentials: {
|
||||
description: 'Text search across node names and descriptions. Returns most relevant nodes first, with frequently-used nodes (HTTP Request, Webhook, Set, Code, Slack) prioritized in results. Searches all 800+ nodes including 300+ verified community nodes.',
|
||||
keyParameters: ['query', 'mode', 'limit', 'source', 'includeExamples'],
|
||||
description: 'Text search across node names and descriptions. Returns most relevant nodes first, with frequently-used nodes (HTTP Request, Webhook, Set, Code, Slack) prioritized in results. Searches all 500+ nodes in the database.',
|
||||
keyParameters: ['query', 'mode', 'limit'],
|
||||
example: 'search_nodes({query: "webhook"})',
|
||||
performance: '<20ms even for complex queries',
|
||||
tips: [
|
||||
'OR mode (default): Matches any search word',
|
||||
'AND mode: Requires all words present',
|
||||
'FUZZY mode: Handles typos and spelling errors',
|
||||
'Use quotes for exact phrases: "google sheets"',
|
||||
'Use source="community" to search only community nodes',
|
||||
'Use source="verified" for verified community nodes only'
|
||||
'Use quotes for exact phrases: "google sheets"'
|
||||
]
|
||||
},
|
||||
full: {
|
||||
description: 'Full-text search engine for n8n nodes using SQLite FTS5. Searches across node names, descriptions, and aliases. Results are ranked by relevance with commonly-used nodes given priority. Includes 500+ core nodes and 300+ community nodes. Common core nodes include: HTTP Request, Webhook, Set, Code, IF, Switch, Merge, SplitInBatches, Slack, Google Sheets. Community nodes include verified integrations like BrightData, ScrapingBee, CraftMyPDF, and more.',
|
||||
description: 'Full-text search engine for n8n nodes using SQLite FTS5. Searches across node names, descriptions, and aliases. Results are ranked by relevance with commonly-used nodes given priority. Common nodes include: HTTP Request, Webhook, Set, Code, IF, Switch, Merge, SplitInBatches, Slack, Google Sheets.',
|
||||
parameters: {
|
||||
query: { type: 'string', description: 'Search keywords. Use quotes for exact phrases like "google sheets"', required: true },
|
||||
limit: { type: 'number', description: 'Maximum results to return. Default: 20, Max: 100', required: false },
|
||||
mode: { type: 'string', description: 'Search mode: "OR" (any word matches, default), "AND" (all words required), "FUZZY" (typo-tolerant)', required: false },
|
||||
source: { type: 'string', description: 'Filter by node source: "all" (default, everything), "core" (n8n base nodes only), "community" (community nodes only), "verified" (verified community nodes only)', required: false },
|
||||
includeExamples: { type: 'boolean', description: 'Include top 2 real-world configuration examples from popular templates for each node. Default: false. Adds ~200-400 tokens per node.', required: false }
|
||||
mode: { type: 'string', description: 'Search mode: "OR" (any word matches, default), "AND" (all words required), "FUZZY" (typo-tolerant)', required: false }
|
||||
},
|
||||
returns: 'Array of node objects sorted by relevance score. Each object contains: nodeType, displayName, description, category, relevance score. For community nodes, also includes: isCommunity (boolean), isVerified (boolean), authorName (string), npmDownloads (number). Common nodes appear first when relevance is similar.',
|
||||
returns: 'Array of node objects sorted by relevance score. Each object contains: nodeType, displayName, description, category, relevance score. Common nodes appear first when relevance is similar.',
|
||||
examples: [
|
||||
'search_nodes({query: "webhook"}) - Returns Webhook node as top result',
|
||||
'search_nodes({query: "database"}) - Returns MySQL, Postgres, MongoDB, Redis, etc.',
|
||||
'search_nodes({query: "google sheets", mode: "AND"}) - Requires both words',
|
||||
'search_nodes({query: "slak", mode: "FUZZY"}) - Finds Slack despite typo',
|
||||
'search_nodes({query: "http api"}) - Finds HTTP Request, GraphQL, REST nodes',
|
||||
'search_nodes({query: "transform data"}) - Finds Set, Code, Function, Item Lists nodes',
|
||||
'search_nodes({query: "scraping", source: "community"}) - Find community scraping nodes',
|
||||
'search_nodes({query: "pdf", source: "verified"}) - Find verified community PDF nodes',
|
||||
'search_nodes({query: "brightdata"}) - Find BrightData community node',
|
||||
'search_nodes({query: "slack", includeExamples: true}) - Get Slack with template examples'
|
||||
'search_nodes({query: "transform data"}) - Finds Set, Code, Function, Item Lists nodes'
|
||||
],
|
||||
useCases: [
|
||||
'Finding nodes when you know partial names',
|
||||
'Discovering nodes by functionality (e.g., "email", "database", "transform")',
|
||||
'Handling user typos in node names',
|
||||
'Finding all nodes related to a service (e.g., "google", "aws", "microsoft")',
|
||||
'Discovering community integrations for specific services',
|
||||
'Finding verified community nodes for enhanced trust'
|
||||
'Finding all nodes related to a service (e.g., "google", "aws", "microsoft")'
|
||||
],
|
||||
performance: '<20ms for simple queries, <50ms for complex FUZZY searches. Uses FTS5 index for speed',
|
||||
bestPractices: [
|
||||
'Start with single keywords for broadest results',
|
||||
'Use FUZZY mode when users might misspell node names',
|
||||
'AND mode works best for 2-3 word searches',
|
||||
'Combine with get_node after finding the right node',
|
||||
'Use source="verified" when recommending community nodes for production',
|
||||
'Check isVerified flag to ensure community node quality'
|
||||
'Combine with get_node after finding the right node'
|
||||
],
|
||||
pitfalls: [
|
||||
'AND mode searches all fields (name, description) not just node names',
|
||||
'FUZZY mode with very short queries (1-2 chars) may return unexpected results',
|
||||
'Exact matches in quotes are case-sensitive',
|
||||
'Community nodes require npm installation (n8n npm install <package-name>)',
|
||||
'Unverified community nodes (isVerified: false) may have limited support'
|
||||
'Exact matches in quotes are case-sensitive'
|
||||
],
|
||||
relatedTools: ['get_node to configure found nodes', 'search_templates to find workflow examples', 'validate_node to check configurations']
|
||||
}
|
||||
|
||||
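For reference, a community-node entry in the search results described above carries the extra fields added in the server.js hunks; the values here are invented and only the field names come from the diff:

const exampleCommunityResult = {
  nodeType: 'n8n-nodes-example.exampleNode',   // placeholder identifier
  displayName: 'Example Node',
  description: 'Illustrative community integration',
  category: 'transform',
  package: 'n8n-nodes-example',
  relevance: 0.9,
  // Only present when the node row has is_community = 1:
  isCommunity: true,
  isVerified: true,          // mirrors is_verified = 1
  authorName: 'Example Author',
  npmDownloads: 12345,
};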
@@ -1 +1 @@
|
||||
{"version":3,"file":"search-nodes.js","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/discovery/search-nodes.ts"],"names":[],"mappings":";;;AAEa,QAAA,cAAc,GAAsB;IAC/C,IAAI,EAAE,cAAc;IACpB,QAAQ,EAAE,WAAW;IACrB,UAAU,EAAE;QACV,WAAW,EAAE,kPAAkP;QAC/P,aAAa,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,iBAAiB,CAAC;QACtE,OAAO,EAAE,kCAAkC;QAC3C,WAAW,EAAE,gCAAgC;QAC7C,IAAI,EAAE;YACJ,4CAA4C;YAC5C,sCAAsC;YACtC,+CAA+C;YAC/C,+CAA+C;YAC/C,uDAAuD;YACvD,yDAAyD;SAC1D;KACF;IACD,IAAI,EAAE;QACJ,WAAW,EAAE,qcAAqc;QACld,UAAU,EAAE;YACV,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oEAAoE,EAAE,QAAQ,EAAE,IAAI,EAAE;YAC5H,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,kDAAkD,EAAE,QAAQ,EAAE,KAAK,EAAE;YAC3G,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oGAAoG,EAAE,QAAQ,EAAE,KAAK,EAAE;YAC5J,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,kKAAkK,EAAE,QAAQ,EAAE,KAAK,EAAE;YAC5N,eAAe,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,WAAW,EAAE,sIAAsI,EAAE,QAAQ,EAAE,KAAK,EAAE;SAC3M;QACD,OAAO,EAAE,yTAAyT;QAClU,QAAQ,EAAE;YACR,uEAAuE;YACvE,mFAAmF;YACnF,2EAA2E;YAC3E,yEAAyE;YACzE,6EAA6E;YAC7E,uFAAuF;YACvF,wFAAwF;YACxF,sFAAsF;YACtF,sEAAsE;YACtE,0FAA0F;SAC3F;QACD,QAAQ,EAAE;YACR,2CAA2C;YAC3C,6EAA6E;YAC7E,mCAAmC;YACnC,6EAA6E;YAC7E,0DAA0D;YAC1D,qDAAqD;SACtD;QACD,WAAW,EAAE,uFAAuF;QACpG,aAAa,EAAE;YACb,iDAAiD;YACjD,qDAAqD;YACrD,2CAA2C;YAC3C,oDAAoD;YACpD,wEAAwE;YACxE,wDAAwD;SACzD;QACD,QAAQ,EAAE;YACR,sEAAsE;YACtE,8EAA8E;YAC9E,4CAA4C;YAC5C,2EAA2E;YAC3E,yEAAyE;SAC1E;QACD,YAAY,EAAE,CAAC,mCAAmC,EAAE,4CAA4C,EAAE,uCAAuC,CAAC;KAC3I;CACF,CAAC"}
|
||||
{"version":3,"file":"search-nodes.js","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/discovery/search-nodes.ts"],"names":[],"mappings":";;;AAEa,QAAA,cAAc,GAAsB;IAC/C,IAAI,EAAE,cAAc;IACpB,QAAQ,EAAE,WAAW;IACrB,UAAU,EAAE;QACV,WAAW,EAAE,0NAA0N;QACvO,aAAa,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC;QACzC,OAAO,EAAE,kCAAkC;QAC3C,WAAW,EAAE,gCAAgC;QAC7C,IAAI,EAAE;YACJ,4CAA4C;YAC5C,sCAAsC;YACtC,+CAA+C;YAC/C,+CAA+C;SAChD;KACF;IACD,IAAI,EAAE;QACJ,WAAW,EAAE,2SAA2S;QACxT,UAAU,EAAE;YACV,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oEAAoE,EAAE,QAAQ,EAAE,IAAI,EAAE;YAC5H,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,kDAAkD,EAAE,QAAQ,EAAE,KAAK,EAAE;YAC3G,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oGAAoG,EAAE,QAAQ,EAAE,KAAK,EAAE;SAC7J;QACD,OAAO,EAAE,4LAA4L;QACrM,QAAQ,EAAE;YACR,uEAAuE;YACvE,mFAAmF;YACnF,2EAA2E;YAC3E,yEAAyE;YACzE,6EAA6E;YAC7E,uFAAuF;SACxF;QACD,QAAQ,EAAE;YACR,2CAA2C;YAC3C,6EAA6E;YAC7E,mCAAmC;YACnC,6EAA6E;SAC9E;QACD,WAAW,EAAE,uFAAuF;QACpG,aAAa,EAAE;YACb,iDAAiD;YACjD,qDAAqD;YACrD,2CAA2C;YAC3C,oDAAoD;SACrD;QACD,QAAQ,EAAE;YACR,sEAAsE;YACtE,8EAA8E;YAC9E,4CAA4C;SAC7C;QACD,YAAY,EAAE,CAAC,mCAAmC,EAAE,4CAA4C,EAAE,uCAAuC,CAAC;KAC3I;CACF,CAAC"}
|
||||
2
dist/mcp/tool-docs/templates/get-template.js
vendored
@@ -43,7 +43,7 @@ exports.getTemplateDoc = {
- url: Link to template on n8n.io
- workflow: Complete workflow JSON with structure:
- nodes: Array of node objects (id, name, type, typeVersion, position, parameters)
- connections: Object mapping source node names to targets
- connections: Object mapping source nodes to targets
- settings: Workflow configuration (timezone, error handling, etc.)
- usage: Instructions for using the workflow`,
examples: [
|
||||
@@ -21,7 +21,7 @@ exports.n8nCreateWorkflowDoc = {
|
||||
parameters: {
|
||||
name: { type: 'string', required: true, description: 'Workflow name' },
|
||||
nodes: { type: 'array', required: true, description: 'Array of nodes with id, name, type, typeVersion, position, parameters' },
|
||||
connections: { type: 'object', required: true, description: 'Node connections. Keys are source node names (not IDs)' },
|
||||
connections: { type: 'object', required: true, description: 'Node connections. Keys are source node IDs' },
|
||||
settings: { type: 'object', description: 'Optional workflow settings (timezone, error handling, etc.)' }
|
||||
},
|
||||
returns: 'Minimal summary (id, name, active, nodeCount) for token efficiency. Use n8n_get_workflow with mode "structure" to verify current state if needed.',
|
||||
@@ -56,8 +56,8 @@ n8n_create_workflow({
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
"Webhook": {
|
||||
"main": [[{node: "Slack", type: "main", index: 0}]]
|
||||
"webhook_1": {
|
||||
"main": [[{node: "slack_1", type: "main", index: 0}]]
|
||||
}
|
||||
}
|
||||
})`,
|
||||
|
||||
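The two example variants above differ only in whether the connections object is keyed by the source node's name field or by its id. A minimal payload in the name-keyed style, with placeholder ids, positions, and typeVersion values:

const createWorkflowArgs = {
  name: 'Webhook to Slack',
  nodes: [
    { id: 'webhook_1', name: 'Webhook', type: 'n8n-nodes-base.webhook',
      typeVersion: 2, position: [0, 0], parameters: { path: 'demo' } },   // values illustrative
    { id: 'slack_1', name: 'Slack', type: 'n8n-nodes-base.slack',
      typeVersion: 2, position: [260, 0], parameters: {} },
  ],
  connections: {
    // Key is the source node's name; the id-keyed variant would use 'webhook_1' here instead.
    Webhook: { main: [[{ node: 'Slack', type: 'main', index: 0 }]] },
  },
};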
@@ -1 +1 @@
|
||||
{"version":3,"file":"n8n-create-workflow.js","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/workflow_management/n8n-create-workflow.ts"],"names":[],"mappings":";;;AAEa,QAAA,oBAAoB,GAAsB;IACrD,IAAI,EAAE,qBAAqB;IAC3B,QAAQ,EAAE,qBAAqB;IAC/B,UAAU,EAAE;QACV,WAAW,EAAE,sGAAsG;QACnH,aAAa,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,aAAa,CAAC;QAC/C,OAAO,EAAE,0EAA0E;QACnF,WAAW,EAAE,mBAAmB;QAChC,IAAI,EAAE;YACJ,2BAA2B;YAC3B,+BAA+B;YAC/B,uCAAuC;YACvC,kFAAkF;SACnF;KACF;IACD,IAAI,EAAE;QACJ,WAAW,EAAE,uLAAuL;QACpM,UAAU,EAAE;YACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,EAAE,WAAW,EAAE,eAAe,EAAE;YACtE,KAAK,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,IAAI,EAAE,WAAW,EAAE,uEAAuE,EAAE;YAC9H,WAAW,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,EAAE,WAAW,EAAE,wDAAwD,EAAE;YACtH,QAAQ,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,6DAA6D,EAAE;SACzG;QACD,OAAO,EAAE,mJAAmJ;QAC5J,QAAQ,EAAE;YACR;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAkCH;YACG;;;;;;;;;;;GAWH;SACE;QACD,QAAQ,EAAE;YACR,4BAA4B;YAC5B,4BAA4B;YAC5B,2BAA2B;YAC3B,qBAAqB;SACtB;QACD,WAAW,EAAE,oEAAoE;QACjF,aAAa,EAAE;YACb,uCAAuC;YACvC,qBAAqB;YACrB,gCAAgC;YAChC,6BAA6B;SAC9B;QACD,QAAQ,EAAE;YACR,0GAA0G;YAC1G,gEAAgE;YAChE,yCAAyC;YACzC,kDAAkD;YAClD,4EAA4E;YAC5E,yIAAyI;YACzI,uIAAuI;SACxI;QACD,YAAY,EAAE,CAAC,mBAAmB,EAAE,6BAA6B,EAAE,mBAAmB,CAAC;KACxF;CACF,CAAC"}
|
||||
{"version":3,"file":"n8n-create-workflow.js","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/workflow_management/n8n-create-workflow.ts"],"names":[],"mappings":";;;AAEa,QAAA,oBAAoB,GAAsB;IACrD,IAAI,EAAE,qBAAqB;IAC3B,QAAQ,EAAE,qBAAqB;IAC/B,UAAU,EAAE;QACV,WAAW,EAAE,sGAAsG;QACnH,aAAa,EAAE,CAAC,MAAM,EAAE,OAAO,EAAE,aAAa,CAAC;QAC/C,OAAO,EAAE,0EAA0E;QACnF,WAAW,EAAE,mBAAmB;QAChC,IAAI,EAAE;YACJ,2BAA2B;YAC3B,+BAA+B;YAC/B,uCAAuC;YACvC,kFAAkF;SACnF;KACF;IACD,IAAI,EAAE;QACJ,WAAW,EAAE,uLAAuL;QACpM,UAAU,EAAE;YACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,EAAE,WAAW,EAAE,eAAe,EAAE;YACtE,KAAK,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,IAAI,EAAE,WAAW,EAAE,uEAAuE,EAAE;YAC9H,WAAW,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,EAAE,WAAW,EAAE,4CAA4C,EAAE;YAC1G,QAAQ,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,6DAA6D,EAAE;SACzG;QACD,OAAO,EAAE,mJAAmJ;QAC5J,QAAQ,EAAE;YACR;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAkCH;YACG;;;;;;;;;;;GAWH;SACE;QACD,QAAQ,EAAE;YACR,4BAA4B;YAC5B,4BAA4B;YAC5B,2BAA2B;YAC3B,qBAAqB;SACtB;QACD,WAAW,EAAE,oEAAoE;QACjF,aAAa,EAAE;YACb,uCAAuC;YACvC,qBAAqB;YACrB,gCAAgC;YAChC,6BAA6B;SAC9B;QACD,QAAQ,EAAE;YACR,0GAA0G;YAC1G,gEAAgE;YAChE,yCAAyC;YACzC,kDAAkD;YAClD,4EAA4E;YAC5E,yIAAyI;YACzI,uIAAuI;SACxI;QACD,YAAY,EAAE,CAAC,mBAAmB,EAAE,6BAA6B,EAAE,mBAAmB,CAAC;KACxF;CACF,CAAC"}
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"n8n-executions.d.ts","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/workflow_management/n8n-executions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAE7C,eAAO,MAAM,gBAAgB,EAAE,iBAwG9B,CAAC"}
|
||||
{"version":3,"file":"n8n-executions.d.ts","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/workflow_management/n8n-executions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAE7C,eAAO,MAAM,gBAAgB,EAAE,iBA+E9B,CAAC"}
|
||||
@@ -6,14 +6,13 @@ exports.n8nExecutionsDoc = {
|
||||
category: 'workflow_management',
|
||||
essentials: {
|
||||
description: 'Manage workflow executions: get details, list, or delete. Unified tool for all execution operations.',
|
||||
keyParameters: ['action', 'id', 'workflowId', 'status', 'mode'],
|
||||
example: 'n8n_executions({action: "get", id: "exec_456", mode: "error"})',
|
||||
keyParameters: ['action', 'id', 'workflowId', 'status'],
|
||||
example: 'n8n_executions({action: "list", workflowId: "abc123", status: "error"})',
|
||||
performance: 'Fast (50-200ms)',
|
||||
tips: [
|
||||
'action="get": Get execution details by ID',
|
||||
'action="list": List executions with filters',
|
||||
'action="delete": Delete execution record',
|
||||
'Use mode="error" for efficient failure debugging (80-90% token savings)',
|
||||
'Use mode parameter for action=get to control detail level'
|
||||
]
|
||||
},
|
||||
@@ -27,26 +26,14 @@ exports.n8nExecutionsDoc = {
|
||||
- preview: Structure only, no data
|
||||
- summary: 2 items per node (default)
|
||||
- filtered: Custom items limit, optionally filter by node names
|
||||
- full: All execution data (can be very large)
|
||||
- error: Optimized for debugging failures - extracts error info, upstream context, and AI suggestions
|
||||
|
||||
**Error Mode Features:**
|
||||
- Extracts error message, type, and node configuration
|
||||
- Samples input data from upstream node (configurable limit)
|
||||
- Shows execution path leading to error
|
||||
- Provides AI-friendly fix suggestions based on error patterns
|
||||
- Token-efficient (80-90% smaller than full mode)`,
|
||||
- full: All execution data (can be very large)`,
|
||||
parameters: {
|
||||
action: { type: 'string', required: true, description: 'Operation: "get", "list", or "delete"' },
|
||||
id: { type: 'string', required: false, description: 'Execution ID (required for action=get or action=delete)' },
|
||||
mode: { type: 'string', required: false, description: 'For action=get: "preview", "summary" (default), "filtered", "full", "error"' },
|
||||
mode: { type: 'string', required: false, description: 'For action=get: "preview", "summary" (default), "filtered", "full"' },
|
||||
nodeNames: { type: 'array', required: false, description: 'For action=get with mode=filtered: Filter to specific nodes by name' },
|
||||
itemsLimit: { type: 'number', required: false, description: 'For action=get with mode=filtered: Items per node (0=structure, 2=default, -1=unlimited)' },
|
||||
includeInputData: { type: 'boolean', required: false, description: 'For action=get: Include input data in addition to output (default: false)' },
|
||||
errorItemsLimit: { type: 'number', required: false, description: 'For action=get with mode=error: Sample items from upstream (default: 2, max: 100)' },
|
||||
includeStackTrace: { type: 'boolean', required: false, description: 'For action=get with mode=error: Include full stack trace (default: false, shows truncated)' },
|
||||
includeExecutionPath: { type: 'boolean', required: false, description: 'For action=get with mode=error: Include execution path (default: true)' },
|
||||
fetchWorkflow: { type: 'boolean', required: false, description: 'For action=get with mode=error: Fetch workflow for accurate upstream detection (default: true)' },
|
||||
workflowId: { type: 'string', required: false, description: 'For action=list: Filter by workflow ID' },
|
||||
status: { type: 'string', required: false, description: 'For action=list: Filter by status ("success", "error", "waiting")' },
|
||||
limit: { type: 'number', required: false, description: 'For action=list: Number of results (1-100, default: 100)' },
|
||||
@@ -55,15 +42,10 @@ exports.n8nExecutionsDoc = {
|
||||
includeData: { type: 'boolean', required: false, description: 'For action=list: Include execution data (default: false)' }
|
||||
},
|
||||
returns: `Depends on action:
|
||||
- get (error mode): { errorInfo: { primaryError, upstreamContext, executionPath, suggestions }, summary }
|
||||
- get (other modes): Execution object with data based on mode
|
||||
- get: Execution object with data based on mode
|
||||
- list: { data: [...executions], nextCursor?: string }
|
||||
- delete: { success: boolean, message: string }`,
|
||||
examples: [
|
||||
'// Debug a failed execution (recommended for errors)\nn8n_executions({action: "get", id: "exec_456", mode: "error"})',
|
||||
'// Debug with more sample data from upstream\nn8n_executions({action: "get", id: "exec_456", mode: "error", errorItemsLimit: 5})',
|
||||
'// Debug with full stack trace\nn8n_executions({action: "get", id: "exec_456", mode: "error", includeStackTrace: true})',
|
||||
'// Debug without workflow fetch (faster but less accurate)\nn8n_executions({action: "get", id: "exec_456", mode: "error", fetchWorkflow: false})',
|
||||
'// List recent executions for a workflow\nn8n_executions({action: "list", workflowId: "abc123", limit: 10})',
|
||||
'// List failed executions\nn8n_executions({action: "list", status: "error"})',
|
||||
'// Get execution summary\nn8n_executions({action: "get", id: "exec_456"})',
|
||||
@@ -72,10 +54,7 @@ exports.n8nExecutionsDoc = {
|
||||
'// Delete an execution\nn8n_executions({action: "delete", id: "exec_456"})'
|
||||
],
|
||||
useCases: [
|
||||
'Debug workflow failures efficiently (mode=error) - 80-90% token savings',
|
||||
'Get AI suggestions for fixing common errors',
|
||||
'Analyze input data that caused failure',
|
||||
'Debug workflow failures with full data (mode=full)',
|
||||
'Debug workflow failures (get with mode=full)',
|
||||
'Monitor workflow health (list with status filter)',
|
||||
'Audit execution history',
|
||||
'Clean up old execution records',
|
||||
@@ -84,22 +63,18 @@ exports.n8nExecutionsDoc = {
|
||||
performance: `Response times:
|
||||
- list: 50-150ms depending on filters
|
||||
- get (preview/summary): 30-100ms
|
||||
- get (error): 50-200ms (includes optional workflow fetch)
|
||||
- get (full): 100-500ms+ depending on data size
|
||||
- delete: 30-80ms`,
|
||||
bestPractices: [
|
||||
'Use mode="error" for debugging failed executions - 80-90% token savings vs full',
|
||||
'Use mode="summary" (default) for quick inspection',
|
||||
'Use mode="summary" (default) for debugging - shows enough data',
|
||||
'Use mode="filtered" with nodeNames for large workflows',
|
||||
'Filter by workflowId when listing to reduce results',
|
||||
'Use cursor for pagination through large result sets',
|
||||
'Set fetchWorkflow=false if you already know the workflow structure',
|
||||
'Delete old executions to save storage'
|
||||
],
|
||||
pitfalls: [
|
||||
'Requires N8N_API_URL and N8N_API_KEY configured',
|
||||
'mode="full" can return very large responses for complex workflows',
|
||||
'mode="error" fetches workflow by default (adds ~50-100ms), disable with fetchWorkflow=false',
|
||||
'Execution must exist or returns 404',
|
||||
'Delete is permanent - cannot undo'
|
||||
],
|
||||
|
||||
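Putting the error-mode parameters documented above together, a debugging call would look roughly like this (the execution id is a placeholder):

const debugFailedRunArgs = {
  action: 'get',
  id: 'exec_456',           // placeholder execution id
  mode: 'error',            // error info, upstream context, and fix suggestions
  errorItemsLimit: 5,       // sample a few input items from the upstream node
  includeStackTrace: false, // truncated trace by default
  fetchWorkflow: true,      // fetch the workflow for accurate upstream detection
};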
@@ -1 +1 @@
|
||||
{"version":3,"file":"n8n-executions.js","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/workflow_management/n8n-executions.ts"],"names":[],"mappings":";;;AAEa,QAAA,gBAAgB,GAAsB;IACjD,IAAI,EAAE,gBAAgB;IACtB,QAAQ,EAAE,qBAAqB;IAC/B,UAAU,EAAE;QACV,WAAW,EAAE,sGAAsG;QACnH,aAAa,EAAE,CAAC,QAAQ,EAAE,IAAI,EAAE,YAAY,EAAE,QAAQ,EAAE,MAAM,CAAC;QAC/D,OAAO,EAAE,gEAAgE;QACzE,WAAW,EAAE,iBAAiB;QAC9B,IAAI,EAAE;YACJ,2CAA2C;YAC3C,6CAA6C;YAC7C,0CAA0C;YAC1C,yEAAyE;YACzE,2DAA2D;SAC5D;KACF;IACD,IAAI,EAAE;QACJ,WAAW,EAAE;;;;;;;;;;;;;;;;;kDAiBiC;QAC9C,UAAU,EAAE;YACV,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,EAAE,WAAW,EAAE,uCAAuC,EAAE;YAChG,EAAE,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,yDAAyD,EAAE;YAC/G,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,6EAA6E,EAAE;YACrI,SAAS,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,qEAAqE,EAAE;YACjI,UAAU,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,0FAA0F,EAAE;YACxJ,gBAAgB,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,2EAA2E,EAAE;YAChJ,eAAe,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,mFAAmF,EAAE;YACtJ,iBAAiB,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,4FAA4F,EAAE;YAClK,oBAAoB,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,wEAAwE,EAAE;YACjJ,aAAa,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,gGAAgG,EAAE;YAClK,UAAU,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,wCAAwC,EAAE;YACtG,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,mEAAmE,EAAE;YAC7H,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,0DAA0D,EAAE;YACnH,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,2DAA2D,EAAE;YACrH,SAAS,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,oDAAoD,EAAE;YACjH,WAAW,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,0DAA0D,EAAE;SAC3H;QACD,OAAO,EAAE;;;;gDAImC;QAC5C,QAAQ,EAAE;YACR,sHAAsH;YACtH,kIAAkI;YAClI,yHAAyH;YACzH,kJAAkJ;YAClJ,6GAA6G;YAC7G,8EAA8E;YAC9E,2EAA2E;YAC3E,2FAA2F;YAC3F,+IAA+I;YAC/I,4EAA4E;SAC7E;QACD,QAAQ,EAAE;YACR,yEAAyE;YACzE,6CAA6C;YAC7C,wCAAwC;YACxC,oDAAoD;YACpD,mDAAmD;YACnD,yBAAyB;YACzB,gCAAgC;YAChC,+BAA+B;SAChC;QACD,WAAW,EAAE;;;;;kBAKC;QACd,aAAa,EAAE;YACb,iFAAiF;YACjF,mDAAmD;YACnD,wDAAwD;YACxD,qDAAqD;YACrD,qDAAqD;YACrD,oEAAoE;YACpE,uCAAuC;SACxC;QACD,QAAQ,EAAE;YACR,iDAAiD;YACjD,mEAAmE;YACnE,6FAA6F;YAC7F,qCAAqC;YACrC,mCAAmC;SACpC;QACD,YAAY,EAAE,CAAC,kBAAkB,EAAE,mBAAmB,EAAE,uBAAuB,CAAC;KACjF;CACF,CAAC"}
|
||||
{"version":3,"file":"n8n-executions.js","sourceRoot":"","sources":["../../../../src/mcp/tool-docs/workflow_management/n8n-executions.ts"],"names":[],"mappings":";;;AAEa,QAAA,gBAAgB,GAAsB;IACjD,IAAI,EAAE,gBAAgB;IACtB,QAAQ,EAAE,qBAAqB;IAC/B,UAAU,EAAE;QACV,WAAW,EAAE,sGAAsG;QACnH,aAAa,EAAE,CAAC,QAAQ,EAAE,IAAI,EAAE,YAAY,EAAE,QAAQ,CAAC;QACvD,OAAO,EAAE,yEAAyE;QAClF,WAAW,EAAE,iBAAiB;QAC9B,IAAI,EAAE;YACJ,2CAA2C;YAC3C,6CAA6C;YAC7C,0CAA0C;YAC1C,2DAA2D;SAC5D;KACF;IACD,IAAI,EAAE;QACJ,WAAW,EAAE;;;;;;;;;+CAS8B;QAC3C,UAAU,EAAE;YACV,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,IAAI,EAAE,WAAW,EAAE,uCAAuC,EAAE;YAChG,EAAE,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,yDAAyD,EAAE;YAC/G,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,oEAAoE,EAAE;YAC5H,SAAS,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,qEAAqE,EAAE;YACjI,UAAU,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,0FAA0F,EAAE;YACxJ,gBAAgB,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,2EAA2E,EAAE;YAChJ,UAAU,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,wCAAwC,EAAE;YACtG,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,mEAAmE,EAAE;YAC7H,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,0DAA0D,EAAE;YACnH,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,2DAA2D,EAAE;YACrH,SAAS,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,oDAAoD,EAAE;YACjH,WAAW,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,WAAW,EAAE,0DAA0D,EAAE;SAC3H;QACD,OAAO,EAAE;;;gDAGmC;QAC5C,QAAQ,EAAE;YACR,6GAA6G;YAC7G,8EAA8E;YAC9E,2EAA2E;YAC3E,2FAA2F;YAC3F,+IAA+I;YAC/I,4EAA4E;SAC7E;QACD,QAAQ,EAAE;YACR,8CAA8C;YAC9C,mDAAmD;YACnD,yBAAyB;YACzB,gCAAgC;YAChC,+BAA+B;SAChC;QACD,WAAW,EAAE;;;;kBAIC;QACd,aAAa,EAAE;YACb,gEAAgE;YAChE,wDAAwD;YACxD,qDAAqD;YACrD,qDAAqD;YACrD,uCAAuC;SACxC;QACD,QAAQ,EAAE;YACR,iDAAiD;YACjD,mEAAmE;YACnE,qCAAqC;YACrC,mCAAmC;SACpC;QACD,YAAY,EAAE,CAAC,kBAAkB,EAAE,mBAAmB,EAAE,uBAAuB,CAAC;KACjF;CACF,CAAC"}
|
||||
2
dist/mcp/tools-n8n-manager.d.ts.map
vendored
@@ -1 +1 @@
|
||||
{"version":3,"file":"tools-n8n-manager.d.ts","sourceRoot":"","sources":["../../src/mcp/tools-n8n-manager.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,UAAU,CAAC;AAQ1C,eAAO,MAAM,kBAAkB,EAAE,cAAc,EAqlB9C,CAAC"}
|
||||
{"version":3,"file":"tools-n8n-manager.d.ts","sourceRoot":"","sources":["../../src/mcp/tools-n8n-manager.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,UAAU,CAAC;AAQ1C,eAAO,MAAM,kBAAkB,EAAE,cAAc,EAmf9C,CAAC"}
|
||||
129
dist/mcp/tools-n8n-manager.js
vendored
@@ -42,7 +42,7 @@ exports.n8nManagementTools = [
|
||||
},
|
||||
connections: {
|
||||
type: 'object',
|
||||
description: 'Workflow connections object. Keys are source node names (the name field, not id), values define output connections'
|
||||
description: 'Workflow connections object. Keys are source node IDs, values define output connections'
|
||||
},
|
||||
settings: {
|
||||
type: 'object',
|
||||
@@ -60,13 +60,7 @@ exports.n8nManagementTools = [
|
||||
}
|
||||
},
|
||||
required: ['name', 'nodes', 'connections']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Create Workflow',
|
||||
readOnlyHint: false,
|
||||
destructiveHint: false,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_get_workflow',
|
||||
@@ -86,13 +80,7 @@ exports.n8nManagementTools = [
|
||||
}
|
||||
},
|
||||
required: ['id']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Get Workflow',
|
||||
readOnlyHint: true,
|
||||
idempotentHint: true,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_update_full_workflow',
|
||||
@@ -126,14 +114,7 @@ exports.n8nManagementTools = [
|
||||
}
|
||||
},
|
||||
required: ['id']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Update Full Workflow',
|
||||
readOnlyHint: false,
|
||||
destructiveHint: false,
|
||||
idempotentHint: true,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_update_partial_workflow',
|
||||
@@ -164,14 +145,7 @@ exports.n8nManagementTools = [
|
||||
}
|
||||
},
|
||||
required: ['id', 'operations']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Update Partial Workflow',
|
||||
readOnlyHint: false,
|
||||
destructiveHint: false,
|
||||
idempotentHint: true,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_delete_workflow',
|
||||
@@ -185,13 +159,7 @@ exports.n8nManagementTools = [
|
||||
}
|
||||
},
|
||||
required: ['id']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Delete Workflow',
|
||||
readOnlyHint: false,
|
||||
destructiveHint: true,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_list_workflows',
|
||||
@@ -225,13 +193,7 @@ exports.n8nManagementTools = [
|
||||
description: 'Exclude pinned data from response (default: true)'
|
||||
}
|
||||
}
|
||||
},
|
||||
annotations: {
|
||||
title: 'List Workflows',
|
||||
readOnlyHint: true,
|
||||
idempotentHint: true,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_validate_workflow',
|
||||
@@ -268,13 +230,7 @@ exports.n8nManagementTools = [
|
||||
}
|
||||
},
|
||||
required: ['id']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Validate Workflow',
|
||||
readOnlyHint: true,
|
||||
idempotentHint: true,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_autofix_workflow',
|
||||
@@ -309,14 +265,7 @@ exports.n8nManagementTools = [
|
||||
}
|
||||
},
|
||||
required: ['id']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Autofix Workflow',
|
||||
readOnlyHint: false,
|
||||
destructiveHint: false,
|
||||
idempotentHint: true,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_test_workflow',
|
||||
@@ -368,13 +317,7 @@ exports.n8nManagementTools = [
|
||||
}
|
||||
},
|
||||
required: ['workflowId']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Test Workflow',
|
||||
readOnlyHint: false,
|
||||
destructiveHint: false,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_executions',
|
||||
@@ -393,8 +336,8 @@ exports.n8nManagementTools = [
|
||||
},
|
||||
mode: {
|
||||
type: 'string',
|
||||
enum: ['preview', 'summary', 'filtered', 'full', 'error'],
|
||||
description: 'For action=get: preview=structure only, summary=2 items (default), filtered=custom, full=all data, error=optimized error debugging'
|
||||
enum: ['preview', 'summary', 'filtered', 'full'],
|
||||
description: 'For action=get: preview=structure only, summary=2 items (default), filtered=custom, full=all data'
|
||||
},
|
||||
nodeNames: {
|
||||
type: 'array',
|
||||
@@ -409,22 +352,6 @@ exports.n8nManagementTools = [
|
||||
type: 'boolean',
|
||||
description: 'For action=get: include input data in addition to output (default: false)'
|
||||
},
|
||||
errorItemsLimit: {
|
||||
type: 'number',
|
||||
description: 'For action=get with mode=error: sample items from upstream node (default: 2, max: 100)'
|
||||
},
|
||||
includeStackTrace: {
|
||||
type: 'boolean',
|
||||
description: 'For action=get with mode=error: include full stack trace (default: false, shows truncated)'
|
||||
},
|
||||
includeExecutionPath: {
|
||||
type: 'boolean',
|
||||
description: 'For action=get with mode=error: include execution path leading to error (default: true)'
|
||||
},
|
||||
fetchWorkflow: {
|
||||
type: 'boolean',
|
||||
description: 'For action=get with mode=error: fetch workflow for accurate upstream detection (default: true)'
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
description: 'For action=list: number of executions to return (1-100, default: 100)'
|
||||
@@ -452,13 +379,7 @@ exports.n8nManagementTools = [
|
||||
}
|
||||
},
|
||||
required: ['action']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Manage Executions',
|
||||
readOnlyHint: false,
|
||||
destructiveHint: true,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_health_check',
|
||||
@@ -477,13 +398,7 @@ exports.n8nManagementTools = [
|
||||
description: 'Include extra details in diagnostic mode (default: false)'
|
||||
}
|
||||
}
|
||||
},
|
||||
annotations: {
|
||||
title: 'Health Check',
|
||||
readOnlyHint: true,
|
||||
idempotentHint: true,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_workflow_versions',
|
||||
@@ -537,13 +452,7 @@ exports.n8nManagementTools = [
|
||||
}
|
||||
},
|
||||
required: ['mode']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Workflow Versions',
|
||||
readOnlyHint: false,
|
||||
destructiveHint: true,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_deploy_template',
|
||||
@@ -576,13 +485,7 @@ exports.n8nManagementTools = [
|
||||
}
|
||||
},
|
||||
required: ['templateId']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Deploy Template',
|
||||
readOnlyHint: false,
|
||||
destructiveHint: false,
|
||||
openWorldHint: true,
|
||||
},
|
||||
}
|
||||
}
|
||||
];
|
||||
//# sourceMappingURL=tools-n8n-manager.js.map
|
||||
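The annotation blocks that appear throughout this file follow the MCP tool-annotation shape; a representative read-only entry, matching the hunks above, would be:

const listWorkflowsAnnotations = {
  title: 'List Workflows',
  readOnlyHint: true,    // the tool does not modify the n8n instance
  idempotentHint: true,  // repeated calls with the same arguments have no extra effect
  openWorldHint: true,   // the tool talks to an external n8n API
};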
2
dist/mcp/tools-n8n-manager.js.map
vendored
File diff suppressed because one or more lines are too long
2
dist/mcp/tools.d.ts.map
vendored
@@ -1 +1 @@
|
||||
{"version":3,"file":"tools.d.ts","sourceRoot":"","sources":["../../src/mcp/tools.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,UAAU,CAAC;AAQ1C,eAAO,MAAM,0BAA0B,EAAE,cAAc,EAwatD,CAAC"}
|
||||
{"version":3,"file":"tools.d.ts","sourceRoot":"","sources":["../../src/mcp/tools.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,UAAU,CAAC;AAQ1C,eAAO,MAAM,0BAA0B,EAAE,cAAc,EA+XtD,CAAC"}
|
||||
41
dist/mcp/tools.js
vendored
@@ -20,11 +20,6 @@ exports.n8nDocumentationToolsFinal = [
|
||||
},
|
||||
},
|
||||
},
|
||||
annotations: {
|
||||
title: 'Tools Documentation',
|
||||
readOnlyHint: true,
|
||||
idempotentHint: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'search_nodes',
|
||||
@@ -52,20 +47,9 @@ exports.n8nDocumentationToolsFinal = [
|
||||
description: 'Include top 2 real-world configuration examples from popular templates (default: false)',
|
||||
default: false,
|
||||
},
|
||||
source: {
|
||||
type: 'string',
|
||||
enum: ['all', 'core', 'community', 'verified'],
|
||||
description: 'Filter by node source: all=everything (default), core=n8n base nodes, community=community nodes, verified=verified community nodes only',
|
||||
default: 'all',
|
||||
},
|
||||
},
|
||||
required: ['query'],
|
||||
},
|
||||
annotations: {
|
||||
title: 'Search Nodes',
|
||||
readOnlyHint: true,
|
||||
idempotentHint: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'get_node',
|
||||
@@ -119,11 +103,6 @@ exports.n8nDocumentationToolsFinal = [
|
||||
},
|
||||
required: ['nodeType'],
|
||||
},
|
||||
annotations: {
|
||||
title: 'Get Node Info',
|
||||
readOnlyHint: true,
|
||||
idempotentHint: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'validate_node',
|
||||
@@ -204,11 +183,6 @@ exports.n8nDocumentationToolsFinal = [
|
||||
},
|
||||
required: ['nodeType', 'displayName', 'valid']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Validate Node Config',
|
||||
readOnlyHint: true,
|
||||
idempotentHint: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'get_template',
|
||||
@@ -229,11 +203,6 @@ exports.n8nDocumentationToolsFinal = [
|
||||
},
|
||||
required: ['templateId'],
|
||||
},
|
||||
annotations: {
|
||||
title: 'Get Template',
|
||||
readOnlyHint: true,
|
||||
idempotentHint: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'search_templates',
|
||||
@@ -324,11 +293,6 @@ exports.n8nDocumentationToolsFinal = [
|
||||
},
|
||||
},
|
||||
},
|
||||
annotations: {
|
||||
title: 'Search Templates',
|
||||
readOnlyHint: true,
|
||||
idempotentHint: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'validate_workflow',
|
||||
@@ -414,11 +378,6 @@ exports.n8nDocumentationToolsFinal = [
|
||||
},
|
||||
required: ['valid', 'summary']
|
||||
},
|
||||
annotations: {
|
||||
title: 'Validate Workflow',
|
||||
readOnlyHint: true,
|
||||
idempotentHint: true,
|
||||
},
|
||||
},
|
||||
];
|
||||
//# sourceMappingURL=tools.js.map
|
||||
2
dist/mcp/tools.js.map
vendored
File diff suppressed because one or more lines are too long
6
dist/services/execution-processor.d.ts
vendored
@@ -1,8 +1,8 @@
import { Execution, ExecutionPreview, ExecutionRecommendation, ExecutionFilterOptions, FilteredExecutionResponse, Workflow } from '../types/n8n-api';
import { Execution, ExecutionPreview, ExecutionRecommendation, ExecutionFilterOptions, FilteredExecutionResponse } from '../types/n8n-api';
export declare function generatePreview(execution: Execution): {
    preview: ExecutionPreview;
    recommendation: ExecutionRecommendation;
};
export declare function filterExecutionData(execution: Execution, options: ExecutionFilterOptions, workflow?: Workflow): FilteredExecutionResponse;
export declare function processExecution(execution: Execution, options?: ExecutionFilterOptions, workflow?: Workflow): FilteredExecutionResponse | Execution;
export declare function filterExecutionData(execution: Execution, options: ExecutionFilterOptions): FilteredExecutionResponse;
export declare function processExecution(execution: Execution, options?: ExecutionFilterOptions): FilteredExecutionResponse | Execution;
//# sourceMappingURL=execution-processor.d.ts.map
2
dist/services/execution-processor.d.ts.map
vendored
@@ -1 +1 @@
|
||||
{"version":3,"file":"execution-processor.d.ts","sourceRoot":"","sources":["../../src/services/execution-processor.ts"],"names":[],"mappings":"AAaA,OAAO,EACL,SAAS,EAET,gBAAgB,EAEhB,uBAAuB,EACvB,sBAAsB,EACtB,yBAAyB,EAGzB,QAAQ,EACT,MAAM,kBAAkB,CAAC;AAgH1B,wBAAgB,eAAe,CAAC,SAAS,EAAE,SAAS,GAAG;IACrD,OAAO,EAAE,gBAAgB,CAAC;IAC1B,cAAc,EAAE,uBAAuB,CAAC;CACzC,CA2EA;AAoID,wBAAgB,mBAAmB,CACjC,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,sBAAsB,EAC/B,QAAQ,CAAC,EAAE,QAAQ,GAClB,yBAAyB,CAsL3B;AAMD,wBAAgB,gBAAgB,CAC9B,SAAS,EAAE,SAAS,EACpB,OAAO,GAAE,sBAA2B,EACpC,QAAQ,CAAC,EAAE,QAAQ,GAClB,yBAAyB,GAAG,SAAS,CAOvC"}
|
||||
{"version":3,"file":"execution-processor.d.ts","sourceRoot":"","sources":["../../src/services/execution-processor.ts"],"names":[],"mappings":"AAaA,OAAO,EACL,SAAS,EAET,gBAAgB,EAEhB,uBAAuB,EACvB,sBAAsB,EACtB,yBAAyB,EAG1B,MAAM,kBAAkB,CAAC;AA+G1B,wBAAgB,eAAe,CAAC,SAAS,EAAE,SAAS,GAAG;IACrD,OAAO,EAAE,gBAAgB,CAAC;IAC1B,cAAc,EAAE,uBAAuB,CAAC;CACzC,CA2EA;AAoID,wBAAgB,mBAAmB,CACjC,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,sBAAsB,GAC9B,yBAAyB,CA2J3B;AAMD,wBAAgB,gBAAgB,CAC9B,SAAS,EAAE,SAAS,EACpB,OAAO,GAAE,sBAA2B,GACnC,yBAAyB,GAAG,SAAS,CAOvC"}
|
||||
28
dist/services/execution-processor.js
vendored
@@ -4,7 +4,6 @@ exports.generatePreview = generatePreview;
|
||||
exports.filterExecutionData = filterExecutionData;
|
||||
exports.processExecution = processExecution;
|
||||
const logger_1 = require("../utils/logger");
|
||||
const error_execution_processor_1 = require("./error-execution-processor");
|
||||
const THRESHOLDS = {
|
||||
CHAR_SIZE_BYTES: 2,
|
||||
OVERHEAD_PER_OBJECT: 50,
|
||||
@@ -232,7 +231,7 @@ function truncateItems(items, limit) {
|
||||
},
|
||||
};
|
||||
}
|
||||
function filterExecutionData(execution, options, workflow) {
|
||||
function filterExecutionData(execution, options) {
|
||||
const mode = options.mode || 'summary';
|
||||
let itemsLimit = options.itemsLimit !== undefined ? options.itemsLimit : 2;
|
||||
if (itemsLimit !== -1) {
|
||||
@@ -266,27 +265,6 @@ function filterExecutionData(execution, options, workflow) {
|
||||
response.recommendation = recommendation;
|
||||
return response;
|
||||
}
|
||||
if (mode === 'error') {
|
||||
const errorAnalysis = (0, error_execution_processor_1.processErrorExecution)(execution, {
|
||||
itemsLimit: options.errorItemsLimit ?? 2,
|
||||
includeStackTrace: options.includeStackTrace ?? false,
|
||||
includeExecutionPath: options.includeExecutionPath !== false,
|
||||
workflow
|
||||
});
|
||||
const runData = execution.data?.resultData?.runData || {};
|
||||
const executedNodes = Object.keys(runData).length;
|
||||
response.errorInfo = errorAnalysis;
|
||||
response.summary = {
|
||||
totalNodes: executedNodes,
|
||||
executedNodes,
|
||||
totalItems: 0,
|
||||
hasMoreData: false
|
||||
};
|
||||
if (execution.data?.resultData?.error) {
|
||||
response.error = execution.data.resultData.error;
|
||||
}
|
||||
return response;
|
||||
}
|
||||
if (!execution.data?.resultData?.runData) {
|
||||
response.summary = {
|
||||
totalNodes: 0,
|
||||
@@ -372,10 +350,10 @@ function filterExecutionData(execution, options, workflow) {
|
||||
}
|
||||
return response;
|
||||
}
|
||||
function processExecution(execution, options = {}, workflow) {
|
||||
function processExecution(execution, options = {}) {
|
||||
if (!options.mode && !options.nodeNames && options.itemsLimit === undefined) {
|
||||
return execution;
|
||||
}
|
||||
return filterExecutionData(execution, options, workflow);
|
||||
return filterExecutionData(execution, options);
|
||||
}
|
||||
//# sourceMappingURL=execution-processor.js.map
|
||||
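The change above threads an optional workflow argument through processExecution so that error mode can resolve the node upstream of a failure. A reduced sketch of that dispatch, with placeholder types and a stubbed filter (not the project's real implementation):

type FilterMode = 'preview' | 'summary' | 'filtered' | 'full' | 'error';

interface FilterOptions {
  mode?: FilterMode;
  nodeNames?: string[];
  itemsLimit?: number;
  errorItemsLimit?: number;
}

// Stand-in for filterExecutionData; in error mode the workflow would be used to
// locate the upstream node and sample the items it fed into the failing node.
function filterExecutionDataSketch(execution: unknown, options: FilterOptions, workflow?: unknown) {
  return { mode: options.mode ?? 'summary', hasWorkflowContext: workflow !== undefined, execution };
}

function processExecutionSketch(execution: unknown, options: FilterOptions = {}, workflow?: unknown) {
  // No filtering requested: hand back the raw execution unchanged.
  if (!options.mode && !options.nodeNames && options.itemsLimit === undefined) {
    return execution;
  }
  return filterExecutionDataSketch(execution, options, workflow);
}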
2
dist/services/execution-processor.js.map
vendored
File diff suppressed because one or more lines are too long
24
dist/services/n8n-validation.d.ts
vendored
@@ -26,10 +26,10 @@ export declare const workflowNodeSchema: z.ZodObject<{
|
||||
parameters: Record<string, unknown>;
|
||||
credentials?: Record<string, unknown> | undefined;
|
||||
retryOnFail?: boolean | undefined;
|
||||
continueOnFail?: boolean | undefined;
|
||||
maxTries?: number | undefined;
|
||||
waitBetweenTries?: number | undefined;
|
||||
alwaysOutputData?: boolean | undefined;
|
||||
continueOnFail?: boolean | undefined;
|
||||
executeOnce?: boolean | undefined;
|
||||
disabled?: boolean | undefined;
|
||||
notes?: string | undefined;
|
||||
@@ -43,10 +43,10 @@ export declare const workflowNodeSchema: z.ZodObject<{
|
||||
parameters: Record<string, unknown>;
|
||||
credentials?: Record<string, unknown> | undefined;
|
||||
retryOnFail?: boolean | undefined;
|
||||
continueOnFail?: boolean | undefined;
|
||||
maxTries?: number | undefined;
|
||||
waitBetweenTries?: number | undefined;
|
||||
alwaysOutputData?: boolean | undefined;
|
||||
continueOnFail?: boolean | undefined;
|
||||
executeOnce?: boolean | undefined;
|
||||
disabled?: boolean | undefined;
|
||||
notes?: string | undefined;
|
||||
@@ -155,11 +155,6 @@ export declare const workflowConnectionSchema: z.ZodRecord<z.ZodString, z.ZodObj
|
||||
node: string;
|
||||
index: number;
|
||||
}[][] | undefined;
|
||||
ai_tool?: {
|
||||
type: string;
|
||||
node: string;
|
||||
index: number;
|
||||
}[][] | undefined;
|
||||
ai_languageModel?: {
|
||||
type: string;
|
||||
node: string;
|
||||
@@ -170,6 +165,11 @@ export declare const workflowConnectionSchema: z.ZodRecord<z.ZodString, z.ZodObj
|
||||
node: string;
|
||||
index: number;
|
||||
}[][] | undefined;
|
||||
ai_tool?: {
|
||||
type: string;
|
||||
node: string;
|
||||
index: number;
|
||||
}[][] | undefined;
|
||||
ai_embedding?: {
|
||||
type: string;
|
||||
node: string;
|
||||
@@ -191,11 +191,6 @@ export declare const workflowConnectionSchema: z.ZodRecord<z.ZodString, z.ZodObj
|
||||
node: string;
|
||||
index: number;
|
||||
}[][] | undefined;
|
||||
ai_tool?: {
|
||||
type: string;
|
||||
node: string;
|
||||
index: number;
|
||||
}[][] | undefined;
|
||||
ai_languageModel?: {
|
||||
type: string;
|
||||
node: string;
|
||||
@@ -206,6 +201,11 @@ export declare const workflowConnectionSchema: z.ZodRecord<z.ZodString, z.ZodObj
|
||||
node: string;
|
||||
index: number;
|
||||
}[][] | undefined;
|
||||
ai_tool?: {
|
||||
type: string;
|
||||
node: string;
|
||||
index: number;
|
||||
}[][] | undefined;
|
||||
ai_embedding?: {
|
||||
type: string;
|
||||
node: string;
|
||||
|
||||
dist/services/n8n-validation.d.ts.map (2 changes, vendored)
@@ -1 +1 @@
|
||||
{"version":3,"file":"n8n-validation.d.ts","sourceRoot":"","sources":["../../src/services/n8n-validation.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAAE,YAAY,EAAE,kBAAkB,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAM9E,eAAO,MAAM,kBAAkB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAiB7B,CAAC;AAkBH,eAAO,MAAM,wBAAwB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAUpC,CAAC;AAEF,eAAO,MAAM,sBAAsB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAWjC,CAAC;AAGH,eAAO,MAAM,uBAAuB;;;;;;CAMnC,CAAC;AAGF,wBAAgB,oBAAoB,CAAC,IAAI,EAAE,OAAO,GAAG,YAAY,CAEhE;AAED,wBAAgB,2BAA2B,CAAC,WAAW,EAAE,OAAO,GAAG,kBAAkB,CAEpF;AAED,wBAAgB,wBAAwB,CAAC,QAAQ,EAAE,OAAO,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,sBAAsB,CAAC,CAElG;AAGD,wBAAgB,sBAAsB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,CAAC,GAAG,OAAO,CAAC,QAAQ,CAAC,CAsBrF;AAiBD,wBAAgB,sBAAsB,CAAC,QAAQ,EAAE,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC,CAoE5E;AAGD,wBAAgB,yBAAyB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,CAAC,GAAG,MAAM,EAAE,CA6P/E;AAGD,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,QAAQ,GAAG,OAAO,CAK7D;AAMD,wBAAgB,+BAA+B,CAAC,IAAI,EAAE,YAAY,GAAG,MAAM,EAAE,CA+F5E;AAMD,wBAAgB,yBAAyB,CAAC,QAAQ,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,CA0D/E;AAGD,wBAAgB,aAAa,CAAC,QAAQ,EAAE,QAAQ,GAAG,MAAM,GAAG,IAAI,CAmB/D;AAGD,wBAAgB,2BAA2B,IAAI,MAAM,CA6CpD;AAGD,wBAAgB,yBAAyB,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,CAmBpE"}
|
||||
{"version":3,"file":"n8n-validation.d.ts","sourceRoot":"","sources":["../../src/services/n8n-validation.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAAE,YAAY,EAAE,kBAAkB,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAM9E,eAAO,MAAM,kBAAkB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAiB7B,CAAC;AAkBH,eAAO,MAAM,wBAAwB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAUpC,CAAC;AAEF,eAAO,MAAM,sBAAsB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAWjC,CAAC;AAGH,eAAO,MAAM,uBAAuB;;;;;;CAMnC,CAAC;AAGF,wBAAgB,oBAAoB,CAAC,IAAI,EAAE,OAAO,GAAG,YAAY,CAEhE;AAED,wBAAgB,2BAA2B,CAAC,WAAW,EAAE,OAAO,GAAG,kBAAkB,CAEpF;AAED,wBAAgB,wBAAwB,CAAC,QAAQ,EAAE,OAAO,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,sBAAsB,CAAC,CAElG;AAGD,wBAAgB,sBAAsB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,CAAC,GAAG,OAAO,CAAC,QAAQ,CAAC,CAsBrF;AAiBD,wBAAgB,sBAAsB,CAAC,QAAQ,EAAE,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC,CAoE5E;AAGD,wBAAgB,yBAAyB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,CAAC,GAAG,MAAM,EAAE,CAiP/E;AAGD,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,QAAQ,GAAG,OAAO,CAK7D;AAMD,wBAAgB,+BAA+B,CAAC,IAAI,EAAE,YAAY,GAAG,MAAM,EAAE,CA+F5E;AAMD,wBAAgB,yBAAyB,CAAC,QAAQ,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,CA0D/E;AAGD,wBAAgB,aAAa,CAAC,QAAQ,EAAE,QAAQ,GAAG,MAAM,GAAG,IAAI,CAmB/D;AAGD,wBAAgB,2BAA2B,IAAI,MAAM,CA6CpD;AAGD,wBAAgB,yBAAyB,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,CAmBpE"}
|
||||
dist/services/n8n-validation.js (28 changes, vendored)
@@ -152,23 +152,17 @@ function validateWorkflowStructure(workflow) {
|
||||
}
|
||||
else if (connectionCount > 0 || executableNodes.length > 1) {
|
||||
const connectedNodes = new Set();
|
||||
const ALL_CONNECTION_TYPES = ['main', 'error', 'ai_tool', 'ai_languageModel', 'ai_memory', 'ai_embedding', 'ai_vectorStore'];
|
||||
Object.entries(workflow.connections).forEach(([sourceName, connection]) => {
|
||||
connectedNodes.add(sourceName);
|
||||
ALL_CONNECTION_TYPES.forEach(connType => {
|
||||
const connData = connection[connType];
|
||||
if (connData && Array.isArray(connData)) {
|
||||
connData.forEach((outputs) => {
|
||||
if (Array.isArray(outputs)) {
|
||||
outputs.forEach((target) => {
|
||||
if (target?.node) {
|
||||
connectedNodes.add(target.node);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
if (connection.main && Array.isArray(connection.main)) {
|
||||
connection.main.forEach((outputs) => {
|
||||
if (Array.isArray(outputs)) {
|
||||
outputs.forEach((target) => {
|
||||
connectedNodes.add(target.node);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
const disconnectedNodes = workflow.nodes.filter(node => {
|
||||
if ((0, node_classification_1.isNonExecutableNode)(node.type)) {
|
||||
@@ -177,9 +171,7 @@ function validateWorkflowStructure(workflow) {
|
||||
const isConnected = connectedNodes.has(node.name);
|
||||
const isNodeTrigger = (0, node_type_utils_1.isTriggerNode)(node.type);
|
||||
if (isNodeTrigger) {
|
||||
const hasOutgoingConnections = !!workflow.connections?.[node.name];
|
||||
const hasInboundConnections = isConnected;
|
||||
return !hasOutgoingConnections && !hasInboundConnections;
|
||||
return !workflow.connections?.[node.name];
|
||||
}
|
||||
return !isConnected;
|
||||
});
|
||||
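For context, a minimal standalone sketch (not part of this diff) of the traversal the hunk above performs: every connection type is walked, not just `main`, so nodes wired only through AI ports still count as connected. The `Connections` shape here is simplified for illustration.

```typescript
type ConnectionTarget = { node: string; type: string; index: number };
type Connections = Record<string, Partial<Record<string, ConnectionTarget[][]>>>;

const ALL_CONNECTION_TYPES = [
  'main', 'error', 'ai_tool', 'ai_languageModel', 'ai_memory', 'ai_embedding', 'ai_vectorStore',
] as const;

// Collect every node name that appears as a source or as a target of any connection type.
function collectConnectedNodes(connections: Connections): Set<string> {
  const connected = new Set<string>();
  for (const [sourceName, byType] of Object.entries(connections)) {
    connected.add(sourceName);
    for (const connType of ALL_CONNECTION_TYPES) {
      for (const outputs of byType[connType] ?? []) {
        for (const target of outputs ?? []) {
          if (target?.node) connected.add(target.node);
        }
      }
    }
  }
  return connected;
}

// Example: an AI Agent wired to a tool via ai_tool is treated as connected.
const demo: Connections = {
  'AI Agent': { ai_tool: [[{ node: 'HTTP Request Tool', type: 'ai_tool', index: 0 }]] },
};
console.log([...collectConnectedNodes(demo)]); // ['AI Agent', 'HTTP Request Tool']
```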
|
||||
dist/services/n8n-validation.js.map (2 changes, vendored)
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
||||
{"version":3,"file":"node-similarity-service.d.ts","sourceRoot":"","sources":["../../src/services/node-similarity-service.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAI7D,MAAM,WAAW,cAAc;IAC7B,QAAQ,EAAE,MAAM,CAAC;IACjB,WAAW,EAAE,MAAM,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,MAAM,WAAW,eAAe;IAC9B,cAAc,EAAE,MAAM,CAAC;IACvB,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,oBAAoB;IACnC,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,qBAAa,qBAAqB;IAEhC,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,iBAAiB,CAAM;IAC/C,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,kBAAkB,CAAK;IAC/C,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,mBAAmB,CAAK;IAChD,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,iBAAiB,CAAiB;IAC1D,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,mBAAmB,CAAO;IAElD,OAAO,CAAC,UAAU,CAAiB;IACnC,OAAO,CAAC,cAAc,CAAsC;IAC5D,OAAO,CAAC,SAAS,CAAsB;IACvC,OAAO,CAAC,WAAW,CAAa;IAChC,OAAO,CAAC,YAAY,CAAa;gBAErB,UAAU,EAAE,cAAc;IAStC,OAAO,CAAC,wBAAwB;IAkDhC,OAAO,CAAC,yBAAyB;IAuB3B,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,KAAK,GAAE,MAAU,GAAG,OAAO,CAAC,cAAc,EAAE,CAAC;IAiEzF,OAAO,CAAC,mBAAmB;IA0E3B,OAAO,CAAC,wBAAwB;IAuEhC,OAAO,CAAC,gBAAgB;IA2BxB,OAAO,CAAC,iBAAiB;IAUzB,OAAO,CAAC,mBAAmB;IAgB3B,OAAO,CAAC,eAAe;YAgDT,cAAc;IAqCrB,eAAe,IAAI,IAAI;IAUjB,YAAY,IAAI,OAAO,CAAC,IAAI,CAAC;IAQ1C,uBAAuB,CAAC,WAAW,EAAE,cAAc,EAAE,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM;IA8BnF,aAAa,CAAC,UAAU,EAAE,cAAc,GAAG,OAAO;IAQlD,UAAU,IAAI,IAAI;CAGnB"}
|
||||
{"version":3,"file":"node-similarity-service.d.ts","sourceRoot":"","sources":["../../src/services/node-similarity-service.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAG7D,MAAM,WAAW,cAAc;IAC7B,QAAQ,EAAE,MAAM,CAAC;IACjB,WAAW,EAAE,MAAM,CAAC;IACpB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,MAAM,WAAW,eAAe;IAC9B,cAAc,EAAE,MAAM,CAAC;IACvB,aAAa,EAAE,MAAM,CAAC;IACtB,YAAY,EAAE,MAAM,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,oBAAoB;IACnC,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,qBAAa,qBAAqB;IAEhC,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,iBAAiB,CAAM;IAC/C,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,kBAAkB,CAAK;IAC/C,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,mBAAmB,CAAK;IAChD,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,iBAAiB,CAAiB;IAC1D,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,mBAAmB,CAAO;IAElD,OAAO,CAAC,UAAU,CAAiB;IACnC,OAAO,CAAC,cAAc,CAAsC;IAC5D,OAAO,CAAC,SAAS,CAAsB;IACvC,OAAO,CAAC,WAAW,CAAa;IAChC,OAAO,CAAC,YAAY,CAAa;gBAErB,UAAU,EAAE,cAAc;IAStC,OAAO,CAAC,wBAAwB;IAkDhC,OAAO,CAAC,yBAAyB;IAuB3B,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,KAAK,GAAE,MAAU,GAAG,OAAO,CAAC,cAAc,EAAE,CAAC;IA8CzF,OAAO,CAAC,mBAAmB;IA0E3B,OAAO,CAAC,wBAAwB;IAuEhC,OAAO,CAAC,gBAAgB;IA2BxB,OAAO,CAAC,iBAAiB;IAUzB,OAAO,CAAC,mBAAmB;IAgB3B,OAAO,CAAC,eAAe;YAgDT,cAAc;IAqCrB,eAAe,IAAI,IAAI;IAUjB,YAAY,IAAI,OAAO,CAAC,IAAI,CAAC;IAQ1C,uBAAuB,CAAC,WAAW,EAAE,cAAc,EAAE,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM;IA8BnF,aAAa,CAAC,UAAU,EAAE,cAAc,GAAG,OAAO;IAQlD,UAAU,IAAI,IAAI;CAGnB"}
|
||||
dist/services/node-similarity-service.js (17 changes, vendored)
@@ -2,7 +2,6 @@
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.NodeSimilarityService = void 0;
|
||||
const logger_1 = require("../utils/logger");
|
||||
const tool_variant_generator_1 = require("./tool-variant-generator");
|
||||
class NodeSimilarityService {
|
||||
constructor(repository) {
|
||||
this.nodeCache = null;
|
||||
@@ -68,22 +67,6 @@ class NodeSimilarityService {
|
||||
if (!invalidType || invalidType.trim() === '') {
|
||||
return [];
|
||||
}
|
||||
if (tool_variant_generator_1.ToolVariantGenerator.isToolVariantNodeType(invalidType)) {
|
||||
const baseNodeType = tool_variant_generator_1.ToolVariantGenerator.getBaseNodeType(invalidType);
|
||||
if (baseNodeType) {
|
||||
const baseNode = this.repository.getNode(baseNodeType);
|
||||
if (baseNode) {
|
||||
return [{
|
||||
nodeType: invalidType,
|
||||
displayName: `${baseNode.displayName} Tool`,
|
||||
confidence: 0.98,
|
||||
reason: `Dynamic AI Tool variant of ${baseNode.displayName}`,
|
||||
category: baseNode.category,
|
||||
description: 'Runtime-generated Tool variant for AI Agent integration'
|
||||
}];
|
||||
}
|
||||
}
|
||||
}
|
||||
const suggestions = [];
|
||||
const mistakeSuggestion = this.checkCommonMistakes(invalidType);
|
||||
if (mistakeSuggestion) {
|
||||
|
||||
dist/services/node-similarity-service.js.map (2 changes, vendored)
File diff suppressed because one or more lines are too long
dist/services/workflow-validator.d.ts.map (2 changes, vendored)
@@ -1 +1 @@
|
||||
{"version":3,"file":"workflow-validator.d.ts","sourceRoot":"","sources":["../../src/services/workflow-validator.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,uBAAuB,EAAE,MAAM,6BAA6B,CAAC;AAatE,UAAU,YAAY;IACpB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC3B,UAAU,EAAE,GAAG,CAAC;IAChB,WAAW,CAAC,EAAE,GAAG,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,OAAO,CAAC,EAAE,uBAAuB,GAAG,qBAAqB,GAAG,cAAc,CAAC;IAC3E,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB;AAED,UAAU,kBAAkB;IAC1B,CAAC,UAAU,EAAE,MAAM,GAAG;QACpB,IAAI,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;QACnE,KAAK,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;QACpE,OAAO,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;KACvE,CAAC;CACH;AAED,UAAU,YAAY;IACpB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,YAAY,EAAE,CAAC;IACtB,WAAW,EAAE,kBAAkB,CAAC;IAChC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,UAAU,CAAC,EAAE,GAAG,CAAC;IACjB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,GAAG,CAAC;CACZ;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,OAAO,GAAG,SAAS,CAAC;IAC1B,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,GAAG,CAAC,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,WAAW,CAAC,EAAE,MAAM,CAAC;KACtB,CAAC;CACH;AAED,MAAM,WAAW,wBAAwB;IACvC,KAAK,EAAE,OAAO,CAAC;IACf,MAAM,EAAE,eAAe,EAAE,CAAC;IAC1B,QAAQ,EAAE,eAAe,EAAE,CAAC;IAC5B,UAAU,EAAE;QACV,UAAU,EAAE,MAAM,CAAC;QACnB,YAAY,EAAE,MAAM,CAAC;QACrB,YAAY,EAAE,MAAM,CAAC;QACrB,gBAAgB,EAAE,MAAM,CAAC;QACzB,kBAAkB,EAAE,MAAM,CAAC;QAC3B,oBAAoB,EAAE,MAAM,CAAC;KAC9B,CAAC;IACF,WAAW,EAAE,MAAM,EAAE,CAAC;CACvB;AAED,qBAAa,iBAAiB;IAK1B,OAAO,CAAC,cAAc;IACtB,OAAO,CAAC,aAAa;IALvB,OAAO,CAAC,eAAe,CAA6B;IACpD,OAAO,CAAC,iBAAiB,CAAwB;gBAGvC,cAAc,EAAE,cAAc,EAC9B,aAAa,EAAE,OAAO,uBAAuB;IAWjD,gBAAgB,CACpB,QAAQ,EAAE,YAAY,EACtB,OAAO,GAAE;QACP,aAAa,CAAC,EAAE,OAAO,CAAC;QACxB,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,OAAO,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,aAAa,GAAG,QAAQ,CAAC;KACvD,GACL,OAAO,CAAC,wBAAwB,CAAC;IAgHpC,OAAO,CAAC,yBAAyB;YAkInB,gBAAgB;IAmO9B,OAAO,CAAC,mBAAmB;IA8H3B,OAAO,CAAC,yBAAyB;IAgGjC,OAAO,CAAC,gCAAgC;IAoFxC,OAAO,CAAC,wBAAwB;IAsChC,OAAO,CAAC,oBAAoB;IAuE5B,OAAO,CAAC,QAAQ;IAsFhB,OAAO,CAAC,mBAAmB;IA4F3B,OAAO,CAAC,wBAAwB;IA2BhC,OAAO,CAAC,YAAY;IAgBpB,OAAO,CAAC,qBAAqB;IAgG7B,OAAO,CAAC,qBAAqB;IA8C7B,OAAO,CAAC,mBAAmB;IA4E3B,OAAO,CAAC,sBAAsB;IAyT9B,OAAO,CAAC,yBAAyB;IAqCjC,OAAO,CAAC,gCAAgC;IA8BxC,OAAO,CAAC,gCAAgC;IAsFxC,OAAO,CAAC,gBAAgB;IA4CxB,OAAO,CAAC,2BAA2B;CAmEpC"}
|
||||
{"version":3,"file":"workflow-validator.d.ts","sourceRoot":"","sources":["../../src/services/workflow-validator.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,uBAAuB,EAAE,MAAM,6BAA6B,CAAC;AAatE,UAAU,YAAY;IACpB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC3B,UAAU,EAAE,GAAG,CAAC;IAChB,WAAW,CAAC,EAAE,GAAG,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,OAAO,CAAC,EAAE,uBAAuB,GAAG,qBAAqB,GAAG,cAAc,CAAC;IAC3E,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB;AAED,UAAU,kBAAkB;IAC1B,CAAC,UAAU,EAAE,MAAM,GAAG;QACpB,IAAI,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;QACnE,KAAK,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;QACpE,OAAO,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;KACvE,CAAC;CACH;AAED,UAAU,YAAY;IACpB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,YAAY,EAAE,CAAC;IACtB,WAAW,EAAE,kBAAkB,CAAC;IAChC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,UAAU,CAAC,EAAE,GAAG,CAAC;IACjB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,GAAG,CAAC;CACZ;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,OAAO,GAAG,SAAS,CAAC;IAC1B,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,GAAG,CAAC,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,WAAW,CAAC,EAAE,MAAM,CAAC;KACtB,CAAC;CACH;AAED,MAAM,WAAW,wBAAwB;IACvC,KAAK,EAAE,OAAO,CAAC;IACf,MAAM,EAAE,eAAe,EAAE,CAAC;IAC1B,QAAQ,EAAE,eAAe,EAAE,CAAC;IAC5B,UAAU,EAAE;QACV,UAAU,EAAE,MAAM,CAAC;QACnB,YAAY,EAAE,MAAM,CAAC;QACrB,YAAY,EAAE,MAAM,CAAC;QACrB,gBAAgB,EAAE,MAAM,CAAC;QACzB,kBAAkB,EAAE,MAAM,CAAC;QAC3B,oBAAoB,EAAE,MAAM,CAAC;KAC9B,CAAC;IACF,WAAW,EAAE,MAAM,EAAE,CAAC;CACvB;AAED,qBAAa,iBAAiB;IAK1B,OAAO,CAAC,cAAc;IACtB,OAAO,CAAC,aAAa;IALvB,OAAO,CAAC,eAAe,CAA6B;IACpD,OAAO,CAAC,iBAAiB,CAAwB;gBAGvC,cAAc,EAAE,cAAc,EAC9B,aAAa,EAAE,OAAO,uBAAuB;IAWjD,gBAAgB,CACpB,QAAQ,EAAE,YAAY,EACtB,OAAO,GAAE;QACP,aAAa,CAAC,EAAE,OAAO,CAAC;QACxB,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,OAAO,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,aAAa,GAAG,QAAQ,CAAC;KACvD,GACL,OAAO,CAAC,wBAAwB,CAAC;IAgHpC,OAAO,CAAC,yBAAyB;YAkInB,gBAAgB;IA4L9B,OAAO,CAAC,mBAAmB;IA8H3B,OAAO,CAAC,yBAAyB;IAgGjC,OAAO,CAAC,gCAAgC;IAoFxC,OAAO,CAAC,wBAAwB;IAsChC,OAAO,CAAC,oBAAoB;IAuE5B,OAAO,CAAC,QAAQ;IAsFhB,OAAO,CAAC,mBAAmB;IA4F3B,OAAO,CAAC,wBAAwB;IA2BhC,OAAO,CAAC,YAAY;IAgBpB,OAAO,CAAC,qBAAqB;IAgG7B,OAAO,CAAC,qBAAqB;IA8C7B,OAAO,CAAC,mBAAmB;IA4E3B,OAAO,CAAC,sBAAsB;IAyT9B,OAAO,CAAC,yBAAyB;IAqCjC,OAAO,CAAC,gCAAgC;IA8BxC,OAAO,CAAC,gCAAgC;IAsFxC,OAAO,CAAC,gBAAgB;IA4CxB,OAAO,CAAC,2BAA2B;CAmEpC"}
|
||||
dist/services/workflow-validator.js (29 changes, vendored)
@@ -236,31 +236,7 @@ class WorkflowValidator {
|
||||
}
|
||||
}
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(node.type);
|
||||
let nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
if (!nodeInfo && tool_variant_generator_1.ToolVariantGenerator.isToolVariantNodeType(normalizedType)) {
|
||||
const baseNodeType = tool_variant_generator_1.ToolVariantGenerator.getBaseNodeType(normalizedType);
|
||||
if (baseNodeType) {
|
||||
const baseNodeInfo = this.nodeRepository.getNode(baseNodeType);
|
||||
if (baseNodeInfo) {
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
message: `Node type "${node.type}" is inferred as a dynamic AI Tool variant of "${baseNodeType}". ` +
|
||||
`This Tool variant is created by n8n at runtime when connecting "${baseNodeInfo.displayName}" to an AI Agent.`,
|
||||
code: 'INFERRED_TOOL_VARIANT'
|
||||
});
|
||||
nodeInfo = {
|
||||
...baseNodeInfo,
|
||||
nodeType: normalizedType,
|
||||
displayName: `${baseNodeInfo.displayName} Tool`,
|
||||
isToolVariant: true,
|
||||
toolVariantOf: baseNodeType,
|
||||
isInferred: true
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
const nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
if (!nodeInfo) {
|
||||
const suggestions = await this.similarityService.findSimilarNodes(node.type, 3);
|
||||
let message = `Unknown node type: "${node.type}".`;
|
||||
@@ -334,9 +310,6 @@ class WorkflowValidator {
|
||||
if (normalizedType.startsWith('nodes-langchain.')) {
|
||||
continue;
|
||||
}
|
||||
if (nodeInfo.isInferred) {
|
||||
continue;
|
||||
}
|
||||
const paramsWithVersion = {
|
||||
'@version': node.typeVersion || 1,
|
||||
...node.parameters
|
||||
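The hunk above relies on the idea that a runtime `...Tool` node type can be mapped back to a concrete base node. A rough sketch of that inference, with a hypothetical helper standing in for `ToolVariantGenerator` (the real heuristics may differ):

```typescript
// Hypothetical stand-in for ToolVariantGenerator: a "<base>Tool" node type is
// treated as a runtime AI Tool variant of "<base>".
function getBaseNodeType(toolVariantType: string): string | null {
  return toolVariantType.endsWith('Tool')
    ? toolVariantType.slice(0, -'Tool'.length)
    : null;
}

// e.g. "n8n-nodes-base.httpRequestTool" -> "n8n-nodes-base.httpRequest"
console.log(getBaseNodeType('n8n-nodes-base.httpRequestTool'));
```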
|
||||
dist/services/workflow-validator.js.map (2 changes, vendored)
File diff suppressed because one or more lines are too long
dist/telemetry/batch-processor.d.ts (2 changes, vendored)
@@ -12,8 +12,6 @@ export declare class TelemetryBatchProcessor {
|
||||
private flushTimes;
|
||||
private deadLetterQueue;
|
||||
private readonly maxDeadLetterSize;
|
||||
private eventListeners;
|
||||
private started;
|
||||
constructor(supabase: SupabaseClient | null, isEnabled: () => boolean);
|
||||
start(): void;
|
||||
stop(): void;
|
||||
|
||||
dist/telemetry/batch-processor.d.ts.map (2 changes, vendored)
@@ -1 +1 @@
|
||||
{"version":3,"file":"batch-processor.d.ts","sourceRoot":"","sources":["../../src/telemetry/batch-processor.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,cAAc,EAAE,MAAM,uBAAuB,CAAC;AACvD,OAAO,EAAE,cAAc,EAAE,iBAAiB,EAAE,sBAAsB,EAAoB,gBAAgB,EAAE,MAAM,mBAAmB,CAAC;AAoClI,qBAAa,uBAAuB;IA2BhC,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,SAAS;IA3BnB,OAAO,CAAC,UAAU,CAAC,CAAiB;IACpC,OAAO,CAAC,gBAAgB,CAAkB;IAC1C,OAAO,CAAC,mBAAmB,CAAkB;IAC7C,OAAO,CAAC,mBAAmB,CAAkB;IAC7C,OAAO,CAAC,cAAc,CAA0B;IAChD,OAAO,CAAC,OAAO,CAQb;IACF,OAAO,CAAC,UAAU,CAAgB;IAClC,OAAO,CAAC,eAAe,CAAuE;IAC9F,OAAO,CAAC,QAAQ,CAAC,iBAAiB,CAAO;IAEzC,OAAO,CAAC,cAAc,CAIf;IACP,OAAO,CAAC,OAAO,CAAkB;gBAGvB,QAAQ,EAAE,cAAc,GAAG,IAAI,EAC/B,SAAS,EAAE,MAAM,OAAO;IAQlC,KAAK,IAAI,IAAI;IA0Cb,IAAI,IAAI,IAAI;IAyBN,KAAK,CAAC,MAAM,CAAC,EAAE,cAAc,EAAE,EAAE,SAAS,CAAC,EAAE,iBAAiB,EAAE,EAAE,SAAS,CAAC,EAAE,sBAAsB,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;YAgD9G,WAAW;YAmDX,cAAc;YAuDd,cAAc;YAiEd,gBAAgB;IAgD9B,OAAO,CAAC,aAAa;IAarB,OAAO,CAAC,oBAAoB;IAiB5B,OAAO,CAAC,oBAAoB;YAmBd,sBAAsB;IAgCpC,OAAO,CAAC,eAAe;IAiBvB,UAAU,IAAI,gBAAgB,GAAG;QAAE,mBAAmB,EAAE,GAAG,CAAC;QAAC,mBAAmB,EAAE,MAAM,CAAA;KAAE;IAW1F,YAAY,IAAI,IAAI;CAarB"}
|
||||
{"version":3,"file":"batch-processor.d.ts","sourceRoot":"","sources":["../../src/telemetry/batch-processor.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,cAAc,EAAE,MAAM,uBAAuB,CAAC;AACvD,OAAO,EAAE,cAAc,EAAE,iBAAiB,EAAE,sBAAsB,EAAoB,gBAAgB,EAAE,MAAM,mBAAmB,CAAC;AAyBlI,qBAAa,uBAAuB;IAoBhC,OAAO,CAAC,QAAQ;IAChB,OAAO,CAAC,SAAS;IApBnB,OAAO,CAAC,UAAU,CAAC,CAAiB;IACpC,OAAO,CAAC,gBAAgB,CAAkB;IAC1C,OAAO,CAAC,mBAAmB,CAAkB;IAC7C,OAAO,CAAC,mBAAmB,CAAkB;IAC7C,OAAO,CAAC,cAAc,CAA0B;IAChD,OAAO,CAAC,OAAO,CAQb;IACF,OAAO,CAAC,UAAU,CAAgB;IAClC,OAAO,CAAC,eAAe,CAAuE;IAC9F,OAAO,CAAC,QAAQ,CAAC,iBAAiB,CAAO;gBAG/B,QAAQ,EAAE,cAAc,GAAG,IAAI,EAC/B,SAAS,EAAE,MAAM,OAAO;IAQlC,KAAK,IAAI,IAAI;IA+Bb,IAAI,IAAI,IAAI;IAWN,KAAK,CAAC,MAAM,CAAC,EAAE,cAAc,EAAE,EAAE,SAAS,CAAC,EAAE,iBAAiB,EAAE,EAAE,SAAS,CAAC,EAAE,sBAAsB,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;YAgD9G,WAAW;YAmDX,cAAc;YAuDd,cAAc;YAiEd,gBAAgB;IAgD9B,OAAO,CAAC,aAAa;IAarB,OAAO,CAAC,oBAAoB;IAiB5B,OAAO,CAAC,oBAAoB;YAmBd,sBAAsB;IAgCpC,OAAO,CAAC,eAAe;IAiBvB,UAAU,IAAI,gBAAgB,GAAG;QAAE,mBAAmB,EAAE,GAAG,CAAC;QAAC,mBAAmB,EAAE,MAAM,CAAA;KAAE;IAW1F,YAAY,IAAI,IAAI;CAarB"}
|
||||
dist/telemetry/batch-processor.js (51 changes, vendored)
@@ -4,13 +4,19 @@ exports.TelemetryBatchProcessor = void 0;
const telemetry_types_1 = require("./telemetry-types");
const telemetry_error_1 = require("./telemetry-error");
const logger_1 = require("../utils/logger");
function keyToSnakeCase(key) {
return key.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
}
function mutationToSupabaseFormat(mutation) {
function toSnakeCase(obj) {
if (obj === null || obj === undefined)
return obj;
if (Array.isArray(obj))
return obj.map(toSnakeCase);
if (typeof obj !== 'object')
return obj;
const result = {};
for (const [key, value] of Object.entries(mutation)) {
result[keyToSnakeCase(key)] = value;
for (const key in obj) {
if (obj.hasOwnProperty(key)) {
const snakeKey = key.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
result[snakeKey] = toSnakeCase(obj[key]);
}
}
return result;
}
||||
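The change above swaps a shallow key rename (`mutationToSupabaseFormat`) for a recursive one. A small usage sketch of the recursive behaviour, assuming plain-object input as in the telemetry payloads:

```typescript
// Recursive camelCase -> snake_case conversion, mirroring the compiled toSnakeCase above.
function toSnakeCase(obj: unknown): unknown {
  if (obj === null || obj === undefined) return obj;
  if (Array.isArray(obj)) return obj.map(toSnakeCase);
  if (typeof obj !== 'object') return obj;
  const result: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(obj as Record<string, unknown>)) {
    result[key.replace(/[A-Z]/g, (l) => `_${l.toLowerCase()}`)] = toSnakeCase(value);
  }
  return result;
}

// Nested keys are converted too, which the previous shallow version did not do:
console.log(toSnakeCase({ workflowId: 'w1', mutationData: { nodesAdded: 2 } }));
// { workflow_id: 'w1', mutation_data: { nodes_added: 2 } }
```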
@@ -33,36 +39,26 @@ class TelemetryBatchProcessor {
|
||||
this.flushTimes = [];
|
||||
this.deadLetterQueue = [];
|
||||
this.maxDeadLetterSize = 100;
|
||||
this.eventListeners = {};
|
||||
this.started = false;
|
||||
this.circuitBreaker = new telemetry_error_1.TelemetryCircuitBreaker();
|
||||
}
|
||||
start() {
|
||||
if (!this.isEnabled() || !this.supabase)
|
||||
return;
|
||||
if (this.started) {
|
||||
logger_1.logger.debug('Telemetry batch processor already started, skipping');
|
||||
return;
|
||||
}
|
||||
this.flushTimer = setInterval(() => {
|
||||
this.flush();
|
||||
}, telemetry_types_1.TELEMETRY_CONFIG.BATCH_FLUSH_INTERVAL);
|
||||
if (typeof this.flushTimer === 'object' && 'unref' in this.flushTimer) {
|
||||
this.flushTimer.unref();
|
||||
}
|
||||
this.eventListeners.beforeExit = () => this.flush();
|
||||
this.eventListeners.sigint = () => {
|
||||
process.on('beforeExit', () => this.flush());
|
||||
process.on('SIGINT', () => {
|
||||
this.flush();
|
||||
process.exit(0);
|
||||
};
|
||||
this.eventListeners.sigterm = () => {
|
||||
});
|
||||
process.on('SIGTERM', () => {
|
||||
this.flush();
|
||||
process.exit(0);
|
||||
};
|
||||
process.on('beforeExit', this.eventListeners.beforeExit);
|
||||
process.on('SIGINT', this.eventListeners.sigint);
|
||||
process.on('SIGTERM', this.eventListeners.sigterm);
|
||||
this.started = true;
|
||||
});
|
||||
logger_1.logger.debug('Telemetry batch processor started');
|
||||
}
|
||||
stop() {
|
||||
@@ -70,17 +66,6 @@ class TelemetryBatchProcessor {
clearInterval(this.flushTimer);
this.flushTimer = undefined;
}
if (this.eventListeners.beforeExit) {
process.removeListener('beforeExit', this.eventListeners.beforeExit);
}
if (this.eventListeners.sigint) {
process.removeListener('SIGINT', this.eventListeners.sigint);
}
if (this.eventListeners.sigterm) {
process.removeListener('SIGTERM', this.eventListeners.sigterm);
}
this.eventListeners = {};
this.started = false;
logger_1.logger.debug('Telemetry batch processor stopped');
}
async flush(events, workflows, mutations) {
||||
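The intent of the removed block above is to keep references to the process listeners registered in `start()` so that `stop()` can detach them instead of leaking handlers. A minimal sketch of that pattern, independent of the surrounding class:

```typescript
// Keep bound listener references so start()/stop() register and remove them symmetrically.
class FlushLifecycle {
  private listeners: { beforeExit?: () => void; sigint?: () => void } = {};
  private started = false;

  constructor(private flush: () => void) {}

  start(): void {
    if (this.started) return;
    const beforeExit = () => this.flush();
    const sigint = () => { this.flush(); process.exit(0); };
    this.listeners = { beforeExit, sigint };
    process.on('beforeExit', beforeExit);
    process.on('SIGINT', sigint);
    this.started = true;
  }

  stop(): void {
    if (this.listeners.beforeExit) process.removeListener('beforeExit', this.listeners.beforeExit);
    if (this.listeners.sigint) process.removeListener('SIGINT', this.listeners.sigint);
    this.listeners = {};
    this.started = false;
  }
}
```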
@@ -200,7 +185,7 @@ class TelemetryBatchProcessor {
const batches = this.createBatches(mutations, telemetry_types_1.TELEMETRY_CONFIG.MAX_BATCH_SIZE);
for (const batch of batches) {
const result = await this.executeWithRetry(async () => {
const snakeCaseBatch = batch.map(mutation => mutationToSupabaseFormat(mutation));
const snakeCaseBatch = batch.map(mutation => toSnakeCase(mutation));
const { error } = await this.supabase
.from('workflow_mutations')
.insert(snakeCaseBatch);
||||
|
||||
dist/telemetry/batch-processor.js.map (2 changes, vendored)
File diff suppressed because one or more lines are too long
dist/types/index.d.ts (8 changes, vendored)
@@ -7,13 +7,6 @@ export interface MCPServerConfig {
|
||||
host: string;
|
||||
authToken?: string;
|
||||
}
|
||||
export interface ToolAnnotations {
|
||||
title?: string;
|
||||
readOnlyHint?: boolean;
|
||||
destructiveHint?: boolean;
|
||||
idempotentHint?: boolean;
|
||||
openWorldHint?: boolean;
|
||||
}
|
||||
export interface ToolDefinition {
|
||||
name: string;
|
||||
description: string;
|
||||
@@ -29,7 +22,6 @@ export interface ToolDefinition {
|
||||
required?: string[];
|
||||
additionalProperties?: boolean | Record<string, any>;
|
||||
};
|
||||
annotations?: ToolAnnotations;
|
||||
}
|
||||
export interface ResourceDefinition {
|
||||
uri: string;
|
||||
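The `ToolAnnotations` interface removed above matches the MCP-style tool annotation hints. A hedged example of how a tool definition might carry them (the tool name and values are illustrative, not taken from this repository):

```typescript
interface ToolAnnotations {
  title?: string;
  readOnlyHint?: boolean;
  destructiveHint?: boolean;
  idempotentHint?: boolean;
  openWorldHint?: boolean;
}

// Illustrative annotations for a read-only documentation lookup tool.
const annotations: ToolAnnotations = {
  title: 'Get Node Documentation',
  readOnlyHint: true,
  destructiveHint: false,
  idempotentHint: true,
  openWorldHint: false,
};
console.log(annotations.title);
```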
|
||||
dist/types/index.d.ts.map (2 changes, vendored)
@@ -1 +1 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/types/index.ts"],"names":[],"mappings":"AACA,cAAc,cAAc,CAAC;AAC7B,cAAc,mBAAmB,CAAC;AAClC,cAAc,oBAAoB,CAAC;AACnC,cAAc,iBAAiB,CAAC;AAEhC,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAMD,MAAM,WAAW,eAAe;IAE9B,KAAK,CAAC,EAAE,MAAM,CAAC;IAEf,YAAY,CAAC,EAAE,OAAO,CAAC;IAEvB,eAAe,CAAC,EAAE,OAAO,CAAC;IAE1B,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB,aAAa,CAAC,EAAE,OAAO,CAAC;CACzB;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE;QACX,IAAI,EAAE,MAAM,CAAC;QACb,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAChC,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;QACpB,oBAAoB,CAAC,EAAE,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;KACtD,CAAC;IACF,YAAY,CAAC,EAAE;QACb,IAAI,EAAE,MAAM,CAAC;QACb,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAChC,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;QACpB,oBAAoB,CAAC,EAAE,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;KACtD,CAAC;IAEF,WAAW,CAAC,EAAE,eAAe,CAAC;CAC/B;AAED,MAAM,WAAW,kBAAkB;IACjC,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,KAAK,CAAC;QAChB,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,QAAQ,CAAC,EAAE,OAAO,CAAC;KACpB,CAAC,CAAC;CACJ"}
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/types/index.ts"],"names":[],"mappings":"AACA,cAAc,cAAc,CAAC;AAC7B,cAAc,mBAAmB,CAAC;AAClC,cAAc,oBAAoB,CAAC;AACnC,cAAc,iBAAiB,CAAC;AAEhC,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE;QACX,IAAI,EAAE,MAAM,CAAC;QACb,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAChC,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;QACpB,oBAAoB,CAAC,EAAE,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;KACtD,CAAC;IACF,YAAY,CAAC,EAAE;QACb,IAAI,EAAE,MAAM,CAAC;QACb,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAChC,QAAQ,CAAC,EAAE,MAAM,EAAE,CAAC;QACpB,oBAAoB,CAAC,EAAE,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;KACtD,CAAC;CACH;AAED,MAAM,WAAW,kBAAkB;IACjC,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,KAAK,CAAC;QAChB,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,QAAQ,CAAC,EAAE,OAAO,CAAC;KACpB,CAAC,CAAC;CACJ"}
|
||||
dist/types/n8n-api.d.ts (41 changes, vendored)
@@ -267,7 +267,7 @@ export interface McpToolResponse {
|
||||
executionId?: string;
|
||||
workflowId?: string;
|
||||
}
|
||||
export type ExecutionMode = 'preview' | 'summary' | 'filtered' | 'full' | 'error';
|
||||
export type ExecutionMode = 'preview' | 'summary' | 'filtered' | 'full';
|
||||
export interface ExecutionPreview {
|
||||
totalNodes: number;
|
||||
executedNodes: number;
|
||||
@@ -296,9 +296,6 @@ export interface ExecutionFilterOptions {
|
||||
itemsLimit?: number;
|
||||
includeInputData?: boolean;
|
||||
fieldsToInclude?: string[];
|
||||
errorItemsLimit?: number;
|
||||
includeStackTrace?: boolean;
|
||||
includeExecutionPath?: boolean;
|
||||
}
|
||||
export interface FilteredExecutionResponse {
|
||||
id: string;
|
||||
@@ -319,7 +316,6 @@ export interface FilteredExecutionResponse {
|
||||
};
|
||||
nodes?: Record<string, FilteredNodeData>;
|
||||
error?: Record<string, unknown>;
|
||||
errorInfo?: ErrorAnalysis;
|
||||
}
|
||||
export interface FilteredNodeData {
|
||||
executionTime?: number;
|
||||
@@ -337,39 +333,4 @@ export interface FilteredNodeData {
|
||||
};
|
||||
};
|
||||
}
|
||||
export interface ErrorAnalysis {
|
||||
primaryError: {
|
||||
message: string;
|
||||
errorType: string;
|
||||
nodeName: string;
|
||||
nodeType: string;
|
||||
nodeId?: string;
|
||||
nodeParameters?: Record<string, unknown>;
|
||||
stackTrace?: string;
|
||||
};
|
||||
upstreamContext?: {
|
||||
nodeName: string;
|
||||
nodeType: string;
|
||||
itemCount: number;
|
||||
sampleItems: unknown[];
|
||||
dataStructure: Record<string, unknown>;
|
||||
};
|
||||
executionPath?: Array<{
|
||||
nodeName: string;
|
||||
status: 'success' | 'error' | 'skipped';
|
||||
itemCount: number;
|
||||
executionTime?: number;
|
||||
}>;
|
||||
additionalErrors?: Array<{
|
||||
nodeName: string;
|
||||
message: string;
|
||||
}>;
|
||||
suggestions?: ErrorSuggestion[];
|
||||
}
|
||||
export interface ErrorSuggestion {
|
||||
type: 'fix' | 'investigate' | 'workaround';
|
||||
title: string;
|
||||
description: string;
|
||||
confidence: 'high' | 'medium' | 'low';
|
||||
}
|
||||
//# sourceMappingURL=n8n-api.d.ts.map
|
||||
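For orientation, a sketch of the payload shape the removed `ErrorAnalysis` declarations describe. The field names follow the interface above; the values are invented for illustration.

```typescript
// Invented example values; only the field names come from the ErrorAnalysis interface above.
const errorInfo = {
  primaryError: {
    message: 'Cannot read properties of undefined (reading "email")',
    errorType: 'NodeOperationError',
    nodeName: 'Send Email',
    nodeType: 'n8n-nodes-base.emailSend',
  },
  executionPath: [
    { nodeName: 'Webhook', status: 'success' as const, itemCount: 1 },
    { nodeName: 'Send Email', status: 'error' as const, itemCount: 0 },
  ],
  suggestions: [
    {
      type: 'fix' as const,
      title: 'Check upstream field',
      description: 'Verify the incoming item actually contains "email".',
      confidence: 'medium' as const,
    },
  ],
};
console.log(errorInfo.primaryError.message);
```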
dist/types/n8n-api.d.ts.map (2 changes, vendored)
File diff suppressed because one or more lines are too long
@@ -12,8 +12,7 @@ services:
environment:
# Mode configuration
MCP_MODE: ${MCP_MODE:-http}
# NOTE: USE_FIXED_HTTP is deprecated. SingleSessionHTTPServer is now the default.
# See: https://github.com/czlonkowski/n8n-mcp/issues/524
USE_FIXED_HTTP: ${USE_FIXED_HTTP:-true} # Use fixed implementation for stability
AUTH_TOKEN: ${AUTH_TOKEN:?AUTH_TOKEN is required for HTTP mode}
|
||||
|
||||
# Application settings
|
||||
|
||||
docs/CHANGELOG.md (1652 changes, new file)
File diff suppressed because it is too large
@@ -21,6 +21,7 @@ cd n8n-mcp
|
||||
# Create .env file with auth token
|
||||
cat > .env << EOF
|
||||
AUTH_TOKEN=$(openssl rand -base64 32)
|
||||
USE_FIXED_HTTP=true
|
||||
EOF
|
||||
|
||||
# Start the server
|
||||
@@ -45,6 +46,7 @@ docker pull ghcr.io/czlonkowski/n8n-mcp:latest
|
||||
docker run -d \
|
||||
--name n8n-mcp \
|
||||
-e MCP_MODE=http \
|
||||
-e USE_FIXED_HTTP=true \
|
||||
-e AUTH_TOKEN=your-secure-token \
|
||||
-p 3000:3000 \
|
||||
ghcr.io/czlonkowski/n8n-mcp:latest
|
||||
|
||||
@@ -67,6 +67,7 @@ Claude Desktop → mcp-remote → https://your-server.com
|
||||
# 1. Create environment file
|
||||
cat > .env << EOF
|
||||
AUTH_TOKEN=$(openssl rand -base64 32)
|
||||
USE_FIXED_HTTP=true
|
||||
MCP_MODE=http
|
||||
PORT=3000
|
||||
# Optional: Enable n8n management tools
|
||||
@@ -105,6 +106,7 @@ npm run rebuild
|
||||
|
||||
# 2. Configure environment
|
||||
export MCP_MODE=http
|
||||
export USE_FIXED_HTTP=true # Important: Use fixed implementation
|
||||
export AUTH_TOKEN=$(openssl rand -base64 32)
|
||||
export PORT=3000
|
||||
|
||||
@@ -142,6 +144,7 @@ Skip HTTP entirely and use stdio mode directly:
|
||||
| Variable | Description | Example |
|
||||
|----------|-------------|------|
|
||||
| `MCP_MODE` | Must be set to `http` | `http` |
|
||||
| `USE_FIXED_HTTP` | **Important**: Set to `true` for stable implementation | `true` |
|
||||
| `AUTH_TOKEN` or `AUTH_TOKEN_FILE` | Authentication method | See security section |
|
||||
|
||||
### Optional Settings
|
||||
@@ -414,6 +417,7 @@ services:
|
||||
environment:
|
||||
# Core configuration
|
||||
MCP_MODE: http
|
||||
USE_FIXED_HTTP: true
|
||||
NODE_ENV: production
|
||||
|
||||
# Security - Using file-based secret
|
||||
@@ -496,6 +500,7 @@ WorkingDirectory=/opt/n8n-mcp
|
||||
# Use file-based secret
|
||||
Environment="AUTH_TOKEN_FILE=/etc/n8n-mcp/auth_token"
|
||||
Environment="MCP_MODE=http"
|
||||
Environment="USE_FIXED_HTTP=true"
|
||||
Environment="NODE_ENV=production"
|
||||
Environment="TRUST_PROXY=1"
|
||||
Environment="BASE_URL=https://n8n-mcp.example.com"
|
||||
@@ -767,8 +772,8 @@ sudo ufw status # Linux
|
||||
```
|
||||
|
||||
**"Stream is not readable":**
|
||||
- This issue was fixed in v2.3.2+ with the SingleSessionHTTPServer
|
||||
- No additional configuration needed
|
||||
- Ensure `USE_FIXED_HTTP=true` is set
|
||||
- Fixed in v2.3.2+
|
||||
|
||||
**Bridge script not working:**
|
||||
```bash
|
||||
|
||||
@@ -18,6 +18,7 @@ The fastest way to get n8n-MCP running:
|
||||
# Using Docker (recommended)
|
||||
cat > .env << EOF
|
||||
AUTH_TOKEN=$(openssl rand -base64 32)
|
||||
USE_FIXED_HTTP=true
|
||||
EOF
|
||||
docker compose up -d
|
||||
```
|
||||
@@ -48,6 +49,7 @@ docker compose up -d
|
||||
|
||||
environment:
|
||||
MCP_MODE: ${MCP_MODE:-http}
|
||||
USE_FIXED_HTTP: ${USE_FIXED_HTTP:-true}
|
||||
AUTH_TOKEN: ${AUTH_TOKEN:?AUTH_TOKEN is required}
|
||||
NODE_ENV: ${NODE_ENV:-production}
|
||||
LOG_LEVEL: ${LOG_LEVEL:-info}
|
||||
|
||||
@@ -98,6 +98,7 @@ These are automatically set by the Railway template:
|
||||
|----------|--------------|-------------|
|
||||
| `AUTH_TOKEN` | `REPLACE_THIS...` | **⚠️ CHANGE IMMEDIATELY** |
|
||||
| `MCP_MODE` | `http` | Required for cloud deployment |
|
||||
| `USE_FIXED_HTTP` | `true` | Stable HTTP implementation |
|
||||
| `NODE_ENV` | `production` | Production optimizations |
|
||||
| `LOG_LEVEL` | `info` | Balanced logging |
|
||||
| `TRUST_PROXY` | `1` | Railway runs behind proxy |
|
||||
|
||||
@@ -40,6 +40,7 @@ Key configuration options:
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `MCP_MODE` | Server mode: `stdio` or `http` | `stdio` |
|
||||
| `USE_FIXED_HTTP` | Use fixed HTTP implementation (v2.3.2+) | `true` |
|
||||
| `AUTH_TOKEN` | Authentication token for HTTP mode | Required |
|
||||
| `PORT` | HTTP server port | `3000` |
|
||||
| `LOG_LEVEL` | Logging verbosity | `info` |
|
||||
|
||||
package-lock.json (8214 changes, generated)
File diff suppressed because it is too large
package.json (23 changes)
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "n8n-mcp",
|
||||
"version": "2.33.6",
|
||||
"version": "2.30.1",
|
||||
"description": "Integration between n8n workflow automation and Model Context Protocol (MCP)",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
@@ -22,9 +22,9 @@
|
||||
"test-nodes": "node dist/scripts/test-nodes.js",
|
||||
"start": "node dist/mcp/index.js",
|
||||
"start:http": "MCP_MODE=http node dist/mcp/index.js",
|
||||
"start:http:fixed:deprecated": "echo 'DEPRECATED: USE_FIXED_HTTP is deprecated. Use npm run start:http instead.' && MCP_MODE=http USE_FIXED_HTTP=true node dist/mcp/index.js",
|
||||
"start:http:fixed": "MCP_MODE=http USE_FIXED_HTTP=true node dist/mcp/index.js",
|
||||
"start:n8n": "N8N_MODE=true MCP_MODE=http node dist/mcp/index.js",
|
||||
"http": "npm run build && npm run start:http",
|
||||
"http": "npm run build && npm run start:http:fixed",
|
||||
"dev": "npm run build && npm run rebuild && npm run validate",
|
||||
"dev:http": "MCP_MODE=http nodemon --watch src --ext ts --exec 'npm run build && npm run start:http'",
|
||||
"test:single-session": "./scripts/test-single-session.sh",
|
||||
@@ -50,15 +50,6 @@
|
||||
"fetch:templates:update": "node dist/scripts/fetch-templates.js --update",
|
||||
"fetch:templates:extract": "node dist/scripts/fetch-templates.js --extract-only",
|
||||
"fetch:templates:robust": "node dist/scripts/fetch-templates-robust.js",
|
||||
"fetch:community": "node dist/scripts/fetch-community-nodes.js",
|
||||
"fetch:community:verified": "node dist/scripts/fetch-community-nodes.js --verified-only",
|
||||
"fetch:community:update": "node dist/scripts/fetch-community-nodes.js --update",
|
||||
"generate:docs": "node dist/scripts/generate-community-docs.js",
|
||||
"generate:docs:readme-only": "node dist/scripts/generate-community-docs.js --readme-only",
|
||||
"generate:docs:summary-only": "node dist/scripts/generate-community-docs.js --summary-only",
|
||||
"generate:docs:incremental": "node dist/scripts/generate-community-docs.js --incremental",
|
||||
"generate:docs:stats": "node dist/scripts/generate-community-docs.js --stats",
|
||||
"migrate:readme-columns": "node dist/scripts/migrate-readme-columns.js",
|
||||
"prebuild:fts5": "npx tsx scripts/prebuild-fts5.ts",
|
||||
"test:templates": "node dist/scripts/test-templates.js",
|
||||
"test:protocol-negotiation": "npx tsx src/scripts/test-protocol-negotiation.ts",
|
||||
@@ -150,16 +141,16 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "1.20.1",
|
||||
"@n8n/n8n-nodes-langchain": "^2.6.2",
|
||||
"@n8n/n8n-nodes-langchain": "^2.0.1",
|
||||
"@supabase/supabase-js": "^2.57.4",
|
||||
"dotenv": "^16.5.0",
|
||||
"express": "^5.1.0",
|
||||
"express-rate-limit": "^7.1.5",
|
||||
"form-data": "^4.0.5",
|
||||
"lru-cache": "^11.2.1",
|
||||
"n8n": "^2.6.3",
|
||||
"n8n-core": "^2.6.1",
|
||||
"n8n-workflow": "^2.6.0",
|
||||
"n8n": "^2.0.2",
|
||||
"n8n-core": "^2.0.1",
|
||||
"n8n-workflow": "^2.0.1",
|
||||
"openai": "^4.77.0",
|
||||
"sql.js": "^1.13.0",
|
||||
"tslib": "^2.6.2",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "n8n-mcp-runtime",
|
||||
"version": "2.33.2",
|
||||
"version": "2.29.5",
|
||||
"description": "n8n MCP Server Runtime Dependencies Only",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
|
||||
@@ -71,12 +71,10 @@ const testCases: TestCase[] = [
|
||||
}
|
||||
},
|
||||
{
|
||||
// DEPRECATED: This test case tests the deprecated fixed HTTP implementation
|
||||
// See: https://github.com/czlonkowski/n8n-mcp/issues/524
|
||||
name: 'Fixed HTTP implementation (DEPRECATED)',
|
||||
name: 'Fixed HTTP implementation',
|
||||
env: {
|
||||
MCP_MODE: 'http',
|
||||
USE_FIXED_HTTP: 'true', // DEPRECATED: Will be removed in future version
|
||||
USE_FIXED_HTTP: 'true',
|
||||
AUTH_TOKEN: 'test-token-for-testing-only',
|
||||
PORT: '3005',
|
||||
BASE_URL: 'https://fixed.example.com'
|
||||
|
||||
@@ -1,522 +0,0 @@
|
||||
import axios, { AxiosError } from 'axios';
|
||||
import { logger } from '../utils/logger';
|
||||
|
||||
/**
|
||||
* Configuration constants for community node fetching
|
||||
*/
|
||||
const FETCH_CONFIG = {
|
||||
/** Default timeout for Strapi API requests (ms) */
|
||||
STRAPI_TIMEOUT: 30000,
|
||||
/** Default timeout for npm registry requests (ms) */
|
||||
NPM_REGISTRY_TIMEOUT: 15000,
|
||||
/** Default timeout for npm downloads API (ms) */
|
||||
NPM_DOWNLOADS_TIMEOUT: 10000,
|
||||
/** Base delay between retries (ms) */
|
||||
RETRY_DELAY: 1000,
|
||||
/** Maximum number of retry attempts */
|
||||
MAX_RETRIES: 3,
|
||||
/** Default delay between requests for rate limiting (ms) */
|
||||
RATE_LIMIT_DELAY: 300,
|
||||
/** Default delay after hitting 429 (ms) */
|
||||
RATE_LIMIT_429_DELAY: 60000,
|
||||
} as const;
|
||||
|
||||
/**
|
||||
* Strapi API response types for verified community nodes
|
||||
*/
|
||||
export interface StrapiCommunityNodeAttributes {
|
||||
name: string;
|
||||
displayName: string;
|
||||
description: string;
|
||||
packageName: string;
|
||||
authorName: string;
|
||||
authorGithubUrl?: string;
|
||||
npmVersion: string;
|
||||
numberOfDownloads: number;
|
||||
numberOfStars: number;
|
||||
isOfficialNode: boolean;
|
||||
isPublished: boolean;
|
||||
nodeDescription: any; // Complete n8n node schema
|
||||
nodeVersions?: any[];
|
||||
checksum?: string;
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
}
|
||||
|
||||
export interface StrapiCommunityNode {
|
||||
id: number;
|
||||
attributes: StrapiCommunityNodeAttributes;
|
||||
}
|
||||
|
||||
export interface StrapiPaginatedResponse<T> {
|
||||
data: Array<{ id: number; attributes: T }>;
|
||||
meta: {
|
||||
pagination: {
|
||||
page: number;
|
||||
pageSize: number;
|
||||
pageCount: number;
|
||||
total: number;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* npm registry search response types
|
||||
*/
|
||||
export interface NpmPackageInfo {
|
||||
name: string;
|
||||
version: string;
|
||||
description: string;
|
||||
keywords: string[];
|
||||
date: string;
|
||||
links: {
|
||||
npm: string;
|
||||
homepage?: string;
|
||||
repository?: string;
|
||||
};
|
||||
author?: {
|
||||
name?: string;
|
||||
email?: string;
|
||||
username?: string;
|
||||
};
|
||||
publisher?: {
|
||||
username: string;
|
||||
email: string;
|
||||
};
|
||||
maintainers: Array<{ username: string; email: string }>;
|
||||
}
|
||||
|
||||
export interface NpmSearchResult {
|
||||
package: NpmPackageInfo;
|
||||
score: {
|
||||
final: number;
|
||||
detail: {
|
||||
quality: number;
|
||||
popularity: number;
|
||||
maintenance: number;
|
||||
};
|
||||
};
|
||||
searchScore: number;
|
||||
}
|
||||
|
||||
export interface NpmSearchResponse {
|
||||
objects: NpmSearchResult[];
|
||||
total: number;
|
||||
time: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Response type for full package data including README
|
||||
*/
|
||||
export interface NpmPackageWithReadme {
|
||||
name: string;
|
||||
version: string;
|
||||
description?: string;
|
||||
readme?: string;
|
||||
readmeFilename?: string;
|
||||
homepage?: string;
|
||||
repository?: {
|
||||
type?: string;
|
||||
url?: string;
|
||||
};
|
||||
keywords?: string[];
|
||||
license?: string;
|
||||
'dist-tags'?: {
|
||||
latest?: string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches community nodes from n8n Strapi API and npm registry.
|
||||
* Follows the pattern from template-fetcher.ts.
|
||||
*/
|
||||
export class CommunityNodeFetcher {
|
||||
private readonly strapiBaseUrl: string;
|
||||
private readonly npmSearchUrl = 'https://registry.npmjs.org/-/v1/search';
|
||||
private readonly npmRegistryUrl = 'https://registry.npmjs.org';
|
||||
private readonly maxRetries = FETCH_CONFIG.MAX_RETRIES;
|
||||
private readonly retryDelay = FETCH_CONFIG.RETRY_DELAY;
|
||||
private readonly strapiPageSize = 25;
|
||||
private readonly npmPageSize = 250; // npm API max
|
||||
|
||||
/** Regex for validating npm package names per npm naming rules */
|
||||
private readonly npmPackageNameRegex = /^(@[a-z0-9-~][a-z0-9-._~]*\/)?[a-z0-9-~][a-z0-9-._~]*$/;
|
||||
|
||||
constructor(environment: 'production' | 'staging' = 'production') {
|
||||
this.strapiBaseUrl =
|
||||
environment === 'production'
|
||||
? 'https://api.n8n.io/api/community-nodes'
|
||||
: 'https://api-staging.n8n.io/api/community-nodes';
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates npm package name to prevent path traversal and injection attacks.
|
||||
* @see https://github.com/npm/validate-npm-package-name
|
||||
*/
|
||||
private validatePackageName(packageName: string): boolean {
|
||||
if (!packageName || typeof packageName !== 'string') {
|
||||
return false;
|
||||
}
|
||||
// Max length per npm spec
|
||||
if (packageName.length > 214) {
|
||||
return false;
|
||||
}
|
||||
// Must match npm naming pattern
|
||||
if (!this.npmPackageNameRegex.test(packageName)) {
|
||||
return false;
|
||||
}
|
||||
// Block path traversal attempts
|
||||
if (packageName.includes('..') || packageName.includes('//')) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
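A quick usage sketch of the validation rules implemented above, extracted into a standalone function so it can be run outside the class:

```typescript
// Mirrors the checks above: npm naming pattern, 214-character limit, no path traversal.
const NPM_NAME = /^(@[a-z0-9-~][a-z0-9-._~]*\/)?[a-z0-9-~][a-z0-9-._~]*$/;

function isValidPackageName(name: string): boolean {
  if (!name || name.length > 214) return false;
  if (!NPM_NAME.test(name)) return false;
  return !name.includes('..') && !name.includes('//');
}

console.log(isValidPackageName('n8n-nodes-brightdata'));     // true
console.log(isValidPackageName('@scope/n8n-nodes-example')); // true
console.log(isValidPackageName('../../../etc/passwd'));      // false
```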
|
||||
/**
|
||||
* Checks if an error is a rate limit (429) response
|
||||
*/
|
||||
private isRateLimitError(error: unknown): boolean {
|
||||
return axios.isAxiosError(error) && error.response?.status === 429;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retry helper for API calls (same pattern as TemplateFetcher)
|
||||
* Handles 429 rate limit responses with extended delay
|
||||
*/
|
||||
private async retryWithBackoff<T>(
|
||||
fn: () => Promise<T>,
|
||||
context: string,
|
||||
maxRetries: number = this.maxRetries
|
||||
): Promise<T | null> {
|
||||
let lastError: unknown;
|
||||
|
||||
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
||||
try {
|
||||
return await fn();
|
||||
} catch (error: unknown) {
|
||||
lastError = error;
|
||||
|
||||
if (attempt < maxRetries) {
|
||||
// Handle 429 rate limit with longer delay
|
||||
if (this.isRateLimitError(error)) {
|
||||
const delay = FETCH_CONFIG.RATE_LIMIT_429_DELAY;
|
||||
logger.warn(
|
||||
`${context} - Rate limited (429), waiting ${delay / 1000}s before retry...`
|
||||
);
|
||||
await this.sleep(delay);
|
||||
} else {
|
||||
const delay = this.retryDelay * attempt; // Exponential backoff
|
||||
logger.warn(
|
||||
`${context} - Attempt ${attempt}/${maxRetries} failed, retrying in ${delay}ms...`
|
||||
);
|
||||
await this.sleep(delay);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.error(`${context} - All ${maxRetries} attempts failed, skipping`, lastError);
|
||||
return null;
|
||||
}
|
||||
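Usage-wise, the retry helper above wraps any async call and returns `null` once the attempts are exhausted. A simplified sketch of the same idea (linear backoff, without the 429 special case), for illustration only:

```typescript
// Simplified variant of retryWithBackoff: retry with a growing delay, return null on failure.
async function withRetries<T>(
  fn: () => Promise<T>,
  maxRetries = 3,
  baseDelayMs = 1000
): Promise<T | null> {
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await fn();
    } catch {
      if (attempt < maxRetries) {
        await new Promise((resolve) => setTimeout(resolve, baseDelayMs * attempt));
      }
    }
  }
  return null;
}

// Example (illustrative): tolerate transient network errors when fetching a manifest.
// const pkg = await withRetries(() => axios.get('https://registry.npmjs.org/n8n/latest'));
```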
|
||||
/**
|
||||
* Fetch all verified community nodes from n8n Strapi API.
|
||||
* These nodes include full nodeDescription schemas - no parsing needed!
|
||||
*/
|
||||
async fetchVerifiedNodes(
|
||||
progressCallback?: (message: string, current: number, total: number) => void
|
||||
): Promise<StrapiCommunityNode[]> {
|
||||
const allNodes: StrapiCommunityNode[] = [];
|
||||
let page = 1;
|
||||
let hasMore = true;
|
||||
let total = 0;
|
||||
|
||||
logger.info('Fetching verified community nodes from n8n Strapi API...');
|
||||
|
||||
while (hasMore) {
|
||||
const result = await this.retryWithBackoff(
|
||||
async () => {
|
||||
const response = await axios.get<StrapiPaginatedResponse<StrapiCommunityNodeAttributes>>(
|
||||
this.strapiBaseUrl,
|
||||
{
|
||||
params: {
|
||||
'pagination[page]': page,
|
||||
'pagination[pageSize]': this.strapiPageSize,
|
||||
},
|
||||
timeout: FETCH_CONFIG.STRAPI_TIMEOUT,
|
||||
}
|
||||
);
|
||||
return response.data;
|
||||
},
|
||||
`Fetching verified nodes page ${page}`
|
||||
);
|
||||
|
||||
if (result === null) {
|
||||
logger.warn(`Skipping page ${page} after failed attempts`);
|
||||
page++;
|
||||
continue;
|
||||
}
|
||||
|
||||
const nodes = result.data.map((item) => ({
|
||||
id: item.id,
|
||||
attributes: item.attributes,
|
||||
}));
|
||||
|
||||
allNodes.push(...nodes);
|
||||
total = result.meta.pagination.total;
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback(`Fetching verified nodes`, allNodes.length, total);
|
||||
}
|
||||
|
||||
logger.debug(
|
||||
`Fetched page ${page}/${result.meta.pagination.pageCount}: ${nodes.length} nodes (total: ${allNodes.length}/${total})`
|
||||
);
|
||||
|
||||
// Check if there are more pages
|
||||
if (page >= result.meta.pagination.pageCount) {
|
||||
hasMore = false;
|
||||
}
|
||||
|
||||
page++;
|
||||
|
||||
// Rate limiting
|
||||
if (hasMore) {
|
||||
await this.sleep(FETCH_CONFIG.RATE_LIMIT_DELAY);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`Fetched ${allNodes.length} verified community nodes from Strapi API`);
|
||||
return allNodes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch popular community node packages from npm registry.
|
||||
* Sorted by popularity (downloads). Returns package metadata only.
|
||||
* To get node schemas, packages need to be downloaded and parsed.
|
||||
*
|
||||
* @param limit Maximum number of packages to fetch (default: 100)
|
||||
*/
|
||||
async fetchNpmPackages(
|
||||
limit: number = 100,
|
||||
progressCallback?: (message: string, current: number, total: number) => void
|
||||
): Promise<NpmSearchResult[]> {
|
||||
const allPackages: NpmSearchResult[] = [];
|
||||
let offset = 0;
|
||||
const targetLimit = Math.min(limit, 1000); // npm API practical limit
|
||||
|
||||
logger.info(`Fetching top ${targetLimit} community node packages from npm registry...`);
|
||||
|
||||
while (allPackages.length < targetLimit) {
|
||||
const remaining = targetLimit - allPackages.length;
|
||||
const size = Math.min(this.npmPageSize, remaining);
|
||||
|
||||
const result = await this.retryWithBackoff(
|
||||
async () => {
|
||||
const response = await axios.get<NpmSearchResponse>(this.npmSearchUrl, {
|
||||
params: {
|
||||
text: 'keywords:n8n-community-node-package',
|
||||
size,
|
||||
from: offset,
|
||||
// Sort by popularity (downloads)
|
||||
quality: 0,
|
||||
popularity: 1,
|
||||
maintenance: 0,
|
||||
},
|
||||
timeout: FETCH_CONFIG.STRAPI_TIMEOUT,
|
||||
});
|
||||
return response.data;
|
||||
},
|
||||
`Fetching npm packages (offset ${offset})`
|
||||
);
|
||||
|
||||
if (result === null) {
|
||||
logger.warn(`Skipping npm fetch at offset ${offset} after failed attempts`);
|
||||
break;
|
||||
}
|
||||
|
||||
if (result.objects.length === 0) {
|
||||
break; // No more packages
|
||||
}
|
||||
|
||||
allPackages.push(...result.objects);
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback(`Fetching npm packages`, allPackages.length, Math.min(result.total, targetLimit));
|
||||
}
|
||||
|
||||
logger.debug(
|
||||
`Fetched ${result.objects.length} packages (total: ${allPackages.length}/${Math.min(result.total, targetLimit)})`
|
||||
);
|
||||
|
||||
offset += size;
|
||||
|
||||
// Rate limiting
|
||||
await this.sleep(FETCH_CONFIG.RATE_LIMIT_DELAY);
|
||||
}
|
||||
|
||||
// Sort by popularity score (highest first)
|
||||
allPackages.sort((a, b) => b.score.detail.popularity - a.score.detail.popularity);
|
||||
|
||||
logger.info(`Fetched ${allPackages.length} community node packages from npm`);
|
||||
return allPackages.slice(0, limit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch package.json for a specific npm package to get the n8n node configuration.
|
||||
* Validates package name to prevent path traversal attacks.
|
||||
*/
|
||||
async fetchPackageJson(packageName: string, version?: string): Promise<any | null> {
|
||||
// Validate package name to prevent path traversal
|
||||
if (!this.validatePackageName(packageName)) {
|
||||
logger.warn(`Invalid package name rejected: ${packageName}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
const url = version
|
||||
? `${this.npmRegistryUrl}/${encodeURIComponent(packageName)}/${encodeURIComponent(version)}`
|
||||
: `${this.npmRegistryUrl}/${encodeURIComponent(packageName)}/latest`;
|
||||
|
||||
return this.retryWithBackoff(
|
||||
async () => {
|
||||
const response = await axios.get(url, { timeout: FETCH_CONFIG.NPM_REGISTRY_TIMEOUT });
|
||||
return response.data;
|
||||
},
|
||||
`Fetching package.json for ${packageName}${version ? `@${version}` : ''}`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Download package tarball URL for a specific package version.
|
||||
* Returns the tarball URL that can be used to download and extract the package.
|
||||
*/
|
||||
async getPackageTarballUrl(packageName: string, version?: string): Promise<string | null> {
|
||||
const packageJson = await this.fetchPackageJson(packageName, version);
|
||||
|
||||
if (!packageJson) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// For specific version fetch, dist.tarball is directly available
|
||||
if (packageJson.dist?.tarball) {
|
||||
return packageJson.dist.tarball;
|
||||
}
|
||||
|
||||
// For full package fetch, get the latest version's tarball
|
||||
const latestVersion = packageJson['dist-tags']?.latest;
|
||||
if (latestVersion && packageJson.versions?.[latestVersion]?.dist?.tarball) {
|
||||
return packageJson.versions[latestVersion].dist.tarball;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch full package data including README from npm registry.
|
||||
* Uses the base package URL (not /latest) to get the README field.
|
||||
* Validates package name to prevent path traversal attacks.
|
||||
*
|
||||
* @param packageName npm package name (e.g., "n8n-nodes-brightdata")
|
||||
* @returns Full package data including readme, or null if fetch failed
|
||||
*/
|
||||
async fetchPackageWithReadme(packageName: string): Promise<NpmPackageWithReadme | null> {
|
||||
// Validate package name to prevent path traversal
|
||||
if (!this.validatePackageName(packageName)) {
|
||||
logger.warn(`Invalid package name rejected for README fetch: ${packageName}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
const url = `${this.npmRegistryUrl}/${encodeURIComponent(packageName)}`;
|
||||
|
||||
return this.retryWithBackoff(
|
||||
async () => {
|
||||
const response = await axios.get<NpmPackageWithReadme>(url, {
|
||||
timeout: FETCH_CONFIG.NPM_REGISTRY_TIMEOUT,
|
||||
});
|
||||
return response.data;
|
||||
},
|
||||
`Fetching package with README for ${packageName}`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch READMEs for multiple packages in batch with rate limiting.
|
||||
* Returns a Map of packageName -> readme content.
|
||||
*
|
||||
* @param packageNames Array of npm package names
|
||||
* @param progressCallback Optional callback for progress updates
|
||||
* @param concurrency Number of concurrent requests (default: 1 for rate limiting)
|
||||
* @returns Map of packageName to README content (null if not found)
|
||||
*/
|
||||
async fetchReadmesBatch(
|
||||
packageNames: string[],
|
||||
progressCallback?: (message: string, current: number, total: number) => void,
|
||||
concurrency: number = 1
|
||||
): Promise<Map<string, string | null>> {
|
||||
const results = new Map<string, string | null>();
|
||||
const total = packageNames.length;
|
||||
|
||||
logger.info(`Fetching READMEs for ${total} packages (concurrency: ${concurrency})...`);
|
||||
|
||||
// Process in batches based on concurrency
|
||||
for (let i = 0; i < packageNames.length; i += concurrency) {
|
||||
const batch = packageNames.slice(i, i + concurrency);
|
||||
|
||||
// Process batch concurrently
|
||||
const batchPromises = batch.map(async (packageName) => {
|
||||
const data = await this.fetchPackageWithReadme(packageName);
|
||||
return { packageName, readme: data?.readme || null };
|
||||
});
|
||||
|
||||
const batchResults = await Promise.all(batchPromises);
|
||||
|
||||
for (const { packageName, readme } of batchResults) {
|
||||
results.set(packageName, readme);
|
||||
}
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback('Fetching READMEs', Math.min(i + concurrency, total), total);
|
||||
}
|
||||
|
||||
// Rate limiting between batches
|
||||
if (i + concurrency < packageNames.length) {
|
||||
await this.sleep(FETCH_CONFIG.RATE_LIMIT_DELAY);
|
||||
}
|
||||
}
|
||||
|
||||
const foundCount = Array.from(results.values()).filter((v) => v !== null).length;
|
||||
logger.info(`Fetched ${foundCount}/${total} READMEs successfully`);
|
||||
|
||||
return results;
|
||||
}
|
||||
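A hedged usage sketch for the batch README fetcher above; the package names are examples only, and the fetcher is typed structurally so the snippet stands alone:

```typescript
// Assumed usage of fetchReadmesBatch as declared above; package names are illustrative.
async function printReadmeSizes(fetcher: {
  fetchReadmesBatch(names: string[]): Promise<Map<string, string | null>>;
}) {
  const readmes = await fetcher.fetchReadmesBatch(['n8n-nodes-brightdata', 'n8n-nodes-example']);
  for (const [name, readme] of readmes) {
    console.log(`${name}: ${readme ? readme.length : 0} chars of README`);
  }
}
```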
|
||||
/**
|
||||
* Get download statistics for a package from npm.
|
||||
* Validates package name to prevent path traversal attacks.
|
||||
*/
|
||||
async getPackageDownloads(
|
||||
packageName: string,
|
||||
period: 'last-week' | 'last-month' = 'last-week'
|
||||
): Promise<number | null> {
|
||||
// Validate package name to prevent path traversal
|
||||
if (!this.validatePackageName(packageName)) {
|
||||
logger.warn(`Invalid package name rejected for downloads: ${packageName}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
return this.retryWithBackoff(
|
||||
async () => {
|
||||
const response = await axios.get(
|
||||
`https://api.npmjs.org/downloads/point/${period}/${encodeURIComponent(packageName)}`,
|
||||
{ timeout: FETCH_CONFIG.NPM_DOWNLOADS_TIMEOUT }
|
||||
);
|
||||
return response.data.downloads;
|
||||
},
|
||||
`Fetching downloads for ${packageName}`
|
||||
);
|
||||
}
|
||||
|
||||
private sleep(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
}
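// Example usage (illustrative sketch, not part of the changeset): drive the batch
// README helper and the downloads endpoint together. The import path and package
// names are assumptions for the example, not taken from the repository.
import { CommunityNodeFetcher } from './community-node-fetcher';

async function fetchCommunityDocs(packageNames: string[]): Promise<void> {
  const fetcher = new CommunityNodeFetcher('production');

  // concurrency: 1 keeps the RATE_LIMIT_DELAY pause between registry calls
  const readmes = await fetcher.fetchReadmesBatch(
    packageNames,
    (message, current, total) => console.log(`${message}: ${current}/${total}`),
    1
  );

  for (const [name, readme] of readmes) {
    const downloads = await fetcher.getPackageDownloads(name, 'last-week');
    console.log(
      `${name}: ${readme ? 'README fetched' : 'no README'}, ${downloads ?? 'unknown'} weekly downloads`
    );
  }
}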
|
||||
@@ -1,391 +0,0 @@
|
||||
import { logger } from '../utils/logger';
|
||||
import { NodeRepository, CommunityNodeFields } from '../database/node-repository';
|
||||
import { ParsedNode } from '../parsers/node-parser';
|
||||
import {
|
||||
CommunityNodeFetcher,
|
||||
StrapiCommunityNode,
|
||||
NpmSearchResult,
|
||||
} from './community-node-fetcher';
|
||||
|
||||
export interface CommunityStats {
|
||||
total: number;
|
||||
verified: number;
|
||||
unverified: number;
|
||||
}
|
||||
|
||||
export interface SyncResult {
|
||||
verified: {
|
||||
fetched: number;
|
||||
saved: number;
|
||||
skipped: number;
|
||||
errors: string[];
|
||||
};
|
||||
npm: {
|
||||
fetched: number;
|
||||
saved: number;
|
||||
skipped: number;
|
||||
errors: string[];
|
||||
};
|
||||
duration: number;
|
||||
}
|
||||
|
||||
export interface SyncOptions {
|
||||
/** Only sync verified nodes from Strapi API (fast) */
|
||||
verifiedOnly?: boolean;
|
||||
/** Maximum number of npm packages to sync (default: 100) */
|
||||
npmLimit?: number;
|
||||
/** Skip nodes already in database */
|
||||
skipExisting?: boolean;
|
||||
/** Environment for Strapi API */
|
||||
environment?: 'production' | 'staging';
|
||||
}
|
||||
|
||||
/**
|
||||
* Service for syncing community nodes from n8n Strapi API and npm registry.
|
||||
*
|
||||
* Key insight: Verified nodes from Strapi include full `nodeDescription` schemas,
|
||||
* so we can store them directly without downloading/parsing npm packages.
|
||||
*/
|
||||
export class CommunityNodeService {
|
||||
private fetcher: CommunityNodeFetcher;
|
||||
private repository: NodeRepository;
|
||||
|
||||
constructor(repository: NodeRepository, environment: 'production' | 'staging' = 'production') {
|
||||
this.repository = repository;
|
||||
this.fetcher = new CommunityNodeFetcher(environment);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync community nodes from both Strapi API and npm registry.
|
||||
*/
|
||||
async syncCommunityNodes(
|
||||
options: SyncOptions = {},
|
||||
progressCallback?: (message: string, current: number, total: number) => void
|
||||
): Promise<SyncResult> {
|
||||
const startTime = Date.now();
|
||||
const result: SyncResult = {
|
||||
verified: { fetched: 0, saved: 0, skipped: 0, errors: [] },
|
||||
npm: { fetched: 0, saved: 0, skipped: 0, errors: [] },
|
||||
duration: 0,
|
||||
};
|
||||
|
||||
// Step 1: Sync verified nodes from Strapi API
|
||||
logger.info('Syncing verified community nodes from Strapi API...');
|
||||
try {
|
||||
result.verified = await this.syncVerifiedNodes(progressCallback, options.skipExisting);
|
||||
} catch (error: any) {
|
||||
logger.error('Failed to sync verified nodes:', error);
|
||||
result.verified.errors.push(`Strapi sync failed: ${error.message}`);
|
||||
}
|
||||
|
||||
// Step 2: Sync popular npm packages (unless verifiedOnly)
|
||||
if (!options.verifiedOnly) {
|
||||
const npmLimit = options.npmLimit ?? 100;
|
||||
logger.info(`Syncing top ${npmLimit} npm community packages...`);
|
||||
try {
|
||||
result.npm = await this.syncNpmNodes(npmLimit, progressCallback, options.skipExisting);
|
||||
} catch (error: any) {
|
||||
logger.error('Failed to sync npm nodes:', error);
|
||||
result.npm.errors.push(`npm sync failed: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
result.duration = Date.now() - startTime;
|
||||
logger.info(
|
||||
`Community node sync complete in ${(result.duration / 1000).toFixed(1)}s: ` +
|
||||
`${result.verified.saved} verified, ${result.npm.saved} npm`
|
||||
);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync verified nodes from n8n Strapi API.
|
||||
* These nodes include full nodeDescription - no parsing needed!
|
||||
*/
|
||||
async syncVerifiedNodes(
|
||||
progressCallback?: (message: string, current: number, total: number) => void,
|
||||
skipExisting?: boolean
|
||||
): Promise<SyncResult['verified']> {
|
||||
const result = { fetched: 0, saved: 0, skipped: 0, errors: [] as string[] };
|
||||
|
||||
// Fetch verified nodes from Strapi API
|
||||
const strapiNodes = await this.fetcher.fetchVerifiedNodes(progressCallback);
|
||||
result.fetched = strapiNodes.length;
|
||||
|
||||
if (strapiNodes.length === 0) {
|
||||
logger.warn('No verified nodes returned from Strapi API');
|
||||
return result;
|
||||
}
|
||||
|
||||
logger.info(`Processing ${strapiNodes.length} verified community nodes...`);
|
||||
|
||||
for (const strapiNode of strapiNodes) {
|
||||
try {
|
||||
const { attributes } = strapiNode;
|
||||
|
||||
// Skip if node already exists and skipExisting is true
|
||||
if (skipExisting && this.repository.hasNodeByNpmPackage(attributes.packageName)) {
|
||||
result.skipped++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Convert Strapi node to ParsedNode format
|
||||
const parsedNode = this.strapiNodeToParsedNode(strapiNode);
|
||||
if (!parsedNode) {
|
||||
result.errors.push(`Failed to parse: ${attributes.packageName}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Save to database
|
||||
this.repository.saveNode(parsedNode);
|
||||
result.saved++;
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback(
|
||||
`Saving verified nodes`,
|
||||
result.saved + result.skipped,
|
||||
strapiNodes.length
|
||||
);
|
||||
}
|
||||
} catch (error: any) {
|
||||
result.errors.push(`Error saving ${strapiNode.attributes.packageName}: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`Verified nodes: ${result.saved} saved, ${result.skipped} skipped`);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync popular npm packages.
|
||||
* NOTE: This only stores metadata - full schema extraction requires tarball download.
|
||||
* For now, we store basic metadata and mark them for future parsing.
|
||||
*/
|
||||
async syncNpmNodes(
|
||||
limit: number = 100,
|
||||
progressCallback?: (message: string, current: number, total: number) => void,
|
||||
skipExisting?: boolean
|
||||
): Promise<SyncResult['npm']> {
|
||||
const result = { fetched: 0, saved: 0, skipped: 0, errors: [] as string[] };
|
||||
|
||||
// Fetch npm packages
|
||||
const npmPackages = await this.fetcher.fetchNpmPackages(limit, progressCallback);
|
||||
result.fetched = npmPackages.length;
|
||||
|
||||
if (npmPackages.length === 0) {
|
||||
logger.warn('No npm packages returned from registry');
|
||||
return result;
|
||||
}
|
||||
|
||||
// Get list of verified package names to skip (already synced from Strapi)
|
||||
const verifiedPackages = new Set(
|
||||
this.repository
|
||||
.getCommunityNodes({ verified: true })
|
||||
.map((n) => n.npmPackageName)
|
||||
.filter(Boolean)
|
||||
);
|
||||
|
||||
logger.info(
|
||||
`Processing ${npmPackages.length} npm packages (skipping ${verifiedPackages.size} verified)...`
|
||||
);
|
||||
|
||||
for (const pkg of npmPackages) {
|
||||
try {
|
||||
const packageName = pkg.package.name;
|
||||
|
||||
// Skip if already verified from Strapi
|
||||
if (verifiedPackages.has(packageName)) {
|
||||
result.skipped++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip if already exists and skipExisting is true
|
||||
if (skipExisting && this.repository.hasNodeByNpmPackage(packageName)) {
|
||||
result.skipped++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// For npm packages, we create a basic node entry with metadata
|
||||
// Full schema extraction would require downloading and parsing the tarball
|
||||
const parsedNode = this.npmPackageToParsedNode(pkg);
|
||||
|
||||
// Save to database
|
||||
this.repository.saveNode(parsedNode);
|
||||
result.saved++;
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback(`Saving npm packages`, result.saved + result.skipped, npmPackages.length);
|
||||
}
|
||||
} catch (error: any) {
|
||||
result.errors.push(`Error saving ${pkg.package.name}: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`npm packages: ${result.saved} saved, ${result.skipped} skipped`);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Strapi community node to ParsedNode format.
|
||||
* Strapi nodes include full nodeDescription - no parsing needed!
|
||||
*/
|
||||
private strapiNodeToParsedNode(
|
||||
strapiNode: StrapiCommunityNode
|
||||
): (ParsedNode & CommunityNodeFields) | null {
|
||||
const { attributes } = strapiNode;
|
||||
|
||||
// Strapi includes the full nodeDescription (n8n node schema)
|
||||
const nodeDesc = attributes.nodeDescription;
|
||||
|
||||
if (!nodeDesc) {
|
||||
logger.warn(`No nodeDescription for ${attributes.packageName}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Extract node type from the description
|
||||
// Strapi uses "preview" format (e.g., n8n-nodes-preview-brightdata.brightData)
|
||||
// but actual installed nodes use the npm package name (e.g., n8n-nodes-brightdata.brightData)
|
||||
// We need to transform preview names to actual names
|
||||
let nodeType = nodeDesc.name || `${attributes.packageName}.${attributes.name}`;
|
||||
|
||||
// Transform preview node type to actual node type
|
||||
// Pattern: n8n-nodes-preview-{name} -> n8n-nodes-{name}
|
||||
// Also handles scoped packages: @scope/n8n-nodes-preview-{name} -> @scope/n8n-nodes-{name}
|
||||
if (nodeType.includes('n8n-nodes-preview-')) {
|
||||
nodeType = nodeType.replace('n8n-nodes-preview-', 'n8n-nodes-');
|
||||
}
|
||||
|
||||
// Determine if it's an AI tool
|
||||
const isAITool =
|
||||
nodeDesc.usableAsTool === true ||
|
||||
nodeDesc.codex?.categories?.includes('AI') ||
|
||||
attributes.name?.toLowerCase().includes('ai');
|
||||
|
||||
return {
|
||||
// Core ParsedNode fields
|
||||
nodeType,
|
||||
packageName: attributes.packageName,
|
||||
displayName: nodeDesc.displayName || attributes.displayName,
|
||||
description: nodeDesc.description || attributes.description,
|
||||
category: nodeDesc.codex?.categories?.[0] || 'Community',
|
||||
style: 'declarative', // Most community nodes are declarative
|
||||
properties: nodeDesc.properties || [],
|
||||
credentials: nodeDesc.credentials || [],
|
||||
operations: this.extractOperations(nodeDesc),
|
||||
isAITool,
|
||||
isTrigger: nodeDesc.group?.includes('trigger') || false,
|
||||
isWebhook:
|
||||
nodeDesc.name?.toLowerCase().includes('webhook') ||
|
||||
nodeDesc.group?.includes('webhook') ||
|
||||
false,
|
||||
isVersioned: (attributes.nodeVersions?.length || 0) > 1,
|
||||
version: nodeDesc.version?.toString() || attributes.npmVersion || '1',
|
||||
outputs: nodeDesc.outputs,
|
||||
outputNames: nodeDesc.outputNames,
|
||||
|
||||
// Community-specific fields
|
||||
isCommunity: true,
|
||||
isVerified: true, // Strapi nodes are verified
|
||||
authorName: attributes.authorName,
|
||||
authorGithubUrl: attributes.authorGithubUrl,
|
||||
npmPackageName: attributes.packageName,
|
||||
npmVersion: attributes.npmVersion,
|
||||
npmDownloads: attributes.numberOfDownloads || 0,
|
||||
communityFetchedAt: new Date().toISOString(),
|
||||
};
|
||||
}
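  // For orientation, a Strapi record consistent with the fields read above might
  // look like the following sketch. All values are hypothetical; only the
  // properties this method actually accesses are shown.
  //
  //   {
  //     attributes: {
  //       packageName: 'n8n-nodes-example',
  //       name: 'example',
  //       displayName: 'Example',
  //       description: 'Example community node',
  //       authorName: 'Jane Doe',
  //       authorGithubUrl: 'https://github.com/example',
  //       npmVersion: '1.2.0',
  //       numberOfDownloads: 1234,
  //       nodeVersions: [{ version: 1 }],
  //       nodeDescription: {
  //         name: 'n8n-nodes-preview-example.example',  // rewritten to n8n-nodes-example.example
  //         displayName: 'Example',
  //         group: ['transform'],
  //         version: 1,
  //         codex: { categories: ['Community'] },
  //         properties: [],
  //         credentials: [],
  //       },
  //     },
  //   }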
|
||||
|
||||
/**
|
||||
* Convert npm package info to basic ParsedNode.
|
||||
* Note: This is a minimal entry - full schema requires tarball parsing.
|
||||
*/
|
||||
private npmPackageToParsedNode(pkg: NpmSearchResult): ParsedNode & CommunityNodeFields {
|
||||
const { package: pkgInfo, score } = pkg;
|
||||
|
||||
// Extract node name from package name (e.g., n8n-nodes-globals -> GlobalConstants)
|
||||
const nodeName = this.extractNodeNameFromPackage(pkgInfo.name);
|
||||
const nodeType = `${pkgInfo.name}.${nodeName}`;
|
||||
|
||||
return {
|
||||
// Core ParsedNode fields (minimal - no schema available)
|
||||
nodeType,
|
||||
packageName: pkgInfo.name,
|
||||
displayName: nodeName,
|
||||
description: pkgInfo.description || `Community node from ${pkgInfo.name}`,
|
||||
category: 'Community',
|
||||
style: 'declarative',
|
||||
properties: [], // Would need tarball parsing
|
||||
credentials: [],
|
||||
operations: [],
|
||||
isAITool: false,
|
||||
isTrigger: pkgInfo.name.includes('trigger'),
|
||||
isWebhook: pkgInfo.name.includes('webhook'),
|
||||
isVersioned: false,
|
||||
version: pkgInfo.version,
|
||||
|
||||
// Community-specific fields
|
||||
isCommunity: true,
|
||||
isVerified: false, // npm nodes are not verified
|
||||
authorName: pkgInfo.author?.name || pkgInfo.publisher?.username,
|
||||
authorGithubUrl: pkgInfo.links?.repository,
|
||||
npmPackageName: pkgInfo.name,
|
||||
npmVersion: pkgInfo.version,
|
||||
npmDownloads: Math.round(score.detail.popularity * 10000), // Approximate
|
||||
communityFetchedAt: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract operations from node description.
|
||||
*/
|
||||
private extractOperations(nodeDesc: any): any[] {
|
||||
const operations: any[] = [];
|
||||
|
||||
// Check properties for resource/operation pattern
|
||||
if (nodeDesc.properties) {
|
||||
for (const prop of nodeDesc.properties) {
|
||||
if (prop.name === 'operation' && prop.options) {
|
||||
operations.push(...prop.options);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return operations;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract node name from npm package name.
|
||||
* n8n community nodes typically use lowercase node class names.
|
||||
* e.g., "n8n-nodes-chatwoot" -> "chatwoot"
|
||||
* e.g., "@company/n8n-nodes-mynode" -> "mynode"
|
||||
*
|
||||
* Note: We use lowercase because most community nodes follow this convention.
|
||||
* Verified nodes from Strapi have the correct casing in nodeDesc.name.
|
||||
*/
|
||||
private extractNodeNameFromPackage(packageName: string): string {
|
||||
// Remove scope if present
|
||||
let name = packageName.replace(/^@[^/]+\//, '');
|
||||
|
||||
// Remove n8n-nodes- prefix
|
||||
name = name.replace(/^n8n-nodes-/, '');
|
||||
|
||||
// Remove hyphens and keep lowercase (n8n community node convention)
|
||||
// e.g., "bright-data" -> "brightdata", "chatwoot" -> "chatwoot"
|
||||
return name.replace(/-/g, '').toLowerCase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get community node statistics.
|
||||
*/
|
||||
getCommunityStats(): CommunityStats {
|
||||
return this.repository.getCommunityStats();
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete all community nodes (for rebuild).
|
||||
*/
|
||||
deleteCommunityNodes(): number {
|
||||
return this.repository.deleteCommunityNodes();
|
||||
}
|
||||
}
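// Example usage (illustrative sketch, not part of the changeset): the service is
// driven from a repository backed by a database adapter. Import paths mirror the
// relative paths used elsewhere in this diff and are assumptions.
import { createDatabaseAdapter } from '../database/database-adapter';
import { NodeRepository } from '../database/node-repository';
import { CommunityNodeService } from './community-node-service';

async function syncCommunity(dbPath: string): Promise<void> {
  const db = await createDatabaseAdapter(dbPath);
  const repository = new NodeRepository(db);
  const service = new CommunityNodeService(repository, 'production');

  // Verified Strapi nodes plus the top 50 npm packages, skipping anything already stored
  const result = await service.syncCommunityNodes(
    { npmLimit: 50, skipExisting: true },
    (message, current, total) => console.log(`${message}: ${current}/${total}`)
  );

  console.log(
    `Saved ${result.verified.saved} verified and ${result.npm.saved} npm nodes ` +
      `in ${(result.duration / 1000).toFixed(1)}s`
  );
}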
|
||||
@@ -1,291 +0,0 @@
|
||||
/**
|
||||
* Batch processor for community node documentation generation.
|
||||
*
|
||||
* Orchestrates the full workflow:
|
||||
* 1. Fetch READMEs from npm registry
|
||||
* 2. Generate AI documentation summaries
|
||||
* 3. Store results in database
|
||||
*/
|
||||
|
||||
import { NodeRepository } from '../database/node-repository';
|
||||
import { CommunityNodeFetcher } from './community-node-fetcher';
|
||||
import {
|
||||
DocumentationGenerator,
|
||||
DocumentationInput,
|
||||
DocumentationResult,
|
||||
createDocumentationGenerator,
|
||||
} from './documentation-generator';
|
||||
import { logger } from '../utils/logger';
|
||||
|
||||
/**
|
||||
* Options for batch processing
|
||||
*/
|
||||
export interface BatchProcessorOptions {
|
||||
/** Skip nodes that already have READMEs (default: false) */
|
||||
skipExistingReadme?: boolean;
|
||||
/** Skip nodes that already have AI summaries (default: false) */
|
||||
skipExistingSummary?: boolean;
|
||||
/** Only fetch READMEs, skip AI generation (default: false) */
|
||||
readmeOnly?: boolean;
|
||||
/** Only generate AI summaries, skip README fetch (default: false) */
|
||||
summaryOnly?: boolean;
|
||||
/** Max nodes to process (default: unlimited) */
|
||||
limit?: number;
|
||||
/** Concurrency for npm README fetches (default: 5) */
|
||||
readmeConcurrency?: number;
|
||||
/** Concurrency for LLM API calls (default: 3) */
|
||||
llmConcurrency?: number;
|
||||
/** Progress callback */
|
||||
progressCallback?: (message: string, current: number, total: number) => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of batch processing
|
||||
*/
|
||||
export interface BatchProcessorResult {
|
||||
/** Number of READMEs fetched */
|
||||
readmesFetched: number;
|
||||
/** Number of READMEs that failed to fetch */
|
||||
readmesFailed: number;
|
||||
/** Number of AI summaries generated */
|
||||
summariesGenerated: number;
|
||||
/** Number of AI summaries that failed */
|
||||
summariesFailed: number;
|
||||
/** Nodes that were skipped (already had data) */
|
||||
skipped: number;
|
||||
/** Total duration in seconds */
|
||||
durationSeconds: number;
|
||||
/** Errors encountered */
|
||||
errors: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch processor for generating documentation for community nodes
|
||||
*/
|
||||
export class DocumentationBatchProcessor {
|
||||
private repository: NodeRepository;
|
||||
private fetcher: CommunityNodeFetcher;
|
||||
private generator: DocumentationGenerator;
|
||||
|
||||
constructor(
|
||||
repository: NodeRepository,
|
||||
fetcher?: CommunityNodeFetcher,
|
||||
generator?: DocumentationGenerator
|
||||
) {
|
||||
this.repository = repository;
|
||||
this.fetcher = fetcher || new CommunityNodeFetcher();
|
||||
this.generator = generator || createDocumentationGenerator();
|
||||
}
|
||||
|
||||
/**
|
||||
* Process all community nodes to generate documentation
|
||||
*/
|
||||
async processAll(options: BatchProcessorOptions = {}): Promise<BatchProcessorResult> {
|
||||
const startTime = Date.now();
|
||||
const result: BatchProcessorResult = {
|
||||
readmesFetched: 0,
|
||||
readmesFailed: 0,
|
||||
summariesGenerated: 0,
|
||||
summariesFailed: 0,
|
||||
skipped: 0,
|
||||
durationSeconds: 0,
|
||||
errors: [],
|
||||
};
|
||||
|
||||
const {
|
||||
skipExistingReadme = false,
|
||||
skipExistingSummary = false,
|
||||
readmeOnly = false,
|
||||
summaryOnly = false,
|
||||
limit,
|
||||
readmeConcurrency = 5,
|
||||
llmConcurrency = 3,
|
||||
progressCallback,
|
||||
} = options;
|
||||
|
||||
try {
|
||||
// Step 1: Fetch READMEs (unless summaryOnly)
|
||||
if (!summaryOnly) {
|
||||
const readmeResult = await this.fetchReadmes({
|
||||
skipExisting: skipExistingReadme,
|
||||
limit,
|
||||
concurrency: readmeConcurrency,
|
||||
progressCallback,
|
||||
});
|
||||
result.readmesFetched = readmeResult.fetched;
|
||||
result.readmesFailed = readmeResult.failed;
|
||||
result.skipped += readmeResult.skipped;
|
||||
result.errors.push(...readmeResult.errors);
|
||||
}
|
||||
|
||||
// Step 2: Generate AI summaries (unless readmeOnly)
|
||||
if (!readmeOnly) {
|
||||
const summaryResult = await this.generateSummaries({
|
||||
skipExisting: skipExistingSummary,
|
||||
limit,
|
||||
concurrency: llmConcurrency,
|
||||
progressCallback,
|
||||
});
|
||||
result.summariesGenerated = summaryResult.generated;
|
||||
result.summariesFailed = summaryResult.failed;
|
||||
result.skipped += summaryResult.skipped;
|
||||
result.errors.push(...summaryResult.errors);
|
||||
}
|
||||
|
||||
result.durationSeconds = (Date.now() - startTime) / 1000;
|
||||
return result;
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||
result.errors.push(`Batch processing failed: ${errorMessage}`);
|
||||
result.durationSeconds = (Date.now() - startTime) / 1000;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch READMEs for community nodes
|
||||
*/
|
||||
private async fetchReadmes(options: {
|
||||
skipExisting?: boolean;
|
||||
limit?: number;
|
||||
concurrency?: number;
|
||||
progressCallback?: (message: string, current: number, total: number) => void;
|
||||
}): Promise<{ fetched: number; failed: number; skipped: number; errors: string[] }> {
|
||||
const { skipExisting = false, limit, concurrency = 5, progressCallback } = options;
|
||||
|
||||
// Get nodes that need READMEs
|
||||
let nodes = skipExisting
|
||||
? this.repository.getCommunityNodesWithoutReadme()
|
||||
: this.repository.getCommunityNodes({ orderBy: 'downloads' });
|
||||
|
||||
if (limit) {
|
||||
nodes = nodes.slice(0, limit);
|
||||
}
|
||||
|
||||
logger.info(`Fetching READMEs for ${nodes.length} community nodes...`);
|
||||
|
||||
if (nodes.length === 0) {
|
||||
return { fetched: 0, failed: 0, skipped: 0, errors: [] };
|
||||
}
|
||||
|
||||
// Get package names
|
||||
const packageNames = nodes
|
||||
.map((n) => n.npmPackageName)
|
||||
.filter((name): name is string => !!name);
|
||||
|
||||
// Fetch READMEs in batches
|
||||
const readmeMap = await this.fetcher.fetchReadmesBatch(
|
||||
packageNames,
|
||||
progressCallback,
|
||||
concurrency
|
||||
);
|
||||
|
||||
// Store READMEs in database
|
||||
let fetched = 0;
|
||||
let failed = 0;
|
||||
const errors: string[] = [];
|
||||
|
||||
for (const node of nodes) {
|
||||
if (!node.npmPackageName) continue;
|
||||
|
||||
const readme = readmeMap.get(node.npmPackageName);
|
||||
if (readme) {
|
||||
try {
|
||||
this.repository.updateNodeReadme(node.nodeType, readme);
|
||||
fetched++;
|
||||
} catch (error) {
|
||||
const msg = `Failed to save README for ${node.nodeType}: ${error}`;
|
||||
errors.push(msg);
|
||||
failed++;
|
||||
}
|
||||
} else {
|
||||
failed++;
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`README fetch complete: ${fetched} fetched, ${failed} failed`);
|
||||
return { fetched, failed, skipped: 0, errors };
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate AI documentation summaries
|
||||
*/
|
||||
private async generateSummaries(options: {
|
||||
skipExisting?: boolean;
|
||||
limit?: number;
|
||||
concurrency?: number;
|
||||
progressCallback?: (message: string, current: number, total: number) => void;
|
||||
}): Promise<{ generated: number; failed: number; skipped: number; errors: string[] }> {
|
||||
const { skipExisting = false, limit, concurrency = 3, progressCallback } = options;
|
||||
|
||||
// Get nodes that need summaries (must have READMEs first)
|
||||
let nodes = skipExisting
|
||||
? this.repository.getCommunityNodesWithoutAISummary()
|
||||
: this.repository.getCommunityNodes({ orderBy: 'downloads' }).filter(
|
||||
(n) => n.npmReadme && n.npmReadme.length > 0
|
||||
);
|
||||
|
||||
if (limit) {
|
||||
nodes = nodes.slice(0, limit);
|
||||
}
|
||||
|
||||
logger.info(`Generating AI summaries for ${nodes.length} nodes...`);
|
||||
|
||||
if (nodes.length === 0) {
|
||||
return { generated: 0, failed: 0, skipped: 0, errors: [] };
|
||||
}
|
||||
|
||||
// Test LLM connection first
|
||||
const connectionTest = await this.generator.testConnection();
|
||||
if (!connectionTest.success) {
|
||||
const error = `LLM connection failed: ${connectionTest.message}`;
|
||||
logger.error(error);
|
||||
return { generated: 0, failed: nodes.length, skipped: 0, errors: [error] };
|
||||
}
|
||||
|
||||
logger.info(`LLM connection successful: ${connectionTest.message}`);
|
||||
|
||||
// Prepare inputs for batch generation
|
||||
const inputs: DocumentationInput[] = nodes.map((node) => ({
|
||||
nodeType: node.nodeType,
|
||||
displayName: node.displayName,
|
||||
description: node.description,
|
||||
readme: node.npmReadme || '',
|
||||
npmPackageName: node.npmPackageName,
|
||||
}));
|
||||
|
||||
// Generate summaries in parallel
|
||||
const results = await this.generator.generateBatch(inputs, concurrency, progressCallback);
|
||||
|
||||
// Store summaries in database
|
||||
let generated = 0;
|
||||
let failed = 0;
|
||||
const errors: string[] = [];
|
||||
|
||||
for (const result of results) {
|
||||
if (result.error) {
|
||||
errors.push(`${result.nodeType}: ${result.error}`);
|
||||
failed++;
|
||||
} else {
|
||||
try {
|
||||
this.repository.updateNodeAISummary(result.nodeType, result.summary);
|
||||
generated++;
|
||||
} catch (error) {
|
||||
const msg = `Failed to save summary for ${result.nodeType}: ${error}`;
|
||||
errors.push(msg);
|
||||
failed++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`AI summary generation complete: ${generated} generated, ${failed} failed`);
|
||||
return { generated, failed, skipped: 0, errors };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current documentation statistics
|
||||
*/
|
||||
getStats(): ReturnType<NodeRepository['getDocumentationStats']> {
|
||||
return this.repository.getDocumentationStats();
|
||||
}
|
||||
}
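// Example usage (illustrative sketch, not part of the changeset): wire the
// processor to an existing repository and run both phases. Import paths are
// assumed to match the rest of this diff.
import { createDatabaseAdapter } from '../database/database-adapter';
import { NodeRepository } from '../database/node-repository';
import { DocumentationBatchProcessor } from './documentation-batch-processor';

async function generateCommunityDocs(dbPath: string): Promise<void> {
  const db = await createDatabaseAdapter(dbPath);
  const processor = new DocumentationBatchProcessor(new NodeRepository(db));

  // Fill in missing READMEs and summaries only; leave existing data untouched
  const result = await processor.processAll({
    skipExistingReadme: true,
    skipExistingSummary: true,
    limit: 200,
    progressCallback: (message, current, total) => console.log(`${message}: ${current}/${total}`),
  });

  console.log(
    `READMEs: ${result.readmesFetched} fetched, ${result.readmesFailed} failed; ` +
      `summaries: ${result.summariesGenerated} generated, ${result.summariesFailed} failed ` +
      `(${result.durationSeconds.toFixed(1)}s)`
  );
}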
|
||||
@@ -1,362 +0,0 @@
|
||||
/**
|
||||
* AI-powered documentation generator for community nodes.
|
||||
*
|
||||
* Uses a local LLM (Qwen or compatible) via OpenAI-compatible API
|
||||
* to generate structured documentation summaries from README content.
|
||||
*/
|
||||
|
||||
import OpenAI from 'openai';
|
||||
import { z } from 'zod';
|
||||
import { logger } from '../utils/logger';
|
||||
|
||||
/**
|
||||
* Schema for AI-generated documentation summary
|
||||
*/
|
||||
export const DocumentationSummarySchema = z.object({
|
||||
purpose: z.string().describe('What this node does in 1-2 sentences'),
|
||||
capabilities: z.array(z.string()).max(10).describe('Key features and operations'),
|
||||
authentication: z.string().describe('How to authenticate (API key, OAuth, None, etc.)'),
|
||||
commonUseCases: z.array(z.string()).max(5).describe('Practical use case examples'),
|
||||
limitations: z.array(z.string()).max(5).describe('Known limitations or caveats'),
|
||||
relatedNodes: z.array(z.string()).max(5).describe('Related n8n nodes if mentioned'),
|
||||
});
|
||||
|
||||
export type DocumentationSummary = z.infer<typeof DocumentationSummarySchema>;
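// Example (illustrative, not part of the changeset): a summary that satisfies the
// schema above. The values are made up; safeParse succeeds without throwing for
// well-formed data.
const exampleSummaryCheck = DocumentationSummarySchema.safeParse({
  purpose: 'Sends and receives messages through a hypothetical chat API.',
  capabilities: ['Send message', 'List conversations', 'Create contact'],
  authentication: 'API key required',
  commonUseCases: ['Notify a channel when a workflow fails'],
  limitations: [],
  relatedNodes: ['n8n-nodes-base.httpRequest'],
});
// exampleSummaryCheck.success === true; exampleSummaryCheck.data is a DocumentationSummary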
|
||||
|
||||
/**
|
||||
* Input for documentation generation
|
||||
*/
|
||||
export interface DocumentationInput {
|
||||
nodeType: string;
|
||||
displayName: string;
|
||||
description?: string;
|
||||
readme: string;
|
||||
npmPackageName?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of documentation generation
|
||||
*/
|
||||
export interface DocumentationResult {
|
||||
nodeType: string;
|
||||
summary: DocumentationSummary;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configuration for the documentation generator
|
||||
*/
|
||||
export interface DocumentationGeneratorConfig {
|
||||
/** Base URL for the LLM server (e.g., http://localhost:1234/v1) */
|
||||
baseUrl: string;
|
||||
/** Model name to use (default: qwen3-4b-thinking-2507) */
|
||||
model?: string;
|
||||
/** API key (default: 'not-needed' for local servers) */
|
||||
apiKey?: string;
|
||||
/** Request timeout in ms (default: 60000) */
|
||||
timeout?: number;
|
||||
/** Max tokens for response (default: 2000) */
|
||||
maxTokens?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Default configuration
|
||||
*/
|
||||
const DEFAULT_CONFIG: Required<Omit<DocumentationGeneratorConfig, 'baseUrl'>> = {
|
||||
model: 'qwen3-4b-thinking-2507',
|
||||
apiKey: 'not-needed',
|
||||
timeout: 60000,
|
||||
maxTokens: 2000,
|
||||
};
|
||||
|
||||
/**
|
||||
* Generates structured documentation summaries for community nodes
|
||||
* using a local LLM via OpenAI-compatible API.
|
||||
*/
|
||||
export class DocumentationGenerator {
|
||||
private client: OpenAI;
|
||||
private model: string;
|
||||
private maxTokens: number;
|
||||
private timeout: number;
|
||||
|
||||
constructor(config: DocumentationGeneratorConfig) {
|
||||
const fullConfig = { ...DEFAULT_CONFIG, ...config };
|
||||
|
||||
this.client = new OpenAI({
|
||||
baseURL: config.baseUrl,
|
||||
apiKey: fullConfig.apiKey,
|
||||
timeout: fullConfig.timeout,
|
||||
});
|
||||
this.model = fullConfig.model;
|
||||
this.maxTokens = fullConfig.maxTokens;
|
||||
this.timeout = fullConfig.timeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate documentation summary for a single node
|
||||
*/
|
||||
async generateSummary(input: DocumentationInput): Promise<DocumentationResult> {
|
||||
try {
|
||||
const prompt = this.buildPrompt(input);
|
||||
|
||||
const completion = await this.client.chat.completions.create({
|
||||
model: this.model,
|
||||
max_tokens: this.maxTokens,
|
||||
temperature: 0.3, // Lower temperature for more consistent output
|
||||
messages: [
|
||||
{
|
||||
role: 'system',
|
||||
content: this.getSystemPrompt(),
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
content: prompt,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const content = completion.choices[0]?.message?.content;
|
||||
if (!content) {
|
||||
throw new Error('No content in LLM response');
|
||||
}
|
||||
|
||||
// Extract JSON from response (handle markdown code blocks)
|
||||
const jsonContent = this.extractJson(content);
|
||||
const parsed = JSON.parse(jsonContent);
|
||||
|
||||
// Truncate arrays to fit schema limits before validation
|
||||
const truncated = this.truncateArrayFields(parsed);
|
||||
|
||||
// Validate with Zod
|
||||
const validated = DocumentationSummarySchema.parse(truncated);
|
||||
|
||||
return {
|
||||
nodeType: input.nodeType,
|
||||
summary: validated,
|
||||
};
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||
logger.error(`Error generating documentation for ${input.nodeType}:`, error);
|
||||
|
||||
return {
|
||||
nodeType: input.nodeType,
|
||||
summary: this.getDefaultSummary(input),
|
||||
error: errorMessage,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate documentation for multiple nodes in parallel
|
||||
*
|
||||
* @param inputs Array of documentation inputs
|
||||
* @param concurrency Number of parallel requests (default: 3)
|
||||
* @param progressCallback Optional progress callback
|
||||
* @returns Array of documentation results
|
||||
*/
|
||||
async generateBatch(
|
||||
inputs: DocumentationInput[],
|
||||
concurrency: number = 3,
|
||||
progressCallback?: (message: string, current: number, total: number) => void
|
||||
): Promise<DocumentationResult[]> {
|
||||
const results: DocumentationResult[] = [];
|
||||
const total = inputs.length;
|
||||
|
||||
logger.info(`Generating documentation for ${total} nodes (concurrency: ${concurrency})...`);
|
||||
|
||||
// Process in batches based on concurrency
|
||||
for (let i = 0; i < inputs.length; i += concurrency) {
|
||||
const batch = inputs.slice(i, i + concurrency);
|
||||
|
||||
// Process batch concurrently
|
||||
const batchPromises = batch.map((input) => this.generateSummary(input));
|
||||
const batchResults = await Promise.all(batchPromises);
|
||||
|
||||
results.push(...batchResults);
|
||||
|
||||
if (progressCallback) {
|
||||
progressCallback('Generating documentation', Math.min(i + concurrency, total), total);
|
||||
}
|
||||
|
||||
// Small delay between batches to avoid overwhelming the LLM server
|
||||
if (i + concurrency < inputs.length) {
|
||||
await this.sleep(100);
|
||||
}
|
||||
}
|
||||
|
||||
const successCount = results.filter((r) => !r.error).length;
|
||||
logger.info(`Generated ${successCount}/${total} documentation summaries successfully`);
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the prompt for documentation generation
|
||||
*/
|
||||
private buildPrompt(input: DocumentationInput): string {
|
||||
// Truncate README to avoid token limits (keep first ~6000 chars)
|
||||
const truncatedReadme = this.truncateReadme(input.readme, 6000);
|
||||
|
||||
return `
|
||||
Node Information:
|
||||
- Name: ${input.displayName}
|
||||
- Type: ${input.nodeType}
|
||||
- Package: ${input.npmPackageName || 'unknown'}
|
||||
- Description: ${input.description || 'No description provided'}
|
||||
|
||||
README Content:
|
||||
${truncatedReadme}
|
||||
|
||||
Based on the README and node information above, generate a structured documentation summary.
|
||||
`.trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the system prompt for documentation generation
|
||||
*/
|
||||
private getSystemPrompt(): string {
|
||||
return `You are analyzing an n8n community node to generate documentation for AI assistants.
|
||||
|
||||
Your task: Extract key information from the README and create a structured JSON summary.
|
||||
|
||||
Output format (JSON only, no markdown):
|
||||
{
|
||||
"purpose": "What this node does in 1-2 sentences",
|
||||
"capabilities": ["feature1", "feature2", "feature3"],
|
||||
"authentication": "How to authenticate (e.g., 'API key required', 'OAuth2', 'None')",
|
||||
"commonUseCases": ["use case 1", "use case 2"],
|
||||
"limitations": ["limitation 1"] or [] if none mentioned,
|
||||
"relatedNodes": ["related n8n node types"] or [] if none mentioned
|
||||
}
|
||||
|
||||
Guidelines:
|
||||
- Focus on information useful for AI assistants configuring workflows
|
||||
- Be concise but comprehensive
|
||||
- For capabilities, list specific operations/actions supported
|
||||
- For authentication, identify the auth method from README
|
||||
- For limitations, note any mentioned constraints or missing features
|
||||
- Respond with valid JSON only, no additional text`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract JSON from LLM response (handles markdown code blocks)
|
||||
*/
|
||||
private extractJson(content: string): string {
|
||||
// Try to extract from markdown code block
|
||||
const jsonBlockMatch = content.match(/```(?:json)?\s*([\s\S]*?)```/);
|
||||
if (jsonBlockMatch) {
|
||||
return jsonBlockMatch[1].trim();
|
||||
}
|
||||
|
||||
// Try to find JSON object directly
|
||||
const jsonMatch = content.match(/\{[\s\S]*\}/);
|
||||
if (jsonMatch) {
|
||||
return jsonMatch[0];
|
||||
}
|
||||
|
||||
// Return as-is if no extraction needed
|
||||
return content.trim();
|
||||
}
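  // Illustration of the extraction above with a hypothetical reply:
  //   input:  '```json\n{"purpose": "Example node", "capabilities": []}\n```'
  //   output: '{"purpose": "Example node", "capabilities": []}'
  // The fenced block is matched first; a bare {...} object is the fallback, and
  // anything else is returned trimmed for JSON.parse to reject with a clear error.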
|
||||
|
||||
/**
|
||||
* Truncate array fields to fit schema limits
|
||||
* Ensures LLM responses with extra items still validate
|
||||
*/
|
||||
private truncateArrayFields(parsed: Record<string, unknown>): Record<string, unknown> {
|
||||
const limits: Record<string, number> = {
|
||||
capabilities: 10,
|
||||
commonUseCases: 5,
|
||||
limitations: 5,
|
||||
relatedNodes: 5,
|
||||
};
|
||||
|
||||
const result = { ...parsed };
|
||||
|
||||
for (const [field, maxLength] of Object.entries(limits)) {
|
||||
if (Array.isArray(result[field]) && result[field].length > maxLength) {
|
||||
result[field] = (result[field] as unknown[]).slice(0, maxLength);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Truncate README to avoid token limits while keeping useful content
|
||||
*/
|
||||
private truncateReadme(readme: string, maxLength: number): string {
|
||||
if (readme.length <= maxLength) {
|
||||
return readme;
|
||||
}
|
||||
|
||||
// Try to truncate at a paragraph boundary
|
||||
const truncated = readme.slice(0, maxLength);
|
||||
const lastParagraph = truncated.lastIndexOf('\n\n');
|
||||
|
||||
if (lastParagraph > maxLength * 0.7) {
|
||||
return truncated.slice(0, lastParagraph) + '\n\n[README truncated...]';
|
||||
}
|
||||
|
||||
return truncated + '\n\n[README truncated...]';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get default summary when generation fails
|
||||
*/
|
||||
private getDefaultSummary(input: DocumentationInput): DocumentationSummary {
|
||||
return {
|
||||
purpose: input.description || `Community node: ${input.displayName}`,
|
||||
capabilities: [],
|
||||
authentication: 'See README for authentication details',
|
||||
commonUseCases: [],
|
||||
limitations: ['Documentation could not be automatically generated'],
|
||||
relatedNodes: [],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Test connection to the LLM server
|
||||
*/
|
||||
async testConnection(): Promise<{ success: boolean; message: string }> {
|
||||
try {
|
||||
const completion = await this.client.chat.completions.create({
|
||||
model: this.model,
|
||||
max_tokens: 10,
|
||||
messages: [
|
||||
{
|
||||
role: 'user',
|
||||
content: 'Hello',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
if (completion.choices[0]?.message?.content) {
|
||||
return { success: true, message: `Connected to ${this.model}` };
|
||||
}
|
||||
|
||||
return { success: false, message: 'No response from LLM' };
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||
return { success: false, message: `Connection failed: ${message}` };
|
||||
}
|
||||
}
|
||||
|
||||
private sleep(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a documentation generator with environment variable configuration
|
||||
*/
|
||||
export function createDocumentationGenerator(): DocumentationGenerator {
|
||||
const baseUrl = process.env.N8N_MCP_LLM_BASE_URL || 'http://localhost:1234/v1';
|
||||
const model = process.env.N8N_MCP_LLM_MODEL || 'qwen3-4b-thinking-2507';
|
||||
const timeout = parseInt(process.env.N8N_MCP_LLM_TIMEOUT || '60000', 10);
|
||||
|
||||
return new DocumentationGenerator({
|
||||
baseUrl,
|
||||
model,
|
||||
timeout,
|
||||
});
|
||||
}
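// Example usage (illustrative sketch, not part of the changeset). Assumes an
// OpenAI-compatible server (e.g. LM Studio) is listening on the base URL below;
// the node identifiers and README text are made up for the example.
async function summarizeExampleNode(): Promise<void> {
  process.env.N8N_MCP_LLM_BASE_URL = 'http://localhost:1234/v1';
  process.env.N8N_MCP_LLM_MODEL = 'qwen3-4b-thinking-2507';

  const generator = createDocumentationGenerator();

  const connection = await generator.testConnection();
  if (!connection.success) {
    throw new Error(connection.message);
  }

  const result = await generator.generateSummary({
    nodeType: 'n8n-nodes-example.example',
    displayName: 'Example',
    readme: '# Example node\n\nSends messages via the Example API. Requires an API key.',
  });

  console.log(result.error ?? result.summary.purpose);
}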
|
||||
@@ -1,33 +0,0 @@
export {
  CommunityNodeFetcher,
  StrapiCommunityNode,
  StrapiCommunityNodeAttributes,
  StrapiPaginatedResponse,
  NpmPackageInfo,
  NpmSearchResult,
  NpmSearchResponse,
  NpmPackageWithReadme,
} from './community-node-fetcher';

export {
  CommunityNodeService,
  CommunityStats,
  SyncResult,
  SyncOptions,
} from './community-node-service';

export {
  DocumentationGenerator,
  DocumentationGeneratorConfig,
  DocumentationInput,
  DocumentationResult,
  DocumentationSummary,
  DocumentationSummarySchema,
  createDocumentationGenerator,
} from './documentation-generator';

export {
  DocumentationBatchProcessor,
  BatchProcessorOptions,
  BatchProcessorResult,
} from './documentation-batch-processor';
@@ -5,7 +5,7 @@
|
||||
* These structures define the expected data format, JavaScript type,
|
||||
* validation rules, and examples for each property type.
|
||||
*
|
||||
* Based on n8n-workflow v2.4.2 NodePropertyTypes
|
||||
* Based on n8n-workflow v1.120.3 NodePropertyTypes
|
||||
*
|
||||
* @module constants/type-structures
|
||||
* @since 2.23.0
|
||||
@@ -15,7 +15,7 @@ import type { NodePropertyTypes } from 'n8n-workflow';
|
||||
import type { TypeStructure } from '../types/type-structures';
|
||||
|
||||
/**
|
||||
* Complete type structure definitions for all 23 NodePropertyTypes
|
||||
* Complete type structure definitions for all 22 NodePropertyTypes
|
||||
*
|
||||
* Each entry defines:
|
||||
* - type: Category (primitive/object/collection/special)
|
||||
@@ -620,23 +620,6 @@ export const TYPE_STRUCTURES: Record<NodePropertyTypes, TypeStructure> = {
|
||||
'One-time import feature',
|
||||
],
|
||||
},
|
||||
|
||||
icon: {
|
||||
type: 'primitive',
|
||||
jsType: 'string',
|
||||
description: 'Icon identifier for visual representation',
|
||||
example: 'fa:envelope',
|
||||
examples: ['fa:envelope', 'fa:user', 'fa:cog', 'file:slack.svg'],
|
||||
validation: {
|
||||
allowEmpty: false,
|
||||
allowExpressions: false,
|
||||
},
|
||||
notes: [
|
||||
'References icon by name or file path',
|
||||
'Supports Font Awesome icons (fa:) and file paths (file:)',
|
||||
'Used for visual customization in UI',
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
|
||||
@@ -419,36 +419,12 @@ class BetterSQLiteStatement implements PreparedStatement {
|
||||
|
||||
/**
|
||||
* Statement wrapper for sql.js
|
||||
*
|
||||
* IMPORTANT: sql.js requires explicit memory management via Statement.free().
|
||||
* This wrapper automatically frees statement memory after each operation
|
||||
* to prevent memory leaks during sustained traffic.
|
||||
*
|
||||
* See: https://sql.js.org/documentation/Statement.html
|
||||
* "After calling db.prepare() you must manually free the assigned memory
|
||||
* by calling Statement.free()."
|
||||
*/
|
||||
class SQLJSStatement implements PreparedStatement {
|
||||
private boundParams: any = null;
|
||||
private freed: boolean = false;
|
||||
|
||||
|
||||
constructor(private stmt: any, private onModify: () => void) {}
|
||||
|
||||
/**
|
||||
* Free the underlying sql.js statement memory.
|
||||
* Safe to call multiple times - subsequent calls are no-ops.
|
||||
*/
|
||||
private freeStatement(): void {
|
||||
if (!this.freed && this.stmt) {
|
||||
try {
|
||||
this.stmt.free();
|
||||
this.freed = true;
|
||||
} catch (e) {
|
||||
// Statement may already be freed or invalid - ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
run(...params: any[]): RunResult {
|
||||
try {
|
||||
if (params.length > 0) {
|
||||
@@ -457,10 +433,10 @@ class SQLJSStatement implements PreparedStatement {
|
||||
this.stmt.bind(this.boundParams);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
this.stmt.run();
|
||||
this.onModify();
|
||||
|
||||
|
||||
// sql.js doesn't provide changes/lastInsertRowid easily
|
||||
return {
|
||||
changes: 1, // Assume success means 1 change
|
||||
@@ -469,12 +445,9 @@ class SQLJSStatement implements PreparedStatement {
|
||||
} catch (error) {
|
||||
this.stmt.reset();
|
||||
throw error;
|
||||
} finally {
|
||||
// Free statement memory after write operation completes
|
||||
this.freeStatement();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
get(...params: any[]): any {
|
||||
try {
|
||||
if (params.length > 0) {
|
||||
@@ -483,24 +456,21 @@ class SQLJSStatement implements PreparedStatement {
|
||||
this.stmt.bind(this.boundParams);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (this.stmt.step()) {
|
||||
const result = this.stmt.getAsObject();
|
||||
this.stmt.reset();
|
||||
return this.convertIntegerColumns(result);
|
||||
}
|
||||
|
||||
|
||||
this.stmt.reset();
|
||||
return undefined;
|
||||
} catch (error) {
|
||||
this.stmt.reset();
|
||||
throw error;
|
||||
} finally {
|
||||
// Free statement memory after read operation completes
|
||||
this.freeStatement();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
all(...params: any[]): any[] {
|
||||
try {
|
||||
if (params.length > 0) {
|
||||
@@ -509,20 +479,17 @@ class SQLJSStatement implements PreparedStatement {
|
||||
this.stmt.bind(this.boundParams);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const results: any[] = [];
|
||||
while (this.stmt.step()) {
|
||||
results.push(this.convertIntegerColumns(this.stmt.getAsObject()));
|
||||
}
|
||||
|
||||
|
||||
this.stmt.reset();
|
||||
return results;
|
||||
} catch (error) {
|
||||
this.stmt.reset();
|
||||
throw error;
|
||||
} finally {
|
||||
// Free statement memory after read operation completes
|
||||
this.freeStatement();
|
||||
}
|
||||
}
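// For reference, the manual lifecycle this wrapper hides looks roughly like the
// following in raw sql.js (an illustrative sketch using the standard sql.js API;
// table and parameter values are made up).
import initSqlJs from 'sql.js';

async function rawSqlJsExample(): Promise<void> {
  const SQL = await initSqlJs();
  const db = new SQL.Database();
  db.run('CREATE TABLE nodes (node_type TEXT, display_name TEXT)');

  const stmt = db.prepare('SELECT display_name FROM nodes WHERE node_type = ?');
  try {
    stmt.bind(['n8n-nodes-base.httpRequest']);
    while (stmt.step()) {
      console.log(stmt.getAsObject());
    }
  } finally {
    // Without this call the statement's WASM memory is never reclaimed -
    // exactly the leak SQLJSStatement.freeStatement() guards against.
    stmt.free();
  }
}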
|
||||
|
||||
|
||||
@@ -3,20 +3,6 @@ import { ParsedNode } from '../parsers/node-parser';
|
||||
import { SQLiteStorageService } from '../services/sqlite-storage-service';
|
||||
import { NodeTypeNormalizer } from '../utils/node-type-normalizer';
|
||||
|
||||
/**
|
||||
* Community node extension fields
|
||||
*/
|
||||
export interface CommunityNodeFields {
|
||||
isCommunity: boolean;
|
||||
isVerified: boolean;
|
||||
authorName?: string;
|
||||
authorGithubUrl?: string;
|
||||
npmPackageName?: string;
|
||||
npmVersion?: string;
|
||||
npmDownloads?: number;
|
||||
communityFetchedAt?: string;
|
||||
}
|
||||
|
||||
export class NodeRepository {
|
||||
private db: DatabaseAdapter;
|
||||
|
||||
@@ -31,9 +17,8 @@ export class NodeRepository {
|
||||
|
||||
/**
|
||||
* Save node with proper JSON serialization
|
||||
* Supports both core and community nodes via optional community fields
|
||||
*/
|
||||
saveNode(node: ParsedNode & Partial<CommunityNodeFields>): void {
|
||||
saveNode(node: ParsedNode): void {
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT OR REPLACE INTO nodes (
|
||||
node_type, package_name, display_name, description,
|
||||
@@ -41,10 +26,8 @@ export class NodeRepository {
|
||||
is_webhook, is_versioned, is_tool_variant, tool_variant_of,
|
||||
has_tool_variant, version, documentation,
|
||||
properties_schema, operations, credentials_required,
|
||||
outputs, output_names,
|
||||
is_community, is_verified, author_name, author_github_url,
|
||||
npm_package_name, npm_version, npm_downloads, community_fetched_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
outputs, output_names
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
stmt.run(
|
||||
@@ -67,16 +50,7 @@ export class NodeRepository {
|
||||
JSON.stringify(node.operations, null, 2),
|
||||
JSON.stringify(node.credentials, null, 2),
|
||||
node.outputs ? JSON.stringify(node.outputs, null, 2) : null,
|
||||
node.outputNames ? JSON.stringify(node.outputNames, null, 2) : null,
|
||||
// Community node fields
|
||||
node.isCommunity ? 1 : 0,
|
||||
node.isVerified ? 1 : 0,
|
||||
node.authorName || null,
|
||||
node.authorGithubUrl || null,
|
||||
node.npmPackageName || null,
|
||||
node.npmVersion || null,
|
||||
node.npmDownloads || 0,
|
||||
node.communityFetchedAt || null
|
||||
node.outputNames ? JSON.stringify(node.outputNames, null, 2) : null
|
||||
);
|
||||
}
|
||||
|
||||
@@ -103,18 +77,6 @@ export class NodeRepository {
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: case-insensitive lookup for community nodes
|
||||
// Handles cases where node type casing differs (e.g., .Chatwoot vs .chatwoot)
|
||||
if (!row) {
|
||||
const caseInsensitiveRow = this.db.prepare(`
|
||||
SELECT * FROM nodes WHERE LOWER(node_type) = LOWER(?)
|
||||
`).get(nodeType) as any;
|
||||
|
||||
if (caseInsensitiveRow) {
|
||||
return this.parseNodeRow(caseInsensitiveRow);
|
||||
}
|
||||
}
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
return this.parseNodeRow(row);
|
||||
@@ -353,22 +315,7 @@ export class NodeRepository {
|
||||
credentials: this.safeJsonParse(row.credentials_required, []),
|
||||
hasDocumentation: !!row.documentation,
|
||||
outputs: row.outputs ? this.safeJsonParse(row.outputs, null) : null,
|
||||
outputNames: row.output_names ? this.safeJsonParse(row.output_names, null) : null,
|
||||
// Community node fields
|
||||
isCommunity: Number(row.is_community) === 1,
|
||||
isVerified: Number(row.is_verified) === 1,
|
||||
authorName: row.author_name || null,
|
||||
authorGithubUrl: row.author_github_url || null,
|
||||
npmPackageName: row.npm_package_name || null,
|
||||
npmVersion: row.npm_version || null,
|
||||
npmDownloads: row.npm_downloads || 0,
|
||||
communityFetchedAt: row.community_fetched_at || null,
|
||||
// AI documentation fields
|
||||
npmReadme: row.npm_readme || null,
|
||||
aiDocumentationSummary: row.ai_documentation_summary
|
||||
? this.safeJsonParse(row.ai_documentation_summary, null)
|
||||
: null,
|
||||
aiSummaryGeneratedAt: row.ai_summary_generated_at || null,
|
||||
outputNames: row.output_names ? this.safeJsonParse(row.output_names, null) : null
|
||||
};
|
||||
}
|
||||
|
||||
@@ -575,182 +522,6 @@ export class NodeRepository {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// ========================================
|
||||
// Community Node Methods
|
||||
// ========================================
|
||||
|
||||
/**
|
||||
* Get community nodes with optional filters
|
||||
*/
|
||||
getCommunityNodes(options?: {
|
||||
verified?: boolean;
|
||||
limit?: number;
|
||||
orderBy?: 'downloads' | 'name' | 'updated';
|
||||
}): any[] {
|
||||
let sql = 'SELECT * FROM nodes WHERE is_community = 1';
|
||||
const params: any[] = [];
|
||||
|
||||
if (options?.verified !== undefined) {
|
||||
sql += ' AND is_verified = ?';
|
||||
params.push(options.verified ? 1 : 0);
|
||||
}
|
||||
|
||||
// Order by
|
||||
switch (options?.orderBy) {
|
||||
case 'downloads':
|
||||
sql += ' ORDER BY npm_downloads DESC';
|
||||
break;
|
||||
case 'updated':
|
||||
sql += ' ORDER BY community_fetched_at DESC';
|
||||
break;
|
||||
case 'name':
|
||||
default:
|
||||
sql += ' ORDER BY display_name';
|
||||
}
|
||||
|
||||
if (options?.limit) {
|
||||
sql += ' LIMIT ?';
|
||||
params.push(options.limit);
|
||||
}
|
||||
|
||||
const rows = this.db.prepare(sql).all(...params) as any[];
|
||||
return rows.map(row => this.parseNodeRow(row));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get community node statistics
|
||||
*/
|
||||
getCommunityStats(): { total: number; verified: number; unverified: number } {
|
||||
const totalResult = this.db.prepare(
|
||||
'SELECT COUNT(*) as count FROM nodes WHERE is_community = 1'
|
||||
).get() as any;
|
||||
|
||||
const verifiedResult = this.db.prepare(
|
||||
'SELECT COUNT(*) as count FROM nodes WHERE is_community = 1 AND is_verified = 1'
|
||||
).get() as any;
|
||||
|
||||
return {
|
||||
total: totalResult.count,
|
||||
verified: verifiedResult.count,
|
||||
unverified: totalResult.count - verifiedResult.count
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a node exists by npm package name
|
||||
*/
|
||||
hasNodeByNpmPackage(npmPackageName: string): boolean {
|
||||
const result = this.db.prepare(
|
||||
'SELECT 1 FROM nodes WHERE npm_package_name = ? LIMIT 1'
|
||||
).get(npmPackageName) as any;
|
||||
return !!result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get node by npm package name
|
||||
*/
|
||||
getNodeByNpmPackage(npmPackageName: string): any | null {
|
||||
const row = this.db.prepare(
|
||||
'SELECT * FROM nodes WHERE npm_package_name = ?'
|
||||
).get(npmPackageName) as any;
|
||||
|
||||
if (!row) return null;
|
||||
return this.parseNodeRow(row);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete all community nodes (for rebuild)
|
||||
*/
|
||||
deleteCommunityNodes(): number {
|
||||
const result = this.db.prepare(
|
||||
'DELETE FROM nodes WHERE is_community = 1'
|
||||
).run();
|
||||
return result.changes;
|
||||
}
|
||||
|
||||
// ========================================
|
||||
// AI Documentation Methods
|
||||
// ========================================
|
||||
|
||||
/**
|
||||
* Update the README content for a node
|
||||
*/
|
||||
updateNodeReadme(nodeType: string, readme: string): void {
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE nodes SET npm_readme = ? WHERE node_type = ?
|
||||
`);
|
||||
stmt.run(readme, nodeType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the AI-generated documentation summary for a node
|
||||
*/
|
||||
updateNodeAISummary(nodeType: string, summary: object): void {
|
||||
const stmt = this.db.prepare(`
|
||||
UPDATE nodes
|
||||
SET ai_documentation_summary = ?, ai_summary_generated_at = datetime('now')
|
||||
WHERE node_type = ?
|
||||
`);
|
||||
stmt.run(JSON.stringify(summary), nodeType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get community nodes that are missing README content
|
||||
*/
|
||||
getCommunityNodesWithoutReadme(): any[] {
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM nodes
|
||||
WHERE is_community = 1 AND (npm_readme IS NULL OR npm_readme = '')
|
||||
ORDER BY npm_downloads DESC
|
||||
`).all() as any[];
|
||||
return rows.map(row => this.parseNodeRow(row));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get community nodes that are missing AI documentation summary
|
||||
*/
|
||||
getCommunityNodesWithoutAISummary(): any[] {
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM nodes
|
||||
WHERE is_community = 1
|
||||
AND npm_readme IS NOT NULL AND npm_readme != ''
|
||||
AND (ai_documentation_summary IS NULL OR ai_documentation_summary = '')
|
||||
ORDER BY npm_downloads DESC
|
||||
`).all() as any[];
|
||||
return rows.map(row => this.parseNodeRow(row));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get documentation statistics for community nodes
|
||||
*/
|
||||
getDocumentationStats(): {
|
||||
total: number;
|
||||
withReadme: number;
|
||||
withAISummary: number;
|
||||
needingReadme: number;
|
||||
needingAISummary: number;
|
||||
} {
|
||||
const total = (this.db.prepare(
|
||||
'SELECT COUNT(*) as count FROM nodes WHERE is_community = 1'
|
||||
).get() as any).count;
|
||||
|
||||
const withReadme = (this.db.prepare(
|
||||
"SELECT COUNT(*) as count FROM nodes WHERE is_community = 1 AND npm_readme IS NOT NULL AND npm_readme != ''"
|
||||
).get() as any).count;
|
||||
|
||||
const withAISummary = (this.db.prepare(
|
||||
"SELECT COUNT(*) as count FROM nodes WHERE is_community = 1 AND ai_documentation_summary IS NOT NULL AND ai_documentation_summary != ''"
|
||||
).get() as any).count;
|
||||
|
||||
return {
|
||||
total,
|
||||
withReadme,
|
||||
withAISummary,
|
||||
needingReadme: total - withReadme,
|
||||
needingAISummary: withReadme - withAISummary
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* VERSION MANAGEMENT METHODS
|
||||
* Methods for working with node_versions and version_property_changes tables
|
||||
|
||||
@@ -20,19 +20,6 @@ CREATE TABLE IF NOT EXISTS nodes (
  credentials_required TEXT,
  outputs TEXT, -- JSON array of output definitions
  output_names TEXT, -- JSON array of output names
  -- Community node fields
  is_community INTEGER DEFAULT 0, -- 1 if this is a community node (not n8n-nodes-base)
  is_verified INTEGER DEFAULT 0, -- 1 if verified by n8n (from Strapi API)
  author_name TEXT, -- Community node author name
  author_github_url TEXT, -- Author's GitHub URL
  npm_package_name TEXT, -- Full npm package name (e.g., n8n-nodes-globals)
  npm_version TEXT, -- npm package version
  npm_downloads INTEGER DEFAULT 0, -- Weekly/monthly download count
  community_fetched_at DATETIME, -- When the community node was last synced
  -- AI-enhanced documentation fields
  npm_readme TEXT, -- Raw README markdown from npm registry
  ai_documentation_summary TEXT, -- AI-generated structured summary (JSON)
  ai_summary_generated_at DATETIME, -- When the AI summary was generated
  updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
);

@@ -42,11 +29,6 @@ CREATE INDEX IF NOT EXISTS idx_ai_tool ON nodes(is_ai_tool);
CREATE INDEX IF NOT EXISTS idx_category ON nodes(category);
CREATE INDEX IF NOT EXISTS idx_tool_variant ON nodes(is_tool_variant);
CREATE INDEX IF NOT EXISTS idx_tool_variant_of ON nodes(tool_variant_of);
-- Community node indexes
CREATE INDEX IF NOT EXISTS idx_community ON nodes(is_community);
CREATE INDEX IF NOT EXISTS idx_verified ON nodes(is_verified);
CREATE INDEX IF NOT EXISTS idx_npm_downloads ON nodes(npm_downloads);
CREATE INDEX IF NOT EXISTS idx_npm_package ON nodes(npm_package_name);

-- FTS5 full-text search index for nodes
CREATE VIRTUAL TABLE IF NOT EXISTS nodes_fts USING fts5(
@@ -1,197 +0,0 @@
|
||||
/**
|
||||
* Shared Database Manager - Singleton for cross-session database connection
|
||||
*
|
||||
* This module implements a singleton pattern to share a single database connection
|
||||
* across all MCP server sessions. This prevents memory leaks caused by each session
|
||||
* creating its own database connection (~900MB per session).
|
||||
*
|
||||
* Memory impact: Reduces per-session memory from ~900MB to near-zero by sharing
|
||||
* a single ~68MB database connection across all sessions.
|
||||
*
|
||||
* Issue: https://github.com/czlonkowski/n8n-mcp/issues/XXX
|
||||
*/
|
||||
|
||||
import { DatabaseAdapter, createDatabaseAdapter } from './database-adapter';
|
||||
import { NodeRepository } from './node-repository';
|
||||
import { TemplateService } from '../templates/template-service';
|
||||
import { EnhancedConfigValidator } from '../services/enhanced-config-validator';
|
||||
import { logger } from '../utils/logger';
|
||||
|
||||
/**
|
||||
* Shared database state - holds the singleton connection and services
|
||||
*/
|
||||
export interface SharedDatabaseState {
|
||||
db: DatabaseAdapter;
|
||||
repository: NodeRepository;
|
||||
templateService: TemplateService;
|
||||
dbPath: string;
|
||||
refCount: number;
|
||||
initialized: boolean;
|
||||
}
|
||||
|
||||
// Module-level singleton state
|
||||
let sharedState: SharedDatabaseState | null = null;
|
||||
let initializationPromise: Promise<SharedDatabaseState> | null = null;
|
||||
|
||||
/**
|
||||
* Get or create the shared database connection
|
||||
*
|
||||
* Thread-safe initialization using a promise lock pattern.
|
||||
* Multiple concurrent calls will wait for the same initialization.
|
||||
*
|
||||
* @param dbPath - Path to the SQLite database file
|
||||
* @returns Shared database state with connection and services
|
||||
*/
|
||||
export async function getSharedDatabase(dbPath: string): Promise<SharedDatabaseState> {
|
||||
// If already initialized with the same path, increment ref count and return
|
||||
if (sharedState && sharedState.initialized && sharedState.dbPath === dbPath) {
|
||||
sharedState.refCount++;
|
||||
logger.debug('Reusing shared database connection', {
|
||||
refCount: sharedState.refCount,
|
||||
dbPath
|
||||
});
|
||||
return sharedState;
|
||||
}
|
||||
|
||||
// If already initialized with a DIFFERENT path, this is a configuration error
|
||||
if (sharedState && sharedState.initialized && sharedState.dbPath !== dbPath) {
|
||||
logger.error('Attempted to initialize shared database with different path', {
|
||||
existingPath: sharedState.dbPath,
|
||||
requestedPath: dbPath
|
||||
});
|
||||
throw new Error(`Shared database already initialized with different path: ${sharedState.dbPath}`);
|
||||
}
|
||||
|
||||
// If initialization is in progress, wait for it
|
||||
if (initializationPromise) {
|
||||
try {
|
||||
const state = await initializationPromise;
|
||||
state.refCount++;
|
||||
logger.debug('Reusing shared database (waited for init)', {
|
||||
refCount: state.refCount,
|
||||
dbPath
|
||||
});
|
||||
return state;
|
||||
} catch (error) {
|
||||
// Initialization failed while we were waiting, clear promise and rethrow
|
||||
initializationPromise = null;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Start new initialization
|
||||
initializationPromise = initializeSharedDatabase(dbPath);
|
||||
|
||||
try {
|
||||
const state = await initializationPromise;
|
||||
// Clear the promise on success to allow future re-initialization after close
|
||||
initializationPromise = null;
|
||||
return state;
|
||||
} catch (error) {
|
||||
// Clear promise on failure to allow retry
|
||||
initializationPromise = null;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the shared database connection and services
|
||||
*/
|
||||
async function initializeSharedDatabase(dbPath: string): Promise<SharedDatabaseState> {
|
||||
logger.info('Initializing shared database connection', { dbPath });
|
||||
|
||||
const db = await createDatabaseAdapter(dbPath);
|
||||
const repository = new NodeRepository(db);
|
||||
const templateService = new TemplateService(db);
|
||||
|
||||
// Initialize similarity services for enhanced validation
|
||||
EnhancedConfigValidator.initializeSimilarityServices(repository);
|
||||
|
||||
sharedState = {
|
||||
db,
|
||||
repository,
|
||||
templateService,
|
||||
dbPath,
|
||||
refCount: 1,
|
||||
initialized: true
|
||||
};
|
||||
|
||||
logger.info('Shared database initialized successfully', {
|
||||
dbPath,
|
||||
refCount: sharedState.refCount
|
||||
});
|
||||
|
||||
return sharedState;
|
||||
}
|
||||
|
||||
/**
|
||||
* Release a reference to the shared database
|
||||
*
|
||||
* Decrements the reference count. Does NOT close the database
|
||||
* as it's shared across all sessions for the lifetime of the process.
|
||||
*
|
||||
* @param state - The shared database state to release
|
||||
*/
|
||||
export function releaseSharedDatabase(state: SharedDatabaseState): void {
|
||||
if (!state || !sharedState) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Guard against double-release (refCount going negative)
|
||||
if (sharedState.refCount <= 0) {
|
||||
logger.warn('Attempted to release shared database with refCount already at or below 0', {
|
||||
refCount: sharedState.refCount
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
sharedState.refCount--;
|
||||
logger.debug('Released shared database reference', {
|
||||
refCount: sharedState.refCount
|
||||
});
|
||||
|
||||
// Note: We intentionally do NOT close the database even when refCount hits 0
|
||||
// The database should remain open for the lifetime of the process to handle
|
||||
// new sessions. Only process shutdown should close it.
|
||||
}
|
||||
|
||||
/**
|
||||
* Force close the shared database (for graceful shutdown only)
|
||||
*
|
||||
* This should only be called during process shutdown, not during normal
|
||||
* session cleanup. Closing the database would break other active sessions.
|
||||
*/
|
||||
export async function closeSharedDatabase(): Promise<void> {
|
||||
if (!sharedState) {
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info('Closing shared database connection', {
|
||||
refCount: sharedState.refCount
|
||||
});
|
||||
|
||||
try {
|
||||
sharedState.db.close();
|
||||
} catch (error) {
|
||||
logger.warn('Error closing shared database', {
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
|
||||
sharedState = null;
|
||||
initializationPromise = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if shared database is initialized
|
||||
*/
|
||||
export function isSharedDatabaseInitialized(): boolean {
|
||||
return sharedState !== null && sharedState.initialized;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current reference count (for debugging/monitoring)
|
||||
*/
|
||||
export function getSharedDatabaseRefCount(): number {
|
||||
return sharedState?.refCount ?? 0;
|
||||
}
|
||||
@@ -26,7 +26,6 @@ import {
|
||||
} from './utils/protocol-version';
|
||||
import { InstanceContext, validateInstanceContext } from './types/instance-context';
|
||||
import { SessionState } from './types/session-state';
|
||||
import { closeSharedDatabase } from './database/shared-database';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
@@ -107,12 +106,7 @@ export class SingleSessionHTTPServer {
|
||||
private session: Session | null = null; // Keep for SSE compatibility
|
||||
private consoleManager = new ConsoleManager();
|
||||
private expressServer: any;
|
||||
// Session timeout reduced from 30 minutes to 5 minutes for faster cleanup
|
||||
// Configurable via SESSION_TIMEOUT_MINUTES environment variable
|
||||
// This prevents memory buildup from stale sessions
|
||||
private sessionTimeout = parseInt(
|
||||
process.env.SESSION_TIMEOUT_MINUTES || '5', 10
|
||||
) * 60 * 1000;
|
||||
private sessionTimeout = 30 * 60 * 1000; // 30 minutes
|
||||
private authToken: string | null = null;
|
||||
private cleanupTimer: NodeJS.Timeout | null = null;
|
||||
|
||||
@@ -498,29 +492,6 @@ export class SingleSessionHTTPServer {
|
||||
// For initialize requests: always create new transport and server
|
||||
logger.info('handleRequest: Creating new transport for initialize request');
|
||||
|
||||
// EAGER CLEANUP: Remove existing sessions for the same instance
|
||||
// This prevents memory buildup when clients reconnect without proper cleanup
|
||||
if (instanceContext?.instanceId) {
|
||||
const sessionsToRemove: string[] = [];
|
||||
for (const [existingSessionId, context] of Object.entries(this.sessionContexts)) {
|
||||
if (context?.instanceId === instanceContext.instanceId) {
|
||||
sessionsToRemove.push(existingSessionId);
|
||||
}
|
||||
}
|
||||
for (const oldSessionId of sessionsToRemove) {
|
||||
// Double-check session still exists (may have been cleaned by concurrent request)
|
||||
if (!this.transports[oldSessionId]) {
|
||||
continue;
|
||||
}
|
||||
logger.info('Cleaning up previous session for instance', {
|
||||
instanceId: instanceContext.instanceId,
|
||||
oldSession: oldSessionId,
|
||||
reason: 'instance_reconnect'
|
||||
});
|
||||
await this.removeSession(oldSessionId, 'instance_reconnect');
|
||||
}
|
||||
}
|
||||
|
||||
// Generate session ID based on multi-tenant configuration
|
||||
let sessionIdToUse: string;
|
||||
|
||||
@@ -706,25 +677,11 @@ export class SingleSessionHTTPServer {
|
||||
private async resetSessionSSE(res: express.Response): Promise<void> {
|
||||
// Clean up old session if exists
|
||||
if (this.session) {
|
||||
const sessionId = this.session.sessionId;
|
||||
logger.info('Closing previous session for SSE', { sessionId });
|
||||
|
||||
// Close server first to free resources (database, cache timer, etc.)
|
||||
// This mirrors the cleanup pattern in removeSession() (issue #542)
|
||||
// Handle server close errors separately so transport close still runs
|
||||
if (this.session.server && typeof this.session.server.close === 'function') {
|
||||
try {
|
||||
await this.session.server.close();
|
||||
} catch (serverError) {
|
||||
logger.warn('Error closing server for SSE session', { sessionId, error: serverError });
|
||||
}
|
||||
}
|
||||
|
||||
// Close transport last - always attempt even if server.close() failed
|
||||
try {
|
||||
logger.info('Closing previous session for SSE', { sessionId: this.session.sessionId });
|
||||
await this.session.transport.close();
|
||||
} catch (transportError) {
|
||||
logger.warn('Error closing transport for SSE session', { sessionId, error: transportError });
|
||||
} catch (error) {
|
||||
logger.warn('Error closing previous session:', error);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1451,16 +1408,7 @@ export class SingleSessionHTTPServer {
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Close the shared database connection (only during process shutdown)
|
||||
// This must happen after all sessions are closed
|
||||
try {
|
||||
await closeSharedDatabase();
|
||||
logger.info('Shared database closed');
|
||||
} catch (error) {
|
||||
logger.warn('Error closing shared database:', error);
|
||||
}
|
||||
|
||||
|
||||
logger.info('Single-Session HTTP server shutdown completed');
|
||||
}
|
||||
|
||||
|
||||
@@ -1,14 +1,7 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* @deprecated This fixed HTTP server is deprecated as of v2.31.8.
|
||||
* Use SingleSessionHTTPServer from http-server-single-session.ts instead.
|
||||
*
|
||||
* This implementation does not support SSE streaming required by clients like OpenAI Codex.
|
||||
* See: https://github.com/czlonkowski/n8n-mcp/issues/524
|
||||
*
|
||||
* Original purpose: Fixed HTTP server for n8n-MCP that properly handles
|
||||
* StreamableHTTPServerTransport initialization by bypassing it entirely.
|
||||
* This implementation ensures the transport is properly initialized before handling requests.
|
||||
* Fixed HTTP server for n8n-MCP that properly handles StreamableHTTPServerTransport initialization
|
||||
* This implementation ensures the transport is properly initialized before handling requests
|
||||
*/
|
||||
import express from 'express';
|
||||
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
|
||||
@@ -132,18 +125,7 @@ async function shutdown() {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use SingleSessionHTTPServer from http-server-single-session.ts instead.
|
||||
* This function does not support SSE streaming required by clients like OpenAI Codex.
|
||||
*/
|
||||
export async function startFixedHTTPServer() {
|
||||
// Log deprecation warning
|
||||
logger.warn(
|
||||
'DEPRECATION: startFixedHTTPServer() is deprecated as of v2.31.8. ' +
|
||||
'Use SingleSessionHTTPServer which supports SSE streaming. ' +
|
||||
'See: https://github.com/czlonkowski/n8n-mcp/issues/524'
|
||||
);
|
||||
|
||||
validateEnvironment();
|
||||
|
||||
const app = express();
|
||||
|
||||
@@ -1421,33 +1421,17 @@ export async function handleGetExecution(args: unknown, context?: InstanceContex
|
||||
// Parse and validate input with new parameters
|
||||
const schema = z.object({
|
||||
id: z.string(),
|
||||
// Filtering parameters
|
||||
mode: z.enum(['preview', 'summary', 'filtered', 'full', 'error']).optional(),
|
||||
// New filtering parameters
|
||||
mode: z.enum(['preview', 'summary', 'filtered', 'full']).optional(),
|
||||
nodeNames: z.array(z.string()).optional(),
|
||||
itemsLimit: z.number().optional(),
|
||||
includeInputData: z.boolean().optional(),
|
||||
// Legacy parameter (backward compatibility)
|
||||
includeData: z.boolean().optional(),
|
||||
// Error mode specific parameters
|
||||
errorItemsLimit: z.number().min(0).max(100).optional(),
|
||||
includeStackTrace: z.boolean().optional(),
|
||||
includeExecutionPath: z.boolean().optional(),
|
||||
fetchWorkflow: z.boolean().optional()
|
||||
includeData: z.boolean().optional()
|
||||
});
|
||||
|
||||
const params = schema.parse(args);
|
||||
const {
|
||||
id,
|
||||
mode,
|
||||
nodeNames,
|
||||
itemsLimit,
|
||||
includeInputData,
|
||||
includeData,
|
||||
errorItemsLimit,
|
||||
includeStackTrace,
|
||||
includeExecutionPath,
|
||||
fetchWorkflow
|
||||
} = params;
|
||||
const { id, mode, nodeNames, itemsLimit, includeInputData, includeData } = params;
|
||||
|
||||
/**
|
||||
* Map legacy includeData parameter to mode for backward compatibility
|
||||
@@ -1486,33 +1470,15 @@ export async function handleGetExecution(args: unknown, context?: InstanceContex
|
||||
};
|
||||
}
|
||||
|
||||
// For error mode, optionally fetch workflow for accurate upstream detection
|
||||
let workflow: Workflow | undefined;
|
||||
if (effectiveMode === 'error' && fetchWorkflow !== false && execution.workflowId) {
|
||||
try {
|
||||
workflow = await client.getWorkflow(execution.workflowId);
|
||||
} catch (e) {
|
||||
// Workflow fetch failed - continue without it (use heuristics)
|
||||
logger.debug('Could not fetch workflow for error analysis', {
|
||||
workflowId: execution.workflowId,
|
||||
error: e instanceof Error ? e.message : 'Unknown error'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Apply filtering using ExecutionProcessor
|
||||
const filterOptions: ExecutionFilterOptions = {
|
||||
mode: effectiveMode,
|
||||
nodeNames,
|
||||
itemsLimit,
|
||||
includeInputData,
|
||||
// Error mode specific options
|
||||
errorItemsLimit,
|
||||
includeStackTrace,
|
||||
includeExecutionPath
|
||||
includeInputData
|
||||
};
|
||||
|
||||
const processedExecution = processExecution(execution, filterOptions, workflow);
|
||||
const processedExecution = processExecution(execution, filterOptions);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
|
||||
@@ -124,23 +124,9 @@ Learn more: https://github.com/czlonkowski/n8n-mcp/blob/main/PRIVACY.md
|
||||
checkpoints.push(STARTUP_CHECKPOINTS.MCP_HANDSHAKE_STARTING);
|
||||
|
||||
if (mode === 'http') {
|
||||
// Check if we should use the fixed implementation (DEPRECATED)
|
||||
// Check if we should use the fixed implementation
|
||||
if (process.env.USE_FIXED_HTTP === 'true') {
|
||||
// DEPRECATION WARNING: Fixed HTTP implementation is deprecated
|
||||
// It does not support SSE streaming required by clients like OpenAI Codex
|
||||
logger.warn(
|
||||
'DEPRECATION WARNING: USE_FIXED_HTTP=true is deprecated as of v2.31.8. ' +
|
||||
'The fixed HTTP implementation does not support SSE streaming required by clients like OpenAI Codex. ' +
|
||||
'Please unset USE_FIXED_HTTP to use the modern SingleSessionHTTPServer which supports both JSON-RPC and SSE. ' +
|
||||
'This option will be removed in a future version. See: https://github.com/czlonkowski/n8n-mcp/issues/524'
|
||||
);
|
||||
console.warn('\n⚠️ DEPRECATION WARNING ⚠️');
|
||||
console.warn('USE_FIXED_HTTP=true is deprecated as of v2.31.8.');
|
||||
console.warn('The fixed HTTP implementation does not support SSE streaming.');
|
||||
console.warn('Please unset USE_FIXED_HTTP to use SingleSessionHTTPServer.');
|
||||
console.warn('See: https://github.com/czlonkowski/n8n-mcp/issues/524\n');
|
||||
|
||||
// Use the deprecated fixed HTTP implementation
|
||||
// Use the fixed HTTP implementation that bypasses StreamableHTTPServerTransport issues
|
||||
const { startFixedHTTPServer } = await import('../http-server');
|
||||
await startFixedHTTPServer();
|
||||
} else {
|
||||
|
||||
@@ -14,7 +14,6 @@ import { getWorkflowExampleString } from './workflow-examples';
|
||||
import { logger } from '../utils/logger';
|
||||
import { NodeRepository } from '../database/node-repository';
|
||||
import { DatabaseAdapter, createDatabaseAdapter } from '../database/database-adapter';
|
||||
import { getSharedDatabase, releaseSharedDatabase, SharedDatabaseState } from '../database/shared-database';
|
||||
import { PropertyFilter } from '../services/property-filter';
|
||||
import { TaskTemplates } from '../services/task-templates';
|
||||
import { ConfigValidator } from '../services/config-validator';
|
||||
@@ -61,9 +60,6 @@ interface NodeRow {
|
||||
properties_schema?: string;
|
||||
operations?: string;
|
||||
credentials_required?: string;
|
||||
// AI documentation fields
|
||||
ai_documentation_summary?: string;
|
||||
ai_summary_generated_at?: string;
|
||||
}
|
||||
|
||||
interface VersionSummary {
|
||||
@@ -151,9 +147,6 @@ export class N8NDocumentationMCPServer {
|
||||
private previousToolTimestamp: number = Date.now();
|
||||
private earlyLogger: EarlyErrorLogger | null = null;
|
||||
private disabledToolsCache: Set<string> | null = null;
|
||||
private useSharedDatabase: boolean = false; // Track if using shared DB for cleanup
|
||||
private sharedDbState: SharedDatabaseState | null = null; // Reference to shared DB state for release
|
||||
private isShutdown: boolean = false; // Prevent double-shutdown
|
||||
|
||||
constructor(instanceContext?: InstanceContext, earlyLogger?: EarlyErrorLogger) {
|
||||
this.instanceContext = instanceContext;
|
||||
@@ -249,39 +242,18 @@ export class N8NDocumentationMCPServer {
|
||||
* Order of cleanup:
|
||||
* 1. Close MCP server connection
|
||||
* 2. Destroy cache (clears entries AND stops cleanup timer)
|
||||
* 3. Release shared database OR close dedicated connection
|
||||
* 3. Close database connection
|
||||
* 4. Null out references to help GC
|
||||
*
|
||||
* IMPORTANT: For shared databases, we only release the reference (decrement refCount),
|
||||
* NOT close the database. The database stays open for other sessions.
|
||||
* For in-memory databases (tests), we close the dedicated connection.
|
||||
*/
|
||||
async close(): Promise<void> {
|
||||
// Wait for initialization to complete (or fail) before cleanup
|
||||
// This prevents race conditions where close runs while init is in progress
|
||||
try {
|
||||
await this.initialized;
|
||||
} catch (error) {
|
||||
// Initialization failed - that's OK, we still need to clean up
|
||||
logger.debug('Initialization had failed, proceeding with cleanup', {
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
await this.server.close();
|
||||
|
||||
// Use destroy() not clear() - also stops the cleanup timer
|
||||
this.cache.destroy();
|
||||
|
||||
// Handle database cleanup based on whether it's shared or dedicated
|
||||
if (this.useSharedDatabase && this.sharedDbState) {
|
||||
// Shared database: release reference, don't close
|
||||
// The database stays open for other sessions
|
||||
releaseSharedDatabase(this.sharedDbState);
|
||||
logger.debug('Released shared database reference');
|
||||
} else if (this.db) {
|
||||
// Dedicated database (in-memory for tests): close it
|
||||
// Close database connection before nullifying reference
|
||||
if (this.db) {
|
||||
try {
|
||||
this.db.close();
|
||||
} catch (dbError) {
|
||||
@@ -296,7 +268,6 @@ export class N8NDocumentationMCPServer {
|
||||
this.repository = null;
|
||||
this.templateService = null;
|
||||
this.earlyLogger = null;
|
||||
this.sharedDbState = null;
|
||||
} catch (error) {
|
||||
// Log but don't throw - cleanup should be best-effort
|
||||
logger.warn('Error closing MCP server', { error: error instanceof Error ? error.message : String(error) });
|
||||
@@ -312,32 +283,23 @@ export class N8NDocumentationMCPServer {
|
||||
|
||||
logger.debug('Database initialization starting...', { dbPath });
|
||||
|
||||
// For in-memory databases (tests), create a dedicated connection
|
||||
// For regular databases, use the shared connection to prevent memory leaks
|
||||
this.db = await createDatabaseAdapter(dbPath);
|
||||
logger.debug('Database adapter created');
|
||||
|
||||
// If using in-memory database for tests, initialize schema
|
||||
if (dbPath === ':memory:') {
|
||||
this.db = await createDatabaseAdapter(dbPath);
|
||||
logger.debug('Database adapter created (in-memory mode)');
|
||||
await this.initializeInMemorySchema();
|
||||
logger.debug('In-memory schema initialized');
|
||||
this.repository = new NodeRepository(this.db);
|
||||
this.templateService = new TemplateService(this.db);
|
||||
// Initialize similarity services for enhanced validation
|
||||
EnhancedConfigValidator.initializeSimilarityServices(this.repository);
|
||||
this.useSharedDatabase = false;
|
||||
} else {
|
||||
// Use shared database connection to prevent ~900MB memory leak per session
|
||||
// See: Memory leak fix - database was being duplicated per session
|
||||
const sharedState = await getSharedDatabase(dbPath);
|
||||
this.db = sharedState.db;
|
||||
this.repository = sharedState.repository;
|
||||
this.templateService = sharedState.templateService;
|
||||
this.sharedDbState = sharedState;
|
||||
this.useSharedDatabase = true;
|
||||
logger.debug('Using shared database connection');
|
||||
}
|
||||
|
||||
this.repository = new NodeRepository(this.db);
|
||||
logger.debug('Node repository initialized');
|
||||
|
||||
this.templateService = new TemplateService(this.db);
|
||||
logger.debug('Template service initialized');
|
||||
|
||||
// Initialize similarity services for enhanced validation
|
||||
EnhancedConfigValidator.initializeSimilarityServices(this.repository);
|
||||
logger.debug('Similarity services initialized');
|
||||
|
||||
// Checkpoint: Database connected (v2.18.3)
|
||||
@@ -1110,11 +1072,7 @@ export class N8NDocumentationMCPServer {
|
||||
this.validateToolParams(name, args, ['query']);
|
||||
// Convert limit to number if provided, otherwise use default
|
||||
const limit = args.limit !== undefined ? Number(args.limit) || 20 : 20;
|
||||
return this.searchNodes(args.query, limit, {
|
||||
mode: args.mode,
|
||||
includeExamples: args.includeExamples,
|
||||
source: args.source
|
||||
});
|
||||
return this.searchNodes(args.query, limit, { mode: args.mode, includeExamples: args.includeExamples });
|
||||
case 'get_node':
|
||||
this.validateToolParams(name, args, ['nodeType']);
|
||||
// Handle consolidated modes: docs, search_properties
|
||||
@@ -1464,7 +1422,6 @@ export class N8NDocumentationMCPServer {
|
||||
mode?: 'OR' | 'AND' | 'FUZZY';
|
||||
includeSource?: boolean;
|
||||
includeExamples?: boolean;
|
||||
source?: 'all' | 'core' | 'community' | 'verified';
|
||||
}
|
||||
): Promise<any> {
|
||||
await this.ensureInitialized();
|
||||
@@ -1503,11 +1460,7 @@ export class N8NDocumentationMCPServer {
|
||||
query: string,
|
||||
limit: number,
|
||||
mode: 'OR' | 'AND' | 'FUZZY',
|
||||
options?: {
|
||||
includeSource?: boolean;
|
||||
includeExamples?: boolean;
|
||||
source?: 'all' | 'core' | 'community' | 'verified';
|
||||
}
|
||||
options?: { includeSource?: boolean; includeExamples?: boolean; }
|
||||
): Promise<any> {
|
||||
if (!this.db) throw new Error('Database not initialized');
|
||||
|
||||
@@ -1547,22 +1500,6 @@ export class N8NDocumentationMCPServer {
|
||||
}
|
||||
|
||||
try {
|
||||
// Build source filter SQL
|
||||
let sourceFilter = '';
|
||||
const sourceValue = options?.source || 'all';
|
||||
switch (sourceValue) {
|
||||
case 'core':
|
||||
sourceFilter = 'AND n.is_community = 0';
|
||||
break;
|
||||
case 'community':
|
||||
sourceFilter = 'AND n.is_community = 1';
|
||||
break;
|
||||
case 'verified':
|
||||
sourceFilter = 'AND n.is_community = 1 AND n.is_verified = 1';
|
||||
break;
|
||||
// 'all' - no filter
|
||||
}
|
||||
|
||||
// Use FTS5 with ranking
|
||||
const nodes = this.db.prepare(`
|
||||
SELECT
|
||||
@@ -1571,7 +1508,6 @@ export class N8NDocumentationMCPServer {
|
||||
FROM nodes n
|
||||
JOIN nodes_fts ON n.rowid = nodes_fts.rowid
|
||||
WHERE nodes_fts MATCH ?
|
||||
${sourceFilter}
|
||||
ORDER BY
|
||||
CASE
|
||||
WHEN LOWER(n.display_name) = LOWER(?) THEN 0
|
||||
@@ -1615,31 +1551,15 @@ export class N8NDocumentationMCPServer {
|
||||
|
||||
const result: any = {
|
||||
query,
|
||||
results: scoredNodes.map(node => {
|
||||
const nodeResult: any = {
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: getWorkflowNodeType(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name,
|
||||
relevance: this.calculateRelevance(node, cleanedQuery)
|
||||
};
|
||||
|
||||
// Add community metadata if this is a community node
|
||||
if ((node as any).is_community === 1) {
|
||||
nodeResult.isCommunity = true;
|
||||
nodeResult.isVerified = (node as any).is_verified === 1;
|
||||
if ((node as any).author_name) {
|
||||
nodeResult.authorName = (node as any).author_name;
|
||||
}
|
||||
if ((node as any).npm_downloads) {
|
||||
nodeResult.npmDownloads = (node as any).npm_downloads;
|
||||
}
|
||||
}
|
||||
|
||||
return nodeResult;
|
||||
}),
|
||||
results: scoredNodes.map(node => ({
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: getWorkflowNodeType(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name,
|
||||
relevance: this.calculateRelevance(node, cleanedQuery)
|
||||
})),
|
||||
totalCount: scoredNodes.length
|
||||
};
|
||||
|
||||
@@ -1855,38 +1775,17 @@ export class N8NDocumentationMCPServer {
|
||||
private async searchNodesLIKE(
|
||||
query: string,
|
||||
limit: number,
|
||||
options?: {
|
||||
includeSource?: boolean;
|
||||
includeExamples?: boolean;
|
||||
source?: 'all' | 'core' | 'community' | 'verified';
|
||||
}
|
||||
options?: { includeSource?: boolean; includeExamples?: boolean; }
|
||||
): Promise<any> {
|
||||
if (!this.db) throw new Error('Database not initialized');
|
||||
|
||||
// Build source filter SQL
|
||||
let sourceFilter = '';
|
||||
const sourceValue = options?.source || 'all';
|
||||
switch (sourceValue) {
|
||||
case 'core':
|
||||
sourceFilter = 'AND is_community = 0';
|
||||
break;
|
||||
case 'community':
|
||||
sourceFilter = 'AND is_community = 1';
|
||||
break;
|
||||
case 'verified':
|
||||
sourceFilter = 'AND is_community = 1 AND is_verified = 1';
|
||||
break;
|
||||
// 'all' - no filter
|
||||
}
|
||||
|
||||
// This is the existing LIKE-based implementation
|
||||
// Handle exact phrase searches with quotes
|
||||
if (query.startsWith('"') && query.endsWith('"')) {
|
||||
const exactPhrase = query.slice(1, -1);
|
||||
const nodes = this.db!.prepare(`
|
||||
SELECT * FROM nodes
|
||||
WHERE (node_type LIKE ? OR display_name LIKE ? OR description LIKE ?)
|
||||
${sourceFilter}
|
||||
WHERE node_type LIKE ? OR display_name LIKE ? OR description LIKE ?
|
||||
LIMIT ?
|
||||
`).all(`%${exactPhrase}%`, `%${exactPhrase}%`, `%${exactPhrase}%`, limit * 3) as NodeRow[];
|
||||
|
||||
@@ -1895,30 +1794,14 @@ export class N8NDocumentationMCPServer {
|
||||
|
||||
const result: any = {
|
||||
query,
|
||||
results: rankedNodes.map(node => {
|
||||
const nodeResult: any = {
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: getWorkflowNodeType(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name
|
||||
};
|
||||
|
||||
// Add community metadata if this is a community node
|
||||
if ((node as any).is_community === 1) {
|
||||
nodeResult.isCommunity = true;
|
||||
nodeResult.isVerified = (node as any).is_verified === 1;
|
||||
if ((node as any).author_name) {
|
||||
nodeResult.authorName = (node as any).author_name;
|
||||
}
|
||||
if ((node as any).npm_downloads) {
|
||||
nodeResult.npmDownloads = (node as any).npm_downloads;
|
||||
}
|
||||
}
|
||||
|
||||
return nodeResult;
|
||||
}),
|
||||
results: rankedNodes.map(node => ({
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: getWorkflowNodeType(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name
|
||||
})),
|
||||
totalCount: rankedNodes.length
|
||||
};
|
||||
|
||||
@@ -1970,9 +1853,8 @@ export class N8NDocumentationMCPServer {
|
||||
params.push(limit * 3);
|
||||
|
||||
const nodes = this.db!.prepare(`
|
||||
SELECT DISTINCT * FROM nodes
|
||||
WHERE (${conditions})
|
||||
${sourceFilter}
|
||||
SELECT DISTINCT * FROM nodes
|
||||
WHERE ${conditions}
|
||||
LIMIT ?
|
||||
`).all(...params) as NodeRow[];
|
||||
|
||||
@@ -1981,30 +1863,14 @@ export class N8NDocumentationMCPServer {
|
||||
|
||||
const result: any = {
|
||||
query,
|
||||
results: rankedNodes.map(node => {
|
||||
const nodeResult: any = {
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: getWorkflowNodeType(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name
|
||||
};
|
||||
|
||||
// Add community metadata if this is a community node
|
||||
if ((node as any).is_community === 1) {
|
||||
nodeResult.isCommunity = true;
|
||||
nodeResult.isVerified = (node as any).is_verified === 1;
|
||||
if ((node as any).author_name) {
|
||||
nodeResult.authorName = (node as any).author_name;
|
||||
}
|
||||
if ((node as any).npm_downloads) {
|
||||
nodeResult.npmDownloads = (node as any).npm_downloads;
|
||||
}
|
||||
}
|
||||
|
||||
return nodeResult;
|
||||
}),
|
||||
results: rankedNodes.map(node => ({
|
||||
nodeType: node.node_type,
|
||||
workflowNodeType: getWorkflowNodeType(node.package_name, node.node_type),
|
||||
displayName: node.display_name,
|
||||
description: node.description,
|
||||
category: node.category,
|
||||
package: node.package_name
|
||||
})),
|
||||
totalCount: rankedNodes.length
|
||||
};
|
||||
|
||||
@@ -2229,34 +2095,31 @@ export class N8NDocumentationMCPServer {
|
||||
// First try with normalized type
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
let node = this.db!.prepare(`
|
||||
SELECT node_type, display_name, documentation, description,
|
||||
ai_documentation_summary, ai_summary_generated_at
|
||||
FROM nodes
|
||||
SELECT node_type, display_name, documentation, description
|
||||
FROM nodes
|
||||
WHERE node_type = ?
|
||||
`).get(normalizedType) as NodeRow | undefined;
|
||||
|
||||
|
||||
// If not found and normalization changed the type, try original
|
||||
if (!node && normalizedType !== nodeType) {
|
||||
node = this.db!.prepare(`
|
||||
SELECT node_type, display_name, documentation, description,
|
||||
ai_documentation_summary, ai_summary_generated_at
|
||||
FROM nodes
|
||||
SELECT node_type, display_name, documentation, description
|
||||
FROM nodes
|
||||
WHERE node_type = ?
|
||||
`).get(nodeType) as NodeRow | undefined;
|
||||
}
|
||||
|
||||
|
||||
// If still not found, try alternatives
|
||||
if (!node) {
|
||||
const alternatives = getNodeTypeAlternatives(normalizedType);
|
||||
|
||||
|
||||
for (const alt of alternatives) {
|
||||
node = this.db!.prepare(`
|
||||
SELECT node_type, display_name, documentation, description,
|
||||
ai_documentation_summary, ai_summary_generated_at
|
||||
FROM nodes
|
||||
SELECT node_type, display_name, documentation, description
|
||||
FROM nodes
|
||||
WHERE node_type = ?
|
||||
`).get(alt) as NodeRow | undefined;
|
||||
|
||||
|
||||
if (node) break;
|
||||
}
|
||||
}
|
||||
@@ -2265,11 +2128,6 @@ export class N8NDocumentationMCPServer {
|
||||
throw new Error(`Node ${nodeType} not found`);
|
||||
}
|
||||
|
||||
// Parse AI documentation summary if present
|
||||
const aiDocSummary = node.ai_documentation_summary
|
||||
? this.safeJsonParse(node.ai_documentation_summary, null)
|
||||
: null;
|
||||
|
||||
// If no documentation, generate fallback with null safety
|
||||
if (!node.documentation) {
|
||||
const essentials = await this.getNodeEssentials(nodeType);
|
||||
@@ -2293,9 +2151,7 @@ ${essentials?.commonProperties?.length > 0 ?
|
||||
## Note
|
||||
Full documentation is being prepared. For now, use get_node_essentials for configuration help.
|
||||
`,
|
||||
hasDocumentation: false,
|
||||
aiDocumentationSummary: aiDocSummary,
|
||||
aiSummaryGeneratedAt: node.ai_summary_generated_at || null,
|
||||
hasDocumentation: false
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2304,19 +2160,9 @@ Full documentation is being prepared. For now, use get_node_essentials for confi
|
||||
displayName: node.display_name || 'Unknown Node',
|
||||
documentation: node.documentation,
|
||||
hasDocumentation: true,
|
||||
aiDocumentationSummary: aiDocSummary,
|
||||
aiSummaryGeneratedAt: node.ai_summary_generated_at || null,
|
||||
};
|
||||
}
|
||||
|
||||
private safeJsonParse(json: string, defaultValue: any = null): any {
|
||||
try {
|
||||
return JSON.parse(json);
|
||||
} catch {
|
||||
return defaultValue;
|
||||
}
|
||||
}
|
||||
|
||||
private async getDatabaseStatistics(): Promise<any> {
|
||||
await this.ensureInitialized();
|
||||
if (!this.db) throw new Error('Database not initialized');
|
||||
@@ -3945,33 +3791,8 @@ Full documentation is being prepared. For now, use get_node_essentials for confi
|
||||
}
|
||||
|
||||
async shutdown(): Promise<void> {
|
||||
// Prevent double-shutdown
|
||||
if (this.isShutdown) {
|
||||
logger.debug('Shutdown already called, skipping');
|
||||
return;
|
||||
}
|
||||
this.isShutdown = true;
|
||||
|
||||
logger.info('Shutting down MCP server...');
|
||||
|
||||
// Wait for initialization to complete (or fail) before cleanup
|
||||
// This prevents race conditions where shutdown runs while init is in progress
|
||||
try {
|
||||
await this.initialized;
|
||||
} catch (error) {
|
||||
// Initialization failed - that's OK, we still need to clean up
|
||||
logger.debug('Initialization had failed, proceeding with cleanup', {
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
|
||||
// Close MCP server connection (for consistency with close() method)
|
||||
try {
|
||||
await this.server.close();
|
||||
} catch (error) {
|
||||
logger.error('Error closing MCP server:', error);
|
||||
}
|
||||
|
||||
|
||||
// Clean up cache timers to prevent memory leaks
|
||||
if (this.cache) {
|
||||
try {
|
||||
@@ -3981,31 +3802,15 @@ Full documentation is being prepared. For now, use get_node_essentials for confi
|
||||
logger.error('Error cleaning up cache:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle database cleanup based on whether it's shared or dedicated
|
||||
// For shared databases, we only release the reference (decrement refCount)
|
||||
// For dedicated databases (in-memory for tests), we close the connection
|
||||
if (this.useSharedDatabase && this.sharedDbState) {
|
||||
|
||||
// Close database connection if it exists
|
||||
if (this.db) {
|
||||
try {
|
||||
releaseSharedDatabase(this.sharedDbState);
|
||||
logger.info('Released shared database reference');
|
||||
} catch (error) {
|
||||
logger.error('Error releasing shared database:', error);
|
||||
}
|
||||
} else if (this.db) {
|
||||
try {
|
||||
this.db.close();
|
||||
await this.db.close();
|
||||
logger.info('Database connection closed');
|
||||
} catch (error) {
|
||||
logger.error('Error closing database:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Null out references to help garbage collection
|
||||
this.db = null;
|
||||
this.repository = null;
|
||||
this.templateService = null;
|
||||
this.earlyLogger = null;
|
||||
this.sharedDbState = null;
|
||||
}
|
||||
}
|
||||
@@ -4,64 +4,50 @@ export const searchNodesDoc: ToolDocumentation = {
|
||||
name: 'search_nodes',
|
||||
category: 'discovery',
|
||||
essentials: {
|
||||
description: 'Text search across node names and descriptions. Returns most relevant nodes first, with frequently-used nodes (HTTP Request, Webhook, Set, Code, Slack) prioritized in results. Searches all 800+ nodes including 300+ verified community nodes.',
|
||||
keyParameters: ['query', 'mode', 'limit', 'source', 'includeExamples'],
|
||||
description: 'Text search across node names and descriptions. Returns most relevant nodes first, with frequently-used nodes (HTTP Request, Webhook, Set, Code, Slack) prioritized in results. Searches all 500+ nodes in the database.',
|
||||
keyParameters: ['query', 'mode', 'limit'],
|
||||
example: 'search_nodes({query: "webhook"})',
|
||||
performance: '<20ms even for complex queries',
|
||||
tips: [
|
||||
'OR mode (default): Matches any search word',
|
||||
'AND mode: Requires all words present',
|
||||
'FUZZY mode: Handles typos and spelling errors',
|
||||
'Use quotes for exact phrases: "google sheets"',
|
||||
'Use source="community" to search only community nodes',
|
||||
'Use source="verified" for verified community nodes only'
|
||||
'Use quotes for exact phrases: "google sheets"'
|
||||
]
|
||||
},
|
||||
full: {
|
||||
description: 'Full-text search engine for n8n nodes using SQLite FTS5. Searches across node names, descriptions, and aliases. Results are ranked by relevance with commonly-used nodes given priority. Includes 500+ core nodes and 300+ community nodes. Common core nodes include: HTTP Request, Webhook, Set, Code, IF, Switch, Merge, SplitInBatches, Slack, Google Sheets. Community nodes include verified integrations like BrightData, ScrapingBee, CraftMyPDF, and more.',
|
||||
description: 'Full-text search engine for n8n nodes using SQLite FTS5. Searches across node names, descriptions, and aliases. Results are ranked by relevance with commonly-used nodes given priority. Common nodes include: HTTP Request, Webhook, Set, Code, IF, Switch, Merge, SplitInBatches, Slack, Google Sheets.',
|
||||
parameters: {
|
||||
query: { type: 'string', description: 'Search keywords. Use quotes for exact phrases like "google sheets"', required: true },
|
||||
limit: { type: 'number', description: 'Maximum results to return. Default: 20, Max: 100', required: false },
|
||||
mode: { type: 'string', description: 'Search mode: "OR" (any word matches, default), "AND" (all words required), "FUZZY" (typo-tolerant)', required: false },
|
||||
source: { type: 'string', description: 'Filter by node source: "all" (default, everything), "core" (n8n base nodes only), "community" (community nodes only), "verified" (verified community nodes only)', required: false },
|
||||
includeExamples: { type: 'boolean', description: 'Include top 2 real-world configuration examples from popular templates for each node. Default: false. Adds ~200-400 tokens per node.', required: false }
|
||||
mode: { type: 'string', description: 'Search mode: "OR" (any word matches, default), "AND" (all words required), "FUZZY" (typo-tolerant)', required: false }
|
||||
},
|
||||
returns: 'Array of node objects sorted by relevance score. Each object contains: nodeType, displayName, description, category, relevance score. For community nodes, also includes: isCommunity (boolean), isVerified (boolean), authorName (string), npmDownloads (number). Common nodes appear first when relevance is similar.',
|
||||
returns: 'Array of node objects sorted by relevance score. Each object contains: nodeType, displayName, description, category, relevance score. Common nodes appear first when relevance is similar.',
|
||||
examples: [
|
||||
'search_nodes({query: "webhook"}) - Returns Webhook node as top result',
|
||||
'search_nodes({query: "database"}) - Returns MySQL, Postgres, MongoDB, Redis, etc.',
|
||||
'search_nodes({query: "google sheets", mode: "AND"}) - Requires both words',
|
||||
'search_nodes({query: "slak", mode: "FUZZY"}) - Finds Slack despite typo',
|
||||
'search_nodes({query: "http api"}) - Finds HTTP Request, GraphQL, REST nodes',
|
||||
'search_nodes({query: "transform data"}) - Finds Set, Code, Function, Item Lists nodes',
|
||||
'search_nodes({query: "scraping", source: "community"}) - Find community scraping nodes',
|
||||
'search_nodes({query: "pdf", source: "verified"}) - Find verified community PDF nodes',
|
||||
'search_nodes({query: "brightdata"}) - Find BrightData community node',
|
||||
'search_nodes({query: "slack", includeExamples: true}) - Get Slack with template examples'
|
||||
'search_nodes({query: "transform data"}) - Finds Set, Code, Function, Item Lists nodes'
|
||||
],
|
||||
useCases: [
|
||||
'Finding nodes when you know partial names',
|
||||
'Discovering nodes by functionality (e.g., "email", "database", "transform")',
|
||||
'Handling user typos in node names',
|
||||
'Finding all nodes related to a service (e.g., "google", "aws", "microsoft")',
|
||||
'Discovering community integrations for specific services',
|
||||
'Finding verified community nodes for enhanced trust'
|
||||
'Finding all nodes related to a service (e.g., "google", "aws", "microsoft")'
|
||||
],
|
||||
performance: '<20ms for simple queries, <50ms for complex FUZZY searches. Uses FTS5 index for speed',
|
||||
bestPractices: [
|
||||
'Start with single keywords for broadest results',
|
||||
'Use FUZZY mode when users might misspell node names',
|
||||
'AND mode works best for 2-3 word searches',
|
||||
'Combine with get_node after finding the right node',
|
||||
'Use source="verified" when recommending community nodes for production',
|
||||
'Check isVerified flag to ensure community node quality'
|
||||
'Combine with get_node after finding the right node'
|
||||
],
|
||||
pitfalls: [
|
||||
'AND mode searches all fields (name, description) not just node names',
|
||||
'FUZZY mode with very short queries (1-2 chars) may return unexpected results',
|
||||
'Exact matches in quotes are case-sensitive',
|
||||
'Community nodes require npm installation (n8n npm install <package-name>)',
|
||||
'Unverified community nodes (isVerified: false) may have limited support'
|
||||
'Exact matches in quotes are case-sensitive'
|
||||
],
|
||||
relatedTools: ['get_node to configure found nodes', 'search_templates to find workflow examples', 'validate_node to check configurations']
|
||||
}
|
||||
|
||||
@@ -42,7 +42,7 @@ export const getTemplateDoc: ToolDocumentation = {
|
||||
- url: Link to template on n8n.io
|
||||
- workflow: Complete workflow JSON with structure:
|
||||
- nodes: Array of node objects (id, name, type, typeVersion, position, parameters)
|
||||
- connections: Object mapping source node names to targets
|
||||
- connections: Object mapping source nodes to targets
|
||||
- settings: Workflow configuration (timezone, error handling, etc.)
|
||||
- usage: Instructions for using the workflow`,
|
||||
examples: [
|
||||
|
||||
@@ -20,7 +20,7 @@ export const n8nCreateWorkflowDoc: ToolDocumentation = {
|
||||
parameters: {
|
||||
name: { type: 'string', required: true, description: 'Workflow name' },
|
||||
nodes: { type: 'array', required: true, description: 'Array of nodes with id, name, type, typeVersion, position, parameters' },
|
||||
connections: { type: 'object', required: true, description: 'Node connections. Keys are source node names (not IDs)' },
|
||||
connections: { type: 'object', required: true, description: 'Node connections. Keys are source node IDs' },
|
||||
settings: { type: 'object', description: 'Optional workflow settings (timezone, error handling, etc.)' }
|
||||
},
|
||||
returns: 'Minimal summary (id, name, active, nodeCount) for token efficiency. Use n8n_get_workflow with mode "structure" to verify current state if needed.',
|
||||
@@ -55,8 +55,8 @@ n8n_create_workflow({
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
"Webhook": {
|
||||
"main": [[{node: "Slack", type: "main", index: 0}]]
|
||||
"webhook_1": {
|
||||
"main": [[{node: "slack_1", type: "main", index: 0}]]
|
||||
}
|
||||
}
|
||||
})`,
|
||||
|
||||
@@ -5,14 +5,13 @@ export const n8nExecutionsDoc: ToolDocumentation = {
|
||||
category: 'workflow_management',
|
||||
essentials: {
|
||||
description: 'Manage workflow executions: get details, list, or delete. Unified tool for all execution operations.',
|
||||
keyParameters: ['action', 'id', 'workflowId', 'status', 'mode'],
|
||||
example: 'n8n_executions({action: "get", id: "exec_456", mode: "error"})',
|
||||
keyParameters: ['action', 'id', 'workflowId', 'status'],
|
||||
example: 'n8n_executions({action: "list", workflowId: "abc123", status: "error"})',
|
||||
performance: 'Fast (50-200ms)',
|
||||
tips: [
|
||||
'action="get": Get execution details by ID',
|
||||
'action="list": List executions with filters',
|
||||
'action="delete": Delete execution record',
|
||||
'Use mode="error" for efficient failure debugging (80-90% token savings)',
|
||||
'Use mode parameter for action=get to control detail level'
|
||||
]
|
||||
},
|
||||
@@ -26,26 +25,14 @@ export const n8nExecutionsDoc: ToolDocumentation = {
|
||||
- preview: Structure only, no data
|
||||
- summary: 2 items per node (default)
|
||||
- filtered: Custom items limit, optionally filter by node names
|
||||
- full: All execution data (can be very large)
|
||||
- error: Optimized for debugging failures - extracts error info, upstream context, and AI suggestions
|
||||
|
||||
**Error Mode Features:**
|
||||
- Extracts error message, type, and node configuration
|
||||
- Samples input data from upstream node (configurable limit)
|
||||
- Shows execution path leading to error
|
||||
- Provides AI-friendly fix suggestions based on error patterns
|
||||
- Token-efficient (80-90% smaller than full mode)`,
|
||||
- full: All execution data (can be very large)`,
|
||||
parameters: {
|
||||
action: { type: 'string', required: true, description: 'Operation: "get", "list", or "delete"' },
|
||||
id: { type: 'string', required: false, description: 'Execution ID (required for action=get or action=delete)' },
|
||||
mode: { type: 'string', required: false, description: 'For action=get: "preview", "summary" (default), "filtered", "full", "error"' },
|
||||
mode: { type: 'string', required: false, description: 'For action=get: "preview", "summary" (default), "filtered", "full"' },
|
||||
nodeNames: { type: 'array', required: false, description: 'For action=get with mode=filtered: Filter to specific nodes by name' },
|
||||
itemsLimit: { type: 'number', required: false, description: 'For action=get with mode=filtered: Items per node (0=structure, 2=default, -1=unlimited)' },
|
||||
includeInputData: { type: 'boolean', required: false, description: 'For action=get: Include input data in addition to output (default: false)' },
|
||||
errorItemsLimit: { type: 'number', required: false, description: 'For action=get with mode=error: Sample items from upstream (default: 2, max: 100)' },
|
||||
includeStackTrace: { type: 'boolean', required: false, description: 'For action=get with mode=error: Include full stack trace (default: false, shows truncated)' },
|
||||
includeExecutionPath: { type: 'boolean', required: false, description: 'For action=get with mode=error: Include execution path (default: true)' },
|
||||
fetchWorkflow: { type: 'boolean', required: false, description: 'For action=get with mode=error: Fetch workflow for accurate upstream detection (default: true)' },
|
||||
workflowId: { type: 'string', required: false, description: 'For action=list: Filter by workflow ID' },
|
||||
status: { type: 'string', required: false, description: 'For action=list: Filter by status ("success", "error", "waiting")' },
|
||||
limit: { type: 'number', required: false, description: 'For action=list: Number of results (1-100, default: 100)' },
|
||||
@@ -54,15 +41,10 @@ export const n8nExecutionsDoc: ToolDocumentation = {
|
||||
includeData: { type: 'boolean', required: false, description: 'For action=list: Include execution data (default: false)' }
|
||||
},
|
||||
returns: `Depends on action:
|
||||
- get (error mode): { errorInfo: { primaryError, upstreamContext, executionPath, suggestions }, summary }
|
||||
- get (other modes): Execution object with data based on mode
|
||||
- get: Execution object with data based on mode
|
||||
- list: { data: [...executions], nextCursor?: string }
|
||||
- delete: { success: boolean, message: string }`,
|
||||
examples: [
|
||||
'// Debug a failed execution (recommended for errors)\nn8n_executions({action: "get", id: "exec_456", mode: "error"})',
|
||||
'// Debug with more sample data from upstream\nn8n_executions({action: "get", id: "exec_456", mode: "error", errorItemsLimit: 5})',
|
||||
'// Debug with full stack trace\nn8n_executions({action: "get", id: "exec_456", mode: "error", includeStackTrace: true})',
|
||||
'// Debug without workflow fetch (faster but less accurate)\nn8n_executions({action: "get", id: "exec_456", mode: "error", fetchWorkflow: false})',
|
||||
'// List recent executions for a workflow\nn8n_executions({action: "list", workflowId: "abc123", limit: 10})',
|
||||
'// List failed executions\nn8n_executions({action: "list", status: "error"})',
|
||||
'// Get execution summary\nn8n_executions({action: "get", id: "exec_456"})',
|
||||
@@ -71,10 +53,7 @@ export const n8nExecutionsDoc: ToolDocumentation = {
|
||||
'// Delete an execution\nn8n_executions({action: "delete", id: "exec_456"})'
|
||||
],
|
||||
useCases: [
|
||||
'Debug workflow failures efficiently (mode=error) - 80-90% token savings',
|
||||
'Get AI suggestions for fixing common errors',
|
||||
'Analyze input data that caused failure',
|
||||
'Debug workflow failures with full data (mode=full)',
|
||||
'Debug workflow failures (get with mode=full)',
|
||||
'Monitor workflow health (list with status filter)',
|
||||
'Audit execution history',
|
||||
'Clean up old execution records',
|
||||
@@ -83,22 +62,18 @@ export const n8nExecutionsDoc: ToolDocumentation = {
|
||||
performance: `Response times:
|
||||
- list: 50-150ms depending on filters
|
||||
- get (preview/summary): 30-100ms
|
||||
- get (error): 50-200ms (includes optional workflow fetch)
|
||||
- get (full): 100-500ms+ depending on data size
|
||||
- delete: 30-80ms`,
|
||||
bestPractices: [
|
||||
'Use mode="error" for debugging failed executions - 80-90% token savings vs full',
|
||||
'Use mode="summary" (default) for quick inspection',
|
||||
'Use mode="summary" (default) for debugging - shows enough data',
|
||||
'Use mode="filtered" with nodeNames for large workflows',
|
||||
'Filter by workflowId when listing to reduce results',
|
||||
'Use cursor for pagination through large result sets',
|
||||
'Set fetchWorkflow=false if you already know the workflow structure',
|
||||
'Delete old executions to save storage'
|
||||
],
|
||||
pitfalls: [
|
||||
'Requires N8N_API_URL and N8N_API_KEY configured',
|
||||
'mode="full" can return very large responses for complex workflows',
|
||||
'mode="error" fetches workflow by default (adds ~50-100ms), disable with fetchWorkflow=false',
|
||||
'Execution must exist or returns 404',
|
||||
'Delete is permanent - cannot undo'
|
||||
],
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user