mirror of
https://github.com/czlonkowski/n8n-mcp.git
synced 2026-03-17 07:53:08 +00:00
Compare commits
2 Commits
v2.36.2
...
feat/conne
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
edef5145ff | ||
|
|
bc1c00cc2e |
8
.github/workflows/dependency-check.yml
vendored
8
.github/workflows/dependency-check.yml
vendored
@@ -77,15 +77,15 @@ jobs:
|
||||
echo "Zod version: $ZOD_VERSION"
|
||||
echo ""
|
||||
|
||||
# Check MCP SDK version - must be exactly 1.27.1
|
||||
# Check MCP SDK version - must be exactly 1.20.1
|
||||
if [[ "$SDK_VERSION" == "not found" ]]; then
|
||||
echo "❌ FAILED: Could not determine MCP SDK version!"
|
||||
echo " The dependency may not have been installed correctly."
|
||||
exit 1
|
||||
fi
|
||||
if [[ "$SDK_VERSION" != "1.27.1" ]]; then
|
||||
if [[ "$SDK_VERSION" != "1.20.1" ]]; then
|
||||
echo "❌ FAILED: MCP SDK version mismatch!"
|
||||
echo " Expected: 1.27.1"
|
||||
echo " Expected: 1.20.1"
|
||||
echo " Got: $SDK_VERSION"
|
||||
echo ""
|
||||
echo "This can cause runtime errors. See issues #440, #444, #446, #447, #450"
|
||||
@@ -100,7 +100,7 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
if [[ "$ZOD_VERSION" =~ ^4\. ]]; then
|
||||
echo "❌ FAILED: Zod v4 detected - incompatible with MCP SDK 1.27.1!"
|
||||
echo "❌ FAILED: Zod v4 detected - incompatible with MCP SDK 1.20.1!"
|
||||
echo " Expected: 3.x"
|
||||
echo " Got: $ZOD_VERSION"
|
||||
echo ""
|
||||
|
||||
28
CHANGELOG.md
28
CHANGELOG.md
@@ -7,34 +7,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
## [2.36.2] - 2026-03-14
|
||||
|
||||
### Changed
|
||||
|
||||
- **Updated n8n dependencies**: n8n 2.10.3 → 2.11.4, n8n-core 2.10.1 → 2.11.1, n8n-workflow 2.10.1 → 2.11.1, @n8n/n8n-nodes-langchain 2.10.1 → 2.11.2
|
||||
- **Updated @modelcontextprotocol/sdk**: 1.20.1 → 1.27.1 (fixes critical cross-client data leak vulnerability CVE GHSA-345p-7cg4-v4c7)
|
||||
- Rebuilt node database with 1,239 nodes (809 core + 430 community preserved)
|
||||
- Updated README badge with new n8n version and node counts
|
||||
|
||||
Conceived by Romuald Czlonkowski - https://www.aiadvisors.pl/en
|
||||
|
||||
## [2.36.1] - 2026-03-08
|
||||
|
||||
### Added
|
||||
|
||||
- **Conditional branch fan-out detection** (`CONDITIONAL_BRANCH_FANOUT`): Warns when IF, Filter, or Switch nodes have all connections crammed into `main[0]` with higher-index outputs empty, which usually means all target nodes execute together on one branch while other branches have no effect
|
||||
- Detects IF nodes with both true/false targets on `main[0]`
|
||||
- Detects Filter nodes with both matched/unmatched targets on `main[0]`
|
||||
- Detects Switch nodes with all targets on output 0 and other outputs unused
|
||||
- Skips warning when fan-out is legitimate (higher outputs also have connections)
|
||||
- Skips warning for single connections (intentional true-only/matched-only usage)
|
||||
|
||||
### Changed
|
||||
|
||||
- **Refactored output index validation**: Extracted `getShortNodeType()` and `getConditionalOutputInfo()` helpers to eliminate duplicated conditional node detection logic between `validateOutputIndexBounds` and the new `validateConditionalBranchUsage`
|
||||
|
||||
Conceived by Romuald Czlonkowski - https://www.aiadvisors.pl/en
|
||||
|
||||
## [2.36.0] - 2026-03-07
|
||||
|
||||
### Added
|
||||
|
||||
@@ -5,11 +5,11 @@
|
||||
[](https://www.npmjs.com/package/n8n-mcp)
|
||||
[](https://codecov.io/gh/czlonkowski/n8n-mcp)
|
||||
[](https://github.com/czlonkowski/n8n-mcp/actions)
|
||||
[](https://github.com/n8n-io/n8n)
|
||||
[](https://github.com/n8n-io/n8n)
|
||||
[](https://github.com/czlonkowski/n8n-mcp/pkgs/container/n8n-mcp)
|
||||
[](https://railway.com/deploy/n8n-mcp?referralCode=n8n-mcp)
|
||||
|
||||
A Model Context Protocol (MCP) server that provides AI assistants with comprehensive access to n8n node documentation, properties, and operations. Deploy in minutes to give Claude and other AI assistants deep knowledge about n8n's 1,239 workflow automation nodes (809 core + 430 community).
|
||||
A Model Context Protocol (MCP) server that provides AI assistants with comprehensive access to n8n node documentation, properties, and operations. Deploy in minutes to give Claude and other AI assistants deep knowledge about n8n's 1,236 workflow automation nodes (806 core + 430 community).
|
||||
|
||||
## Overview
|
||||
|
||||
|
||||
BIN
data/nodes.db
BIN
data/nodes.db
Binary file not shown.
2
dist/mcp/handlers-n8n-manager.d.ts.map
vendored
2
dist/mcp/handlers-n8n-manager.d.ts.map
vendored
@@ -1 +1 @@
|
||||
{"version":3,"file":"handlers-n8n-manager.d.ts","sourceRoot":"","sources":["../../src/mcp/handlers-n8n-manager.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAE1D,OAAO,EAML,eAAe,EAGhB,MAAM,kBAAkB,CAAC;AAkB1B,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,eAAe,EAA2B,MAAM,2BAA2B,CAAC;AAOrF,OAAO,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAqNhE,wBAAgB,0BAA0B,IAAI,MAAM,CAEnD;AAMD,wBAAgB,uBAAuB,gDAEtC;AAKD,wBAAgB,kBAAkB,IAAI,IAAI,CAIzC;AAED,wBAAgB,eAAe,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,YAAY,GAAG,IAAI,CAgF9E;AA2HD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmF7G;AAED,wBAAsB,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC1G;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAoDjH;AAED,wBAAsB,0BAA0B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmDnH;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyCjH;AAED,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA8H1B;AAeD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAsC7G;AAED,wBAAsB,mBAAmB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiE5G;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA0F1B;AAED,wBAAsB,qBAAqB,CACzC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoK1B;AAQD,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAwJ3G;AAED,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CA8H3G;AAED,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAgD7G;AAED,wBAAsB,qBAAqB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC9G;AAID,wBAAsB,iBAAiB,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe
,CAAC,CAwG3F;AAkLD,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAkQxG;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAsL1B;AA+BD,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,eAAe,EAAE,eAAe,EAChC,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoM1B;AAQD,wBAAsB,4BAA4B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyErH"}
|
||||
{"version":3,"file":"handlers-n8n-manager.d.ts","sourceRoot":"","sources":["../../src/mcp/handlers-n8n-manager.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAE1D,OAAO,EAML,eAAe,EAGhB,MAAM,kBAAkB,CAAC;AAkB1B,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,eAAe,EAA2B,MAAM,2BAA2B,CAAC;AAOrF,OAAO,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAqNhE,wBAAgB,0BAA0B,IAAI,MAAM,CAEnD;AAMD,wBAAgB,uBAAuB,gDAEtC;AAKD,wBAAgB,kBAAkB,IAAI,IAAI,CAIzC;AAED,wBAAgB,eAAe,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,YAAY,GAAG,IAAI,CAgF9E;AAqHD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmF7G;AAED,wBAAsB,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC1G;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAoDjH;AAED,wBAAsB,0BAA0B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmDnH;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyCjH;AAED,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA8H1B;AAeD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAsC7G;AAED,wBAAsB,mBAAmB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiE5G;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA0F1B;AAED,wBAAsB,qBAAqB,CACzC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoK1B;AAQD,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAwJ3G;AAED,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CA8H3G;AAED,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAgD7G;AAED,wBAAsB,qBAAqB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC9G;AAID,wBAAsB,iBAAiB,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe
,CAAC,CAwG3F;AAkLD,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAkQxG;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAsL1B;AA+BD,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,eAAe,EAAE,eAAe,EAChC,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoM1B;AAQD,wBAAsB,4BAA4B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyErH"}
|
||||
8
dist/mcp/handlers-n8n-manager.js
vendored
8
dist/mcp/handlers-n8n-manager.js
vendored
@@ -212,13 +212,7 @@ const autofixWorkflowSchema = zod_1.z.object({
|
||||
'node-type-correction',
|
||||
'webhook-missing-path',
|
||||
'typeversion-upgrade',
|
||||
'version-migration',
|
||||
'tool-variant-correction',
|
||||
'connection-numeric-keys',
|
||||
'connection-invalid-type',
|
||||
'connection-id-to-name',
|
||||
'connection-duplicate-removal',
|
||||
'connection-input-index'
|
||||
'version-migration'
|
||||
])).optional(),
|
||||
confidenceThreshold: zod_1.z.enum(['high', 'medium', 'low']).optional().default('medium'),
|
||||
maxFixes: zod_1.z.number().optional().default(50)
|
||||
|
||||
2
dist/mcp/handlers-n8n-manager.js.map
vendored
2
dist/mcp/handlers-n8n-manager.js.map
vendored
File diff suppressed because one or more lines are too long
4
dist/mcp/tools-n8n-manager.js
vendored
4
dist/mcp/tools-n8n-manager.js
vendored
@@ -278,7 +278,7 @@ exports.n8nManagementTools = [
|
||||
},
|
||||
{
|
||||
name: 'n8n_autofix_workflow',
|
||||
description: `Automatically fix common workflow validation errors. Preview fixes or apply them. Fixes expression format, typeVersion, error output config, webhook paths, connection structure issues (numeric keys, invalid types, ID-to-name, duplicates, out-of-bounds indices).`,
|
||||
description: `Automatically fix common workflow validation errors. Preview fixes or apply them. Fixes expression format, typeVersion, error output config, webhook paths.`,
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
@@ -295,7 +295,7 @@ exports.n8nManagementTools = [
|
||||
description: 'Types of fixes to apply (default: all)',
|
||||
items: {
|
||||
type: 'string',
|
||||
enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path', 'typeversion-upgrade', 'version-migration', 'tool-variant-correction', 'connection-numeric-keys', 'connection-invalid-type', 'connection-id-to-name', 'connection-duplicate-removal', 'connection-input-index']
|
||||
enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path', 'typeversion-upgrade', 'version-migration']
|
||||
}
|
||||
},
|
||||
confidenceThreshold: {
|
||||
|
||||
2
dist/mcp/tools-n8n-manager.js.map
vendored
2
dist/mcp/tools-n8n-manager.js.map
vendored
File diff suppressed because one or more lines are too long
9
dist/services/workflow-auto-fixer.d.ts
vendored
9
dist/services/workflow-auto-fixer.d.ts
vendored
@@ -5,8 +5,7 @@ import { WorkflowDiffOperation } from '../types/workflow-diff';
|
||||
import { Workflow } from '../types/n8n-api';
|
||||
import { PostUpdateGuidance } from './post-update-validator';
|
||||
export type FixConfidenceLevel = 'high' | 'medium' | 'low';
|
||||
export type FixType = 'expression-format' | 'typeversion-correction' | 'error-output-config' | 'node-type-correction' | 'webhook-missing-path' | 'typeversion-upgrade' | 'version-migration' | 'tool-variant-correction' | 'connection-numeric-keys' | 'connection-invalid-type' | 'connection-id-to-name' | 'connection-duplicate-removal' | 'connection-input-index';
|
||||
export declare const CONNECTION_FIX_TYPES: FixType[];
|
||||
export type FixType = 'expression-format' | 'typeversion-correction' | 'error-output-config' | 'node-type-correction' | 'webhook-missing-path' | 'typeversion-upgrade' | 'version-migration' | 'tool-variant-correction';
|
||||
export interface AutoFixConfig {
|
||||
applyFixes: boolean;
|
||||
fixTypes?: FixType[];
|
||||
@@ -69,12 +68,6 @@ export declare class WorkflowAutoFixer {
|
||||
private filterOperationsByFixes;
|
||||
private calculateStats;
|
||||
private generateSummary;
|
||||
private processConnectionFixes;
|
||||
private fixNumericKeys;
|
||||
private fixIdToName;
|
||||
private fixInvalidTypes;
|
||||
private fixInputIndices;
|
||||
private fixDuplicateConnections;
|
||||
private processVersionUpgradeFixes;
|
||||
private processVersionMigrationFixes;
|
||||
}
|
||||
|
||||
2
dist/services/workflow-auto-fixer.d.ts.map
vendored
2
dist/services/workflow-auto-fixer.d.ts.map
vendored
@@ -1 +1 @@
|
||||
{"version":3,"file":"workflow-auto-fixer.d.ts","sourceRoot":"","sources":["../../src/services/workflow-auto-fixer.ts"],"names":[],"mappings":"AAQA,OAAO,EAAE,wBAAwB,EAA0B,MAAM,sBAAsB,CAAC;AACxF,OAAO,EAAE,qBAAqB,EAAE,MAAM,+BAA+B,CAAC;AAEtE,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EACL,qBAAqB,EAGtB,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAgB,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAK1D,OAAO,EAAuB,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAIlF,MAAM,MAAM,kBAAkB,GAAG,MAAM,GAAG,QAAQ,GAAG,KAAK,CAAC;AAC3D,MAAM,MAAM,OAAO,GACf,mBAAmB,GACnB,wBAAwB,GACxB,qBAAqB,GACrB,sBAAsB,GACtB,sBAAsB,GACtB,qBAAqB,GACrB,mBAAmB,GACnB,yBAAyB,GACzB,yBAAyB,GACzB,yBAAyB,GACzB,uBAAuB,GACvB,8BAA8B,GAC9B,wBAAwB,CAAC;AAE7B,eAAO,MAAM,oBAAoB,EAAE,OAAO,EAMzC,CAAC;AAEF,MAAM,WAAW,aAAa;IAC5B,UAAU,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,mBAAmB,CAAC,EAAE,kBAAkB,CAAC;IACzC,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,OAAO,CAAC;IACd,MAAM,EAAE,GAAG,CAAC;IACZ,KAAK,EAAE,GAAG,CAAC;IACX,UAAU,EAAE,kBAAkB,CAAC;IAC/B,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,aAAa;IAC5B,UAAU,EAAE,qBAAqB,EAAE,CAAC;IACpC,KAAK,EAAE,YAAY,EAAE,CAAC;IACtB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE;QACL,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAChC,YAAY,EAAE,MAAM,CAAC,kBAAkB,EAAE,MAAM,CAAC,CAAC;KAClD,CAAC;IACF,kBAAkB,CAAC,EAAE,kBAAkB,EAAE,CAAC;CAC3C;AAED,MAAM,WAAW,eAAgB,SAAQ,qBAAqB;IAC5D,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;CAChB;AAKD,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,qBAAqB,GAAG,KAAK,IAAI,eAAe,CAIxF;AAKD,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,OAAO,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,WAAW,CAAC,EAAE,KAAK,CAAC;QAClB,QAAQ,EAAE,MAAM,CAAC;QACjB,UAAU,EAAE,MAAM,CAAC;QACnB,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC,CAAC;CACJ;AAED,qBAAa,iBAAiB;IAC5B,OAAO,CAAC,QAAQ,CAAC,aAAa,CAI5B;IACF,OAAO,CAAC,iBAAiB,CAAsC;IAC/D,OAAO,CAAC,cAAc,CAAmC;IACzD,OAAO,CAAC,sBAAsB,CAAuC;IACrE,OAAO,CAAC,gBAAgB,CAAqC;IAC7D,OAAO,CAAC,mBAAmB,CAAoC;gBAEnD
,UAAU,CAAC,EAAE,cAAc;IAajC,aAAa,CACjB,QAAQ,EAAE,QAAQ,EAClB,gBAAgB,EAAE,wBAAwB,EAC1C,YAAY,GAAE,qBAAqB,EAAO,EAC1C,MAAM,GAAE,OAAO,CAAC,aAAa,CAAM,GAClC,OAAO,CAAC,aAAa,CAAC;IAgFzB,OAAO,CAAC,4BAA4B;IAqEpC,OAAO,CAAC,uBAAuB;IA8C/B,OAAO,CAAC,uBAAuB;IA0C/B,OAAO,CAAC,oBAAoB;IAkD5B,OAAO,CAAC,uBAAuB;IAwE/B,OAAO,CAAC,uBAAuB;IAsD/B,OAAO,CAAC,cAAc;IAmGtB,OAAO,CAAC,kBAAkB;IAkB1B,OAAO,CAAC,uBAAuB;IAqB/B,OAAO,CAAC,cAAc;IAoCtB,OAAO,CAAC,eAAe;IAwDvB,OAAO,CAAC,sBAAsB;IAgF9B,OAAO,CAAC,cAAc;IA+DtB,OAAO,CAAC,WAAW;IA6EnB,OAAO,CAAC,eAAe;IAqCvB,OAAO,CAAC,eAAe;IA4DvB,OAAO,CAAC,uBAAuB;YA6CjB,0BAA0B;YAmF1B,4BAA4B;CAiF3C"}
|
||||
{"version":3,"file":"workflow-auto-fixer.d.ts","sourceRoot":"","sources":["../../src/services/workflow-auto-fixer.ts"],"names":[],"mappings":"AAQA,OAAO,EAAE,wBAAwB,EAAE,MAAM,sBAAsB,CAAC;AAChE,OAAO,EAAE,qBAAqB,EAAE,MAAM,+BAA+B,CAAC;AAEtE,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EACL,qBAAqB,EAEtB,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAgB,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAK1D,OAAO,EAAuB,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAIlF,MAAM,MAAM,kBAAkB,GAAG,MAAM,GAAG,QAAQ,GAAG,KAAK,CAAC;AAC3D,MAAM,MAAM,OAAO,GACf,mBAAmB,GACnB,wBAAwB,GACxB,qBAAqB,GACrB,sBAAsB,GACtB,sBAAsB,GACtB,qBAAqB,GACrB,mBAAmB,GACnB,yBAAyB,CAAC;AAE9B,MAAM,WAAW,aAAa;IAC5B,UAAU,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,mBAAmB,CAAC,EAAE,kBAAkB,CAAC;IACzC,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,OAAO,CAAC;IACd,MAAM,EAAE,GAAG,CAAC;IACZ,KAAK,EAAE,GAAG,CAAC;IACX,UAAU,EAAE,kBAAkB,CAAC;IAC/B,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,aAAa;IAC5B,UAAU,EAAE,qBAAqB,EAAE,CAAC;IACpC,KAAK,EAAE,YAAY,EAAE,CAAC;IACtB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE;QACL,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAChC,YAAY,EAAE,MAAM,CAAC,kBAAkB,EAAE,MAAM,CAAC,CAAC;KAClD,CAAC;IACF,kBAAkB,CAAC,EAAE,kBAAkB,EAAE,CAAC;CAC3C;AAED,MAAM,WAAW,eAAgB,SAAQ,qBAAqB;IAC5D,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;CAChB;AAKD,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,qBAAqB,GAAG,KAAK,IAAI,eAAe,CAIxF;AAKD,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,OAAO,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,WAAW,CAAC,EAAE,KAAK,CAAC;QAClB,QAAQ,EAAE,MAAM,CAAC;QACjB,UAAU,EAAE,MAAM,CAAC;QACnB,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC,CAAC;CACJ;AAED,qBAAa,iBAAiB;IAC5B,OAAO,CAAC,QAAQ,CAAC,aAAa,CAI5B;IACF,OAAO,CAAC,iBAAiB,CAAsC;IAC/D,OAAO,CAAC,cAAc,CAAmC;IACzD,OAAO,CAAC,sBAAsB,CAAuC;IACrE,OAAO,CAAC,gBAAgB,CAAqC;IAC7D,OAAO,CAAC,mBAAmB,CAAoC;gBAEnD,UAAU,CAAC,EAAE,cAAc;IAajC,aAAa,CACjB,QAAQ,EAAE,QAAQ,EAClB,gBAAgB,EAAE,wBAAwB,EAC1C,YAAY,GAAE,qBAAqB,EAAO,EAC
1C,MAAM,GAAE,OAAO,CAAC,aAAa,CAAM,GAClC,OAAO,CAAC,aAAa,CAAC;IA6EzB,OAAO,CAAC,4BAA4B;IAqEpC,OAAO,CAAC,uBAAuB;IA8C/B,OAAO,CAAC,uBAAuB;IA0C/B,OAAO,CAAC,oBAAoB;IAkD5B,OAAO,CAAC,uBAAuB;IAwE/B,OAAO,CAAC,uBAAuB;IAsD/B,OAAO,CAAC,cAAc;IAmGtB,OAAO,CAAC,kBAAkB;IAkB1B,OAAO,CAAC,uBAAuB;IAiB/B,OAAO,CAAC,cAAc;IA+BtB,OAAO,CAAC,eAAe;YA4CT,0BAA0B;YAmF1B,4BAA4B;CAiF3C"}
|
||||
310
dist/services/workflow-auto-fixer.js
vendored
310
dist/services/workflow-auto-fixer.js
vendored
@@ -3,10 +3,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.WorkflowAutoFixer = exports.CONNECTION_FIX_TYPES = void 0;
|
||||
exports.WorkflowAutoFixer = void 0;
|
||||
exports.isNodeFormatIssue = isNodeFormatIssue;
|
||||
const crypto_1 = __importDefault(require("crypto"));
|
||||
const workflow_validator_1 = require("./workflow-validator");
|
||||
const node_similarity_service_1 = require("./node-similarity-service");
|
||||
const logger_1 = require("../utils/logger");
|
||||
const node_version_service_1 = require("./node-version-service");
|
||||
@@ -14,13 +13,6 @@ const breaking_change_detector_1 = require("./breaking-change-detector");
|
||||
const node_migration_service_1 = require("./node-migration-service");
|
||||
const post_update_validator_1 = require("./post-update-validator");
|
||||
const logger = new logger_1.Logger({ prefix: '[WorkflowAutoFixer]' });
|
||||
exports.CONNECTION_FIX_TYPES = [
|
||||
'connection-numeric-keys',
|
||||
'connection-invalid-type',
|
||||
'connection-id-to-name',
|
||||
'connection-duplicate-removal',
|
||||
'connection-input-index'
|
||||
];
|
||||
function isNodeFormatIssue(issue) {
|
||||
return 'nodeName' in issue && 'nodeId' in issue &&
|
||||
typeof issue.nodeName === 'string' &&
|
||||
@@ -80,7 +72,6 @@ class WorkflowAutoFixer {
|
||||
if (!fullConfig.fixTypes || fullConfig.fixTypes.includes('version-migration')) {
|
||||
await this.processVersionMigrationFixes(workflow, nodeMap, operations, fixes, postUpdateGuidance);
|
||||
}
|
||||
this.processConnectionFixes(workflow, validationResult, fullConfig, operations, fixes);
|
||||
const filteredFixes = this.filterByConfidence(fixes, fullConfig.confidenceThreshold);
|
||||
const filteredOperations = this.filterOperationsByFixes(operations, filteredFixes, fixes);
|
||||
const limitedFixes = filteredFixes.slice(0, fullConfig.maxFixes);
|
||||
@@ -402,14 +393,10 @@ class WorkflowAutoFixer {
|
||||
}
|
||||
filterOperationsByFixes(operations, filteredFixes, allFixes) {
|
||||
const fixedNodes = new Set(filteredFixes.map(f => f.node));
|
||||
const hasConnectionFixes = filteredFixes.some(f => exports.CONNECTION_FIX_TYPES.includes(f.type));
|
||||
return operations.filter(op => {
|
||||
if (op.type === 'updateNode') {
|
||||
return fixedNodes.has(op.nodeId || '');
|
||||
}
|
||||
if (op.type === 'replaceConnections') {
|
||||
return hasConnectionFixes;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
@@ -424,12 +411,7 @@ class WorkflowAutoFixer {
|
||||
'webhook-missing-path': 0,
|
||||
'typeversion-upgrade': 0,
|
||||
'version-migration': 0,
|
||||
'tool-variant-correction': 0,
|
||||
'connection-numeric-keys': 0,
|
||||
'connection-invalid-type': 0,
|
||||
'connection-id-to-name': 0,
|
||||
'connection-duplicate-removal': 0,
|
||||
'connection-input-index': 0
|
||||
'tool-variant-correction': 0
|
||||
},
|
||||
byConfidence: {
|
||||
'high': 0,
|
||||
@@ -472,299 +454,11 @@ class WorkflowAutoFixer {
|
||||
if (stats.byType['tool-variant-correction'] > 0) {
|
||||
parts.push(`${stats.byType['tool-variant-correction']} tool variant ${stats.byType['tool-variant-correction'] === 1 ? 'correction' : 'corrections'}`);
|
||||
}
|
||||
const connectionIssueCount = (stats.byType['connection-numeric-keys'] || 0) +
|
||||
(stats.byType['connection-invalid-type'] || 0) +
|
||||
(stats.byType['connection-id-to-name'] || 0) +
|
||||
(stats.byType['connection-duplicate-removal'] || 0) +
|
||||
(stats.byType['connection-input-index'] || 0);
|
||||
if (connectionIssueCount > 0) {
|
||||
parts.push(`${connectionIssueCount} connection ${connectionIssueCount === 1 ? 'issue' : 'issues'}`);
|
||||
}
|
||||
if (parts.length === 0) {
|
||||
return `Fixed ${stats.total} ${stats.total === 1 ? 'issue' : 'issues'}`;
|
||||
}
|
||||
return `Fixed ${parts.join(', ')}`;
|
||||
}
|
||||
processConnectionFixes(workflow, validationResult, config, operations, fixes) {
|
||||
if (!workflow.connections || Object.keys(workflow.connections).length === 0) {
|
||||
return;
|
||||
}
|
||||
const idToNameMap = new Map();
|
||||
const nameSet = new Set();
|
||||
for (const node of workflow.nodes) {
|
||||
idToNameMap.set(node.id, node.name);
|
||||
nameSet.add(node.name);
|
||||
}
|
||||
const conn = JSON.parse(JSON.stringify(workflow.connections));
|
||||
let anyFixed = false;
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-numeric-keys')) {
|
||||
const numericKeyResult = this.fixNumericKeys(conn);
|
||||
if (numericKeyResult.length > 0) {
|
||||
fixes.push(...numericKeyResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-id-to-name')) {
|
||||
const idToNameResult = this.fixIdToName(conn, idToNameMap, nameSet);
|
||||
if (idToNameResult.length > 0) {
|
||||
fixes.push(...idToNameResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-invalid-type')) {
|
||||
const invalidTypeResult = this.fixInvalidTypes(conn);
|
||||
if (invalidTypeResult.length > 0) {
|
||||
fixes.push(...invalidTypeResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-input-index')) {
|
||||
const inputIndexResult = this.fixInputIndices(conn, validationResult, workflow);
|
||||
if (inputIndexResult.length > 0) {
|
||||
fixes.push(...inputIndexResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-duplicate-removal')) {
|
||||
const dedupResult = this.fixDuplicateConnections(conn);
|
||||
if (dedupResult.length > 0) {
|
||||
fixes.push(...dedupResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
if (anyFixed) {
|
||||
const op = {
|
||||
type: 'replaceConnections',
|
||||
connections: conn
|
||||
};
|
||||
operations.push(op);
|
||||
}
|
||||
}
|
||||
fixNumericKeys(conn) {
|
||||
const fixes = [];
|
||||
const sourceNodes = Object.keys(conn);
|
||||
for (const sourceName of sourceNodes) {
|
||||
const nodeConn = conn[sourceName];
|
||||
const numericKeys = Object.keys(nodeConn).filter(k => /^\d+$/.test(k));
|
||||
if (numericKeys.length === 0)
|
||||
continue;
|
||||
if (!nodeConn['main']) {
|
||||
nodeConn['main'] = [];
|
||||
}
|
||||
for (const numKey of numericKeys) {
|
||||
const index = parseInt(numKey, 10);
|
||||
const entries = nodeConn[numKey];
|
||||
while (nodeConn['main'].length <= index) {
|
||||
nodeConn['main'].push([]);
|
||||
}
|
||||
const hadExisting = nodeConn['main'][index] && nodeConn['main'][index].length > 0;
|
||||
if (Array.isArray(entries)) {
|
||||
for (const outputGroup of entries) {
|
||||
if (Array.isArray(outputGroup)) {
|
||||
nodeConn['main'][index] = [
|
||||
...nodeConn['main'][index],
|
||||
...outputGroup
|
||||
];
|
||||
}
|
||||
}
|
||||
}
|
||||
if (hadExisting) {
|
||||
logger.warn(`Merged numeric key "${numKey}" into existing main[${index}] on node "${sourceName}" - dedup pass will clean exact duplicates`);
|
||||
}
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${numKey}`,
|
||||
type: 'connection-numeric-keys',
|
||||
before: numKey,
|
||||
after: `main[${index}]`,
|
||||
confidence: hadExisting ? 'medium' : 'high',
|
||||
description: hadExisting
|
||||
? `Merged numeric connection key "${numKey}" into existing main[${index}] on node "${sourceName}"`
|
||||
: `Converted numeric connection key "${numKey}" to main[${index}] on node "${sourceName}"`
|
||||
});
|
||||
delete nodeConn[numKey];
|
||||
}
|
||||
}
|
||||
return fixes;
|
||||
}
|
||||
fixIdToName(conn, idToNameMap, nameSet) {
|
||||
const fixes = [];
|
||||
const renames = [];
|
||||
const sourceKeys = Object.keys(conn);
|
||||
for (const sourceKey of sourceKeys) {
|
||||
if (idToNameMap.has(sourceKey) && !nameSet.has(sourceKey)) {
|
||||
renames.push({ oldKey: sourceKey, newKey: idToNameMap.get(sourceKey) });
|
||||
}
|
||||
}
|
||||
const newKeyCount = new Map();
|
||||
for (const r of renames) {
|
||||
newKeyCount.set(r.newKey, (newKeyCount.get(r.newKey) || 0) + 1);
|
||||
}
|
||||
const safeRenames = renames.filter(r => {
|
||||
if ((newKeyCount.get(r.newKey) || 0) > 1) {
|
||||
logger.warn(`Skipping ambiguous ID-to-name rename: "${r.oldKey}" → "${r.newKey}" (multiple IDs map to same name)`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
for (const { oldKey, newKey } of safeRenames) {
|
||||
conn[newKey] = conn[oldKey];
|
||||
delete conn[oldKey];
|
||||
fixes.push({
|
||||
node: newKey,
|
||||
field: `connections.sourceKey`,
|
||||
type: 'connection-id-to-name',
|
||||
before: oldKey,
|
||||
after: newKey,
|
||||
confidence: 'high',
|
||||
description: `Replaced node ID "${oldKey}" with name "${newKey}" as connection source key`
|
||||
});
|
||||
}
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs))
|
||||
continue;
|
||||
for (const outputGroup of outputs) {
|
||||
if (!Array.isArray(outputGroup))
|
||||
continue;
|
||||
for (const entry of outputGroup) {
|
||||
if (entry && entry.node && idToNameMap.has(entry.node) && !nameSet.has(entry.node)) {
|
||||
const oldNode = entry.node;
|
||||
const newNode = idToNameMap.get(entry.node);
|
||||
entry.node = newNode;
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[].node`,
|
||||
type: 'connection-id-to-name',
|
||||
before: oldNode,
|
||||
after: newNode,
|
||||
confidence: 'high',
|
||||
description: `Replaced target node ID "${oldNode}" with name "${newNode}" in connection from "${sourceName}"`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return fixes;
|
||||
}
|
||||
fixInvalidTypes(conn) {
|
||||
const fixes = [];
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs))
|
||||
continue;
|
||||
for (const outputGroup of outputs) {
|
||||
if (!Array.isArray(outputGroup))
|
||||
continue;
|
||||
for (const entry of outputGroup) {
|
||||
if (entry && entry.type && !workflow_validator_1.VALID_CONNECTION_TYPES.has(entry.type)) {
|
||||
const oldType = entry.type;
|
||||
const newType = workflow_validator_1.VALID_CONNECTION_TYPES.has(outputKey) ? outputKey : 'main';
|
||||
entry.type = newType;
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[].type`,
|
||||
type: 'connection-invalid-type',
|
||||
before: oldType,
|
||||
after: newType,
|
||||
confidence: 'high',
|
||||
description: `Fixed invalid connection type "${oldType}" → "${newType}" in connection from "${sourceName}" to "${entry.node}"`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return fixes;
|
||||
}
|
||||
fixInputIndices(conn, validationResult, workflow) {
|
||||
const fixes = [];
|
||||
for (const error of validationResult.errors) {
|
||||
if (error.code !== 'INPUT_INDEX_OUT_OF_BOUNDS')
|
||||
continue;
|
||||
const targetNodeName = error.nodeName;
|
||||
if (!targetNodeName)
|
||||
continue;
|
||||
const match = error.message.match(/Input index (\d+).*?has (\d+) main input/);
|
||||
if (!match) {
|
||||
logger.warn(`Could not parse INPUT_INDEX_OUT_OF_BOUNDS error for node "${targetNodeName}": ${error.message}`);
|
||||
continue;
|
||||
}
|
||||
const badIndex = parseInt(match[1], 10);
|
||||
const inputCount = parseInt(match[2], 10);
|
||||
const clampedIndex = inputCount > 1 ? Math.min(badIndex, inputCount - 1) : 0;
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs))
|
||||
continue;
|
||||
for (const outputGroup of outputs) {
|
||||
if (!Array.isArray(outputGroup))
|
||||
continue;
|
||||
for (const entry of outputGroup) {
|
||||
if (entry && entry.node === targetNodeName && entry.index === badIndex) {
|
||||
entry.index = clampedIndex;
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[].index`,
|
||||
type: 'connection-input-index',
|
||||
before: badIndex,
|
||||
after: clampedIndex,
|
||||
confidence: 'medium',
|
||||
description: `Clamped input index ${badIndex} → ${clampedIndex} for target node "${targetNodeName}" (has ${inputCount} input${inputCount === 1 ? '' : 's'})`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return fixes;
|
||||
}
|
||||
fixDuplicateConnections(conn) {
|
||||
const fixes = [];
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs))
|
||||
continue;
|
||||
for (let i = 0; i < outputs.length; i++) {
|
||||
const outputGroup = outputs[i];
|
||||
if (!Array.isArray(outputGroup))
|
||||
continue;
|
||||
const seen = new Set();
|
||||
const deduped = [];
|
||||
for (const entry of outputGroup) {
|
||||
const key = JSON.stringify({ node: entry.node, type: entry.type, index: entry.index });
|
||||
if (seen.has(key)) {
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[${i}]`,
|
||||
type: 'connection-duplicate-removal',
|
||||
before: entry,
|
||||
after: null,
|
||||
confidence: 'high',
|
||||
description: `Removed duplicate connection from "${sourceName}" to "${entry.node}" (type: ${entry.type}, index: ${entry.index})`
|
||||
});
|
||||
}
|
||||
else {
|
||||
seen.add(key);
|
||||
deduped.push(entry);
|
||||
}
|
||||
}
|
||||
outputs[i] = deduped;
|
||||
}
|
||||
}
|
||||
}
|
||||
return fixes;
|
||||
}
|
||||
async processVersionUpgradeFixes(workflow, nodeMap, operations, fixes, postUpdateGuidance) {
|
||||
if (!this.versionService || !this.migrationService || !this.postUpdateValidator) {
|
||||
logger.warn('Version services not initialized. Skipping version upgrade fixes.');
|
||||
|
||||
2
dist/services/workflow-auto-fixer.js.map
vendored
2
dist/services/workflow-auto-fixer.js.map
vendored
File diff suppressed because one or more lines are too long
6
dist/services/workflow-validator.d.ts
vendored
6
dist/services/workflow-validator.d.ts
vendored
@@ -1,6 +1,5 @@
|
||||
import { NodeRepository } from '../database/node-repository';
|
||||
import { EnhancedConfigValidator } from './enhanced-config-validator';
|
||||
export declare const VALID_CONNECTION_TYPES: Set<string>;
|
||||
interface WorkflowNode {
|
||||
id: string;
|
||||
name: string;
|
||||
@@ -85,12 +84,7 @@ export declare class WorkflowValidator {
|
||||
private validateErrorOutputConfiguration;
|
||||
private validateAIToolConnection;
|
||||
private validateAIToolSource;
|
||||
private getNodeOutputTypes;
|
||||
private validateNotAISubNode;
|
||||
private getShortNodeType;
|
||||
private getConditionalOutputInfo;
|
||||
private validateOutputIndexBounds;
|
||||
private validateConditionalBranchUsage;
|
||||
private validateInputIndexBounds;
|
||||
private flagOrphanedNodes;
|
||||
private validateTriggerReachability;
|
||||
|
||||
2
dist/services/workflow-validator.d.ts.map
vendored
2
dist/services/workflow-validator.d.ts.map
vendored
@@ -1 +1 @@
|
||||
{"version":3,"file":"workflow-validator.d.ts","sourceRoot":"","sources":["../../src/services/workflow-validator.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,uBAAuB,EAAE,MAAM,6BAA6B,CAAC;AAiBtE,eAAO,MAAM,sBAAsB,aASjC,CAAC;AAEH,UAAU,YAAY;IACpB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC3B,UAAU,EAAE,GAAG,CAAC;IAChB,WAAW,CAAC,EAAE,GAAG,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,OAAO,CAAC,EAAE,uBAAuB,GAAG,qBAAqB,GAAG,cAAc,CAAC;IAC3E,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB;AAED,UAAU,kBAAkB;IAC1B,CAAC,UAAU,EAAE,MAAM,GAAG;QACpB,CAAC,UAAU,EAAE,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;KACnF,CAAC;CACH;AAED,UAAU,YAAY;IACpB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,YAAY,EAAE,CAAC;IACtB,WAAW,EAAE,kBAAkB,CAAC;IAChC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,UAAU,CAAC,EAAE,GAAG,CAAC;IACjB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,GAAG,CAAC;CACZ;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,OAAO,GAAG,SAAS,CAAC;IAC1B,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,GAAG,CAAC,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,WAAW,CAAC,EAAE,MAAM,CAAC;KACtB,CAAC;CACH;AAED,MAAM,WAAW,wBAAwB;IACvC,KAAK,EAAE,OAAO,CAAC;IACf,MAAM,EAAE,eAAe,EAAE,CAAC;IAC1B,QAAQ,EAAE,eAAe,EAAE,CAAC;IAC5B,UAAU,EAAE;QACV,UAAU,EAAE,MAAM,CAAC;QACnB,YAAY,EAAE,MAAM,CAAC;QACrB,YAAY,EAAE,MAAM,CAAC;QACrB,gBAAgB,EAAE,MAAM,CAAC;QACzB,kBAAkB,EAAE,MAAM,CAAC;QAC3B,oBAAoB,EAAE,MAAM,CAAC;KAC9B,CAAC;IACF,WAAW,EAAE,MAAM,EAAE,CAAC;CACvB;AAED,qBAAa,iBAAiB;IAK1B,OAAO,CAAC,cAAc;
IACtB,OAAO,CAAC,aAAa;IALvB,OAAO,CAAC,eAAe,CAA6B;IACpD,OAAO,CAAC,iBAAiB,CAAwB;gBAGvC,cAAc,EAAE,cAAc,EAC9B,aAAa,EAAE,OAAO,uBAAuB;IAWjD,gBAAgB,CACpB,QAAQ,EAAE,YAAY,EACtB,OAAO,GAAE;QACP,aAAa,CAAC,EAAE,OAAO,CAAC;QACxB,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,OAAO,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,aAAa,GAAG,QAAQ,CAAC;KACvD,GACL,OAAO,CAAC,wBAAwB,CAAC;IAgHpC,OAAO,CAAC,yBAAyB;YAkInB,gBAAgB;IAmO9B,OAAO,CAAC,mBAAmB;IA4F3B,OAAO,CAAC,yBAAyB;IAuHjC,OAAO,CAAC,gCAAgC;IAoFxC,OAAO,CAAC,wBAAwB;IAsChC,OAAO,CAAC,oBAAoB;IAuE5B,OAAO,CAAC,kBAAkB;IAsB1B,OAAO,CAAC,oBAAoB;IA4B5B,OAAO,CAAC,gBAAgB;IASxB,OAAO,CAAC,wBAAwB;IAmBhC,OAAO,CAAC,yBAAyB;IA8DjC,OAAO,CAAC,8BAA8B;IAmDtC,OAAO,CAAC,wBAAwB;IAuChC,OAAO,CAAC,iBAAiB;IAoCzB,OAAO,CAAC,2BAA2B;IA4EnC,OAAO,CAAC,QAAQ;IA4EhB,OAAO,CAAC,mBAAmB;IA4F3B,OAAO,CAAC,wBAAwB;IA2BhC,OAAO,CAAC,YAAY;IAgBpB,OAAO,CAAC,qBAAqB;IAgG7B,OAAO,CAAC,qBAAqB;IA8C7B,OAAO,CAAC,mBAAmB;IA4E3B,OAAO,CAAC,sBAAsB;IAyT9B,OAAO,CAAC,yBAAyB;IAqCjC,OAAO,CAAC,gCAAgC;IA8BxC,OAAO,CAAC,gCAAgC;IAsFxC,OAAO,CAAC,gBAAgB;IA4CxB,OAAO,CAAC,2BAA2B;CAmEpC"}
|
||||
{"version":3,"file":"workflow-validator.d.ts","sourceRoot":"","sources":["../../src/services/workflow-validator.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,uBAAuB,EAAE,MAAM,6BAA6B,CAAC;AA4BtE,UAAU,YAAY;IACpB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC3B,UAAU,EAAE,GAAG,CAAC;IAChB,WAAW,CAAC,EAAE,GAAG,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,OAAO,CAAC,EAAE,uBAAuB,GAAG,qBAAqB,GAAG,cAAc,CAAC;IAC3E,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB;AAED,UAAU,kBAAkB;IAC1B,CAAC,UAAU,EAAE,MAAM,GAAG;QACpB,CAAC,UAAU,EAAE,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;KACnF,CAAC;CACH;AAED,UAAU,YAAY;IACpB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,YAAY,EAAE,CAAC;IACtB,WAAW,EAAE,kBAAkB,CAAC;IAChC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,UAAU,CAAC,EAAE,GAAG,CAAC;IACjB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,GAAG,CAAC;CACZ;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,OAAO,GAAG,SAAS,CAAC;IAC1B,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,GAAG,CAAC,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,WAAW,CAAC,EAAE,MAAM,CAAC;KACtB,CAAC;CACH;AAED,MAAM,WAAW,wBAAwB;IACvC,KAAK,EAAE,OAAO,CAAC;IACf,MAAM,EAAE,eAAe,EAAE,CAAC;IAC1B,QAAQ,EAAE,eAAe,EAAE,CAAC;IAC5B,UAAU,EAAE;QACV,UAAU,EAAE,MAAM,CAAC;QACnB,YAAY,EAAE,MAAM,CAAC;QACrB,YAAY,EAAE,MAAM,CAAC;QACrB,gBAAgB,EAAE,MAAM,CAAC;QACzB,kBAAkB,EAAE,MAAM,CAAC;QAC3B,oBAAoB,EAAE,MAAM,CAAC;KAC9B,CAAC;IACF,WAAW,EAAE,MAAM,EAAE,CAAC;CACvB;AAED,qBAAa,iBAAiB;IAK1B,OAAO,CAAC,cAAc;IACtB,OAAO,CAAC,aAAa;IALvB,OAAO,C
AAC,eAAe,CAA6B;IACpD,OAAO,CAAC,iBAAiB,CAAwB;gBAGvC,cAAc,EAAE,cAAc,EAC9B,aAAa,EAAE,OAAO,uBAAuB;IAWjD,gBAAgB,CACpB,QAAQ,EAAE,YAAY,EACtB,OAAO,GAAE;QACP,aAAa,CAAC,EAAE,OAAO,CAAC;QACxB,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,OAAO,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,aAAa,GAAG,QAAQ,CAAC;KACvD,GACL,OAAO,CAAC,wBAAwB,CAAC;IAgHpC,OAAO,CAAC,yBAAyB;YAkInB,gBAAgB;IAmO9B,OAAO,CAAC,mBAAmB;IAuF3B,OAAO,CAAC,yBAAyB;IAsHjC,OAAO,CAAC,gCAAgC;IAoFxC,OAAO,CAAC,wBAAwB;IAsChC,OAAO,CAAC,oBAAoB;IAsE5B,OAAO,CAAC,yBAAyB;IAiEjC,OAAO,CAAC,wBAAwB;IAuChC,OAAO,CAAC,iBAAiB;IAoCzB,OAAO,CAAC,2BAA2B;IA4EnC,OAAO,CAAC,QAAQ;IA4EhB,OAAO,CAAC,mBAAmB;IA4F3B,OAAO,CAAC,wBAAwB;IA2BhC,OAAO,CAAC,YAAY;IAgBpB,OAAO,CAAC,qBAAqB;IAgG7B,OAAO,CAAC,qBAAqB;IA8C7B,OAAO,CAAC,mBAAmB;IA4E3B,OAAO,CAAC,sBAAsB;IAyT9B,OAAO,CAAC,yBAAyB;IAqCjC,OAAO,CAAC,gCAAgC;IA8BxC,OAAO,CAAC,gCAAgC;IAsFxC,OAAO,CAAC,gBAAgB;IA4CxB,OAAO,CAAC,2BAA2B;CAmEpC"}
|
||||
119
dist/services/workflow-validator.js
vendored
119
dist/services/workflow-validator.js
vendored
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.WorkflowValidator = exports.VALID_CONNECTION_TYPES = void 0;
|
||||
exports.WorkflowValidator = void 0;
|
||||
const crypto_1 = __importDefault(require("crypto"));
|
||||
const expression_validator_1 = require("./expression-validator");
|
||||
const expression_format_validator_1 = require("./expression-format-validator");
|
||||
@@ -16,7 +16,7 @@ const node_type_utils_1 = require("../utils/node-type-utils");
|
||||
const node_classification_1 = require("../utils/node-classification");
|
||||
const tool_variant_generator_1 = require("./tool-variant-generator");
|
||||
const logger = new logger_1.Logger({ prefix: '[WorkflowValidator]' });
|
||||
exports.VALID_CONNECTION_TYPES = new Set([
|
||||
const VALID_CONNECTION_TYPES = new Set([
|
||||
'main',
|
||||
'error',
|
||||
...ai_node_validator_1.AI_CONNECTION_TYPES,
|
||||
@@ -403,7 +403,7 @@ class WorkflowValidator {
|
||||
continue;
|
||||
}
|
||||
for (const [outputKey, outputConnections] of Object.entries(outputs)) {
|
||||
if (!exports.VALID_CONNECTION_TYPES.has(outputKey)) {
|
||||
if (!VALID_CONNECTION_TYPES.has(outputKey)) {
|
||||
let suggestion = '';
|
||||
if (/^\d+$/.test(outputKey)) {
|
||||
suggestion = ` If you meant to use output index ${outputKey}, use main[${outputKey}] instead.`;
|
||||
@@ -411,7 +411,7 @@ class WorkflowValidator {
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeName: sourceName,
|
||||
message: `Unknown connection output key "${outputKey}" on node "${sourceName}". Valid keys are: ${[...exports.VALID_CONNECTION_TYPES].join(', ')}.${suggestion}`,
|
||||
message: `Unknown connection output key "${outputKey}" on node "${sourceName}". Valid keys are: ${[...VALID_CONNECTION_TYPES].join(', ')}.${suggestion}`,
|
||||
code: 'UNKNOWN_CONNECTION_KEY'
|
||||
});
|
||||
result.statistics.invalidConnections++;
|
||||
@@ -422,9 +422,6 @@ class WorkflowValidator {
|
||||
if (outputKey === 'ai_tool') {
|
||||
this.validateAIToolSource(sourceNode, result);
|
||||
}
|
||||
if (outputKey === 'main') {
|
||||
this.validateNotAISubNode(sourceNode, result);
|
||||
}
|
||||
this.validateConnectionOutputs(sourceName, outputConnections, nodeMap, nodeIdMap, result, outputKey);
|
||||
}
|
||||
}
|
||||
@@ -446,7 +443,6 @@ class WorkflowValidator {
|
||||
if (outputType === 'main' && sourceNode) {
|
||||
this.validateErrorOutputConfiguration(sourceName, sourceNode, outputs, nodeMap, result);
|
||||
this.validateOutputIndexBounds(sourceNode, outputs, result);
|
||||
this.validateConditionalBranchUsage(sourceNode, outputs, result);
|
||||
}
|
||||
outputs.forEach((outputConnections, outputIndex) => {
|
||||
if (!outputConnections)
|
||||
@@ -460,7 +456,7 @@ class WorkflowValidator {
|
||||
result.statistics.invalidConnections++;
|
||||
return;
|
||||
}
|
||||
if (connection.type && !exports.VALID_CONNECTION_TYPES.has(connection.type)) {
|
||||
if (connection.type && !VALID_CONNECTION_TYPES.has(connection.type)) {
|
||||
let suggestion = '';
|
||||
if (/^\d+$/.test(connection.type)) {
|
||||
suggestion = ` Numeric types are not valid - use "main", "error", or an AI connection type.`;
|
||||
@@ -648,57 +644,6 @@ class WorkflowValidator {
|
||||
code: 'INVALID_AI_TOOL_SOURCE'
|
||||
});
|
||||
}
|
||||
getNodeOutputTypes(nodeType) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
const nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
if (!nodeInfo || !nodeInfo.outputs)
|
||||
return null;
|
||||
const outputs = nodeInfo.outputs;
|
||||
if (!Array.isArray(outputs))
|
||||
return null;
|
||||
for (const output of outputs) {
|
||||
if (typeof output === 'string' && output.startsWith('={{')) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return outputs;
|
||||
}
|
||||
validateNotAISubNode(sourceNode, result) {
|
||||
const outputTypes = this.getNodeOutputTypes(sourceNode.type);
|
||||
if (!outputTypes)
|
||||
return;
|
||||
const hasMainOutput = outputTypes.some(t => t === 'main');
|
||||
if (hasMainOutput)
|
||||
return;
|
||||
const aiTypes = outputTypes.filter(t => t !== 'main');
|
||||
const expectedType = aiTypes[0] || 'ai_languageModel';
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeId: sourceNode.id,
|
||||
nodeName: sourceNode.name,
|
||||
message: `Node "${sourceNode.name}" (${sourceNode.type}) is an AI sub-node that outputs "${expectedType}" connections. ` +
|
||||
`It cannot be used with "main" connections. Connect it to an AI Agent or Chain via "${expectedType}" instead.`,
|
||||
code: 'AI_SUBNODE_MAIN_CONNECTION'
|
||||
});
|
||||
}
|
||||
getShortNodeType(sourceNode) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(sourceNode.type);
|
||||
return normalizedType.replace(/^(n8n-)?nodes-base\./, '');
|
||||
}
|
||||
getConditionalOutputInfo(sourceNode) {
|
||||
const shortType = this.getShortNodeType(sourceNode);
|
||||
if (shortType === 'if' || shortType === 'filter') {
|
||||
return { shortType, expectedOutputs: 2 };
|
||||
}
|
||||
if (shortType === 'switch') {
|
||||
const rules = sourceNode.parameters?.rules?.values || sourceNode.parameters?.rules;
|
||||
if (Array.isArray(rules)) {
|
||||
return { shortType, expectedOutputs: rules.length + 1 };
|
||||
}
|
||||
return null;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
validateOutputIndexBounds(sourceNode, outputs, result) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(sourceNode.type);
|
||||
const nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
@@ -713,12 +658,18 @@ class WorkflowValidator {
|
||||
}
|
||||
if (mainOutputCount === 0)
|
||||
return;
|
||||
const conditionalInfo = this.getConditionalOutputInfo(sourceNode);
|
||||
if (conditionalInfo) {
|
||||
mainOutputCount = conditionalInfo.expectedOutputs;
|
||||
const shortType = normalizedType.replace(/^(n8n-)?nodes-base\./, '');
|
||||
if (shortType === 'switch') {
|
||||
const rules = sourceNode.parameters?.rules?.values || sourceNode.parameters?.rules;
|
||||
if (Array.isArray(rules)) {
|
||||
mainOutputCount = rules.length + 1;
|
||||
}
|
||||
else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
else if (this.getShortNodeType(sourceNode) === 'switch') {
|
||||
return;
|
||||
if (shortType === 'if' || shortType === 'filter') {
|
||||
mainOutputCount = 2;
|
||||
}
|
||||
if (sourceNode.onError === 'continueErrorOutput') {
|
||||
mainOutputCount += 1;
|
||||
@@ -740,44 +691,6 @@ class WorkflowValidator {
|
||||
}
|
||||
}
|
||||
}
|
||||
validateConditionalBranchUsage(sourceNode, outputs, result) {
|
||||
const conditionalInfo = this.getConditionalOutputInfo(sourceNode);
|
||||
if (!conditionalInfo || conditionalInfo.expectedOutputs < 2)
|
||||
return;
|
||||
const { shortType, expectedOutputs } = conditionalInfo;
|
||||
const main0Count = outputs[0]?.length || 0;
|
||||
if (main0Count < 2)
|
||||
return;
|
||||
const hasHigherIndexConnections = outputs.slice(1).some(conns => conns && conns.length > 0);
|
||||
if (hasHigherIndexConnections)
|
||||
return;
|
||||
let message;
|
||||
if (shortType === 'if' || shortType === 'filter') {
|
||||
const isFilter = shortType === 'filter';
|
||||
const displayName = isFilter ? 'Filter' : 'IF';
|
||||
const trueLabel = isFilter ? 'matched' : 'true';
|
||||
const falseLabel = isFilter ? 'unmatched' : 'false';
|
||||
message = `${displayName} node "${sourceNode.name}" has ${main0Count} connections on the "${trueLabel}" branch (main[0]) ` +
|
||||
`but no connections on the "${falseLabel}" branch (main[1]). ` +
|
||||
`All ${main0Count} target nodes execute together on the "${trueLabel}" branch, ` +
|
||||
`while the "${falseLabel}" branch has no effect. ` +
|
||||
`Split connections: main[0] for ${trueLabel}, main[1] for ${falseLabel}.`;
|
||||
}
|
||||
else {
|
||||
message = `Switch node "${sourceNode.name}" has ${main0Count} connections on output 0 ` +
|
||||
`but no connections on any other outputs (1-${expectedOutputs - 1}). ` +
|
||||
`All ${main0Count} target nodes execute together on output 0, ` +
|
||||
`while other switch branches have no effect. ` +
|
||||
`Distribute connections across outputs to match switch rules.`;
|
||||
}
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: sourceNode.id,
|
||||
nodeName: sourceNode.name,
|
||||
message,
|
||||
code: 'CONDITIONAL_BRANCH_FANOUT'
|
||||
});
|
||||
}
|
||||
validateInputIndexBounds(sourceName, targetNode, connection, result) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(targetNode.type);
|
||||
const nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
|
||||
2
dist/services/workflow-validator.js.map
vendored
2
dist/services/workflow-validator.js.map
vendored
File diff suppressed because one or more lines are too long
4174
package-lock.json
generated
4174
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
12
package.json
12
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "n8n-mcp",
|
||||
"version": "2.36.2",
|
||||
"version": "2.36.0",
|
||||
"description": "Integration between n8n workflow automation and Model Context Protocol (MCP)",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
@@ -152,17 +152,17 @@
|
||||
"vitest": "^3.2.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.27.1",
|
||||
"@n8n/n8n-nodes-langchain": "^2.11.2",
|
||||
"@modelcontextprotocol/sdk": "1.20.1",
|
||||
"@n8n/n8n-nodes-langchain": "^2.10.1",
|
||||
"@supabase/supabase-js": "^2.57.4",
|
||||
"dotenv": "^16.5.0",
|
||||
"express": "^5.1.0",
|
||||
"express-rate-limit": "^7.1.5",
|
||||
"form-data": "^4.0.5",
|
||||
"lru-cache": "^11.2.1",
|
||||
"n8n": "^2.11.4",
|
||||
"n8n-core": "^2.11.1",
|
||||
"n8n-workflow": "^2.11.1",
|
||||
"n8n": "^2.10.3",
|
||||
"n8n-core": "^2.10.1",
|
||||
"n8n-workflow": "^2.10.1",
|
||||
"openai": "^4.77.0",
|
||||
"sql.js": "^1.13.0",
|
||||
"tslib": "^2.6.2",
|
||||
|
||||
@@ -424,13 +424,7 @@ const autofixWorkflowSchema = z.object({
|
||||
'node-type-correction',
|
||||
'webhook-missing-path',
|
||||
'typeversion-upgrade',
|
||||
'version-migration',
|
||||
'tool-variant-correction',
|
||||
'connection-numeric-keys',
|
||||
'connection-invalid-type',
|
||||
'connection-id-to-name',
|
||||
'connection-duplicate-removal',
|
||||
'connection-input-index'
|
||||
'version-migration'
|
||||
])).optional(),
|
||||
confidenceThreshold: z.enum(['high', 'medium', 'low']).optional().default('medium'),
|
||||
maxFixes: z.number().optional().default(50)
|
||||
|
||||
@@ -284,7 +284,7 @@ export const n8nManagementTools: ToolDefinition[] = [
|
||||
},
|
||||
{
|
||||
name: 'n8n_autofix_workflow',
|
||||
description: `Automatically fix common workflow validation errors. Preview fixes or apply them. Fixes expression format, typeVersion, error output config, webhook paths, connection structure issues (numeric keys, invalid types, ID-to-name, duplicates, out-of-bounds indices).`,
|
||||
description: `Automatically fix common workflow validation errors. Preview fixes or apply them. Fixes expression format, typeVersion, error output config, webhook paths.`,
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
@@ -301,7 +301,7 @@ export const n8nManagementTools: ToolDefinition[] = [
|
||||
description: 'Types of fixes to apply (default: all)',
|
||||
items: {
|
||||
type: 'string',
|
||||
enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path', 'typeversion-upgrade', 'version-migration', 'tool-variant-correction', 'connection-numeric-keys', 'connection-invalid-type', 'connection-id-to-name', 'connection-duplicate-removal', 'connection-input-index']
|
||||
enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path', 'typeversion-upgrade', 'version-migration']
|
||||
}
|
||||
},
|
||||
confidenceThreshold: {
|
||||
|
||||
@@ -6,14 +6,13 @@
|
||||
*/
|
||||
|
||||
import crypto from 'crypto';
|
||||
import { WorkflowValidationResult, VALID_CONNECTION_TYPES } from './workflow-validator';
|
||||
import { WorkflowValidationResult } from './workflow-validator';
|
||||
import { ExpressionFormatIssue } from './expression-format-validator';
|
||||
import { NodeSimilarityService } from './node-similarity-service';
|
||||
import { NodeRepository } from '../database/node-repository';
|
||||
import {
|
||||
WorkflowDiffOperation,
|
||||
UpdateNodeOperation,
|
||||
ReplaceConnectionsOperation
|
||||
UpdateNodeOperation
|
||||
} from '../types/workflow-diff';
|
||||
import { WorkflowNode, Workflow } from '../types/n8n-api';
|
||||
import { Logger } from '../utils/logger';
|
||||
@@ -31,22 +30,9 @@ export type FixType =
|
||||
| 'error-output-config'
|
||||
| 'node-type-correction'
|
||||
| 'webhook-missing-path'
|
||||
| 'typeversion-upgrade' // Proactive version upgrades
|
||||
| 'version-migration' // Smart version migrations with breaking changes
|
||||
| 'tool-variant-correction' // Fix base nodes used as AI tools when Tool variant exists
|
||||
| 'connection-numeric-keys' // "0","1" keys → main[0], main[1]
|
||||
| 'connection-invalid-type' // type:"0" → type:"main"
|
||||
| 'connection-id-to-name' // node ID refs → node name refs
|
||||
| 'connection-duplicate-removal' // Dedup identical connection entries
|
||||
| 'connection-input-index'; // Out-of-bounds input index → clamped
|
||||
|
||||
export const CONNECTION_FIX_TYPES: FixType[] = [
|
||||
'connection-numeric-keys',
|
||||
'connection-invalid-type',
|
||||
'connection-id-to-name',
|
||||
'connection-duplicate-removal',
|
||||
'connection-input-index'
|
||||
];
|
||||
| 'typeversion-upgrade' // Proactive version upgrades
|
||||
| 'version-migration' // Smart version migrations with breaking changes
|
||||
| 'tool-variant-correction'; // Fix base nodes used as AI tools when Tool variant exists
|
||||
|
||||
export interface AutoFixConfig {
|
||||
applyFixes: boolean;
|
||||
@@ -189,9 +175,6 @@ export class WorkflowAutoFixer {
|
||||
await this.processVersionMigrationFixes(workflow, nodeMap, operations, fixes, postUpdateGuidance);
|
||||
}
|
||||
|
||||
// Process connection structure fixes (HIGH/MEDIUM confidence)
|
||||
this.processConnectionFixes(workflow, validationResult, fullConfig, operations, fixes);
|
||||
|
||||
// Filter by confidence threshold
|
||||
const filteredFixes = this.filterByConfidence(fixes, fullConfig.confidenceThreshold);
|
||||
const filteredOperations = this.filterOperationsByFixes(operations, filteredFixes, fixes);
|
||||
@@ -672,14 +655,10 @@ export class WorkflowAutoFixer {
|
||||
allFixes: FixOperation[]
|
||||
): WorkflowDiffOperation[] {
|
||||
const fixedNodes = new Set(filteredFixes.map(f => f.node));
|
||||
const hasConnectionFixes = filteredFixes.some(f => CONNECTION_FIX_TYPES.includes(f.type));
|
||||
return operations.filter(op => {
|
||||
if (op.type === 'updateNode') {
|
||||
return fixedNodes.has(op.nodeId || '');
|
||||
}
|
||||
if (op.type === 'replaceConnections') {
|
||||
return hasConnectionFixes;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
@@ -698,12 +677,7 @@ export class WorkflowAutoFixer {
|
||||
'webhook-missing-path': 0,
|
||||
'typeversion-upgrade': 0,
|
||||
'version-migration': 0,
|
||||
'tool-variant-correction': 0,
|
||||
'connection-numeric-keys': 0,
|
||||
'connection-invalid-type': 0,
|
||||
'connection-id-to-name': 0,
|
||||
'connection-duplicate-removal': 0,
|
||||
'connection-input-index': 0
|
||||
'tool-variant-correction': 0
|
||||
},
|
||||
byConfidence: {
|
||||
'high': 0,
|
||||
@@ -756,16 +730,6 @@ export class WorkflowAutoFixer {
|
||||
parts.push(`${stats.byType['tool-variant-correction']} tool variant ${stats.byType['tool-variant-correction'] === 1 ? 'correction' : 'corrections'}`);
|
||||
}
|
||||
|
||||
const connectionIssueCount =
|
||||
(stats.byType['connection-numeric-keys'] || 0) +
|
||||
(stats.byType['connection-invalid-type'] || 0) +
|
||||
(stats.byType['connection-id-to-name'] || 0) +
|
||||
(stats.byType['connection-duplicate-removal'] || 0) +
|
||||
(stats.byType['connection-input-index'] || 0);
|
||||
if (connectionIssueCount > 0) {
|
||||
parts.push(`${connectionIssueCount} connection ${connectionIssueCount === 1 ? 'issue' : 'issues'}`);
|
||||
}
|
||||
|
||||
if (parts.length === 0) {
|
||||
return `Fixed ${stats.total} ${stats.total === 1 ? 'issue' : 'issues'}`;
|
||||
}
|
||||
@@ -773,370 +737,6 @@ export class WorkflowAutoFixer {
|
||||
return `Fixed ${parts.join(', ')}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process connection structure fixes.
|
||||
* Deep-clones workflow.connections, applies fixes in order:
|
||||
* numeric keys → ID-to-name → invalid type → input index → dedup
|
||||
* Emits a single ReplaceConnectionsOperation if any corrections were made.
|
||||
*/
|
||||
private processConnectionFixes(
|
||||
workflow: Workflow,
|
||||
validationResult: WorkflowValidationResult,
|
||||
config: AutoFixConfig,
|
||||
operations: WorkflowDiffOperation[],
|
||||
fixes: FixOperation[]
|
||||
): void {
|
||||
if (!workflow.connections || Object.keys(workflow.connections).length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Build lookup maps
|
||||
const idToNameMap = new Map<string, string>();
|
||||
const nameSet = new Set<string>();
|
||||
for (const node of workflow.nodes) {
|
||||
idToNameMap.set(node.id, node.name);
|
||||
nameSet.add(node.name);
|
||||
}
|
||||
|
||||
// Deep-clone connections
|
||||
const conn: any = JSON.parse(JSON.stringify(workflow.connections));
|
||||
let anyFixed = false;
|
||||
|
||||
// 1. Fix numeric source keys ("0" → main[0])
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-numeric-keys')) {
|
||||
const numericKeyResult = this.fixNumericKeys(conn);
|
||||
if (numericKeyResult.length > 0) {
|
||||
fixes.push(...numericKeyResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Fix ID-to-name references (source keys and .node values)
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-id-to-name')) {
|
||||
const idToNameResult = this.fixIdToName(conn, idToNameMap, nameSet);
|
||||
if (idToNameResult.length > 0) {
|
||||
fixes.push(...idToNameResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Fix invalid connection types
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-invalid-type')) {
|
||||
const invalidTypeResult = this.fixInvalidTypes(conn);
|
||||
if (invalidTypeResult.length > 0) {
|
||||
fixes.push(...invalidTypeResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Fix out-of-bounds input indices
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-input-index')) {
|
||||
const inputIndexResult = this.fixInputIndices(conn, validationResult, workflow);
|
||||
if (inputIndexResult.length > 0) {
|
||||
fixes.push(...inputIndexResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Dedup identical connection entries
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-duplicate-removal')) {
|
||||
const dedupResult = this.fixDuplicateConnections(conn);
|
||||
if (dedupResult.length > 0) {
|
||||
fixes.push(...dedupResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (anyFixed) {
|
||||
const op: ReplaceConnectionsOperation = {
|
||||
type: 'replaceConnections',
|
||||
connections: conn
|
||||
};
|
||||
operations.push(op);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fix numeric connection output keys ("0", "1" → main[0], main[1])
|
||||
*/
|
||||
private fixNumericKeys(conn: any): FixOperation[] {
|
||||
const fixes: FixOperation[] = [];
|
||||
const sourceNodes = Object.keys(conn);
|
||||
|
||||
for (const sourceName of sourceNodes) {
|
||||
const nodeConn = conn[sourceName];
|
||||
const numericKeys = Object.keys(nodeConn).filter(k => /^\d+$/.test(k));
|
||||
|
||||
if (numericKeys.length === 0) continue;
|
||||
|
||||
// Ensure main array exists
|
||||
if (!nodeConn['main']) {
|
||||
nodeConn['main'] = [];
|
||||
}
|
||||
|
||||
for (const numKey of numericKeys) {
|
||||
const index = parseInt(numKey, 10);
|
||||
const entries = nodeConn[numKey];
|
||||
|
||||
// Extend main array if needed (fill gaps with empty arrays)
|
||||
while (nodeConn['main'].length <= index) {
|
||||
nodeConn['main'].push([]);
|
||||
}
|
||||
|
||||
// Merge entries into main[index]
|
||||
const hadExisting = nodeConn['main'][index] && nodeConn['main'][index].length > 0;
|
||||
if (Array.isArray(entries)) {
|
||||
for (const outputGroup of entries) {
|
||||
if (Array.isArray(outputGroup)) {
|
||||
nodeConn['main'][index] = [
|
||||
...nodeConn['main'][index],
|
||||
...outputGroup
|
||||
];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (hadExisting) {
|
||||
logger.warn(`Merged numeric key "${numKey}" into existing main[${index}] on node "${sourceName}" - dedup pass will clean exact duplicates`);
|
||||
}
|
||||
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${numKey}`,
|
||||
type: 'connection-numeric-keys',
|
||||
before: numKey,
|
||||
after: `main[${index}]`,
|
||||
confidence: hadExisting ? 'medium' : 'high',
|
||||
description: hadExisting
|
||||
? `Merged numeric connection key "${numKey}" into existing main[${index}] on node "${sourceName}"`
|
||||
: `Converted numeric connection key "${numKey}" to main[${index}] on node "${sourceName}"`
|
||||
});
|
||||
|
||||
delete nodeConn[numKey];
|
||||
}
|
||||
}
|
||||
|
||||
return fixes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fix node ID references in connections (replace IDs with names)
|
||||
*/
|
||||
private fixIdToName(
|
||||
conn: any,
|
||||
idToNameMap: Map<string, string>,
|
||||
nameSet: Set<string>
|
||||
): FixOperation[] {
|
||||
const fixes: FixOperation[] = [];
|
||||
|
||||
// Build rename plan for source keys, then check for collisions
|
||||
const renames: Array<{ oldKey: string; newKey: string }> = [];
|
||||
const sourceKeys = Object.keys(conn);
|
||||
for (const sourceKey of sourceKeys) {
|
||||
if (idToNameMap.has(sourceKey) && !nameSet.has(sourceKey)) {
|
||||
renames.push({ oldKey: sourceKey, newKey: idToNameMap.get(sourceKey)! });
|
||||
}
|
||||
}
|
||||
|
||||
// Check for collisions among renames (two IDs mapping to the same name)
|
||||
const newKeyCount = new Map<string, number>();
|
||||
for (const r of renames) {
|
||||
newKeyCount.set(r.newKey, (newKeyCount.get(r.newKey) || 0) + 1);
|
||||
}
|
||||
const safeRenames = renames.filter(r => {
|
||||
if ((newKeyCount.get(r.newKey) || 0) > 1) {
|
||||
logger.warn(`Skipping ambiguous ID-to-name rename: "${r.oldKey}" → "${r.newKey}" (multiple IDs map to same name)`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
for (const { oldKey, newKey } of safeRenames) {
|
||||
conn[newKey] = conn[oldKey];
|
||||
delete conn[oldKey];
|
||||
fixes.push({
|
||||
node: newKey,
|
||||
field: `connections.sourceKey`,
|
||||
type: 'connection-id-to-name',
|
||||
before: oldKey,
|
||||
after: newKey,
|
||||
confidence: 'high',
|
||||
description: `Replaced node ID "${oldKey}" with name "${newKey}" as connection source key`
|
||||
});
|
||||
}
|
||||
|
||||
// Fix .node values that are node IDs
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs)) continue;
|
||||
for (const outputGroup of outputs) {
|
||||
if (!Array.isArray(outputGroup)) continue;
|
||||
for (const entry of outputGroup) {
|
||||
if (entry && entry.node && idToNameMap.has(entry.node) && !nameSet.has(entry.node)) {
|
||||
const oldNode = entry.node;
|
||||
const newNode = idToNameMap.get(entry.node)!;
|
||||
entry.node = newNode;
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[].node`,
|
||||
type: 'connection-id-to-name',
|
||||
before: oldNode,
|
||||
after: newNode,
|
||||
confidence: 'high',
|
||||
description: `Replaced target node ID "${oldNode}" with name "${newNode}" in connection from "${sourceName}"`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fixes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fix invalid connection types in entries (e.g., type:"0" → type:"main")
|
||||
*/
|
||||
private fixInvalidTypes(conn: any): FixOperation[] {
|
||||
const fixes: FixOperation[] = [];
|
||||
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs)) continue;
|
||||
for (const outputGroup of outputs) {
|
||||
if (!Array.isArray(outputGroup)) continue;
|
||||
for (const entry of outputGroup) {
|
||||
if (entry && entry.type && !VALID_CONNECTION_TYPES.has(entry.type)) {
|
||||
const oldType = entry.type;
|
||||
// Use the parent output key if it's valid, otherwise default to "main"
|
||||
const newType = VALID_CONNECTION_TYPES.has(outputKey) ? outputKey : 'main';
|
||||
entry.type = newType;
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[].type`,
|
||||
type: 'connection-invalid-type',
|
||||
before: oldType,
|
||||
after: newType,
|
||||
confidence: 'high',
|
||||
description: `Fixed invalid connection type "${oldType}" → "${newType}" in connection from "${sourceName}" to "${entry.node}"`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fixes;
|
||||
}
|
||||
|
||||
  /**
   * Fix out-of-bounds input indices (clamp to valid range)
   *
   * Driven entirely by INPUT_INDEX_OUT_OF_BOUNDS validation errors: for each
   * such error, the bad index and the target node's input count are parsed
   * out of the error message text, then every connection entry targeting
   * that node with the bad index is rewritten in place.
   *
   * @param conn - The workflow `connections` object (mutated in place).
   * @param validationResult - Prior validation output; only errors with
   *   code INPUT_INDEX_OUT_OF_BOUNDS are consumed.
   * @param workflow - The workflow being fixed. NOTE(review): currently
   *   unused here — kept for signature parity with sibling fixers; confirm.
   * @returns List of fix operations (confidence "medium" — the clamped
   *   index is a guess, not a guaranteed-correct wiring).
   */
  private fixInputIndices(
    conn: any,
    validationResult: WorkflowValidationResult,
    workflow: Workflow
  ): FixOperation[] {
    const fixes: FixOperation[] = [];

    // Parse INPUT_INDEX_OUT_OF_BOUNDS errors from validation
    for (const error of validationResult.errors) {
      if (error.code !== 'INPUT_INDEX_OUT_OF_BOUNDS') continue;

      const targetNodeName = error.nodeName;
      if (!targetNodeName) continue;

      // Extract the bad index and input count from the error message
      // (couples this fixer to the validator's message wording; a wording
      // change makes the regex miss and the error is skipped with a warn).
      const match = error.message.match(/Input index (\d+).*?has (\d+) main input/);
      if (!match) {
        logger.warn(`Could not parse INPUT_INDEX_OUT_OF_BOUNDS error for node "${targetNodeName}": ${error.message}`);
        continue;
      }

      const badIndex = parseInt(match[1], 10);
      const inputCount = parseInt(match[2], 10);

      // For multi-input nodes, clamp to max valid index; for single-input, reset to 0
      const clampedIndex = inputCount > 1 ? Math.min(badIndex, inputCount - 1) : 0;

      // Find and fix the bad index in connections
      for (const sourceName of Object.keys(conn)) {
        const nodeConn = conn[sourceName];
        for (const outputKey of Object.keys(nodeConn)) {
          const outputs = nodeConn[outputKey];
          if (!Array.isArray(outputs)) continue;
          for (const outputGroup of outputs) {
            if (!Array.isArray(outputGroup)) continue;
            for (const entry of outputGroup) {
              // Only entries matching BOTH the target node and the exact bad
              // index are touched; other connections to the node are left alone.
              if (entry && entry.node === targetNodeName && entry.index === badIndex) {
                entry.index = clampedIndex;
                fixes.push({
                  node: sourceName,
                  field: `connections.${sourceName}.${outputKey}[].index`,
                  type: 'connection-input-index',
                  before: badIndex,
                  after: clampedIndex,
                  confidence: 'medium',
                  description: `Clamped input index ${badIndex} → ${clampedIndex} for target node "${targetNodeName}" (has ${inputCount} input${inputCount === 1 ? '' : 's'})`
                });
              }
            }
          }
        }
      }
    }

    return fixes;
  }
|
||||
|
||||
/**
|
||||
* Remove duplicate connection entries (same node, type, index)
|
||||
*/
|
||||
private fixDuplicateConnections(conn: any): FixOperation[] {
|
||||
const fixes: FixOperation[] = [];
|
||||
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs)) continue;
|
||||
for (let i = 0; i < outputs.length; i++) {
|
||||
const outputGroup = outputs[i];
|
||||
if (!Array.isArray(outputGroup)) continue;
|
||||
|
||||
const seen = new Set<string>();
|
||||
const deduped: any[] = [];
|
||||
|
||||
for (const entry of outputGroup) {
|
||||
const key = JSON.stringify({ node: entry.node, type: entry.type, index: entry.index });
|
||||
if (seen.has(key)) {
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[${i}]`,
|
||||
type: 'connection-duplicate-removal',
|
||||
before: entry,
|
||||
after: null,
|
||||
confidence: 'high',
|
||||
description: `Removed duplicate connection from "${sourceName}" to "${entry.node}" (type: ${entry.type}, index: ${entry.index})`
|
||||
});
|
||||
} else {
|
||||
seen.add(key);
|
||||
deduped.push(entry);
|
||||
}
|
||||
}
|
||||
|
||||
outputs[i] = deduped;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fixes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process version upgrade fixes (proactive upgrades to latest versions)
|
||||
* HIGH confidence for non-breaking upgrades, MEDIUM for upgrades with auto-migratable changes
|
||||
|
||||
@@ -22,7 +22,7 @@ const logger = new Logger({ prefix: '[WorkflowValidator]' });
|
||||
* All valid connection output keys in n8n workflows.
|
||||
* Any key not in this set is malformed and should be flagged.
|
||||
*/
|
||||
export const VALID_CONNECTION_TYPES = new Set<string>([
|
||||
const VALID_CONNECTION_TYPES = new Set<string>([
|
||||
'main',
|
||||
'error',
|
||||
...AI_CONNECTION_TYPES,
|
||||
@@ -650,11 +650,6 @@ export class WorkflowValidator {
|
||||
this.validateAIToolSource(sourceNode, result);
|
||||
}
|
||||
|
||||
// Validate that AI sub-nodes are not connected via main
|
||||
if (outputKey === 'main') {
|
||||
this.validateNotAISubNode(sourceNode, result);
|
||||
}
|
||||
|
||||
this.validateConnectionOutputs(
|
||||
sourceName,
|
||||
outputConnections,
|
||||
@@ -700,7 +695,6 @@ export class WorkflowValidator {
|
||||
if (outputType === 'main' && sourceNode) {
|
||||
this.validateErrorOutputConfiguration(sourceName, sourceNode, outputs, nodeMap, result);
|
||||
this.validateOutputIndexBounds(sourceNode, outputs, result);
|
||||
this.validateConditionalBranchUsage(sourceNode, outputs, result);
|
||||
}
|
||||
|
||||
outputs.forEach((outputConnections, outputIndex) => {
|
||||
@@ -993,85 +987,6 @@ export class WorkflowValidator {
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the static output types for a node from the database.
|
||||
* Returns null if outputs contain expressions (dynamic) or node not found.
|
||||
*/
|
||||
private getNodeOutputTypes(nodeType: string): string[] | null {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
const nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
if (!nodeInfo || !nodeInfo.outputs) return null;
|
||||
|
||||
const outputs = nodeInfo.outputs;
|
||||
if (!Array.isArray(outputs)) return null;
|
||||
|
||||
// Skip if any output is an expression (dynamic — can't determine statically)
|
||||
for (const output of outputs) {
|
||||
if (typeof output === 'string' && output.startsWith('={{')) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
return outputs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that AI sub-nodes (nodes that only output AI connection types)
|
||||
* are not connected via "main" connections.
|
||||
*/
|
||||
private validateNotAISubNode(
|
||||
sourceNode: WorkflowNode,
|
||||
result: WorkflowValidationResult
|
||||
): void {
|
||||
const outputTypes = this.getNodeOutputTypes(sourceNode.type);
|
||||
if (!outputTypes) return; // Unknown or dynamic — skip
|
||||
|
||||
// Check if the node outputs ONLY AI types (no 'main')
|
||||
const hasMainOutput = outputTypes.some(t => t === 'main');
|
||||
if (hasMainOutput) return; // Node can legitimately output main
|
||||
|
||||
// All outputs are AI types — this node should not be connected via main
|
||||
const aiTypes = outputTypes.filter(t => t !== 'main');
|
||||
const expectedType = aiTypes[0] || 'ai_languageModel';
|
||||
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeId: sourceNode.id,
|
||||
nodeName: sourceNode.name,
|
||||
message: `Node "${sourceNode.name}" (${sourceNode.type}) is an AI sub-node that outputs "${expectedType}" connections. ` +
|
||||
`It cannot be used with "main" connections. Connect it to an AI Agent or Chain via "${expectedType}" instead.`,
|
||||
code: 'AI_SUBNODE_MAIN_CONNECTION'
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Derive the short node type name (e.g., "if", "switch", "set") from a workflow node.
|
||||
*/
|
||||
private getShortNodeType(sourceNode: WorkflowNode): string {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(sourceNode.type);
|
||||
return normalizedType.replace(/^(n8n-)?nodes-base\./, '');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the expected main output count for a conditional node (IF, Filter, Switch).
|
||||
* Returns null for non-conditional nodes or when the count cannot be determined.
|
||||
*/
|
||||
private getConditionalOutputInfo(sourceNode: WorkflowNode): { shortType: string; expectedOutputs: number } | null {
|
||||
const shortType = this.getShortNodeType(sourceNode);
|
||||
|
||||
if (shortType === 'if' || shortType === 'filter') {
|
||||
return { shortType, expectedOutputs: 2 };
|
||||
}
|
||||
if (shortType === 'switch') {
|
||||
const rules = sourceNode.parameters?.rules?.values || sourceNode.parameters?.rules;
|
||||
if (Array.isArray(rules)) {
|
||||
return { shortType, expectedOutputs: rules.length + 1 }; // rules + fallback
|
||||
}
|
||||
return null; // Cannot determine dynamic output count
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that output indices don't exceed what the node type supports.
|
||||
*/
|
||||
@@ -1097,13 +1012,19 @@ export class WorkflowValidator {
|
||||
|
||||
if (mainOutputCount === 0) return;
|
||||
|
||||
// Override with dynamic output counts for conditional nodes
|
||||
const conditionalInfo = this.getConditionalOutputInfo(sourceNode);
|
||||
if (conditionalInfo) {
|
||||
mainOutputCount = conditionalInfo.expectedOutputs;
|
||||
} else if (this.getShortNodeType(sourceNode) === 'switch') {
|
||||
// Switch without determinable rules -- skip bounds check
|
||||
return;
|
||||
// Account for dynamic output counts based on node type and parameters
|
||||
const shortType = normalizedType.replace(/^(n8n-)?nodes-base\./, '');
|
||||
if (shortType === 'switch') {
|
||||
// Switch node: output count depends on rules configuration
|
||||
const rules = sourceNode.parameters?.rules?.values || sourceNode.parameters?.rules;
|
||||
if (Array.isArray(rules)) {
|
||||
mainOutputCount = rules.length + 1; // rules + fallback
|
||||
} else {
|
||||
return; // Cannot determine dynamic output count, skip bounds check
|
||||
}
|
||||
}
|
||||
if (shortType === 'if' || shortType === 'filter') {
|
||||
mainOutputCount = 2; // true/false
|
||||
}
|
||||
|
||||
// Account for continueErrorOutput adding an extra output
|
||||
@@ -1131,60 +1052,6 @@ export class WorkflowValidator {
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Detect when a conditional node (IF, Filter, Switch) has all connections
   * crammed into main[0] with higher-index outputs empty. This usually means
   * both branches execute together on one condition, while the other branches
   * have no effect.
   *
   * Emits a CONDITIONAL_BRANCH_FANOUT warning (not an error) — the workflow
   * still runs, it just likely doesn't do what the author intended.
   *
   * @param sourceNode - The conditional node whose outputs are inspected.
   * @param outputs - The node's main output groups: outputs[i] is the list of
   *   connection entries attached to main[i].
   * @param result - Validation result the warning is pushed onto.
   */
  private validateConditionalBranchUsage(
    sourceNode: WorkflowNode,
    outputs: Array<Array<{ node: string; type: string; index: number }>>,
    result: WorkflowValidationResult
  ): void {
    // Only applies to nodes with a known conditional shape and >= 2 branches.
    const conditionalInfo = this.getConditionalOutputInfo(sourceNode);
    if (!conditionalInfo || conditionalInfo.expectedOutputs < 2) return;

    const { shortType, expectedOutputs } = conditionalInfo;

    // Check: main[0] has >= 2 connections AND all main[1+] are empty
    const main0Count = outputs[0]?.length || 0;
    if (main0Count < 2) return;

    const hasHigherIndexConnections = outputs.slice(1).some(
      conns => conns && conns.length > 0
    );
    if (hasHigherIndexConnections) return;

    // Build a context-appropriate warning message
    let message: string;
    if (shortType === 'if' || shortType === 'filter') {
      // IF and Filter share the same two-branch shape but use different
      // branch terminology in the n8n UI.
      const isFilter = shortType === 'filter';
      const displayName = isFilter ? 'Filter' : 'IF';
      const trueLabel = isFilter ? 'matched' : 'true';
      const falseLabel = isFilter ? 'unmatched' : 'false';
      message = `${displayName} node "${sourceNode.name}" has ${main0Count} connections on the "${trueLabel}" branch (main[0]) ` +
        `but no connections on the "${falseLabel}" branch (main[1]). ` +
        `All ${main0Count} target nodes execute together on the "${trueLabel}" branch, ` +
        `while the "${falseLabel}" branch has no effect. ` +
        `Split connections: main[0] for ${trueLabel}, main[1] for ${falseLabel}.`;
    } else {
      message = `Switch node "${sourceNode.name}" has ${main0Count} connections on output 0 ` +
        `but no connections on any other outputs (1-${expectedOutputs - 1}). ` +
        `All ${main0Count} target nodes execute together on output 0, ` +
        `while other switch branches have no effect. ` +
        `Distribute connections across outputs to match switch rules.`;
    }

    result.warnings.push({
      type: 'warning',
      nodeId: sourceNode.id,
      nodeName: sourceNode.name,
      message,
      code: 'CONDITIONAL_BRANCH_FANOUT'
    });
  }
|
||||
|
||||
/**
|
||||
* Validate that input index doesn't exceed what the target node accepts.
|
||||
*/
|
||||
|
||||
@@ -134,22 +134,9 @@ export class TestableN8NMCPServer {
|
||||
}
|
||||
}
|
||||
|
||||
// MCP SDK 1.27+ enforces single-connection per Server instance.
|
||||
// Close existing connections before connecting a new transport.
|
||||
for (const conn of this.connections) {
|
||||
try {
|
||||
if (conn && typeof conn.close === 'function') {
|
||||
await conn.close();
|
||||
}
|
||||
} catch {
|
||||
// Ignore errors during cleanup
|
||||
}
|
||||
}
|
||||
this.connections.clear();
|
||||
|
||||
// Track this transport for cleanup
|
||||
this.transports.add(transport);
|
||||
|
||||
|
||||
const connection = await this.server.connect(transport);
|
||||
this.connections.add(connection);
|
||||
}
|
||||
|
||||
@@ -1,566 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { WorkflowAutoFixer } from '@/services/workflow-auto-fixer';
|
||||
import { NodeRepository } from '@/database/node-repository';
|
||||
import type { WorkflowValidationResult } from '@/services/workflow-validator';
|
||||
import type { Workflow, WorkflowNode } from '@/types/n8n-api';
|
||||
|
||||
vi.mock('@/database/node-repository');
|
||||
vi.mock('@/services/node-similarity-service');
|
||||
|
||||
describe('WorkflowAutoFixer - Connection Fixes', () => {
|
||||
let autoFixer: WorkflowAutoFixer;
|
||||
let mockRepository: NodeRepository;
|
||||
|
||||
const createMockWorkflow = (
|
||||
nodes: WorkflowNode[],
|
||||
connections: any = {}
|
||||
): Workflow => ({
|
||||
id: 'test-workflow',
|
||||
name: 'Test Workflow',
|
||||
active: false,
|
||||
nodes,
|
||||
connections,
|
||||
settings: {},
|
||||
createdAt: '',
|
||||
updatedAt: ''
|
||||
});
|
||||
|
||||
const createMockNode = (id: string, name: string, type: string = 'n8n-nodes-base.noOp'): WorkflowNode => ({
|
||||
id,
|
||||
name,
|
||||
type,
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
});
|
||||
|
||||
const emptyValidation: WorkflowValidationResult = {
|
||||
valid: true,
|
||||
errors: [],
|
||||
warnings: [],
|
||||
statistics: {
|
||||
totalNodes: 0,
|
||||
enabledNodes: 0,
|
||||
triggerNodes: 0,
|
||||
validConnections: 0,
|
||||
invalidConnections: 0,
|
||||
expressionsValidated: 0
|
||||
},
|
||||
suggestions: []
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockRepository = new NodeRepository({} as any);
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
|
||||
autoFixer = new WorkflowAutoFixer(mockRepository);
|
||||
});
|
||||
|
||||
describe('Numeric Keys', () => {
|
||||
it('should convert single numeric key to main[index]', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
'0': [[{ node: 'Node2', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-numeric-keys');
|
||||
expect(connFixes).toHaveLength(1);
|
||||
expect(connFixes[0].before).toBe('0');
|
||||
expect(connFixes[0].after).toBe('main[0]');
|
||||
|
||||
// Verify replaceConnections operation
|
||||
const replaceOp = result.operations.find(op => op.type === 'replaceConnections');
|
||||
expect(replaceOp).toBeDefined();
|
||||
const connOp = replaceOp as any;
|
||||
expect(connOp.connections.Node1['main']).toBeDefined();
|
||||
expect(connOp.connections.Node1['0']).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should convert multiple numeric keys', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2'), createMockNode('id3', 'Node3')],
|
||||
{
|
||||
Node1: {
|
||||
'0': [[{ node: 'Node2', type: 'main', index: 0 }]],
|
||||
'1': [[{ node: 'Node3', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-numeric-keys');
|
||||
expect(connFixes).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should merge with existing main entries', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2'), createMockNode('id3', 'Node3')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[{ node: 'Node2', type: 'main', index: 0 }]],
|
||||
'1': [[{ node: 'Node3', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const replaceOp = result.operations.find(op => op.type === 'replaceConnections') as any;
|
||||
expect(replaceOp.connections.Node1['main']).toHaveLength(2);
|
||||
expect(replaceOp.connections.Node1['main'][0]).toEqual([{ node: 'Node2', type: 'main', index: 0 }]);
|
||||
expect(replaceOp.connections.Node1['main'][1]).toEqual([{ node: 'Node3', type: 'main', index: 0 }]);
|
||||
});
|
||||
|
||||
it('should handle sparse numeric keys with gap filling', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2'), createMockNode('id3', 'Node3')],
|
||||
{
|
||||
Node1: {
|
||||
'0': [[{ node: 'Node2', type: 'main', index: 0 }]],
|
||||
'3': [[{ node: 'Node3', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const replaceOp = result.operations.find(op => op.type === 'replaceConnections') as any;
|
||||
expect(replaceOp.connections.Node1['main']).toHaveLength(4);
|
||||
expect(replaceOp.connections.Node1['main'][1]).toEqual([]);
|
||||
expect(replaceOp.connections.Node1['main'][2]).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid Type', () => {
|
||||
it('should fix numeric type to "main"', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[{ node: 'Node2', type: '0', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-invalid-type');
|
||||
expect(connFixes).toHaveLength(1);
|
||||
expect(connFixes[0].before).toBe('0');
|
||||
expect(connFixes[0].after).toBe('main');
|
||||
});
|
||||
|
||||
it('should use parent output key for AI connection types', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
ai_tool: [[{ node: 'Node2', type: '0', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-invalid-type');
|
||||
expect(connFixes).toHaveLength(1);
|
||||
expect(connFixes[0].after).toBe('ai_tool');
|
||||
});
|
||||
});
|
||||
|
||||
describe('ID-to-Name', () => {
|
||||
it('should replace source key when it matches a node ID', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('abc-123', 'Node1'), createMockNode('def-456', 'Node2')],
|
||||
{
|
||||
'abc-123': {
|
||||
main: [[{ node: 'Node2', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-id-to-name');
|
||||
expect(connFixes).toHaveLength(1);
|
||||
expect(connFixes[0].before).toBe('abc-123');
|
||||
expect(connFixes[0].after).toBe('Node1');
|
||||
|
||||
const replaceOp = result.operations.find(op => op.type === 'replaceConnections') as any;
|
||||
expect(replaceOp.connections['Node1']).toBeDefined();
|
||||
expect(replaceOp.connections['abc-123']).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should replace target node value when it matches a node ID', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('abc-123', 'Node1'), createMockNode('def-456', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[{ node: 'def-456', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-id-to-name');
|
||||
expect(connFixes).toHaveLength(1);
|
||||
expect(connFixes[0].before).toBe('def-456');
|
||||
expect(connFixes[0].after).toBe('Node2');
|
||||
});
|
||||
|
||||
it('should NOT fix when key matches both an ID and a name', async () => {
|
||||
// Node with name that looks like an ID of another node
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('abc-123', 'abc-123'), createMockNode('def-456', 'Node2')],
|
||||
{
|
||||
'abc-123': {
|
||||
main: [[{ node: 'Node2', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-id-to-name');
|
||||
expect(connFixes).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Dedup', () => {
|
||||
it('should remove exact duplicate connections', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[
|
||||
{ node: 'Node2', type: 'main', index: 0 },
|
||||
{ node: 'Node2', type: 'main', index: 0 },
|
||||
]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-duplicate-removal');
|
||||
expect(connFixes).toHaveLength(1);
|
||||
|
||||
const replaceOp = result.operations.find(op => op.type === 'replaceConnections') as any;
|
||||
expect(replaceOp.connections.Node1.main[0]).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should keep near-duplicates with different index', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[
|
||||
{ node: 'Node2', type: 'main', index: 0 },
|
||||
{ node: 'Node2', type: 'main', index: 1 },
|
||||
]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-duplicate-removal');
|
||||
expect(connFixes).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Input Index', () => {
|
||||
it('should reset to 0 for single-input nodes', async () => {
|
||||
const validation: WorkflowValidationResult = {
|
||||
...emptyValidation,
|
||||
errors: [{
|
||||
type: 'error',
|
||||
nodeName: 'Node2',
|
||||
message: 'Input index 3 on node "Node2" exceeds its input count (1). Connection from "Node1" targets input 3, but this node has 1 main input(s) (indices 0-0).',
|
||||
code: 'INPUT_INDEX_OUT_OF_BOUNDS'
|
||||
}]
|
||||
};
|
||||
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2', 'n8n-nodes-base.httpRequest')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[{ node: 'Node2', type: 'main', index: 3 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, validation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-input-index');
|
||||
expect(connFixes).toHaveLength(1);
|
||||
expect(connFixes[0].before).toBe(3);
|
||||
expect(connFixes[0].after).toBe(0);
|
||||
expect(connFixes[0].confidence).toBe('medium');
|
||||
});
|
||||
|
||||
it('should clamp for Merge nodes', async () => {
|
||||
const validation: WorkflowValidationResult = {
|
||||
...emptyValidation,
|
||||
errors: [{
|
||||
type: 'error',
|
||||
nodeName: 'MergeNode',
|
||||
message: 'Input index 5 on node "MergeNode" exceeds its input count (2). Connection from "Node1" targets input 5, but this node has 2 main input(s) (indices 0-1).',
|
||||
code: 'INPUT_INDEX_OUT_OF_BOUNDS'
|
||||
}]
|
||||
};
|
||||
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'MergeNode', 'n8n-nodes-base.merge')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[{ node: 'MergeNode', type: 'main', index: 5 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, validation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-input-index');
|
||||
expect(connFixes).toHaveLength(1);
|
||||
expect(connFixes[0].before).toBe(5);
|
||||
expect(connFixes[0].after).toBe(1); // clamped to max valid index
|
||||
});
|
||||
|
||||
it('should not fix valid indices', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[{ node: 'Node2', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-input-index');
|
||||
expect(connFixes).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Combined', () => {
|
||||
it('should fix multiple issues in one workflow', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[
|
||||
createMockNode('id1', 'Node1'),
|
||||
createMockNode('id2', 'Node2'),
|
||||
createMockNode('id3', 'Node3')
|
||||
],
|
||||
{
|
||||
Node1: {
|
||||
'0': [[
|
||||
{ node: 'Node2', type: '0', index: 0 },
|
||||
{ node: 'Node2', type: '0', index: 0 }, // duplicate
|
||||
]]
|
||||
},
|
||||
'id3': { // ID instead of name
|
||||
main: [[{ node: 'Node2', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
expect(result.fixes.length).toBeGreaterThan(0);
|
||||
expect(result.operations.find(op => op.type === 'replaceConnections')).toBeDefined();
|
||||
|
||||
// Should have numeric key, invalid type, dedup, and id-to-name fixes
|
||||
const types = new Set(result.fixes.map(f => f.type));
|
||||
expect(types.has('connection-numeric-keys')).toBe(true);
|
||||
expect(types.has('connection-id-to-name')).toBe(true);
|
||||
});
|
||||
|
||||
it('should be idempotent (no fixes on valid connections)', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[{ node: 'Node2', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connectionFixTypes = [
|
||||
'connection-numeric-keys',
|
||||
'connection-invalid-type',
|
||||
'connection-id-to-name',
|
||||
'connection-duplicate-removal',
|
||||
'connection-input-index'
|
||||
];
|
||||
const connFixes = result.fixes.filter(f => connectionFixTypes.includes(f.type));
|
||||
expect(connFixes).toHaveLength(0);
|
||||
expect(result.operations.find(op => op.type === 'replaceConnections')).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle empty connections', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1')],
|
||||
{}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
expect(result.operations.find(op => op.type === 'replaceConnections')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should respect fixTypes filtering', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
'0': [[{ node: 'Node2', type: '0', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
// Only allow numeric key fixes, not invalid type fixes
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, [], {
|
||||
fixTypes: ['connection-numeric-keys']
|
||||
});
|
||||
|
||||
const numericFixes = result.fixes.filter(f => f.type === 'connection-numeric-keys');
|
||||
const typeFixes = result.fixes.filter(f => f.type === 'connection-invalid-type');
|
||||
expect(numericFixes.length).toBeGreaterThan(0);
|
||||
expect(typeFixes).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should filter replaceConnections from operations when confidence threshold filters all connection fixes', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[{ node: 'Node2', type: 'main', index: 5 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const validation: WorkflowValidationResult = {
|
||||
...emptyValidation,
|
||||
errors: [{
|
||||
type: 'error',
|
||||
nodeName: 'Node2',
|
||||
message: 'Input index 5 on node "Node2" exceeds its input count (1). Connection from "Node1" targets input 5, but this node has 1 main input(s) (indices 0-0).',
|
||||
code: 'INPUT_INDEX_OUT_OF_BOUNDS'
|
||||
}]
|
||||
};
|
||||
|
||||
// Input index fixes are medium confidence. Filter to high only.
|
||||
const result = await autoFixer.generateFixes(workflow, validation, [], {
|
||||
confidenceThreshold: 'high'
|
||||
});
|
||||
|
||||
// Medium confidence fixes should be filtered out
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-input-index');
|
||||
expect(connFixes).toHaveLength(0);
|
||||
expect(result.operations.find(op => op.type === 'replaceConnections')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should include connection issues in summary', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
'0': [[{ node: 'Node2', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
expect(result.summary).toContain('connection');
|
||||
});
|
||||
|
||||
it('should handle non-existent target nodes gracefully', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1')],
|
||||
{
|
||||
Node1: {
|
||||
'0': [[{ node: 'NonExistent', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
// Should not throw
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
expect(result.fixes.some(f => f.type === 'connection-numeric-keys')).toBe(true);
|
||||
});
|
||||
|
||||
it('should skip unparseable INPUT_INDEX_OUT_OF_BOUNDS errors gracefully', async () => {
|
||||
const validation: WorkflowValidationResult = {
|
||||
...emptyValidation,
|
||||
errors: [{
|
||||
type: 'error',
|
||||
nodeName: 'Node2',
|
||||
message: 'Something unexpected about input indices',
|
||||
code: 'INPUT_INDEX_OUT_OF_BOUNDS'
|
||||
}]
|
||||
};
|
||||
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[{ node: 'Node2', type: 'main', index: 5 }]]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, validation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-input-index');
|
||||
expect(connFixes).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should fix both source keys and target .node values as IDs in the same workflow', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[
|
||||
createMockNode('abc-123', 'Node1'),
|
||||
createMockNode('def-456', 'Node2'),
|
||||
createMockNode('ghi-789', 'Node3')
|
||||
],
|
||||
{
|
||||
'abc-123': { // source key is ID
|
||||
main: [[{ node: 'def-456', type: 'main', index: 0 }]] // target .node is also ID
|
||||
},
|
||||
Node2: {
|
||||
main: [[{ node: 'ghi-789', type: 'main', index: 0 }]] // another target ID
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const connFixes = result.fixes.filter(f => f.type === 'connection-id-to-name');
|
||||
|
||||
// Should fix: source key abc-123 → Node1, target def-456 → Node2, target ghi-789 → Node3
|
||||
expect(connFixes).toHaveLength(3);
|
||||
|
||||
const replaceOp = result.operations.find(op => op.type === 'replaceConnections') as any;
|
||||
expect(replaceOp.connections['Node1']).toBeDefined();
|
||||
expect(replaceOp.connections['abc-123']).toBeUndefined();
|
||||
|
||||
// Verify target .node values were also replaced
|
||||
const node1Conns = replaceOp.connections['Node1'].main[0];
|
||||
expect(node1Conns[0].node).toBe('Node2');
|
||||
|
||||
const node2Conns = replaceOp.connections['Node2'].main[0];
|
||||
expect(node2Conns[0].node).toBe('Node3');
|
||||
});
|
||||
|
||||
it('should lower confidence to medium when merging numeric key into non-empty main slot', async () => {
|
||||
const workflow = createMockWorkflow(
|
||||
[createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2'), createMockNode('id3', 'Node3')],
|
||||
{
|
||||
Node1: {
|
||||
main: [[{ node: 'Node2', type: 'main', index: 0 }]],
|
||||
'0': [[{ node: 'Node3', type: 'main', index: 0 }]] // conflicts with existing main[0]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
|
||||
const numericFixes = result.fixes.filter(f => f.type === 'connection-numeric-keys');
|
||||
expect(numericFixes).toHaveLength(1);
|
||||
expect(numericFixes[0].confidence).toBe('medium');
|
||||
expect(numericFixes[0].description).toContain('Merged');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,217 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { WorkflowValidator } from '@/services/workflow-validator';
|
||||
import { NodeRepository } from '@/database/node-repository';
|
||||
import { EnhancedConfigValidator } from '@/services/enhanced-config-validator';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('@/database/node-repository');
|
||||
vi.mock('@/services/enhanced-config-validator');
|
||||
vi.mock('@/services/expression-validator');
|
||||
vi.mock('@/utils/logger');
|
||||
|
||||
describe('WorkflowValidator - AI Sub-Node Main Connection Detection', () => {
|
||||
let validator: WorkflowValidator;
|
||||
let mockNodeRepository: NodeRepository;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
mockNodeRepository = new NodeRepository({} as any) as any;
|
||||
|
||||
if (!mockNodeRepository.getAllNodes) {
|
||||
mockNodeRepository.getAllNodes = vi.fn();
|
||||
}
|
||||
if (!mockNodeRepository.getNode) {
|
||||
mockNodeRepository.getNode = vi.fn();
|
||||
}
|
||||
|
||||
const nodeTypes: Record<string, any> = {
|
||||
'nodes-base.manualTrigger': {
|
||||
type: 'nodes-base.manualTrigger',
|
||||
displayName: 'Manual Trigger',
|
||||
package: 'n8n-nodes-base',
|
||||
isTrigger: true,
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.set': {
|
||||
type: 'nodes-base.set',
|
||||
displayName: 'Set',
|
||||
package: 'n8n-nodes-base',
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-langchain.lmChatGoogleGemini': {
|
||||
type: 'nodes-langchain.lmChatGoogleGemini',
|
||||
displayName: 'Google Gemini Chat Model',
|
||||
package: '@n8n/n8n-nodes-langchain',
|
||||
outputs: ['ai_languageModel'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-langchain.memoryBufferWindow': {
|
||||
type: 'nodes-langchain.memoryBufferWindow',
|
||||
displayName: 'Window Buffer Memory',
|
||||
package: '@n8n/n8n-nodes-langchain',
|
||||
outputs: ['ai_memory'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-langchain.embeddingsOpenAi': {
|
||||
type: 'nodes-langchain.embeddingsOpenAi',
|
||||
displayName: 'Embeddings OpenAI',
|
||||
package: '@n8n/n8n-nodes-langchain',
|
||||
outputs: ['ai_embedding'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-langchain.agent': {
|
||||
type: 'nodes-langchain.agent',
|
||||
displayName: 'AI Agent',
|
||||
package: '@n8n/n8n-nodes-langchain',
|
||||
isAITool: true,
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-langchain.openAi': {
|
||||
type: 'nodes-langchain.openAi',
|
||||
displayName: 'OpenAI',
|
||||
package: '@n8n/n8n-nodes-langchain',
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-langchain.textClassifier': {
|
||||
type: 'nodes-langchain.textClassifier',
|
||||
displayName: 'Text Classifier',
|
||||
package: '@n8n/n8n-nodes-langchain',
|
||||
outputs: ['={{}}'], // Dynamic expression-based outputs
|
||||
properties: [],
|
||||
},
|
||||
'nodes-langchain.vectorStoreInMemory': {
|
||||
type: 'nodes-langchain.vectorStoreInMemory',
|
||||
displayName: 'In-Memory Vector Store',
|
||||
package: '@n8n/n8n-nodes-langchain',
|
||||
outputs: ['={{$parameter["mode"] === "retrieve" ? "main" : "ai_vectorStore"}}'],
|
||||
properties: [],
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(mockNodeRepository.getNode).mockImplementation((nodeType: string) => {
|
||||
return nodeTypes[nodeType] || null;
|
||||
});
|
||||
vi.mocked(mockNodeRepository.getAllNodes).mockReturnValue(Object.values(nodeTypes));
|
||||
|
||||
validator = new WorkflowValidator(
|
||||
mockNodeRepository,
|
||||
EnhancedConfigValidator as any
|
||||
);
|
||||
});
|
||||
|
||||
function makeWorkflow(sourceType: string, sourceName: string, connectionKey: string = 'main') {
|
||||
return {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Manual Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: sourceName, type: sourceType, position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Set', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Manual Trigger': {
|
||||
main: [[{ node: sourceName, type: 'main', index: 0 }]]
|
||||
},
|
||||
[sourceName]: {
|
||||
[connectionKey]: [[{ node: 'Set', type: connectionKey, index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
it('should flag LLM node (lmChatGoogleGemini) connected via main', async () => {
|
||||
const workflow = makeWorkflow(
|
||||
'n8n-nodes-langchain.lmChatGoogleGemini',
|
||||
'Google Gemini'
|
||||
);
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const error = result.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION');
|
||||
expect(error).toBeDefined();
|
||||
expect(error!.message).toContain('ai_languageModel');
|
||||
expect(error!.message).toContain('AI sub-node');
|
||||
expect(error!.nodeName).toBe('Google Gemini');
|
||||
});
|
||||
|
||||
it('should flag memory node (memoryBufferWindow) connected via main', async () => {
|
||||
const workflow = makeWorkflow(
|
||||
'n8n-nodes-langchain.memoryBufferWindow',
|
||||
'Window Buffer Memory'
|
||||
);
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const error = result.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION');
|
||||
expect(error).toBeDefined();
|
||||
expect(error!.message).toContain('ai_memory');
|
||||
});
|
||||
|
||||
it('should flag embeddings node connected via main', async () => {
|
||||
const workflow = makeWorkflow(
|
||||
'n8n-nodes-langchain.embeddingsOpenAi',
|
||||
'Embeddings OpenAI'
|
||||
);
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const error = result.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION');
|
||||
expect(error).toBeDefined();
|
||||
expect(error!.message).toContain('ai_embedding');
|
||||
});
|
||||
|
||||
it('should NOT flag regular langchain nodes (agent, openAi) connected via main', async () => {
|
||||
const workflow1 = makeWorkflow('n8n-nodes-langchain.agent', 'AI Agent');
|
||||
const workflow2 = makeWorkflow('n8n-nodes-langchain.openAi', 'OpenAI');
|
||||
|
||||
const result1 = await validator.validateWorkflow(workflow1 as any);
|
||||
const result2 = await validator.validateWorkflow(workflow2 as any);
|
||||
|
||||
expect(result1.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
expect(result2.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should NOT flag dynamic-output nodes (expression-based outputs)', async () => {
|
||||
const workflow1 = makeWorkflow('n8n-nodes-langchain.textClassifier', 'Text Classifier');
|
||||
const workflow2 = makeWorkflow('n8n-nodes-langchain.vectorStoreInMemory', 'Vector Store');
|
||||
|
||||
const result1 = await validator.validateWorkflow(workflow1 as any);
|
||||
const result2 = await validator.validateWorkflow(workflow2 as any);
|
||||
|
||||
expect(result1.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
expect(result2.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should NOT flag AI sub-node connected via correct AI type', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Manual Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'AI Agent', type: 'n8n-nodes-langchain.agent', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Google Gemini', type: 'n8n-nodes-langchain.lmChatGoogleGemini', position: [200, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Manual Trigger': {
|
||||
main: [[{ node: 'AI Agent', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Google Gemini': {
|
||||
ai_languageModel: [[{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
expect(result.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should NOT flag unknown/community nodes not in database', async () => {
|
||||
const workflow = makeWorkflow('n8n-nodes-community.someNode', 'Community Node');
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
expect(result.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
});
|
||||
});
|
||||
@@ -63,20 +63,6 @@ describe('WorkflowValidator - Connection Validation (#620)', () => {
|
||||
outputs: ['main', 'main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.filter': {
|
||||
type: 'nodes-base.filter',
|
||||
displayName: 'Filter',
|
||||
package: 'n8n-nodes-base',
|
||||
outputs: ['main', 'main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.switch': {
|
||||
type: 'nodes-base.switch',
|
||||
displayName: 'Switch',
|
||||
package: 'n8n-nodes-base',
|
||||
outputs: ['main', 'main', 'main', 'main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.googleSheets': {
|
||||
type: 'nodes-base.googleSheets',
|
||||
displayName: 'Google Sheets',
|
||||
@@ -729,190 +715,4 @@ describe('WorkflowValidator - Connection Validation (#620)', () => {
|
||||
expect(orphanWarning!.message).toContain('not connected to any other nodes');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Conditional branch fan-out detection (CONDITIONAL_BRANCH_FANOUT)', () => {
|
||||
it('should warn when IF node has both branches in main[0]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Route', type: 'n8n-nodes-base.if', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TrueTarget', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'FalseTarget', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'Route', type: 'main', index: 0 }]] },
|
||||
'Route': {
|
||||
main: [[{ node: 'TrueTarget', type: 'main', index: 0 }, { node: 'FalseTarget', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeDefined();
|
||||
expect(warning!.nodeName).toBe('Route');
|
||||
expect(warning!.message).toContain('2 connections on the "true" branch');
|
||||
expect(warning!.message).toContain('"false" branch has no effect');
|
||||
});
|
||||
|
||||
it('should not warn when IF node has correct true/false split', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Route', type: 'n8n-nodes-base.if', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TrueTarget', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'FalseTarget', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'Route', type: 'main', index: 0 }]] },
|
||||
'Route': {
|
||||
main: [
|
||||
[{ node: 'TrueTarget', type: 'main', index: 0 }],
|
||||
[{ node: 'FalseTarget', type: 'main', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not warn when IF has fan-out on main[0] AND connections on main[1]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Route', type: 'n8n-nodes-base.if', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TrueA', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'TrueB', type: 'n8n-nodes-base.set', position: [400, 100], parameters: {} },
|
||||
{ id: '5', name: 'FalseTarget', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'Route', type: 'main', index: 0 }]] },
|
||||
'Route': {
|
||||
main: [
|
||||
[{ node: 'TrueA', type: 'main', index: 0 }, { node: 'TrueB', type: 'main', index: 0 }],
|
||||
[{ node: 'FalseTarget', type: 'main', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should warn when Switch node has all connections on main[0]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'MySwitch', type: 'n8n-nodes-base.switch', position: [200, 0], parameters: { rules: { values: [{ value: 'a' }, { value: 'b' }] } } },
|
||||
{ id: '3', name: 'TargetA', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'TargetB', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
{ id: '5', name: 'TargetC', type: 'n8n-nodes-base.set', position: [400, 400], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'MySwitch', type: 'main', index: 0 }]] },
|
||||
'MySwitch': {
|
||||
main: [[{ node: 'TargetA', type: 'main', index: 0 }, { node: 'TargetB', type: 'main', index: 0 }, { node: 'TargetC', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeDefined();
|
||||
expect(warning!.nodeName).toBe('MySwitch');
|
||||
expect(warning!.message).toContain('3 connections on output 0');
|
||||
expect(warning!.message).toContain('other switch branches have no effect');
|
||||
});
|
||||
|
||||
it('should not warn when Switch node has no rules parameter (indeterminate outputs)', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'MySwitch', type: 'n8n-nodes-base.switch', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TargetA', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'TargetB', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'MySwitch', type: 'main', index: 0 }]] },
|
||||
'MySwitch': {
|
||||
main: [[{ node: 'TargetA', type: 'main', index: 0 }, { node: 'TargetB', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not warn when regular node has fan-out on main[0]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'MySet', type: 'n8n-nodes-base.set', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TargetA', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'TargetB', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'MySet', type: 'main', index: 0 }]] },
|
||||
'MySet': {
|
||||
main: [[{ node: 'TargetA', type: 'main', index: 0 }, { node: 'TargetB', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not warn when IF has only 1 connection on main[0] with empty main[1]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Route', type: 'n8n-nodes-base.if', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TrueOnly', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'Route', type: 'main', index: 0 }]] },
|
||||
'Route': {
|
||||
main: [[{ node: 'TrueOnly', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should warn for Filter node with both branches in main[0]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'MyFilter', type: 'n8n-nodes-base.filter', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Matched', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'Unmatched', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'MyFilter', type: 'main', index: 0 }]] },
|
||||
'MyFilter': {
|
||||
main: [[{ node: 'Matched', type: 'main', index: 0 }, { node: 'Unmatched', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeDefined();
|
||||
expect(warning!.nodeName).toBe('MyFilter');
|
||||
expect(warning!.message).toContain('"matched" branch');
|
||||
expect(warning!.message).toContain('"unmatched" branch has no effect');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user