mirror of
https://github.com/czlonkowski/n8n-mcp.git
synced 2026-03-17 07:53:08 +00:00
Compare commits
1 Commits
v2.36.1
...
fix/605-co
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b4009cee7c |
4
.github/workflows/dependency-check.yml
vendored
4
.github/workflows/dependency-check.yml
vendored
@@ -59,9 +59,7 @@ jobs:
|
||||
run: |
|
||||
npm init -y
|
||||
# Install from tarball WITHOUT lockfile (simulates npm install n8n-mcp)
|
||||
# Use --ignore-scripts to skip native compilation of transitive deps like isolated-vm
|
||||
# (n8n-mcp only reads node metadata, it doesn't execute n8n nodes at runtime)
|
||||
npm install --ignore-scripts ./n8n-mcp-*.tgz
|
||||
npm install ./n8n-mcp-*.tgz
|
||||
|
||||
- name: Verify critical dependency versions
|
||||
working-directory: /tmp/fresh-install-test
|
||||
|
||||
40
CHANGELOG.md
40
CHANGELOG.md
@@ -7,46 +7,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
## [2.36.1] - 2026-03-08
|
||||
|
||||
### Added
|
||||
|
||||
- **Conditional branch fan-out detection** (`CONDITIONAL_BRANCH_FANOUT`): Warns when IF, Filter, or Switch nodes have all connections crammed into `main[0]` with higher-index outputs empty, which usually means all target nodes execute together on one branch while other branches have no effect
|
||||
- Detects IF nodes with both true/false targets on `main[0]`
|
||||
- Detects Filter nodes with both matched/unmatched targets on `main[0]`
|
||||
- Detects Switch nodes with all targets on output 0 and other outputs unused
|
||||
- Skips warning when fan-out is legitimate (higher outputs also have connections)
|
||||
- Skips warning for single connections (intentional true-only/matched-only usage)
|
||||
|
||||
### Changed
|
||||
|
||||
- **Refactored output index validation**: Extracted `getShortNodeType()` and `getConditionalOutputInfo()` helpers to eliminate duplicated conditional node detection logic between `validateOutputIndexBounds` and the new `validateConditionalBranchUsage`
|
||||
|
||||
Conceived by Romuald Czlonkowski - https://www.aiadvisors.pl/en
|
||||
|
||||
## [2.36.0] - 2026-03-07
|
||||
|
||||
### Added
|
||||
|
||||
- **Connection validation: detect broken/malformed workflow connections** (Issue #620):
|
||||
- Unknown output keys (`UNKNOWN_CONNECTION_KEY`): Flags invalid connection keys like `"0"`, `"1"`, `"output"` with fix suggestions (e.g., "use main[1] instead" for numeric keys)
|
||||
- Invalid type field (`INVALID_CONNECTION_TYPE`): Detects invalid `type` values in connection targets (e.g., `"0"` instead of `"main"`)
|
||||
- Output index bounds checking (`OUTPUT_INDEX_OUT_OF_BOUNDS`): Catches connections using output indices beyond what a node supports, with awareness of `onError: 'continueErrorOutput'`, Switch rules, and IF/Filter nodes
|
||||
- Input index bounds checking (`INPUT_INDEX_OUT_OF_BOUNDS`): Validates target input indices against known node input counts (Merge=2, triggers=0, others=1)
|
||||
- BFS-based trigger reachability analysis: Replaces simple orphan detection with proper graph traversal from trigger nodes, flagging unreachable subgraphs
|
||||
- Flexible `WorkflowConnection` interface: Changed from explicit `main?/error?/ai_tool?` to `[outputType: string]` for accurate validation of all connection types
|
||||
|
||||
Conceived by Romuald Czlonkowski - https://www.aiadvisors.pl/en
|
||||
|
||||
## [2.35.6] - 2026-03-04
|
||||
|
||||
### Changed
|
||||
|
||||
- **Updated n8n dependencies**: n8n 2.8.3 → 2.10.3, n8n-core 2.8.1 → 2.10.1, n8n-workflow 2.8.0 → 2.10.1, @n8n/n8n-nodes-langchain 2.8.1 → 2.10.1
|
||||
- Rebuilt node database with 806 core nodes (community nodes preserved from previous build)
|
||||
|
||||
Conceived by Romuald Czlonkowski - https://www.aiadvisors.pl/en
|
||||
|
||||
## [2.35.5] - 2026-02-22
|
||||
|
||||
### Fixed
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
[](https://www.npmjs.com/package/n8n-mcp)
|
||||
[](https://codecov.io/gh/czlonkowski/n8n-mcp)
|
||||
[](https://github.com/czlonkowski/n8n-mcp/actions)
|
||||
[](https://github.com/n8n-io/n8n)
|
||||
[](https://github.com/n8n-io/n8n)
|
||||
[](https://github.com/czlonkowski/n8n-mcp/pkgs/container/n8n-mcp)
|
||||
[](https://railway.com/deploy/n8n-mcp?referralCode=n8n-mcp)
|
||||
|
||||
|
||||
BIN
data/nodes.db
BIN
data/nodes.db
Binary file not shown.
2
dist/mcp/handlers-n8n-manager.d.ts.map
vendored
2
dist/mcp/handlers-n8n-manager.d.ts.map
vendored
@@ -1 +1 @@
|
||||
{"version":3,"file":"handlers-n8n-manager.d.ts","sourceRoot":"","sources":["../../src/mcp/handlers-n8n-manager.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAE1D,OAAO,EAML,eAAe,EAGhB,MAAM,kBAAkB,CAAC;AAkB1B,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,eAAe,EAA2B,MAAM,2BAA2B,CAAC;AAOrF,OAAO,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAqNhE,wBAAgB,0BAA0B,IAAI,MAAM,CAEnD;AAMD,wBAAgB,uBAAuB,gDAEtC;AAKD,wBAAgB,kBAAkB,IAAI,IAAI,CAIzC;AAED,wBAAgB,eAAe,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,YAAY,GAAG,IAAI,CAgF9E;AA2HD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmF7G;AAED,wBAAsB,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC1G;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAoDjH;AAED,wBAAsB,0BAA0B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmDnH;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyCjH;AAED,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA8H1B;AAeD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAsC7G;AAED,wBAAsB,mBAAmB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiE5G;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA0F1B;AAED,wBAAsB,qBAAqB,CACzC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoK1B;AAQD,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAwJ3G;AAED,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CA8H3G;AAED,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAgD7G;AAED,wBAAsB,qBAAqB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC9G;AAID,wBAAsB,iBAAiB,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe
,CAAC,CAwG3F;AAkLD,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAkQxG;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAsL1B;AA+BD,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,eAAe,EAAE,eAAe,EAChC,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoM1B;AAQD,wBAAsB,4BAA4B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyErH"}
|
||||
{"version":3,"file":"handlers-n8n-manager.d.ts","sourceRoot":"","sources":["../../src/mcp/handlers-n8n-manager.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAE1D,OAAO,EAML,eAAe,EAGhB,MAAM,kBAAkB,CAAC;AAkB1B,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,eAAe,EAA2B,MAAM,2BAA2B,CAAC;AAOrF,OAAO,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAqNhE,wBAAgB,0BAA0B,IAAI,MAAM,CAEnD;AAMD,wBAAgB,uBAAuB,gDAEtC;AAKD,wBAAgB,kBAAkB,IAAI,IAAI,CAIzC;AAED,wBAAgB,eAAe,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,YAAY,GAAG,IAAI,CAgF9E;AAqHD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmF7G;AAED,wBAAsB,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC1G;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAoDjH;AAED,wBAAsB,0BAA0B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAmDnH;AAED,wBAAsB,wBAAwB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyCjH;AAED,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA8H1B;AAeD,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAsC7G;AAED,wBAAsB,mBAAmB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiE5G;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CA0F1B;AAED,wBAAsB,qBAAqB,CACzC,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoK1B;AAQD,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAwJ3G;AAED,wBAAsB,kBAAkB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CA8H3G;AAED,wBAAsB,oBAAoB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAgD7G;AAED,wBAAsB,qBAAqB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAiC9G;AAID,wBAAsB,iBAAiB,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe
,CAAC,CAwG3F;AAkLD,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAkQxG;AAED,wBAAsB,sBAAsB,CAC1C,IAAI,EAAE,OAAO,EACb,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAsL1B;AA+BD,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,OAAO,EACb,eAAe,EAAE,eAAe,EAChC,UAAU,EAAE,cAAc,EAC1B,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,eAAe,CAAC,CAoM1B;AAQD,wBAAsB,4BAA4B,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAyErH"}
|
||||
8
dist/mcp/handlers-n8n-manager.js
vendored
8
dist/mcp/handlers-n8n-manager.js
vendored
@@ -212,13 +212,7 @@ const autofixWorkflowSchema = zod_1.z.object({
|
||||
'node-type-correction',
|
||||
'webhook-missing-path',
|
||||
'typeversion-upgrade',
|
||||
'version-migration',
|
||||
'tool-variant-correction',
|
||||
'connection-numeric-keys',
|
||||
'connection-invalid-type',
|
||||
'connection-id-to-name',
|
||||
'connection-duplicate-removal',
|
||||
'connection-input-index'
|
||||
'version-migration'
|
||||
])).optional(),
|
||||
confidenceThreshold: zod_1.z.enum(['high', 'medium', 'low']).optional().default('medium'),
|
||||
maxFixes: zod_1.z.number().optional().default(50)
|
||||
|
||||
2
dist/mcp/handlers-n8n-manager.js.map
vendored
2
dist/mcp/handlers-n8n-manager.js.map
vendored
File diff suppressed because one or more lines are too long
4
dist/mcp/tools-n8n-manager.js
vendored
4
dist/mcp/tools-n8n-manager.js
vendored
@@ -278,7 +278,7 @@ exports.n8nManagementTools = [
|
||||
},
|
||||
{
|
||||
name: 'n8n_autofix_workflow',
|
||||
description: `Automatically fix common workflow validation errors. Preview fixes or apply them. Fixes expression format, typeVersion, error output config, webhook paths, connection structure issues (numeric keys, invalid types, ID-to-name, duplicates, out-of-bounds indices).`,
|
||||
description: `Automatically fix common workflow validation errors. Preview fixes or apply them. Fixes expression format, typeVersion, error output config, webhook paths.`,
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
@@ -295,7 +295,7 @@ exports.n8nManagementTools = [
|
||||
description: 'Types of fixes to apply (default: all)',
|
||||
items: {
|
||||
type: 'string',
|
||||
enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path', 'typeversion-upgrade', 'version-migration', 'tool-variant-correction', 'connection-numeric-keys', 'connection-invalid-type', 'connection-id-to-name', 'connection-duplicate-removal', 'connection-input-index']
|
||||
enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path', 'typeversion-upgrade', 'version-migration']
|
||||
}
|
||||
},
|
||||
confidenceThreshold: {
|
||||
|
||||
2
dist/mcp/tools-n8n-manager.js.map
vendored
2
dist/mcp/tools-n8n-manager.js.map
vendored
File diff suppressed because one or more lines are too long
9
dist/services/workflow-auto-fixer.d.ts
vendored
9
dist/services/workflow-auto-fixer.d.ts
vendored
@@ -5,8 +5,7 @@ import { WorkflowDiffOperation } from '../types/workflow-diff';
|
||||
import { Workflow } from '../types/n8n-api';
|
||||
import { PostUpdateGuidance } from './post-update-validator';
|
||||
export type FixConfidenceLevel = 'high' | 'medium' | 'low';
|
||||
export type FixType = 'expression-format' | 'typeversion-correction' | 'error-output-config' | 'node-type-correction' | 'webhook-missing-path' | 'typeversion-upgrade' | 'version-migration' | 'tool-variant-correction' | 'connection-numeric-keys' | 'connection-invalid-type' | 'connection-id-to-name' | 'connection-duplicate-removal' | 'connection-input-index';
|
||||
export declare const CONNECTION_FIX_TYPES: FixType[];
|
||||
export type FixType = 'expression-format' | 'typeversion-correction' | 'error-output-config' | 'node-type-correction' | 'webhook-missing-path' | 'typeversion-upgrade' | 'version-migration' | 'tool-variant-correction';
|
||||
export interface AutoFixConfig {
|
||||
applyFixes: boolean;
|
||||
fixTypes?: FixType[];
|
||||
@@ -69,12 +68,6 @@ export declare class WorkflowAutoFixer {
|
||||
private filterOperationsByFixes;
|
||||
private calculateStats;
|
||||
private generateSummary;
|
||||
private processConnectionFixes;
|
||||
private fixNumericKeys;
|
||||
private fixIdToName;
|
||||
private fixInvalidTypes;
|
||||
private fixInputIndices;
|
||||
private fixDuplicateConnections;
|
||||
private processVersionUpgradeFixes;
|
||||
private processVersionMigrationFixes;
|
||||
}
|
||||
|
||||
2
dist/services/workflow-auto-fixer.d.ts.map
vendored
2
dist/services/workflow-auto-fixer.d.ts.map
vendored
@@ -1 +1 @@
|
||||
{"version":3,"file":"workflow-auto-fixer.d.ts","sourceRoot":"","sources":["../../src/services/workflow-auto-fixer.ts"],"names":[],"mappings":"AAQA,OAAO,EAAE,wBAAwB,EAA0B,MAAM,sBAAsB,CAAC;AACxF,OAAO,EAAE,qBAAqB,EAAE,MAAM,+BAA+B,CAAC;AAEtE,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EACL,qBAAqB,EAGtB,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAgB,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAK1D,OAAO,EAAuB,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAIlF,MAAM,MAAM,kBAAkB,GAAG,MAAM,GAAG,QAAQ,GAAG,KAAK,CAAC;AAC3D,MAAM,MAAM,OAAO,GACf,mBAAmB,GACnB,wBAAwB,GACxB,qBAAqB,GACrB,sBAAsB,GACtB,sBAAsB,GACtB,qBAAqB,GACrB,mBAAmB,GACnB,yBAAyB,GACzB,yBAAyB,GACzB,yBAAyB,GACzB,uBAAuB,GACvB,8BAA8B,GAC9B,wBAAwB,CAAC;AAE7B,eAAO,MAAM,oBAAoB,EAAE,OAAO,EAMzC,CAAC;AAEF,MAAM,WAAW,aAAa;IAC5B,UAAU,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,mBAAmB,CAAC,EAAE,kBAAkB,CAAC;IACzC,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,OAAO,CAAC;IACd,MAAM,EAAE,GAAG,CAAC;IACZ,KAAK,EAAE,GAAG,CAAC;IACX,UAAU,EAAE,kBAAkB,CAAC;IAC/B,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,aAAa;IAC5B,UAAU,EAAE,qBAAqB,EAAE,CAAC;IACpC,KAAK,EAAE,YAAY,EAAE,CAAC;IACtB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE;QACL,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAChC,YAAY,EAAE,MAAM,CAAC,kBAAkB,EAAE,MAAM,CAAC,CAAC;KAClD,CAAC;IACF,kBAAkB,CAAC,EAAE,kBAAkB,EAAE,CAAC;CAC3C;AAED,MAAM,WAAW,eAAgB,SAAQ,qBAAqB;IAC5D,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;CAChB;AAKD,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,qBAAqB,GAAG,KAAK,IAAI,eAAe,CAIxF;AAKD,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,OAAO,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,WAAW,CAAC,EAAE,KAAK,CAAC;QAClB,QAAQ,EAAE,MAAM,CAAC;QACjB,UAAU,EAAE,MAAM,CAAC;QACnB,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC,CAAC;CACJ;AAED,qBAAa,iBAAiB;IAC5B,OAAO,CAAC,QAAQ,CAAC,aAAa,CAI5B;IACF,OAAO,CAAC,iBAAiB,CAAsC;IAC/D,OAAO,CAAC,cAAc,CAAmC;IACzD,OAAO,CAAC,sBAAsB,CAAuC;IACrE,OAAO,CAAC,gBAAgB,CAAqC;IAC7D,OAAO,CAAC,mBAAmB,CAAoC;gBAEnD
,UAAU,CAAC,EAAE,cAAc;IAajC,aAAa,CACjB,QAAQ,EAAE,QAAQ,EAClB,gBAAgB,EAAE,wBAAwB,EAC1C,YAAY,GAAE,qBAAqB,EAAO,EAC1C,MAAM,GAAE,OAAO,CAAC,aAAa,CAAM,GAClC,OAAO,CAAC,aAAa,CAAC;IAgFzB,OAAO,CAAC,4BAA4B;IAqEpC,OAAO,CAAC,uBAAuB;IA8C/B,OAAO,CAAC,uBAAuB;IA0C/B,OAAO,CAAC,oBAAoB;IAkD5B,OAAO,CAAC,uBAAuB;IAwE/B,OAAO,CAAC,uBAAuB;IAsD/B,OAAO,CAAC,cAAc;IAmGtB,OAAO,CAAC,kBAAkB;IAkB1B,OAAO,CAAC,uBAAuB;IAqB/B,OAAO,CAAC,cAAc;IAoCtB,OAAO,CAAC,eAAe;IAwDvB,OAAO,CAAC,sBAAsB;IAgF9B,OAAO,CAAC,cAAc;IA+DtB,OAAO,CAAC,WAAW;IA6EnB,OAAO,CAAC,eAAe;IAqCvB,OAAO,CAAC,eAAe;IA4DvB,OAAO,CAAC,uBAAuB;YA6CjB,0BAA0B;YAmF1B,4BAA4B;CAiF3C"}
|
||||
{"version":3,"file":"workflow-auto-fixer.d.ts","sourceRoot":"","sources":["../../src/services/workflow-auto-fixer.ts"],"names":[],"mappings":"AAQA,OAAO,EAAE,wBAAwB,EAAE,MAAM,sBAAsB,CAAC;AAChE,OAAO,EAAE,qBAAqB,EAAE,MAAM,+BAA+B,CAAC;AAEtE,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EACL,qBAAqB,EAEtB,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAgB,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAK1D,OAAO,EAAuB,kBAAkB,EAAE,MAAM,yBAAyB,CAAC;AAIlF,MAAM,MAAM,kBAAkB,GAAG,MAAM,GAAG,QAAQ,GAAG,KAAK,CAAC;AAC3D,MAAM,MAAM,OAAO,GACf,mBAAmB,GACnB,wBAAwB,GACxB,qBAAqB,GACrB,sBAAsB,GACtB,sBAAsB,GACtB,qBAAqB,GACrB,mBAAmB,GACnB,yBAAyB,CAAC;AAE9B,MAAM,WAAW,aAAa;IAC5B,UAAU,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,mBAAmB,CAAC,EAAE,kBAAkB,CAAC;IACzC,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,OAAO,CAAC;IACd,MAAM,EAAE,GAAG,CAAC;IACZ,KAAK,EAAE,GAAG,CAAC;IACX,UAAU,EAAE,kBAAkB,CAAC;IAC/B,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,aAAa;IAC5B,UAAU,EAAE,qBAAqB,EAAE,CAAC;IACpC,KAAK,EAAE,YAAY,EAAE,CAAC;IACtB,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE;QACL,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAChC,YAAY,EAAE,MAAM,CAAC,kBAAkB,EAAE,MAAM,CAAC,CAAC;KAClD,CAAC;IACF,kBAAkB,CAAC,EAAE,kBAAkB,EAAE,CAAC;CAC3C;AAED,MAAM,WAAW,eAAgB,SAAQ,qBAAqB;IAC5D,QAAQ,EAAE,MAAM,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;CAChB;AAKD,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,qBAAqB,GAAG,KAAK,IAAI,eAAe,CAIxF;AAKD,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,OAAO,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,WAAW,CAAC,EAAE,KAAK,CAAC;QAClB,QAAQ,EAAE,MAAM,CAAC;QACjB,UAAU,EAAE,MAAM,CAAC;QACnB,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC,CAAC;CACJ;AAED,qBAAa,iBAAiB;IAC5B,OAAO,CAAC,QAAQ,CAAC,aAAa,CAI5B;IACF,OAAO,CAAC,iBAAiB,CAAsC;IAC/D,OAAO,CAAC,cAAc,CAAmC;IACzD,OAAO,CAAC,sBAAsB,CAAuC;IACrE,OAAO,CAAC,gBAAgB,CAAqC;IAC7D,OAAO,CAAC,mBAAmB,CAAoC;gBAEnD,UAAU,CAAC,EAAE,cAAc;IAajC,aAAa,CACjB,QAAQ,EAAE,QAAQ,EAClB,gBAAgB,EAAE,wBAAwB,EAC1C,YAAY,GAAE,qBAAqB,EAAO,EAC
1C,MAAM,GAAE,OAAO,CAAC,aAAa,CAAM,GAClC,OAAO,CAAC,aAAa,CAAC;IA6EzB,OAAO,CAAC,4BAA4B;IAqEpC,OAAO,CAAC,uBAAuB;IA8C/B,OAAO,CAAC,uBAAuB;IA0C/B,OAAO,CAAC,oBAAoB;IAkD5B,OAAO,CAAC,uBAAuB;IAwE/B,OAAO,CAAC,uBAAuB;IAsD/B,OAAO,CAAC,cAAc;IAmGtB,OAAO,CAAC,kBAAkB;IAkB1B,OAAO,CAAC,uBAAuB;IAiB/B,OAAO,CAAC,cAAc;IA+BtB,OAAO,CAAC,eAAe;YA4CT,0BAA0B;YAmF1B,4BAA4B;CAiF3C"}
|
||||
310
dist/services/workflow-auto-fixer.js
vendored
310
dist/services/workflow-auto-fixer.js
vendored
@@ -3,10 +3,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.WorkflowAutoFixer = exports.CONNECTION_FIX_TYPES = void 0;
|
||||
exports.WorkflowAutoFixer = void 0;
|
||||
exports.isNodeFormatIssue = isNodeFormatIssue;
|
||||
const crypto_1 = __importDefault(require("crypto"));
|
||||
const workflow_validator_1 = require("./workflow-validator");
|
||||
const node_similarity_service_1 = require("./node-similarity-service");
|
||||
const logger_1 = require("../utils/logger");
|
||||
const node_version_service_1 = require("./node-version-service");
|
||||
@@ -14,13 +13,6 @@ const breaking_change_detector_1 = require("./breaking-change-detector");
|
||||
const node_migration_service_1 = require("./node-migration-service");
|
||||
const post_update_validator_1 = require("./post-update-validator");
|
||||
const logger = new logger_1.Logger({ prefix: '[WorkflowAutoFixer]' });
|
||||
exports.CONNECTION_FIX_TYPES = [
|
||||
'connection-numeric-keys',
|
||||
'connection-invalid-type',
|
||||
'connection-id-to-name',
|
||||
'connection-duplicate-removal',
|
||||
'connection-input-index'
|
||||
];
|
||||
function isNodeFormatIssue(issue) {
|
||||
return 'nodeName' in issue && 'nodeId' in issue &&
|
||||
typeof issue.nodeName === 'string' &&
|
||||
@@ -80,7 +72,6 @@ class WorkflowAutoFixer {
|
||||
if (!fullConfig.fixTypes || fullConfig.fixTypes.includes('version-migration')) {
|
||||
await this.processVersionMigrationFixes(workflow, nodeMap, operations, fixes, postUpdateGuidance);
|
||||
}
|
||||
this.processConnectionFixes(workflow, validationResult, fullConfig, operations, fixes);
|
||||
const filteredFixes = this.filterByConfidence(fixes, fullConfig.confidenceThreshold);
|
||||
const filteredOperations = this.filterOperationsByFixes(operations, filteredFixes, fixes);
|
||||
const limitedFixes = filteredFixes.slice(0, fullConfig.maxFixes);
|
||||
@@ -402,14 +393,10 @@ class WorkflowAutoFixer {
|
||||
}
|
||||
filterOperationsByFixes(operations, filteredFixes, allFixes) {
|
||||
const fixedNodes = new Set(filteredFixes.map(f => f.node));
|
||||
const hasConnectionFixes = filteredFixes.some(f => exports.CONNECTION_FIX_TYPES.includes(f.type));
|
||||
return operations.filter(op => {
|
||||
if (op.type === 'updateNode') {
|
||||
return fixedNodes.has(op.nodeId || '');
|
||||
}
|
||||
if (op.type === 'replaceConnections') {
|
||||
return hasConnectionFixes;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
@@ -424,12 +411,7 @@ class WorkflowAutoFixer {
|
||||
'webhook-missing-path': 0,
|
||||
'typeversion-upgrade': 0,
|
||||
'version-migration': 0,
|
||||
'tool-variant-correction': 0,
|
||||
'connection-numeric-keys': 0,
|
||||
'connection-invalid-type': 0,
|
||||
'connection-id-to-name': 0,
|
||||
'connection-duplicate-removal': 0,
|
||||
'connection-input-index': 0
|
||||
'tool-variant-correction': 0
|
||||
},
|
||||
byConfidence: {
|
||||
'high': 0,
|
||||
@@ -472,299 +454,11 @@ class WorkflowAutoFixer {
|
||||
if (stats.byType['tool-variant-correction'] > 0) {
|
||||
parts.push(`${stats.byType['tool-variant-correction']} tool variant ${stats.byType['tool-variant-correction'] === 1 ? 'correction' : 'corrections'}`);
|
||||
}
|
||||
const connectionIssueCount = (stats.byType['connection-numeric-keys'] || 0) +
|
||||
(stats.byType['connection-invalid-type'] || 0) +
|
||||
(stats.byType['connection-id-to-name'] || 0) +
|
||||
(stats.byType['connection-duplicate-removal'] || 0) +
|
||||
(stats.byType['connection-input-index'] || 0);
|
||||
if (connectionIssueCount > 0) {
|
||||
parts.push(`${connectionIssueCount} connection ${connectionIssueCount === 1 ? 'issue' : 'issues'}`);
|
||||
}
|
||||
if (parts.length === 0) {
|
||||
return `Fixed ${stats.total} ${stats.total === 1 ? 'issue' : 'issues'}`;
|
||||
}
|
||||
return `Fixed ${parts.join(', ')}`;
|
||||
}
|
||||
processConnectionFixes(workflow, validationResult, config, operations, fixes) {
|
||||
if (!workflow.connections || Object.keys(workflow.connections).length === 0) {
|
||||
return;
|
||||
}
|
||||
const idToNameMap = new Map();
|
||||
const nameSet = new Set();
|
||||
for (const node of workflow.nodes) {
|
||||
idToNameMap.set(node.id, node.name);
|
||||
nameSet.add(node.name);
|
||||
}
|
||||
const conn = JSON.parse(JSON.stringify(workflow.connections));
|
||||
let anyFixed = false;
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-numeric-keys')) {
|
||||
const numericKeyResult = this.fixNumericKeys(conn);
|
||||
if (numericKeyResult.length > 0) {
|
||||
fixes.push(...numericKeyResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-id-to-name')) {
|
||||
const idToNameResult = this.fixIdToName(conn, idToNameMap, nameSet);
|
||||
if (idToNameResult.length > 0) {
|
||||
fixes.push(...idToNameResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-invalid-type')) {
|
||||
const invalidTypeResult = this.fixInvalidTypes(conn);
|
||||
if (invalidTypeResult.length > 0) {
|
||||
fixes.push(...invalidTypeResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-input-index')) {
|
||||
const inputIndexResult = this.fixInputIndices(conn, validationResult, workflow);
|
||||
if (inputIndexResult.length > 0) {
|
||||
fixes.push(...inputIndexResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-duplicate-removal')) {
|
||||
const dedupResult = this.fixDuplicateConnections(conn);
|
||||
if (dedupResult.length > 0) {
|
||||
fixes.push(...dedupResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
if (anyFixed) {
|
||||
const op = {
|
||||
type: 'replaceConnections',
|
||||
connections: conn
|
||||
};
|
||||
operations.push(op);
|
||||
}
|
||||
}
|
||||
fixNumericKeys(conn) {
|
||||
const fixes = [];
|
||||
const sourceNodes = Object.keys(conn);
|
||||
for (const sourceName of sourceNodes) {
|
||||
const nodeConn = conn[sourceName];
|
||||
const numericKeys = Object.keys(nodeConn).filter(k => /^\d+$/.test(k));
|
||||
if (numericKeys.length === 0)
|
||||
continue;
|
||||
if (!nodeConn['main']) {
|
||||
nodeConn['main'] = [];
|
||||
}
|
||||
for (const numKey of numericKeys) {
|
||||
const index = parseInt(numKey, 10);
|
||||
const entries = nodeConn[numKey];
|
||||
while (nodeConn['main'].length <= index) {
|
||||
nodeConn['main'].push([]);
|
||||
}
|
||||
const hadExisting = nodeConn['main'][index] && nodeConn['main'][index].length > 0;
|
||||
if (Array.isArray(entries)) {
|
||||
for (const outputGroup of entries) {
|
||||
if (Array.isArray(outputGroup)) {
|
||||
nodeConn['main'][index] = [
|
||||
...nodeConn['main'][index],
|
||||
...outputGroup
|
||||
];
|
||||
}
|
||||
}
|
||||
}
|
||||
if (hadExisting) {
|
||||
logger.warn(`Merged numeric key "${numKey}" into existing main[${index}] on node "${sourceName}" - dedup pass will clean exact duplicates`);
|
||||
}
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${numKey}`,
|
||||
type: 'connection-numeric-keys',
|
||||
before: numKey,
|
||||
after: `main[${index}]`,
|
||||
confidence: hadExisting ? 'medium' : 'high',
|
||||
description: hadExisting
|
||||
? `Merged numeric connection key "${numKey}" into existing main[${index}] on node "${sourceName}"`
|
||||
: `Converted numeric connection key "${numKey}" to main[${index}] on node "${sourceName}"`
|
||||
});
|
||||
delete nodeConn[numKey];
|
||||
}
|
||||
}
|
||||
return fixes;
|
||||
}
|
||||
fixIdToName(conn, idToNameMap, nameSet) {
|
||||
const fixes = [];
|
||||
const renames = [];
|
||||
const sourceKeys = Object.keys(conn);
|
||||
for (const sourceKey of sourceKeys) {
|
||||
if (idToNameMap.has(sourceKey) && !nameSet.has(sourceKey)) {
|
||||
renames.push({ oldKey: sourceKey, newKey: idToNameMap.get(sourceKey) });
|
||||
}
|
||||
}
|
||||
const newKeyCount = new Map();
|
||||
for (const r of renames) {
|
||||
newKeyCount.set(r.newKey, (newKeyCount.get(r.newKey) || 0) + 1);
|
||||
}
|
||||
const safeRenames = renames.filter(r => {
|
||||
if ((newKeyCount.get(r.newKey) || 0) > 1) {
|
||||
logger.warn(`Skipping ambiguous ID-to-name rename: "${r.oldKey}" → "${r.newKey}" (multiple IDs map to same name)`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
for (const { oldKey, newKey } of safeRenames) {
|
||||
conn[newKey] = conn[oldKey];
|
||||
delete conn[oldKey];
|
||||
fixes.push({
|
||||
node: newKey,
|
||||
field: `connections.sourceKey`,
|
||||
type: 'connection-id-to-name',
|
||||
before: oldKey,
|
||||
after: newKey,
|
||||
confidence: 'high',
|
||||
description: `Replaced node ID "${oldKey}" with name "${newKey}" as connection source key`
|
||||
});
|
||||
}
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs))
|
||||
continue;
|
||||
for (const outputGroup of outputs) {
|
||||
if (!Array.isArray(outputGroup))
|
||||
continue;
|
||||
for (const entry of outputGroup) {
|
||||
if (entry && entry.node && idToNameMap.has(entry.node) && !nameSet.has(entry.node)) {
|
||||
const oldNode = entry.node;
|
||||
const newNode = idToNameMap.get(entry.node);
|
||||
entry.node = newNode;
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[].node`,
|
||||
type: 'connection-id-to-name',
|
||||
before: oldNode,
|
||||
after: newNode,
|
||||
confidence: 'high',
|
||||
description: `Replaced target node ID "${oldNode}" with name "${newNode}" in connection from "${sourceName}"`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return fixes;
|
||||
}
|
||||
fixInvalidTypes(conn) {
|
||||
const fixes = [];
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs))
|
||||
continue;
|
||||
for (const outputGroup of outputs) {
|
||||
if (!Array.isArray(outputGroup))
|
||||
continue;
|
||||
for (const entry of outputGroup) {
|
||||
if (entry && entry.type && !workflow_validator_1.VALID_CONNECTION_TYPES.has(entry.type)) {
|
||||
const oldType = entry.type;
|
||||
const newType = workflow_validator_1.VALID_CONNECTION_TYPES.has(outputKey) ? outputKey : 'main';
|
||||
entry.type = newType;
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[].type`,
|
||||
type: 'connection-invalid-type',
|
||||
before: oldType,
|
||||
after: newType,
|
||||
confidence: 'high',
|
||||
description: `Fixed invalid connection type "${oldType}" → "${newType}" in connection from "${sourceName}" to "${entry.node}"`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return fixes;
|
||||
}
|
||||
fixInputIndices(conn, validationResult, workflow) {
|
||||
const fixes = [];
|
||||
for (const error of validationResult.errors) {
|
||||
if (error.code !== 'INPUT_INDEX_OUT_OF_BOUNDS')
|
||||
continue;
|
||||
const targetNodeName = error.nodeName;
|
||||
if (!targetNodeName)
|
||||
continue;
|
||||
const match = error.message.match(/Input index (\d+).*?has (\d+) main input/);
|
||||
if (!match) {
|
||||
logger.warn(`Could not parse INPUT_INDEX_OUT_OF_BOUNDS error for node "${targetNodeName}": ${error.message}`);
|
||||
continue;
|
||||
}
|
||||
const badIndex = parseInt(match[1], 10);
|
||||
const inputCount = parseInt(match[2], 10);
|
||||
const clampedIndex = inputCount > 1 ? Math.min(badIndex, inputCount - 1) : 0;
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs))
|
||||
continue;
|
||||
for (const outputGroup of outputs) {
|
||||
if (!Array.isArray(outputGroup))
|
||||
continue;
|
||||
for (const entry of outputGroup) {
|
||||
if (entry && entry.node === targetNodeName && entry.index === badIndex) {
|
||||
entry.index = clampedIndex;
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[].index`,
|
||||
type: 'connection-input-index',
|
||||
before: badIndex,
|
||||
after: clampedIndex,
|
||||
confidence: 'medium',
|
||||
description: `Clamped input index ${badIndex} → ${clampedIndex} for target node "${targetNodeName}" (has ${inputCount} input${inputCount === 1 ? '' : 's'})`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return fixes;
|
||||
}
|
||||
fixDuplicateConnections(conn) {
|
||||
const fixes = [];
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs))
|
||||
continue;
|
||||
for (let i = 0; i < outputs.length; i++) {
|
||||
const outputGroup = outputs[i];
|
||||
if (!Array.isArray(outputGroup))
|
||||
continue;
|
||||
const seen = new Set();
|
||||
const deduped = [];
|
||||
for (const entry of outputGroup) {
|
||||
const key = JSON.stringify({ node: entry.node, type: entry.type, index: entry.index });
|
||||
if (seen.has(key)) {
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[${i}]`,
|
||||
type: 'connection-duplicate-removal',
|
||||
before: entry,
|
||||
after: null,
|
||||
confidence: 'high',
|
||||
description: `Removed duplicate connection from "${sourceName}" to "${entry.node}" (type: ${entry.type}, index: ${entry.index})`
|
||||
});
|
||||
}
|
||||
else {
|
||||
seen.add(key);
|
||||
deduped.push(entry);
|
||||
}
|
||||
}
|
||||
outputs[i] = deduped;
|
||||
}
|
||||
}
|
||||
}
|
||||
return fixes;
|
||||
}
|
||||
async processVersionUpgradeFixes(workflow, nodeMap, operations, fixes, postUpdateGuidance) {
|
||||
if (!this.versionService || !this.migrationService || !this.postUpdateValidator) {
|
||||
logger.warn('Version services not initialized. Skipping version upgrade fixes.');
|
||||
|
||||
2
dist/services/workflow-auto-fixer.js.map
vendored
2
dist/services/workflow-auto-fixer.js.map
vendored
File diff suppressed because one or more lines are too long
22
dist/services/workflow-validator.d.ts
vendored
22
dist/services/workflow-validator.d.ts
vendored
@@ -1,6 +1,5 @@
|
||||
import { NodeRepository } from '../database/node-repository';
|
||||
import { EnhancedConfigValidator } from './enhanced-config-validator';
|
||||
export declare const VALID_CONNECTION_TYPES: Set<string>;
|
||||
interface WorkflowNode {
|
||||
id: string;
|
||||
name: string;
|
||||
@@ -22,7 +21,17 @@ interface WorkflowNode {
|
||||
}
|
||||
interface WorkflowConnection {
|
||||
[sourceNode: string]: {
|
||||
[outputType: string]: Array<Array<{
|
||||
main?: Array<Array<{
|
||||
node: string;
|
||||
type: string;
|
||||
index: number;
|
||||
}>>;
|
||||
error?: Array<Array<{
|
||||
node: string;
|
||||
type: string;
|
||||
index: number;
|
||||
}>>;
|
||||
ai_tool?: Array<Array<{
|
||||
node: string;
|
||||
type: string;
|
||||
index: number;
|
||||
@@ -85,15 +94,6 @@ export declare class WorkflowValidator {
|
||||
private validateErrorOutputConfiguration;
|
||||
private validateAIToolConnection;
|
||||
private validateAIToolSource;
|
||||
private getNodeOutputTypes;
|
||||
private validateNotAISubNode;
|
||||
private getShortNodeType;
|
||||
private getConditionalOutputInfo;
|
||||
private validateOutputIndexBounds;
|
||||
private validateConditionalBranchUsage;
|
||||
private validateInputIndexBounds;
|
||||
private flagOrphanedNodes;
|
||||
private validateTriggerReachability;
|
||||
private hasCycle;
|
||||
private validateExpressions;
|
||||
private countExpressionsInObject;
|
||||
|
||||
2
dist/services/workflow-validator.d.ts.map
vendored
2
dist/services/workflow-validator.d.ts.map
vendored
@@ -1 +1 @@
|
||||
{"version":3,"file":"workflow-validator.d.ts","sourceRoot":"","sources":["../../src/services/workflow-validator.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,uBAAuB,EAAE,MAAM,6BAA6B,CAAC;AAiBtE,eAAO,MAAM,sBAAsB,aASjC,CAAC;AAEH,UAAU,YAAY;IACpB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC3B,UAAU,EAAE,GAAG,CAAC;IAChB,WAAW,CAAC,EAAE,GAAG,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,OAAO,CAAC,EAAE,uBAAuB,GAAG,qBAAqB,GAAG,cAAc,CAAC;IAC3E,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB;AAED,UAAU,kBAAkB;IAC1B,CAAC,UAAU,EAAE,MAAM,GAAG;QACpB,CAAC,UAAU,EAAE,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;KACnF,CAAC;CACH;AAED,UAAU,YAAY;IACpB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,YAAY,EAAE,CAAC;IACtB,WAAW,EAAE,kBAAkB,CAAC;IAChC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,UAAU,CAAC,EAAE,GAAG,CAAC;IACjB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,GAAG,CAAC;CACZ;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,OAAO,GAAG,SAAS,CAAC;IAC1B,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,GAAG,CAAC,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,WAAW,CAAC,EAAE,MAAM,CAAC;KACtB,CAAC;CACH;AAED,MAAM,WAAW,wBAAwB;IACvC,KAAK,EAAE,OAAO,CAAC;IACf,MAAM,EAAE,eAAe,EAAE,CAAC;IAC1B,QAAQ,EAAE,eAAe,EAAE,CAAC;IAC5B,UAAU,EAAE;QACV,UAAU,EAAE,MAAM,CAAC;QACnB,YAAY,EAAE,MAAM,CAAC;QACrB,YAAY,EAAE,MAAM,CAAC;QACrB,gBAAgB,EAAE,MAAM,CAAC;QACzB,kBAAkB,EAAE,MAAM,CAAC;QAC3B,oBAAoB,EAAE,MAAM,CAAC;KAC9B,CAAC;IACF,WAAW,EAAE,MAAM,EAAE,CAAC;CACvB;AAED,qBAAa,iBAAiB;IAK1B,OAAO,CAAC,cAAc;
IACtB,OAAO,CAAC,aAAa;IALvB,OAAO,CAAC,eAAe,CAA6B;IACpD,OAAO,CAAC,iBAAiB,CAAwB;gBAGvC,cAAc,EAAE,cAAc,EAC9B,aAAa,EAAE,OAAO,uBAAuB;IAWjD,gBAAgB,CACpB,QAAQ,EAAE,YAAY,EACtB,OAAO,GAAE;QACP,aAAa,CAAC,EAAE,OAAO,CAAC;QACxB,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,OAAO,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,aAAa,GAAG,QAAQ,CAAC;KACvD,GACL,OAAO,CAAC,wBAAwB,CAAC;IAgHpC,OAAO,CAAC,yBAAyB;YAkInB,gBAAgB;IAmO9B,OAAO,CAAC,mBAAmB;IA4F3B,OAAO,CAAC,yBAAyB;IAuHjC,OAAO,CAAC,gCAAgC;IAoFxC,OAAO,CAAC,wBAAwB;IAsChC,OAAO,CAAC,oBAAoB;IAuE5B,OAAO,CAAC,kBAAkB;IAsB1B,OAAO,CAAC,oBAAoB;IA4B5B,OAAO,CAAC,gBAAgB;IASxB,OAAO,CAAC,wBAAwB;IAmBhC,OAAO,CAAC,yBAAyB;IA8DjC,OAAO,CAAC,8BAA8B;IAmDtC,OAAO,CAAC,wBAAwB;IAuChC,OAAO,CAAC,iBAAiB;IAoCzB,OAAO,CAAC,2BAA2B;IA4EnC,OAAO,CAAC,QAAQ;IA4EhB,OAAO,CAAC,mBAAmB;IA4F3B,OAAO,CAAC,wBAAwB;IA2BhC,OAAO,CAAC,YAAY;IAgBpB,OAAO,CAAC,qBAAqB;IAgG7B,OAAO,CAAC,qBAAqB;IA8C7B,OAAO,CAAC,mBAAmB;IA4E3B,OAAO,CAAC,sBAAsB;IAyT9B,OAAO,CAAC,yBAAyB;IAqCjC,OAAO,CAAC,gCAAgC;IA8BxC,OAAO,CAAC,gCAAgC;IAsFxC,OAAO,CAAC,gBAAgB;IA4CxB,OAAO,CAAC,2BAA2B;CAmEpC"}
|
||||
{"version":3,"file":"workflow-validator.d.ts","sourceRoot":"","sources":["../../src/services/workflow-validator.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAC;AAC7D,OAAO,EAAE,uBAAuB,EAAE,MAAM,6BAA6B,CAAC;AAatE,UAAU,YAAY;IACpB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC3B,UAAU,EAAE,GAAG,CAAC;IAChB,WAAW,CAAC,EAAE,GAAG,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,OAAO,CAAC,EAAE,uBAAuB,GAAG,qBAAqB,GAAG,cAAc,CAAC;IAC3E,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,OAAO,CAAC;IAC3B,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB;AAED,UAAU,kBAAkB;IAC1B,CAAC,UAAU,EAAE,MAAM,GAAG;QACpB,IAAI,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;QACnE,KAAK,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;QACpE,OAAO,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,IAAI,EAAE,MAAM,CAAC;YAAC,KAAK,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC,CAAC;KACvE,CAAC;CACH;AAED,UAAU,YAAY;IACpB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,YAAY,EAAE,CAAC;IACtB,WAAW,EAAE,kBAAkB,CAAC;IAChC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,UAAU,CAAC,EAAE,GAAG,CAAC;IACjB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,GAAG,CAAC;CACZ;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,OAAO,GAAG,SAAS,CAAC;IAC1B,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,GAAG,CAAC,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,WAAW,CAAC,EAAE,MAAM,CAAC;KACtB,CAAC;CACH;AAED,MAAM,WAAW,wBAAwB;IACvC,KAAK,EAAE,OAAO,CAAC;IACf,MAAM,EAAE,eAAe,EAAE,CAAC;IAC1B,QAAQ,EAAE,eAAe,EAAE,CAAC;IAC5B,UAAU,EAAE;QACV,UAAU,EAAE,MAAM
,CAAC;QACnB,YAAY,EAAE,MAAM,CAAC;QACrB,YAAY,EAAE,MAAM,CAAC;QACrB,gBAAgB,EAAE,MAAM,CAAC;QACzB,kBAAkB,EAAE,MAAM,CAAC;QAC3B,oBAAoB,EAAE,MAAM,CAAC;KAC9B,CAAC;IACF,WAAW,EAAE,MAAM,EAAE,CAAC;CACvB;AAED,qBAAa,iBAAiB;IAK1B,OAAO,CAAC,cAAc;IACtB,OAAO,CAAC,aAAa;IALvB,OAAO,CAAC,eAAe,CAA6B;IACpD,OAAO,CAAC,iBAAiB,CAAwB;gBAGvC,cAAc,EAAE,cAAc,EAC9B,aAAa,EAAE,OAAO,uBAAuB;IAWjD,gBAAgB,CACpB,QAAQ,EAAE,YAAY,EACtB,OAAO,GAAE;QACP,aAAa,CAAC,EAAE,OAAO,CAAC;QACxB,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,mBAAmB,CAAC,EAAE,OAAO,CAAC;QAC9B,OAAO,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,aAAa,GAAG,QAAQ,CAAC;KACvD,GACL,OAAO,CAAC,wBAAwB,CAAC;IAgHpC,OAAO,CAAC,yBAAyB;YAkInB,gBAAgB;IAmO9B,OAAO,CAAC,mBAAmB;IA8H3B,OAAO,CAAC,yBAAyB;IAgGjC,OAAO,CAAC,gCAAgC;IAoFxC,OAAO,CAAC,wBAAwB;IAsChC,OAAO,CAAC,oBAAoB;IAuE5B,OAAO,CAAC,QAAQ;IAsFhB,OAAO,CAAC,mBAAmB;IA4F3B,OAAO,CAAC,wBAAwB;IA2BhC,OAAO,CAAC,YAAY;IAgBpB,OAAO,CAAC,qBAAqB;IAgG7B,OAAO,CAAC,qBAAqB;IA8C7B,OAAO,CAAC,mBAAmB;IA4E3B,OAAO,CAAC,sBAAsB;IAyT9B,OAAO,CAAC,yBAAyB;IAqCjC,OAAO,CAAC,gCAAgC;IA8BxC,OAAO,CAAC,gCAAgC;IAsFxC,OAAO,CAAC,gBAAgB;IA4CxB,OAAO,CAAC,2BAA2B;CAmEpC"}
|
||||
374
dist/services/workflow-validator.js
vendored
374
dist/services/workflow-validator.js
vendored
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.WorkflowValidator = exports.VALID_CONNECTION_TYPES = void 0;
|
||||
exports.WorkflowValidator = void 0;
|
||||
const crypto_1 = __importDefault(require("crypto"));
|
||||
const expression_validator_1 = require("./expression-validator");
|
||||
const expression_format_validator_1 = require("./expression-format-validator");
|
||||
@@ -16,15 +16,6 @@ const node_type_utils_1 = require("../utils/node-type-utils");
|
||||
const node_classification_1 = require("../utils/node-classification");
|
||||
const tool_variant_generator_1 = require("./tool-variant-generator");
|
||||
const logger = new logger_1.Logger({ prefix: '[WorkflowValidator]' });
|
||||
exports.VALID_CONNECTION_TYPES = new Set([
|
||||
'main',
|
||||
'error',
|
||||
...ai_node_validator_1.AI_CONNECTION_TYPES,
|
||||
'ai_agent',
|
||||
'ai_chain',
|
||||
'ai_retriever',
|
||||
'ai_reranker',
|
||||
]);
|
||||
class WorkflowValidator {
|
||||
constructor(nodeRepository, nodeValidator) {
|
||||
this.nodeRepository = nodeRepository;
|
||||
@@ -402,37 +393,51 @@ class WorkflowValidator {
|
||||
result.statistics.invalidConnections++;
|
||||
continue;
|
||||
}
|
||||
for (const [outputKey, outputConnections] of Object.entries(outputs)) {
|
||||
if (!exports.VALID_CONNECTION_TYPES.has(outputKey)) {
|
||||
let suggestion = '';
|
||||
if (/^\d+$/.test(outputKey)) {
|
||||
suggestion = ` If you meant to use output index ${outputKey}, use main[${outputKey}] instead.`;
|
||||
}
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeName: sourceName,
|
||||
message: `Unknown connection output key "${outputKey}" on node "${sourceName}". Valid keys are: ${[...exports.VALID_CONNECTION_TYPES].join(', ')}.${suggestion}`,
|
||||
code: 'UNKNOWN_CONNECTION_KEY'
|
||||
});
|
||||
result.statistics.invalidConnections++;
|
||||
continue;
|
||||
}
|
||||
if (!outputConnections || !Array.isArray(outputConnections))
|
||||
continue;
|
||||
if (outputKey === 'ai_tool') {
|
||||
this.validateAIToolSource(sourceNode, result);
|
||||
}
|
||||
if (outputKey === 'main') {
|
||||
this.validateNotAISubNode(sourceNode, result);
|
||||
}
|
||||
this.validateConnectionOutputs(sourceName, outputConnections, nodeMap, nodeIdMap, result, outputKey);
|
||||
if (outputs.main) {
|
||||
this.validateConnectionOutputs(sourceName, outputs.main, nodeMap, nodeIdMap, result, 'main');
|
||||
}
|
||||
if (outputs.error) {
|
||||
this.validateConnectionOutputs(sourceName, outputs.error, nodeMap, nodeIdMap, result, 'error');
|
||||
}
|
||||
if (outputs.ai_tool) {
|
||||
this.validateAIToolSource(sourceNode, result);
|
||||
this.validateConnectionOutputs(sourceName, outputs.ai_tool, nodeMap, nodeIdMap, result, 'ai_tool');
|
||||
}
|
||||
}
|
||||
if (profile !== 'minimal') {
|
||||
this.validateTriggerReachability(workflow, result);
|
||||
}
|
||||
else {
|
||||
this.flagOrphanedNodes(workflow, result);
|
||||
const connectedNodes = new Set();
|
||||
Object.keys(workflow.connections).forEach(name => connectedNodes.add(name));
|
||||
Object.values(workflow.connections).forEach(outputs => {
|
||||
if (outputs.main) {
|
||||
outputs.main.flat().forEach(conn => {
|
||||
if (conn)
|
||||
connectedNodes.add(conn.node);
|
||||
});
|
||||
}
|
||||
if (outputs.error) {
|
||||
outputs.error.flat().forEach(conn => {
|
||||
if (conn)
|
||||
connectedNodes.add(conn.node);
|
||||
});
|
||||
}
|
||||
if (outputs.ai_tool) {
|
||||
outputs.ai_tool.flat().forEach(conn => {
|
||||
if (conn)
|
||||
connectedNodes.add(conn.node);
|
||||
});
|
||||
}
|
||||
});
|
||||
for (const node of workflow.nodes) {
|
||||
if (node.disabled || (0, node_classification_1.isNonExecutableNode)(node.type))
|
||||
continue;
|
||||
const isNodeTrigger = (0, node_type_utils_1.isTriggerNode)(node.type);
|
||||
if (!connectedNodes.has(node.name) && !isNodeTrigger) {
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
message: 'Node is not connected to any other nodes'
|
||||
});
|
||||
}
|
||||
}
|
||||
if (profile !== 'minimal' && this.hasCycle(workflow)) {
|
||||
result.errors.push({
|
||||
@@ -445,8 +450,6 @@ class WorkflowValidator {
|
||||
const sourceNode = nodeMap.get(sourceName);
|
||||
if (outputType === 'main' && sourceNode) {
|
||||
this.validateErrorOutputConfiguration(sourceName, sourceNode, outputs, nodeMap, result);
|
||||
this.validateOutputIndexBounds(sourceNode, outputs, result);
|
||||
this.validateConditionalBranchUsage(sourceNode, outputs, result);
|
||||
}
|
||||
outputs.forEach((outputConnections, outputIndex) => {
|
||||
if (!outputConnections)
|
||||
@@ -460,20 +463,6 @@ class WorkflowValidator {
|
||||
result.statistics.invalidConnections++;
|
||||
return;
|
||||
}
|
||||
if (connection.type && !exports.VALID_CONNECTION_TYPES.has(connection.type)) {
|
||||
let suggestion = '';
|
||||
if (/^\d+$/.test(connection.type)) {
|
||||
suggestion = ` Numeric types are not valid - use "main", "error", or an AI connection type.`;
|
||||
}
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeName: sourceName,
|
||||
message: `Invalid connection type "${connection.type}" in connection from "${sourceName}" to "${connection.node}". Expected "main", "error", or an AI connection type (ai_tool, ai_languageModel, etc.).${suggestion}`,
|
||||
code: 'INVALID_CONNECTION_TYPE'
|
||||
});
|
||||
result.statistics.invalidConnections++;
|
||||
return;
|
||||
}
|
||||
const isSplitInBatches = sourceNode && (sourceNode.type === 'n8n-nodes-base.splitInBatches' ||
|
||||
sourceNode.type === 'nodes-base.splitInBatches');
|
||||
if (isSplitInBatches) {
|
||||
@@ -517,9 +506,6 @@ class WorkflowValidator {
|
||||
if (outputType === 'ai_tool') {
|
||||
this.validateAIToolConnection(sourceName, targetNode, result);
|
||||
}
|
||||
if (outputType === 'main') {
|
||||
this.validateInputIndexBounds(sourceName, targetNode, connection, result);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -648,254 +634,6 @@ class WorkflowValidator {
|
||||
code: 'INVALID_AI_TOOL_SOURCE'
|
||||
});
|
||||
}
|
||||
getNodeOutputTypes(nodeType) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
const nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
if (!nodeInfo || !nodeInfo.outputs)
|
||||
return null;
|
||||
const outputs = nodeInfo.outputs;
|
||||
if (!Array.isArray(outputs))
|
||||
return null;
|
||||
for (const output of outputs) {
|
||||
if (typeof output === 'string' && output.startsWith('={{')) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return outputs;
|
||||
}
|
||||
validateNotAISubNode(sourceNode, result) {
|
||||
const outputTypes = this.getNodeOutputTypes(sourceNode.type);
|
||||
if (!outputTypes)
|
||||
return;
|
||||
const hasMainOutput = outputTypes.some(t => t === 'main');
|
||||
if (hasMainOutput)
|
||||
return;
|
||||
const aiTypes = outputTypes.filter(t => t !== 'main');
|
||||
const expectedType = aiTypes[0] || 'ai_languageModel';
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeId: sourceNode.id,
|
||||
nodeName: sourceNode.name,
|
||||
message: `Node "${sourceNode.name}" (${sourceNode.type}) is an AI sub-node that outputs "${expectedType}" connections. ` +
|
||||
`It cannot be used with "main" connections. Connect it to an AI Agent or Chain via "${expectedType}" instead.`,
|
||||
code: 'AI_SUBNODE_MAIN_CONNECTION'
|
||||
});
|
||||
}
|
||||
getShortNodeType(sourceNode) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(sourceNode.type);
|
||||
return normalizedType.replace(/^(n8n-)?nodes-base\./, '');
|
||||
}
|
||||
getConditionalOutputInfo(sourceNode) {
|
||||
const shortType = this.getShortNodeType(sourceNode);
|
||||
if (shortType === 'if' || shortType === 'filter') {
|
||||
return { shortType, expectedOutputs: 2 };
|
||||
}
|
||||
if (shortType === 'switch') {
|
||||
const rules = sourceNode.parameters?.rules?.values || sourceNode.parameters?.rules;
|
||||
if (Array.isArray(rules)) {
|
||||
return { shortType, expectedOutputs: rules.length + 1 };
|
||||
}
|
||||
return null;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
validateOutputIndexBounds(sourceNode, outputs, result) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(sourceNode.type);
|
||||
const nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
if (!nodeInfo || !nodeInfo.outputs)
|
||||
return;
|
||||
let mainOutputCount;
|
||||
if (Array.isArray(nodeInfo.outputs)) {
|
||||
mainOutputCount = nodeInfo.outputs.filter((o) => typeof o === 'string' ? o === 'main' : (o.type === 'main' || !o.type)).length;
|
||||
}
|
||||
else {
|
||||
return;
|
||||
}
|
||||
if (mainOutputCount === 0)
|
||||
return;
|
||||
const conditionalInfo = this.getConditionalOutputInfo(sourceNode);
|
||||
if (conditionalInfo) {
|
||||
mainOutputCount = conditionalInfo.expectedOutputs;
|
||||
}
|
||||
else if (this.getShortNodeType(sourceNode) === 'switch') {
|
||||
return;
|
||||
}
|
||||
if (sourceNode.onError === 'continueErrorOutput') {
|
||||
mainOutputCount += 1;
|
||||
}
|
||||
const maxOutputIndex = outputs.length - 1;
|
||||
if (maxOutputIndex >= mainOutputCount) {
|
||||
for (let i = mainOutputCount; i < outputs.length; i++) {
|
||||
if (outputs[i] && outputs[i].length > 0) {
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeId: sourceNode.id,
|
||||
nodeName: sourceNode.name,
|
||||
message: `Output index ${i} on node "${sourceNode.name}" exceeds its output count (${mainOutputCount}). ` +
|
||||
`This node has ${mainOutputCount} main output(s) (indices 0-${mainOutputCount - 1}).`,
|
||||
code: 'OUTPUT_INDEX_OUT_OF_BOUNDS'
|
||||
});
|
||||
result.statistics.invalidConnections++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
validateConditionalBranchUsage(sourceNode, outputs, result) {
|
||||
const conditionalInfo = this.getConditionalOutputInfo(sourceNode);
|
||||
if (!conditionalInfo || conditionalInfo.expectedOutputs < 2)
|
||||
return;
|
||||
const { shortType, expectedOutputs } = conditionalInfo;
|
||||
const main0Count = outputs[0]?.length || 0;
|
||||
if (main0Count < 2)
|
||||
return;
|
||||
const hasHigherIndexConnections = outputs.slice(1).some(conns => conns && conns.length > 0);
|
||||
if (hasHigherIndexConnections)
|
||||
return;
|
||||
let message;
|
||||
if (shortType === 'if' || shortType === 'filter') {
|
||||
const isFilter = shortType === 'filter';
|
||||
const displayName = isFilter ? 'Filter' : 'IF';
|
||||
const trueLabel = isFilter ? 'matched' : 'true';
|
||||
const falseLabel = isFilter ? 'unmatched' : 'false';
|
||||
message = `${displayName} node "${sourceNode.name}" has ${main0Count} connections on the "${trueLabel}" branch (main[0]) ` +
|
||||
`but no connections on the "${falseLabel}" branch (main[1]). ` +
|
||||
`All ${main0Count} target nodes execute together on the "${trueLabel}" branch, ` +
|
||||
`while the "${falseLabel}" branch has no effect. ` +
|
||||
`Split connections: main[0] for ${trueLabel}, main[1] for ${falseLabel}.`;
|
||||
}
|
||||
else {
|
||||
message = `Switch node "${sourceNode.name}" has ${main0Count} connections on output 0 ` +
|
||||
`but no connections on any other outputs (1-${expectedOutputs - 1}). ` +
|
||||
`All ${main0Count} target nodes execute together on output 0, ` +
|
||||
`while other switch branches have no effect. ` +
|
||||
`Distribute connections across outputs to match switch rules.`;
|
||||
}
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: sourceNode.id,
|
||||
nodeName: sourceNode.name,
|
||||
message,
|
||||
code: 'CONDITIONAL_BRANCH_FANOUT'
|
||||
});
|
||||
}
|
||||
validateInputIndexBounds(sourceName, targetNode, connection, result) {
|
||||
const normalizedType = node_type_normalizer_1.NodeTypeNormalizer.normalizeToFullForm(targetNode.type);
|
||||
const nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
if (!nodeInfo)
|
||||
return;
|
||||
const shortType = normalizedType.replace(/^(n8n-)?nodes-base\./, '');
|
||||
let mainInputCount = 1;
|
||||
if (shortType === 'merge' || shortType === 'compareDatasets') {
|
||||
mainInputCount = 2;
|
||||
}
|
||||
if (nodeInfo.isTrigger || (0, node_type_utils_1.isTriggerNode)(targetNode.type)) {
|
||||
mainInputCount = 0;
|
||||
}
|
||||
if (mainInputCount > 0 && connection.index >= mainInputCount) {
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeName: targetNode.name,
|
||||
message: `Input index ${connection.index} on node "${targetNode.name}" exceeds its input count (${mainInputCount}). ` +
|
||||
`Connection from "${sourceName}" targets input ${connection.index}, but this node has ${mainInputCount} main input(s) (indices 0-${mainInputCount - 1}).`,
|
||||
code: 'INPUT_INDEX_OUT_OF_BOUNDS'
|
||||
});
|
||||
result.statistics.invalidConnections++;
|
||||
}
|
||||
}
|
||||
flagOrphanedNodes(workflow, result) {
|
||||
const connectedNodes = new Set();
|
||||
for (const [sourceName, outputs] of Object.entries(workflow.connections)) {
|
||||
connectedNodes.add(sourceName);
|
||||
for (const outputConns of Object.values(outputs)) {
|
||||
if (!Array.isArray(outputConns))
|
||||
continue;
|
||||
for (const conns of outputConns) {
|
||||
if (!conns)
|
||||
continue;
|
||||
for (const conn of conns) {
|
||||
if (conn)
|
||||
connectedNodes.add(conn.node);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const node of workflow.nodes) {
|
||||
if (node.disabled || (0, node_classification_1.isNonExecutableNode)(node.type))
|
||||
continue;
|
||||
if ((0, node_type_utils_1.isTriggerNode)(node.type))
|
||||
continue;
|
||||
if (!connectedNodes.has(node.name)) {
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
message: 'Node is not connected to any other nodes'
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
validateTriggerReachability(workflow, result) {
|
||||
const adjacency = new Map();
|
||||
for (const [sourceName, outputs] of Object.entries(workflow.connections)) {
|
||||
if (!adjacency.has(sourceName))
|
||||
adjacency.set(sourceName, new Set());
|
||||
for (const outputConns of Object.values(outputs)) {
|
||||
if (Array.isArray(outputConns)) {
|
||||
for (const conns of outputConns) {
|
||||
if (!conns)
|
||||
continue;
|
||||
for (const conn of conns) {
|
||||
if (conn) {
|
||||
adjacency.get(sourceName).add(conn.node);
|
||||
if (!adjacency.has(conn.node))
|
||||
adjacency.set(conn.node, new Set());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const triggerNodes = [];
|
||||
for (const node of workflow.nodes) {
|
||||
if ((0, node_type_utils_1.isTriggerNode)(node.type) && !node.disabled) {
|
||||
triggerNodes.push(node.name);
|
||||
}
|
||||
}
|
||||
if (triggerNodes.length === 0) {
|
||||
this.flagOrphanedNodes(workflow, result);
|
||||
return;
|
||||
}
|
||||
const reachable = new Set();
|
||||
const queue = [...triggerNodes];
|
||||
for (const t of triggerNodes)
|
||||
reachable.add(t);
|
||||
while (queue.length > 0) {
|
||||
const current = queue.shift();
|
||||
const neighbors = adjacency.get(current);
|
||||
if (neighbors) {
|
||||
for (const neighbor of neighbors) {
|
||||
if (!reachable.has(neighbor)) {
|
||||
reachable.add(neighbor);
|
||||
queue.push(neighbor);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const node of workflow.nodes) {
|
||||
if (node.disabled || (0, node_classification_1.isNonExecutableNode)(node.type))
|
||||
continue;
|
||||
if ((0, node_type_utils_1.isTriggerNode)(node.type))
|
||||
continue;
|
||||
if (!reachable.has(node.name)) {
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
message: 'Node is not reachable from any trigger node'
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
hasCycle(workflow) {
|
||||
const visited = new Set();
|
||||
const recursionStack = new Set();
|
||||
@@ -919,13 +657,23 @@ class WorkflowValidator {
|
||||
const connections = workflow.connections[nodeName];
|
||||
if (connections) {
|
||||
const allTargets = [];
|
||||
for (const outputConns of Object.values(connections)) {
|
||||
if (Array.isArray(outputConns)) {
|
||||
outputConns.flat().forEach(conn => {
|
||||
if (conn)
|
||||
allTargets.push(conn.node);
|
||||
});
|
||||
}
|
||||
if (connections.main) {
|
||||
connections.main.flat().forEach(conn => {
|
||||
if (conn)
|
||||
allTargets.push(conn.node);
|
||||
});
|
||||
}
|
||||
if (connections.error) {
|
||||
connections.error.flat().forEach(conn => {
|
||||
if (conn)
|
||||
allTargets.push(conn.node);
|
||||
});
|
||||
}
|
||||
if (connections.ai_tool) {
|
||||
connections.ai_tool.flat().forEach(conn => {
|
||||
if (conn)
|
||||
allTargets.push(conn.node);
|
||||
});
|
||||
}
|
||||
const currentNodeType = nodeTypeMap.get(nodeName);
|
||||
const isLoopNode = loopNodeTypes.includes(currentNodeType || '');
|
||||
|
||||
2
dist/services/workflow-validator.js.map
vendored
2
dist/services/workflow-validator.js.map
vendored
File diff suppressed because one or more lines are too long
7243
package-lock.json
generated
7243
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
13
package.json
13
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "n8n-mcp",
|
||||
"version": "2.36.1",
|
||||
"version": "2.35.5",
|
||||
"description": "Integration between n8n workflow automation and Model Context Protocol (MCP)",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
@@ -153,16 +153,16 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "1.20.1",
|
||||
"@n8n/n8n-nodes-langchain": "^2.10.1",
|
||||
"@n8n/n8n-nodes-langchain": "^2.8.1",
|
||||
"@supabase/supabase-js": "^2.57.4",
|
||||
"dotenv": "^16.5.0",
|
||||
"express": "^5.1.0",
|
||||
"express-rate-limit": "^7.1.5",
|
||||
"form-data": "^4.0.5",
|
||||
"lru-cache": "^11.2.1",
|
||||
"n8n": "^2.10.3",
|
||||
"n8n-core": "^2.10.1",
|
||||
"n8n-workflow": "^2.10.1",
|
||||
"n8n": "^2.8.3",
|
||||
"n8n-core": "^2.8.1",
|
||||
"n8n-workflow": "^2.8.0",
|
||||
"openai": "^4.77.0",
|
||||
"sql.js": "^1.13.0",
|
||||
"tslib": "^2.6.2",
|
||||
@@ -175,7 +175,6 @@
|
||||
"better-sqlite3": "^11.10.0"
|
||||
},
|
||||
"overrides": {
|
||||
"pyodide": "0.26.4",
|
||||
"isolated-vm": "npm:empty-npm-package@1.0.0"
|
||||
"pyodide": "0.26.4"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -424,13 +424,7 @@ const autofixWorkflowSchema = z.object({
|
||||
'node-type-correction',
|
||||
'webhook-missing-path',
|
||||
'typeversion-upgrade',
|
||||
'version-migration',
|
||||
'tool-variant-correction',
|
||||
'connection-numeric-keys',
|
||||
'connection-invalid-type',
|
||||
'connection-id-to-name',
|
||||
'connection-duplicate-removal',
|
||||
'connection-input-index'
|
||||
'version-migration'
|
||||
])).optional(),
|
||||
confidenceThreshold: z.enum(['high', 'medium', 'low']).optional().default('medium'),
|
||||
maxFixes: z.number().optional().default(50)
|
||||
|
||||
@@ -284,7 +284,7 @@ export const n8nManagementTools: ToolDefinition[] = [
|
||||
},
|
||||
{
|
||||
name: 'n8n_autofix_workflow',
|
||||
description: `Automatically fix common workflow validation errors. Preview fixes or apply them. Fixes expression format, typeVersion, error output config, webhook paths, connection structure issues (numeric keys, invalid types, ID-to-name, duplicates, out-of-bounds indices).`,
|
||||
description: `Automatically fix common workflow validation errors. Preview fixes or apply them. Fixes expression format, typeVersion, error output config, webhook paths.`,
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
@@ -301,7 +301,7 @@ export const n8nManagementTools: ToolDefinition[] = [
|
||||
description: 'Types of fixes to apply (default: all)',
|
||||
items: {
|
||||
type: 'string',
|
||||
enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path', 'typeversion-upgrade', 'version-migration', 'tool-variant-correction', 'connection-numeric-keys', 'connection-invalid-type', 'connection-id-to-name', 'connection-duplicate-removal', 'connection-input-index']
|
||||
enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path', 'typeversion-upgrade', 'version-migration']
|
||||
}
|
||||
},
|
||||
confidenceThreshold: {
|
||||
|
||||
@@ -6,14 +6,13 @@
|
||||
*/
|
||||
|
||||
import crypto from 'crypto';
|
||||
import { WorkflowValidationResult, VALID_CONNECTION_TYPES } from './workflow-validator';
|
||||
import { WorkflowValidationResult } from './workflow-validator';
|
||||
import { ExpressionFormatIssue } from './expression-format-validator';
|
||||
import { NodeSimilarityService } from './node-similarity-service';
|
||||
import { NodeRepository } from '../database/node-repository';
|
||||
import {
|
||||
WorkflowDiffOperation,
|
||||
UpdateNodeOperation,
|
||||
ReplaceConnectionsOperation
|
||||
UpdateNodeOperation
|
||||
} from '../types/workflow-diff';
|
||||
import { WorkflowNode, Workflow } from '../types/n8n-api';
|
||||
import { Logger } from '../utils/logger';
|
||||
@@ -31,22 +30,9 @@ export type FixType =
|
||||
| 'error-output-config'
|
||||
| 'node-type-correction'
|
||||
| 'webhook-missing-path'
|
||||
| 'typeversion-upgrade' // Proactive version upgrades
|
||||
| 'version-migration' // Smart version migrations with breaking changes
|
||||
| 'tool-variant-correction' // Fix base nodes used as AI tools when Tool variant exists
|
||||
| 'connection-numeric-keys' // "0","1" keys → main[0], main[1]
|
||||
| 'connection-invalid-type' // type:"0" → type:"main"
|
||||
| 'connection-id-to-name' // node ID refs → node name refs
|
||||
| 'connection-duplicate-removal' // Dedup identical connection entries
|
||||
| 'connection-input-index'; // Out-of-bounds input index → clamped
|
||||
|
||||
export const CONNECTION_FIX_TYPES: FixType[] = [
|
||||
'connection-numeric-keys',
|
||||
'connection-invalid-type',
|
||||
'connection-id-to-name',
|
||||
'connection-duplicate-removal',
|
||||
'connection-input-index'
|
||||
];
|
||||
| 'typeversion-upgrade' // Proactive version upgrades
|
||||
| 'version-migration' // Smart version migrations with breaking changes
|
||||
| 'tool-variant-correction'; // Fix base nodes used as AI tools when Tool variant exists
|
||||
|
||||
export interface AutoFixConfig {
|
||||
applyFixes: boolean;
|
||||
@@ -189,9 +175,6 @@ export class WorkflowAutoFixer {
|
||||
await this.processVersionMigrationFixes(workflow, nodeMap, operations, fixes, postUpdateGuidance);
|
||||
}
|
||||
|
||||
// Process connection structure fixes (HIGH/MEDIUM confidence)
|
||||
this.processConnectionFixes(workflow, validationResult, fullConfig, operations, fixes);
|
||||
|
||||
// Filter by confidence threshold
|
||||
const filteredFixes = this.filterByConfidence(fixes, fullConfig.confidenceThreshold);
|
||||
const filteredOperations = this.filterOperationsByFixes(operations, filteredFixes, fixes);
|
||||
@@ -672,14 +655,10 @@ export class WorkflowAutoFixer {
|
||||
allFixes: FixOperation[]
|
||||
): WorkflowDiffOperation[] {
|
||||
const fixedNodes = new Set(filteredFixes.map(f => f.node));
|
||||
const hasConnectionFixes = filteredFixes.some(f => CONNECTION_FIX_TYPES.includes(f.type));
|
||||
return operations.filter(op => {
|
||||
if (op.type === 'updateNode') {
|
||||
return fixedNodes.has(op.nodeId || '');
|
||||
}
|
||||
if (op.type === 'replaceConnections') {
|
||||
return hasConnectionFixes;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
}
|
||||
@@ -698,12 +677,7 @@ export class WorkflowAutoFixer {
|
||||
'webhook-missing-path': 0,
|
||||
'typeversion-upgrade': 0,
|
||||
'version-migration': 0,
|
||||
'tool-variant-correction': 0,
|
||||
'connection-numeric-keys': 0,
|
||||
'connection-invalid-type': 0,
|
||||
'connection-id-to-name': 0,
|
||||
'connection-duplicate-removal': 0,
|
||||
'connection-input-index': 0
|
||||
'tool-variant-correction': 0
|
||||
},
|
||||
byConfidence: {
|
||||
'high': 0,
|
||||
@@ -756,16 +730,6 @@ export class WorkflowAutoFixer {
|
||||
parts.push(`${stats.byType['tool-variant-correction']} tool variant ${stats.byType['tool-variant-correction'] === 1 ? 'correction' : 'corrections'}`);
|
||||
}
|
||||
|
||||
const connectionIssueCount =
|
||||
(stats.byType['connection-numeric-keys'] || 0) +
|
||||
(stats.byType['connection-invalid-type'] || 0) +
|
||||
(stats.byType['connection-id-to-name'] || 0) +
|
||||
(stats.byType['connection-duplicate-removal'] || 0) +
|
||||
(stats.byType['connection-input-index'] || 0);
|
||||
if (connectionIssueCount > 0) {
|
||||
parts.push(`${connectionIssueCount} connection ${connectionIssueCount === 1 ? 'issue' : 'issues'}`);
|
||||
}
|
||||
|
||||
if (parts.length === 0) {
|
||||
return `Fixed ${stats.total} ${stats.total === 1 ? 'issue' : 'issues'}`;
|
||||
}
|
||||
@@ -773,370 +737,6 @@ export class WorkflowAutoFixer {
|
||||
return `Fixed ${parts.join(', ')}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process connection structure fixes.
|
||||
* Deep-clones workflow.connections, applies fixes in order:
|
||||
* numeric keys → ID-to-name → invalid type → input index → dedup
|
||||
* Emits a single ReplaceConnectionsOperation if any corrections were made.
|
||||
*/
|
||||
private processConnectionFixes(
|
||||
workflow: Workflow,
|
||||
validationResult: WorkflowValidationResult,
|
||||
config: AutoFixConfig,
|
||||
operations: WorkflowDiffOperation[],
|
||||
fixes: FixOperation[]
|
||||
): void {
|
||||
if (!workflow.connections || Object.keys(workflow.connections).length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Build lookup maps
|
||||
const idToNameMap = new Map<string, string>();
|
||||
const nameSet = new Set<string>();
|
||||
for (const node of workflow.nodes) {
|
||||
idToNameMap.set(node.id, node.name);
|
||||
nameSet.add(node.name);
|
||||
}
|
||||
|
||||
// Deep-clone connections
|
||||
const conn: any = JSON.parse(JSON.stringify(workflow.connections));
|
||||
let anyFixed = false;
|
||||
|
||||
// 1. Fix numeric source keys ("0" → main[0])
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-numeric-keys')) {
|
||||
const numericKeyResult = this.fixNumericKeys(conn);
|
||||
if (numericKeyResult.length > 0) {
|
||||
fixes.push(...numericKeyResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Fix ID-to-name references (source keys and .node values)
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-id-to-name')) {
|
||||
const idToNameResult = this.fixIdToName(conn, idToNameMap, nameSet);
|
||||
if (idToNameResult.length > 0) {
|
||||
fixes.push(...idToNameResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Fix invalid connection types
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-invalid-type')) {
|
||||
const invalidTypeResult = this.fixInvalidTypes(conn);
|
||||
if (invalidTypeResult.length > 0) {
|
||||
fixes.push(...invalidTypeResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Fix out-of-bounds input indices
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-input-index')) {
|
||||
const inputIndexResult = this.fixInputIndices(conn, validationResult, workflow);
|
||||
if (inputIndexResult.length > 0) {
|
||||
fixes.push(...inputIndexResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Dedup identical connection entries
|
||||
if (!config.fixTypes || config.fixTypes.includes('connection-duplicate-removal')) {
|
||||
const dedupResult = this.fixDuplicateConnections(conn);
|
||||
if (dedupResult.length > 0) {
|
||||
fixes.push(...dedupResult);
|
||||
anyFixed = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (anyFixed) {
|
||||
const op: ReplaceConnectionsOperation = {
|
||||
type: 'replaceConnections',
|
||||
connections: conn
|
||||
};
|
||||
operations.push(op);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fix numeric connection output keys ("0", "1" → main[0], main[1])
|
||||
*/
|
||||
private fixNumericKeys(conn: any): FixOperation[] {
|
||||
const fixes: FixOperation[] = [];
|
||||
const sourceNodes = Object.keys(conn);
|
||||
|
||||
for (const sourceName of sourceNodes) {
|
||||
const nodeConn = conn[sourceName];
|
||||
const numericKeys = Object.keys(nodeConn).filter(k => /^\d+$/.test(k));
|
||||
|
||||
if (numericKeys.length === 0) continue;
|
||||
|
||||
// Ensure main array exists
|
||||
if (!nodeConn['main']) {
|
||||
nodeConn['main'] = [];
|
||||
}
|
||||
|
||||
for (const numKey of numericKeys) {
|
||||
const index = parseInt(numKey, 10);
|
||||
const entries = nodeConn[numKey];
|
||||
|
||||
// Extend main array if needed (fill gaps with empty arrays)
|
||||
while (nodeConn['main'].length <= index) {
|
||||
nodeConn['main'].push([]);
|
||||
}
|
||||
|
||||
// Merge entries into main[index]
|
||||
const hadExisting = nodeConn['main'][index] && nodeConn['main'][index].length > 0;
|
||||
if (Array.isArray(entries)) {
|
||||
for (const outputGroup of entries) {
|
||||
if (Array.isArray(outputGroup)) {
|
||||
nodeConn['main'][index] = [
|
||||
...nodeConn['main'][index],
|
||||
...outputGroup
|
||||
];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (hadExisting) {
|
||||
logger.warn(`Merged numeric key "${numKey}" into existing main[${index}] on node "${sourceName}" - dedup pass will clean exact duplicates`);
|
||||
}
|
||||
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${numKey}`,
|
||||
type: 'connection-numeric-keys',
|
||||
before: numKey,
|
||||
after: `main[${index}]`,
|
||||
confidence: hadExisting ? 'medium' : 'high',
|
||||
description: hadExisting
|
||||
? `Merged numeric connection key "${numKey}" into existing main[${index}] on node "${sourceName}"`
|
||||
: `Converted numeric connection key "${numKey}" to main[${index}] on node "${sourceName}"`
|
||||
});
|
||||
|
||||
delete nodeConn[numKey];
|
||||
}
|
||||
}
|
||||
|
||||
return fixes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fix node ID references in connections (replace IDs with names)
|
||||
*/
|
||||
private fixIdToName(
|
||||
conn: any,
|
||||
idToNameMap: Map<string, string>,
|
||||
nameSet: Set<string>
|
||||
): FixOperation[] {
|
||||
const fixes: FixOperation[] = [];
|
||||
|
||||
// Build rename plan for source keys, then check for collisions
|
||||
const renames: Array<{ oldKey: string; newKey: string }> = [];
|
||||
const sourceKeys = Object.keys(conn);
|
||||
for (const sourceKey of sourceKeys) {
|
||||
if (idToNameMap.has(sourceKey) && !nameSet.has(sourceKey)) {
|
||||
renames.push({ oldKey: sourceKey, newKey: idToNameMap.get(sourceKey)! });
|
||||
}
|
||||
}
|
||||
|
||||
// Check for collisions among renames (two IDs mapping to the same name)
|
||||
const newKeyCount = new Map<string, number>();
|
||||
for (const r of renames) {
|
||||
newKeyCount.set(r.newKey, (newKeyCount.get(r.newKey) || 0) + 1);
|
||||
}
|
||||
const safeRenames = renames.filter(r => {
|
||||
if ((newKeyCount.get(r.newKey) || 0) > 1) {
|
||||
logger.warn(`Skipping ambiguous ID-to-name rename: "${r.oldKey}" → "${r.newKey}" (multiple IDs map to same name)`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
for (const { oldKey, newKey } of safeRenames) {
|
||||
conn[newKey] = conn[oldKey];
|
||||
delete conn[oldKey];
|
||||
fixes.push({
|
||||
node: newKey,
|
||||
field: `connections.sourceKey`,
|
||||
type: 'connection-id-to-name',
|
||||
before: oldKey,
|
||||
after: newKey,
|
||||
confidence: 'high',
|
||||
description: `Replaced node ID "${oldKey}" with name "${newKey}" as connection source key`
|
||||
});
|
||||
}
|
||||
|
||||
// Fix .node values that are node IDs
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs)) continue;
|
||||
for (const outputGroup of outputs) {
|
||||
if (!Array.isArray(outputGroup)) continue;
|
||||
for (const entry of outputGroup) {
|
||||
if (entry && entry.node && idToNameMap.has(entry.node) && !nameSet.has(entry.node)) {
|
||||
const oldNode = entry.node;
|
||||
const newNode = idToNameMap.get(entry.node)!;
|
||||
entry.node = newNode;
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[].node`,
|
||||
type: 'connection-id-to-name',
|
||||
before: oldNode,
|
||||
after: newNode,
|
||||
confidence: 'high',
|
||||
description: `Replaced target node ID "${oldNode}" with name "${newNode}" in connection from "${sourceName}"`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fixes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fix invalid connection types in entries (e.g., type:"0" → type:"main")
|
||||
*/
|
||||
private fixInvalidTypes(conn: any): FixOperation[] {
|
||||
const fixes: FixOperation[] = [];
|
||||
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs)) continue;
|
||||
for (const outputGroup of outputs) {
|
||||
if (!Array.isArray(outputGroup)) continue;
|
||||
for (const entry of outputGroup) {
|
||||
if (entry && entry.type && !VALID_CONNECTION_TYPES.has(entry.type)) {
|
||||
const oldType = entry.type;
|
||||
// Use the parent output key if it's valid, otherwise default to "main"
|
||||
const newType = VALID_CONNECTION_TYPES.has(outputKey) ? outputKey : 'main';
|
||||
entry.type = newType;
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[].type`,
|
||||
type: 'connection-invalid-type',
|
||||
before: oldType,
|
||||
after: newType,
|
||||
confidence: 'high',
|
||||
description: `Fixed invalid connection type "${oldType}" → "${newType}" in connection from "${sourceName}" to "${entry.node}"`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fixes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fix out-of-bounds input indices (clamp to valid range)
|
||||
*/
|
||||
private fixInputIndices(
|
||||
conn: any,
|
||||
validationResult: WorkflowValidationResult,
|
||||
workflow: Workflow
|
||||
): FixOperation[] {
|
||||
const fixes: FixOperation[] = [];
|
||||
|
||||
// Parse INPUT_INDEX_OUT_OF_BOUNDS errors from validation
|
||||
for (const error of validationResult.errors) {
|
||||
if (error.code !== 'INPUT_INDEX_OUT_OF_BOUNDS') continue;
|
||||
|
||||
const targetNodeName = error.nodeName;
|
||||
if (!targetNodeName) continue;
|
||||
|
||||
// Extract the bad index and input count from the error message
|
||||
const match = error.message.match(/Input index (\d+).*?has (\d+) main input/);
|
||||
if (!match) {
|
||||
logger.warn(`Could not parse INPUT_INDEX_OUT_OF_BOUNDS error for node "${targetNodeName}": ${error.message}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const badIndex = parseInt(match[1], 10);
|
||||
const inputCount = parseInt(match[2], 10);
|
||||
|
||||
// For multi-input nodes, clamp to max valid index; for single-input, reset to 0
|
||||
const clampedIndex = inputCount > 1 ? Math.min(badIndex, inputCount - 1) : 0;
|
||||
|
||||
// Find and fix the bad index in connections
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs)) continue;
|
||||
for (const outputGroup of outputs) {
|
||||
if (!Array.isArray(outputGroup)) continue;
|
||||
for (const entry of outputGroup) {
|
||||
if (entry && entry.node === targetNodeName && entry.index === badIndex) {
|
||||
entry.index = clampedIndex;
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[].index`,
|
||||
type: 'connection-input-index',
|
||||
before: badIndex,
|
||||
after: clampedIndex,
|
||||
confidence: 'medium',
|
||||
description: `Clamped input index ${badIndex} → ${clampedIndex} for target node "${targetNodeName}" (has ${inputCount} input${inputCount === 1 ? '' : 's'})`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fixes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove duplicate connection entries (same node, type, index)
|
||||
*/
|
||||
private fixDuplicateConnections(conn: any): FixOperation[] {
|
||||
const fixes: FixOperation[] = [];
|
||||
|
||||
for (const sourceName of Object.keys(conn)) {
|
||||
const nodeConn = conn[sourceName];
|
||||
for (const outputKey of Object.keys(nodeConn)) {
|
||||
const outputs = nodeConn[outputKey];
|
||||
if (!Array.isArray(outputs)) continue;
|
||||
for (let i = 0; i < outputs.length; i++) {
|
||||
const outputGroup = outputs[i];
|
||||
if (!Array.isArray(outputGroup)) continue;
|
||||
|
||||
const seen = new Set<string>();
|
||||
const deduped: any[] = [];
|
||||
|
||||
for (const entry of outputGroup) {
|
||||
const key = JSON.stringify({ node: entry.node, type: entry.type, index: entry.index });
|
||||
if (seen.has(key)) {
|
||||
fixes.push({
|
||||
node: sourceName,
|
||||
field: `connections.${sourceName}.${outputKey}[${i}]`,
|
||||
type: 'connection-duplicate-removal',
|
||||
before: entry,
|
||||
after: null,
|
||||
confidence: 'high',
|
||||
description: `Removed duplicate connection from "${sourceName}" to "${entry.node}" (type: ${entry.type}, index: ${entry.index})`
|
||||
});
|
||||
} else {
|
||||
seen.add(key);
|
||||
deduped.push(entry);
|
||||
}
|
||||
}
|
||||
|
||||
outputs[i] = deduped;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fixes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process version upgrade fixes (proactive upgrades to latest versions)
|
||||
* HIGH confidence for non-breaking upgrades, MEDIUM for upgrades with auto-migratable changes
|
||||
|
||||
@@ -11,28 +11,13 @@ import { ExpressionFormatValidator } from './expression-format-validator';
|
||||
import { NodeSimilarityService, NodeSuggestion } from './node-similarity-service';
|
||||
import { NodeTypeNormalizer } from '../utils/node-type-normalizer';
|
||||
import { Logger } from '../utils/logger';
|
||||
import { validateAISpecificNodes, hasAINodes, AI_CONNECTION_TYPES } from './ai-node-validator';
|
||||
import { validateAISpecificNodes, hasAINodes } from './ai-node-validator';
|
||||
import { isAIToolSubNode } from './ai-tool-validators';
|
||||
import { isTriggerNode } from '../utils/node-type-utils';
|
||||
import { isNonExecutableNode } from '../utils/node-classification';
|
||||
import { ToolVariantGenerator } from './tool-variant-generator';
|
||||
const logger = new Logger({ prefix: '[WorkflowValidator]' });
|
||||
|
||||
/**
|
||||
* All valid connection output keys in n8n workflows.
|
||||
* Any key not in this set is malformed and should be flagged.
|
||||
*/
|
||||
export const VALID_CONNECTION_TYPES = new Set<string>([
|
||||
'main',
|
||||
'error',
|
||||
...AI_CONNECTION_TYPES,
|
||||
// Additional AI types from n8n-workflow NodeConnectionTypes not in AI_CONNECTION_TYPES
|
||||
'ai_agent',
|
||||
'ai_chain',
|
||||
'ai_retriever',
|
||||
'ai_reranker',
|
||||
]);
|
||||
|
||||
interface WorkflowNode {
|
||||
id: string;
|
||||
name: string;
|
||||
@@ -55,7 +40,9 @@ interface WorkflowNode {
|
||||
|
||||
interface WorkflowConnection {
|
||||
[sourceNode: string]: {
|
||||
[outputType: string]: Array<Array<{ node: string; type: string; index: number }>>;
|
||||
main?: Array<Array<{ node: string; type: string; index: number }>>;
|
||||
error?: Array<Array<{ node: string; type: string; index: number }>>;
|
||||
ai_tool?: Array<Array<{ node: string; type: string; index: number }>>;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -625,52 +612,86 @@ export class WorkflowValidator {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Detect unknown output keys and validate known ones
|
||||
for (const [outputKey, outputConnections] of Object.entries(outputs)) {
|
||||
if (!VALID_CONNECTION_TYPES.has(outputKey)) {
|
||||
// Flag unknown connection output key
|
||||
let suggestion = '';
|
||||
if (/^\d+$/.test(outputKey)) {
|
||||
suggestion = ` If you meant to use output index ${outputKey}, use main[${outputKey}] instead.`;
|
||||
}
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeName: sourceName,
|
||||
message: `Unknown connection output key "${outputKey}" on node "${sourceName}". Valid keys are: ${[...VALID_CONNECTION_TYPES].join(', ')}.${suggestion}`,
|
||||
code: 'UNKNOWN_CONNECTION_KEY'
|
||||
});
|
||||
result.statistics.invalidConnections++;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!outputConnections || !Array.isArray(outputConnections)) continue;
|
||||
|
||||
// Validate that the source node can actually output ai_tool
|
||||
if (outputKey === 'ai_tool') {
|
||||
this.validateAIToolSource(sourceNode, result);
|
||||
}
|
||||
|
||||
// Validate that AI sub-nodes are not connected via main
|
||||
if (outputKey === 'main') {
|
||||
this.validateNotAISubNode(sourceNode, result);
|
||||
}
|
||||
|
||||
// Check main outputs
|
||||
if (outputs.main) {
|
||||
this.validateConnectionOutputs(
|
||||
sourceName,
|
||||
outputConnections,
|
||||
outputs.main,
|
||||
nodeMap,
|
||||
nodeIdMap,
|
||||
result,
|
||||
outputKey
|
||||
'main'
|
||||
);
|
||||
}
|
||||
|
||||
// Check error outputs
|
||||
if (outputs.error) {
|
||||
this.validateConnectionOutputs(
|
||||
sourceName,
|
||||
outputs.error,
|
||||
nodeMap,
|
||||
nodeIdMap,
|
||||
result,
|
||||
'error'
|
||||
);
|
||||
}
|
||||
|
||||
// Check AI tool outputs
|
||||
if (outputs.ai_tool) {
|
||||
// Validate that the source node can actually output ai_tool
|
||||
this.validateAIToolSource(sourceNode, result);
|
||||
|
||||
this.validateConnectionOutputs(
|
||||
sourceName,
|
||||
outputs.ai_tool,
|
||||
nodeMap,
|
||||
nodeIdMap,
|
||||
result,
|
||||
'ai_tool'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Trigger reachability analysis: BFS from all triggers to find unreachable nodes
|
||||
if (profile !== 'minimal') {
|
||||
this.validateTriggerReachability(workflow, result);
|
||||
} else {
|
||||
this.flagOrphanedNodes(workflow, result);
|
||||
// Check for orphaned nodes (not connected and not triggers)
|
||||
const connectedNodes = new Set<string>();
|
||||
|
||||
// Add all source nodes
|
||||
Object.keys(workflow.connections).forEach(name => connectedNodes.add(name));
|
||||
|
||||
// Add all target nodes
|
||||
Object.values(workflow.connections).forEach(outputs => {
|
||||
if (outputs.main) {
|
||||
outputs.main.flat().forEach(conn => {
|
||||
if (conn) connectedNodes.add(conn.node);
|
||||
});
|
||||
}
|
||||
if (outputs.error) {
|
||||
outputs.error.flat().forEach(conn => {
|
||||
if (conn) connectedNodes.add(conn.node);
|
||||
});
|
||||
}
|
||||
if (outputs.ai_tool) {
|
||||
outputs.ai_tool.flat().forEach(conn => {
|
||||
if (conn) connectedNodes.add(conn.node);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Check for orphaned nodes (exclude sticky notes)
|
||||
for (const node of workflow.nodes) {
|
||||
if (node.disabled || isNonExecutableNode(node.type)) continue;
|
||||
|
||||
// Use shared trigger detection function for consistency
|
||||
const isNodeTrigger = isTriggerNode(node.type);
|
||||
|
||||
if (!connectedNodes.has(node.name) && !isNodeTrigger) {
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
message: 'Node is not connected to any other nodes'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Check for cycles (skip in minimal profile to reduce false positives)
|
||||
@@ -691,21 +712,19 @@ export class WorkflowValidator {
|
||||
nodeMap: Map<string, WorkflowNode>,
|
||||
nodeIdMap: Map<string, WorkflowNode>,
|
||||
result: WorkflowValidationResult,
|
||||
outputType: string
|
||||
outputType: 'main' | 'error' | 'ai_tool'
|
||||
): void {
|
||||
// Get source node for special validation
|
||||
const sourceNode = nodeMap.get(sourceName);
|
||||
|
||||
// Main-output-specific validation: error handling config and index bounds
|
||||
// Special validation for main outputs with error handling
|
||||
if (outputType === 'main' && sourceNode) {
|
||||
this.validateErrorOutputConfiguration(sourceName, sourceNode, outputs, nodeMap, result);
|
||||
this.validateOutputIndexBounds(sourceNode, outputs, result);
|
||||
this.validateConditionalBranchUsage(sourceNode, outputs, result);
|
||||
}
|
||||
|
||||
|
||||
outputs.forEach((outputConnections, outputIndex) => {
|
||||
if (!outputConnections) return;
|
||||
|
||||
|
||||
outputConnections.forEach(connection => {
|
||||
// Check for negative index
|
||||
if (connection.index < 0) {
|
||||
@@ -717,22 +736,6 @@ export class WorkflowValidator {
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate connection type field
|
||||
if (connection.type && !VALID_CONNECTION_TYPES.has(connection.type)) {
|
||||
let suggestion = '';
|
||||
if (/^\d+$/.test(connection.type)) {
|
||||
suggestion = ` Numeric types are not valid - use "main", "error", or an AI connection type.`;
|
||||
}
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeName: sourceName,
|
||||
message: `Invalid connection type "${connection.type}" in connection from "${sourceName}" to "${connection.node}". Expected "main", "error", or an AI connection type (ai_tool, ai_languageModel, etc.).${suggestion}`,
|
||||
code: 'INVALID_CONNECTION_TYPE'
|
||||
});
|
||||
result.statistics.invalidConnections++;
|
||||
return;
|
||||
}
|
||||
|
||||
// Special validation for SplitInBatches node
|
||||
// Check both full form (n8n-nodes-base.*) and short form (nodes-base.*)
|
||||
const isSplitInBatches = sourceNode && (
|
||||
@@ -786,16 +789,11 @@ export class WorkflowValidator {
|
||||
});
|
||||
} else {
|
||||
result.statistics.validConnections++;
|
||||
|
||||
|
||||
// Additional validation for AI tool connections
|
||||
if (outputType === 'ai_tool') {
|
||||
this.validateAIToolConnection(sourceName, targetNode, result);
|
||||
}
|
||||
|
||||
// Input index bounds checking
|
||||
if (outputType === 'main') {
|
||||
this.validateInputIndexBounds(sourceName, targetNode, connection, result);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -993,348 +991,6 @@ export class WorkflowValidator {
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the static output types for a node from the database.
|
||||
* Returns null if outputs contain expressions (dynamic) or node not found.
|
||||
*/
|
||||
private getNodeOutputTypes(nodeType: string): string[] | null {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
const nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
if (!nodeInfo || !nodeInfo.outputs) return null;
|
||||
|
||||
const outputs = nodeInfo.outputs;
|
||||
if (!Array.isArray(outputs)) return null;
|
||||
|
||||
// Skip if any output is an expression (dynamic — can't determine statically)
|
||||
for (const output of outputs) {
|
||||
if (typeof output === 'string' && output.startsWith('={{')) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
return outputs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that AI sub-nodes (nodes that only output AI connection types)
|
||||
* are not connected via "main" connections.
|
||||
*/
|
||||
private validateNotAISubNode(
|
||||
sourceNode: WorkflowNode,
|
||||
result: WorkflowValidationResult
|
||||
): void {
|
||||
const outputTypes = this.getNodeOutputTypes(sourceNode.type);
|
||||
if (!outputTypes) return; // Unknown or dynamic — skip
|
||||
|
||||
// Check if the node outputs ONLY AI types (no 'main')
|
||||
const hasMainOutput = outputTypes.some(t => t === 'main');
|
||||
if (hasMainOutput) return; // Node can legitimately output main
|
||||
|
||||
// All outputs are AI types — this node should not be connected via main
|
||||
const aiTypes = outputTypes.filter(t => t !== 'main');
|
||||
const expectedType = aiTypes[0] || 'ai_languageModel';
|
||||
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeId: sourceNode.id,
|
||||
nodeName: sourceNode.name,
|
||||
message: `Node "${sourceNode.name}" (${sourceNode.type}) is an AI sub-node that outputs "${expectedType}" connections. ` +
|
||||
`It cannot be used with "main" connections. Connect it to an AI Agent or Chain via "${expectedType}" instead.`,
|
||||
code: 'AI_SUBNODE_MAIN_CONNECTION'
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Derive the short node type name (e.g., "if", "switch", "set") from a workflow node.
|
||||
*/
|
||||
private getShortNodeType(sourceNode: WorkflowNode): string {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(sourceNode.type);
|
||||
return normalizedType.replace(/^(n8n-)?nodes-base\./, '');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the expected main output count for a conditional node (IF, Filter, Switch).
|
||||
* Returns null for non-conditional nodes or when the count cannot be determined.
|
||||
*/
|
||||
private getConditionalOutputInfo(sourceNode: WorkflowNode): { shortType: string; expectedOutputs: number } | null {
|
||||
const shortType = this.getShortNodeType(sourceNode);
|
||||
|
||||
if (shortType === 'if' || shortType === 'filter') {
|
||||
return { shortType, expectedOutputs: 2 };
|
||||
}
|
||||
if (shortType === 'switch') {
|
||||
const rules = sourceNode.parameters?.rules?.values || sourceNode.parameters?.rules;
|
||||
if (Array.isArray(rules)) {
|
||||
return { shortType, expectedOutputs: rules.length + 1 }; // rules + fallback
|
||||
}
|
||||
return null; // Cannot determine dynamic output count
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that output indices don't exceed what the node type supports.
|
||||
*/
|
||||
private validateOutputIndexBounds(
|
||||
sourceNode: WorkflowNode,
|
||||
outputs: Array<Array<{ node: string; type: string; index: number }>>,
|
||||
result: WorkflowValidationResult
|
||||
): void {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(sourceNode.type);
|
||||
const nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
if (!nodeInfo || !nodeInfo.outputs) return;
|
||||
|
||||
// Count main outputs from node description
|
||||
let mainOutputCount: number;
|
||||
if (Array.isArray(nodeInfo.outputs)) {
|
||||
// outputs can be strings like "main" or objects with { type: "main" }
|
||||
mainOutputCount = nodeInfo.outputs.filter((o: any) =>
|
||||
typeof o === 'string' ? o === 'main' : (o.type === 'main' || !o.type)
|
||||
).length;
|
||||
} else {
|
||||
return; // Dynamic outputs (expression string), skip check
|
||||
}
|
||||
|
||||
if (mainOutputCount === 0) return;
|
||||
|
||||
// Override with dynamic output counts for conditional nodes
|
||||
const conditionalInfo = this.getConditionalOutputInfo(sourceNode);
|
||||
if (conditionalInfo) {
|
||||
mainOutputCount = conditionalInfo.expectedOutputs;
|
||||
} else if (this.getShortNodeType(sourceNode) === 'switch') {
|
||||
// Switch without determinable rules -- skip bounds check
|
||||
return;
|
||||
}
|
||||
|
||||
// Account for continueErrorOutput adding an extra output
|
||||
if (sourceNode.onError === 'continueErrorOutput') {
|
||||
mainOutputCount += 1;
|
||||
}
|
||||
|
||||
// Check if any output index exceeds bounds
|
||||
const maxOutputIndex = outputs.length - 1;
|
||||
if (maxOutputIndex >= mainOutputCount) {
|
||||
// Only flag if there are actual connections at the out-of-bounds indices
|
||||
for (let i = mainOutputCount; i < outputs.length; i++) {
|
||||
if (outputs[i] && outputs[i].length > 0) {
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeId: sourceNode.id,
|
||||
nodeName: sourceNode.name,
|
||||
message: `Output index ${i} on node "${sourceNode.name}" exceeds its output count (${mainOutputCount}). ` +
|
||||
`This node has ${mainOutputCount} main output(s) (indices 0-${mainOutputCount - 1}).`,
|
||||
code: 'OUTPUT_INDEX_OUT_OF_BOUNDS'
|
||||
});
|
||||
result.statistics.invalidConnections++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect when a conditional node (IF, Filter, Switch) has all connections
|
||||
* crammed into main[0] with higher-index outputs empty. This usually means
|
||||
* both branches execute together on one condition, while the other branches
|
||||
* have no effect.
|
||||
*/
|
||||
private validateConditionalBranchUsage(
|
||||
sourceNode: WorkflowNode,
|
||||
outputs: Array<Array<{ node: string; type: string; index: number }>>,
|
||||
result: WorkflowValidationResult
|
||||
): void {
|
||||
const conditionalInfo = this.getConditionalOutputInfo(sourceNode);
|
||||
if (!conditionalInfo || conditionalInfo.expectedOutputs < 2) return;
|
||||
|
||||
const { shortType, expectedOutputs } = conditionalInfo;
|
||||
|
||||
// Check: main[0] has >= 2 connections AND all main[1+] are empty
|
||||
const main0Count = outputs[0]?.length || 0;
|
||||
if (main0Count < 2) return;
|
||||
|
||||
const hasHigherIndexConnections = outputs.slice(1).some(
|
||||
conns => conns && conns.length > 0
|
||||
);
|
||||
if (hasHigherIndexConnections) return;
|
||||
|
||||
// Build a context-appropriate warning message
|
||||
let message: string;
|
||||
if (shortType === 'if' || shortType === 'filter') {
|
||||
const isFilter = shortType === 'filter';
|
||||
const displayName = isFilter ? 'Filter' : 'IF';
|
||||
const trueLabel = isFilter ? 'matched' : 'true';
|
||||
const falseLabel = isFilter ? 'unmatched' : 'false';
|
||||
message = `${displayName} node "${sourceNode.name}" has ${main0Count} connections on the "${trueLabel}" branch (main[0]) ` +
|
||||
`but no connections on the "${falseLabel}" branch (main[1]). ` +
|
||||
`All ${main0Count} target nodes execute together on the "${trueLabel}" branch, ` +
|
||||
`while the "${falseLabel}" branch has no effect. ` +
|
||||
`Split connections: main[0] for ${trueLabel}, main[1] for ${falseLabel}.`;
|
||||
} else {
|
||||
message = `Switch node "${sourceNode.name}" has ${main0Count} connections on output 0 ` +
|
||||
`but no connections on any other outputs (1-${expectedOutputs - 1}). ` +
|
||||
`All ${main0Count} target nodes execute together on output 0, ` +
|
||||
`while other switch branches have no effect. ` +
|
||||
`Distribute connections across outputs to match switch rules.`;
|
||||
}
|
||||
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: sourceNode.id,
|
||||
nodeName: sourceNode.name,
|
||||
message,
|
||||
code: 'CONDITIONAL_BRANCH_FANOUT'
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that input index doesn't exceed what the target node accepts.
|
||||
*/
|
||||
private validateInputIndexBounds(
|
||||
sourceName: string,
|
||||
targetNode: WorkflowNode,
|
||||
connection: { node: string; type: string; index: number },
|
||||
result: WorkflowValidationResult
|
||||
): void {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(targetNode.type);
|
||||
const nodeInfo = this.nodeRepository.getNode(normalizedType);
|
||||
if (!nodeInfo) return;
|
||||
|
||||
// Most nodes have 1 main input. Known exceptions:
|
||||
const shortType = normalizedType.replace(/^(n8n-)?nodes-base\./, '');
|
||||
let mainInputCount = 1; // Default: most nodes have 1 input
|
||||
|
||||
if (shortType === 'merge' || shortType === 'compareDatasets') {
|
||||
mainInputCount = 2; // Merge nodes have 2 inputs
|
||||
}
|
||||
|
||||
// Trigger nodes have 0 inputs
|
||||
if (nodeInfo.isTrigger || isTriggerNode(targetNode.type)) {
|
||||
mainInputCount = 0;
|
||||
}
|
||||
|
||||
if (mainInputCount > 0 && connection.index >= mainInputCount) {
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeName: targetNode.name,
|
||||
message: `Input index ${connection.index} on node "${targetNode.name}" exceeds its input count (${mainInputCount}). ` +
|
||||
`Connection from "${sourceName}" targets input ${connection.index}, but this node has ${mainInputCount} main input(s) (indices 0-${mainInputCount - 1}).`,
|
||||
code: 'INPUT_INDEX_OUT_OF_BOUNDS'
|
||||
});
|
||||
result.statistics.invalidConnections++;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Flag nodes that are not referenced in any connection (source or target).
|
||||
* Used as a lightweight check when BFS reachability is not applicable.
|
||||
*/
|
||||
private flagOrphanedNodes(
|
||||
workflow: WorkflowJson,
|
||||
result: WorkflowValidationResult
|
||||
): void {
|
||||
const connectedNodes = new Set<string>();
|
||||
for (const [sourceName, outputs] of Object.entries(workflow.connections)) {
|
||||
connectedNodes.add(sourceName);
|
||||
for (const outputConns of Object.values(outputs)) {
|
||||
if (!Array.isArray(outputConns)) continue;
|
||||
for (const conns of outputConns) {
|
||||
if (!conns) continue;
|
||||
for (const conn of conns) {
|
||||
if (conn) connectedNodes.add(conn.node);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const node of workflow.nodes) {
|
||||
if (node.disabled || isNonExecutableNode(node.type)) continue;
|
||||
if (isTriggerNode(node.type)) continue;
|
||||
if (!connectedNodes.has(node.name)) {
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
message: 'Node is not connected to any other nodes'
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* BFS from all trigger nodes to detect unreachable nodes.
|
||||
* Replaces the simple "is node in any connection" check with proper graph traversal.
|
||||
*/
|
||||
private validateTriggerReachability(
|
||||
workflow: WorkflowJson,
|
||||
result: WorkflowValidationResult
|
||||
): void {
|
||||
// Build adjacency list (forward direction)
|
||||
const adjacency = new Map<string, Set<string>>();
|
||||
for (const [sourceName, outputs] of Object.entries(workflow.connections)) {
|
||||
if (!adjacency.has(sourceName)) adjacency.set(sourceName, new Set());
|
||||
for (const outputConns of Object.values(outputs)) {
|
||||
if (Array.isArray(outputConns)) {
|
||||
for (const conns of outputConns) {
|
||||
if (!conns) continue;
|
||||
for (const conn of conns) {
|
||||
if (conn) {
|
||||
adjacency.get(sourceName)!.add(conn.node);
|
||||
// Also track that the target exists in the graph
|
||||
if (!adjacency.has(conn.node)) adjacency.set(conn.node, new Set());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Identify trigger nodes
|
||||
const triggerNodes: string[] = [];
|
||||
for (const node of workflow.nodes) {
|
||||
if (isTriggerNode(node.type) && !node.disabled) {
|
||||
triggerNodes.push(node.name);
|
||||
}
|
||||
}
|
||||
|
||||
// If no trigger nodes, fall back to simple orphaned check
|
||||
if (triggerNodes.length === 0) {
|
||||
this.flagOrphanedNodes(workflow, result);
|
||||
return;
|
||||
}
|
||||
|
||||
// BFS from all trigger nodes
|
||||
const reachable = new Set<string>();
|
||||
const queue: string[] = [...triggerNodes];
|
||||
for (const t of triggerNodes) reachable.add(t);
|
||||
|
||||
while (queue.length > 0) {
|
||||
const current = queue.shift()!;
|
||||
const neighbors = adjacency.get(current);
|
||||
if (neighbors) {
|
||||
for (const neighbor of neighbors) {
|
||||
if (!reachable.has(neighbor)) {
|
||||
reachable.add(neighbor);
|
||||
queue.push(neighbor);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Flag unreachable nodes
|
||||
for (const node of workflow.nodes) {
|
||||
if (node.disabled || isNonExecutableNode(node.type)) continue;
|
||||
if (isTriggerNode(node.type)) continue;
|
||||
|
||||
if (!reachable.has(node.name)) {
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
message: 'Node is not reachable from any trigger node'
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if workflow has cycles
|
||||
* Allow legitimate loops for SplitInBatches and similar loop nodes
|
||||
@@ -1368,13 +1024,23 @@ export class WorkflowValidator {
|
||||
const connections = workflow.connections[nodeName];
|
||||
if (connections) {
|
||||
const allTargets: string[] = [];
|
||||
|
||||
for (const outputConns of Object.values(connections)) {
|
||||
if (Array.isArray(outputConns)) {
|
||||
outputConns.flat().forEach(conn => {
|
||||
if (conn) allTargets.push(conn.node);
|
||||
});
|
||||
}
|
||||
|
||||
if (connections.main) {
|
||||
connections.main.flat().forEach(conn => {
|
||||
if (conn) allTargets.push(conn.node);
|
||||
});
|
||||
}
|
||||
|
||||
if (connections.error) {
|
||||
connections.error.flat().forEach(conn => {
|
||||
if (conn) allTargets.push(conn.node);
|
||||
});
|
||||
}
|
||||
|
||||
if (connections.ai_tool) {
|
||||
connections.ai_tool.flat().forEach(conn => {
|
||||
if (conn) allTargets.push(conn.node);
|
||||
});
|
||||
}
|
||||
|
||||
const currentNodeType = nodeTypeMap.get(nodeName);
|
||||
|
||||
@@ -1,566 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { WorkflowAutoFixer } from '@/services/workflow-auto-fixer';
|
||||
import { NodeRepository } from '@/database/node-repository';
|
||||
import type { WorkflowValidationResult } from '@/services/workflow-validator';
|
||||
import type { Workflow, WorkflowNode } from '@/types/n8n-api';
|
||||
|
||||
vi.mock('@/database/node-repository');
|
||||
vi.mock('@/services/node-similarity-service');
|
||||
|
||||
describe('WorkflowAutoFixer - Connection Fixes', () => {
|
||||
// System under test and its mocked node repository; both are re-created
// in beforeEach so state never leaks between tests.
let autoFixer: WorkflowAutoFixer;
let mockRepository: NodeRepository;
|
||||
|
||||
const createMockWorkflow = (
|
||||
nodes: WorkflowNode[],
|
||||
connections: any = {}
|
||||
): Workflow => ({
|
||||
id: 'test-workflow',
|
||||
name: 'Test Workflow',
|
||||
active: false,
|
||||
nodes,
|
||||
connections,
|
||||
settings: {},
|
||||
createdAt: '',
|
||||
updatedAt: ''
|
||||
});
|
||||
|
||||
const createMockNode = (id: string, name: string, type: string = 'n8n-nodes-base.noOp'): WorkflowNode => ({
|
||||
id,
|
||||
name,
|
||||
type,
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
});
|
||||
|
||||
// Baseline "no issues" validation result, reused by tests that exercise
// purely structural connection fixes (no validator-reported errors).
const emptyValidation: WorkflowValidationResult = {
  valid: true,
  errors: [],
  warnings: [],
  statistics: {
    totalNodes: 0,
    enabledNodes: 0,
    triggerNodes: 0,
    validConnections: 0,
    invalidConnections: 0,
    expressionsValidated: 0
  },
  suggestions: []
};
|
||||
|
||||
beforeEach(() => {
  // Reset spies/mocks so call counts don't leak between tests.
  vi.clearAllMocks();
  mockRepository = new NodeRepository({} as any);
  // Version lookups aren't under test here; report "no known versions".
  vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
  autoFixer = new WorkflowAutoFixer(mockRepository);
});
|
||||
|
||||
// Fixes for connection maps that use numeric keys ('0', '1', ...) where
// n8n expects the 'main' key with an output-index array.
describe('Numeric Keys', () => {
  it('should convert single numeric key to main[index]', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
      {
        Node1: {
          '0': [[{ node: 'Node2', type: 'main', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-numeric-keys');
    expect(connFixes).toHaveLength(1);
    expect(connFixes[0].before).toBe('0');
    expect(connFixes[0].after).toBe('main[0]');

    // Verify replaceConnections operation
    const replaceOp = result.operations.find(op => op.type === 'replaceConnections');
    expect(replaceOp).toBeDefined();
    const connOp = replaceOp as any;
    expect(connOp.connections.Node1['main']).toBeDefined();
    expect(connOp.connections.Node1['0']).toBeUndefined();
  });

  it('should convert multiple numeric keys', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2'), createMockNode('id3', 'Node3')],
      {
        Node1: {
          '0': [[{ node: 'Node2', type: 'main', index: 0 }]],
          '1': [[{ node: 'Node3', type: 'main', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-numeric-keys');
    expect(connFixes).toHaveLength(2);
  });

  it('should merge with existing main entries', async () => {
    // Numeric key '1' must land in main[1] alongside the pre-existing main[0].
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2'), createMockNode('id3', 'Node3')],
      {
        Node1: {
          main: [[{ node: 'Node2', type: 'main', index: 0 }]],
          '1': [[{ node: 'Node3', type: 'main', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const replaceOp = result.operations.find(op => op.type === 'replaceConnections') as any;
    expect(replaceOp.connections.Node1['main']).toHaveLength(2);
    expect(replaceOp.connections.Node1['main'][0]).toEqual([{ node: 'Node2', type: 'main', index: 0 }]);
    expect(replaceOp.connections.Node1['main'][1]).toEqual([{ node: 'Node3', type: 'main', index: 0 }]);
  });

  it('should handle sparse numeric keys with gap filling', async () => {
    // Keys '0' and '3' produce a 4-slot main array with empty slots at 1 and 2.
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2'), createMockNode('id3', 'Node3')],
      {
        Node1: {
          '0': [[{ node: 'Node2', type: 'main', index: 0 }]],
          '3': [[{ node: 'Node3', type: 'main', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const replaceOp = result.operations.find(op => op.type === 'replaceConnections') as any;
    expect(replaceOp.connections.Node1['main']).toHaveLength(4);
    expect(replaceOp.connections.Node1['main'][1]).toEqual([]);
    expect(replaceOp.connections.Node1['main'][2]).toEqual([]);
  });
});
|
||||
|
||||
// Fixes for connection entries whose per-connection `type` field is invalid
// (e.g. numeric); the fix infers the correct type from context.
describe('Invalid Type', () => {
  it('should fix numeric type to "main"', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
      {
        Node1: {
          main: [[{ node: 'Node2', type: '0', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-invalid-type');
    expect(connFixes).toHaveLength(1);
    expect(connFixes[0].before).toBe('0');
    expect(connFixes[0].after).toBe('main');
  });

  it('should use parent output key for AI connection types', async () => {
    // When nested under an ai_* output key, the inferred type is that key,
    // not 'main'.
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
      {
        Node1: {
          ai_tool: [[{ node: 'Node2', type: '0', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-invalid-type');
    expect(connFixes).toHaveLength(1);
    expect(connFixes[0].after).toBe('ai_tool');
  });
});
|
||||
|
||||
// Fixes for connections that reference nodes by ID where n8n expects names,
// both as source keys and as target `.node` values.
describe('ID-to-Name', () => {
  it('should replace source key when it matches a node ID', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('abc-123', 'Node1'), createMockNode('def-456', 'Node2')],
      {
        'abc-123': {
          main: [[{ node: 'Node2', type: 'main', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-id-to-name');
    expect(connFixes).toHaveLength(1);
    expect(connFixes[0].before).toBe('abc-123');
    expect(connFixes[0].after).toBe('Node1');

    const replaceOp = result.operations.find(op => op.type === 'replaceConnections') as any;
    expect(replaceOp.connections['Node1']).toBeDefined();
    expect(replaceOp.connections['abc-123']).toBeUndefined();
  });

  it('should replace target node value when it matches a node ID', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('abc-123', 'Node1'), createMockNode('def-456', 'Node2')],
      {
        Node1: {
          main: [[{ node: 'def-456', type: 'main', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-id-to-name');
    expect(connFixes).toHaveLength(1);
    expect(connFixes[0].before).toBe('def-456');
    expect(connFixes[0].after).toBe('Node2');
  });

  it('should NOT fix when key matches both an ID and a name', async () => {
    // Node with name that looks like an ID of another node — the ambiguity
    // must suppress the fix rather than risk rewiring a valid reference.
    const workflow = createMockWorkflow(
      [createMockNode('abc-123', 'abc-123'), createMockNode('def-456', 'Node2')],
      {
        'abc-123': {
          main: [[{ node: 'Node2', type: 'main', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-id-to-name');
    expect(connFixes).toHaveLength(0);
  });
});
|
||||
|
||||
// Duplicate-connection removal: only exact duplicates (same node, type,
// AND index) are collapsed.
describe('Dedup', () => {
  it('should remove exact duplicate connections', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
      {
        Node1: {
          main: [[
            { node: 'Node2', type: 'main', index: 0 },
            { node: 'Node2', type: 'main', index: 0 },
          ]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-duplicate-removal');
    expect(connFixes).toHaveLength(1);

    const replaceOp = result.operations.find(op => op.type === 'replaceConnections') as any;
    expect(replaceOp.connections.Node1.main[0]).toHaveLength(1);
  });

  it('should keep near-duplicates with different index', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
      {
        Node1: {
          main: [[
            { node: 'Node2', type: 'main', index: 0 },
            { node: 'Node2', type: 'main', index: 1 },
          ]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-duplicate-removal');
    expect(connFixes).toHaveLength(0);
  });
});
|
||||
|
||||
// Fixes driven by validator INPUT_INDEX_OUT_OF_BOUNDS errors: out-of-range
// target input indices are reset (single-input nodes) or clamped (multi-input).
describe('Input Index', () => {
  it('should reset to 0 for single-input nodes', async () => {
    // The fixer parses the bounds from the validator error message.
    const validation: WorkflowValidationResult = {
      ...emptyValidation,
      errors: [{
        type: 'error',
        nodeName: 'Node2',
        message: 'Input index 3 on node "Node2" exceeds its input count (1). Connection from "Node1" targets input 3, but this node has 1 main input(s) (indices 0-0).',
        code: 'INPUT_INDEX_OUT_OF_BOUNDS'
      }]
    };

    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2', 'n8n-nodes-base.httpRequest')],
      {
        Node1: {
          main: [[{ node: 'Node2', type: 'main', index: 3 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, validation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-input-index');
    expect(connFixes).toHaveLength(1);
    expect(connFixes[0].before).toBe(3);
    expect(connFixes[0].after).toBe(0);
    expect(connFixes[0].confidence).toBe('medium');
  });

  it('should clamp for Merge nodes', async () => {
    const validation: WorkflowValidationResult = {
      ...emptyValidation,
      errors: [{
        type: 'error',
        nodeName: 'MergeNode',
        message: 'Input index 5 on node "MergeNode" exceeds its input count (2). Connection from "Node1" targets input 5, but this node has 2 main input(s) (indices 0-1).',
        code: 'INPUT_INDEX_OUT_OF_BOUNDS'
      }]
    };

    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'MergeNode', 'n8n-nodes-base.merge')],
      {
        Node1: {
          main: [[{ node: 'MergeNode', type: 'main', index: 5 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, validation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-input-index');
    expect(connFixes).toHaveLength(1);
    expect(connFixes[0].before).toBe(5);
    expect(connFixes[0].after).toBe(1); // clamped to max valid index
  });

  it('should not fix valid indices', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
      {
        Node1: {
          main: [[{ node: 'Node2', type: 'main', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-input-index');
    expect(connFixes).toHaveLength(0);
  });
});
|
||||
|
||||
// Multiple fix categories applied in a single pass, plus idempotence on an
// already-valid connection map.
describe('Combined', () => {
  it('should fix multiple issues in one workflow', async () => {
    const workflow = createMockWorkflow(
      [
        createMockNode('id1', 'Node1'),
        createMockNode('id2', 'Node2'),
        createMockNode('id3', 'Node3')
      ],
      {
        Node1: {
          '0': [[
            { node: 'Node2', type: '0', index: 0 },
            { node: 'Node2', type: '0', index: 0 }, // duplicate
          ]]
        },
        'id3': { // ID instead of name
          main: [[{ node: 'Node2', type: 'main', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    expect(result.fixes.length).toBeGreaterThan(0);
    expect(result.operations.find(op => op.type === 'replaceConnections')).toBeDefined();

    // Should have numeric key, invalid type, dedup, and id-to-name fixes
    const types = new Set(result.fixes.map(f => f.type));
    expect(types.has('connection-numeric-keys')).toBe(true);
    expect(types.has('connection-id-to-name')).toBe(true);
  });

  it('should be idempotent (no fixes on valid connections)', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
      {
        Node1: {
          main: [[{ node: 'Node2', type: 'main', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connectionFixTypes = [
      'connection-numeric-keys',
      'connection-invalid-type',
      'connection-id-to-name',
      'connection-duplicate-removal',
      'connection-input-index'
    ];
    const connFixes = result.fixes.filter(f => connectionFixTypes.includes(f.type));
    expect(connFixes).toHaveLength(0);
    expect(result.operations.find(op => op.type === 'replaceConnections')).toBeUndefined();
  });
});
|
||||
|
||||
// Edge cases: empty input, option filtering, confidence thresholds, summary
// text, missing targets, unparseable errors, and confidence downgrades.
describe('Edge Cases', () => {
  it('should handle empty connections', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1')],
      {}
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    expect(result.operations.find(op => op.type === 'replaceConnections')).toBeUndefined();
  });

  it('should respect fixTypes filtering', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
      {
        Node1: {
          '0': [[{ node: 'Node2', type: '0', index: 0 }]]
        }
      }
    );

    // Only allow numeric key fixes, not invalid type fixes
    const result = await autoFixer.generateFixes(workflow, emptyValidation, [], {
      fixTypes: ['connection-numeric-keys']
    });

    const numericFixes = result.fixes.filter(f => f.type === 'connection-numeric-keys');
    const typeFixes = result.fixes.filter(f => f.type === 'connection-invalid-type');
    expect(numericFixes.length).toBeGreaterThan(0);
    expect(typeFixes).toHaveLength(0);
  });

  it('should filter replaceConnections from operations when confidence threshold filters all connection fixes', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
      {
        Node1: {
          main: [[{ node: 'Node2', type: 'main', index: 5 }]]
        }
      }
    );

    const validation: WorkflowValidationResult = {
      ...emptyValidation,
      errors: [{
        type: 'error',
        nodeName: 'Node2',
        message: 'Input index 5 on node "Node2" exceeds its input count (1). Connection from "Node1" targets input 5, but this node has 1 main input(s) (indices 0-0).',
        code: 'INPUT_INDEX_OUT_OF_BOUNDS'
      }]
    };

    // Input index fixes are medium confidence. Filter to high only.
    const result = await autoFixer.generateFixes(workflow, validation, [], {
      confidenceThreshold: 'high'
    });

    // Medium confidence fixes should be filtered out
    const connFixes = result.fixes.filter(f => f.type === 'connection-input-index');
    expect(connFixes).toHaveLength(0);
    expect(result.operations.find(op => op.type === 'replaceConnections')).toBeUndefined();
  });

  it('should include connection issues in summary', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
      {
        Node1: {
          '0': [[{ node: 'Node2', type: 'main', index: 0 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    expect(result.summary).toContain('connection');
  });

  it('should handle non-existent target nodes gracefully', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1')],
      {
        Node1: {
          '0': [[{ node: 'NonExistent', type: 'main', index: 0 }]]
        }
      }
    );

    // Should not throw
    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    expect(result.fixes.some(f => f.type === 'connection-numeric-keys')).toBe(true);
  });

  it('should skip unparseable INPUT_INDEX_OUT_OF_BOUNDS errors gracefully', async () => {
    // Error message lacks the expected "Input index N ..." shape.
    const validation: WorkflowValidationResult = {
      ...emptyValidation,
      errors: [{
        type: 'error',
        nodeName: 'Node2',
        message: 'Something unexpected about input indices',
        code: 'INPUT_INDEX_OUT_OF_BOUNDS'
      }]
    };

    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2')],
      {
        Node1: {
          main: [[{ node: 'Node2', type: 'main', index: 5 }]]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, validation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-input-index');
    expect(connFixes).toHaveLength(0);
  });

  it('should fix both source keys and target .node values as IDs in the same workflow', async () => {
    const workflow = createMockWorkflow(
      [
        createMockNode('abc-123', 'Node1'),
        createMockNode('def-456', 'Node2'),
        createMockNode('ghi-789', 'Node3')
      ],
      {
        'abc-123': { // source key is ID
          main: [[{ node: 'def-456', type: 'main', index: 0 }]] // target .node is also ID
        },
        Node2: {
          main: [[{ node: 'ghi-789', type: 'main', index: 0 }]] // another target ID
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const connFixes = result.fixes.filter(f => f.type === 'connection-id-to-name');

    // Should fix: source key abc-123 → Node1, target def-456 → Node2, target ghi-789 → Node3
    expect(connFixes).toHaveLength(3);

    const replaceOp = result.operations.find(op => op.type === 'replaceConnections') as any;
    expect(replaceOp.connections['Node1']).toBeDefined();
    expect(replaceOp.connections['abc-123']).toBeUndefined();

    // Verify target .node values were also replaced
    const node1Conns = replaceOp.connections['Node1'].main[0];
    expect(node1Conns[0].node).toBe('Node2');

    const node2Conns = replaceOp.connections['Node2'].main[0];
    expect(node2Conns[0].node).toBe('Node3');
  });

  it('should lower confidence to medium when merging numeric key into non-empty main slot', async () => {
    const workflow = createMockWorkflow(
      [createMockNode('id1', 'Node1'), createMockNode('id2', 'Node2'), createMockNode('id3', 'Node3')],
      {
        Node1: {
          main: [[{ node: 'Node2', type: 'main', index: 0 }]],
          '0': [[{ node: 'Node3', type: 'main', index: 0 }]] // conflicts with existing main[0]
        }
      }
    );

    const result = await autoFixer.generateFixes(workflow, emptyValidation, []);
    const numericFixes = result.fixes.filter(f => f.type === 'connection-numeric-keys');
    expect(numericFixes).toHaveLength(1);
    expect(numericFixes[0].confidence).toBe('medium');
    expect(numericFixes[0].description).toContain('Merged');
  });
});
|
||||
});
|
||||
@@ -1,217 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { WorkflowValidator } from '@/services/workflow-validator';
|
||||
import { NodeRepository } from '@/database/node-repository';
|
||||
import { EnhancedConfigValidator } from '@/services/enhanced-config-validator';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('@/database/node-repository');
|
||||
vi.mock('@/services/enhanced-config-validator');
|
||||
vi.mock('@/services/expression-validator');
|
||||
vi.mock('@/utils/logger');
|
||||
|
||||
describe('WorkflowValidator - AI Sub-Node Main Connection Detection', () => {
|
||||
// Validator under test and its mocked repository (wired up in beforeEach).
let validator: WorkflowValidator;
let mockNodeRepository: NodeRepository;
|
||||
|
||||
beforeEach(() => {
  vi.clearAllMocks();

  mockNodeRepository = new NodeRepository({} as any) as any;

  // The class is vi.mock'd; backfill methods the auto-mock may not provide.
  if (!mockNodeRepository.getAllNodes) {
    mockNodeRepository.getAllNodes = vi.fn();
  }
  if (!mockNodeRepository.getNode) {
    mockNodeRepository.getNode = vi.fn();
  }

  // Fixture node metadata keyed by repository type string. The `outputs`
  // arrays drive the AI sub-node detection these tests exercise.
  const nodeTypes: Record<string, any> = {
    'nodes-base.manualTrigger': {
      type: 'nodes-base.manualTrigger',
      displayName: 'Manual Trigger',
      package: 'n8n-nodes-base',
      isTrigger: true,
      outputs: ['main'],
      properties: [],
    },
    'nodes-base.set': {
      type: 'nodes-base.set',
      displayName: 'Set',
      package: 'n8n-nodes-base',
      outputs: ['main'],
      properties: [],
    },
    'nodes-langchain.lmChatGoogleGemini': {
      type: 'nodes-langchain.lmChatGoogleGemini',
      displayName: 'Google Gemini Chat Model',
      package: '@n8n/n8n-nodes-langchain',
      outputs: ['ai_languageModel'],
      properties: [],
    },
    'nodes-langchain.memoryBufferWindow': {
      type: 'nodes-langchain.memoryBufferWindow',
      displayName: 'Window Buffer Memory',
      package: '@n8n/n8n-nodes-langchain',
      outputs: ['ai_memory'],
      properties: [],
    },
    'nodes-langchain.embeddingsOpenAi': {
      type: 'nodes-langchain.embeddingsOpenAi',
      displayName: 'Embeddings OpenAI',
      package: '@n8n/n8n-nodes-langchain',
      outputs: ['ai_embedding'],
      properties: [],
    },
    'nodes-langchain.agent': {
      type: 'nodes-langchain.agent',
      displayName: 'AI Agent',
      package: '@n8n/n8n-nodes-langchain',
      isAITool: true,
      outputs: ['main'],
      properties: [],
    },
    'nodes-langchain.openAi': {
      type: 'nodes-langchain.openAi',
      displayName: 'OpenAI',
      package: '@n8n/n8n-nodes-langchain',
      outputs: ['main'],
      properties: [],
    },
    'nodes-langchain.textClassifier': {
      type: 'nodes-langchain.textClassifier',
      displayName: 'Text Classifier',
      package: '@n8n/n8n-nodes-langchain',
      outputs: ['={{}}'], // Dynamic expression-based outputs
      properties: [],
    },
    'nodes-langchain.vectorStoreInMemory': {
      type: 'nodes-langchain.vectorStoreInMemory',
      displayName: 'In-Memory Vector Store',
      package: '@n8n/n8n-nodes-langchain',
      outputs: ['={{$parameter["mode"] === "retrieve" ? "main" : "ai_vectorStore"}}'],
      properties: [],
    },
  };

  // Unknown node types resolve to null, matching the real repository contract.
  vi.mocked(mockNodeRepository.getNode).mockImplementation((nodeType: string) => {
    return nodeTypes[nodeType] || null;
  });
  vi.mocked(mockNodeRepository.getAllNodes).mockReturnValue(Object.values(nodeTypes));

  validator = new WorkflowValidator(
    mockNodeRepository,
    EnhancedConfigValidator as any
  );
});
|
||||
|
||||
function makeWorkflow(sourceType: string, sourceName: string, connectionKey: string = 'main') {
|
||||
return {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Manual Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: sourceName, type: sourceType, position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Set', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Manual Trigger': {
|
||||
main: [[{ node: sourceName, type: 'main', index: 0 }]]
|
||||
},
|
||||
[sourceName]: {
|
||||
[connectionKey]: [[{ node: 'Set', type: connectionKey, index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// The following cases assert that AI sub-nodes wired through a `main`
// connection produce an AI_SUBNODE_MAIN_CONNECTION error whose message
// names the sub-node's expected ai_* port.
it('should flag LLM node (lmChatGoogleGemini) connected via main', async () => {
  const workflow = makeWorkflow(
    'n8n-nodes-langchain.lmChatGoogleGemini',
    'Google Gemini'
  );

  const result = await validator.validateWorkflow(workflow as any);

  const error = result.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION');
  expect(error).toBeDefined();
  expect(error!.message).toContain('ai_languageModel');
  expect(error!.message).toContain('AI sub-node');
  expect(error!.nodeName).toBe('Google Gemini');
});

it('should flag memory node (memoryBufferWindow) connected via main', async () => {
  const workflow = makeWorkflow(
    'n8n-nodes-langchain.memoryBufferWindow',
    'Window Buffer Memory'
  );

  const result = await validator.validateWorkflow(workflow as any);

  const error = result.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION');
  expect(error).toBeDefined();
  expect(error!.message).toContain('ai_memory');
});

it('should flag embeddings node connected via main', async () => {
  const workflow = makeWorkflow(
    'n8n-nodes-langchain.embeddingsOpenAi',
    'Embeddings OpenAI'
  );

  const result = await validator.validateWorkflow(workflow as any);

  const error = result.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION');
  expect(error).toBeDefined();
  expect(error!.message).toContain('ai_embedding');
});
|
||||
|
||||
it('should NOT flag regular langchain nodes (agent, openAi) connected via main', async () => {
|
||||
const workflow1 = makeWorkflow('n8n-nodes-langchain.agent', 'AI Agent');
|
||||
const workflow2 = makeWorkflow('n8n-nodes-langchain.openAi', 'OpenAI');
|
||||
|
||||
const result1 = await validator.validateWorkflow(workflow1 as any);
|
||||
const result2 = await validator.validateWorkflow(workflow2 as any);
|
||||
|
||||
expect(result1.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
expect(result2.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should NOT flag dynamic-output nodes (expression-based outputs)', async () => {
|
||||
const workflow1 = makeWorkflow('n8n-nodes-langchain.textClassifier', 'Text Classifier');
|
||||
const workflow2 = makeWorkflow('n8n-nodes-langchain.vectorStoreInMemory', 'Vector Store');
|
||||
|
||||
const result1 = await validator.validateWorkflow(workflow1 as any);
|
||||
const result2 = await validator.validateWorkflow(workflow2 as any);
|
||||
|
||||
expect(result1.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
expect(result2.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should NOT flag AI sub-node connected via correct AI type', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Manual Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'AI Agent', type: 'n8n-nodes-langchain.agent', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Google Gemini', type: 'n8n-nodes-langchain.lmChatGoogleGemini', position: [200, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Manual Trigger': {
|
||||
main: [[{ node: 'AI Agent', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Google Gemini': {
|
||||
ai_languageModel: [[{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
expect(result.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should NOT flag unknown/community nodes not in database', async () => {
|
||||
const workflow = makeWorkflow('n8n-nodes-community.someNode', 'Community Node');
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
expect(result.errors.find(e => e.code === 'AI_SUBNODE_MAIN_CONNECTION')).toBeUndefined();
|
||||
});
|
||||
});
|
||||
@@ -1067,7 +1067,7 @@ describe('WorkflowValidator - Comprehensive Tests', () => {
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
expect(result.warnings.some(w => w.message.includes('not reachable from any trigger node') && w.nodeName === 'Orphaned')).toBe(true);
|
||||
expect(result.warnings.some(w => w.message.includes('Node is not connected to any other nodes') && w.nodeName === 'Orphaned')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect cycles in workflow', async () => {
|
||||
@@ -1987,7 +1987,7 @@ describe('WorkflowValidator - Comprehensive Tests', () => {
|
||||
|
||||
// Warnings
|
||||
expect(result.warnings.some(w => w.message.includes('Connection to disabled node'))).toBe(true);
|
||||
expect(result.warnings.some(w => w.message.includes('not reachable from any trigger node') && w.nodeName === 'Orphaned')).toBe(true);
|
||||
expect(result.warnings.some(w => w.message.includes('Node is not connected') && w.nodeName === 'Orphaned')).toBe(true);
|
||||
expect(result.warnings.some(w => w.message.includes('AI Agent has no tools connected'))).toBe(true);
|
||||
|
||||
// Statistics
|
||||
|
||||
@@ -1,918 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { WorkflowValidator } from '@/services/workflow-validator';
|
||||
import { NodeRepository } from '@/database/node-repository';
|
||||
import { EnhancedConfigValidator } from '@/services/enhanced-config-validator';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('@/database/node-repository');
|
||||
vi.mock('@/services/enhanced-config-validator');
|
||||
vi.mock('@/services/expression-validator');
|
||||
vi.mock('@/utils/logger');
|
||||
|
||||
describe('WorkflowValidator - Connection Validation (#620)', () => {
|
||||
let validator: WorkflowValidator;
|
||||
let mockNodeRepository: NodeRepository;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
mockNodeRepository = new NodeRepository({} as any) as any;
|
||||
|
||||
if (!mockNodeRepository.getAllNodes) {
|
||||
mockNodeRepository.getAllNodes = vi.fn();
|
||||
}
|
||||
if (!mockNodeRepository.getNode) {
|
||||
mockNodeRepository.getNode = vi.fn();
|
||||
}
|
||||
|
||||
const nodeTypes: Record<string, any> = {
|
||||
'nodes-base.webhook': {
|
||||
type: 'nodes-base.webhook',
|
||||
displayName: 'Webhook',
|
||||
package: 'n8n-nodes-base',
|
||||
isTrigger: true,
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.manualTrigger': {
|
||||
type: 'nodes-base.manualTrigger',
|
||||
displayName: 'Manual Trigger',
|
||||
package: 'n8n-nodes-base',
|
||||
isTrigger: true,
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.set': {
|
||||
type: 'nodes-base.set',
|
||||
displayName: 'Set',
|
||||
package: 'n8n-nodes-base',
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.code': {
|
||||
type: 'nodes-base.code',
|
||||
displayName: 'Code',
|
||||
package: 'n8n-nodes-base',
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.if': {
|
||||
type: 'nodes-base.if',
|
||||
displayName: 'IF',
|
||||
package: 'n8n-nodes-base',
|
||||
outputs: ['main', 'main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.filter': {
|
||||
type: 'nodes-base.filter',
|
||||
displayName: 'Filter',
|
||||
package: 'n8n-nodes-base',
|
||||
outputs: ['main', 'main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.switch': {
|
||||
type: 'nodes-base.switch',
|
||||
displayName: 'Switch',
|
||||
package: 'n8n-nodes-base',
|
||||
outputs: ['main', 'main', 'main', 'main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.googleSheets': {
|
||||
type: 'nodes-base.googleSheets',
|
||||
displayName: 'Google Sheets',
|
||||
package: 'n8n-nodes-base',
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-base.merge': {
|
||||
type: 'nodes-base.merge',
|
||||
displayName: 'Merge',
|
||||
package: 'n8n-nodes-base',
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
},
|
||||
'nodes-langchain.agent': {
|
||||
type: 'nodes-langchain.agent',
|
||||
displayName: 'AI Agent',
|
||||
package: '@n8n/n8n-nodes-langchain',
|
||||
isAITool: true,
|
||||
outputs: ['main'],
|
||||
properties: [],
|
||||
},
|
||||
};
|
||||
|
||||
vi.mocked(mockNodeRepository.getNode).mockImplementation((nodeType: string) => {
|
||||
return nodeTypes[nodeType] || null;
|
||||
});
|
||||
vi.mocked(mockNodeRepository.getAllNodes).mockReturnValue(Object.values(nodeTypes));
|
||||
|
||||
validator = new WorkflowValidator(
|
||||
mockNodeRepository,
|
||||
EnhancedConfigValidator as any
|
||||
);
|
||||
});
|
||||
|
||||
describe('Unknown output keys (P0)', () => {
|
||||
it('should flag numeric string key "1" with index suggestion', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Save to Google Sheets', type: 'n8n-nodes-base.googleSheets', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Format Error', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'Success Response', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Save to Google Sheets', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Save to Google Sheets': {
|
||||
'1': [[{ node: 'Format Error', type: '0', index: 0 }]],
|
||||
main: [[{ node: 'Success Response', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const unknownKeyError = result.errors.find(e => e.code === 'UNKNOWN_CONNECTION_KEY');
|
||||
expect(unknownKeyError).toBeDefined();
|
||||
expect(unknownKeyError!.message).toContain('Unknown connection output key "1"');
|
||||
expect(unknownKeyError!.message).toContain('use main[1] instead');
|
||||
expect(unknownKeyError!.nodeName).toBe('Save to Google Sheets');
|
||||
});
|
||||
|
||||
it('should flag random string key "output"', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Code', type: 'n8n-nodes-base.code', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Set', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Code', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Code': {
|
||||
output: [[{ node: 'Set', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const unknownKeyError = result.errors.find(e => e.code === 'UNKNOWN_CONNECTION_KEY');
|
||||
expect(unknownKeyError).toBeDefined();
|
||||
expect(unknownKeyError!.message).toContain('Unknown connection output key "output"');
|
||||
// Should NOT have index suggestion for non-numeric key
|
||||
expect(unknownKeyError!.message).not.toContain('use main[');
|
||||
});
|
||||
|
||||
it('should accept valid keys: main, error, ai_tool', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Code', type: 'n8n-nodes-base.code', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Set', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Code', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Code': {
|
||||
main: [[{ node: 'Set', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const unknownKeyErrors = result.errors.filter(e => e.code === 'UNKNOWN_CONNECTION_KEY');
|
||||
expect(unknownKeyErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should accept AI connection types as valid keys', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Chat Trigger', type: 'n8n-nodes-base.chatTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'AI Agent', type: 'nodes-langchain.agent', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'LLM', type: 'nodes-langchain.lmChatOpenAi', position: [200, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Chat Trigger': {
|
||||
main: [[{ node: 'AI Agent', type: 'main', index: 0 }]]
|
||||
},
|
||||
'LLM': {
|
||||
ai_languageModel: [[{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const unknownKeyErrors = result.errors.filter(e => e.code === 'UNKNOWN_CONNECTION_KEY');
|
||||
expect(unknownKeyErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should flag multiple unknown keys on the same node', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Code', type: 'n8n-nodes-base.code', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Set1', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'Set2', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Code', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Code': {
|
||||
'0': [[{ node: 'Set1', type: 'main', index: 0 }]],
|
||||
'1': [[{ node: 'Set2', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const unknownKeyErrors = result.errors.filter(e => e.code === 'UNKNOWN_CONNECTION_KEY');
|
||||
expect(unknownKeyErrors).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid type field (P0)', () => {
|
||||
it('should flag numeric type "0" in connection target', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Sheets', type: 'n8n-nodes-base.googleSheets', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Error Handler', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Sheets', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Sheets': {
|
||||
main: [[{ node: 'Error Handler', type: '0', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const typeError = result.errors.find(e => e.code === 'INVALID_CONNECTION_TYPE');
|
||||
expect(typeError).toBeDefined();
|
||||
expect(typeError!.message).toContain('Invalid connection type "0"');
|
||||
expect(typeError!.message).toContain('Numeric types are not valid');
|
||||
});
|
||||
|
||||
it('should flag invented type "output"', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Code', type: 'n8n-nodes-base.code', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Set', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Code', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Code': {
|
||||
main: [[{ node: 'Set', type: 'output', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const typeError = result.errors.find(e => e.code === 'INVALID_CONNECTION_TYPE');
|
||||
expect(typeError).toBeDefined();
|
||||
expect(typeError!.message).toContain('Invalid connection type "output"');
|
||||
});
|
||||
|
||||
it('should accept valid type "main"', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Set', type: 'n8n-nodes-base.set', position: [200, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Set', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const typeErrors = result.errors.filter(e => e.code === 'INVALID_CONNECTION_TYPE');
|
||||
expect(typeErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should accept AI connection types in type field', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Chat Trigger', type: 'n8n-nodes-base.chatTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'AI Agent', type: 'nodes-langchain.agent', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Memory', type: 'nodes-langchain.memoryBufferWindow', position: [200, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Chat Trigger': {
|
||||
main: [[{ node: 'AI Agent', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Memory': {
|
||||
ai_memory: [[{ node: 'AI Agent', type: 'ai_memory', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const typeErrors = result.errors.filter(e => e.code === 'INVALID_CONNECTION_TYPE');
|
||||
expect(typeErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should catch the real-world example from issue #620', async () => {
|
||||
// Exact reproduction of the bug reported in the issue
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Save to Google Sheets', type: 'n8n-nodes-base.googleSheets', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Format AI Integration Error', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'Webhook Success Response', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Save to Google Sheets', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Save to Google Sheets': {
|
||||
'1': [[{ node: 'Format AI Integration Error', type: '0', index: 0 }]],
|
||||
main: [[{ node: 'Webhook Success Response', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
// Should detect both bugs
|
||||
const unknownKeyError = result.errors.find(e => e.code === 'UNKNOWN_CONNECTION_KEY');
|
||||
expect(unknownKeyError).toBeDefined();
|
||||
expect(unknownKeyError!.message).toContain('"1"');
|
||||
expect(unknownKeyError!.message).toContain('use main[1] instead');
|
||||
|
||||
// The type "0" error won't appear since the "1" key is unknown and skipped,
|
||||
// but the error count should reflect the invalid connection
|
||||
expect(result.statistics.invalidConnections).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Output index bounds checking (P1)', () => {
|
||||
it('should flag Code node with main[1] (only has 1 output)', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Code', type: 'n8n-nodes-base.code', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Success', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'Error', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Code', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Code': {
|
||||
main: [
|
||||
[{ node: 'Success', type: 'main', index: 0 }],
|
||||
[{ node: 'Error', type: 'main', index: 0 }] // main[1] - out of bounds
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const boundsError = result.errors.find(e => e.code === 'OUTPUT_INDEX_OUT_OF_BOUNDS');
|
||||
expect(boundsError).toBeDefined();
|
||||
expect(boundsError!.message).toContain('Output index 1');
|
||||
expect(boundsError!.message).toContain('Code');
|
||||
});
|
||||
|
||||
it('should accept IF node with main[0] and main[1] (2 outputs)', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'IF', type: 'n8n-nodes-base.if', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'True', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'False', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'IF', type: 'main', index: 0 }]]
|
||||
},
|
||||
'IF': {
|
||||
main: [
|
||||
[{ node: 'True', type: 'main', index: 0 }],
|
||||
[{ node: 'False', type: 'main', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const boundsErrors = result.errors.filter(e => e.code === 'OUTPUT_INDEX_OUT_OF_BOUNDS');
|
||||
expect(boundsErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should flag IF node with main[2] (only 2 outputs)', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'IF', type: 'n8n-nodes-base.if', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'True', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'False', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
{ id: '5', name: 'Extra', type: 'n8n-nodes-base.set', position: [400, 400], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'IF', type: 'main', index: 0 }]]
|
||||
},
|
||||
'IF': {
|
||||
main: [
|
||||
[{ node: 'True', type: 'main', index: 0 }],
|
||||
[{ node: 'False', type: 'main', index: 0 }],
|
||||
[{ node: 'Extra', type: 'main', index: 0 }] // main[2] - out of bounds
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const boundsError = result.errors.find(e => e.code === 'OUTPUT_INDEX_OUT_OF_BOUNDS');
|
||||
expect(boundsError).toBeDefined();
|
||||
expect(boundsError!.message).toContain('Output index 2');
|
||||
});
|
||||
|
||||
it('should allow extra output when onError is continueErrorOutput', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Code', type: 'n8n-nodes-base.code', position: [200, 0], parameters: {}, onError: 'continueErrorOutput' as const },
|
||||
{ id: '3', name: 'Success', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'Error', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Code', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Code': {
|
||||
main: [
|
||||
[{ node: 'Success', type: 'main', index: 0 }],
|
||||
[{ node: 'Error', type: 'main', index: 0 }] // Error output - allowed with continueErrorOutput
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const boundsErrors = result.errors.filter(e => e.code === 'OUTPUT_INDEX_OUT_OF_BOUNDS');
|
||||
expect(boundsErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should skip bounds check for unknown node types', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Custom', type: 'n8n-nodes-community.customNode', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Set1', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'Set2', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Custom', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Custom': {
|
||||
main: [
|
||||
[{ node: 'Set1', type: 'main', index: 0 }],
|
||||
[{ node: 'Set2', type: 'main', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const boundsErrors = result.errors.filter(e => e.code === 'OUTPUT_INDEX_OUT_OF_BOUNDS');
|
||||
expect(boundsErrors).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Input index bounds checking (P1)', () => {
|
||||
it('should accept regular node with index 0', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Set', type: 'n8n-nodes-base.set', position: [200, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Set', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const inputErrors = result.errors.filter(e => e.code === 'INPUT_INDEX_OUT_OF_BOUNDS');
|
||||
expect(inputErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should flag regular node with index 1 (only 1 input)', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Code', type: 'n8n-nodes-base.code', position: [200, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Code', type: 'main', index: 1 }]] // index 1 - out of bounds
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const inputError = result.errors.find(e => e.code === 'INPUT_INDEX_OUT_OF_BOUNDS');
|
||||
expect(inputError).toBeDefined();
|
||||
expect(inputError!.message).toContain('Input index 1');
|
||||
expect(inputError!.message).toContain('Code');
|
||||
});
|
||||
|
||||
it('should accept Merge node with index 1 (has 2 inputs)', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Set1', type: 'n8n-nodes-base.set', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Set2', type: 'n8n-nodes-base.set', position: [200, 200], parameters: {} },
|
||||
{ id: '4', name: 'Merge', type: 'n8n-nodes-base.merge', position: [400, 100], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Set1', type: 'main', index: 0 }, { node: 'Set2', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Set1': {
|
||||
main: [[{ node: 'Merge', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Set2': {
|
||||
main: [[{ node: 'Merge', type: 'main', index: 1 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const inputErrors = result.errors.filter(e => e.code === 'INPUT_INDEX_OUT_OF_BOUNDS');
|
||||
expect(inputErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should skip bounds check for unknown node types', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Custom', type: 'n8n-nodes-community.unknownNode', position: [200, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Custom', type: 'main', index: 5 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const inputErrors = result.errors.filter(e => e.code === 'INPUT_INDEX_OUT_OF_BOUNDS');
|
||||
expect(inputErrors).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Trigger reachability analysis (P2)', () => {
|
||||
it('should flag nodes in disconnected subgraph as unreachable', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Connected', type: 'n8n-nodes-base.set', position: [200, 0], parameters: {} },
|
||||
// Disconnected subgraph - two nodes connected to each other but not reachable from trigger
|
||||
{ id: '3', name: 'Island1', type: 'n8n-nodes-base.code', position: [0, 300], parameters: {} },
|
||||
{ id: '4', name: 'Island2', type: 'n8n-nodes-base.set', position: [200, 300], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Connected', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Island1': {
|
||||
main: [[{ node: 'Island2', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
// Both Island1 and Island2 should be flagged as unreachable
|
||||
const unreachable = result.warnings.filter(w => w.message.includes('not reachable from any trigger'));
|
||||
expect(unreachable.length).toBe(2);
|
||||
expect(unreachable.some(w => w.nodeName === 'Island1')).toBe(true);
|
||||
expect(unreachable.some(w => w.nodeName === 'Island2')).toBe(true);
|
||||
});
|
||||
|
||||
it('should pass when all nodes are reachable from trigger', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Code', type: 'n8n-nodes-base.code', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Set', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Code', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Code': {
|
||||
main: [[{ node: 'Set', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const unreachable = result.warnings.filter(w => w.message.includes('not reachable'));
|
||||
expect(unreachable).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should flag single orphaned node as unreachable', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Set', type: 'n8n-nodes-base.set', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Orphaned', type: 'n8n-nodes-base.code', position: [500, 500], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Set', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const unreachable = result.warnings.filter(w => w.message.includes('not reachable') && w.nodeName === 'Orphaned');
|
||||
expect(unreachable).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should not flag disabled nodes', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Set', type: 'n8n-nodes-base.set', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Disabled', type: 'n8n-nodes-base.code', position: [500, 500], parameters: {}, disabled: true },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Set', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const unreachable = result.warnings.filter(w => w.nodeName === 'Disabled');
|
||||
expect(unreachable).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should not flag sticky notes', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Set', type: 'n8n-nodes-base.set', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Note', type: 'n8n-nodes-base.stickyNote', position: [500, 500], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Set', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
const unreachable = result.warnings.filter(w => w.nodeName === 'Note');
|
||||
expect(unreachable).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should use simple orphan check when no triggers exist', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Set1', type: 'n8n-nodes-base.set', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Set2', type: 'n8n-nodes-base.set', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Orphan', type: 'n8n-nodes-base.code', position: [500, 500], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Set1': {
|
||||
main: [[{ node: 'Set2', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
|
||||
// Orphan should still be flagged with the simple "not connected" message
|
||||
const orphanWarning = result.warnings.find(w => w.nodeName === 'Orphan');
|
||||
expect(orphanWarning).toBeDefined();
|
||||
expect(orphanWarning!.message).toContain('not connected to any other nodes');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Conditional branch fan-out detection (CONDITIONAL_BRANCH_FANOUT)', () => {
|
||||
it('should warn when IF node has both branches in main[0]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Route', type: 'n8n-nodes-base.if', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TrueTarget', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'FalseTarget', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'Route', type: 'main', index: 0 }]] },
|
||||
'Route': {
|
||||
main: [[{ node: 'TrueTarget', type: 'main', index: 0 }, { node: 'FalseTarget', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeDefined();
|
||||
expect(warning!.nodeName).toBe('Route');
|
||||
expect(warning!.message).toContain('2 connections on the "true" branch');
|
||||
expect(warning!.message).toContain('"false" branch has no effect');
|
||||
});
|
||||
|
||||
it('should not warn when IF node has correct true/false split', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Route', type: 'n8n-nodes-base.if', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TrueTarget', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'FalseTarget', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'Route', type: 'main', index: 0 }]] },
|
||||
'Route': {
|
||||
main: [
|
||||
[{ node: 'TrueTarget', type: 'main', index: 0 }],
|
||||
[{ node: 'FalseTarget', type: 'main', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not warn when IF has fan-out on main[0] AND connections on main[1]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Route', type: 'n8n-nodes-base.if', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TrueA', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'TrueB', type: 'n8n-nodes-base.set', position: [400, 100], parameters: {} },
|
||||
{ id: '5', name: 'FalseTarget', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'Route', type: 'main', index: 0 }]] },
|
||||
'Route': {
|
||||
main: [
|
||||
[{ node: 'TrueA', type: 'main', index: 0 }, { node: 'TrueB', type: 'main', index: 0 }],
|
||||
[{ node: 'FalseTarget', type: 'main', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should warn when Switch node has all connections on main[0]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'MySwitch', type: 'n8n-nodes-base.switch', position: [200, 0], parameters: { rules: { values: [{ value: 'a' }, { value: 'b' }] } } },
|
||||
{ id: '3', name: 'TargetA', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'TargetB', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
{ id: '5', name: 'TargetC', type: 'n8n-nodes-base.set', position: [400, 400], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'MySwitch', type: 'main', index: 0 }]] },
|
||||
'MySwitch': {
|
||||
main: [[{ node: 'TargetA', type: 'main', index: 0 }, { node: 'TargetB', type: 'main', index: 0 }, { node: 'TargetC', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeDefined();
|
||||
expect(warning!.nodeName).toBe('MySwitch');
|
||||
expect(warning!.message).toContain('3 connections on output 0');
|
||||
expect(warning!.message).toContain('other switch branches have no effect');
|
||||
});
|
||||
|
||||
it('should not warn when Switch node has no rules parameter (indeterminate outputs)', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'MySwitch', type: 'n8n-nodes-base.switch', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TargetA', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'TargetB', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'MySwitch', type: 'main', index: 0 }]] },
|
||||
'MySwitch': {
|
||||
main: [[{ node: 'TargetA', type: 'main', index: 0 }, { node: 'TargetB', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not warn when regular node has fan-out on main[0]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'MySet', type: 'n8n-nodes-base.set', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TargetA', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'TargetB', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'MySet', type: 'main', index: 0 }]] },
|
||||
'MySet': {
|
||||
main: [[{ node: 'TargetA', type: 'main', index: 0 }, { node: 'TargetB', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not warn when IF has only 1 connection on main[0] with empty main[1]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'Route', type: 'n8n-nodes-base.if', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'TrueOnly', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'Route', type: 'main', index: 0 }]] },
|
||||
'Route': {
|
||||
main: [[{ node: 'TrueOnly', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should warn for Filter node with both branches in main[0]', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
{ id: '1', name: 'Trigger', type: 'n8n-nodes-base.manualTrigger', position: [0, 0], parameters: {} },
|
||||
{ id: '2', name: 'MyFilter', type: 'n8n-nodes-base.filter', position: [200, 0], parameters: {} },
|
||||
{ id: '3', name: 'Matched', type: 'n8n-nodes-base.set', position: [400, 0], parameters: {} },
|
||||
{ id: '4', name: 'Unmatched', type: 'n8n-nodes-base.set', position: [400, 200], parameters: {} },
|
||||
],
|
||||
connections: {
|
||||
'Trigger': { main: [[{ node: 'MyFilter', type: 'main', index: 0 }]] },
|
||||
'MyFilter': {
|
||||
main: [[{ node: 'Matched', type: 'main', index: 0 }, { node: 'Unmatched', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const result = await validator.validateWorkflow(workflow as any);
|
||||
const warning = result.warnings.find(w => w.code === 'CONDITIONAL_BRANCH_FANOUT');
|
||||
expect(warning).toBeDefined();
|
||||
expect(warning!.nodeName).toBe('MyFilter');
|
||||
expect(warning!.message).toContain('"matched" branch');
|
||||
expect(warning!.message).toContain('"unmatched" branch has no effect');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -291,7 +291,7 @@ describe('WorkflowValidator - Expression Format Validation', () => {
|
||||
});
|
||||
|
||||
describe('Real-world workflow examples', () => {
|
||||
it.skip('should validate Email workflow with expression issues', async () => {
|
||||
it('should validate Email workflow with expression issues', async () => {
|
||||
const workflow = {
|
||||
name: 'Error Notification Workflow',
|
||||
nodes: [
|
||||
@@ -342,7 +342,7 @@ describe('WorkflowValidator - Expression Format Validation', () => {
|
||||
expect(fromEmailError?.message).toContain('={{ $env.ADMIN_EMAIL }}');
|
||||
});
|
||||
|
||||
it.skip('should validate GitHub workflow with resource locator issues', async () => {
|
||||
it('should validate GitHub workflow with resource locator issues', async () => {
|
||||
const workflow = {
|
||||
name: 'GitHub Issue Handler',
|
||||
nodes: [
|
||||
|
||||
@@ -646,10 +646,9 @@ describe('WorkflowValidator - Mock-based Unit Tests', () => {
|
||||
await validator.validateWorkflow(workflow as any);
|
||||
|
||||
// Should have called getNode for each node type (normalized to short form)
|
||||
// Called during node validation + output/input index bounds checking
|
||||
expect(mockGetNode).toHaveBeenCalledWith('nodes-base.httpRequest');
|
||||
expect(mockGetNode).toHaveBeenCalledWith('nodes-base.set');
|
||||
expect(mockGetNode.mock.calls.length).toBeGreaterThanOrEqual(2);
|
||||
expect(mockGetNode).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should handle repository errors gracefully', async () => {
|
||||
|
||||
Reference in New Issue
Block a user