mirror of
https://github.com/czlonkowski/n8n-mcp.git
synced 2026-01-30 14:32:04 +00:00
Compare commits
2 Commits
feature/au
...
enhance/va
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ee4e20a1ee | ||
|
|
0c050bda6d |
86
.github/workflows/release.yml
vendored
86
.github/workflows/release.yml
vendored
@@ -112,85 +112,53 @@ jobs:
|
||||
|
||||
echo "✅ Version $CURRENT_VERSION is valid (higher than npm version $NPM_VERSION)"
|
||||
|
||||
generate-release-notes:
|
||||
name: Generate Release Notes
|
||||
extract-changelog:
|
||||
name: Extract Changelog
|
||||
runs-on: ubuntu-latest
|
||||
needs: detect-version-change
|
||||
if: needs.detect-version-change.outputs.version-changed == 'true'
|
||||
outputs:
|
||||
release-notes: ${{ steps.generate.outputs.notes }}
|
||||
has-notes: ${{ steps.generate.outputs.has-notes }}
|
||||
release-notes: ${{ steps.extract.outputs.notes }}
|
||||
has-notes: ${{ steps.extract.outputs.has-notes }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Need full history for git log
|
||||
|
||||
- name: Generate release notes from commits
|
||||
id: generate
|
||||
|
||||
- name: Extract changelog for version
|
||||
id: extract
|
||||
run: |
|
||||
CURRENT_VERSION="${{ needs.detect-version-change.outputs.new-version }}"
|
||||
CURRENT_TAG="v$CURRENT_VERSION"
|
||||
|
||||
# Get the previous tag (excluding the current tag which doesn't exist yet)
|
||||
PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -v "^$CURRENT_TAG$" | head -1)
|
||||
|
||||
echo "Current version: $CURRENT_VERSION"
|
||||
echo "Current tag: $CURRENT_TAG"
|
||||
echo "Previous tag: $PREVIOUS_TAG"
|
||||
|
||||
if [ -z "$PREVIOUS_TAG" ]; then
|
||||
echo "ℹ️ No previous tag found, this might be the first release"
|
||||
|
||||
# Get all commits up to current commit - use heredoc for multiline
|
||||
NOTES=$(cat <<EOF
|
||||
### 🎉 Initial Release
|
||||
|
||||
This is the initial release of n8n-mcp v$CURRENT_VERSION.
|
||||
|
||||
---
|
||||
|
||||
**Release Statistics:**
|
||||
- Commit count: $(git rev-list --count HEAD)
|
||||
- First release setup
|
||||
EOF
|
||||
)
|
||||
|
||||
VERSION="${{ needs.detect-version-change.outputs.new-version }}"
|
||||
CHANGELOG_FILE="docs/CHANGELOG.md"
|
||||
|
||||
if [ ! -f "$CHANGELOG_FILE" ]; then
|
||||
echo "Changelog file not found at $CHANGELOG_FILE"
|
||||
echo "has-notes=false" >> $GITHUB_OUTPUT
|
||||
echo "notes=No changelog entries found for version $VERSION" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Use the extracted changelog script
|
||||
if NOTES=$(node scripts/extract-changelog.js "$VERSION" "$CHANGELOG_FILE" 2>/dev/null); then
|
||||
echo "has-notes=true" >> $GITHUB_OUTPUT
|
||||
|
||||
|
||||
# Use heredoc to properly handle multiline content
|
||||
{
|
||||
echo "notes<<EOF"
|
||||
echo "$NOTES"
|
||||
echo "EOF"
|
||||
} >> $GITHUB_OUTPUT
|
||||
|
||||
|
||||
echo "✅ Successfully extracted changelog for version $VERSION"
|
||||
else
|
||||
echo "✅ Previous tag found: $PREVIOUS_TAG"
|
||||
|
||||
# Generate release notes between tags
|
||||
if NOTES=$(node scripts/generate-release-notes.js "$PREVIOUS_TAG" "HEAD" 2>/dev/null); then
|
||||
echo "has-notes=true" >> $GITHUB_OUTPUT
|
||||
|
||||
# Use heredoc to properly handle multiline content
|
||||
{
|
||||
echo "notes<<EOF"
|
||||
echo "$NOTES"
|
||||
echo "EOF"
|
||||
} >> $GITHUB_OUTPUT
|
||||
|
||||
echo "✅ Successfully generated release notes from $PREVIOUS_TAG to $CURRENT_TAG"
|
||||
else
|
||||
echo "has-notes=false" >> $GITHUB_OUTPUT
|
||||
echo "notes=Failed to generate release notes for version $CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "⚠️ Could not generate release notes for version $CURRENT_VERSION"
|
||||
fi
|
||||
echo "has-notes=false" >> $GITHUB_OUTPUT
|
||||
echo "notes=No changelog entries found for version $VERSION" >> $GITHUB_OUTPUT
|
||||
echo "⚠️ Could not extract changelog for version $VERSION"
|
||||
fi
|
||||
|
||||
create-release:
|
||||
name: Create GitHub Release
|
||||
runs-on: ubuntu-latest
|
||||
needs: [detect-version-change, generate-release-notes]
|
||||
needs: [detect-version-change, extract-changelog]
|
||||
if: needs.detect-version-change.outputs.version-changed == 'true'
|
||||
outputs:
|
||||
release-id: ${{ steps.create.outputs.id }}
|
||||
@@ -221,7 +189,7 @@ EOF
|
||||
cat > release_body.md << 'EOF'
|
||||
# Release v${{ needs.detect-version-change.outputs.new-version }}
|
||||
|
||||
${{ needs.generate-release-notes.outputs.release-notes }}
|
||||
${{ needs.extract-changelog.outputs.release-notes }}
|
||||
|
||||
---
|
||||
|
||||
|
||||
933
CHANGELOG.md
933
CHANGELOG.md
@@ -7,939 +7,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
### ✨ New Features
|
||||
|
||||
**Auto-Update Node Versions with Smart Migration**
|
||||
|
||||
Added comprehensive node version upgrade functionality to the autofixer, enabling automatic detection and migration of outdated node versions with intelligent breaking change handling.
|
||||
|
||||
#### Key Features
|
||||
|
||||
1. **Smart Version Upgrades** (`typeversion-upgrade` fix type):
|
||||
- Automatically detects outdated node versions
|
||||
- Applies intelligent migrations with auto-migratable property changes
|
||||
- Handles well-known breaking changes (Execute Workflow v1.0→v1.1, Webhook v2.0→v2.1)
|
||||
- Generates UUIDs and sensible defaults for new required fields
|
||||
- HIGH confidence for non-breaking upgrades, MEDIUM for breaking changes with auto-migration
|
||||
|
||||
2. **Version Migration Guidance** (`version-migration` fix type):
|
||||
- Documents complex migrations requiring manual intervention
|
||||
- Provides AI-friendly post-update guidance with step-by-step instructions
|
||||
- Lists required actions by priority (CRITICAL, HIGH, MEDIUM, LOW)
|
||||
- Documents behavior changes and their impact
|
||||
- Estimates time required for manual migration steps
|
||||
- MEDIUM/LOW confidence - requires review before applying
|
||||
|
||||
3. **Breaking Changes Registry**:
|
||||
- Centralized registry of known breaking changes across n8n nodes
|
||||
- Example: Execute Workflow v1.1+ requires `inputFieldMapping` (auto-added)
|
||||
- Example: Webhook v2.1+ requires `webhookId` field (auto-generated UUID)
|
||||
- Extensible for future node version changes
|
||||
|
||||
4. **Post-Update Validation**:
|
||||
- Generates comprehensive migration reports for AI agents
|
||||
- Includes required actions, deprecated properties, behavior changes
|
||||
- Provides actionable migration steps with estimated time
|
||||
- Helps AI agents understand what manual work is needed after auto-migration
|
||||
|
||||
#### Architecture
|
||||
|
||||
- **NodeVersionService**: Version discovery, comparison, upgrade path recommendation
|
||||
- **BreakingChangeDetector**: Detects changes from registry and dynamic schema comparison
|
||||
- **NodeMigrationService**: Applies smart migrations with confidence scoring
|
||||
- **PostUpdateValidator**: Generates AI-friendly migration guidance
|
||||
- **Enhanced Database Schema**:
|
||||
- `node_versions` table - tracks all available versions per node
|
||||
- `version_property_changes` table - detailed migration tracking
|
||||
|
||||
#### Usage Example
|
||||
|
||||
```typescript
|
||||
// Preview all fixes including version upgrades
|
||||
n8n_autofix_workflow({id: "wf_123"})
|
||||
|
||||
// Only upgrade versions with smart migrations
|
||||
n8n_autofix_workflow({
|
||||
id: "wf_123",
|
||||
fixTypes: ["typeversion-upgrade"],
|
||||
applyFixes: true
|
||||
})
|
||||
|
||||
// Get migration guidance for breaking changes
|
||||
n8n_autofix_workflow({
|
||||
id: "wf_123",
|
||||
fixTypes: ["version-migration"]
|
||||
})
|
||||
```
|
||||
|
||||
#### Impact
|
||||
|
||||
- Proactively keeps workflows up-to-date with latest node versions
|
||||
- Reduces manual migration effort for Execute Workflow, Webhook, and other versioned nodes
|
||||
- Provides clear guidance for AI agents on handling breaking changes
|
||||
- Ensures workflows benefit from latest node features and bug fixes
|
||||
|
||||
**Conceived by Romuald Członkowski - www.aiadvisors.pl/en**
|
||||
|
||||
---
|
||||
|
||||
**Workflow Versioning & Rollback System**
|
||||
|
||||
Added comprehensive workflow versioning, backup, and rollback capabilities with automatic pruning to prevent memory leaks. Every workflow update now creates an automatic backup that can be restored on failure.
|
||||
|
||||
#### Key Features
|
||||
|
||||
1. **Automatic Backups**:
|
||||
- Every workflow update automatically creates a version backup (opt-out via `createBackup: false`)
|
||||
- Captures full workflow state before modifications
|
||||
- Auto-prunes to 10 versions per workflow (prevents unbounded storage growth)
|
||||
- Tracks trigger context (partial_update, full_update, autofix)
|
||||
- Stores operation sequences for audit trail
|
||||
|
||||
2. **Rollback Capability** (`n8n_workflow_versions` tool):
|
||||
- Restore workflow to any previous version
|
||||
- Automatic backup of current state before rollback
|
||||
- Optional pre-rollback validation
|
||||
- Six operational modes: list, get, rollback, delete, prune, truncate
|
||||
|
||||
3. **Version Management**:
|
||||
- List version history with metadata (size, trigger, operations applied)
|
||||
- Get detailed version information including full workflow snapshot
|
||||
- Delete specific versions or all versions for a workflow
|
||||
- Manual pruning with custom retention count
|
||||
|
||||
4. **Memory Safety**:
|
||||
- Automatic pruning to max 10 versions per workflow after each backup
|
||||
- Manual cleanup tools (delete, prune, truncate)
|
||||
- Storage statistics tracking (total size, per-workflow breakdown)
|
||||
- Zero configuration required - works automatically
|
||||
|
||||
5. **Non-Blocking Design**:
|
||||
- Backup failures don't block workflow updates
|
||||
- Logged warnings for failed backups
|
||||
- Continues with update even if versioning service unavailable
|
||||
|
||||
#### Architecture
|
||||
|
||||
- **WorkflowVersioningService**: Core versioning logic (backup, restore, cleanup)
|
||||
- **workflow_versions Table**: Stores full workflow snapshots with metadata
|
||||
- **Auto-Pruning**: FIFO policy keeps 10 most recent versions
|
||||
- **Hybrid Storage**: Full snapshots + operation sequences for audit trail
|
||||
|
||||
#### Usage Examples
|
||||
|
||||
```typescript
|
||||
// Automatic backups (default behavior)
|
||||
n8n_update_partial_workflow({
|
||||
id: "wf_123",
|
||||
operations: [...]
|
||||
// createBackup: true is default
|
||||
})
|
||||
|
||||
// List version history
|
||||
n8n_workflow_versions({
|
||||
mode: "list",
|
||||
workflowId: "wf_123",
|
||||
limit: 10
|
||||
})
|
||||
|
||||
// Rollback to previous version
|
||||
n8n_workflow_versions({
|
||||
mode: "rollback",
|
||||
workflowId: "wf_123"
|
||||
// Restores to latest backup, creates backup of current state first
|
||||
})
|
||||
|
||||
// Rollback to specific version
|
||||
n8n_workflow_versions({
|
||||
mode: "rollback",
|
||||
workflowId: "wf_123",
|
||||
versionId: 42
|
||||
})
|
||||
|
||||
// Delete old versions manually
|
||||
n8n_workflow_versions({
|
||||
mode: "prune",
|
||||
workflowId: "wf_123",
|
||||
maxVersions: 5
|
||||
})
|
||||
|
||||
// Emergency cleanup (requires confirmation)
|
||||
n8n_workflow_versions({
|
||||
mode: "truncate",
|
||||
confirmTruncate: true
|
||||
})
|
||||
```
|
||||
|
||||
#### Impact
|
||||
|
||||
- **Confidence**: Increases AI agent confidence by 3x (per UX analysis)
|
||||
- **Safety**: Transforms feature from "use with caution" to "production-ready"
|
||||
- **Recovery**: Failed updates can be instantly rolled back
|
||||
- **Audit**: Complete history of workflow changes with operation sequences
|
||||
- **Memory**: Auto-pruning prevents storage leaks (~200KB per workflow max)
|
||||
|
||||
#### Integration Points
|
||||
|
||||
- `n8n_update_partial_workflow`: Automatic backup before diff operations
|
||||
- `n8n_update_full_workflow`: Automatic backup before full replacement
|
||||
- `n8n_autofix_workflow`: Automatic backup with fix types metadata
|
||||
- `n8n_workflow_versions`: Unified rollback/cleanup interface (6 modes)
|
||||
|
||||
**Conceived by Romuald Członkowski - [www.aiadvisors.pl/en](https://www.aiadvisors.pl/en)**
|
||||
|
||||
## [2.21.1] - 2025-10-23
|
||||
|
||||
### 🐛 Bug Fixes
|
||||
|
||||
**Issue #357: Fix AI Node Connection Validation in Partial Workflow Updates**
|
||||
|
||||
Fixed critical validation issue where `n8n_update_partial_workflow` incorrectly required `main` connections for AI nodes that exclusively use AI-specific connection types (`ai_languageModel`, `ai_memory`, `ai_embedding`, `ai_vectorStore`, `ai_tool`).
|
||||
|
||||
#### Problem
|
||||
|
||||
Workflows containing AI nodes (OpenAI Chat Model, Postgres Chat Memory, Embeddings OpenAI, Supabase Vector Store) could not be updated via `n8n_update_partial_workflow`, even for trivial changes to unrelated nodes. The validation logic incorrectly expected ALL nodes to have `main` connections, causing false positive errors:
|
||||
|
||||
```
|
||||
Invalid connections: [
|
||||
{
|
||||
"code": "invalid_type",
|
||||
"expected": "array",
|
||||
"received": "undefined",
|
||||
"path": ["OpenAI Chat Model", "main"],
|
||||
"message": "Required"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
**Impact**: Users could not update any workflows containing AI Agent nodes via MCP tools, forcing manual updates through the n8n UI.
|
||||
|
||||
#### Root Cause
|
||||
|
||||
The Zod schema in `src/services/n8n-validation.ts` (lines 27-39) defined `main` connections as a **required field** for all nodes, without support for AI-specific connection types:
|
||||
|
||||
```typescript
|
||||
// BEFORE (Broken):
|
||||
export const workflowConnectionSchema = z.record(
|
||||
z.object({
|
||||
main: z.array(...), // Required - WRONG for AI nodes!
|
||||
})
|
||||
);
|
||||
```
|
||||
|
||||
AI nodes use specialized connection types exclusively:
|
||||
- **ai_languageModel** - Language models (OpenAI, Anthropic, etc.)
|
||||
- **ai_memory** - Memory systems (Postgres Chat Memory, etc.)
|
||||
- **ai_embedding** - Embedding models (Embeddings OpenAI, etc.)
|
||||
- **ai_vectorStore** - Vector stores (Supabase Vector Store, etc.)
|
||||
- **ai_tool** - Tools for AI agents
|
||||
|
||||
These nodes **never have `main` connections** - they only have their AI-specific connection types.
|
||||
|
||||
#### Fixed
|
||||
|
||||
**1. Updated Zod Schema** (`src/services/n8n-validation.ts` lines 27-49):
|
||||
```typescript
|
||||
// AFTER (Fixed):
|
||||
const connectionArraySchema = z.array(
|
||||
z.array(
|
||||
z.object({
|
||||
node: z.string(),
|
||||
type: z.string(),
|
||||
index: z.number(),
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
export const workflowConnectionSchema = z.record(
|
||||
z.object({
|
||||
main: connectionArraySchema.optional(), // Now optional
|
||||
error: connectionArraySchema.optional(), // Error connections
|
||||
ai_tool: connectionArraySchema.optional(), // AI tool connections
|
||||
ai_languageModel: connectionArraySchema.optional(), // Language model connections
|
||||
ai_memory: connectionArraySchema.optional(), // Memory connections
|
||||
ai_embedding: connectionArraySchema.optional(), // Embedding connections
|
||||
ai_vectorStore: connectionArraySchema.optional(), // Vector store connections
|
||||
})
|
||||
);
|
||||
```
|
||||
|
||||
**2. Comprehensive Test Suite** (New file: `tests/integration/workflow-diff/ai-node-connection-validation.test.ts`):
|
||||
- 13 test scenarios covering all AI connection types
|
||||
- Tests for AI nodes with ONLY AI-specific connections (no `main`)
|
||||
- Tests for mixed workflows (regular nodes + AI nodes)
|
||||
- Tests for the exact scenario from issue #357
|
||||
- All tests passing ✅
|
||||
|
||||
**3. Updated Documentation** (`src/mcp/tool-docs/workflow_management/n8n-update-partial-workflow.ts`):
|
||||
- Added clarification that AI nodes do NOT require `main` connections
|
||||
- Documented fix for issue #357
|
||||
- Updated best practices for AI workflows
|
||||
|
||||
#### Testing
|
||||
|
||||
**Before Fix**:
|
||||
- ❌ `n8n_validate_workflow`: Returns `valid: true` (correct)
|
||||
- ❌ `n8n_update_partial_workflow`: FAILS with "main connections required" errors
|
||||
- ❌ Cannot update workflows containing AI nodes at all
|
||||
|
||||
**After Fix**:
|
||||
- ✅ `n8n_validate_workflow`: Returns `valid: true` (still correct)
|
||||
- ✅ `n8n_update_partial_workflow`: SUCCEEDS without validation errors
|
||||
- ✅ AI nodes correctly recognized with AI-specific connection types only
|
||||
- ✅ All 13 new integration tests passing
|
||||
- ✅ Tested with actual workflow `019Vrw56aROeEzVj` from issue #357
|
||||
|
||||
#### Impact
|
||||
|
||||
**Zero Breaking Changes**:
|
||||
- Making required fields optional is always backward compatible
|
||||
- All existing workflows continue working
|
||||
- Validation now correctly matches n8n's actual connection model
|
||||
|
||||
**Fixes**:
|
||||
- Users can now update AI workflows via `n8n_update_partial_workflow`
|
||||
- AI nodes no longer generate false positive validation errors
|
||||
- Consistent validation between `n8n_validate_workflow` and `n8n_update_partial_workflow`
|
||||
|
||||
#### Files Changed
|
||||
|
||||
**Modified (3 files)**:
|
||||
- `src/services/n8n-validation.ts` - Fixed Zod schema to support all connection types
|
||||
- `src/mcp/tool-docs/workflow_management/n8n-update-partial-workflow.ts` - Updated documentation
|
||||
- `package.json` - Version bump to 2.21.1
|
||||
|
||||
**Added (1 file)**:
|
||||
- `tests/integration/workflow-diff/ai-node-connection-validation.test.ts` - Comprehensive test suite (13 tests)
|
||||
|
||||
#### References
|
||||
|
||||
- **Issue**: #357 - n8n_update_partial_workflow incorrectly validates AI nodes requiring 'main' connections
|
||||
- **Workflow**: `019Vrw56aROeEzVj` (WOO_Workflow_21_POST_Chat_Send_AI_Agent)
|
||||
- **Investigation**: Deep code analysis by Explore agent identified exact root cause in Zod schema
|
||||
- **Confirmation**: n8n-mcp-tester agent verified fix with real workflow
|
||||
|
||||
Conceived by Romuald Członkowski - [www.aiadvisors.pl/en](https://www.aiadvisors.pl/en)
|
||||
|
||||
## [2.21.0] - 2025-10-23
|
||||
|
||||
### ✨ Features
|
||||
|
||||
**Issue #353: Auto-Update Connection References on Node Rename**
|
||||
|
||||
Enhanced `n8n_update_partial_workflow` to automatically update all connection references when renaming nodes, matching n8n UI behavior and eliminating the need for complex manual workarounds.
|
||||
|
||||
#### Problem
|
||||
When renaming a node using the `updateNode` operation, connections still referenced the old node name, causing validation errors:
|
||||
```
|
||||
"Connection references non-existent target node: Old Name"
|
||||
```
|
||||
|
||||
This forced users to manually remove and re-add all connections, requiring:
|
||||
- 3+ operations instead of 1 simple rename
|
||||
- Manual tracking of all connection details (source, branch/case, indices)
|
||||
- Error-prone connection management
|
||||
- Inconsistent behavior compared to n8n UI
|
||||
|
||||
#### Solution: Automatic Connection Reference Updates
|
||||
|
||||
When you rename a node, **all connection references are automatically updated throughout the entire workflow**. The system:
|
||||
1. Detects name changes during `updateNode` operations
|
||||
2. Tracks old→new name mappings
|
||||
3. Updates all connection references after node operations complete
|
||||
4. Handles all connection types and branch configurations
|
||||
|
||||
#### What Gets Updated Automatically
|
||||
|
||||
**Connection Source Keys:**
|
||||
- If a source node is renamed, its connections object key is updated
|
||||
- Example: `connections['Old Name']` → `connections['New Name']`
|
||||
|
||||
**Connection Target References:**
|
||||
- If a target node is renamed, all connections pointing to it are updated
|
||||
- Example: `{node: 'Old Name', type: 'main', index: 0}` → `{node: 'New Name', type: 'main', index: 0}`
|
||||
|
||||
**All Connection Types:**
|
||||
- `main` - Standard connections
|
||||
- `error` - Error output connections
|
||||
- `ai_tool` - AI tool connections
|
||||
- `ai_languageModel` - AI language model connections
|
||||
- `ai_memory` - AI memory connections
|
||||
- All other connection types
|
||||
|
||||
**All Branch Configurations:**
|
||||
- IF node branches (true/false outputs)
|
||||
- Switch node cases (multiple numbered outputs)
|
||||
- Error output branches
|
||||
- AI-specific connection routing
|
||||
|
||||
#### Examples
|
||||
|
||||
**Before (v2.20.8 and earlier) - Failed:**
|
||||
```javascript
|
||||
// Attempting to rename would fail
|
||||
n8n_update_partial_workflow({
|
||||
id: "workflow_id",
|
||||
operations: [{
|
||||
type: "updateNode",
|
||||
nodeId: "8546d741-1af1-4aa0-bf11-af6c926c0008",
|
||||
updates: {
|
||||
name: "Return 404 Not Found" // Rename from "Return 403 Forbidden"
|
||||
}
|
||||
}]
|
||||
});
|
||||
|
||||
// Result: ERROR
|
||||
// "Workflow validation failed with 2 structural issues"
|
||||
// "Connection references non-existent target node: Return 403 Forbidden"
|
||||
|
||||
// Required workaround (3 operations):
|
||||
operations: [
|
||||
{type: "removeConnection", source: "IF", target: "Return 403 Forbidden", branch: "false"},
|
||||
{type: "updateNode", nodeId: "...", updates: {name: "Return 404 Not Found"}},
|
||||
{type: "addConnection", source: "IF", target: "Return 404 Not Found", branch: "false"}
|
||||
]
|
||||
```
|
||||
|
||||
**After (v2.21.0) - Works Automatically:**
|
||||
```javascript
|
||||
// Same operation now succeeds automatically!
|
||||
n8n_update_partial_workflow({
|
||||
id: "workflow_id",
|
||||
operations: [{
|
||||
type: "updateNode",
|
||||
nodeId: "8546d741-1af1-4aa0-bf11-af6c926c0008",
|
||||
updates: {
|
||||
name: "Return 404 Not Found", // Connections auto-update!
|
||||
parameters: {
|
||||
responseBody: '={{ {"error": "Not Found"} }}',
|
||||
options: { responseCode: 404 }
|
||||
}
|
||||
}
|
||||
}]
|
||||
});
|
||||
|
||||
// Result: SUCCESS
|
||||
// All connections automatically point to "Return 404 Not Found"
|
||||
// Single operation instead of 3+
|
||||
```
|
||||
|
||||
#### Additional Features
|
||||
|
||||
**Name Collision Detection:**
|
||||
```javascript
|
||||
// Attempting to rename to existing name
|
||||
{type: "updateNode", nodeId: "abc", updates: {name: "Existing Name"}}
|
||||
|
||||
// Result: Clear error message
|
||||
"Cannot rename node 'Old Name' to 'Existing Name': A node with that name
|
||||
already exists (id: xyz123...). Please choose a different name."
|
||||
```
|
||||
|
||||
**Batch Rename Support:**
|
||||
```javascript
|
||||
// Multiple renames in single call - all connections update correctly
|
||||
operations: [
|
||||
{type: "updateNode", nodeId: "node1", updates: {name: "New Name 1"}},
|
||||
{type: "updateNode", nodeId: "node2", updates: {name: "New Name 2"}},
|
||||
{type: "updateNode", nodeId: "node3", updates: {name: "New Name 3"}}
|
||||
]
|
||||
```
|
||||
|
||||
**Chain Operations:**
|
||||
```javascript
|
||||
// Rename then immediately use new name in subsequent operations
|
||||
operations: [
|
||||
{type: "updateNode", nodeId: "abc", updates: {name: "New Name"}},
|
||||
{type: "addConnection", source: "New Name", target: "Other Node"}
|
||||
]
|
||||
```
|
||||
|
||||
#### Technical Implementation
|
||||
|
||||
**Files Modified:**
|
||||
- `src/services/workflow-diff-engine.ts` - Core auto-update logic
|
||||
- Added `renameMap` property to track name changes
|
||||
- Added `updateConnectionReferences()` method (lines 943-994)
|
||||
- Enhanced `validateUpdateNode()` with name collision detection (lines 369-392)
|
||||
- Modified `applyUpdateNode()` to track renames (lines 613-635)
|
||||
- Connection updates applied after Pass 1 node operations (lines 156-160)
|
||||
|
||||
- `src/mcp/tool-docs/workflow_management/n8n-update-partial-workflow.ts`
|
||||
- Added comprehensive "Automatic Connection Reference Updates" section
|
||||
- Added to tips: "Node renames: Connections automatically update"
|
||||
- Includes before/after examples and best practices
|
||||
|
||||
**New Test Files:**
|
||||
- `tests/unit/services/workflow-diff-node-rename.test.ts` (925 lines, 14 scenarios)
|
||||
- `tests/integration/workflow-diff/node-rename-integration.test.ts` (4 real-world workflows)
|
||||
|
||||
**Test Coverage:**
|
||||
1. Simple rename with single connection
|
||||
2. Multiple incoming connections
|
||||
3. Multiple outgoing connections
|
||||
4. IF node branches (true/false)
|
||||
5. Switch node cases (0, 1, 2, ..., N)
|
||||
6. Error connections
|
||||
7. AI tool connections (ai_tool, ai_languageModel)
|
||||
8. Name collision detection
|
||||
9. Rename to same name (no-op)
|
||||
10. Multiple renames in single batch
|
||||
11. Chain operations (rename + add/remove connections)
|
||||
12. validateOnly mode
|
||||
13. continueOnError mode
|
||||
14. Self-connections (loops)
|
||||
15. Real-world Issue #353 scenario
|
||||
|
||||
#### Benefits
|
||||
|
||||
**User Experience:**
|
||||
- ✅ **Principle of Least Surprise**: Matches n8n UI behavior
|
||||
- ✅ **Single Operation**: Rename with 1 operation instead of 3+
|
||||
- ✅ **No Manual Tracking**: System handles all connection updates
|
||||
- ✅ **Safer**: Collision detection prevents naming conflicts
|
||||
- ✅ **Faster**: Less error-prone, fewer operations
|
||||
|
||||
**Technical:**
|
||||
- ✅ **100% Backward Compatible**: Enhances existing `updateNode` operation
|
||||
- ✅ **All Connection Types**: main, error, AI connections, etc.
|
||||
- ✅ **All Branch Types**: IF, Switch, error outputs
|
||||
- ✅ **Atomic**: All connections update together or rollback
|
||||
- ✅ **Works in Both Modes**: atomic and continueOnError
|
||||
|
||||
**Comprehensive:**
|
||||
- ✅ **14 Test Scenarios**: Unit tests covering all edge cases
|
||||
- ✅ **4 Integration Tests**: Real-world workflow validation
|
||||
- ✅ **Complete Documentation**: Tool docs with examples
|
||||
- ✅ **Clear Error Messages**: Name collision detection with actionable guidance
|
||||
|
||||
#### Impact on Existing Workflows
|
||||
|
||||
**Zero Breaking Changes:**
|
||||
- All existing workflows continue working
|
||||
- Existing operations work identically
|
||||
- Only enhances rename behavior
|
||||
- No API changes required
|
||||
|
||||
**Migration:**
|
||||
- No migration needed
|
||||
- Update to v2.21.0 and renames "just work"
|
||||
- Remove manual connection workarounds at your convenience
|
||||
|
||||
#### Related
|
||||
|
||||
- **Issue:** #353 - Enhancement: Auto-update connection references on node rename
|
||||
- **Use Case:** Real-world API endpoint workflow (POST /patients/:id/approaches)
|
||||
- **Reporter:** Internal testing during workflow refactoring
|
||||
- **Solution:** Recommended Solution 1 from issue (auto-update)
|
||||
|
||||
Conceived by Romuald Członkowski - [www.aiadvisors.pl/en](https://www.aiadvisors.pl/en)
|
||||
|
||||
## [2.20.8] - 2025-10-23
|
||||
|
||||
### 🐛 Bug Fixes
|
||||
|
||||
This release includes two critical bug fixes that improve workflow validation for sticky notes and trigger nodes.
|
||||
|
||||
**Fix #1: Sticky Notes Validation - Disconnected Node False Positives (PR #350)**
|
||||
|
||||
Fixed bug where sticky notes (UI-only annotation nodes) were incorrectly triggering "disconnected node" validation errors when updating workflows via MCP tools.
|
||||
|
||||
#### Problem
|
||||
- Workflows with sticky notes failed validation with "Node is disconnected" errors
|
||||
- Validation logic was inconsistent between `workflow-validator.ts` and `n8n-validation.ts`
|
||||
- Sticky notes are UI-only annotations and should never trigger connection validation
|
||||
|
||||
#### Fixed
|
||||
- **Created Shared Utility Module** (`src/utils/node-classification.ts`):
|
||||
- `isStickyNote()`: Identifies all sticky note type variations
|
||||
- `isTriggerNode()`: Identifies trigger nodes (webhook, manual, cron, schedule)
|
||||
- `isNonExecutableNode()`: Identifies UI-only nodes
|
||||
- `requiresIncomingConnection()`: Determines if node needs incoming connections
|
||||
- **Updated Validators**: Both validation files now properly skip sticky notes
|
||||
|
||||
**Fix #2: Issue #351 - Recognize All Trigger Node Types Including Execute Workflow Trigger (PR #352)**
|
||||
|
||||
Fixed validation logic that was incorrectly treating Execute Workflow Trigger and other trigger nodes as regular nodes, causing "disconnected node" errors during partial workflow updates.
|
||||
|
||||
#### Problem
|
||||
The workflow validation system used a hardcoded list of only 5 trigger types, missing 200+ trigger nodes including `executeWorkflowTrigger`.
|
||||
|
||||
Additionally, no validation prevented users from activating workflows that only have `executeWorkflowTrigger` nodes (which cannot activate workflows - they can only be invoked by other workflows).
|
||||
|
||||
#### Fixed
|
||||
- **Enhanced Trigger Detection** (`src/utils/node-type-utils.ts`):
|
||||
- `isTriggerNode()`: Flexible pattern matching recognizes ALL triggers (200+)
|
||||
- `isActivatableTrigger()`: Distinguishes triggers that can activate workflows
|
||||
- `getTriggerTypeDescription()`: Human-readable trigger descriptions
|
||||
|
||||
- **Active Workflow Validation** (`src/services/n8n-validation.ts`):
|
||||
- Prevents activation of workflows with only `executeWorkflowTrigger` nodes
|
||||
- Clear error messages guide users to add activatable triggers or deactivate the workflow
|
||||
|
||||
- **Comprehensive Test Coverage**: 30+ new tests for trigger detection
|
||||
|
||||
#### Impact
|
||||
|
||||
**Before Fix:**
|
||||
- ❌ Execute Workflow Trigger and 195+ other triggers flagged as "disconnected nodes"
|
||||
- ❌ Sticky notes triggered false positive validation errors
|
||||
- ❌ Could activate workflows with only `executeWorkflowTrigger` (n8n API would reject)
|
||||
|
||||
**After Fix:**
|
||||
- ✅ ALL trigger types recognized (executeWorkflowTrigger, scheduleTrigger, emailTrigger, etc.)
|
||||
- ✅ Sticky notes properly excluded from validation
|
||||
- ✅ Clear error messages when trying to activate workflow with only `executeWorkflowTrigger`
|
||||
- ✅ Future-proof (new trigger nodes automatically supported)
|
||||
- ✅ Consistent node classification across entire codebase
|
||||
|
||||
#### Technical Details
|
||||
|
||||
**Files Modified:**
|
||||
- `src/utils/node-classification.ts` - NEW: Shared node classification utilities
|
||||
- `src/utils/node-type-utils.ts` - Enhanced trigger detection functions
|
||||
- `src/services/n8n-validation.ts` - Updated to use shared utilities
|
||||
- `src/services/workflow-validator.ts` - Updated to use shared utilities
|
||||
- `tests/unit/utils/node-type-utils.test.ts` - Added 30+ tests
|
||||
- `package.json` - Version bump to 2.20.8
|
||||
|
||||
**Related:**
|
||||
- **Issue:** #351 - Execute Workflow Trigger not recognized as valid trigger
|
||||
- **PR:** #350 - Sticky notes validation fix
|
||||
- **PR:** #352 - Comprehensive trigger detection
|
||||
|
||||
Conceived by Romuald Członkowski - [www.aiadvisors.pl/en](https://www.aiadvisors.pl/en)
|
||||
|
||||
## [2.20.7] - 2025-10-22
|
||||
|
||||
### 🔄 Dependencies
|
||||
|
||||
**Updated n8n to v1.116.2**
|
||||
|
||||
Updated all n8n dependencies to the latest compatible versions:
|
||||
- `n8n`: 1.115.2 → 1.116.2
|
||||
- `n8n-core`: 1.114.0 → 1.115.1
|
||||
- `n8n-workflow`: 1.112.0 → 1.113.0
|
||||
- `@n8n/n8n-nodes-langchain`: 1.114.1 → 1.115.1
|
||||
|
||||
**Database Rebuild:**
|
||||
- Rebuilt node database with 542 nodes from updated n8n packages
|
||||
- All 542 nodes loaded successfully from both n8n-nodes-base (439 nodes) and @n8n/n8n-nodes-langchain (103 nodes)
|
||||
- Documentation mapping completed for all nodes
|
||||
|
||||
**Testing:**
|
||||
- Changes validated in CI/CD pipeline with full test suite (705 tests)
|
||||
- Critical nodes validated: httpRequest, code, slack, agent
|
||||
|
||||
### 🐛 Bug Fixes
|
||||
|
||||
**FTS5 Search Ranking - Exact Match Prioritization**
|
||||
|
||||
Fixed critical bug in production search where exact matches weren't appearing first in search results.
|
||||
|
||||
#### Problem
|
||||
- SQL ORDER BY clause was `ORDER BY rank, CASE ... END` (wrong order)
|
||||
- FTS5 rank sorted first, CASE statement only acted as tiebreaker
|
||||
- Since FTS5 ranks are always unique, CASE boosting never applied
|
||||
- Additionally, CASE used case-sensitive comparison failing to match nodes like "Webhook" when searching "webhook"
|
||||
- Result: Searching "webhook" returned "Webflow Trigger" first, actual "Webhook" node ranked 4th
|
||||
|
||||
#### Root Cause Analysis
|
||||
**SQL Ordering Issue:**
|
||||
```sql
|
||||
-- BEFORE (Broken):
|
||||
ORDER BY rank, CASE ... END -- rank first, CASE never used
|
||||
-- Result: webhook ranks 4th (-9.64 rank)
|
||||
-- Top 3: webflowTrigger (-10.20), vonage (-10.09), renameKeys (-10.01)
|
||||
|
||||
-- AFTER (Fixed):
|
||||
ORDER BY CASE ... END, rank -- CASE first, exact matches prioritized
|
||||
-- Result: webhook ranks 1st (CASE priority 0)
|
||||
```
|
||||
|
||||
**Case-Sensitivity Issue:**
|
||||
- Old: `WHEN n.display_name = ?` (case-sensitive, fails on "Webhook" vs "webhook")
|
||||
- New: `WHEN LOWER(n.display_name) = LOWER(?)` (case-insensitive, matches correctly)
|
||||
|
||||
#### Fixed
|
||||
|
||||
**1. Production Code** (`src/mcp/server.ts` lines 1278-1295)
|
||||
- Changed ORDER BY from: `rank, CASE ... END`
|
||||
- To: `CASE WHEN LOWER(n.display_name) = LOWER(?) ... END, rank`
|
||||
- Added case-insensitive comparison with LOWER() function
|
||||
- Exact matches now consistently appear first in search results
|
||||
|
||||
**2. Test Files Updated**
|
||||
- `tests/integration/database/node-fts5-search.test.ts` (lines 137-160)
|
||||
- `tests/integration/ci/database-population.test.ts` (lines 206-234)
|
||||
- Both updated to match corrected SQL logic with case-insensitive comparison
|
||||
- Tests now accurately validate production search behavior
|
||||
|
||||
#### Impact
|
||||
|
||||
**Search Quality:**
|
||||
- ✅ Exact matches now always rank first (webhook, http, code, etc.)
|
||||
- ✅ Case-insensitive matching works correctly (Webhook = webhook = WEBHOOK)
|
||||
- ✅ Better user experience - predictable search results
|
||||
- ✅ SQL query more efficient (correct ordering at database level)
|
||||
|
||||
**Performance:**
|
||||
- Same or better performance (less JavaScript sorting needed)
|
||||
- Database does the heavy lifting with correct ORDER BY
|
||||
- JavaScript sorting still provides additional relevance refinement
|
||||
|
||||
**Testing:**
|
||||
- All 705 tests passing (703 passed + 2 fixed)
|
||||
- Comprehensive testing by n8n-mcp-tester agent
|
||||
- Code review approved with minor optimization suggestions for future
|
||||
|
||||
**Verified Search Results:**
|
||||
- "webhook" → nodes-base.webhook (1st)
|
||||
- "http" → nodes-base.httpRequest (1st)
|
||||
- "code" → nodes-base.code (1st)
|
||||
- "slack" → nodes-base.slack (1st)
|
||||
- All case variations work correctly (WEBHOOK, Webhook, webhook)
|
||||
|
||||
## [2.20.6] - 2025-10-21
|
||||
|
||||
### 🐛 Bug Fixes
|
||||
|
||||
**Issue #342: Missing `tslib` Dependency Causing MODULE_NOT_FOUND on Windows**
|
||||
|
||||
Fixed critical dependency issue where `tslib` was missing from the published npm package, causing immediate failure when users ran `npx n8n-mcp@latest` on Windows (and potentially other platforms).
|
||||
|
||||
#### Problem
|
||||
|
||||
Users installing via `npx n8n-mcp@latest` experienced MODULE_NOT_FOUND errors:
|
||||
```
|
||||
Error: Cannot find module 'tslib'
|
||||
Require stack:
|
||||
- node_modules/@supabase/functions-js/dist/main/FunctionsClient.js
|
||||
- node_modules/@supabase/supabase-js/dist/main/index.js
|
||||
- node_modules/n8n-mcp/dist/telemetry/telemetry-manager.js
|
||||
```
|
||||
|
||||
**Root Cause Analysis:**
|
||||
- `@supabase/supabase-js` depends on `@supabase/functions-js` which requires `tslib` at runtime
|
||||
- `tslib` was NOT explicitly listed in `package.runtime.json` dependencies
|
||||
- The publish script (`scripts/publish-npm.sh`) copies `package.runtime.json` → `package.json` before publishing to npm
|
||||
- CI/CD workflow (`.github/workflows/release.yml` line 329) does the same: `cp package.runtime.json $PUBLISH_DIR/package.json`
|
||||
- Result: Published npm package had no `tslib` dependency
|
||||
- When users installed via `npx`, npm didn't install `tslib` → MODULE_NOT_FOUND error
|
||||
|
||||
**Why It Worked Locally:**
|
||||
- Local development uses main `package.json` which has full n8n package dependencies
|
||||
- `tslib` existed as a transitive dependency through AWS SDK packages
|
||||
- npm's hoisting made it available locally
|
||||
|
||||
**Why It Failed in Production:**
|
||||
- `npx` installations use the published package (which comes from `package.runtime.json`)
|
||||
- No transitive path to `tslib` in the minimal runtime dependencies
|
||||
- npm's dependency resolution on Windows didn't hoist it properly
|
||||
|
||||
**Why Docker Worked:**
|
||||
- Docker builds used `package-lock.json` which included all transitive dependencies
|
||||
- Or the base image already had `tslib` installed
|
||||
|
||||
#### Fixed
|
||||
|
||||
**1. Added `tslib` to Runtime Dependencies**
|
||||
- Added `"tslib": "^2.6.2"` to `package.runtime.json` dependencies (line 14)
|
||||
- This is the **critical fix** since `package.runtime.json` gets published to npm
|
||||
- Version `^2.6.2` matches existing transitive dependency versions
|
||||
|
||||
**2. Added `tslib` to Development Dependencies**
|
||||
- Added `"tslib": "^2.6.2"` to `package.json` dependencies (line 154)
|
||||
- Ensures consistency between development and production
|
||||
- Prevents confusion for developers
|
||||
|
||||
**3. Synced `package.runtime.json` Version**
|
||||
- Updated `package.runtime.json` version from `2.20.2` to `2.20.5`
|
||||
- Keeps runtime package version in sync with main package version
|
||||
|
||||
#### Technical Details
|
||||
|
||||
**Dependency Chain:**
|
||||
```
|
||||
n8n-mcp
|
||||
└── @supabase/supabase-js@2.57.4
|
||||
└── @supabase/functions-js@2.4.6
|
||||
└── tslib (MISSING) ❌
|
||||
```
|
||||
|
||||
**Publish Process:**
|
||||
```bash
|
||||
# CI/CD workflow (.github/workflows/release.yml:329)
|
||||
cp package.runtime.json $PUBLISH_DIR/package.json
|
||||
npm publish --access public
|
||||
|
||||
# Users install via npx
|
||||
npx n8n-mcp@latest
|
||||
# → Gets dependencies from package.runtime.json (now includes tslib ✅)
|
||||
```
|
||||
|
||||
**Files Modified:**
|
||||
- `package.json` line 154: Added `tslib: "^2.6.2"`
|
||||
- `package.runtime.json` line 14: Added `tslib: "^2.6.2"` (critical fix)
|
||||
- `package.runtime.json` line 3: Updated version `2.20.2` → `2.20.5`
|
||||
|
||||
#### Impact
|
||||
|
||||
**Before Fix:**
|
||||
- ❌ Package completely broken on Windows for `npx` users
|
||||
- ❌ Affected all platforms using `npx` (not just Windows)
|
||||
- ❌ 100% failure rate on fresh installations
|
||||
- ❌ Workaround: Use v2.19.6 or install with `npm install` + run locally
|
||||
|
||||
**After Fix:**
|
||||
- ✅ `npx n8n-mcp@latest` works on all platforms
|
||||
- ✅ `tslib` guaranteed to be installed with the package
|
||||
- ✅ No breaking changes (adding a dependency that was already in transitive tree)
|
||||
- ✅ Consistent behavior across Windows, macOS, Linux
|
||||
|
||||
#### Verification
|
||||
|
||||
**Build & Tests:**
|
||||
- ✅ TypeScript compilation passes
|
||||
- ✅ Type checking passes (`npm run typecheck`)
|
||||
- ✅ All tests pass
|
||||
- ✅ Build succeeds (`npm run build`)
|
||||
|
||||
**CI/CD Validation:**
|
||||
- ✅ Verified CI workflow copies `package.runtime.json` → `package.json` before publish
|
||||
- ✅ Confirmed `tslib` will be included in published package
|
||||
- ✅ No changes needed to CI/CD workflows
|
||||
|
||||
#### Related
|
||||
|
||||
- **Issue:** #342 - Missing `tslib` dependency in v2.20.3 causing MODULE_NOT_FOUND error on Windows
|
||||
- **Reporter:** @eddyc (thank you for the detailed bug report!)
|
||||
- **Severity:** CRITICAL - Package unusable via `npx` on Windows
|
||||
- **Affected Versions:** 2.20.0 - 2.20.5
|
||||
- **Fixed Version:** 2.20.6
|
||||
|
||||
Conceived by Romuald Członkowski - [www.aiadvisors.pl/en](https://www.aiadvisors.pl/en)
|
||||
|
||||
## [2.20.5] - 2025-10-21
|
||||
|
||||
### 🐛 Bug Fixes
|
||||
|
||||
**Validation False Positives Eliminated (80% → 0%)**
|
||||
|
||||
This release completely eliminates validation false positives on production workflows through comprehensive improvements to expression detection, webhook validation, and validation profile handling.
|
||||
|
||||
#### Problem Statement
|
||||
|
||||
Production workflows were experiencing an 80% false positive rate during validation:
|
||||
- Expression-based URLs flagged as invalid (e.g., `={{ $json.protocol }}://{{ $json.domain }}/api`)
|
||||
- Expression-based JSON flagged as invalid (e.g., `={{ { key: $json.value } }}`)
|
||||
- Webhook `onError` validation checking wrong property location (node-level vs parameters)
|
||||
- "Missing $ prefix" regex flagging valid property access (e.g., `item['json']`)
|
||||
- `respondToWebhook` nodes incorrectly warned about missing error handling
|
||||
- Hardcoded credential warnings appearing in all validation profiles
|
||||
|
||||
#### Solution Overview
|
||||
|
||||
**Phase 1: Centralized Expression Detection**
|
||||
- Created `src/utils/expression-utils.ts` with 5 core utilities:
|
||||
- `isExpression()`: Type predicate detecting `=` prefix
|
||||
- `containsExpression()`: Detects `{{ }}` markers (optimized with single regex)
|
||||
- `shouldSkipLiteralValidation()`: Main decision utility for validators
|
||||
- `extractExpressionContent()`: Extracts expression code
|
||||
- `hasMixedContent()`: Detects mixed text+expression patterns
|
||||
- Added comprehensive test suite with 75 tests (100% statement coverage)
|
||||
|
||||
**Phase 2: URL and JSON Validation Fixes**
|
||||
- Modified `config-validator.ts` to skip expression validation:
|
||||
- URL validation: Skip when `shouldSkipLiteralValidation()` returns true (lines 385-397)
|
||||
- JSON validation: Skip when value contains expressions (lines 424-439)
|
||||
- Improved error messages to include actual JSON parse errors
|
||||
|
||||
**Phase 3: Webhook Validation Improvements**
|
||||
- Fixed `onError` property location check in `workflow-validator.ts`:
|
||||
- Now checks node-level `onError` property, not `parameters.onError`
|
||||
- Added context-aware validation for webhook response modes
|
||||
- Created specialized `checkWebhookErrorHandling()` helper method (lines 1618-1662):
|
||||
- Skips validation for `respondToWebhook` nodes (response nodes)
|
||||
- Requires `onError` for `responseNode` mode
|
||||
- Provides warnings for regular webhook nodes
|
||||
- Moved responseNode validation from `node-specific-validators.ts` to `workflow-validator.ts`
|
||||
|
||||
**Phase 4: Regex Pattern Enhancement**
|
||||
- Updated missing prefix pattern in `expression-validator.ts` (line 217):
|
||||
- Old: `/(?<!\$|\.)\b(json|node)\b/`
|
||||
- New: `/(?<![.$\w['])\b(json|node|input|items|workflow|execution)\b(?!\s*[:''])/`
|
||||
- Now correctly excludes:
|
||||
- Dollar prefix: `$json` ✓
|
||||
- Dot access: `.json` ✓
|
||||
- Word chars: `myJson` ✓
|
||||
- Bracket notation: `item['json']` ✓
|
||||
- After quotes: `"json"` ✓
|
||||
|
||||
**Phase 5: Profile-Based Filtering**
|
||||
- Made hardcoded credential warnings configurable in `enhanced-config-validator.ts`:
|
||||
- Created `shouldFilterCredentialWarning()` helper method (lines 469-476)
|
||||
- Only show hardcoded credential warnings in `strict` profile
|
||||
- Filters warnings in `minimal`, `runtime`, and `ai-friendly` profiles
|
||||
- Replaced 3 instances of duplicate filtering code (lines 492, 510, 539)
|
||||
|
||||
**Phase 6: Code Quality Improvements**
|
||||
- Fixed type guard order in `hasMixedContent()` (line 90)
|
||||
- Added type predicate to `isExpression()` for better TypeScript narrowing
|
||||
- Extracted helper methods to reduce code duplication
|
||||
- Improved error messages with actual parsing details
|
||||
|
||||
**Phase 7: Comprehensive Testing**
|
||||
- Created `tests/unit/utils/expression-utils.test.ts` with 75 tests:
|
||||
- `isExpression()`: 18 tests (valid, invalid, edge cases, type narrowing)
|
||||
- `containsExpression()`: 14 tests (markers, edge cases)
|
||||
- `shouldSkipLiteralValidation()`: 12 tests (skip conditions, real-world)
|
||||
- `extractExpressionContent()`: 11 tests (extraction, edge cases)
|
||||
- `hasMixedContent()`: 19 tests (mixed content, type guards)
|
||||
- Integration scenarios: 4 tests (real workflow scenarios)
|
||||
- Performance test: 10k iterations in <100ms
|
||||
- Fixed CI test failure by skipping moved validation tests in `node-specific-validators.test.ts`
|
||||
|
||||
#### Results
|
||||
|
||||
**Validation Accuracy:**
|
||||
- Total Errors: 16 → 0 (100% elimination)
|
||||
- Total Warnings: 45 → 27 (40% reduction)
|
||||
- Valid Workflows: 0/6 → 6/6 (100% success rate)
|
||||
- False Positive Rate: 80% → 0%
|
||||
|
||||
**Test Coverage:**
|
||||
- New tests: 75 comprehensive test cases
|
||||
- Statement coverage: 100%
|
||||
- Line coverage: 100%
|
||||
- Branch coverage: 95.23%
|
||||
- All 143 tests passing ✓
|
||||
|
||||
**Files Changed:**
|
||||
- Modified: 7 files
|
||||
- `src/services/config-validator.ts`
|
||||
- `src/services/enhanced-config-validator.ts`
|
||||
- `src/services/expression-validator.ts`
|
||||
- `src/services/workflow-validator.ts`
|
||||
- `src/services/node-specific-validators.ts`
|
||||
- `tests/unit/services/node-specific-validators.test.ts`
|
||||
- Created: 2 files
|
||||
- `src/utils/expression-utils.ts`
|
||||
- `tests/unit/utils/expression-utils.test.ts`
|
||||
|
||||
**Code Review:**
|
||||
- ✅ READY TO MERGE
|
||||
- All phases implemented with critical warnings and suggestions addressed
|
||||
- Type safety improved with type predicates
|
||||
- Code duplication eliminated with helper methods
|
||||
- Comprehensive test coverage with real-world scenarios
|
||||
|
||||
**Related:**
|
||||
- PR #346
|
||||
- Branch: `feat/sticky-note-validation`
|
||||
|
||||
Conceived by Romuald Członkowski - [www.aiadvisors.pl/en](https://www.aiadvisors.pl/en)
|
||||
|
||||
## [2.20.4] - 2025-10-21
|
||||
|
||||
### 🛡️ Safety & Reliability Enhancements
|
||||
|
||||
@@ -192,5 +192,4 @@ NEVER proactively create documentation files (*.md) or README files. Only create
|
||||
- When the user asks to review issues, you should use GH CLI to get the issue and all the comments
|
||||
- When the task can be divided into separated subtasks, you should spawn separate sub-agents to handle them in paralel
|
||||
- Use the best sub-agent for the task as per their descriptions
|
||||
- Do not use hyperbolic or dramatic language in comments and documentation
|
||||
- Add to every commit and PR: Concieved by Romuald Członkowski - and then link to www.aiadvisors.pl/en. Don't add it in conversations
|
||||
- Do not use hyperbolic or dramatic language in comments and documentation
|
||||
@@ -5,7 +5,7 @@
|
||||
[](https://www.npmjs.com/package/n8n-mcp)
|
||||
[](https://codecov.io/gh/czlonkowski/n8n-mcp)
|
||||
[](https://github.com/czlonkowski/n8n-mcp/actions)
|
||||
[](https://github.com/n8n-io/n8n)
|
||||
[](https://github.com/n8n-io/n8n)
|
||||
[](https://github.com/czlonkowski/n8n-mcp/pkgs/container/n8n-mcp)
|
||||
[](https://railway.com/deploy/n8n-mcp?referralCode=n8n-mcp)
|
||||
|
||||
@@ -981,7 +981,6 @@ These powerful tools allow you to manage n8n workflows directly from Claude. The
|
||||
- **`n8n_list_workflows`** - List workflows with filtering and pagination
|
||||
- **`n8n_validate_workflow`** - Validate workflows already in n8n by ID (NEW in v2.6.3)
|
||||
- **`n8n_autofix_workflow`** - Automatically fix common workflow errors (NEW in v2.13.0!)
|
||||
- **`n8n_workflow_versions`** - Manage workflow version history and rollback (NEW in v2.22.0!)
|
||||
|
||||
#### Execution Management
|
||||
- **`n8n_trigger_webhook_workflow`** - Trigger workflows via webhook URL
|
||||
|
||||
BIN
data/nodes.db
BIN
data/nodes.db
Binary file not shown.
5997
package-lock.json
generated
5997
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
11
package.json
11
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "n8n-mcp",
|
||||
"version": "2.22.0",
|
||||
"version": "2.20.4",
|
||||
"description": "Integration between n8n workflow automation and Model Context Protocol (MCP)",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
@@ -140,18 +140,17 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.20.1",
|
||||
"@n8n/n8n-nodes-langchain": "^1.115.1",
|
||||
"@n8n/n8n-nodes-langchain": "^1.114.1",
|
||||
"@supabase/supabase-js": "^2.57.4",
|
||||
"dotenv": "^16.5.0",
|
||||
"express": "^5.1.0",
|
||||
"express-rate-limit": "^7.1.5",
|
||||
"lru-cache": "^11.2.1",
|
||||
"n8n": "^1.116.2",
|
||||
"n8n-core": "^1.115.1",
|
||||
"n8n-workflow": "^1.113.0",
|
||||
"n8n": "^1.115.2",
|
||||
"n8n-core": "^1.114.0",
|
||||
"n8n-workflow": "^1.112.0",
|
||||
"openai": "^4.77.0",
|
||||
"sql.js": "^1.13.0",
|
||||
"tslib": "^2.6.2",
|
||||
"uuid": "^10.0.0",
|
||||
"zod": "^3.24.1"
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "n8n-mcp-runtime",
|
||||
"version": "2.22.0",
|
||||
"version": "2.20.2",
|
||||
"description": "n8n MCP Server Runtime Dependencies Only",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
@@ -11,7 +11,6 @@
|
||||
"dotenv": "^16.5.0",
|
||||
"lru-cache": "^11.2.1",
|
||||
"sql.js": "^1.13.0",
|
||||
"tslib": "^2.6.2",
|
||||
"uuid": "^10.0.0",
|
||||
"axios": "^1.7.7"
|
||||
},
|
||||
|
||||
@@ -1,121 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Generate release notes from commit messages between two tags
|
||||
* Used by GitHub Actions to create automated release notes
|
||||
*/
|
||||
|
||||
const { execSync } = require('child_process');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
function generateReleaseNotes(previousTag, currentTag) {
|
||||
try {
|
||||
console.log(`Generating release notes from ${previousTag} to ${currentTag}`);
|
||||
|
||||
// Get commits between tags
|
||||
const gitLogCommand = `git log --pretty=format:"%H|%s|%an|%ae|%ad" --date=short --no-merges ${previousTag}..${currentTag}`;
|
||||
const commitsOutput = execSync(gitLogCommand, { encoding: 'utf8' });
|
||||
|
||||
if (!commitsOutput.trim()) {
|
||||
console.log('No commits found between tags');
|
||||
return 'No changes in this release.';
|
||||
}
|
||||
|
||||
const commits = commitsOutput.trim().split('\n').map(line => {
|
||||
const [hash, subject, author, email, date] = line.split('|');
|
||||
return { hash, subject, author, email, date };
|
||||
});
|
||||
|
||||
// Categorize commits
|
||||
const categories = {
|
||||
'feat': { title: '✨ Features', commits: [] },
|
||||
'fix': { title: '🐛 Bug Fixes', commits: [] },
|
||||
'docs': { title: '📚 Documentation', commits: [] },
|
||||
'refactor': { title: '♻️ Refactoring', commits: [] },
|
||||
'test': { title: '🧪 Testing', commits: [] },
|
||||
'perf': { title: '⚡ Performance', commits: [] },
|
||||
'style': { title: '💅 Styling', commits: [] },
|
||||
'ci': { title: '🔧 CI/CD', commits: [] },
|
||||
'build': { title: '📦 Build', commits: [] },
|
||||
'chore': { title: '🔧 Maintenance', commits: [] },
|
||||
'other': { title: '📝 Other Changes', commits: [] }
|
||||
};
|
||||
|
||||
commits.forEach(commit => {
|
||||
const subject = commit.subject.toLowerCase();
|
||||
let categorized = false;
|
||||
|
||||
// Check for conventional commit prefixes
|
||||
for (const [prefix, category] of Object.entries(categories)) {
|
||||
if (prefix !== 'other' && subject.startsWith(`${prefix}:`)) {
|
||||
category.commits.push(commit);
|
||||
categorized = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If not categorized, put in other
|
||||
if (!categorized) {
|
||||
categories.other.commits.push(commit);
|
||||
}
|
||||
});
|
||||
|
||||
// Generate release notes
|
||||
const releaseNotes = [];
|
||||
|
||||
for (const [key, category] of Object.entries(categories)) {
|
||||
if (category.commits.length > 0) {
|
||||
releaseNotes.push(`### ${category.title}`);
|
||||
releaseNotes.push('');
|
||||
|
||||
category.commits.forEach(commit => {
|
||||
// Clean up the subject by removing the prefix if it exists
|
||||
let cleanSubject = commit.subject;
|
||||
const colonIndex = cleanSubject.indexOf(':');
|
||||
if (colonIndex !== -1 && cleanSubject.substring(0, colonIndex).match(/^(feat|fix|docs|refactor|test|perf|style|ci|build|chore)$/)) {
|
||||
cleanSubject = cleanSubject.substring(colonIndex + 1).trim();
|
||||
// Capitalize first letter
|
||||
cleanSubject = cleanSubject.charAt(0).toUpperCase() + cleanSubject.slice(1);
|
||||
}
|
||||
|
||||
releaseNotes.push(`- ${cleanSubject} (${commit.hash.substring(0, 7)})`);
|
||||
});
|
||||
|
||||
releaseNotes.push('');
|
||||
}
|
||||
}
|
||||
|
||||
// Add commit statistics
|
||||
const totalCommits = commits.length;
|
||||
const contributors = [...new Set(commits.map(c => c.author))];
|
||||
|
||||
releaseNotes.push('---');
|
||||
releaseNotes.push('');
|
||||
releaseNotes.push(`**Release Statistics:**`);
|
||||
releaseNotes.push(`- ${totalCommits} commit${totalCommits !== 1 ? 's' : ''}`);
|
||||
releaseNotes.push(`- ${contributors.length} contributor${contributors.length !== 1 ? 's' : ''}`);
|
||||
|
||||
if (contributors.length <= 5) {
|
||||
releaseNotes.push(`- Contributors: ${contributors.join(', ')}`);
|
||||
}
|
||||
|
||||
return releaseNotes.join('\n');
|
||||
|
||||
} catch (error) {
|
||||
console.error(`Error generating release notes: ${error.message}`);
|
||||
return `Failed to generate release notes: ${error.message}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Parse command line arguments
|
||||
const previousTag = process.argv[2];
|
||||
const currentTag = process.argv[3];
|
||||
|
||||
if (!previousTag || !currentTag) {
|
||||
console.error('Usage: generate-release-notes.js <previous-tag> <current-tag>');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const releaseNotes = generateReleaseNotes(previousTag, currentTag);
|
||||
console.log(releaseNotes);
|
||||
@@ -1,287 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Test Workflow Versioning System
|
||||
*
|
||||
* Tests the complete workflow rollback and versioning functionality:
|
||||
* - Automatic backup creation
|
||||
* - Auto-pruning to 10 versions
|
||||
* - Version history retrieval
|
||||
* - Rollback with validation
|
||||
* - Manual pruning and cleanup
|
||||
* - Storage statistics
|
||||
*/
|
||||
|
||||
import { NodeRepository } from '../src/database/node-repository';
|
||||
import { createDatabaseAdapter } from '../src/database/database-adapter';
|
||||
import { WorkflowVersioningService } from '../src/services/workflow-versioning-service';
|
||||
import { logger } from '../src/utils/logger';
|
||||
import { existsSync } from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
// Mock workflow for testing
|
||||
const createMockWorkflow = (id: string, name: string, nodeCount: number = 3) => ({
|
||||
id,
|
||||
name,
|
||||
active: false,
|
||||
nodes: Array.from({ length: nodeCount }, (_, i) => ({
|
||||
id: `node-${i}`,
|
||||
name: `Node ${i}`,
|
||||
type: 'n8n-nodes-base.set',
|
||||
typeVersion: 1,
|
||||
position: [250 + i * 200, 300],
|
||||
parameters: { values: { string: [{ name: `field${i}`, value: `value${i}` }] } }
|
||||
})),
|
||||
connections: nodeCount > 1 ? {
|
||||
'node-0': { main: [[{ node: 'node-1', type: 'main', index: 0 }]] },
|
||||
...(nodeCount > 2 && { 'node-1': { main: [[{ node: 'node-2', type: 'main', index: 0 }]] } })
|
||||
} : {},
|
||||
settings: {}
|
||||
});
|
||||
|
||||
async function runTests() {
|
||||
console.log('🧪 Testing Workflow Versioning System\n');
|
||||
|
||||
// Find database path
|
||||
const possiblePaths = [
|
||||
path.join(process.cwd(), 'data', 'nodes.db'),
|
||||
path.join(__dirname, '../../data', 'nodes.db'),
|
||||
'./data/nodes.db'
|
||||
];
|
||||
|
||||
let dbPath: string | null = null;
|
||||
for (const p of possiblePaths) {
|
||||
if (existsSync(p)) {
|
||||
dbPath = p;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!dbPath) {
|
||||
console.error('❌ Database not found. Please run npm run rebuild first.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(`📁 Using database: ${dbPath}\n`);
|
||||
|
||||
// Initialize repository
|
||||
const db = await createDatabaseAdapter(dbPath);
|
||||
const repository = new NodeRepository(db);
|
||||
const service = new WorkflowVersioningService(repository);
|
||||
|
||||
const workflowId = 'test-workflow-001';
|
||||
let testsPassed = 0;
|
||||
let testsFailed = 0;
|
||||
|
||||
try {
|
||||
// Test 1: Create initial backup
|
||||
console.log('📝 Test 1: Create initial backup');
|
||||
const workflow1 = createMockWorkflow(workflowId, 'Test Workflow v1', 3);
|
||||
const backup1 = await service.createBackup(workflowId, workflow1, {
|
||||
trigger: 'partial_update',
|
||||
operations: [{ type: 'addNode', node: workflow1.nodes[0] }]
|
||||
});
|
||||
|
||||
if (backup1.versionId && backup1.versionNumber === 1 && backup1.pruned === 0) {
|
||||
console.log('✅ Initial backup created successfully');
|
||||
console.log(` Version ID: ${backup1.versionId}, Version Number: ${backup1.versionNumber}`);
|
||||
testsPassed++;
|
||||
} else {
|
||||
console.log('❌ Failed to create initial backup');
|
||||
testsFailed++;
|
||||
}
|
||||
|
||||
// Test 2: Create multiple backups to test auto-pruning
|
||||
console.log('\n📝 Test 2: Create 12 backups to test auto-pruning (should keep only 10)');
|
||||
for (let i = 2; i <= 12; i++) {
|
||||
const workflow = createMockWorkflow(workflowId, `Test Workflow v${i}`, 3 + i);
|
||||
await service.createBackup(workflowId, workflow, {
|
||||
trigger: i % 3 === 0 ? 'full_update' : 'partial_update',
|
||||
operations: [{ type: 'addNode', node: { id: `node-${i}` } }]
|
||||
});
|
||||
}
|
||||
|
||||
const versions = await service.getVersionHistory(workflowId, 100);
|
||||
if (versions.length === 10) {
|
||||
console.log(`✅ Auto-pruning works correctly (kept exactly 10 versions)`);
|
||||
console.log(` Latest version: ${versions[0].versionNumber}, Oldest: ${versions[9].versionNumber}`);
|
||||
testsPassed++;
|
||||
} else {
|
||||
console.log(`❌ Auto-pruning failed (expected 10 versions, got ${versions.length})`);
|
||||
testsFailed++;
|
||||
}
|
||||
|
||||
// Test 3: Get version history
|
||||
console.log('\n📝 Test 3: Get version history');
|
||||
const history = await service.getVersionHistory(workflowId, 5);
|
||||
if (history.length === 5 && history[0].versionNumber > history[4].versionNumber) {
|
||||
console.log(`✅ Version history retrieved successfully (${history.length} versions)`);
|
||||
console.log(' Recent versions:');
|
||||
history.forEach(v => {
|
||||
console.log(` - v${v.versionNumber} (${v.trigger}) - ${v.workflowName} - ${(v.size / 1024).toFixed(2)} KB`);
|
||||
});
|
||||
testsPassed++;
|
||||
} else {
|
||||
console.log('❌ Failed to get version history');
|
||||
testsFailed++;
|
||||
}
|
||||
|
||||
// Test 4: Get specific version
|
||||
console.log('\n📝 Test 4: Get specific version details');
|
||||
const specificVersion = await service.getVersion(history[2].id);
|
||||
if (specificVersion && specificVersion.workflowSnapshot) {
|
||||
console.log(`✅ Retrieved version ${specificVersion.versionNumber} successfully`);
|
||||
console.log(` Workflow name: ${specificVersion.workflowName}`);
|
||||
console.log(` Node count: ${specificVersion.workflowSnapshot.nodes.length}`);
|
||||
console.log(` Trigger: ${specificVersion.trigger}`);
|
||||
testsPassed++;
|
||||
} else {
|
||||
console.log('❌ Failed to get specific version');
|
||||
testsFailed++;
|
||||
}
|
||||
|
||||
// Test 5: Compare two versions
|
||||
console.log('\n📝 Test 5: Compare two versions');
|
||||
if (history.length >= 2) {
|
||||
const diff = await service.compareVersions(history[0].id, history[1].id);
|
||||
console.log(`✅ Version comparison successful`);
|
||||
console.log(` Comparing v${diff.version1Number} → v${diff.version2Number}`);
|
||||
console.log(` Added nodes: ${diff.addedNodes.length}`);
|
||||
console.log(` Removed nodes: ${diff.removedNodes.length}`);
|
||||
console.log(` Modified nodes: ${diff.modifiedNodes.length}`);
|
||||
console.log(` Connection changes: ${diff.connectionChanges}`);
|
||||
testsPassed++;
|
||||
} else {
|
||||
console.log('❌ Not enough versions to compare');
|
||||
testsFailed++;
|
||||
}
|
||||
|
||||
// Test 6: Manual pruning
|
||||
console.log('\n📝 Test 6: Manual pruning (keep only 5 versions)');
|
||||
const pruneResult = await service.pruneVersions(workflowId, 5);
|
||||
if (pruneResult.pruned === 5 && pruneResult.remaining === 5) {
|
||||
console.log(`✅ Manual pruning successful`);
|
||||
console.log(` Pruned: ${pruneResult.pruned} versions, Remaining: ${pruneResult.remaining}`);
|
||||
testsPassed++;
|
||||
} else {
|
||||
console.log(`❌ Manual pruning failed (expected 5 pruned, 5 remaining, got ${pruneResult.pruned} pruned, ${pruneResult.remaining} remaining)`);
|
||||
testsFailed++;
|
||||
}
|
||||
|
||||
// Test 7: Storage statistics
|
||||
console.log('\n📝 Test 7: Storage statistics');
|
||||
const stats = await service.getStorageStats();
|
||||
if (stats.totalVersions > 0 && stats.byWorkflow.length > 0) {
|
||||
console.log(`✅ Storage stats retrieved successfully`);
|
||||
console.log(` Total versions: ${stats.totalVersions}`);
|
||||
console.log(` Total size: ${stats.totalSizeFormatted}`);
|
||||
console.log(` Workflows with versions: ${stats.byWorkflow.length}`);
|
||||
stats.byWorkflow.forEach(w => {
|
||||
console.log(` - ${w.workflowName}: ${w.versionCount} versions, ${w.totalSizeFormatted}`);
|
||||
});
|
||||
testsPassed++;
|
||||
} else {
|
||||
console.log('❌ Failed to get storage stats');
|
||||
testsFailed++;
|
||||
}
|
||||
|
||||
// Test 8: Delete specific version
|
||||
console.log('\n📝 Test 8: Delete specific version');
|
||||
const versionsBeforeDelete = await service.getVersionHistory(workflowId, 100);
|
||||
const versionToDelete = versionsBeforeDelete[versionsBeforeDelete.length - 1];
|
||||
const deleteResult = await service.deleteVersion(versionToDelete.id);
|
||||
const versionsAfterDelete = await service.getVersionHistory(workflowId, 100);
|
||||
|
||||
if (deleteResult.success && versionsAfterDelete.length === versionsBeforeDelete.length - 1) {
|
||||
console.log(`✅ Version deletion successful`);
|
||||
console.log(` Deleted version ${versionToDelete.versionNumber}`);
|
||||
console.log(` Remaining versions: ${versionsAfterDelete.length}`);
|
||||
testsPassed++;
|
||||
} else {
|
||||
console.log('❌ Failed to delete version');
|
||||
testsFailed++;
|
||||
}
|
||||
|
||||
// Test 9: Test different trigger types
|
||||
console.log('\n📝 Test 9: Test different trigger types');
|
||||
const workflow2 = createMockWorkflow(workflowId, 'Test Workflow Autofix', 2);
|
||||
const backupAutofix = await service.createBackup(workflowId, workflow2, {
|
||||
trigger: 'autofix',
|
||||
fixTypes: ['expression-format', 'typeversion-correction']
|
||||
});
|
||||
|
||||
const workflow3 = createMockWorkflow(workflowId, 'Test Workflow Full Update', 4);
|
||||
const backupFull = await service.createBackup(workflowId, workflow3, {
|
||||
trigger: 'full_update',
|
||||
metadata: { reason: 'Major refactoring' }
|
||||
});
|
||||
|
||||
const allVersions = await service.getVersionHistory(workflowId, 100);
|
||||
const autofixVersions = allVersions.filter(v => v.trigger === 'autofix');
|
||||
const fullUpdateVersions = allVersions.filter(v => v.trigger === 'full_update');
|
||||
const partialUpdateVersions = allVersions.filter(v => v.trigger === 'partial_update');
|
||||
|
||||
if (autofixVersions.length > 0 && fullUpdateVersions.length > 0 && partialUpdateVersions.length > 0) {
|
||||
console.log(`✅ All trigger types working correctly`);
|
||||
console.log(` Partial updates: ${partialUpdateVersions.length}`);
|
||||
console.log(` Full updates: ${fullUpdateVersions.length}`);
|
||||
console.log(` Autofixes: ${autofixVersions.length}`);
|
||||
testsPassed++;
|
||||
} else {
|
||||
console.log('❌ Failed to create versions with different trigger types');
|
||||
testsFailed++;
|
||||
}
|
||||
|
||||
// Test 10: Cleanup - Delete all versions for workflow
|
||||
console.log('\n📝 Test 10: Delete all versions for workflow');
|
||||
const deleteAllResult = await service.deleteAllVersions(workflowId);
|
||||
const versionsAfterDeleteAll = await service.getVersionHistory(workflowId, 100);
|
||||
|
||||
if (deleteAllResult.deleted > 0 && versionsAfterDeleteAll.length === 0) {
|
||||
console.log(`✅ Delete all versions successful`);
|
||||
console.log(` Deleted ${deleteAllResult.deleted} versions`);
|
||||
testsPassed++;
|
||||
} else {
|
||||
console.log('❌ Failed to delete all versions');
|
||||
testsFailed++;
|
||||
}
|
||||
|
||||
// Test 11: Truncate all versions (requires confirmation)
|
||||
console.log('\n📝 Test 11: Test truncate without confirmation');
|
||||
const truncateResult1 = await service.truncateAllVersions(false);
|
||||
if (truncateResult1.deleted === 0 && truncateResult1.message.includes('not confirmed')) {
|
||||
console.log(`✅ Truncate safety check works (requires confirmation)`);
|
||||
testsPassed++;
|
||||
} else {
|
||||
console.log('❌ Truncate safety check failed');
|
||||
testsFailed++;
|
||||
}
|
||||
|
||||
// Summary
|
||||
console.log('\n' + '='.repeat(60));
|
||||
console.log('📊 Test Summary');
|
||||
console.log('='.repeat(60));
|
||||
console.log(`✅ Passed: ${testsPassed}`);
|
||||
console.log(`❌ Failed: ${testsFailed}`);
|
||||
console.log(`📈 Success Rate: ${((testsPassed / (testsPassed + testsFailed)) * 100).toFixed(1)}%`);
|
||||
console.log('='.repeat(60));
|
||||
|
||||
if (testsFailed === 0) {
|
||||
console.log('\n🎉 All tests passed! Workflow versioning system is working correctly.');
|
||||
process.exit(0);
|
||||
} else {
|
||||
console.log('\n⚠️ Some tests failed. Please review the implementation.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
} catch (error: any) {
|
||||
console.error('\n❌ Test suite failed with error:', error.message);
|
||||
console.error(error.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Run tests
|
||||
runTests().catch(error => {
|
||||
console.error('Fatal error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -462,501 +462,4 @@ export class NodeRepository {
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* VERSION MANAGEMENT METHODS
|
||||
* Methods for working with node_versions and version_property_changes tables
|
||||
*/
|
||||
|
||||
/**
|
||||
* Save a specific node version to the database
|
||||
*/
|
||||
saveNodeVersion(versionData: {
|
||||
nodeType: string;
|
||||
version: string;
|
||||
packageName: string;
|
||||
displayName: string;
|
||||
description?: string;
|
||||
category?: string;
|
||||
isCurrentMax?: boolean;
|
||||
propertiesSchema?: any;
|
||||
operations?: any;
|
||||
credentialsRequired?: any;
|
||||
outputs?: any;
|
||||
minimumN8nVersion?: string;
|
||||
breakingChanges?: any[];
|
||||
deprecatedProperties?: string[];
|
||||
addedProperties?: string[];
|
||||
releasedAt?: Date;
|
||||
}): void {
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT OR REPLACE INTO node_versions (
|
||||
node_type, version, package_name, display_name, description,
|
||||
category, is_current_max, properties_schema, operations,
|
||||
credentials_required, outputs, minimum_n8n_version,
|
||||
breaking_changes, deprecated_properties, added_properties,
|
||||
released_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
stmt.run(
|
||||
versionData.nodeType,
|
||||
versionData.version,
|
||||
versionData.packageName,
|
||||
versionData.displayName,
|
||||
versionData.description || null,
|
||||
versionData.category || null,
|
||||
versionData.isCurrentMax ? 1 : 0,
|
||||
versionData.propertiesSchema ? JSON.stringify(versionData.propertiesSchema) : null,
|
||||
versionData.operations ? JSON.stringify(versionData.operations) : null,
|
||||
versionData.credentialsRequired ? JSON.stringify(versionData.credentialsRequired) : null,
|
||||
versionData.outputs ? JSON.stringify(versionData.outputs) : null,
|
||||
versionData.minimumN8nVersion || null,
|
||||
versionData.breakingChanges ? JSON.stringify(versionData.breakingChanges) : null,
|
||||
versionData.deprecatedProperties ? JSON.stringify(versionData.deprecatedProperties) : null,
|
||||
versionData.addedProperties ? JSON.stringify(versionData.addedProperties) : null,
|
||||
versionData.releasedAt || null
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all available versions for a specific node type
|
||||
*/
|
||||
getNodeVersions(nodeType: string): any[] {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM node_versions
|
||||
WHERE node_type = ?
|
||||
ORDER BY version DESC
|
||||
`).all(normalizedType) as any[];
|
||||
|
||||
return rows.map(row => this.parseNodeVersionRow(row));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the latest (current max) version for a node type
|
||||
*/
|
||||
getLatestNodeVersion(nodeType: string): any | null {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
|
||||
const row = this.db.prepare(`
|
||||
SELECT * FROM node_versions
|
||||
WHERE node_type = ? AND is_current_max = 1
|
||||
LIMIT 1
|
||||
`).get(normalizedType) as any;
|
||||
|
||||
if (!row) return null;
|
||||
return this.parseNodeVersionRow(row);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific version of a node
|
||||
*/
|
||||
getNodeVersion(nodeType: string, version: string): any | null {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
|
||||
const row = this.db.prepare(`
|
||||
SELECT * FROM node_versions
|
||||
WHERE node_type = ? AND version = ?
|
||||
`).get(normalizedType, version) as any;
|
||||
|
||||
if (!row) return null;
|
||||
return this.parseNodeVersionRow(row);
|
||||
}
|
||||
|
||||
/**
|
||||
* Save a property change between versions
|
||||
*/
|
||||
savePropertyChange(changeData: {
|
||||
nodeType: string;
|
||||
fromVersion: string;
|
||||
toVersion: string;
|
||||
propertyName: string;
|
||||
changeType: 'added' | 'removed' | 'renamed' | 'type_changed' | 'requirement_changed' | 'default_changed';
|
||||
isBreaking?: boolean;
|
||||
oldValue?: string;
|
||||
newValue?: string;
|
||||
migrationHint?: string;
|
||||
autoMigratable?: boolean;
|
||||
migrationStrategy?: any;
|
||||
severity?: 'LOW' | 'MEDIUM' | 'HIGH';
|
||||
}): void {
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT INTO version_property_changes (
|
||||
node_type, from_version, to_version, property_name, change_type,
|
||||
is_breaking, old_value, new_value, migration_hint, auto_migratable,
|
||||
migration_strategy, severity
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
stmt.run(
|
||||
changeData.nodeType,
|
||||
changeData.fromVersion,
|
||||
changeData.toVersion,
|
||||
changeData.propertyName,
|
||||
changeData.changeType,
|
||||
changeData.isBreaking ? 1 : 0,
|
||||
changeData.oldValue || null,
|
||||
changeData.newValue || null,
|
||||
changeData.migrationHint || null,
|
||||
changeData.autoMigratable ? 1 : 0,
|
||||
changeData.migrationStrategy ? JSON.stringify(changeData.migrationStrategy) : null,
|
||||
changeData.severity || 'MEDIUM'
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get property changes between two versions
|
||||
*/
|
||||
getPropertyChanges(nodeType: string, fromVersion: string, toVersion: string): any[] {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM version_property_changes
|
||||
WHERE node_type = ? AND from_version = ? AND to_version = ?
|
||||
ORDER BY severity DESC, property_name
|
||||
`).all(normalizedType, fromVersion, toVersion) as any[];
|
||||
|
||||
return rows.map(row => this.parsePropertyChangeRow(row));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all breaking changes for upgrading from one version to another
|
||||
* Can handle multi-step upgrades (e.g., 1.0 -> 2.0 via 1.5)
|
||||
*/
|
||||
getBreakingChanges(nodeType: string, fromVersion: string, toVersion?: string): any[] {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
|
||||
let sql = `
|
||||
SELECT * FROM version_property_changes
|
||||
WHERE node_type = ? AND is_breaking = 1
|
||||
`;
|
||||
const params: any[] = [normalizedType];
|
||||
|
||||
if (toVersion) {
|
||||
// Get changes between specific versions
|
||||
sql += ` AND from_version >= ? AND to_version <= ?`;
|
||||
params.push(fromVersion, toVersion);
|
||||
} else {
|
||||
// Get all breaking changes from this version onwards
|
||||
sql += ` AND from_version >= ?`;
|
||||
params.push(fromVersion);
|
||||
}
|
||||
|
||||
sql += ` ORDER BY from_version, to_version, severity DESC`;
|
||||
|
||||
const rows = this.db.prepare(sql).all(...params) as any[];
|
||||
return rows.map(row => this.parsePropertyChangeRow(row));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get auto-migratable changes for a version upgrade
|
||||
*/
|
||||
getAutoMigratableChanges(nodeType: string, fromVersion: string, toVersion: string): any[] {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);
|
||||
|
||||
const rows = this.db.prepare(`
|
||||
SELECT * FROM version_property_changes
|
||||
WHERE node_type = ?
|
||||
AND from_version = ?
|
||||
AND to_version = ?
|
||||
AND auto_migratable = 1
|
||||
ORDER BY severity DESC
|
||||
`).all(normalizedType, fromVersion, toVersion) as any[];
|
||||
|
||||
return rows.map(row => this.parsePropertyChangeRow(row));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a version upgrade path exists between two versions
|
||||
*/
|
||||
hasVersionUpgradePath(nodeType: string, fromVersion: string, toVersion: string): boolean {
|
||||
const versions = this.getNodeVersions(nodeType);
|
||||
if (versions.length === 0) return false;
|
||||
|
||||
// Check if both versions exist
|
||||
const fromExists = versions.some(v => v.version === fromVersion);
|
||||
const toExists = versions.some(v => v.version === toVersion);
|
||||
|
||||
return fromExists && toExists;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get count of nodes with multiple versions
|
||||
*/
|
||||
getVersionedNodesCount(): number {
|
||||
const result = this.db.prepare(`
|
||||
SELECT COUNT(DISTINCT node_type) as count
|
||||
FROM node_versions
|
||||
`).get() as any;
|
||||
return result.count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse node version row from database
|
||||
*/
|
||||
private parseNodeVersionRow(row: any): any {
|
||||
return {
|
||||
id: row.id,
|
||||
nodeType: row.node_type,
|
||||
version: row.version,
|
||||
packageName: row.package_name,
|
||||
displayName: row.display_name,
|
||||
description: row.description,
|
||||
category: row.category,
|
||||
isCurrentMax: Number(row.is_current_max) === 1,
|
||||
propertiesSchema: row.properties_schema ? this.safeJsonParse(row.properties_schema, []) : null,
|
||||
operations: row.operations ? this.safeJsonParse(row.operations, []) : null,
|
||||
credentialsRequired: row.credentials_required ? this.safeJsonParse(row.credentials_required, []) : null,
|
||||
outputs: row.outputs ? this.safeJsonParse(row.outputs, null) : null,
|
||||
minimumN8nVersion: row.minimum_n8n_version,
|
||||
breakingChanges: row.breaking_changes ? this.safeJsonParse(row.breaking_changes, []) : [],
|
||||
deprecatedProperties: row.deprecated_properties ? this.safeJsonParse(row.deprecated_properties, []) : [],
|
||||
addedProperties: row.added_properties ? this.safeJsonParse(row.added_properties, []) : [],
|
||||
releasedAt: row.released_at,
|
||||
createdAt: row.created_at
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse property change row from database
|
||||
*/
|
||||
private parsePropertyChangeRow(row: any): any {
|
||||
return {
|
||||
id: row.id,
|
||||
nodeType: row.node_type,
|
||||
fromVersion: row.from_version,
|
||||
toVersion: row.to_version,
|
||||
propertyName: row.property_name,
|
||||
changeType: row.change_type,
|
||||
isBreaking: Number(row.is_breaking) === 1,
|
||||
oldValue: row.old_value,
|
||||
newValue: row.new_value,
|
||||
migrationHint: row.migration_hint,
|
||||
autoMigratable: Number(row.auto_migratable) === 1,
|
||||
migrationStrategy: row.migration_strategy ? this.safeJsonParse(row.migration_strategy, null) : null,
|
||||
severity: row.severity,
|
||||
createdAt: row.created_at
|
||||
};
|
||||
}
|
||||
|
||||
// ========================================
|
||||
// Workflow Versioning Methods
|
||||
// ========================================
|
||||
|
||||
/**
|
||||
* Create a new workflow version (backup before modification)
|
||||
*/
|
||||
createWorkflowVersion(data: {
|
||||
workflowId: string;
|
||||
versionNumber: number;
|
||||
workflowName: string;
|
||||
workflowSnapshot: any;
|
||||
trigger: 'partial_update' | 'full_update' | 'autofix';
|
||||
operations?: any[];
|
||||
fixTypes?: string[];
|
||||
metadata?: any;
|
||||
}): number {
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT INTO workflow_versions (
|
||||
workflow_id, version_number, workflow_name, workflow_snapshot,
|
||||
trigger, operations, fix_types, metadata
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
const result = stmt.run(
|
||||
data.workflowId,
|
||||
data.versionNumber,
|
||||
data.workflowName,
|
||||
JSON.stringify(data.workflowSnapshot),
|
||||
data.trigger,
|
||||
data.operations ? JSON.stringify(data.operations) : null,
|
||||
data.fixTypes ? JSON.stringify(data.fixTypes) : null,
|
||||
data.metadata ? JSON.stringify(data.metadata) : null
|
||||
);
|
||||
|
||||
return result.lastInsertRowid as number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get workflow versions ordered by version number (newest first)
|
||||
*/
|
||||
getWorkflowVersions(workflowId: string, limit?: number): any[] {
|
||||
let sql = `
|
||||
SELECT * FROM workflow_versions
|
||||
WHERE workflow_id = ?
|
||||
ORDER BY version_number DESC
|
||||
`;
|
||||
|
||||
if (limit) {
|
||||
sql += ` LIMIT ?`;
|
||||
const rows = this.db.prepare(sql).all(workflowId, limit) as any[];
|
||||
return rows.map(row => this.parseWorkflowVersionRow(row));
|
||||
}
|
||||
|
||||
const rows = this.db.prepare(sql).all(workflowId) as any[];
|
||||
return rows.map(row => this.parseWorkflowVersionRow(row));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific workflow version by ID
|
||||
*/
|
||||
getWorkflowVersion(versionId: number): any | null {
|
||||
const row = this.db.prepare(`
|
||||
SELECT * FROM workflow_versions WHERE id = ?
|
||||
`).get(versionId) as any;
|
||||
|
||||
if (!row) return null;
|
||||
return this.parseWorkflowVersionRow(row);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the latest workflow version for a workflow
|
||||
*/
|
||||
getLatestWorkflowVersion(workflowId: string): any | null {
|
||||
const row = this.db.prepare(`
|
||||
SELECT * FROM workflow_versions
|
||||
WHERE workflow_id = ?
|
||||
ORDER BY version_number DESC
|
||||
LIMIT 1
|
||||
`).get(workflowId) as any;
|
||||
|
||||
if (!row) return null;
|
||||
return this.parseWorkflowVersionRow(row);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a specific workflow version
|
||||
*/
|
||||
deleteWorkflowVersion(versionId: number): void {
|
||||
this.db.prepare(`
|
||||
DELETE FROM workflow_versions WHERE id = ?
|
||||
`).run(versionId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete all versions for a specific workflow
|
||||
*/
|
||||
deleteWorkflowVersionsByWorkflowId(workflowId: string): number {
|
||||
const result = this.db.prepare(`
|
||||
DELETE FROM workflow_versions WHERE workflow_id = ?
|
||||
`).run(workflowId);
|
||||
|
||||
return result.changes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prune old workflow versions, keeping only the most recent N versions
|
||||
* Returns number of versions deleted
|
||||
*/
|
||||
pruneWorkflowVersions(workflowId: string, keepCount: number): number {
|
||||
// Get all versions ordered by version_number DESC
|
||||
const versions = this.db.prepare(`
|
||||
SELECT id FROM workflow_versions
|
||||
WHERE workflow_id = ?
|
||||
ORDER BY version_number DESC
|
||||
`).all(workflowId) as any[];
|
||||
|
||||
// If we have fewer versions than keepCount, no pruning needed
|
||||
if (versions.length <= keepCount) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Get IDs of versions to delete (all except the most recent keepCount)
|
||||
const idsToDelete = versions.slice(keepCount).map(v => v.id);
|
||||
|
||||
if (idsToDelete.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Delete old versions
|
||||
const placeholders = idsToDelete.map(() => '?').join(',');
|
||||
const result = this.db.prepare(`
|
||||
DELETE FROM workflow_versions WHERE id IN (${placeholders})
|
||||
`).run(...idsToDelete);
|
||||
|
||||
return result.changes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Truncate the entire workflow_versions table
|
||||
* Returns number of rows deleted
|
||||
*/
|
||||
truncateWorkflowVersions(): number {
|
||||
const result = this.db.prepare(`
|
||||
DELETE FROM workflow_versions
|
||||
`).run();
|
||||
|
||||
return result.changes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get count of versions for a specific workflow
|
||||
*/
|
||||
getWorkflowVersionCount(workflowId: string): number {
|
||||
const result = this.db.prepare(`
|
||||
SELECT COUNT(*) as count FROM workflow_versions WHERE workflow_id = ?
|
||||
`).get(workflowId) as any;
|
||||
|
||||
return result.count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get storage statistics for workflow versions
|
||||
*/
|
||||
getVersionStorageStats(): any {
|
||||
// Total versions
|
||||
const totalResult = this.db.prepare(`
|
||||
SELECT COUNT(*) as count FROM workflow_versions
|
||||
`).get() as any;
|
||||
|
||||
// Total size (approximate - sum of JSON lengths)
|
||||
const sizeResult = this.db.prepare(`
|
||||
SELECT SUM(LENGTH(workflow_snapshot)) as total_size FROM workflow_versions
|
||||
`).get() as any;
|
||||
|
||||
// Per-workflow breakdown
|
||||
const byWorkflow = this.db.prepare(`
|
||||
SELECT
|
||||
workflow_id,
|
||||
workflow_name,
|
||||
COUNT(*) as version_count,
|
||||
SUM(LENGTH(workflow_snapshot)) as total_size,
|
||||
MAX(created_at) as last_backup
|
||||
FROM workflow_versions
|
||||
GROUP BY workflow_id
|
||||
ORDER BY version_count DESC
|
||||
`).all() as any[];
|
||||
|
||||
return {
|
||||
totalVersions: totalResult.count,
|
||||
totalSize: sizeResult.total_size || 0,
|
||||
byWorkflow: byWorkflow.map(row => ({
|
||||
workflowId: row.workflow_id,
|
||||
workflowName: row.workflow_name,
|
||||
versionCount: row.version_count,
|
||||
totalSize: row.total_size,
|
||||
lastBackup: row.last_backup
|
||||
}))
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse workflow version row from database
|
||||
*/
|
||||
private parseWorkflowVersionRow(row: any): any {
|
||||
return {
|
||||
id: row.id,
|
||||
workflowId: row.workflow_id,
|
||||
versionNumber: row.version_number,
|
||||
workflowName: row.workflow_name,
|
||||
workflowSnapshot: this.safeJsonParse(row.workflow_snapshot, null),
|
||||
trigger: row.trigger,
|
||||
operations: row.operations ? this.safeJsonParse(row.operations, null) : null,
|
||||
fixTypes: row.fix_types ? this.safeJsonParse(row.fix_types, null) : null,
|
||||
metadata: row.metadata ? this.safeJsonParse(row.metadata, null) : null,
|
||||
createdAt: row.created_at
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -144,93 +144,4 @@ ORDER BY node_type, rank;
|
||||
|
||||
-- Note: Template FTS5 tables are created conditionally at runtime if FTS5 is supported
|
||||
-- See template-repository.ts initializeFTS5() method
|
||||
-- Node FTS5 table (nodes_fts) is created above during schema initialization
|
||||
|
||||
-- Node versions table for tracking all available versions of each node
|
||||
-- Enables version upgrade detection and migration
|
||||
CREATE TABLE IF NOT EXISTS node_versions (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
node_type TEXT NOT NULL, -- e.g., "n8n-nodes-base.executeWorkflow"
|
||||
version TEXT NOT NULL, -- e.g., "1.0", "1.1", "2.0"
|
||||
package_name TEXT NOT NULL, -- e.g., "n8n-nodes-base"
|
||||
display_name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
category TEXT,
|
||||
is_current_max INTEGER DEFAULT 0, -- 1 if this is the latest version
|
||||
properties_schema TEXT, -- JSON schema for this specific version
|
||||
operations TEXT, -- JSON array of operations for this version
|
||||
credentials_required TEXT, -- JSON array of required credentials
|
||||
outputs TEXT, -- JSON array of output definitions
|
||||
minimum_n8n_version TEXT, -- Minimum n8n version required (e.g., "1.0.0")
|
||||
breaking_changes TEXT, -- JSON array of breaking changes from previous version
|
||||
deprecated_properties TEXT, -- JSON array of removed/deprecated properties
|
||||
added_properties TEXT, -- JSON array of newly added properties
|
||||
released_at DATETIME, -- When this version was released
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE(node_type, version),
|
||||
FOREIGN KEY (node_type) REFERENCES nodes(node_type) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
-- Indexes for version queries
|
||||
CREATE INDEX IF NOT EXISTS idx_version_node_type ON node_versions(node_type);
|
||||
CREATE INDEX IF NOT EXISTS idx_version_current_max ON node_versions(is_current_max);
|
||||
CREATE INDEX IF NOT EXISTS idx_version_composite ON node_versions(node_type, version);
|
||||
|
||||
-- Version property changes for detailed migration tracking
|
||||
-- Records specific property-level changes between versions
|
||||
CREATE TABLE IF NOT EXISTS version_property_changes (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
node_type TEXT NOT NULL,
|
||||
from_version TEXT NOT NULL, -- Version where change occurred (e.g., "1.0")
|
||||
to_version TEXT NOT NULL, -- Target version (e.g., "1.1")
|
||||
property_name TEXT NOT NULL, -- Property path (e.g., "parameters.inputFieldMapping")
|
||||
change_type TEXT NOT NULL CHECK(change_type IN (
|
||||
'added', -- Property added (may be required)
|
||||
'removed', -- Property removed/deprecated
|
||||
'renamed', -- Property renamed
|
||||
'type_changed', -- Property type changed
|
||||
'requirement_changed', -- Required → Optional or vice versa
|
||||
'default_changed' -- Default value changed
|
||||
)),
|
||||
is_breaking INTEGER DEFAULT 0, -- 1 if this is a breaking change
|
||||
old_value TEXT, -- For renamed/type_changed: old property name or type
|
||||
new_value TEXT, -- For renamed/type_changed: new property name or type
|
||||
migration_hint TEXT, -- Human-readable migration guidance
|
||||
auto_migratable INTEGER DEFAULT 0, -- 1 if can be automatically migrated
|
||||
migration_strategy TEXT, -- JSON: strategy for auto-migration
|
||||
severity TEXT CHECK(severity IN ('LOW', 'MEDIUM', 'HIGH')), -- Impact severity
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (node_type, from_version) REFERENCES node_versions(node_type, version) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
-- Indexes for property change queries
|
||||
CREATE INDEX IF NOT EXISTS idx_prop_changes_node ON version_property_changes(node_type);
|
||||
CREATE INDEX IF NOT EXISTS idx_prop_changes_versions ON version_property_changes(node_type, from_version, to_version);
|
||||
CREATE INDEX IF NOT EXISTS idx_prop_changes_breaking ON version_property_changes(is_breaking);
|
||||
CREATE INDEX IF NOT EXISTS idx_prop_changes_auto ON version_property_changes(auto_migratable);
|
||||
|
||||
-- Workflow versions table for rollback and version history tracking
|
||||
-- Stores full workflow snapshots before modifications for guaranteed reversibility
|
||||
-- Auto-prunes to 10 versions per workflow to prevent memory leaks
|
||||
CREATE TABLE IF NOT EXISTS workflow_versions (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
workflow_id TEXT NOT NULL, -- n8n workflow ID
|
||||
version_number INTEGER NOT NULL, -- Incremental version number (1, 2, 3...)
|
||||
workflow_name TEXT NOT NULL, -- Workflow name at time of backup
|
||||
workflow_snapshot TEXT NOT NULL, -- Full workflow JSON before modification
|
||||
trigger TEXT NOT NULL CHECK(trigger IN (
|
||||
'partial_update', -- Created by n8n_update_partial_workflow
|
||||
'full_update', -- Created by n8n_update_full_workflow
|
||||
'autofix' -- Created by n8n_autofix_workflow
|
||||
)),
|
||||
operations TEXT, -- JSON array of diff operations (if partial update)
|
||||
fix_types TEXT, -- JSON array of fix types (if autofix)
|
||||
metadata TEXT, -- Additional context (JSON)
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE(workflow_id, version_number)
|
||||
);
|
||||
|
||||
-- Indexes for workflow version queries
|
||||
CREATE INDEX IF NOT EXISTS idx_workflow_versions_workflow_id ON workflow_versions(workflow_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_workflow_versions_created_at ON workflow_versions(created_at);
|
||||
CREATE INDEX IF NOT EXISTS idx_workflow_versions_trigger ON workflow_versions(trigger);
|
||||
-- Node FTS5 table (nodes_fts) is created above during schema initialization
|
||||
@@ -31,7 +31,6 @@ import { InstanceContext, validateInstanceContext } from '../types/instance-cont
|
||||
import { NodeTypeNormalizer } from '../utils/node-type-normalizer';
|
||||
import { WorkflowAutoFixer, AutoFixConfig } from '../services/workflow-auto-fixer';
|
||||
import { ExpressionFormatValidator, ExpressionFormatIssue } from '../services/expression-format-validator';
|
||||
import { WorkflowVersioningService } from '../services/workflow-versioning-service';
|
||||
import { handleUpdatePartialWorkflow } from './handlers-workflow-diff';
|
||||
import { telemetry } from '../telemetry';
|
||||
import {
|
||||
@@ -364,7 +363,6 @@ const updateWorkflowSchema = z.object({
|
||||
nodes: z.array(z.any()).optional(),
|
||||
connections: z.record(z.any()).optional(),
|
||||
settings: z.any().optional(),
|
||||
createBackup: z.boolean().optional(),
|
||||
});
|
||||
|
||||
const listWorkflowsSchema = z.object({
|
||||
@@ -417,17 +415,6 @@ const listExecutionsSchema = z.object({
|
||||
includeData: z.boolean().optional(),
|
||||
});
|
||||
|
||||
const workflowVersionsSchema = z.object({
|
||||
mode: z.enum(['list', 'get', 'rollback', 'delete', 'prune', 'truncate']),
|
||||
workflowId: z.string().optional(),
|
||||
versionId: z.number().optional(),
|
||||
limit: z.number().default(10).optional(),
|
||||
validateBefore: z.boolean().default(true).optional(),
|
||||
deleteAll: z.boolean().default(false).optional(),
|
||||
maxVersions: z.number().default(10).optional(),
|
||||
confirmTruncate: z.boolean().default(false).optional(),
|
||||
});
|
||||
|
||||
// Workflow Management Handlers
|
||||
|
||||
export async function handleCreateWorkflow(args: unknown, context?: InstanceContext): Promise<McpToolResponse> {
|
||||
@@ -695,44 +682,16 @@ export async function handleGetWorkflowMinimal(args: unknown, context?: Instance
|
||||
}
|
||||
}
|
||||
|
||||
export async function handleUpdateWorkflow(
|
||||
args: unknown,
|
||||
repository: NodeRepository,
|
||||
context?: InstanceContext
|
||||
): Promise<McpToolResponse> {
|
||||
export async function handleUpdateWorkflow(args: unknown, context?: InstanceContext): Promise<McpToolResponse> {
|
||||
try {
|
||||
const client = ensureApiConfigured(context);
|
||||
const input = updateWorkflowSchema.parse(args);
|
||||
const { id, createBackup, ...updateData } = input;
|
||||
const { id, ...updateData } = input;
|
||||
|
||||
// If nodes/connections are being updated, validate the structure
|
||||
if (updateData.nodes || updateData.connections) {
|
||||
// Always fetch current workflow for validation (need all fields like name)
|
||||
const current = await client.getWorkflow(id);
|
||||
|
||||
// Create backup before modifying workflow (default: true)
|
||||
if (createBackup !== false) {
|
||||
try {
|
||||
const versioningService = new WorkflowVersioningService(repository, client);
|
||||
const backupResult = await versioningService.createBackup(id, current, {
|
||||
trigger: 'full_update'
|
||||
});
|
||||
|
||||
logger.info('Workflow backup created', {
|
||||
workflowId: id,
|
||||
versionId: backupResult.versionId,
|
||||
versionNumber: backupResult.versionNumber,
|
||||
pruned: backupResult.pruned
|
||||
});
|
||||
} catch (error: any) {
|
||||
logger.warn('Failed to create workflow backup', {
|
||||
workflowId: id,
|
||||
error: error.message
|
||||
});
|
||||
// Continue with update even if backup fails (non-blocking)
|
||||
}
|
||||
}
|
||||
|
||||
const fullWorkflow = {
|
||||
...current,
|
||||
...updateData
|
||||
@@ -748,7 +707,7 @@ export async function handleUpdateWorkflow(
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Update workflow
|
||||
const workflow = await client.updateWorkflow(id, updateData);
|
||||
|
||||
@@ -1036,7 +995,7 @@ export async function handleAutofixWorkflow(
|
||||
|
||||
// Generate fixes using WorkflowAutoFixer
|
||||
const autoFixer = new WorkflowAutoFixer(repository);
|
||||
const fixResult = await autoFixer.generateFixes(
|
||||
const fixResult = autoFixer.generateFixes(
|
||||
workflow,
|
||||
validationResult,
|
||||
allFormatIssues,
|
||||
@@ -1086,10 +1045,8 @@ export async function handleAutofixWorkflow(
|
||||
const updateResult = await handleUpdatePartialWorkflow(
|
||||
{
|
||||
id: workflow.id,
|
||||
operations: fixResult.operations,
|
||||
createBackup: true // Ensure backup is created with autofix metadata
|
||||
operations: fixResult.operations
|
||||
},
|
||||
repository,
|
||||
context
|
||||
);
|
||||
|
||||
@@ -2005,191 +1962,3 @@ export async function handleDiagnostic(request: any, context?: InstanceContext):
|
||||
data: diagnostic
|
||||
};
|
||||
}
|
||||
|
||||
export async function handleWorkflowVersions(
|
||||
args: unknown,
|
||||
repository: NodeRepository,
|
||||
context?: InstanceContext
|
||||
): Promise<McpToolResponse> {
|
||||
try {
|
||||
const input = workflowVersionsSchema.parse(args);
|
||||
const client = context ? getN8nApiClient(context) : null;
|
||||
const versioningService = new WorkflowVersioningService(repository, client || undefined);
|
||||
|
||||
switch (input.mode) {
|
||||
case 'list': {
|
||||
if (!input.workflowId) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'workflowId is required for list mode'
|
||||
};
|
||||
}
|
||||
|
||||
const versions = await versioningService.getVersionHistory(input.workflowId, input.limit);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
workflowId: input.workflowId,
|
||||
versions,
|
||||
count: versions.length,
|
||||
message: `Found ${versions.length} version(s) for workflow ${input.workflowId}`
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
case 'get': {
|
||||
if (!input.versionId) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'versionId is required for get mode'
|
||||
};
|
||||
}
|
||||
|
||||
const version = await versioningService.getVersion(input.versionId);
|
||||
|
||||
if (!version) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Version ${input.versionId} not found`
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: version
|
||||
};
|
||||
}
|
||||
|
||||
case 'rollback': {
|
||||
if (!input.workflowId) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'workflowId is required for rollback mode'
|
||||
};
|
||||
}
|
||||
|
||||
if (!client) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'n8n API not configured. Cannot perform rollback without API access.'
|
||||
};
|
||||
}
|
||||
|
||||
const result = await versioningService.restoreVersion(
|
||||
input.workflowId,
|
||||
input.versionId,
|
||||
input.validateBefore
|
||||
);
|
||||
|
||||
return {
|
||||
success: result.success,
|
||||
data: result.success ? result : undefined,
|
||||
error: result.success ? undefined : result.message,
|
||||
details: result.success ? undefined : {
|
||||
validationErrors: result.validationErrors
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
case 'delete': {
|
||||
if (input.deleteAll) {
|
||||
if (!input.workflowId) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'workflowId is required for deleteAll mode'
|
||||
};
|
||||
}
|
||||
|
||||
const result = await versioningService.deleteAllVersions(input.workflowId);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
workflowId: input.workflowId,
|
||||
deleted: result.deleted,
|
||||
message: result.message
|
||||
}
|
||||
};
|
||||
} else {
|
||||
if (!input.versionId) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'versionId is required for single version delete'
|
||||
};
|
||||
}
|
||||
|
||||
const result = await versioningService.deleteVersion(input.versionId);
|
||||
|
||||
return {
|
||||
success: result.success,
|
||||
data: result.success ? { message: result.message } : undefined,
|
||||
error: result.success ? undefined : result.message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
case 'prune': {
|
||||
if (!input.workflowId) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'workflowId is required for prune mode'
|
||||
};
|
||||
}
|
||||
|
||||
const result = await versioningService.pruneVersions(
|
||||
input.workflowId,
|
||||
input.maxVersions || 10
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
workflowId: input.workflowId,
|
||||
pruned: result.pruned,
|
||||
remaining: result.remaining,
|
||||
message: `Pruned ${result.pruned} old version(s), ${result.remaining} version(s) remaining`
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
case 'truncate': {
|
||||
if (!input.confirmTruncate) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'confirmTruncate must be true to truncate all versions. This action cannot be undone.'
|
||||
};
|
||||
}
|
||||
|
||||
const result = await versioningService.truncateAllVersions(true);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
deleted: result.deleted,
|
||||
message: result.message
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
default:
|
||||
return {
|
||||
success: false,
|
||||
error: `Unknown mode: ${input.mode}`
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Invalid input',
|
||||
details: { errors: error.errors }
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,8 +12,6 @@ import { N8nApiError, getUserFriendlyErrorMessage } from '../utils/n8n-errors';
|
||||
import { logger } from '../utils/logger';
|
||||
import { InstanceContext } from '../types/instance-context';
|
||||
import { validateWorkflowStructure } from '../services/n8n-validation';
|
||||
import { NodeRepository } from '../database/node-repository';
|
||||
import { WorkflowVersioningService } from '../services/workflow-versioning-service';
|
||||
|
||||
// Zod schema for the diff request
|
||||
const workflowDiffSchema = z.object({
|
||||
@@ -50,14 +48,9 @@ const workflowDiffSchema = z.object({
|
||||
})),
|
||||
validateOnly: z.boolean().optional(),
|
||||
continueOnError: z.boolean().optional(),
|
||||
createBackup: z.boolean().optional(),
|
||||
});
|
||||
|
||||
export async function handleUpdatePartialWorkflow(
|
||||
args: unknown,
|
||||
repository: NodeRepository,
|
||||
context?: InstanceContext
|
||||
): Promise<McpToolResponse> {
|
||||
export async function handleUpdatePartialWorkflow(args: unknown, context?: InstanceContext): Promise<McpToolResponse> {
|
||||
try {
|
||||
// Debug logging (only in debug mode)
|
||||
if (process.env.DEBUG_MCP === 'true') {
|
||||
@@ -95,31 +88,7 @@ export async function handleUpdatePartialWorkflow(
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Create backup before modifying workflow (default: true)
|
||||
if (input.createBackup !== false && !input.validateOnly) {
|
||||
try {
|
||||
const versioningService = new WorkflowVersioningService(repository, client);
|
||||
const backupResult = await versioningService.createBackup(input.id, workflow, {
|
||||
trigger: 'partial_update',
|
||||
operations: input.operations
|
||||
});
|
||||
|
||||
logger.info('Workflow backup created', {
|
||||
workflowId: input.id,
|
||||
versionId: backupResult.versionId,
|
||||
versionNumber: backupResult.versionNumber,
|
||||
pruned: backupResult.pruned
|
||||
});
|
||||
} catch (error: any) {
|
||||
logger.warn('Failed to create workflow backup', {
|
||||
workflowId: input.id,
|
||||
error: error.message
|
||||
});
|
||||
// Continue with update even if backup fails (non-blocking)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Apply diff operations
|
||||
const diffEngine = new WorkflowDiffEngine();
|
||||
const diffRequest = input as WorkflowDiffRequest;
|
||||
|
||||
@@ -1009,10 +1009,10 @@ export class N8NDocumentationMCPServer {
|
||||
return n8nHandlers.handleGetWorkflowMinimal(args, this.instanceContext);
|
||||
case 'n8n_update_full_workflow':
|
||||
this.validateToolParams(name, args, ['id']);
|
||||
return n8nHandlers.handleUpdateWorkflow(args, this.repository!, this.instanceContext);
|
||||
return n8nHandlers.handleUpdateWorkflow(args, this.instanceContext);
|
||||
case 'n8n_update_partial_workflow':
|
||||
this.validateToolParams(name, args, ['id', 'operations']);
|
||||
return handleUpdatePartialWorkflow(args, this.repository!, this.instanceContext);
|
||||
return handleUpdatePartialWorkflow(args, this.instanceContext);
|
||||
case 'n8n_delete_workflow':
|
||||
this.validateToolParams(name, args, ['id']);
|
||||
return n8nHandlers.handleDeleteWorkflow(args, this.instanceContext);
|
||||
@@ -1050,10 +1050,7 @@ export class N8NDocumentationMCPServer {
|
||||
case 'n8n_diagnostic':
|
||||
// No required parameters
|
||||
return n8nHandlers.handleDiagnostic({ params: { arguments: args } }, this.instanceContext);
|
||||
case 'n8n_workflow_versions':
|
||||
this.validateToolParams(name, args, ['mode']);
|
||||
return n8nHandlers.handleWorkflowVersions(args, this.repository!, this.instanceContext);
|
||||
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown tool: ${name}`);
|
||||
}
|
||||
@@ -1279,20 +1276,20 @@ export class N8NDocumentationMCPServer {
|
||||
try {
|
||||
// Use FTS5 with ranking
|
||||
const nodes = this.db.prepare(`
|
||||
SELECT
|
||||
SELECT
|
||||
n.*,
|
||||
rank
|
||||
FROM nodes n
|
||||
JOIN nodes_fts ON n.rowid = nodes_fts.rowid
|
||||
WHERE nodes_fts MATCH ?
|
||||
ORDER BY
|
||||
CASE
|
||||
WHEN LOWER(n.display_name) = LOWER(?) THEN 0
|
||||
WHEN LOWER(n.display_name) LIKE LOWER(?) THEN 1
|
||||
WHEN LOWER(n.node_type) LIKE LOWER(?) THEN 2
|
||||
ORDER BY
|
||||
rank,
|
||||
CASE
|
||||
WHEN n.display_name = ? THEN 0
|
||||
WHEN n.display_name LIKE ? THEN 1
|
||||
WHEN n.node_type LIKE ? THEN 2
|
||||
ELSE 3
|
||||
END,
|
||||
rank,
|
||||
n.display_name
|
||||
LIMIT ?
|
||||
`).all(ftsQuery, cleanedQuery, `%${cleanedQuery}%`, `%${cleanedQuery}%`, limit) as (NodeRow & { rank: number })[];
|
||||
|
||||
@@ -48,7 +48,7 @@ An n8n AI Agent workflow typically consists of:
|
||||
- Manages conversation flow
|
||||
- Decides when to use tools
|
||||
- Iterates until task is complete
|
||||
- Supports fallback models for reliability
|
||||
- Supports fallback models (v2.1+)
|
||||
|
||||
3. **Language Model**: The AI brain
|
||||
- OpenAI GPT-4, Claude, Gemini, etc.
|
||||
@@ -441,7 +441,7 @@ For real-time user experience:
|
||||
|
||||
### Pattern 2: Fallback Language Models
|
||||
|
||||
For production reliability with fallback language models:
|
||||
For production reliability (requires AI Agent v2.1+):
|
||||
|
||||
\`\`\`typescript
|
||||
n8n_update_partial_workflow({
|
||||
@@ -724,7 +724,7 @@ n8n_validate_workflow({id: "workflow_id"})
|
||||
'Always validate workflows after making changes',
|
||||
'AI connections require sourceOutput parameter',
|
||||
'Streaming mode has specific constraints',
|
||||
'Fallback models require AI Agent node with fallback support'
|
||||
'Some features require specific AI Agent versions (v2.1+ for fallback)'
|
||||
],
|
||||
relatedTools: [
|
||||
'n8n_create_workflow',
|
||||
|
||||
@@ -12,7 +12,7 @@ export const validateNodeOperationDoc: ToolDocumentation = {
|
||||
'Profile choices: minimal (editing), runtime (execution), ai-friendly (balanced), strict (deployment)',
|
||||
'Returns fixes you can apply directly',
|
||||
'Operation-aware - knows Slack post needs text',
|
||||
'Validates operator structures for IF and Switch nodes with conditions'
|
||||
'Validates operator structures for IF v2.2+ and Switch v3.2+ nodes'
|
||||
]
|
||||
},
|
||||
full: {
|
||||
@@ -90,7 +90,7 @@ export const validateNodeOperationDoc: ToolDocumentation = {
|
||||
'Fixes are suggestions - review before applying',
|
||||
'Profile affects what\'s validated - minimal skips many checks',
|
||||
'**Binary vs Unary operators**: Binary operators (equals, contains, greaterThan) must NOT have singleValue:true. Unary operators (isEmpty, isNotEmpty, true, false) REQUIRE singleValue:true',
|
||||
'**IF and Switch nodes with conditions**: Must have complete conditions.options structure: {version: 2, leftValue: "", caseSensitive: true/false, typeValidation: "strict"}',
|
||||
'**IF v2.2+ and Switch v3.2+ nodes**: Must have complete conditions.options structure: {version: 2, leftValue: "", caseSensitive: true/false, typeValidation: "strict"}',
|
||||
'**Operator type field**: Must be data type (string/number/boolean/dateTime/array/object), NOT operation name (e.g., use type:"string" operation:"equals", not type:"equals")'
|
||||
],
|
||||
relatedTools: ['validate_node_minimal for quick checks', 'get_node_essentials for valid examples', 'validate_workflow for complete workflow validation']
|
||||
|
||||
@@ -4,17 +4,15 @@ export const n8nAutofixWorkflowDoc: ToolDocumentation = {
|
||||
name: 'n8n_autofix_workflow',
|
||||
category: 'workflow_management',
|
||||
essentials: {
|
||||
description: 'Automatically fix common workflow validation errors - expression formats, typeVersions, error outputs, webhook paths, and smart version upgrades',
|
||||
description: 'Automatically fix common workflow validation errors - expression formats, typeVersions, error outputs, webhook paths',
|
||||
keyParameters: ['id', 'applyFixes'],
|
||||
example: 'n8n_autofix_workflow({id: "wf_abc123", applyFixes: false})',
|
||||
performance: 'Network-dependent (200-1500ms) - fetches, validates, and optionally updates workflow with smart migrations',
|
||||
performance: 'Network-dependent (200-1000ms) - fetches, validates, and optionally updates workflow',
|
||||
tips: [
|
||||
'Use applyFixes: false to preview changes before applying',
|
||||
'Set confidenceThreshold to control fix aggressiveness (high/medium/low)',
|
||||
'Supports expression formats, typeVersion issues, error outputs, node corrections, webhook paths, AND version upgrades',
|
||||
'High-confidence fixes (≥90%) are safe for auto-application',
|
||||
'Version upgrades include smart migration with breaking change detection',
|
||||
'Post-update guidance provides AI-friendly step-by-step instructions for manual changes'
|
||||
'Supports fixing expression formats, typeVersion issues, error outputs, node type corrections, and webhook paths',
|
||||
'High-confidence fixes (≥90%) are safe for auto-application'
|
||||
]
|
||||
},
|
||||
full: {
|
||||
@@ -41,20 +39,6 @@ The auto-fixer can resolve:
|
||||
- Sets both 'path' parameter and 'webhookId' field to the same UUID
|
||||
- Ensures webhook nodes become functional with valid endpoints
|
||||
- High confidence fix as UUID generation is deterministic
|
||||
6. **Smart Version Upgrades** (NEW): Proactively upgrades nodes to their latest versions:
|
||||
- Detects outdated node versions and recommends upgrades
|
||||
- Applies smart migrations with auto-migratable property changes
|
||||
- Handles breaking changes intelligently (Execute Workflow v1.0→v1.1, Webhook v2.0→v2.1, etc.)
|
||||
- Generates UUIDs for required fields (webhookId), sets sensible defaults
|
||||
- HIGH confidence for non-breaking upgrades, MEDIUM for breaking changes with auto-migration
|
||||
- Example: Execute Workflow v1.0→v1.1 adds inputFieldMapping automatically
|
||||
7. **Version Migration Guidance** (NEW): Documents complex migrations requiring manual intervention:
|
||||
- Identifies breaking changes that cannot be auto-migrated
|
||||
- Provides AI-friendly post-update guidance with step-by-step instructions
|
||||
- Lists required actions by priority (CRITICAL, HIGH, MEDIUM, LOW)
|
||||
- Documents behavior changes and their impact
|
||||
- Estimates time required for manual migration steps
|
||||
- MEDIUM/LOW confidence - requires review before applying
|
||||
|
||||
The tool uses a confidence-based system to ensure safe fixes:
|
||||
- **High (≥90%)**: Safe to auto-apply (exact matches, known patterns)
|
||||
@@ -76,7 +60,7 @@ Requires N8N_API_URL and N8N_API_KEY environment variables to be configured.`,
|
||||
fixTypes: {
|
||||
type: 'array',
|
||||
required: false,
|
||||
description: 'Types of fixes to apply. Options: ["expression-format", "typeversion-correction", "error-output-config", "node-type-correction", "webhook-missing-path", "typeversion-upgrade", "version-migration"]. Default: all types. NEW: "typeversion-upgrade" for smart version upgrades, "version-migration" for complex migration guidance.'
|
||||
description: 'Types of fixes to apply. Options: ["expression-format", "typeversion-correction", "error-output-config", "node-type-correction", "webhook-missing-path"]. Default: all types.'
|
||||
},
|
||||
confidenceThreshold: {
|
||||
type: 'string',
|
||||
@@ -94,21 +78,13 @@ Requires N8N_API_URL and N8N_API_KEY environment variables to be configured.`,
|
||||
- fixes: Detailed list of individual fixes with before/after values
|
||||
- summary: Human-readable summary of fixes
|
||||
- stats: Statistics by fix type and confidence level
|
||||
- applied: Boolean indicating if fixes were applied (when applyFixes: true)
|
||||
- postUpdateGuidance: (NEW) Array of AI-friendly migration guidance for version upgrades, including:
|
||||
* Required actions by priority (CRITICAL, HIGH, MEDIUM, LOW)
|
||||
* Deprecated properties to remove
|
||||
* Behavior changes and their impact
|
||||
* Step-by-step migration instructions
|
||||
* Estimated time for manual changes`,
|
||||
- applied: Boolean indicating if fixes were applied (when applyFixes: true)`,
|
||||
examples: [
|
||||
'n8n_autofix_workflow({id: "wf_abc123"}) - Preview all possible fixes including version upgrades',
|
||||
'n8n_autofix_workflow({id: "wf_abc123"}) - Preview all possible fixes',
|
||||
'n8n_autofix_workflow({id: "wf_abc123", applyFixes: true}) - Apply all medium+ confidence fixes',
|
||||
'n8n_autofix_workflow({id: "wf_abc123", applyFixes: true, confidenceThreshold: "high"}) - Only apply high-confidence fixes',
|
||||
'n8n_autofix_workflow({id: "wf_abc123", fixTypes: ["expression-format"]}) - Only fix expression format issues',
|
||||
'n8n_autofix_workflow({id: "wf_abc123", fixTypes: ["webhook-missing-path"]}) - Only fix webhook path issues',
|
||||
'n8n_autofix_workflow({id: "wf_abc123", fixTypes: ["typeversion-upgrade"]}) - NEW: Only upgrade node versions with smart migrations',
|
||||
'n8n_autofix_workflow({id: "wf_abc123", fixTypes: ["typeversion-upgrade", "version-migration"]}) - NEW: Upgrade versions and provide migration guidance',
|
||||
'n8n_autofix_workflow({id: "wf_abc123", applyFixes: true, maxFixes: 10}) - Apply up to 10 fixes'
|
||||
],
|
||||
useCases: [
|
||||
@@ -118,23 +94,16 @@ Requires N8N_API_URL and N8N_API_KEY environment variables to be configured.`,
|
||||
'Cleaning up workflows before production deployment',
|
||||
'Batch fixing common issues across multiple workflows',
|
||||
'Migrating workflows between n8n instances with different versions',
|
||||
'Repairing webhook nodes that lost their path configuration',
|
||||
'Upgrading Execute Workflow nodes from v1.0 to v1.1+ with automatic inputFieldMapping',
|
||||
'Modernizing webhook nodes to v2.1+ with stable webhookId fields',
|
||||
'Proactively keeping workflows up-to-date with latest node versions',
|
||||
'Getting detailed migration guidance for complex breaking changes'
|
||||
'Repairing webhook nodes that lost their path configuration'
|
||||
],
|
||||
performance: 'Depends on workflow size and number of issues. Preview mode: 200-500ms. Apply mode: 500-1500ms for medium workflows with version upgrades. Node similarity matching and version metadata are cached for 5 minutes for improved performance on repeated validations.',
|
||||
performance: 'Depends on workflow size and number of issues. Preview mode: 200-500ms. Apply mode: 500-1000ms for medium workflows. Node similarity matching is cached for 5 minutes for improved performance on repeated validations.',
|
||||
bestPractices: [
|
||||
'Always preview fixes first (applyFixes: false) before applying',
|
||||
'Start with high confidence threshold for production workflows',
|
||||
'Review the fix summary to understand what changed',
|
||||
'Test workflows after auto-fixing to ensure expected behavior',
|
||||
'Use fixTypes parameter to target specific issue categories',
|
||||
'Keep maxFixes reasonable to avoid too many changes at once',
|
||||
'NEW: Review postUpdateGuidance for version upgrades - contains step-by-step migration instructions',
|
||||
'NEW: Test workflows after version upgrades - behavior may change even with successful auto-migration',
|
||||
'NEW: Apply version upgrades incrementally - start with high-confidence, non-breaking upgrades'
|
||||
'Keep maxFixes reasonable to avoid too many changes at once'
|
||||
],
|
||||
pitfalls: [
|
||||
'Some fixes may change workflow behavior - always test after fixing',
|
||||
@@ -143,12 +112,7 @@ Requires N8N_API_URL and N8N_API_KEY environment variables to be configured.`,
|
||||
'Node type corrections only work for known node types in the database',
|
||||
'Cannot fix structural issues like missing nodes or invalid connections',
|
||||
'TypeVersion downgrades might remove node features added in newer versions',
|
||||
'Generated webhook paths are new UUIDs - existing webhook URLs will change',
|
||||
'NEW: Version upgrades may introduce breaking changes - review postUpdateGuidance carefully',
|
||||
'NEW: Auto-migrated properties use sensible defaults which may not match your use case',
|
||||
'NEW: Execute Workflow v1.1+ requires explicit inputFieldMapping - automatic mapping uses empty array',
|
||||
'NEW: Some breaking changes cannot be auto-migrated and require manual intervention',
|
||||
'NEW: Version history is based on registry - unknown nodes cannot be upgraded'
|
||||
'Generated webhook paths are new UUIDs - existing webhook URLs will change'
|
||||
],
|
||||
relatedTools: [
|
||||
'n8n_validate_workflow',
|
||||
|
||||
@@ -18,8 +18,7 @@ export const n8nUpdatePartialWorkflowDoc: ToolDocumentation = {
|
||||
'Validate with validateOnly first',
|
||||
'For AI connections, specify sourceOutput type (ai_languageModel, ai_tool, etc.)',
|
||||
'Batch AI component connections for atomic updates',
|
||||
'Auto-sanitization: ALL nodes auto-fixed during updates (operator structures, missing metadata)',
|
||||
'Node renames automatically update all connection references - no manual connection operations needed'
|
||||
'Auto-sanitization: ALL nodes auto-fixed during updates (operator structures, missing metadata)'
|
||||
]
|
||||
},
|
||||
full: {
|
||||
@@ -81,10 +80,6 @@ Full support for all 8 AI connection types used in n8n AI workflows:
|
||||
- Multiple tools: Batch multiple \`sourceOutput: "ai_tool"\` connections to one AI Agent
|
||||
- Vector retrieval: Chain ai_embedding → ai_vectorStore → ai_tool → AI Agent
|
||||
|
||||
**Important Notes**:
|
||||
- **AI nodes do NOT require main connections**: Nodes like OpenAI Chat Model, Postgres Chat Memory, Embeddings OpenAI, and Supabase Vector Store use AI-specific connection types exclusively. They should ONLY have connections like \`ai_languageModel\`, \`ai_memory\`, \`ai_embedding\`, or \`ai_tool\` - NOT \`main\` connections.
|
||||
- **Fixed in v2.21.1**: Validation now correctly recognizes AI nodes that only have AI-specific connections without requiring \`main\` connections (resolves issue #357).
|
||||
|
||||
**Best Practices**:
|
||||
- Always specify \`sourceOutput\` for AI connections (defaults to "main" if omitted)
|
||||
- Connect language model BEFORE creating/enabling AI Agent (validation requirement)
|
||||
@@ -113,8 +108,8 @@ When ANY workflow update is made, ALL nodes in the workflow are automatically sa
|
||||
- Invalid operator structures (e.g., \`{type: "isNotEmpty"}\`) are corrected to \`{type: "boolean", operation: "isNotEmpty"}\`
|
||||
|
||||
2. **Missing Metadata Added**:
|
||||
- IF nodes with conditions get complete \`conditions.options\` structure if missing
|
||||
- Switch nodes with conditions get complete \`conditions.options\` for all rules
|
||||
- IF v2.2+ nodes get complete \`conditions.options\` structure if missing
|
||||
- Switch v3.2+ nodes get complete \`conditions.options\` for all rules
|
||||
- Required fields: \`{version: 2, leftValue: "", caseSensitive: true, typeValidation: "strict"}\`
|
||||
|
||||
### Sanitization Scope
|
||||
@@ -134,59 +129,7 @@ If validation still fails after auto-sanitization:
|
||||
2. Use \`validate_workflow\` to see all validation errors
|
||||
3. For connection issues, use \`cleanStaleConnections\` operation
|
||||
4. For branch mismatches, add missing output connections
|
||||
5. For paradoxical corrupted workflows, create new workflow and migrate nodes
|
||||
|
||||
## Automatic Connection Reference Updates
|
||||
|
||||
When you rename a node using **updateNode**, all connection references throughout the workflow are automatically updated. Both the connection source keys and target references are updated for all connection types (main, error, ai_tool, ai_languageModel, ai_memory, etc.) and all branch configurations (IF node branches, Switch node cases, error outputs).
|
||||
|
||||
### Basic Example
|
||||
\`\`\`javascript
|
||||
// Rename a node - connections update automatically
|
||||
n8n_update_partial_workflow({
|
||||
id: "wf_123",
|
||||
operations: [{
|
||||
type: "updateNode",
|
||||
nodeId: "node_abc",
|
||||
updates: { name: "Data Processor" }
|
||||
}]
|
||||
});
|
||||
// All incoming and outgoing connections now reference "Data Processor"
|
||||
\`\`\`
|
||||
|
||||
### Multi-Output Node Example
|
||||
\`\`\`javascript
|
||||
// Rename nodes in a branching workflow
|
||||
n8n_update_partial_workflow({
|
||||
id: "workflow_id",
|
||||
operations: [
|
||||
{
|
||||
type: "updateNode",
|
||||
nodeId: "if_node_id",
|
||||
updates: { name: "Value Checker" }
|
||||
},
|
||||
{
|
||||
type: "updateNode",
|
||||
nodeId: "error_node_id",
|
||||
updates: { name: "Error Handler" }
|
||||
}
|
||||
]
|
||||
});
|
||||
// IF node branches and error connections automatically updated
|
||||
\`\`\`
|
||||
|
||||
### Name Collision Protection
|
||||
Attempting to rename a node to an existing name returns a clear error:
|
||||
\`\`\`
|
||||
Cannot rename node "Old Name" to "New Name": A node with that name already exists (id: abc123...).
|
||||
Please choose a different name.
|
||||
\`\`\`
|
||||
|
||||
### Usage Notes
|
||||
- Simply rename nodes with updateNode - no manual connection operations needed
|
||||
- Multiple renames in one call work atomically
|
||||
- Can rename a node and add/remove connections using the new name in the same batch
|
||||
- Use \`validateOnly: true\` to preview effects before applying`,
|
||||
5. For paradoxical corrupted workflows, create new workflow and migrate nodes`,
|
||||
parameters: {
|
||||
id: { type: 'string', required: true, description: 'Workflow ID to update' },
|
||||
operations: {
|
||||
@@ -219,7 +162,7 @@ Please choose a different name.
|
||||
'// Connect memory to AI Agent\nn8n_update_partial_workflow({id: "ai3", operations: [{type: "addConnection", source: "Window Buffer Memory", target: "AI Agent", sourceOutput: "ai_memory"}]})',
|
||||
'// Connect output parser to AI Agent\nn8n_update_partial_workflow({id: "ai4", operations: [{type: "addConnection", source: "Structured Output Parser", target: "AI Agent", sourceOutput: "ai_outputParser"}]})',
|
||||
'// Complete AI Agent setup: Add language model, tools, and memory\nn8n_update_partial_workflow({id: "ai5", operations: [\n {type: "addConnection", source: "OpenAI Chat Model", target: "AI Agent", sourceOutput: "ai_languageModel"},\n {type: "addConnection", source: "HTTP Request Tool", target: "AI Agent", sourceOutput: "ai_tool"},\n {type: "addConnection", source: "Code Tool", target: "AI Agent", sourceOutput: "ai_tool"},\n {type: "addConnection", source: "Window Buffer Memory", target: "AI Agent", sourceOutput: "ai_memory"}\n]})',
|
||||
'// Add fallback model to AI Agent for reliability\nn8n_update_partial_workflow({id: "ai6", operations: [\n {type: "addConnection", source: "OpenAI Chat Model", target: "AI Agent", sourceOutput: "ai_languageModel", targetIndex: 0},\n {type: "addConnection", source: "Anthropic Chat Model", target: "AI Agent", sourceOutput: "ai_languageModel", targetIndex: 1}\n]})',
|
||||
'// Add fallback model to AI Agent (requires v2.1+)\nn8n_update_partial_workflow({id: "ai6", operations: [\n {type: "addConnection", source: "OpenAI Chat Model", target: "AI Agent", sourceOutput: "ai_languageModel", targetIndex: 0},\n {type: "addConnection", source: "Anthropic Chat Model", target: "AI Agent", sourceOutput: "ai_languageModel", targetIndex: 1}\n]})',
|
||||
'// Vector Store setup: Connect embeddings and documents\nn8n_update_partial_workflow({id: "ai7", operations: [\n {type: "addConnection", source: "Embeddings OpenAI", target: "Pinecone Vector Store", sourceOutput: "ai_embedding"},\n {type: "addConnection", source: "Default Data Loader", target: "Pinecone Vector Store", sourceOutput: "ai_document"}\n]})',
|
||||
'// Connect Vector Store Tool to AI Agent (retrieval setup)\nn8n_update_partial_workflow({id: "ai8", operations: [\n {type: "addConnection", source: "Pinecone Vector Store", target: "Vector Store Tool", sourceOutput: "ai_vectorStore"},\n {type: "addConnection", source: "Vector Store Tool", target: "AI Agent", sourceOutput: "ai_tool"}\n]})',
|
||||
'// Rewire AI Agent to use different language model\nn8n_update_partial_workflow({id: "ai9", operations: [{type: "rewireConnection", source: "AI Agent", from: "OpenAI Chat Model", to: "Anthropic Chat Model", sourceOutput: "ai_languageModel"}]})',
|
||||
|
||||
@@ -293,7 +293,7 @@ export const n8nManagementTools: ToolDefinition[] = [
|
||||
description: 'Types of fixes to apply (default: all)',
|
||||
items: {
|
||||
type: 'string',
|
||||
enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path', 'typeversion-upgrade', 'version-migration']
|
||||
enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path']
|
||||
}
|
||||
},
|
||||
confidenceThreshold: {
|
||||
@@ -462,59 +462,5 @@ Examples:
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'n8n_workflow_versions',
|
||||
description: `Manage workflow version history, rollback, and cleanup. Six modes:
|
||||
- list: Show version history for a workflow
|
||||
- get: Get details of specific version
|
||||
- rollback: Restore workflow to previous version (creates backup first)
|
||||
- delete: Delete specific version or all versions for a workflow
|
||||
- prune: Manually trigger pruning to keep N most recent versions
|
||||
- truncate: Delete ALL versions for ALL workflows (requires confirmation)`,
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
mode: {
|
||||
type: 'string',
|
||||
enum: ['list', 'get', 'rollback', 'delete', 'prune', 'truncate'],
|
||||
description: 'Operation mode'
|
||||
},
|
||||
workflowId: {
|
||||
type: 'string',
|
||||
description: 'Workflow ID (required for list, rollback, delete, prune)'
|
||||
},
|
||||
versionId: {
|
||||
type: 'number',
|
||||
description: 'Version ID (required for get mode and single version delete, optional for rollback)'
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
default: 10,
|
||||
description: 'Max versions to return in list mode'
|
||||
},
|
||||
validateBefore: {
|
||||
type: 'boolean',
|
||||
default: true,
|
||||
description: 'Validate workflow structure before rollback'
|
||||
},
|
||||
deleteAll: {
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Delete all versions for workflow (delete mode only)'
|
||||
},
|
||||
maxVersions: {
|
||||
type: 'number',
|
||||
default: 10,
|
||||
description: 'Keep N most recent versions (prune mode only)'
|
||||
},
|
||||
confirmTruncate: {
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'REQUIRED: Must be true to truncate all versions (truncate mode only)'
|
||||
}
|
||||
},
|
||||
required: ['mode']
|
||||
}
|
||||
}
|
||||
];
|
||||
@@ -164,7 +164,7 @@ async function testAutofix() {
|
||||
// Step 3: Generate fixes in preview mode
|
||||
logger.info('\nStep 3: Generating fixes (preview mode)...');
|
||||
const autoFixer = new WorkflowAutoFixer();
|
||||
const previewResult = await autoFixer.generateFixes(
|
||||
const previewResult = autoFixer.generateFixes(
|
||||
testWorkflow as any,
|
||||
validationResult,
|
||||
allFormatIssues,
|
||||
@@ -210,7 +210,7 @@ async function testAutofix() {
|
||||
logger.info('\n\n=== Testing Different Confidence Thresholds ===');
|
||||
|
||||
for (const threshold of ['high', 'medium', 'low'] as const) {
|
||||
const result = await autoFixer.generateFixes(
|
||||
const result = autoFixer.generateFixes(
|
||||
testWorkflow as any,
|
||||
validationResult,
|
||||
allFormatIssues,
|
||||
@@ -227,7 +227,7 @@ async function testAutofix() {
|
||||
|
||||
const fixTypes = ['expression-format', 'typeversion-correction', 'error-output-config'] as const;
|
||||
for (const fixType of fixTypes) {
|
||||
const result = await autoFixer.generateFixes(
|
||||
const result = autoFixer.generateFixes(
|
||||
testWorkflow as any,
|
||||
validationResult,
|
||||
allFormatIssues,
|
||||
|
||||
@@ -173,7 +173,7 @@ async function testNodeSimilarity() {
|
||||
console.log('='.repeat(60));
|
||||
|
||||
const autoFixer = new WorkflowAutoFixer(repository);
|
||||
const fixResult = await autoFixer.generateFixes(
|
||||
const fixResult = autoFixer.generateFixes(
|
||||
testWorkflow as any,
|
||||
validationResult,
|
||||
[],
|
||||
|
||||
@@ -87,7 +87,7 @@ async function testWebhookAutofix() {
|
||||
// Step 2: Generate fixes (preview mode)
|
||||
logger.info('\nStep 2: Generating fixes in preview mode...');
|
||||
|
||||
const fixResult = await autoFixer.generateFixes(
|
||||
const fixResult = autoFixer.generateFixes(
|
||||
testWorkflow,
|
||||
validationResult,
|
||||
[], // No expression format issues to pass
|
||||
|
||||
@@ -1,321 +0,0 @@
|
||||
/**
|
||||
* Breaking Change Detector
|
||||
*
|
||||
* Detects breaking changes between node versions by:
|
||||
* 1. Consulting the hardcoded breaking changes registry
|
||||
* 2. Dynamically comparing property schemas between versions
|
||||
* 3. Analyzing property requirement changes
|
||||
*
|
||||
* Used by the autofixer to intelligently upgrade node versions.
|
||||
*/
|
||||
|
||||
import { NodeRepository } from '../database/node-repository';
|
||||
import {
|
||||
BREAKING_CHANGES_REGISTRY,
|
||||
BreakingChange,
|
||||
getBreakingChangesForNode,
|
||||
getAllChangesForNode
|
||||
} from './breaking-changes-registry';
|
||||
|
||||
export interface DetectedChange {
|
||||
propertyName: string;
|
||||
changeType: 'added' | 'removed' | 'renamed' | 'type_changed' | 'requirement_changed' | 'default_changed';
|
||||
isBreaking: boolean;
|
||||
oldValue?: any;
|
||||
newValue?: any;
|
||||
migrationHint: string;
|
||||
autoMigratable: boolean;
|
||||
migrationStrategy?: any;
|
||||
severity: 'LOW' | 'MEDIUM' | 'HIGH';
|
||||
source: 'registry' | 'dynamic'; // Where this change was detected
|
||||
}
|
||||
|
||||
export interface VersionUpgradeAnalysis {
|
||||
nodeType: string;
|
||||
fromVersion: string;
|
||||
toVersion: string;
|
||||
hasBreakingChanges: boolean;
|
||||
changes: DetectedChange[];
|
||||
autoMigratableCount: number;
|
||||
manualRequiredCount: number;
|
||||
overallSeverity: 'LOW' | 'MEDIUM' | 'HIGH';
|
||||
recommendations: string[];
|
||||
}
|
||||
|
||||
export class BreakingChangeDetector {
|
||||
constructor(private nodeRepository: NodeRepository) {}
|
||||
|
||||
/**
|
||||
* Analyze a version upgrade and detect all changes
|
||||
*/
|
||||
async analyzeVersionUpgrade(
|
||||
nodeType: string,
|
||||
fromVersion: string,
|
||||
toVersion: string
|
||||
): Promise<VersionUpgradeAnalysis> {
|
||||
// Get changes from registry
|
||||
const registryChanges = this.getRegistryChanges(nodeType, fromVersion, toVersion);
|
||||
|
||||
// Get dynamic changes by comparing schemas
|
||||
const dynamicChanges = this.detectDynamicChanges(nodeType, fromVersion, toVersion);
|
||||
|
||||
// Merge and deduplicate changes
|
||||
const allChanges = this.mergeChanges(registryChanges, dynamicChanges);
|
||||
|
||||
// Calculate statistics
|
||||
const hasBreakingChanges = allChanges.some(c => c.isBreaking);
|
||||
const autoMigratableCount = allChanges.filter(c => c.autoMigratable).length;
|
||||
const manualRequiredCount = allChanges.filter(c => !c.autoMigratable).length;
|
||||
|
||||
// Determine overall severity
|
||||
const overallSeverity = this.calculateOverallSeverity(allChanges);
|
||||
|
||||
// Generate recommendations
|
||||
const recommendations = this.generateRecommendations(allChanges);
|
||||
|
||||
return {
|
||||
nodeType,
|
||||
fromVersion,
|
||||
toVersion,
|
||||
hasBreakingChanges,
|
||||
changes: allChanges,
|
||||
autoMigratableCount,
|
||||
manualRequiredCount,
|
||||
overallSeverity,
|
||||
recommendations
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get changes from the hardcoded registry
|
||||
*/
|
||||
private getRegistryChanges(
|
||||
nodeType: string,
|
||||
fromVersion: string,
|
||||
toVersion: string
|
||||
): DetectedChange[] {
|
||||
const registryChanges = getAllChangesForNode(nodeType, fromVersion, toVersion);
|
||||
|
||||
return registryChanges.map(change => ({
|
||||
propertyName: change.propertyName,
|
||||
changeType: change.changeType,
|
||||
isBreaking: change.isBreaking,
|
||||
oldValue: change.oldValue,
|
||||
newValue: change.newValue,
|
||||
migrationHint: change.migrationHint,
|
||||
autoMigratable: change.autoMigratable,
|
||||
migrationStrategy: change.migrationStrategy,
|
||||
severity: change.severity,
|
||||
source: 'registry' as const
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Dynamically detect changes by comparing property schemas
|
||||
*/
|
||||
private detectDynamicChanges(
|
||||
nodeType: string,
|
||||
fromVersion: string,
|
||||
toVersion: string
|
||||
): DetectedChange[] {
|
||||
// Get both versions from the database
|
||||
const oldVersionData = this.nodeRepository.getNodeVersion(nodeType, fromVersion);
|
||||
const newVersionData = this.nodeRepository.getNodeVersion(nodeType, toVersion);
|
||||
|
||||
if (!oldVersionData || !newVersionData) {
|
||||
return []; // Can't detect dynamic changes without version data
|
||||
}
|
||||
|
||||
const changes: DetectedChange[] = [];
|
||||
|
||||
// Compare properties schemas
|
||||
const oldProps = this.flattenProperties(oldVersionData.propertiesSchema || []);
|
||||
const newProps = this.flattenProperties(newVersionData.propertiesSchema || []);
|
||||
|
||||
// Detect added properties
|
||||
for (const propName of Object.keys(newProps)) {
|
||||
if (!oldProps[propName]) {
|
||||
const prop = newProps[propName];
|
||||
const isRequired = prop.required === true;
|
||||
|
||||
changes.push({
|
||||
propertyName: propName,
|
||||
changeType: 'added',
|
||||
isBreaking: isRequired, // Breaking if required
|
||||
newValue: prop.type || 'unknown',
|
||||
migrationHint: isRequired
|
||||
? `Property "${propName}" is now required in v${toVersion}. Provide a value to prevent validation errors.`
|
||||
: `Property "${propName}" was added in v${toVersion}. Optional parameter, safe to ignore if not needed.`,
|
||||
autoMigratable: !isRequired, // Can auto-add with default if not required
|
||||
migrationStrategy: !isRequired
|
||||
? {
|
||||
type: 'add_property',
|
||||
defaultValue: prop.default || null
|
||||
}
|
||||
: undefined,
|
||||
severity: isRequired ? 'HIGH' : 'LOW',
|
||||
source: 'dynamic'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Detect removed properties
|
||||
for (const propName of Object.keys(oldProps)) {
|
||||
if (!newProps[propName]) {
|
||||
changes.push({
|
||||
propertyName: propName,
|
||||
changeType: 'removed',
|
||||
isBreaking: true, // Removal is always breaking
|
||||
oldValue: oldProps[propName].type || 'unknown',
|
||||
migrationHint: `Property "${propName}" was removed in v${toVersion}. Remove this property from your configuration.`,
|
||||
autoMigratable: true, // Can auto-remove
|
||||
migrationStrategy: {
|
||||
type: 'remove_property'
|
||||
},
|
||||
severity: 'MEDIUM',
|
||||
source: 'dynamic'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Detect requirement changes
|
||||
for (const propName of Object.keys(newProps)) {
|
||||
if (oldProps[propName]) {
|
||||
const oldRequired = oldProps[propName].required === true;
|
||||
const newRequired = newProps[propName].required === true;
|
||||
|
||||
if (oldRequired !== newRequired) {
|
||||
changes.push({
|
||||
propertyName: propName,
|
||||
changeType: 'requirement_changed',
|
||||
isBreaking: newRequired && !oldRequired, // Breaking if became required
|
||||
oldValue: oldRequired ? 'required' : 'optional',
|
||||
newValue: newRequired ? 'required' : 'optional',
|
||||
migrationHint: newRequired
|
||||
? `Property "${propName}" is now required in v${toVersion}. Ensure a value is provided.`
|
||||
: `Property "${propName}" is now optional in v${toVersion}.`,
|
||||
autoMigratable: false, // Requirement changes need manual review
|
||||
severity: newRequired ? 'HIGH' : 'LOW',
|
||||
source: 'dynamic'
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return changes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Flatten nested properties into a map for easy comparison
|
||||
*/
|
||||
private flattenProperties(properties: any[], prefix: string = ''): Record<string, any> {
|
||||
const flat: Record<string, any> = {};
|
||||
|
||||
for (const prop of properties) {
|
||||
if (!prop.name && !prop.displayName) continue;
|
||||
|
||||
const propName = prop.name || prop.displayName;
|
||||
const fullPath = prefix ? `${prefix}.${propName}` : propName;
|
||||
|
||||
flat[fullPath] = prop;
|
||||
|
||||
// Recursively flatten nested options
|
||||
if (prop.options && Array.isArray(prop.options)) {
|
||||
Object.assign(flat, this.flattenProperties(prop.options, fullPath));
|
||||
}
|
||||
}
|
||||
|
||||
return flat;
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge registry and dynamic changes, avoiding duplicates
|
||||
*/
|
||||
private mergeChanges(
|
||||
registryChanges: DetectedChange[],
|
||||
dynamicChanges: DetectedChange[]
|
||||
): DetectedChange[] {
|
||||
const merged = [...registryChanges];
|
||||
|
||||
// Add dynamic changes that aren't already in registry
|
||||
for (const dynamicChange of dynamicChanges) {
|
||||
const existsInRegistry = registryChanges.some(
|
||||
rc => rc.propertyName === dynamicChange.propertyName &&
|
||||
rc.changeType === dynamicChange.changeType
|
||||
);
|
||||
|
||||
if (!existsInRegistry) {
|
||||
merged.push(dynamicChange);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by severity (HIGH -> MEDIUM -> LOW)
|
||||
const severityOrder = { HIGH: 0, MEDIUM: 1, LOW: 2 };
|
||||
merged.sort((a, b) => severityOrder[a.severity] - severityOrder[b.severity]);
|
||||
|
||||
return merged;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate overall severity of the upgrade
|
||||
*/
|
||||
private calculateOverallSeverity(changes: DetectedChange[]): 'LOW' | 'MEDIUM' | 'HIGH' {
|
||||
if (changes.some(c => c.severity === 'HIGH')) return 'HIGH';
|
||||
if (changes.some(c => c.severity === 'MEDIUM')) return 'MEDIUM';
|
||||
return 'LOW';
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate actionable recommendations for the upgrade
|
||||
*/
|
||||
private generateRecommendations(changes: DetectedChange[]): string[] {
|
||||
const recommendations: string[] = [];
|
||||
|
||||
const breakingChanges = changes.filter(c => c.isBreaking);
|
||||
const autoMigratable = changes.filter(c => c.autoMigratable);
|
||||
const manualRequired = changes.filter(c => !c.autoMigratable);
|
||||
|
||||
if (breakingChanges.length === 0) {
|
||||
recommendations.push('✓ No breaking changes detected. This upgrade should be safe.');
|
||||
} else {
|
||||
recommendations.push(
|
||||
`⚠ ${breakingChanges.length} breaking change(s) detected. Review carefully before applying.`
|
||||
);
|
||||
}
|
||||
|
||||
if (autoMigratable.length > 0) {
|
||||
recommendations.push(
|
||||
`✓ ${autoMigratable.length} change(s) can be automatically migrated.`
|
||||
);
|
||||
}
|
||||
|
||||
if (manualRequired.length > 0) {
|
||||
recommendations.push(
|
||||
`✋ ${manualRequired.length} change(s) require manual intervention.`
|
||||
);
|
||||
|
||||
// List specific manual changes
|
||||
for (const change of manualRequired) {
|
||||
recommendations.push(` - ${change.propertyName}: ${change.migrationHint}`);
|
||||
}
|
||||
}
|
||||
|
||||
return recommendations;
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick check: does this upgrade have breaking changes?
|
||||
*/
|
||||
hasBreakingChanges(nodeType: string, fromVersion: string, toVersion: string): boolean {
|
||||
const registryChanges = getBreakingChangesForNode(nodeType, fromVersion, toVersion);
|
||||
return registryChanges.length > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get simple list of property names that changed
|
||||
*/
|
||||
getChangedProperties(nodeType: string, fromVersion: string, toVersion: string): string[] {
|
||||
const registryChanges = getAllChangesForNode(nodeType, fromVersion, toVersion);
|
||||
return registryChanges.map(c => c.propertyName);
|
||||
}
|
||||
}
|
||||
@@ -1,315 +0,0 @@
|
||||
/**
|
||||
* Breaking Changes Registry
|
||||
*
|
||||
* Central registry of known breaking changes between node versions.
|
||||
* Used by the autofixer to detect and migrate version upgrades intelligently.
|
||||
*
|
||||
* Each entry defines:
|
||||
* - Which versions are affected
|
||||
* - What properties changed
|
||||
* - Whether it's auto-migratable
|
||||
* - Migration strategies and hints
|
||||
*/
|
||||
|
||||
export interface BreakingChange {
|
||||
nodeType: string;
|
||||
fromVersion: string;
|
||||
toVersion: string;
|
||||
propertyName: string;
|
||||
changeType: 'added' | 'removed' | 'renamed' | 'type_changed' | 'requirement_changed' | 'default_changed';
|
||||
isBreaking: boolean;
|
||||
oldValue?: string;
|
||||
newValue?: string;
|
||||
migrationHint: string;
|
||||
autoMigratable: boolean;
|
||||
migrationStrategy?: {
|
||||
type: 'add_property' | 'remove_property' | 'rename_property' | 'set_default';
|
||||
defaultValue?: any;
|
||||
sourceProperty?: string;
|
||||
targetProperty?: string;
|
||||
};
|
||||
severity: 'LOW' | 'MEDIUM' | 'HIGH';
|
||||
}
|
||||
|
||||
/**
|
||||
* Registry of known breaking changes across all n8n nodes
|
||||
*/
|
||||
export const BREAKING_CHANGES_REGISTRY: BreakingChange[] = [
|
||||
// ==========================================
|
||||
// Execute Workflow Node
|
||||
// ==========================================
|
||||
{
|
||||
nodeType: 'n8n-nodes-base.executeWorkflow',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '1.1',
|
||||
propertyName: 'parameters.inputFieldMapping',
|
||||
changeType: 'added',
|
||||
isBreaking: true,
|
||||
migrationHint: 'In v1.1+, the Execute Workflow node requires explicit field mapping to pass data to sub-workflows. Add an "inputFieldMapping" object with "mappings" array defining how to map fields from parent to child workflow.',
|
||||
autoMigratable: true,
|
||||
migrationStrategy: {
|
||||
type: 'add_property',
|
||||
defaultValue: {
|
||||
mappings: []
|
||||
}
|
||||
},
|
||||
severity: 'HIGH'
|
||||
},
|
||||
{
|
||||
nodeType: 'n8n-nodes-base.executeWorkflow',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '1.1',
|
||||
propertyName: 'parameters.mode',
|
||||
changeType: 'requirement_changed',
|
||||
isBreaking: false,
|
||||
migrationHint: 'The "mode" parameter behavior changed in v1.1. Default is now "static" instead of "list". Ensure your workflow ID specification matches the selected mode.',
|
||||
autoMigratable: false,
|
||||
severity: 'MEDIUM'
|
||||
},
|
||||
|
||||
// ==========================================
|
||||
// Webhook Node
|
||||
// ==========================================
|
||||
{
|
||||
nodeType: 'n8n-nodes-base.webhook',
|
||||
fromVersion: '2.0',
|
||||
toVersion: '2.1',
|
||||
propertyName: 'webhookId',
|
||||
changeType: 'added',
|
||||
isBreaking: true,
|
||||
migrationHint: 'In v2.1+, webhooks require a unique "webhookId" field in addition to the path. This ensures webhook persistence across workflow updates. A UUID will be auto-generated if not provided.',
|
||||
autoMigratable: true,
|
||||
migrationStrategy: {
|
||||
type: 'add_property',
|
||||
defaultValue: null // Will be generated as UUID at runtime
|
||||
},
|
||||
severity: 'HIGH'
|
||||
},
|
||||
{
|
||||
nodeType: 'n8n-nodes-base.webhook',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'parameters.path',
|
||||
changeType: 'requirement_changed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'In v2.0+, the webhook path must be explicitly defined and cannot be empty. Ensure a valid path is set.',
|
||||
autoMigratable: false,
|
||||
severity: 'HIGH'
|
||||
},
|
||||
{
|
||||
nodeType: 'n8n-nodes-base.webhook',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'parameters.responseMode',
|
||||
changeType: 'added',
|
||||
isBreaking: false,
|
||||
migrationHint: 'v2.0 introduces a "responseMode" parameter to control how the webhook responds. Default is "onReceived" (immediate response). Use "lastNode" to wait for workflow completion.',
|
||||
autoMigratable: true,
|
||||
migrationStrategy: {
|
||||
type: 'add_property',
|
||||
defaultValue: 'onReceived'
|
||||
},
|
||||
severity: 'LOW'
|
||||
},
|
||||
|
||||
// ==========================================
|
||||
// HTTP Request Node
|
||||
// ==========================================
|
||||
{
|
||||
nodeType: 'n8n-nodes-base.httpRequest',
|
||||
fromVersion: '4.1',
|
||||
toVersion: '4.2',
|
||||
propertyName: 'parameters.sendBody',
|
||||
changeType: 'requirement_changed',
|
||||
isBreaking: false,
|
||||
migrationHint: 'In v4.2+, "sendBody" must be explicitly set to true for POST/PUT/PATCH requests to include a body. Previous versions had implicit body sending.',
|
||||
autoMigratable: true,
|
||||
migrationStrategy: {
|
||||
type: 'add_property',
|
||||
defaultValue: true
|
||||
},
|
||||
severity: 'MEDIUM'
|
||||
},
|
||||
|
||||
// ==========================================
|
||||
// Code Node (JavaScript)
|
||||
// ==========================================
|
||||
{
|
||||
nodeType: 'n8n-nodes-base.code',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'parameters.mode',
|
||||
changeType: 'added',
|
||||
isBreaking: false,
|
||||
migrationHint: 'v2.0 introduces execution modes: "runOnceForAllItems" (default) and "runOnceForEachItem". The default mode processes all items at once, which may differ from v1.0 behavior.',
|
||||
autoMigratable: true,
|
||||
migrationStrategy: {
|
||||
type: 'add_property',
|
||||
defaultValue: 'runOnceForAllItems'
|
||||
},
|
||||
severity: 'MEDIUM'
|
||||
},
|
||||
|
||||
// ==========================================
|
||||
// Schedule Trigger Node
|
||||
// ==========================================
|
||||
{
|
||||
nodeType: 'n8n-nodes-base.scheduleTrigger',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '1.1',
|
||||
propertyName: 'parameters.rule.interval',
|
||||
changeType: 'type_changed',
|
||||
isBreaking: true,
|
||||
oldValue: 'string',
|
||||
newValue: 'array',
|
||||
migrationHint: 'In v1.1+, the interval parameter changed from a single string to an array of interval objects. Convert your single interval to an array format: [{field: "hours", value: 1}]',
|
||||
autoMigratable: false,
|
||||
severity: 'HIGH'
|
||||
},
|
||||
|
||||
// ==========================================
|
||||
// Error Handling (Global Change)
|
||||
// ==========================================
|
||||
{
|
||||
nodeType: '*', // Applies to all nodes
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'continueOnFail',
|
||||
changeType: 'removed',
|
||||
isBreaking: false,
|
||||
migrationHint: 'The "continueOnFail" property is deprecated. Use "onError" instead with value "continueErrorOutput" or "continueRegularOutput".',
|
||||
autoMigratable: true,
|
||||
migrationStrategy: {
|
||||
type: 'rename_property',
|
||||
sourceProperty: 'continueOnFail',
|
||||
targetProperty: 'onError',
|
||||
defaultValue: 'continueErrorOutput'
|
||||
},
|
||||
severity: 'MEDIUM'
|
||||
}
|
||||
];
|
||||
|
||||
/**
|
||||
* Get breaking changes for a specific node type and version upgrade
|
||||
*/
|
||||
export function getBreakingChangesForNode(
|
||||
nodeType: string,
|
||||
fromVersion: string,
|
||||
toVersion: string
|
||||
): BreakingChange[] {
|
||||
return BREAKING_CHANGES_REGISTRY.filter(change => {
|
||||
// Match exact node type or wildcard (*)
|
||||
const nodeMatches = change.nodeType === nodeType || change.nodeType === '*';
|
||||
|
||||
// Check if version range matches
|
||||
const versionMatches =
|
||||
compareVersions(fromVersion, change.fromVersion) >= 0 &&
|
||||
compareVersions(toVersion, change.toVersion) <= 0;
|
||||
|
||||
return nodeMatches && versionMatches && change.isBreaking;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all changes (breaking and non-breaking) for a version upgrade
|
||||
*/
|
||||
export function getAllChangesForNode(
|
||||
nodeType: string,
|
||||
fromVersion: string,
|
||||
toVersion: string
|
||||
): BreakingChange[] {
|
||||
return BREAKING_CHANGES_REGISTRY.filter(change => {
|
||||
const nodeMatches = change.nodeType === nodeType || change.nodeType === '*';
|
||||
const versionMatches =
|
||||
compareVersions(fromVersion, change.fromVersion) >= 0 &&
|
||||
compareVersions(toVersion, change.toVersion) <= 0;
|
||||
|
||||
return nodeMatches && versionMatches;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get auto-migratable changes for a version upgrade
|
||||
*/
|
||||
export function getAutoMigratableChanges(
|
||||
nodeType: string,
|
||||
fromVersion: string,
|
||||
toVersion: string
|
||||
): BreakingChange[] {
|
||||
return getAllChangesForNode(nodeType, fromVersion, toVersion).filter(
|
||||
change => change.autoMigratable
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a specific node has known breaking changes for a version upgrade
|
||||
*/
|
||||
export function hasBreakingChanges(
|
||||
nodeType: string,
|
||||
fromVersion: string,
|
||||
toVersion: string
|
||||
): boolean {
|
||||
return getBreakingChangesForNode(nodeType, fromVersion, toVersion).length > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get migration hints for a version upgrade
|
||||
*/
|
||||
export function getMigrationHints(
|
||||
nodeType: string,
|
||||
fromVersion: string,
|
||||
toVersion: string
|
||||
): string[] {
|
||||
const changes = getAllChangesForNode(nodeType, fromVersion, toVersion);
|
||||
return changes.map(change => change.migrationHint);
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple version comparison
|
||||
* Returns: -1 if v1 < v2, 0 if equal, 1 if v1 > v2
|
||||
*/
|
||||
function compareVersions(v1: string, v2: string): number {
|
||||
const parts1 = v1.split('.').map(Number);
|
||||
const parts2 = v2.split('.').map(Number);
|
||||
|
||||
for (let i = 0; i < Math.max(parts1.length, parts2.length); i++) {
|
||||
const p1 = parts1[i] || 0;
|
||||
const p2 = parts2[i] || 0;
|
||||
|
||||
if (p1 < p2) return -1;
|
||||
if (p1 > p2) return 1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get nodes with known version migrations
|
||||
*/
|
||||
export function getNodesWithVersionMigrations(): string[] {
|
||||
const nodeTypes = new Set<string>();
|
||||
|
||||
BREAKING_CHANGES_REGISTRY.forEach(change => {
|
||||
if (change.nodeType !== '*') {
|
||||
nodeTypes.add(change.nodeType);
|
||||
}
|
||||
});
|
||||
|
||||
return Array.from(nodeTypes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all versions tracked for a specific node
|
||||
*/
|
||||
export function getTrackedVersionsForNode(nodeType: string): string[] {
|
||||
const versions = new Set<string>();
|
||||
|
||||
BREAKING_CHANGES_REGISTRY
|
||||
.filter(change => change.nodeType === nodeType || change.nodeType === '*')
|
||||
.forEach(change => {
|
||||
versions.add(change.fromVersion);
|
||||
versions.add(change.toVersion);
|
||||
});
|
||||
|
||||
return Array.from(versions).sort((a, b) => compareVersions(a, b));
|
||||
}
|
||||
@@ -1,12 +1,10 @@
|
||||
/**
|
||||
* Configuration Validator Service
|
||||
*
|
||||
*
|
||||
* Validates node configurations to catch errors before execution.
|
||||
* Provides helpful suggestions and identifies missing or misconfigured properties.
|
||||
*/
|
||||
|
||||
import { shouldSkipLiteralValidation } from '../utils/expression-utils.js';
|
||||
|
||||
export interface ValidationResult {
|
||||
valid: boolean;
|
||||
errors: ValidationError[];
|
||||
@@ -383,16 +381,13 @@ export class ConfigValidator {
|
||||
): void {
|
||||
// URL validation
|
||||
if (config.url && typeof config.url === 'string') {
|
||||
// Skip validation for expressions - they will be evaluated at runtime
|
||||
if (!shouldSkipLiteralValidation(config.url)) {
|
||||
if (!config.url.startsWith('http://') && !config.url.startsWith('https://')) {
|
||||
errors.push({
|
||||
type: 'invalid_value',
|
||||
property: 'url',
|
||||
message: 'URL must start with http:// or https://',
|
||||
fix: 'Add https:// to the beginning of your URL'
|
||||
});
|
||||
}
|
||||
if (!config.url.startsWith('http://') && !config.url.startsWith('https://')) {
|
||||
errors.push({
|
||||
type: 'invalid_value',
|
||||
property: 'url',
|
||||
message: 'URL must start with http:// or https://',
|
||||
fix: 'Add https:// to the beginning of your URL'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -422,19 +417,15 @@ export class ConfigValidator {
|
||||
|
||||
// JSON body validation
|
||||
if (config.sendBody && config.contentType === 'json' && config.jsonBody) {
|
||||
// Skip validation for expressions - they will be evaluated at runtime
|
||||
if (!shouldSkipLiteralValidation(config.jsonBody)) {
|
||||
try {
|
||||
JSON.parse(config.jsonBody);
|
||||
} catch (e) {
|
||||
const errorMsg = e instanceof Error ? e.message : 'Unknown parsing error';
|
||||
errors.push({
|
||||
type: 'invalid_value',
|
||||
property: 'jsonBody',
|
||||
message: `jsonBody contains invalid JSON: ${errorMsg}`,
|
||||
fix: 'Fix JSON syntax error and ensure valid JSON format'
|
||||
});
|
||||
}
|
||||
try {
|
||||
JSON.parse(config.jsonBody);
|
||||
} catch (e) {
|
||||
errors.push({
|
||||
type: 'invalid_value',
|
||||
property: 'jsonBody',
|
||||
message: 'jsonBody contains invalid JSON',
|
||||
fix: 'Ensure jsonBody contains valid JSON syntax'
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -466,15 +466,6 @@ export class EnhancedConfigValidator extends ConfigValidator {
|
||||
return Array.from(seen.values());
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a warning should be filtered out (hardcoded credentials shown only in strict mode)
|
||||
*/
|
||||
private static shouldFilterCredentialWarning(warning: ValidationWarning): boolean {
|
||||
return warning.type === 'security' &&
|
||||
warning.message !== undefined &&
|
||||
warning.message.includes('Hardcoded nodeCredentialType');
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply profile-based filtering to validation results
|
||||
*/
|
||||
@@ -487,13 +478,9 @@ export class EnhancedConfigValidator extends ConfigValidator {
|
||||
// Only keep missing required errors
|
||||
result.errors = result.errors.filter(e => e.type === 'missing_required');
|
||||
// Keep ONLY critical warnings (security and deprecated)
|
||||
// But filter out hardcoded credential type warnings (only show in strict mode)
|
||||
result.warnings = result.warnings.filter(w => {
|
||||
if (this.shouldFilterCredentialWarning(w)) {
|
||||
return false;
|
||||
}
|
||||
return w.type === 'security' || w.type === 'deprecated';
|
||||
});
|
||||
result.warnings = result.warnings.filter(w =>
|
||||
w.type === 'security' || w.type === 'deprecated'
|
||||
);
|
||||
result.suggestions = [];
|
||||
break;
|
||||
|
||||
@@ -506,10 +493,6 @@ export class EnhancedConfigValidator extends ConfigValidator {
|
||||
);
|
||||
// Keep security and deprecated warnings, REMOVE property visibility warnings
|
||||
result.warnings = result.warnings.filter(w => {
|
||||
// Filter out hardcoded credential type warnings (only show in strict mode)
|
||||
if (this.shouldFilterCredentialWarning(w)) {
|
||||
return false;
|
||||
}
|
||||
if (w.type === 'security' || w.type === 'deprecated') return true;
|
||||
// FILTER OUT property visibility warnings (too noisy)
|
||||
if (w.type === 'inefficient' && w.message && w.message.includes('not visible')) {
|
||||
@@ -535,10 +518,6 @@ export class EnhancedConfigValidator extends ConfigValidator {
|
||||
// Current behavior - balanced for AI agents
|
||||
// Filter out noise but keep helpful warnings
|
||||
result.warnings = result.warnings.filter(w => {
|
||||
// Filter out hardcoded credential type warnings (only show in strict mode)
|
||||
if (this.shouldFilterCredentialWarning(w)) {
|
||||
return false;
|
||||
}
|
||||
// Keep security and deprecated warnings
|
||||
if (w.type === 'security' || w.type === 'deprecated') return true;
|
||||
// Keep missing common properties
|
||||
|
||||
@@ -207,14 +207,8 @@ export class ExpressionValidator {
|
||||
expr: string,
|
||||
result: ExpressionValidationResult
|
||||
): void {
|
||||
// Check for missing $ prefix - but exclude cases where $ is already present OR it's property access (e.g., .json)
|
||||
// The pattern now excludes:
|
||||
// - Immediately preceded by $ (e.g., $json) - handled by (?<!\$)
|
||||
// - Preceded by a dot (e.g., .json in $('Node').item.json.field) - handled by (?<!\.)
|
||||
// - Inside word characters (e.g., myJson) - handled by (?<!\w)
|
||||
// - Inside bracket notation (e.g., ['json']) - handled by (?<![)
|
||||
// - After opening bracket or quote (e.g., "json" or ['json'])
|
||||
const missingPrefixPattern = /(?<![.$\w['])\b(json|node|input|items|workflow|execution)\b(?!\s*[:''])/;
|
||||
// Check for missing $ prefix - but exclude cases where $ is already present
|
||||
const missingPrefixPattern = /(?<!\$)\b(json|node|input|items|workflow|execution)\b(?!\s*:)/;
|
||||
if (expr.match(missingPrefixPattern)) {
|
||||
result.warnings.push(
|
||||
'Possible missing $ prefix for variable (e.g., use $json instead of json)'
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import { z } from 'zod';
|
||||
import { WorkflowNode, WorkflowConnection, Workflow } from '../types/n8n-api';
|
||||
import { isTriggerNode, isActivatableTrigger } from '../utils/node-type-utils';
|
||||
import { isNonExecutableNode } from '../utils/node-classification';
|
||||
|
||||
// Zod schemas for n8n API validation
|
||||
|
||||
@@ -24,31 +22,17 @@ export const workflowNodeSchema = z.object({
|
||||
executeOnce: z.boolean().optional(),
|
||||
});
|
||||
|
||||
// Connection array schema used by all connection types
|
||||
const connectionArraySchema = z.array(
|
||||
z.array(
|
||||
z.object({
|
||||
node: z.string(),
|
||||
type: z.string(),
|
||||
index: z.number(),
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
/**
|
||||
* Workflow connection schema supporting all connection types.
|
||||
* Note: 'main' is optional because AI nodes exclusively use AI-specific
|
||||
* connection types (ai_languageModel, ai_memory, etc.) without main connections.
|
||||
*/
|
||||
export const workflowConnectionSchema = z.record(
|
||||
z.object({
|
||||
main: connectionArraySchema.optional(),
|
||||
error: connectionArraySchema.optional(),
|
||||
ai_tool: connectionArraySchema.optional(),
|
||||
ai_languageModel: connectionArraySchema.optional(),
|
||||
ai_memory: connectionArraySchema.optional(),
|
||||
ai_embedding: connectionArraySchema.optional(),
|
||||
ai_vectorStore: connectionArraySchema.optional(),
|
||||
main: z.array(
|
||||
z.array(
|
||||
z.object({
|
||||
node: z.string(),
|
||||
type: z.string(),
|
||||
index: z.number(),
|
||||
})
|
||||
)
|
||||
),
|
||||
})
|
||||
);
|
||||
|
||||
@@ -210,14 +194,6 @@ export function validateWorkflowStructure(workflow: Partial<Workflow>): string[]
|
||||
errors.push('Workflow must have at least one node');
|
||||
}
|
||||
|
||||
// Check if workflow has only non-executable nodes (sticky notes)
|
||||
if (workflow.nodes && workflow.nodes.length > 0) {
|
||||
const hasExecutableNodes = workflow.nodes.some(node => !isNonExecutableNode(node.type));
|
||||
if (!hasExecutableNodes) {
|
||||
errors.push('Workflow must have at least one executable node. Sticky notes alone cannot form a valid workflow.');
|
||||
}
|
||||
}
|
||||
|
||||
if (!workflow.connections) {
|
||||
errors.push('Workflow connections are required');
|
||||
}
|
||||
@@ -235,15 +211,13 @@ export function validateWorkflowStructure(workflow: Partial<Workflow>): string[]
|
||||
|
||||
// Check for disconnected nodes in multi-node workflows
|
||||
if (workflow.nodes && workflow.nodes.length > 1 && workflow.connections) {
|
||||
// Filter out non-executable nodes (sticky notes) when counting nodes
|
||||
const executableNodes = workflow.nodes.filter(node => !isNonExecutableNode(node.type));
|
||||
const connectionCount = Object.keys(workflow.connections).length;
|
||||
|
||||
// First check: workflow has no connections at all (only check if there are multiple executable nodes)
|
||||
if (connectionCount === 0 && executableNodes.length > 1) {
|
||||
const nodeNames = executableNodes.slice(0, 2).map(n => n.name);
|
||||
// First check: workflow has no connections at all
|
||||
if (connectionCount === 0) {
|
||||
const nodeNames = workflow.nodes.slice(0, 2).map(n => n.name);
|
||||
errors.push(`Multi-node workflow has no connections between nodes. Add a connection using: {type: 'addConnection', source: '${nodeNames[0]}', target: '${nodeNames[1]}', sourcePort: 'main', targetPort: 'main'}`);
|
||||
} else if (connectionCount > 0 || executableNodes.length > 1) {
|
||||
} else {
|
||||
// Second check: detect disconnected nodes (nodes with no incoming or outgoing connections)
|
||||
const connectedNodes = new Set<string>();
|
||||
|
||||
@@ -262,20 +236,19 @@ export function validateWorkflowStructure(workflow: Partial<Workflow>): string[]
|
||||
}
|
||||
});
|
||||
|
||||
// Find disconnected nodes (excluding non-executable nodes and triggers)
|
||||
// Non-executable nodes (sticky notes) are UI-only and don't need connections
|
||||
// Trigger nodes only need outgoing connections
|
||||
// Find disconnected nodes (excluding webhook triggers which can be source-only)
|
||||
const webhookTypes = new Set([
|
||||
'n8n-nodes-base.webhook',
|
||||
'n8n-nodes-base.webhookTrigger',
|
||||
'n8n-nodes-base.manualTrigger'
|
||||
]);
|
||||
|
||||
const disconnectedNodes = workflow.nodes.filter(node => {
|
||||
// Skip non-executable nodes (sticky notes, etc.) - they're UI-only annotations
|
||||
if (isNonExecutableNode(node.type)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const isConnected = connectedNodes.has(node.name);
|
||||
const isNodeTrigger = isTriggerNode(node.type);
|
||||
const isWebhookOrTrigger = webhookTypes.has(node.type);
|
||||
|
||||
// Trigger nodes only need outgoing connections
|
||||
if (isNodeTrigger) {
|
||||
// Webhook/trigger nodes only need outgoing connections
|
||||
if (isWebhookOrTrigger) {
|
||||
return !workflow.connections?.[node.name]; // Disconnected if no outgoing connections
|
||||
}
|
||||
|
||||
@@ -330,29 +303,6 @@ export function validateWorkflowStructure(workflow: Partial<Workflow>): string[]
|
||||
}
|
||||
}
|
||||
|
||||
// Validate active workflows have activatable triggers
|
||||
// Issue #351: executeWorkflowTrigger cannot activate a workflow
|
||||
// It can only be invoked by other workflows
|
||||
if ((workflow as any).active === true && workflow.nodes && workflow.nodes.length > 0) {
|
||||
const activatableTriggers = workflow.nodes.filter(node =>
|
||||
!node.disabled && isActivatableTrigger(node.type)
|
||||
);
|
||||
|
||||
const executeWorkflowTriggers = workflow.nodes.filter(node =>
|
||||
!node.disabled && node.type.toLowerCase().includes('executeworkflow')
|
||||
);
|
||||
|
||||
if (activatableTriggers.length === 0 && executeWorkflowTriggers.length > 0) {
|
||||
// Workflow is active but only has executeWorkflowTrigger nodes
|
||||
const triggerNames = executeWorkflowTriggers.map(n => n.name).join(', ');
|
||||
errors.push(
|
||||
`Cannot activate workflow with only Execute Workflow Trigger nodes (${triggerNames}). ` +
|
||||
'Execute Workflow Trigger can only be invoked by other workflows, not activated. ' +
|
||||
'Either deactivate the workflow or add a webhook/schedule/polling trigger.'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Validate Switch and IF node connection structures match their rules
|
||||
if (workflow.nodes && workflow.connections) {
|
||||
const switchNodes = workflow.nodes.filter(n => {
|
||||
|
||||
@@ -1,410 +0,0 @@
|
||||
/**
|
||||
* Node Migration Service
|
||||
*
|
||||
* Handles smart auto-migration of node configurations during version upgrades.
|
||||
* Applies migration strategies from the breaking changes registry and detectors.
|
||||
*
|
||||
* Migration strategies:
|
||||
* - add_property: Add new required/optional properties with defaults
|
||||
* - remove_property: Remove deprecated properties
|
||||
* - rename_property: Rename properties that changed names
|
||||
* - set_default: Set default values for properties
|
||||
*/
|
||||
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { BreakingChangeDetector, DetectedChange } from './breaking-change-detector';
|
||||
import { NodeVersionService } from './node-version-service';
|
||||
|
||||
export interface MigrationResult {
|
||||
success: boolean;
|
||||
nodeId: string;
|
||||
nodeName: string;
|
||||
fromVersion: string;
|
||||
toVersion: string;
|
||||
appliedMigrations: AppliedMigration[];
|
||||
remainingIssues: string[];
|
||||
confidence: 'HIGH' | 'MEDIUM' | 'LOW';
|
||||
updatedNode: any; // The migrated node configuration
|
||||
}
|
||||
|
||||
export interface AppliedMigration {
|
||||
propertyName: string;
|
||||
action: string;
|
||||
oldValue?: any;
|
||||
newValue?: any;
|
||||
description: string;
|
||||
}
|
||||
|
||||
export class NodeMigrationService {
|
||||
constructor(
|
||||
private versionService: NodeVersionService,
|
||||
private breakingChangeDetector: BreakingChangeDetector
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Migrate a node from its current version to a target version
|
||||
*/
|
||||
async migrateNode(
|
||||
node: any,
|
||||
fromVersion: string,
|
||||
toVersion: string
|
||||
): Promise<MigrationResult> {
|
||||
const nodeId = node.id || 'unknown';
|
||||
const nodeName = node.name || 'Unknown Node';
|
||||
const nodeType = node.type;
|
||||
|
||||
// Analyze the version upgrade
|
||||
const analysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
|
||||
nodeType,
|
||||
fromVersion,
|
||||
toVersion
|
||||
);
|
||||
|
||||
// Start with a copy of the node
|
||||
const migratedNode = JSON.parse(JSON.stringify(node));
|
||||
|
||||
// Apply the version update
|
||||
migratedNode.typeVersion = this.parseVersion(toVersion);
|
||||
|
||||
const appliedMigrations: AppliedMigration[] = [];
|
||||
const remainingIssues: string[] = [];
|
||||
|
||||
// Apply auto-migratable changes
|
||||
for (const change of analysis.changes.filter(c => c.autoMigratable)) {
|
||||
const migration = this.applyMigration(migratedNode, change);
|
||||
|
||||
if (migration) {
|
||||
appliedMigrations.push(migration);
|
||||
}
|
||||
}
|
||||
|
||||
// Collect remaining manual issues
|
||||
for (const change of analysis.changes.filter(c => !c.autoMigratable)) {
|
||||
remainingIssues.push(
|
||||
`Manual action required for "${change.propertyName}": ${change.migrationHint}`
|
||||
);
|
||||
}
|
||||
|
||||
// Determine confidence based on remaining issues
|
||||
let confidence: 'HIGH' | 'MEDIUM' | 'LOW' = 'HIGH';
|
||||
|
||||
if (remainingIssues.length > 0) {
|
||||
confidence = remainingIssues.length > 3 ? 'LOW' : 'MEDIUM';
|
||||
}
|
||||
|
||||
return {
|
||||
success: remainingIssues.length === 0,
|
||||
nodeId,
|
||||
nodeName,
|
||||
fromVersion,
|
||||
toVersion,
|
||||
appliedMigrations,
|
||||
remainingIssues,
|
||||
confidence,
|
||||
updatedNode: migratedNode
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a single migration change to a node
|
||||
*/
|
||||
private applyMigration(node: any, change: DetectedChange): AppliedMigration | null {
|
||||
if (!change.migrationStrategy) return null;
|
||||
|
||||
const { type, defaultValue, sourceProperty, targetProperty } = change.migrationStrategy;
|
||||
|
||||
switch (type) {
|
||||
case 'add_property':
|
||||
return this.addProperty(node, change.propertyName, defaultValue, change);
|
||||
|
||||
case 'remove_property':
|
||||
return this.removeProperty(node, change.propertyName, change);
|
||||
|
||||
case 'rename_property':
|
||||
return this.renameProperty(node, sourceProperty!, targetProperty!, change);
|
||||
|
||||
case 'set_default':
|
||||
return this.setDefault(node, change.propertyName, defaultValue, change);
|
||||
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new property to the node configuration
|
||||
*/
|
||||
private addProperty(
|
||||
node: any,
|
||||
propertyPath: string,
|
||||
defaultValue: any,
|
||||
change: DetectedChange
|
||||
): AppliedMigration {
|
||||
const value = this.resolveDefaultValue(propertyPath, defaultValue, node);
|
||||
|
||||
// Handle nested property paths (e.g., "parameters.inputFieldMapping")
|
||||
const parts = propertyPath.split('.');
|
||||
let target = node;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
if (!target[part]) {
|
||||
target[part] = {};
|
||||
}
|
||||
target = target[part];
|
||||
}
|
||||
|
||||
const finalKey = parts[parts.length - 1];
|
||||
target[finalKey] = value;
|
||||
|
||||
return {
|
||||
propertyName: propertyPath,
|
||||
action: 'Added property',
|
||||
newValue: value,
|
||||
description: `Added "${propertyPath}" with default value`
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a deprecated property from the node configuration
|
||||
*/
|
||||
private removeProperty(
|
||||
node: any,
|
||||
propertyPath: string,
|
||||
change: DetectedChange
|
||||
): AppliedMigration | null {
|
||||
const parts = propertyPath.split('.');
|
||||
let target = node;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
if (!target[part]) return null; // Property doesn't exist
|
||||
target = target[part];
|
||||
}
|
||||
|
||||
const finalKey = parts[parts.length - 1];
|
||||
const oldValue = target[finalKey];
|
||||
|
||||
if (oldValue !== undefined) {
|
||||
delete target[finalKey];
|
||||
|
||||
return {
|
||||
propertyName: propertyPath,
|
||||
action: 'Removed property',
|
||||
oldValue,
|
||||
description: `Removed deprecated property "${propertyPath}"`
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rename a property (move value from old name to new name)
|
||||
*/
|
||||
private renameProperty(
|
||||
node: any,
|
||||
sourcePath: string,
|
||||
targetPath: string,
|
||||
change: DetectedChange
|
||||
): AppliedMigration | null {
|
||||
// Get old value
|
||||
const sourceParts = sourcePath.split('.');
|
||||
let sourceTarget = node;
|
||||
|
||||
for (let i = 0; i < sourceParts.length - 1; i++) {
|
||||
if (!sourceTarget[sourceParts[i]]) return null;
|
||||
sourceTarget = sourceTarget[sourceParts[i]];
|
||||
}
|
||||
|
||||
const sourceKey = sourceParts[sourceParts.length - 1];
|
||||
const oldValue = sourceTarget[sourceKey];
|
||||
|
||||
if (oldValue === undefined) return null; // Source doesn't exist
|
||||
|
||||
// Set new value
|
||||
const targetParts = targetPath.split('.');
|
||||
let targetTarget = node;
|
||||
|
||||
for (let i = 0; i < targetParts.length - 1; i++) {
|
||||
if (!targetTarget[targetParts[i]]) {
|
||||
targetTarget[targetParts[i]] = {};
|
||||
}
|
||||
targetTarget = targetTarget[targetParts[i]];
|
||||
}
|
||||
|
||||
const targetKey = targetParts[targetParts.length - 1];
|
||||
targetTarget[targetKey] = oldValue;
|
||||
|
||||
// Remove old value
|
||||
delete sourceTarget[sourceKey];
|
||||
|
||||
return {
|
||||
propertyName: targetPath,
|
||||
action: 'Renamed property',
|
||||
oldValue: `${sourcePath}: ${JSON.stringify(oldValue)}`,
|
||||
newValue: `${targetPath}: ${JSON.stringify(oldValue)}`,
|
||||
description: `Renamed "${sourcePath}" to "${targetPath}"`
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a default value for a property
|
||||
*/
|
||||
private setDefault(
|
||||
node: any,
|
||||
propertyPath: string,
|
||||
defaultValue: any,
|
||||
change: DetectedChange
|
||||
): AppliedMigration | null {
|
||||
const parts = propertyPath.split('.');
|
||||
let target = node;
|
||||
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
if (!target[parts[i]]) {
|
||||
target[parts[i]] = {};
|
||||
}
|
||||
target = target[parts[i]];
|
||||
}
|
||||
|
||||
const finalKey = parts[parts.length - 1];
|
||||
|
||||
// Only set if not already defined
|
||||
if (target[finalKey] === undefined) {
|
||||
const value = this.resolveDefaultValue(propertyPath, defaultValue, node);
|
||||
target[finalKey] = value;
|
||||
|
||||
return {
|
||||
propertyName: propertyPath,
|
||||
action: 'Set default value',
|
||||
newValue: value,
|
||||
description: `Set default value for "${propertyPath}"`
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve default value with special handling for certain property types
|
||||
*/
|
||||
private resolveDefaultValue(propertyPath: string, defaultValue: any, node: any): any {
|
||||
// Special case: webhookId needs a UUID
|
||||
if (propertyPath === 'webhookId' || propertyPath.endsWith('.webhookId')) {
|
||||
return uuidv4();
|
||||
}
|
||||
|
||||
// Special case: webhook path needs a unique value
|
||||
if (propertyPath === 'path' || propertyPath.endsWith('.path')) {
|
||||
if (node.type === 'n8n-nodes-base.webhook') {
|
||||
return `/webhook-${Date.now()}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Return provided default or null
|
||||
return defaultValue !== null && defaultValue !== undefined ? defaultValue : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse version string to number (for typeVersion field)
|
||||
*/
|
||||
private parseVersion(version: string): number {
|
||||
const parts = version.split('.').map(Number);
|
||||
|
||||
// Handle versions like "1.1" -> 1.1, "2.0" -> 2
|
||||
if (parts.length === 1) return parts[0];
|
||||
if (parts.length === 2) return parts[0] + parts[1] / 10;
|
||||
|
||||
// For more complex versions, just use first number
|
||||
return parts[0];
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that a migrated node is valid
|
||||
*/
|
||||
async validateMigratedNode(node: any, nodeType: string): Promise<{
|
||||
valid: boolean;
|
||||
errors: string[];
|
||||
warnings: string[];
|
||||
}> {
|
||||
const errors: string[] = [];
|
||||
const warnings: string[] = [];
|
||||
|
||||
// Basic validation
|
||||
if (!node.typeVersion) {
|
||||
errors.push('Missing typeVersion after migration');
|
||||
}
|
||||
|
||||
if (!node.parameters) {
|
||||
errors.push('Missing parameters object');
|
||||
}
|
||||
|
||||
// Check for common issues
|
||||
if (nodeType === 'n8n-nodes-base.webhook') {
|
||||
if (!node.parameters?.path) {
|
||||
errors.push('Webhook node missing required "path" parameter');
|
||||
}
|
||||
if (node.typeVersion >= 2.1 && !node.webhookId) {
|
||||
warnings.push('Webhook v2.1+ typically requires webhookId');
|
||||
}
|
||||
}
|
||||
|
||||
if (nodeType === 'n8n-nodes-base.executeWorkflow') {
|
||||
if (node.typeVersion >= 1.1 && !node.parameters?.inputFieldMapping) {
|
||||
errors.push('Execute Workflow v1.1+ requires inputFieldMapping');
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
warnings
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch migrate multiple nodes in a workflow
|
||||
*/
|
||||
async migrateWorkflowNodes(
|
||||
workflow: any,
|
||||
targetVersions: Record<string, string> // nodeId -> targetVersion
|
||||
): Promise<{
|
||||
success: boolean;
|
||||
results: MigrationResult[];
|
||||
overallConfidence: 'HIGH' | 'MEDIUM' | 'LOW';
|
||||
}> {
|
||||
const results: MigrationResult[] = [];
|
||||
|
||||
for (const node of workflow.nodes || []) {
|
||||
const targetVersion = targetVersions[node.id];
|
||||
|
||||
if (targetVersion && node.typeVersion) {
|
||||
const currentVersion = node.typeVersion.toString();
|
||||
|
||||
const result = await this.migrateNode(node, currentVersion, targetVersion);
|
||||
results.push(result);
|
||||
|
||||
// Update node in place
|
||||
Object.assign(node, result.updatedNode);
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate overall confidence
|
||||
const confidences = results.map(r => r.confidence);
|
||||
let overallConfidence: 'HIGH' | 'MEDIUM' | 'LOW' = 'HIGH';
|
||||
|
||||
if (confidences.includes('LOW')) {
|
||||
overallConfidence = 'LOW';
|
||||
} else if (confidences.includes('MEDIUM')) {
|
||||
overallConfidence = 'MEDIUM';
|
||||
}
|
||||
|
||||
const success = results.every(r => r.success);
|
||||
|
||||
return {
|
||||
success,
|
||||
results,
|
||||
overallConfidence
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1038,9 +1038,16 @@ export class NodeSpecificValidators {
|
||||
delete autofix.continueOnFail;
|
||||
}
|
||||
|
||||
// Note: responseNode mode validation moved to workflow-validator.ts
|
||||
// where it has access to node-level onError property (not just config/parameters)
|
||||
|
||||
// Response mode validation
|
||||
if (responseMode === 'responseNode' && !config.onError && !config.continueOnFail) {
|
||||
errors.push({
|
||||
type: 'invalid_configuration',
|
||||
property: 'responseMode',
|
||||
message: 'responseNode mode requires onError: "continueRegularOutput"',
|
||||
fix: 'Set onError to ensure response is always sent'
|
||||
});
|
||||
}
|
||||
|
||||
// Always output data for debugging
|
||||
if (!config.alwaysOutputData) {
|
||||
suggestions.push('Enable alwaysOutputData to debug webhook payloads');
|
||||
|
||||
@@ -1,377 +0,0 @@
|
||||
/**
|
||||
* Node Version Service
|
||||
*
|
||||
* Central service for node version discovery, comparison, and upgrade path recommendation.
|
||||
* Provides caching for performance and integrates with the database and breaking change detector.
|
||||
*/
|
||||
|
||||
import { NodeRepository } from '../database/node-repository';
|
||||
import { BreakingChangeDetector } from './breaking-change-detector';
|
||||
|
||||
export interface NodeVersion {
|
||||
nodeType: string;
|
||||
version: string;
|
||||
packageName: string;
|
||||
displayName: string;
|
||||
isCurrentMax: boolean;
|
||||
minimumN8nVersion?: string;
|
||||
breakingChanges: any[];
|
||||
deprecatedProperties: string[];
|
||||
addedProperties: string[];
|
||||
releasedAt?: Date;
|
||||
}
|
||||
|
||||
export interface VersionComparison {
|
||||
nodeType: string;
|
||||
currentVersion: string;
|
||||
latestVersion: string;
|
||||
isOutdated: boolean;
|
||||
versionGap: number; // How many versions behind
|
||||
hasBreakingChanges: boolean;
|
||||
recommendUpgrade: boolean;
|
||||
confidence: 'HIGH' | 'MEDIUM' | 'LOW';
|
||||
reason: string;
|
||||
}
|
||||
|
||||
export interface UpgradePath {
|
||||
nodeType: string;
|
||||
fromVersion: string;
|
||||
toVersion: string;
|
||||
direct: boolean; // Can upgrade directly or needs intermediate steps
|
||||
intermediateVersions: string[]; // If multi-step upgrade needed
|
||||
totalBreakingChanges: number;
|
||||
autoMigratableChanges: number;
|
||||
manualRequiredChanges: number;
|
||||
estimatedEffort: 'LOW' | 'MEDIUM' | 'HIGH';
|
||||
steps: UpgradeStep[];
|
||||
}
|
||||
|
||||
export interface UpgradeStep {
|
||||
fromVersion: string;
|
||||
toVersion: string;
|
||||
breakingChanges: number;
|
||||
migrationHints: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Node Version Service with caching
|
||||
*/
|
||||
export class NodeVersionService {
|
||||
private versionCache: Map<string, NodeVersion[]> = new Map();
|
||||
private cacheTTL: number = 5 * 60 * 1000; // 5 minutes
|
||||
private cacheTimestamps: Map<string, number> = new Map();
|
||||
|
||||
constructor(
|
||||
private nodeRepository: NodeRepository,
|
||||
private breakingChangeDetector: BreakingChangeDetector
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Get all available versions for a node type
|
||||
*/
|
||||
getAvailableVersions(nodeType: string): NodeVersion[] {
|
||||
// Check cache first
|
||||
const cached = this.getCachedVersions(nodeType);
|
||||
if (cached) return cached;
|
||||
|
||||
// Query from database
|
||||
const versions = this.nodeRepository.getNodeVersions(nodeType);
|
||||
|
||||
// Cache the result
|
||||
this.cacheVersions(nodeType, versions);
|
||||
|
||||
return versions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the latest available version for a node type
|
||||
*/
|
||||
getLatestVersion(nodeType: string): string | null {
|
||||
const versions = this.getAvailableVersions(nodeType);
|
||||
|
||||
if (versions.length === 0) {
|
||||
// Fallback to main nodes table
|
||||
const node = this.nodeRepository.getNode(nodeType);
|
||||
return node?.version || null;
|
||||
}
|
||||
|
||||
// Find version marked as current max
|
||||
const maxVersion = versions.find(v => v.isCurrentMax);
|
||||
if (maxVersion) return maxVersion.version;
|
||||
|
||||
// Fallback: sort and get highest
|
||||
const sorted = versions.sort((a, b) => this.compareVersions(b.version, a.version));
|
||||
return sorted[0]?.version || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare a node's current version against the latest available
|
||||
*/
|
||||
compareVersions(currentVersion: string, latestVersion: string): number {
|
||||
const parts1 = currentVersion.split('.').map(Number);
|
||||
const parts2 = latestVersion.split('.').map(Number);
|
||||
|
||||
for (let i = 0; i < Math.max(parts1.length, parts2.length); i++) {
|
||||
const p1 = parts1[i] || 0;
|
||||
const p2 = parts2[i] || 0;
|
||||
|
||||
if (p1 < p2) return -1;
|
||||
if (p1 > p2) return 1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze if a node version is outdated and should be upgraded
|
||||
*/
|
||||
analyzeVersion(nodeType: string, currentVersion: string): VersionComparison {
|
||||
const latestVersion = this.getLatestVersion(nodeType);
|
||||
|
||||
if (!latestVersion) {
|
||||
return {
|
||||
nodeType,
|
||||
currentVersion,
|
||||
latestVersion: currentVersion,
|
||||
isOutdated: false,
|
||||
versionGap: 0,
|
||||
hasBreakingChanges: false,
|
||||
recommendUpgrade: false,
|
||||
confidence: 'HIGH',
|
||||
reason: 'No version information available. Using current version.'
|
||||
};
|
||||
}
|
||||
|
||||
const comparison = this.compareVersions(currentVersion, latestVersion);
|
||||
const isOutdated = comparison < 0;
|
||||
|
||||
if (!isOutdated) {
|
||||
return {
|
||||
nodeType,
|
||||
currentVersion,
|
||||
latestVersion,
|
||||
isOutdated: false,
|
||||
versionGap: 0,
|
||||
hasBreakingChanges: false,
|
||||
recommendUpgrade: false,
|
||||
confidence: 'HIGH',
|
||||
reason: 'Node is already at the latest version.'
|
||||
};
|
||||
}
|
||||
|
||||
// Calculate version gap
|
||||
const versionGap = this.calculateVersionGap(currentVersion, latestVersion);
|
||||
|
||||
// Check for breaking changes
|
||||
const hasBreakingChanges = this.breakingChangeDetector.hasBreakingChanges(
|
||||
nodeType,
|
||||
currentVersion,
|
||||
latestVersion
|
||||
);
|
||||
|
||||
// Determine upgrade recommendation and confidence
|
||||
let recommendUpgrade = true;
|
||||
let confidence: 'HIGH' | 'MEDIUM' | 'LOW' = 'HIGH';
|
||||
let reason = `Version ${latestVersion} available. `;
|
||||
|
||||
if (hasBreakingChanges) {
|
||||
confidence = 'MEDIUM';
|
||||
reason += 'Contains breaking changes. Review before upgrading.';
|
||||
} else {
|
||||
reason += 'Safe to upgrade (no breaking changes detected).';
|
||||
}
|
||||
|
||||
if (versionGap > 2) {
|
||||
confidence = 'LOW';
|
||||
reason += ` Version gap is large (${versionGap} versions). Consider incremental upgrade.`;
|
||||
}
|
||||
|
||||
return {
|
||||
nodeType,
|
||||
currentVersion,
|
||||
latestVersion,
|
||||
isOutdated,
|
||||
versionGap,
|
||||
hasBreakingChanges,
|
||||
recommendUpgrade,
|
||||
confidence,
|
||||
reason
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate the version gap (number of versions between)
|
||||
*/
|
||||
private calculateVersionGap(fromVersion: string, toVersion: string): number {
|
||||
const from = fromVersion.split('.').map(Number);
|
||||
const to = toVersion.split('.').map(Number);
|
||||
|
||||
// Simple gap calculation based on version numbers
|
||||
let gap = 0;
|
||||
|
||||
for (let i = 0; i < Math.max(from.length, to.length); i++) {
|
||||
const f = from[i] || 0;
|
||||
const t = to[i] || 0;
|
||||
gap += Math.abs(t - f);
|
||||
}
|
||||
|
||||
return gap;
|
||||
}
|
||||
|
||||
/**
|
||||
* Suggest the best upgrade path for a node
|
||||
*/
|
||||
async suggestUpgradePath(nodeType: string, currentVersion: string): Promise<UpgradePath | null> {
|
||||
const latestVersion = this.getLatestVersion(nodeType);
|
||||
|
||||
if (!latestVersion) return null;
|
||||
|
||||
const comparison = this.compareVersions(currentVersion, latestVersion);
|
||||
if (comparison >= 0) return null; // Already at latest or newer
|
||||
|
||||
// Get all available versions between current and latest
|
||||
const allVersions = this.getAvailableVersions(nodeType);
|
||||
const intermediateVersions = allVersions
|
||||
.filter(v =>
|
||||
this.compareVersions(v.version, currentVersion) > 0 &&
|
||||
this.compareVersions(v.version, latestVersion) < 0
|
||||
)
|
||||
.map(v => v.version)
|
||||
.sort((a, b) => this.compareVersions(a, b));
|
||||
|
||||
// Analyze the upgrade
|
||||
const analysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
|
||||
nodeType,
|
||||
currentVersion,
|
||||
latestVersion
|
||||
);
|
||||
|
||||
// Determine if direct upgrade is safe
|
||||
const versionGap = this.calculateVersionGap(currentVersion, latestVersion);
|
||||
const direct = versionGap <= 1 || !analysis.hasBreakingChanges;
|
||||
|
||||
// Generate upgrade steps
|
||||
const steps: UpgradeStep[] = [];
|
||||
|
||||
if (direct || intermediateVersions.length === 0) {
|
||||
// Direct upgrade
|
||||
steps.push({
|
||||
fromVersion: currentVersion,
|
||||
toVersion: latestVersion,
|
||||
breakingChanges: analysis.changes.filter(c => c.isBreaking).length,
|
||||
migrationHints: analysis.recommendations
|
||||
});
|
||||
} else {
|
||||
// Multi-step upgrade through intermediate versions
|
||||
let stepFrom = currentVersion;
|
||||
|
||||
for (const intermediateVersion of intermediateVersions) {
|
||||
const stepAnalysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
|
||||
nodeType,
|
||||
stepFrom,
|
||||
intermediateVersion
|
||||
);
|
||||
|
||||
steps.push({
|
||||
fromVersion: stepFrom,
|
||||
toVersion: intermediateVersion,
|
||||
breakingChanges: stepAnalysis.changes.filter(c => c.isBreaking).length,
|
||||
migrationHints: stepAnalysis.recommendations
|
||||
});
|
||||
|
||||
stepFrom = intermediateVersion;
|
||||
}
|
||||
|
||||
// Final step to latest
|
||||
const finalStepAnalysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
|
||||
nodeType,
|
||||
stepFrom,
|
||||
latestVersion
|
||||
);
|
||||
|
||||
steps.push({
|
||||
fromVersion: stepFrom,
|
||||
toVersion: latestVersion,
|
||||
breakingChanges: finalStepAnalysis.changes.filter(c => c.isBreaking).length,
|
||||
migrationHints: finalStepAnalysis.recommendations
|
||||
});
|
||||
}
|
||||
|
||||
// Calculate estimated effort
|
||||
const totalBreakingChanges = steps.reduce((sum, step) => sum + step.breakingChanges, 0);
|
||||
let estimatedEffort: 'LOW' | 'MEDIUM' | 'HIGH' = 'LOW';
|
||||
|
||||
if (totalBreakingChanges > 5 || steps.length > 3) {
|
||||
estimatedEffort = 'HIGH';
|
||||
} else if (totalBreakingChanges > 2 || steps.length > 1) {
|
||||
estimatedEffort = 'MEDIUM';
|
||||
}
|
||||
|
||||
return {
|
||||
nodeType,
|
||||
fromVersion: currentVersion,
|
||||
toVersion: latestVersion,
|
||||
direct,
|
||||
intermediateVersions,
|
||||
totalBreakingChanges,
|
||||
autoMigratableChanges: analysis.autoMigratableCount,
|
||||
manualRequiredChanges: analysis.manualRequiredCount,
|
||||
estimatedEffort,
|
||||
steps
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a specific version exists for a node
|
||||
*/
|
||||
versionExists(nodeType: string, version: string): boolean {
|
||||
const versions = this.getAvailableVersions(nodeType);
|
||||
return versions.some(v => v.version === version);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get version metadata (breaking changes, added/deprecated properties)
|
||||
*/
|
||||
getVersionMetadata(nodeType: string, version: string): NodeVersion | null {
|
||||
const versionData = this.nodeRepository.getNodeVersion(nodeType, version);
|
||||
return versionData;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear the version cache
|
||||
*/
|
||||
clearCache(nodeType?: string): void {
|
||||
if (nodeType) {
|
||||
this.versionCache.delete(nodeType);
|
||||
this.cacheTimestamps.delete(nodeType);
|
||||
} else {
|
||||
this.versionCache.clear();
|
||||
this.cacheTimestamps.clear();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get cached versions if still valid
|
||||
*/
|
||||
private getCachedVersions(nodeType: string): NodeVersion[] | null {
|
||||
const cached = this.versionCache.get(nodeType);
|
||||
const timestamp = this.cacheTimestamps.get(nodeType);
|
||||
|
||||
if (cached && timestamp) {
|
||||
const age = Date.now() - timestamp;
|
||||
if (age < this.cacheTTL) {
|
||||
return cached;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cache versions with timestamp
|
||||
*/
|
||||
private cacheVersions(nodeType: string, versions: NodeVersion[]): void {
|
||||
this.versionCache.set(nodeType, versions);
|
||||
this.cacheTimestamps.set(nodeType, Date.now());
|
||||
}
|
||||
}
|
||||
@@ -1,423 +0,0 @@
|
||||
/**
|
||||
* Post-Update Validator
|
||||
*
|
||||
* Generates comprehensive, AI-friendly migration reports after node version upgrades.
|
||||
* Provides actionable guidance for AI agents on what manual steps are needed.
|
||||
*
|
||||
* Validation includes:
|
||||
* - New required properties
|
||||
* - Deprecated/removed properties
|
||||
* - Behavior changes
|
||||
* - Step-by-step migration instructions
|
||||
*/
|
||||
|
||||
import { BreakingChangeDetector, DetectedChange } from './breaking-change-detector';
|
||||
import { MigrationResult } from './node-migration-service';
|
||||
import { NodeVersionService } from './node-version-service';
|
||||
|
||||
export interface PostUpdateGuidance {
|
||||
nodeId: string;
|
||||
nodeName: string;
|
||||
nodeType: string;
|
||||
oldVersion: string;
|
||||
newVersion: string;
|
||||
migrationStatus: 'complete' | 'partial' | 'manual_required';
|
||||
requiredActions: RequiredAction[];
|
||||
deprecatedProperties: DeprecatedProperty[];
|
||||
behaviorChanges: BehaviorChange[];
|
||||
migrationSteps: string[];
|
||||
confidence: 'HIGH' | 'MEDIUM' | 'LOW';
|
||||
estimatedTime: string; // e.g., "5 minutes", "15 minutes"
|
||||
}
|
||||
|
||||
export interface RequiredAction {
|
||||
type: 'ADD_PROPERTY' | 'UPDATE_PROPERTY' | 'CONFIGURE_OPTION' | 'REVIEW_CONFIGURATION';
|
||||
property: string;
|
||||
reason: string;
|
||||
suggestedValue?: any;
|
||||
currentValue?: any;
|
||||
documentation?: string;
|
||||
priority: 'CRITICAL' | 'HIGH' | 'MEDIUM' | 'LOW';
|
||||
}
|
||||
|
||||
export interface DeprecatedProperty {
|
||||
property: string;
|
||||
status: 'removed' | 'deprecated';
|
||||
replacement?: string;
|
||||
action: 'remove' | 'replace' | 'ignore';
|
||||
impact: 'breaking' | 'warning';
|
||||
}
|
||||
|
||||
export interface BehaviorChange {
|
||||
aspect: string; // e.g., "data passing", "webhook handling"
|
||||
oldBehavior: string;
|
||||
newBehavior: string;
|
||||
impact: 'HIGH' | 'MEDIUM' | 'LOW';
|
||||
actionRequired: boolean;
|
||||
recommendation: string;
|
||||
}
|
||||
|
||||
export class PostUpdateValidator {
|
||||
constructor(
|
||||
private versionService: NodeVersionService,
|
||||
private breakingChangeDetector: BreakingChangeDetector
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Generate comprehensive post-update guidance for a migrated node
|
||||
*/
|
||||
async generateGuidance(
|
||||
nodeId: string,
|
||||
nodeName: string,
|
||||
nodeType: string,
|
||||
oldVersion: string,
|
||||
newVersion: string,
|
||||
migrationResult: MigrationResult
|
||||
): Promise<PostUpdateGuidance> {
|
||||
// Analyze the version upgrade
|
||||
const analysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
|
||||
nodeType,
|
||||
oldVersion,
|
||||
newVersion
|
||||
);
|
||||
|
||||
// Determine migration status
|
||||
const migrationStatus = this.determineMigrationStatus(migrationResult, analysis.changes);
|
||||
|
||||
// Generate required actions
|
||||
const requiredActions = this.generateRequiredActions(
|
||||
migrationResult,
|
||||
analysis.changes,
|
||||
nodeType
|
||||
);
|
||||
|
||||
// Identify deprecated properties
|
||||
const deprecatedProperties = this.identifyDeprecatedProperties(analysis.changes);
|
||||
|
||||
// Document behavior changes
|
||||
const behaviorChanges = this.documentBehaviorChanges(nodeType, oldVersion, newVersion);
|
||||
|
||||
// Generate step-by-step migration instructions
|
||||
const migrationSteps = this.generateMigrationSteps(
|
||||
requiredActions,
|
||||
deprecatedProperties,
|
||||
behaviorChanges
|
||||
);
|
||||
|
||||
// Calculate confidence and estimated time
|
||||
const confidence = this.calculateConfidence(requiredActions, migrationStatus);
|
||||
const estimatedTime = this.estimateTime(requiredActions, behaviorChanges);
|
||||
|
||||
return {
|
||||
nodeId,
|
||||
nodeName,
|
||||
nodeType,
|
||||
oldVersion,
|
||||
newVersion,
|
||||
migrationStatus,
|
||||
requiredActions,
|
||||
deprecatedProperties,
|
||||
behaviorChanges,
|
||||
migrationSteps,
|
||||
confidence,
|
||||
estimatedTime
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine the migration status based on results and changes
|
||||
*/
|
||||
private determineMigrationStatus(
|
||||
migrationResult: MigrationResult,
|
||||
changes: DetectedChange[]
|
||||
): 'complete' | 'partial' | 'manual_required' {
|
||||
if (migrationResult.remainingIssues.length === 0) {
|
||||
return 'complete';
|
||||
}
|
||||
|
||||
const criticalIssues = changes.filter(c => c.isBreaking && !c.autoMigratable);
|
||||
|
||||
if (criticalIssues.length > 0) {
|
||||
return 'manual_required';
|
||||
}
|
||||
|
||||
return 'partial';
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate actionable required actions for the AI agent
|
||||
*/
|
||||
private generateRequiredActions(
|
||||
migrationResult: MigrationResult,
|
||||
changes: DetectedChange[],
|
||||
nodeType: string
|
||||
): RequiredAction[] {
|
||||
const actions: RequiredAction[] = [];
|
||||
|
||||
// Actions from remaining issues (not auto-migrated)
|
||||
const manualChanges = changes.filter(c => !c.autoMigratable);
|
||||
|
||||
for (const change of manualChanges) {
|
||||
actions.push({
|
||||
type: this.mapChangeTypeToActionType(change.changeType),
|
||||
property: change.propertyName,
|
||||
reason: change.migrationHint,
|
||||
suggestedValue: change.newValue,
|
||||
currentValue: change.oldValue,
|
||||
documentation: this.getPropertyDocumentation(nodeType, change.propertyName),
|
||||
priority: this.mapSeverityToPriority(change.severity)
|
||||
});
|
||||
}
|
||||
|
||||
return actions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Identify deprecated or removed properties
|
||||
*/
|
||||
private identifyDeprecatedProperties(changes: DetectedChange[]): DeprecatedProperty[] {
|
||||
const deprecated: DeprecatedProperty[] = [];
|
||||
|
||||
for (const change of changes) {
|
||||
if (change.changeType === 'removed') {
|
||||
deprecated.push({
|
||||
property: change.propertyName,
|
||||
status: 'removed',
|
||||
replacement: change.migrationStrategy?.targetProperty,
|
||||
action: change.autoMigratable ? 'remove' : 'replace',
|
||||
impact: change.isBreaking ? 'breaking' : 'warning'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return deprecated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Document behavior changes for specific nodes
|
||||
*/
|
||||
private documentBehaviorChanges(
|
||||
nodeType: string,
|
||||
oldVersion: string,
|
||||
newVersion: string
|
||||
): BehaviorChange[] {
|
||||
const changes: BehaviorChange[] = [];
|
||||
|
||||
// Execute Workflow node behavior changes
|
||||
if (nodeType === 'n8n-nodes-base.executeWorkflow') {
|
||||
if (this.versionService.compareVersions(oldVersion, '1.1') < 0 &&
|
||||
this.versionService.compareVersions(newVersion, '1.1') >= 0) {
|
||||
changes.push({
|
||||
aspect: 'Data passing to sub-workflows',
|
||||
oldBehavior: 'Automatic data passing - all data from parent workflow automatically available',
|
||||
newBehavior: 'Explicit field mapping required - must define inputFieldMapping to pass specific fields',
|
||||
impact: 'HIGH',
|
||||
actionRequired: true,
|
||||
recommendation: 'Define inputFieldMapping with specific field mappings between parent and child workflows. Review data dependencies.'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Webhook node behavior changes
|
||||
if (nodeType === 'n8n-nodes-base.webhook') {
|
||||
if (this.versionService.compareVersions(oldVersion, '2.1') < 0 &&
|
||||
this.versionService.compareVersions(newVersion, '2.1') >= 0) {
|
||||
changes.push({
|
||||
aspect: 'Webhook persistence',
|
||||
oldBehavior: 'Webhook URL changes on workflow updates',
|
||||
newBehavior: 'Stable webhook URL via webhookId field',
|
||||
impact: 'MEDIUM',
|
||||
actionRequired: false,
|
||||
recommendation: 'Webhook URLs now remain stable across workflow updates. Update external systems if needed.'
|
||||
});
|
||||
}
|
||||
|
||||
if (this.versionService.compareVersions(oldVersion, '2.0') < 0 &&
|
||||
this.versionService.compareVersions(newVersion, '2.0') >= 0) {
|
||||
changes.push({
|
||||
aspect: 'Response handling',
|
||||
oldBehavior: 'Automatic response after webhook trigger',
|
||||
newBehavior: 'Configurable response mode (onReceived vs lastNode)',
|
||||
impact: 'MEDIUM',
|
||||
actionRequired: true,
|
||||
recommendation: 'Review responseMode setting. Use "onReceived" for immediate responses or "lastNode" to wait for workflow completion.'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return changes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate step-by-step migration instructions for AI agents
|
||||
*/
|
||||
private generateMigrationSteps(
|
||||
requiredActions: RequiredAction[],
|
||||
deprecatedProperties: DeprecatedProperty[],
|
||||
behaviorChanges: BehaviorChange[]
|
||||
): string[] {
|
||||
const steps: string[] = [];
|
||||
let stepNumber = 1;
|
||||
|
||||
// Start with deprecations
|
||||
if (deprecatedProperties.length > 0) {
|
||||
steps.push(`Step ${stepNumber++}: Remove deprecated properties`);
|
||||
for (const dep of deprecatedProperties) {
|
||||
steps.push(` - Remove "${dep.property}" ${dep.replacement ? `(use "${dep.replacement}" instead)` : ''}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Then critical actions
|
||||
const criticalActions = requiredActions.filter(a => a.priority === 'CRITICAL');
|
||||
if (criticalActions.length > 0) {
|
||||
steps.push(`Step ${stepNumber++}: Address critical configuration requirements`);
|
||||
for (const action of criticalActions) {
|
||||
steps.push(` - ${action.property}: ${action.reason}`);
|
||||
if (action.suggestedValue !== undefined) {
|
||||
steps.push(` Suggested value: ${JSON.stringify(action.suggestedValue)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// High priority actions
|
||||
const highActions = requiredActions.filter(a => a.priority === 'HIGH');
|
||||
if (highActions.length > 0) {
|
||||
steps.push(`Step ${stepNumber++}: Configure required properties`);
|
||||
for (const action of highActions) {
|
||||
steps.push(` - ${action.property}: ${action.reason}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Behavior change adaptations
|
||||
const actionRequiredChanges = behaviorChanges.filter(c => c.actionRequired);
|
||||
if (actionRequiredChanges.length > 0) {
|
||||
steps.push(`Step ${stepNumber++}: Adapt to behavior changes`);
|
||||
for (const change of actionRequiredChanges) {
|
||||
steps.push(` - ${change.aspect}: ${change.recommendation}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Medium/Low priority actions
|
||||
const otherActions = requiredActions.filter(a => a.priority === 'MEDIUM' || a.priority === 'LOW');
|
||||
if (otherActions.length > 0) {
|
||||
steps.push(`Step ${stepNumber++}: Review optional configurations`);
|
||||
for (const action of otherActions) {
|
||||
steps.push(` - ${action.property}: ${action.reason}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Final validation step
|
||||
steps.push(`Step ${stepNumber}: Test workflow execution`);
|
||||
steps.push(' - Validate all node configurations');
|
||||
steps.push(' - Run a test execution');
|
||||
steps.push(' - Verify expected behavior');
|
||||
|
||||
return steps;
|
||||
}
|
||||
|
||||
/**
|
||||
* Map change type to action type
|
||||
*/
|
||||
private mapChangeTypeToActionType(
|
||||
changeType: string
|
||||
): 'ADD_PROPERTY' | 'UPDATE_PROPERTY' | 'CONFIGURE_OPTION' | 'REVIEW_CONFIGURATION' {
|
||||
switch (changeType) {
|
||||
case 'added':
|
||||
return 'ADD_PROPERTY';
|
||||
case 'requirement_changed':
|
||||
case 'type_changed':
|
||||
return 'UPDATE_PROPERTY';
|
||||
case 'default_changed':
|
||||
return 'CONFIGURE_OPTION';
|
||||
default:
|
||||
return 'REVIEW_CONFIGURATION';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Map severity to priority
|
||||
*/
|
||||
private mapSeverityToPriority(
|
||||
severity: 'LOW' | 'MEDIUM' | 'HIGH'
|
||||
): 'CRITICAL' | 'HIGH' | 'MEDIUM' | 'LOW' {
|
||||
if (severity === 'HIGH') return 'CRITICAL';
|
||||
return severity;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get documentation for a property (placeholder - would integrate with node docs)
|
||||
*/
|
||||
private getPropertyDocumentation(nodeType: string, propertyName: string): string {
|
||||
// In future, this would fetch from node documentation
|
||||
return `See n8n documentation for ${nodeType} - ${propertyName}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate overall confidence in the migration
|
||||
*/
|
||||
private calculateConfidence(
|
||||
requiredActions: RequiredAction[],
|
||||
migrationStatus: 'complete' | 'partial' | 'manual_required'
|
||||
): 'HIGH' | 'MEDIUM' | 'LOW' {
|
||||
if (migrationStatus === 'complete') return 'HIGH';
|
||||
|
||||
const criticalActions = requiredActions.filter(a => a.priority === 'CRITICAL');
|
||||
|
||||
if (migrationStatus === 'manual_required' || criticalActions.length > 3) {
|
||||
return 'LOW';
|
||||
}
|
||||
|
||||
return 'MEDIUM';
|
||||
}
|
||||
|
||||
/**
|
||||
* Estimate time required for manual migration steps
|
||||
*/
|
||||
private estimateTime(
|
||||
requiredActions: RequiredAction[],
|
||||
behaviorChanges: BehaviorChange[]
|
||||
): string {
|
||||
const criticalCount = requiredActions.filter(a => a.priority === 'CRITICAL').length;
|
||||
const highCount = requiredActions.filter(a => a.priority === 'HIGH').length;
|
||||
const behaviorCount = behaviorChanges.filter(c => c.actionRequired).length;
|
||||
|
||||
const totalComplexity = criticalCount * 5 + highCount * 3 + behaviorCount * 2;
|
||||
|
||||
if (totalComplexity === 0) return '< 1 minute';
|
||||
if (totalComplexity <= 5) return '2-5 minutes';
|
||||
if (totalComplexity <= 10) return '5-10 minutes';
|
||||
if (totalComplexity <= 20) return '10-20 minutes';
|
||||
return '20+ minutes';
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a human-readable summary for logging/display
|
||||
*/
|
||||
generateSummary(guidance: PostUpdateGuidance): string {
|
||||
const lines: string[] = [];
|
||||
|
||||
lines.push(`Node "${guidance.nodeName}" upgraded from v${guidance.oldVersion} to v${guidance.newVersion}`);
|
||||
lines.push(`Status: ${guidance.migrationStatus.toUpperCase()}`);
|
||||
lines.push(`Confidence: ${guidance.confidence}`);
|
||||
lines.push(`Estimated time: ${guidance.estimatedTime}`);
|
||||
|
||||
if (guidance.requiredActions.length > 0) {
|
||||
lines.push(`\nRequired actions: ${guidance.requiredActions.length}`);
|
||||
for (const action of guidance.requiredActions.slice(0, 3)) {
|
||||
lines.push(` - [${action.priority}] ${action.property}: ${action.reason}`);
|
||||
}
|
||||
if (guidance.requiredActions.length > 3) {
|
||||
lines.push(` ... and ${guidance.requiredActions.length - 3} more`);
|
||||
}
|
||||
}
|
||||
|
||||
if (guidance.behaviorChanges.length > 0) {
|
||||
lines.push(`\nBehavior changes: ${guidance.behaviorChanges.length}`);
|
||||
for (const change of guidance.behaviorChanges) {
|
||||
lines.push(` - ${change.aspect}: ${change.newBehavior}`);
|
||||
}
|
||||
}
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
}
|
||||
@@ -16,10 +16,6 @@ import {
|
||||
} from '../types/workflow-diff';
|
||||
import { WorkflowNode, Workflow } from '../types/n8n-api';
|
||||
import { Logger } from '../utils/logger';
|
||||
import { NodeVersionService } from './node-version-service';
|
||||
import { BreakingChangeDetector } from './breaking-change-detector';
|
||||
import { NodeMigrationService } from './node-migration-service';
|
||||
import { PostUpdateValidator, PostUpdateGuidance } from './post-update-validator';
|
||||
|
||||
const logger = new Logger({ prefix: '[WorkflowAutoFixer]' });
|
||||
|
||||
@@ -29,9 +25,7 @@ export type FixType =
|
||||
| 'typeversion-correction'
|
||||
| 'error-output-config'
|
||||
| 'node-type-correction'
|
||||
| 'webhook-missing-path'
|
||||
| 'typeversion-upgrade' // NEW: Proactive version upgrades
|
||||
| 'version-migration'; // NEW: Smart version migrations with breaking changes
|
||||
| 'webhook-missing-path';
|
||||
|
||||
export interface AutoFixConfig {
|
||||
applyFixes: boolean;
|
||||
@@ -59,7 +53,6 @@ export interface AutoFixResult {
|
||||
byType: Record<FixType, number>;
|
||||
byConfidence: Record<FixConfidenceLevel, number>;
|
||||
};
|
||||
postUpdateGuidance?: PostUpdateGuidance[]; // NEW: AI-friendly migration guidance
|
||||
}
|
||||
|
||||
export interface NodeFormatIssue extends ExpressionFormatIssue {
|
||||
@@ -98,34 +91,25 @@ export class WorkflowAutoFixer {
|
||||
maxFixes: 50
|
||||
};
|
||||
private similarityService: NodeSimilarityService | null = null;
|
||||
private versionService: NodeVersionService | null = null;
|
||||
private breakingChangeDetector: BreakingChangeDetector | null = null;
|
||||
private migrationService: NodeMigrationService | null = null;
|
||||
private postUpdateValidator: PostUpdateValidator | null = null;
|
||||
|
||||
constructor(repository?: NodeRepository) {
|
||||
if (repository) {
|
||||
this.similarityService = new NodeSimilarityService(repository);
|
||||
this.breakingChangeDetector = new BreakingChangeDetector(repository);
|
||||
this.versionService = new NodeVersionService(repository, this.breakingChangeDetector);
|
||||
this.migrationService = new NodeMigrationService(this.versionService, this.breakingChangeDetector);
|
||||
this.postUpdateValidator = new PostUpdateValidator(this.versionService, this.breakingChangeDetector);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate fix operations from validation results
|
||||
*/
|
||||
async generateFixes(
|
||||
generateFixes(
|
||||
workflow: Workflow,
|
||||
validationResult: WorkflowValidationResult,
|
||||
formatIssues: ExpressionFormatIssue[] = [],
|
||||
config: Partial<AutoFixConfig> = {}
|
||||
): Promise<AutoFixResult> {
|
||||
): AutoFixResult {
|
||||
const fullConfig = { ...this.defaultConfig, ...config };
|
||||
const operations: WorkflowDiffOperation[] = [];
|
||||
const fixes: FixOperation[] = [];
|
||||
const postUpdateGuidance: PostUpdateGuidance[] = [];
|
||||
|
||||
// Create a map for quick node lookup
|
||||
const nodeMap = new Map<string, WorkflowNode>();
|
||||
@@ -159,16 +143,6 @@ export class WorkflowAutoFixer {
|
||||
this.processWebhookPathFixes(validationResult, nodeMap, operations, fixes);
|
||||
}
|
||||
|
||||
// NEW: Process version upgrades (HIGH/MEDIUM confidence)
|
||||
if (!fullConfig.fixTypes || fullConfig.fixTypes.includes('typeversion-upgrade')) {
|
||||
await this.processVersionUpgradeFixes(workflow, nodeMap, operations, fixes, postUpdateGuidance);
|
||||
}
|
||||
|
||||
// NEW: Process version migrations with breaking changes (MEDIUM/LOW confidence)
|
||||
if (!fullConfig.fixTypes || fullConfig.fixTypes.includes('version-migration')) {
|
||||
await this.processVersionMigrationFixes(workflow, nodeMap, operations, fixes, postUpdateGuidance);
|
||||
}
|
||||
|
||||
// Filter by confidence threshold
|
||||
const filteredFixes = this.filterByConfidence(fixes, fullConfig.confidenceThreshold);
|
||||
const filteredOperations = this.filterOperationsByFixes(operations, filteredFixes, fixes);
|
||||
@@ -185,8 +159,7 @@ export class WorkflowAutoFixer {
|
||||
operations: limitedOperations,
|
||||
fixes: limitedFixes,
|
||||
summary,
|
||||
stats,
|
||||
postUpdateGuidance: postUpdateGuidance.length > 0 ? postUpdateGuidance : undefined
|
||||
stats
|
||||
};
|
||||
}
|
||||
|
||||
@@ -605,9 +578,7 @@ export class WorkflowAutoFixer {
|
||||
'typeversion-correction': 0,
|
||||
'error-output-config': 0,
|
||||
'node-type-correction': 0,
|
||||
'webhook-missing-path': 0,
|
||||
'typeversion-upgrade': 0,
|
||||
'version-migration': 0
|
||||
'webhook-missing-path': 0
|
||||
},
|
||||
byConfidence: {
|
||||
'high': 0,
|
||||
@@ -650,186 +621,10 @@ export class WorkflowAutoFixer {
|
||||
parts.push(`${stats.byType['webhook-missing-path']} webhook ${stats.byType['webhook-missing-path'] === 1 ? 'path' : 'paths'}`);
|
||||
}
|
||||
|
||||
if (stats.byType['typeversion-upgrade'] > 0) {
|
||||
parts.push(`${stats.byType['typeversion-upgrade']} version ${stats.byType['typeversion-upgrade'] === 1 ? 'upgrade' : 'upgrades'}`);
|
||||
}
|
||||
if (stats.byType['version-migration'] > 0) {
|
||||
parts.push(`${stats.byType['version-migration']} version ${stats.byType['version-migration'] === 1 ? 'migration' : 'migrations'}`);
|
||||
}
|
||||
|
||||
if (parts.length === 0) {
|
||||
return `Fixed ${stats.total} ${stats.total === 1 ? 'issue' : 'issues'}`;
|
||||
}
|
||||
|
||||
return `Fixed ${parts.join(', ')}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process version upgrade fixes (proactive upgrades to latest versions)
|
||||
* HIGH confidence for non-breaking upgrades, MEDIUM for upgrades with auto-migratable changes
|
||||
*/
|
||||
private async processVersionUpgradeFixes(
|
||||
workflow: Workflow,
|
||||
nodeMap: Map<string, WorkflowNode>,
|
||||
operations: WorkflowDiffOperation[],
|
||||
fixes: FixOperation[],
|
||||
postUpdateGuidance: PostUpdateGuidance[]
|
||||
): Promise<void> {
|
||||
if (!this.versionService || !this.migrationService || !this.postUpdateValidator) {
|
||||
logger.warn('Version services not initialized. Skipping version upgrade fixes.');
|
||||
return;
|
||||
}
|
||||
|
||||
for (const node of workflow.nodes) {
|
||||
if (!node.typeVersion || !node.type) continue;
|
||||
|
||||
const currentVersion = node.typeVersion.toString();
|
||||
const analysis = this.versionService.analyzeVersion(node.type, currentVersion);
|
||||
|
||||
// Only upgrade if outdated and recommended
|
||||
if (!analysis.isOutdated || !analysis.recommendUpgrade) continue;
|
||||
|
||||
// Skip if confidence is too low
|
||||
if (analysis.confidence === 'LOW') continue;
|
||||
|
||||
const latestVersion = analysis.latestVersion;
|
||||
|
||||
// Attempt migration
|
||||
try {
|
||||
const migrationResult = await this.migrationService.migrateNode(
|
||||
node,
|
||||
currentVersion,
|
||||
latestVersion
|
||||
);
|
||||
|
||||
// Create fix operation
|
||||
fixes.push({
|
||||
node: node.name,
|
||||
field: 'typeVersion',
|
||||
type: 'typeversion-upgrade',
|
||||
before: currentVersion,
|
||||
after: latestVersion,
|
||||
confidence: analysis.hasBreakingChanges ? 'medium' : 'high',
|
||||
description: `Upgrade ${node.name} from v${currentVersion} to v${latestVersion}. ${analysis.reason}`
|
||||
});
|
||||
|
||||
// Create update operation
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: node.id,
|
||||
updates: {
|
||||
typeVersion: parseFloat(latestVersion),
|
||||
parameters: migrationResult.updatedNode.parameters,
|
||||
...(migrationResult.updatedNode.webhookId && { webhookId: migrationResult.updatedNode.webhookId })
|
||||
}
|
||||
};
|
||||
operations.push(operation);
|
||||
|
||||
// Generate post-update guidance
|
||||
const guidance = await this.postUpdateValidator.generateGuidance(
|
||||
node.id,
|
||||
node.name,
|
||||
node.type,
|
||||
currentVersion,
|
||||
latestVersion,
|
||||
migrationResult
|
||||
);
|
||||
|
||||
postUpdateGuidance.push(guidance);
|
||||
|
||||
logger.info(`Generated version upgrade fix for ${node.name}: ${currentVersion} → ${latestVersion}`, {
|
||||
appliedMigrations: migrationResult.appliedMigrations.length,
|
||||
remainingIssues: migrationResult.remainingIssues.length
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(`Failed to process version upgrade for ${node.name}`, { error });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process version migration fixes (handle breaking changes with smart migrations)
|
||||
* MEDIUM/LOW confidence for migrations requiring manual intervention
|
||||
*/
|
||||
private async processVersionMigrationFixes(
|
||||
workflow: Workflow,
|
||||
nodeMap: Map<string, WorkflowNode>,
|
||||
operations: WorkflowDiffOperation[],
|
||||
fixes: FixOperation[],
|
||||
postUpdateGuidance: PostUpdateGuidance[]
|
||||
): Promise<void> {
|
||||
// This method handles migrations that weren't covered by typeversion-upgrade
|
||||
// Focuses on nodes with complex breaking changes that need manual review
|
||||
|
||||
if (!this.versionService || !this.breakingChangeDetector || !this.postUpdateValidator) {
|
||||
logger.warn('Version services not initialized. Skipping version migration fixes.');
|
||||
return;
|
||||
}
|
||||
|
||||
for (const node of workflow.nodes) {
|
||||
if (!node.typeVersion || !node.type) continue;
|
||||
|
||||
const currentVersion = node.typeVersion.toString();
|
||||
const latestVersion = this.versionService.getLatestVersion(node.type);
|
||||
|
||||
if (!latestVersion || currentVersion === latestVersion) continue;
|
||||
|
||||
// Check if this has breaking changes
|
||||
const hasBreaking = this.breakingChangeDetector.hasBreakingChanges(
|
||||
node.type,
|
||||
currentVersion,
|
||||
latestVersion
|
||||
);
|
||||
|
||||
if (!hasBreaking) continue; // Already handled by typeversion-upgrade
|
||||
|
||||
// Analyze the migration
|
||||
const analysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
|
||||
node.type,
|
||||
currentVersion,
|
||||
latestVersion
|
||||
);
|
||||
|
||||
// Only proceed if there are non-auto-migratable changes
|
||||
if (analysis.autoMigratableCount === analysis.changes.length) continue;
|
||||
|
||||
// Generate guidance for manual migration
|
||||
const guidance = await this.postUpdateValidator.generateGuidance(
|
||||
node.id,
|
||||
node.name,
|
||||
node.type,
|
||||
currentVersion,
|
||||
latestVersion,
|
||||
{
|
||||
success: false,
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
fromVersion: currentVersion,
|
||||
toVersion: latestVersion,
|
||||
appliedMigrations: [],
|
||||
remainingIssues: analysis.recommendations,
|
||||
confidence: analysis.overallSeverity === 'HIGH' ? 'LOW' : 'MEDIUM',
|
||||
updatedNode: node
|
||||
}
|
||||
);
|
||||
|
||||
// Create a fix entry (won't be auto-applied, just documented)
|
||||
fixes.push({
|
||||
node: node.name,
|
||||
field: 'typeVersion',
|
||||
type: 'version-migration',
|
||||
before: currentVersion,
|
||||
after: latestVersion,
|
||||
confidence: guidance.confidence === 'HIGH' ? 'medium' : 'low',
|
||||
description: `Version migration required: ${node.name} v${currentVersion} → v${latestVersion}. ${analysis.manualRequiredCount} manual action(s) required.`
|
||||
});
|
||||
|
||||
postUpdateGuidance.push(guidance);
|
||||
|
||||
logger.info(`Documented version migration for ${node.name}`, {
|
||||
breakingChanges: analysis.changes.filter(c => c.isBreaking).length,
|
||||
manualRequired: analysis.manualRequiredCount
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -36,9 +36,6 @@ import { sanitizeNode, sanitizeWorkflowNodes } from './node-sanitizer';
|
||||
const logger = new Logger({ prefix: '[WorkflowDiffEngine]' });
|
||||
|
||||
export class WorkflowDiffEngine {
|
||||
// Track node name changes during operations for connection reference updates
|
||||
private renameMap: Map<string, string> = new Map();
|
||||
|
||||
/**
|
||||
* Apply diff operations to a workflow
|
||||
*/
|
||||
@@ -47,9 +44,6 @@ export class WorkflowDiffEngine {
|
||||
request: WorkflowDiffRequest
|
||||
): Promise<WorkflowDiffResult> {
|
||||
try {
|
||||
// Reset rename tracking for this diff operation
|
||||
this.renameMap.clear();
|
||||
|
||||
// Clone workflow to avoid modifying original
|
||||
const workflowCopy = JSON.parse(JSON.stringify(workflow));
|
||||
|
||||
@@ -100,12 +94,6 @@ export class WorkflowDiffEngine {
|
||||
}
|
||||
}
|
||||
|
||||
// Update connection references after all node renames (even in continueOnError mode)
|
||||
if (this.renameMap.size > 0 && appliedIndices.length > 0) {
|
||||
this.updateConnectionReferences(workflowCopy);
|
||||
logger.debug(`Auto-updated ${this.renameMap.size} node name references in connections (continueOnError mode)`);
|
||||
}
|
||||
|
||||
// If validateOnly flag is set, return success without applying
|
||||
if (request.validateOnly) {
|
||||
return {
|
||||
@@ -159,12 +147,6 @@ export class WorkflowDiffEngine {
|
||||
}
|
||||
}
|
||||
|
||||
// Update connection references after all node renames
|
||||
if (this.renameMap.size > 0) {
|
||||
this.updateConnectionReferences(workflowCopy);
|
||||
logger.debug(`Auto-updated ${this.renameMap.size} node name references in connections`);
|
||||
}
|
||||
|
||||
// Pass 2: Validate and apply other operations (connections, metadata)
|
||||
for (const { operation, index } of otherOperations) {
|
||||
const error = this.validateOperation(workflowCopy, operation);
|
||||
@@ -371,23 +353,6 @@ export class WorkflowDiffEngine {
|
||||
if (!node) {
|
||||
return this.formatNodeNotFoundError(workflow, operation.nodeId || operation.nodeName || '', 'updateNode');
|
||||
}
|
||||
|
||||
// Check for name collision if renaming
|
||||
if (operation.updates.name && operation.updates.name !== node.name) {
|
||||
const normalizedNewName = this.normalizeNodeName(operation.updates.name);
|
||||
const normalizedCurrentName = this.normalizeNodeName(node.name);
|
||||
|
||||
// Only check collision if the names are actually different after normalization
|
||||
if (normalizedNewName !== normalizedCurrentName) {
|
||||
const collision = workflow.nodes.find(n =>
|
||||
n.id !== node.id && this.normalizeNodeName(n.name) === normalizedNewName
|
||||
);
|
||||
if (collision) {
|
||||
return `Cannot rename node "${node.name}" to "${operation.updates.name}": A node with that name already exists (id: ${collision.id.substring(0, 8)}...). Please choose a different name.`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -614,14 +579,6 @@ export class WorkflowDiffEngine {
|
||||
const node = this.findNode(workflow, operation.nodeId, operation.nodeName);
|
||||
if (!node) return;
|
||||
|
||||
// Track node renames for connection reference updates
|
||||
if (operation.updates.name && operation.updates.name !== node.name) {
|
||||
const oldName = node.name;
|
||||
const newName = operation.updates.name;
|
||||
this.renameMap.set(oldName, newName);
|
||||
logger.debug(`Tracking rename: "${oldName}" → "${newName}"`);
|
||||
}
|
||||
|
||||
// Apply updates using dot notation
|
||||
Object.entries(operation.updates).forEach(([path, value]) => {
|
||||
this.setNestedProperty(node, path, value);
|
||||
@@ -940,59 +897,6 @@ export class WorkflowDiffEngine {
|
||||
workflow.connections = operation.connections;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update all connection references when nodes are renamed.
|
||||
* This method is called after node operations to ensure connection integrity.
|
||||
*
|
||||
* Updates:
|
||||
* - Connection object keys (source node names)
|
||||
* - Connection target.node values (target node names)
|
||||
* - All output types (main, error, ai_tool, ai_languageModel, etc.)
|
||||
*
|
||||
* @param workflow - The workflow to update
|
||||
*/
|
||||
private updateConnectionReferences(workflow: Workflow): void {
|
||||
if (this.renameMap.size === 0) return;
|
||||
|
||||
logger.debug(`Updating connection references for ${this.renameMap.size} renamed nodes`);
|
||||
|
||||
// Create a mapping of all renames (old → new)
|
||||
const renames = new Map(this.renameMap);
|
||||
|
||||
// Step 1: Update connection object keys (source node names)
|
||||
const updatedConnections: WorkflowConnection = {};
|
||||
for (const [sourceName, outputs] of Object.entries(workflow.connections)) {
|
||||
// Check if this source node was renamed
|
||||
const newSourceName = renames.get(sourceName) || sourceName;
|
||||
updatedConnections[newSourceName] = outputs;
|
||||
}
|
||||
|
||||
// Step 2: Update target node references within connections
|
||||
for (const [sourceName, outputs] of Object.entries(updatedConnections)) {
|
||||
// Iterate through all output types (main, error, ai_tool, ai_languageModel, etc.)
|
||||
for (const [outputType, connections] of Object.entries(outputs)) {
|
||||
// connections is Array<Array<{node, type, index}>>
|
||||
for (let outputIndex = 0; outputIndex < connections.length; outputIndex++) {
|
||||
const connectionsAtIndex = connections[outputIndex];
|
||||
for (let connIndex = 0; connIndex < connectionsAtIndex.length; connIndex++) {
|
||||
const connection = connectionsAtIndex[connIndex];
|
||||
// Check if target node was renamed
|
||||
if (renames.has(connection.node)) {
|
||||
const newTargetName = renames.get(connection.node)!;
|
||||
connection.node = newTargetName;
|
||||
logger.debug(`Updated connection: ${sourceName}[${outputType}][${outputIndex}][${connIndex}].node: "${connection.node}" → "${newTargetName}"`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Replace workflow connections with updated connections
|
||||
workflow.connections = updatedConnections;
|
||||
|
||||
logger.info(`Auto-updated ${this.renameMap.size} node name references in connections`);
|
||||
}
|
||||
|
||||
// Helper methods
|
||||
|
||||
/**
|
||||
|
||||
@@ -11,8 +11,6 @@ import { NodeSimilarityService, NodeSuggestion } from './node-similarity-service
|
||||
import { NodeTypeNormalizer } from '../utils/node-type-normalizer';
|
||||
import { Logger } from '../utils/logger';
|
||||
import { validateAISpecificNodes, hasAINodes } from './ai-node-validator';
|
||||
import { isTriggerNode } from '../utils/node-type-utils';
|
||||
import { isNonExecutableNode } from '../utils/node-classification';
|
||||
const logger = new Logger({ prefix: '[WorkflowValidator]' });
|
||||
|
||||
interface WorkflowNode {
|
||||
@@ -87,8 +85,17 @@ export class WorkflowValidator {
|
||||
this.similarityService = new NodeSimilarityService(nodeRepository);
|
||||
}
|
||||
|
||||
// Note: isStickyNote logic moved to shared utility: src/utils/node-classification.ts
|
||||
// Use isNonExecutableNode(node.type) instead
|
||||
/**
|
||||
* Check if a node is a Sticky Note or other non-executable node
|
||||
*/
|
||||
private isStickyNote(node: WorkflowNode): boolean {
|
||||
const stickyNoteTypes = [
|
||||
'n8n-nodes-base.stickyNote',
|
||||
'nodes-base.stickyNote',
|
||||
'@n8n/n8n-nodes-base.stickyNote'
|
||||
];
|
||||
return stickyNoteTypes.includes(node.type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a complete workflow
|
||||
@@ -139,7 +146,7 @@ export class WorkflowValidator {
|
||||
}
|
||||
|
||||
// Update statistics after null check (exclude sticky notes from counts)
|
||||
const executableNodes = Array.isArray(workflow.nodes) ? workflow.nodes.filter(n => !isNonExecutableNode(n.type)) : [];
|
||||
const executableNodes = Array.isArray(workflow.nodes) ? workflow.nodes.filter(n => !this.isStickyNote(n)) : [];
|
||||
result.statistics.totalNodes = executableNodes.length;
|
||||
result.statistics.enabledNodes = executableNodes.filter(n => !n.disabled).length;
|
||||
|
||||
@@ -319,8 +326,16 @@ export class WorkflowValidator {
|
||||
nodeIds.add(node.id);
|
||||
}
|
||||
|
||||
// Count trigger nodes using shared trigger detection
|
||||
const triggerNodes = workflow.nodes.filter(n => isTriggerNode(n.type));
|
||||
// Count trigger nodes - normalize type names first
|
||||
const triggerNodes = workflow.nodes.filter(n => {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(n.type);
|
||||
const lowerType = normalizedType.toLowerCase();
|
||||
return lowerType.includes('trigger') ||
|
||||
(lowerType.includes('webhook') && !lowerType.includes('respond')) ||
|
||||
normalizedType === 'nodes-base.start' ||
|
||||
normalizedType === 'nodes-base.manualTrigger' ||
|
||||
normalizedType === 'nodes-base.formTrigger';
|
||||
});
|
||||
result.statistics.triggerNodes = triggerNodes.length;
|
||||
|
||||
// Check for at least one trigger node
|
||||
@@ -341,7 +356,7 @@ export class WorkflowValidator {
|
||||
profile: string
|
||||
): Promise<void> {
|
||||
for (const node of workflow.nodes) {
|
||||
if (node.disabled || isNonExecutableNode(node.type)) continue;
|
||||
if (node.disabled || this.isStickyNote(node)) continue;
|
||||
|
||||
try {
|
||||
// Validate node name length
|
||||
@@ -617,12 +632,16 @@ export class WorkflowValidator {
|
||||
|
||||
// Check for orphaned nodes (exclude sticky notes)
|
||||
for (const node of workflow.nodes) {
|
||||
if (node.disabled || isNonExecutableNode(node.type)) continue;
|
||||
if (node.disabled || this.isStickyNote(node)) continue;
|
||||
|
||||
// Use shared trigger detection function for consistency
|
||||
const isNodeTrigger = isTriggerNode(node.type);
|
||||
|
||||
if (!connectedNodes.has(node.name) && !isNodeTrigger) {
|
||||
const normalizedType = NodeTypeNormalizer.normalizeToFullForm(node.type);
|
||||
const isTrigger = normalizedType.toLowerCase().includes('trigger') ||
|
||||
normalizedType.toLowerCase().includes('webhook') ||
|
||||
normalizedType === 'nodes-base.start' ||
|
||||
normalizedType === 'nodes-base.manualTrigger' ||
|
||||
normalizedType === 'nodes-base.formTrigger';
|
||||
|
||||
if (!connectedNodes.has(node.name) && !isTrigger) {
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: node.id,
|
||||
@@ -858,7 +877,7 @@ export class WorkflowValidator {
|
||||
|
||||
// Build node type map (exclude sticky notes)
|
||||
workflow.nodes.forEach(node => {
|
||||
if (!isNonExecutableNode(node.type)) {
|
||||
if (!this.isStickyNote(node)) {
|
||||
nodeTypeMap.set(node.name, node.type);
|
||||
}
|
||||
});
|
||||
@@ -926,7 +945,7 @@ export class WorkflowValidator {
|
||||
|
||||
// Check from all executable nodes (exclude sticky notes)
|
||||
for (const node of workflow.nodes) {
|
||||
if (!isNonExecutableNode(node.type) && !visited.has(node.name)) {
|
||||
if (!this.isStickyNote(node) && !visited.has(node.name)) {
|
||||
if (hasCycleDFS(node.name)) return true;
|
||||
}
|
||||
}
|
||||
@@ -945,7 +964,7 @@ export class WorkflowValidator {
|
||||
const nodeNames = workflow.nodes.map(n => n.name);
|
||||
|
||||
for (const node of workflow.nodes) {
|
||||
if (node.disabled || isNonExecutableNode(node.type)) continue;
|
||||
if (node.disabled || this.isStickyNote(node)) continue;
|
||||
|
||||
// Skip expression validation for langchain nodes
|
||||
// They have AI-specific validators and different expression rules
|
||||
@@ -1092,7 +1111,7 @@ export class WorkflowValidator {
|
||||
|
||||
// Check node-level error handling properties for ALL executable nodes
|
||||
for (const node of workflow.nodes) {
|
||||
if (!isNonExecutableNode(node.type)) {
|
||||
if (!this.isStickyNote(node)) {
|
||||
this.checkNodeErrorHandling(node, workflow, result);
|
||||
}
|
||||
}
|
||||
@@ -1273,15 +1292,6 @@ export class WorkflowValidator {
|
||||
|
||||
/**
|
||||
* Check node-level error handling configuration for a single node
|
||||
*
|
||||
* Validates error handling properties (onError, continueOnFail, retryOnFail)
|
||||
* and provides warnings for error-prone nodes (HTTP, webhooks, databases)
|
||||
* that lack proper error handling. Delegates webhook-specific validation
|
||||
* to checkWebhookErrorHandling() for clearer logic.
|
||||
*
|
||||
* @param node - The workflow node to validate
|
||||
* @param workflow - The complete workflow for context
|
||||
* @param result - Validation result to add errors/warnings to
|
||||
*/
|
||||
private checkNodeErrorHandling(
|
||||
node: WorkflowNode,
|
||||
@@ -1492,8 +1502,12 @@ export class WorkflowValidator {
|
||||
message: 'HTTP Request node without error handling. Consider adding "onError: \'continueRegularOutput\'" for non-critical requests or "retryOnFail: true" for transient failures.'
|
||||
});
|
||||
} else if (normalizedType.includes('webhook')) {
|
||||
// Delegate to specialized webhook validation helper
|
||||
this.checkWebhookErrorHandling(node, normalizedType, result);
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
message: 'Webhook node without error handling. Consider adding "onError: \'continueRegularOutput\'" to prevent workflow failures from blocking webhook responses.'
|
||||
});
|
||||
} else if (errorProneNodeTypes.some(db => normalizedType.includes(db) && ['postgres', 'mysql', 'mongodb'].includes(db))) {
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
@@ -1584,52 +1598,6 @@ export class WorkflowValidator {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Check webhook-specific error handling requirements
|
||||
*
|
||||
* Webhooks have special error handling requirements:
|
||||
* - respondToWebhook nodes (response nodes) don't need error handling
|
||||
* - Webhook nodes with responseNode mode REQUIRE onError to ensure responses
|
||||
* - Regular webhook nodes should have error handling to prevent blocking
|
||||
*
|
||||
* @param node - The webhook node to check
|
||||
* @param normalizedType - Normalized node type for comparison
|
||||
* @param result - Validation result to add errors/warnings to
|
||||
*/
|
||||
private checkWebhookErrorHandling(
|
||||
node: WorkflowNode,
|
||||
normalizedType: string,
|
||||
result: WorkflowValidationResult
|
||||
): void {
|
||||
// respondToWebhook nodes are response nodes (endpoints), not triggers
|
||||
// They're the END of execution, not controllers of flow - skip error handling check
|
||||
if (normalizedType.includes('respondtowebhook')) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for responseNode mode specifically
|
||||
// responseNode mode requires onError to ensure response is sent even on error
|
||||
if (node.parameters?.responseMode === 'responseNode') {
|
||||
if (!node.onError && !node.continueOnFail) {
|
||||
result.errors.push({
|
||||
type: 'error',
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
message: 'responseNode mode requires onError: "continueRegularOutput"'
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Regular webhook nodes without responseNode mode
|
||||
result.warnings.push({
|
||||
type: 'warning',
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
message: 'Webhook node without error handling. Consider adding "onError: \'continueRegularOutput\'" to prevent workflow failures from blocking webhook responses.'
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate error handling suggestions based on all nodes
|
||||
*/
|
||||
|
||||
@@ -1,460 +0,0 @@
|
||||
/**
|
||||
* Workflow Versioning Service
|
||||
*
|
||||
* Provides workflow backup, versioning, rollback, and cleanup capabilities.
|
||||
* Automatically prunes to 10 versions per workflow to prevent memory leaks.
|
||||
*/
|
||||
|
||||
import { NodeRepository } from '../database/node-repository';
|
||||
import { N8nApiClient } from './n8n-api-client';
|
||||
import { WorkflowValidator } from './workflow-validator';
|
||||
import { EnhancedConfigValidator } from './enhanced-config-validator';
|
||||
|
||||
export interface WorkflowVersion {
|
||||
id: number;
|
||||
workflowId: string;
|
||||
versionNumber: number;
|
||||
workflowName: string;
|
||||
workflowSnapshot: any;
|
||||
trigger: 'partial_update' | 'full_update' | 'autofix';
|
||||
operations?: any[];
|
||||
fixTypes?: string[];
|
||||
metadata?: any;
|
||||
createdAt: string;
|
||||
}
|
||||
|
||||
export interface VersionInfo {
|
||||
id: number;
|
||||
workflowId: string;
|
||||
versionNumber: number;
|
||||
workflowName: string;
|
||||
trigger: string;
|
||||
operationCount?: number;
|
||||
fixTypesApplied?: string[];
|
||||
createdAt: string;
|
||||
size: number; // Size in bytes
|
||||
}
|
||||
|
||||
export interface RestoreResult {
|
||||
success: boolean;
|
||||
message: string;
|
||||
workflowId: string;
|
||||
fromVersion?: number;
|
||||
toVersionId: number;
|
||||
backupCreated: boolean;
|
||||
backupVersionId?: number;
|
||||
validationErrors?: string[];
|
||||
}
|
||||
|
||||
export interface BackupResult {
|
||||
versionId: number;
|
||||
versionNumber: number;
|
||||
pruned: number;
|
||||
message: string;
|
||||
}
|
||||
|
||||
export interface StorageStats {
|
||||
totalVersions: number;
|
||||
totalSize: number;
|
||||
totalSizeFormatted: string;
|
||||
byWorkflow: WorkflowStorageInfo[];
|
||||
}
|
||||
|
||||
export interface WorkflowStorageInfo {
|
||||
workflowId: string;
|
||||
workflowName: string;
|
||||
versionCount: number;
|
||||
totalSize: number;
|
||||
totalSizeFormatted: string;
|
||||
lastBackup: string;
|
||||
}
|
||||
|
||||
export interface VersionDiff {
|
||||
versionId1: number;
|
||||
versionId2: number;
|
||||
version1Number: number;
|
||||
version2Number: number;
|
||||
addedNodes: string[];
|
||||
removedNodes: string[];
|
||||
modifiedNodes: string[];
|
||||
connectionChanges: number;
|
||||
settingChanges: any;
|
||||
}
|
||||
|
||||
/**
|
||||
* Workflow Versioning Service
|
||||
*/
|
||||
export class WorkflowVersioningService {
|
||||
private readonly DEFAULT_MAX_VERSIONS = 10;
|
||||
|
||||
constructor(
|
||||
private nodeRepository: NodeRepository,
|
||||
private apiClient?: N8nApiClient
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Create backup before modification
|
||||
* Automatically prunes to 10 versions after backup creation
|
||||
*/
|
||||
async createBackup(
|
||||
workflowId: string,
|
||||
workflow: any,
|
||||
context: {
|
||||
trigger: 'partial_update' | 'full_update' | 'autofix';
|
||||
operations?: any[];
|
||||
fixTypes?: string[];
|
||||
metadata?: any;
|
||||
}
|
||||
): Promise<BackupResult> {
|
||||
// Get current max version number
|
||||
const versions = this.nodeRepository.getWorkflowVersions(workflowId, 1);
|
||||
const nextVersion = versions.length > 0 ? versions[0].versionNumber + 1 : 1;
|
||||
|
||||
// Create new version
|
||||
const versionId = this.nodeRepository.createWorkflowVersion({
|
||||
workflowId,
|
||||
versionNumber: nextVersion,
|
||||
workflowName: workflow.name || 'Unnamed Workflow',
|
||||
workflowSnapshot: workflow,
|
||||
trigger: context.trigger,
|
||||
operations: context.operations,
|
||||
fixTypes: context.fixTypes,
|
||||
metadata: context.metadata
|
||||
});
|
||||
|
||||
// Auto-prune to keep max 10 versions
|
||||
const pruned = this.nodeRepository.pruneWorkflowVersions(
|
||||
workflowId,
|
||||
this.DEFAULT_MAX_VERSIONS
|
||||
);
|
||||
|
||||
return {
|
||||
versionId,
|
||||
versionNumber: nextVersion,
|
||||
pruned,
|
||||
message: pruned > 0
|
||||
? `Backup created (version ${nextVersion}), pruned ${pruned} old version(s)`
|
||||
: `Backup created (version ${nextVersion})`
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get version history for a workflow
|
||||
*/
|
||||
async getVersionHistory(workflowId: string, limit: number = 10): Promise<VersionInfo[]> {
|
||||
const versions = this.nodeRepository.getWorkflowVersions(workflowId, limit);
|
||||
|
||||
return versions.map(v => ({
|
||||
id: v.id,
|
||||
workflowId: v.workflowId,
|
||||
versionNumber: v.versionNumber,
|
||||
workflowName: v.workflowName,
|
||||
trigger: v.trigger,
|
||||
operationCount: v.operations ? v.operations.length : undefined,
|
||||
fixTypesApplied: v.fixTypes || undefined,
|
||||
createdAt: v.createdAt,
|
||||
size: JSON.stringify(v.workflowSnapshot).length
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a specific workflow version
|
||||
*/
|
||||
async getVersion(versionId: number): Promise<WorkflowVersion | null> {
|
||||
return this.nodeRepository.getWorkflowVersion(versionId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore workflow to a previous version
|
||||
* Creates backup of current state before restoring
|
||||
*/
|
||||
async restoreVersion(
|
||||
workflowId: string,
|
||||
versionId?: number,
|
||||
validateBefore: boolean = true
|
||||
): Promise<RestoreResult> {
|
||||
if (!this.apiClient) {
|
||||
return {
|
||||
success: false,
|
||||
message: 'API client not configured - cannot restore workflow',
|
||||
workflowId,
|
||||
toVersionId: versionId || 0,
|
||||
backupCreated: false
|
||||
};
|
||||
}
|
||||
|
||||
// Get the version to restore
|
||||
let versionToRestore: WorkflowVersion | null = null;
|
||||
|
||||
if (versionId) {
|
||||
versionToRestore = this.nodeRepository.getWorkflowVersion(versionId);
|
||||
} else {
|
||||
// Get latest backup
|
||||
versionToRestore = this.nodeRepository.getLatestWorkflowVersion(workflowId);
|
||||
}
|
||||
|
||||
if (!versionToRestore) {
|
||||
return {
|
||||
success: false,
|
||||
message: versionId
|
||||
? `Version ${versionId} not found`
|
||||
: `No backup versions found for workflow ${workflowId}`,
|
||||
workflowId,
|
||||
toVersionId: versionId || 0,
|
||||
backupCreated: false
|
||||
};
|
||||
}
|
||||
|
||||
// Validate workflow structure if requested
|
||||
if (validateBefore) {
|
||||
const validator = new WorkflowValidator(this.nodeRepository, EnhancedConfigValidator);
|
||||
const validationResult = await validator.validateWorkflow(
|
||||
versionToRestore.workflowSnapshot,
|
||||
{
|
||||
validateNodes: true,
|
||||
validateConnections: true,
|
||||
validateExpressions: false,
|
||||
profile: 'runtime'
|
||||
}
|
||||
);
|
||||
|
||||
if (validationResult.errors.length > 0) {
|
||||
return {
|
||||
success: false,
|
||||
message: `Cannot restore - version ${versionToRestore.versionNumber} has validation errors`,
|
||||
workflowId,
|
||||
toVersionId: versionToRestore.id,
|
||||
backupCreated: false,
|
||||
validationErrors: validationResult.errors.map(e => e.message || 'Unknown error')
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Create backup of current workflow before restoring
|
||||
let backupResult: BackupResult | undefined;
|
||||
try {
|
||||
const currentWorkflow = await this.apiClient.getWorkflow(workflowId);
|
||||
backupResult = await this.createBackup(workflowId, currentWorkflow, {
|
||||
trigger: 'partial_update',
|
||||
metadata: {
|
||||
reason: 'Backup before rollback',
|
||||
restoringToVersion: versionToRestore.versionNumber
|
||||
}
|
||||
});
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
message: `Failed to create backup before restore: ${error.message}`,
|
||||
workflowId,
|
||||
toVersionId: versionToRestore.id,
|
||||
backupCreated: false
|
||||
};
|
||||
}
|
||||
|
||||
// Restore the workflow
|
||||
try {
|
||||
await this.apiClient.updateWorkflow(workflowId, versionToRestore.workflowSnapshot);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Successfully restored workflow to version ${versionToRestore.versionNumber}`,
|
||||
workflowId,
|
||||
fromVersion: backupResult.versionNumber,
|
||||
toVersionId: versionToRestore.id,
|
||||
backupCreated: true,
|
||||
backupVersionId: backupResult.versionId
|
||||
};
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
message: `Failed to restore workflow: ${error.message}`,
|
||||
workflowId,
|
||||
toVersionId: versionToRestore.id,
|
||||
backupCreated: true,
|
||||
backupVersionId: backupResult.versionId
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a specific version
|
||||
*/
|
||||
async deleteVersion(versionId: number): Promise<{ success: boolean; message: string }> {
|
||||
const version = this.nodeRepository.getWorkflowVersion(versionId);
|
||||
|
||||
if (!version) {
|
||||
return {
|
||||
success: false,
|
||||
message: `Version ${versionId} not found`
|
||||
};
|
||||
}
|
||||
|
||||
this.nodeRepository.deleteWorkflowVersion(versionId);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Deleted version ${version.versionNumber} for workflow ${version.workflowId}`
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete all versions for a workflow
|
||||
*/
|
||||
async deleteAllVersions(workflowId: string): Promise<{ deleted: number; message: string }> {
|
||||
const count = this.nodeRepository.getWorkflowVersionCount(workflowId);
|
||||
|
||||
if (count === 0) {
|
||||
return {
|
||||
deleted: 0,
|
||||
message: `No versions found for workflow ${workflowId}`
|
||||
};
|
||||
}
|
||||
|
||||
const deleted = this.nodeRepository.deleteWorkflowVersionsByWorkflowId(workflowId);
|
||||
|
||||
return {
|
||||
deleted,
|
||||
message: `Deleted ${deleted} version(s) for workflow ${workflowId}`
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually trigger pruning for a workflow
|
||||
*/
|
||||
async pruneVersions(
|
||||
workflowId: string,
|
||||
maxVersions: number = 10
|
||||
): Promise<{ pruned: number; remaining: number }> {
|
||||
const pruned = this.nodeRepository.pruneWorkflowVersions(workflowId, maxVersions);
|
||||
const remaining = this.nodeRepository.getWorkflowVersionCount(workflowId);
|
||||
|
||||
return { pruned, remaining };
|
||||
}
|
||||
|
||||
/**
|
||||
* Truncate entire workflow_versions table
|
||||
* Requires explicit confirmation
|
||||
*/
|
||||
async truncateAllVersions(confirm: boolean): Promise<{ deleted: number; message: string }> {
|
||||
if (!confirm) {
|
||||
return {
|
||||
deleted: 0,
|
||||
message: 'Truncate operation not confirmed - no action taken'
|
||||
};
|
||||
}
|
||||
|
||||
const deleted = this.nodeRepository.truncateWorkflowVersions();
|
||||
|
||||
return {
|
||||
deleted,
|
||||
message: `Truncated workflow_versions table - deleted ${deleted} version(s)`
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get storage statistics
|
||||
*/
|
||||
async getStorageStats(): Promise<StorageStats> {
|
||||
const stats = this.nodeRepository.getVersionStorageStats();
|
||||
|
||||
return {
|
||||
totalVersions: stats.totalVersions,
|
||||
totalSize: stats.totalSize,
|
||||
totalSizeFormatted: this.formatBytes(stats.totalSize),
|
||||
byWorkflow: stats.byWorkflow.map((w: any) => ({
|
||||
workflowId: w.workflowId,
|
||||
workflowName: w.workflowName,
|
||||
versionCount: w.versionCount,
|
||||
totalSize: w.totalSize,
|
||||
totalSizeFormatted: this.formatBytes(w.totalSize),
|
||||
lastBackup: w.lastBackup
|
||||
}))
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare two versions
|
||||
*/
|
||||
async compareVersions(versionId1: number, versionId2: number): Promise<VersionDiff> {
|
||||
const v1 = this.nodeRepository.getWorkflowVersion(versionId1);
|
||||
const v2 = this.nodeRepository.getWorkflowVersion(versionId2);
|
||||
|
||||
if (!v1 || !v2) {
|
||||
throw new Error(`One or both versions not found: ${versionId1}, ${versionId2}`);
|
||||
}
|
||||
|
||||
// Compare nodes
|
||||
const nodes1 = new Set<string>(v1.workflowSnapshot.nodes?.map((n: any) => n.id as string) || []);
|
||||
const nodes2 = new Set<string>(v2.workflowSnapshot.nodes?.map((n: any) => n.id as string) || []);
|
||||
|
||||
const addedNodes: string[] = [...nodes2].filter(id => !nodes1.has(id));
|
||||
const removedNodes: string[] = [...nodes1].filter(id => !nodes2.has(id));
|
||||
const commonNodes = [...nodes1].filter(id => nodes2.has(id));
|
||||
|
||||
// Check for modified nodes
|
||||
const modifiedNodes: string[] = [];
|
||||
for (const nodeId of commonNodes) {
|
||||
const node1 = v1.workflowSnapshot.nodes?.find((n: any) => n.id === nodeId);
|
||||
const node2 = v2.workflowSnapshot.nodes?.find((n: any) => n.id === nodeId);
|
||||
|
||||
if (JSON.stringify(node1) !== JSON.stringify(node2)) {
|
||||
modifiedNodes.push(nodeId);
|
||||
}
|
||||
}
|
||||
|
||||
// Compare connections
|
||||
const conn1Str = JSON.stringify(v1.workflowSnapshot.connections || {});
|
||||
const conn2Str = JSON.stringify(v2.workflowSnapshot.connections || {});
|
||||
const connectionChanges = conn1Str !== conn2Str ? 1 : 0;
|
||||
|
||||
// Compare settings
|
||||
const settings1 = v1.workflowSnapshot.settings || {};
|
||||
const settings2 = v2.workflowSnapshot.settings || {};
|
||||
const settingChanges = this.diffObjects(settings1, settings2);
|
||||
|
||||
return {
|
||||
versionId1,
|
||||
versionId2,
|
||||
version1Number: v1.versionNumber,
|
||||
version2Number: v2.versionNumber,
|
||||
addedNodes,
|
||||
removedNodes,
|
||||
modifiedNodes,
|
||||
connectionChanges,
|
||||
settingChanges
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Format bytes to human-readable string
|
||||
*/
|
||||
private formatBytes(bytes: number): string {
|
||||
if (bytes === 0) return '0 Bytes';
|
||||
|
||||
const k = 1024;
|
||||
const sizes = ['Bytes', 'KB', 'MB', 'GB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||
|
||||
return Math.round((bytes / Math.pow(k, i)) * 100) / 100 + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple object diff
|
||||
*/
|
||||
private diffObjects(obj1: any, obj2: any): any {
|
||||
const changes: any = {};
|
||||
|
||||
const allKeys = new Set([...Object.keys(obj1), ...Object.keys(obj2)]);
|
||||
|
||||
for (const key of allKeys) {
|
||||
if (JSON.stringify(obj1[key]) !== JSON.stringify(obj2[key])) {
|
||||
changes[key] = {
|
||||
before: obj1[key],
|
||||
after: obj2[key]
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return changes;
|
||||
}
|
||||
}
|
||||
@@ -1,109 +0,0 @@
|
||||
/**
|
||||
* Utility functions for detecting and handling n8n expressions
|
||||
*/
|
||||
|
||||
/**
|
||||
* Detects if a value is an n8n expression
|
||||
*
|
||||
* n8n expressions can be:
|
||||
* - Pure expression: `={{ $json.value }}`
|
||||
* - Mixed content: `=https://api.com/{{ $json.id }}/data`
|
||||
* - Prefix-only: `=$json.value`
|
||||
*
|
||||
* @param value - The value to check
|
||||
* @returns true if the value is an expression (starts with =)
|
||||
*/
|
||||
export function isExpression(value: unknown): value is string {
|
||||
return typeof value === 'string' && value.startsWith('=');
|
||||
}
|
||||
|
||||
/**
|
||||
* Detects if a string contains n8n expression syntax {{ }}
|
||||
*
|
||||
* This checks for expression markers within the string,
|
||||
* regardless of whether it has the = prefix.
|
||||
*
|
||||
* @param value - The value to check
|
||||
* @returns true if the value contains {{ }} markers
|
||||
*/
|
||||
export function containsExpression(value: unknown): boolean {
|
||||
if (typeof value !== 'string') {
|
||||
return false;
|
||||
}
|
||||
// Use single regex for better performance than two includes()
|
||||
return /\{\{.*\}\}/s.test(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Detects if a value should skip literal validation
|
||||
*
|
||||
* This is the main utility to use before validating values like URLs, JSON, etc.
|
||||
* It returns true if:
|
||||
* - The value is an expression (starts with =)
|
||||
* - OR the value contains expression markers {{ }}
|
||||
*
|
||||
* @param value - The value to check
|
||||
* @returns true if validation should be skipped
|
||||
*/
|
||||
export function shouldSkipLiteralValidation(value: unknown): boolean {
|
||||
return isExpression(value) || containsExpression(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the expression content from a value
|
||||
*
|
||||
* If value is `={{ $json.value }}`, returns `$json.value`
|
||||
* If value is `=$json.value`, returns `$json.value`
|
||||
* If value is not an expression, returns the original value
|
||||
*
|
||||
* @param value - The value to extract from
|
||||
* @returns The expression content or original value
|
||||
*/
|
||||
export function extractExpressionContent(value: string): string {
|
||||
if (!isExpression(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
const withoutPrefix = value.substring(1); // Remove =
|
||||
|
||||
// Check if it's wrapped in {{ }}
|
||||
const match = withoutPrefix.match(/^\{\{(.+)\}\}$/s);
|
||||
if (match) {
|
||||
return match[1].trim();
|
||||
}
|
||||
|
||||
return withoutPrefix;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a value is a mixed content expression
|
||||
*
|
||||
* Mixed content has both literal text and expressions:
|
||||
* - `Hello {{ $json.name }}!`
|
||||
* - `https://api.com/{{ $json.id }}/data`
|
||||
*
|
||||
* @param value - The value to check
|
||||
* @returns true if the value has mixed content
|
||||
*/
|
||||
export function hasMixedContent(value: unknown): boolean {
|
||||
// Type guard first to avoid calling containsExpression on non-strings
|
||||
if (typeof value !== 'string') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!containsExpression(value)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If it's wrapped entirely in {{ }}, it's not mixed
|
||||
const trimmed = value.trim();
|
||||
if (trimmed.startsWith('={{') && trimmed.endsWith('}}')) {
|
||||
// Check if there's only one pair of {{ }}
|
||||
const count = (trimmed.match(/\{\{/g) || []).length;
|
||||
if (count === 1) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
@@ -1,121 +0,0 @@
|
||||
/**
|
||||
* Node Classification Utilities
|
||||
*
|
||||
* Provides shared classification logic for workflow nodes.
|
||||
* Used by validators to consistently identify node types across the codebase.
|
||||
*
|
||||
* This module centralizes node type classification to ensure consistent behavior
|
||||
* between WorkflowValidator and n8n-validation.ts, preventing bugs like sticky
|
||||
* notes being incorrectly flagged as disconnected nodes.
|
||||
*/
|
||||
|
||||
import { isTriggerNode as isTriggerNodeImpl } from './node-type-utils';
|
||||
|
||||
/**
|
||||
* Check if a node type is a sticky note (documentation-only node)
|
||||
*
|
||||
* Sticky notes are UI-only annotation nodes that:
|
||||
* - Do not participate in workflow execution
|
||||
* - Never have connections (by design)
|
||||
* - Should be excluded from connection validation
|
||||
* - Serve purely as visual documentation in the workflow canvas
|
||||
*
|
||||
* Example sticky note types:
|
||||
* - 'n8n-nodes-base.stickyNote' (standard format)
|
||||
* - 'nodes-base.stickyNote' (normalized format)
|
||||
* - '@n8n/n8n-nodes-base.stickyNote' (scoped format)
|
||||
*
|
||||
* @param nodeType - The node type to check (e.g., 'n8n-nodes-base.stickyNote')
|
||||
* @returns true if the node is a sticky note, false otherwise
|
||||
*/
|
||||
export function isStickyNote(nodeType: string): boolean {
|
||||
const stickyNoteTypes = [
|
||||
'n8n-nodes-base.stickyNote',
|
||||
'nodes-base.stickyNote',
|
||||
'@n8n/n8n-nodes-base.stickyNote'
|
||||
];
|
||||
return stickyNoteTypes.includes(nodeType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a node type is a trigger node
|
||||
*
|
||||
* This function delegates to the comprehensive trigger detection implementation
|
||||
* in node-type-utils.ts which supports 200+ trigger types using flexible
|
||||
* pattern matching instead of a hardcoded list.
|
||||
*
|
||||
* Trigger nodes:
|
||||
* - Start workflow execution
|
||||
* - Only need outgoing connections (no incoming connections required)
|
||||
* - Include webhooks, manual triggers, schedule triggers, email triggers, etc.
|
||||
* - Are the entry points for workflow execution
|
||||
*
|
||||
* Examples:
|
||||
* - Webhooks: Listen for HTTP requests
|
||||
* - Manual triggers: Started manually by user
|
||||
* - Schedule/Cron triggers: Run on a schedule
|
||||
* - Execute Workflow Trigger: Invoked by other workflows
|
||||
*
|
||||
* @param nodeType - The node type to check
|
||||
* @returns true if the node is a trigger, false otherwise
|
||||
*/
|
||||
export function isTriggerNode(nodeType: string): boolean {
|
||||
return isTriggerNodeImpl(nodeType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a node type is non-executable (UI-only)
|
||||
*
|
||||
* Non-executable nodes:
|
||||
* - Do not participate in workflow execution
|
||||
* - Serve documentation/annotation purposes only
|
||||
* - Should be excluded from all execution-related validation
|
||||
* - Should be excluded from statistics like "total executable nodes"
|
||||
* - Should be excluded from connection validation
|
||||
*
|
||||
* Currently includes: sticky notes
|
||||
*
|
||||
* Future: May include other annotation/comment nodes if n8n adds them
|
||||
*
|
||||
* @param nodeType - The node type to check
|
||||
* @returns true if the node is non-executable, false otherwise
|
||||
*/
|
||||
export function isNonExecutableNode(nodeType: string): boolean {
|
||||
return isStickyNote(nodeType);
|
||||
// Future: Add other non-executable node types here
|
||||
// Example: || isCommentNode(nodeType) || isAnnotationNode(nodeType)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a node type requires incoming connections
|
||||
*
|
||||
* Most nodes require at least one incoming connection to receive data,
|
||||
* but there are two categories of exceptions:
|
||||
*
|
||||
* 1. Trigger nodes: Only need outgoing connections
|
||||
* - They start workflow execution
|
||||
* - They generate their own data
|
||||
* - Examples: webhook, manualTrigger, scheduleTrigger
|
||||
*
|
||||
* 2. Non-executable nodes: Don't need any connections
|
||||
* - They are UI-only annotations
|
||||
* - They don't participate in execution
|
||||
* - Examples: stickyNote
|
||||
*
|
||||
* @param nodeType - The node type to check
|
||||
* @returns true if the node requires incoming connections, false otherwise
|
||||
*/
|
||||
export function requiresIncomingConnection(nodeType: string): boolean {
|
||||
// Non-executable nodes don't need any connections
|
||||
if (isNonExecutableNode(nodeType)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Trigger nodes only need outgoing connections
|
||||
if (isTriggerNode(nodeType)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Regular nodes need incoming connections
|
||||
return true;
|
||||
}
|
||||
@@ -140,116 +140,4 @@ export function getNodeTypeVariations(type: string): string[] {
|
||||
|
||||
// Remove duplicates while preserving order
|
||||
return [...new Set(variations)];
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a node is ANY type of trigger (including executeWorkflowTrigger)
|
||||
*
|
||||
* This function determines if a node can start a workflow execution.
|
||||
* Returns true for:
|
||||
* - Webhook triggers (webhook, webhookTrigger)
|
||||
* - Time-based triggers (schedule, cron)
|
||||
* - Poll-based triggers (emailTrigger, slackTrigger, etc.)
|
||||
* - Manual triggers (manualTrigger, start, formTrigger)
|
||||
* - Sub-workflow triggers (executeWorkflowTrigger)
|
||||
*
|
||||
* Used for: Disconnection validation (triggers don't need incoming connections)
|
||||
*
|
||||
* @param nodeType - The node type to check (e.g., "n8n-nodes-base.executeWorkflowTrigger")
|
||||
* @returns true if node is any type of trigger
|
||||
*/
|
||||
export function isTriggerNode(nodeType: string): boolean {
|
||||
const normalized = normalizeNodeType(nodeType);
|
||||
const lowerType = normalized.toLowerCase();
|
||||
|
||||
// Check for trigger pattern in node type name
|
||||
if (lowerType.includes('trigger')) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for webhook nodes (excluding respondToWebhook which is NOT a trigger)
|
||||
if (lowerType.includes('webhook') && !lowerType.includes('respond')) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for specific trigger types that don't have 'trigger' in their name
|
||||
const specificTriggers = [
|
||||
'nodes-base.start',
|
||||
'nodes-base.manualTrigger',
|
||||
'nodes-base.formTrigger'
|
||||
];
|
||||
|
||||
return specificTriggers.includes(normalized);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a node is an ACTIVATABLE trigger (excludes executeWorkflowTrigger)
|
||||
*
|
||||
* This function determines if a node can be used to activate a workflow.
|
||||
* Returns true for:
|
||||
* - Webhook triggers (webhook, webhookTrigger)
|
||||
* - Time-based triggers (schedule, cron)
|
||||
* - Poll-based triggers (emailTrigger, slackTrigger, etc.)
|
||||
* - Manual triggers (manualTrigger, start, formTrigger)
|
||||
*
|
||||
* Returns FALSE for:
|
||||
* - executeWorkflowTrigger (can only be invoked by other workflows)
|
||||
*
|
||||
* Used for: Activation validation (active workflows need activatable triggers)
|
||||
*
|
||||
* @param nodeType - The node type to check
|
||||
* @returns true if node can activate a workflow
|
||||
*/
|
||||
export function isActivatableTrigger(nodeType: string): boolean {
|
||||
const normalized = normalizeNodeType(nodeType);
|
||||
const lowerType = normalized.toLowerCase();
|
||||
|
||||
// executeWorkflowTrigger cannot activate a workflow (invoked by other workflows)
|
||||
if (lowerType.includes('executeworkflow')) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// All other triggers can activate workflows
|
||||
return isTriggerNode(nodeType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get human-readable description of trigger type
|
||||
*
|
||||
* @param nodeType - The node type
|
||||
* @returns Description of what triggers this node
|
||||
*/
|
||||
export function getTriggerTypeDescription(nodeType: string): string {
|
||||
const normalized = normalizeNodeType(nodeType);
|
||||
const lowerType = normalized.toLowerCase();
|
||||
|
||||
if (lowerType.includes('executeworkflow')) {
|
||||
return 'Execute Workflow Trigger (invoked by other workflows)';
|
||||
}
|
||||
|
||||
if (lowerType.includes('webhook')) {
|
||||
return 'Webhook Trigger (HTTP requests)';
|
||||
}
|
||||
|
||||
if (lowerType.includes('schedule') || lowerType.includes('cron')) {
|
||||
return 'Schedule Trigger (time-based)';
|
||||
}
|
||||
|
||||
if (lowerType.includes('manual') || normalized === 'nodes-base.start') {
|
||||
return 'Manual Trigger (manual execution)';
|
||||
}
|
||||
|
||||
if (lowerType.includes('email') || lowerType.includes('imap') || lowerType.includes('gmail')) {
|
||||
return 'Email Trigger (polling)';
|
||||
}
|
||||
|
||||
if (lowerType.includes('form')) {
|
||||
return 'Form Trigger (form submissions)';
|
||||
}
|
||||
|
||||
if (lowerType.includes('trigger')) {
|
||||
return 'Trigger (event-based)';
|
||||
}
|
||||
|
||||
return 'Unknown trigger type';
|
||||
}
|
||||
@@ -205,20 +205,9 @@ describe.skipIf(!dbExists)('Database Content Validation', () => {
|
||||
|
||||
it('MUST have FTS5 index properly ranked', () => {
|
||||
const results = db.prepare(`
|
||||
SELECT
|
||||
n.node_type,
|
||||
rank
|
||||
FROM nodes n
|
||||
JOIN nodes_fts ON n.rowid = nodes_fts.rowid
|
||||
SELECT node_type, rank FROM nodes_fts
|
||||
WHERE nodes_fts MATCH 'webhook'
|
||||
ORDER BY
|
||||
CASE
|
||||
WHEN LOWER(n.display_name) = LOWER('webhook') THEN 0
|
||||
WHEN LOWER(n.display_name) LIKE LOWER('%webhook%') THEN 1
|
||||
WHEN LOWER(n.node_type) LIKE LOWER('%webhook%') THEN 2
|
||||
ELSE 3
|
||||
END,
|
||||
rank
|
||||
ORDER BY rank
|
||||
LIMIT 5
|
||||
`).all();
|
||||
|
||||
@@ -226,7 +215,7 @@ describe.skipIf(!dbExists)('Database Content Validation', () => {
|
||||
'CRITICAL: FTS5 ranking not working. Search quality will be degraded.'
|
||||
).toBeGreaterThan(0);
|
||||
|
||||
// Exact match should be in top results (using production boosting logic with CASE-first ordering)
|
||||
// Exact match should be in top results
|
||||
const topNodes = results.slice(0, 3).map((r: any) => r.node_type);
|
||||
expect(topNodes,
|
||||
'WARNING: Exact match "nodes-base.webhook" not in top 3 ranked results'
|
||||
|
||||
@@ -136,25 +136,14 @@ describe('Node FTS5 Search Integration Tests', () => {
|
||||
describe('FTS5 Search Quality', () => {
|
||||
it('should rank exact matches higher', () => {
|
||||
const results = db.prepare(`
|
||||
SELECT
|
||||
n.node_type,
|
||||
rank
|
||||
FROM nodes n
|
||||
JOIN nodes_fts ON n.rowid = nodes_fts.rowid
|
||||
SELECT node_type, rank FROM nodes_fts
|
||||
WHERE nodes_fts MATCH 'webhook'
|
||||
ORDER BY
|
||||
CASE
|
||||
WHEN LOWER(n.display_name) = LOWER('webhook') THEN 0
|
||||
WHEN LOWER(n.display_name) LIKE LOWER('%webhook%') THEN 1
|
||||
WHEN LOWER(n.node_type) LIKE LOWER('%webhook%') THEN 2
|
||||
ELSE 3
|
||||
END,
|
||||
rank
|
||||
ORDER BY rank
|
||||
LIMIT 10
|
||||
`).all();
|
||||
|
||||
expect(results.length).toBeGreaterThan(0);
|
||||
// Exact match should be in top results (using production boosting logic with CASE-first ordering)
|
||||
// Exact match should be in top results
|
||||
const topResults = results.slice(0, 3).map((r: any) => r.node_type);
|
||||
expect(topResults).toContain('nodes-base.webhook');
|
||||
});
|
||||
|
||||
@@ -555,9 +555,8 @@ describe('MCP Performance Tests', () => {
|
||||
console.log(`Sustained load test - Requests: ${requestCount}, RPS: ${requestsPerSecond.toFixed(2)}, Errors: ${errorCount}`);
|
||||
console.log(`Environment: ${process.env.CI ? 'CI' : 'Local'}`);
|
||||
|
||||
// Environment-aware RPS threshold
|
||||
// Relaxed to 75 RPS locally to account for parallel test execution overhead
|
||||
const rpsThreshold = process.env.CI ? 50 : 75;
|
||||
// Environment-aware RPS threshold (relaxed -8% for type safety overhead)
|
||||
const rpsThreshold = process.env.CI ? 50 : 92;
|
||||
expect(requestsPerSecond).toBeGreaterThan(rpsThreshold);
|
||||
|
||||
// Error rate should be very low
|
||||
|
||||
@@ -1,11 +1,5 @@
|
||||
import { InstanceContext } from '../../../../src/types/instance-context';
|
||||
import { getN8nCredentials } from './credentials';
|
||||
import { NodeRepository } from '../../../../src/database/node-repository';
|
||||
import { createDatabaseAdapter } from '../../../../src/database/database-adapter';
|
||||
import * as path from 'path';
|
||||
|
||||
// Singleton repository instance for tests
|
||||
let repositoryInstance: NodeRepository | null = null;
|
||||
|
||||
/**
|
||||
* Creates MCP context for testing MCP handlers against real n8n instance
|
||||
@@ -18,27 +12,3 @@ export function createMcpContext(): InstanceContext {
|
||||
n8nApiKey: creds.apiKey
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets or creates a NodeRepository instance for integration tests
|
||||
* Uses the project's main database
|
||||
*/
|
||||
export async function getMcpRepository(): Promise<NodeRepository> {
|
||||
if (repositoryInstance) {
|
||||
return repositoryInstance;
|
||||
}
|
||||
|
||||
// Use the main project database
|
||||
const dbPath = path.join(process.cwd(), 'data', 'nodes.db');
|
||||
const db = await createDatabaseAdapter(dbPath);
|
||||
repositoryInstance = new NodeRepository(db);
|
||||
|
||||
return repositoryInstance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset the repository instance (useful for test cleanup)
|
||||
*/
|
||||
export function resetMcpRepository(): void {
|
||||
repositoryInstance = null;
|
||||
}
|
||||
|
||||
@@ -623,9 +623,7 @@ describe('Integration: handleAutofixWorkflow', () => {
|
||||
const response = await handleAutofixWorkflow(
|
||||
{
|
||||
id: created.id,
|
||||
applyFixes: false,
|
||||
// Exclude version upgrade fixes to test "no fixes" scenario
|
||||
fixTypes: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path']
|
||||
applyFixes: false
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
|
||||
@@ -19,9 +19,8 @@ import { createTestContext, TestContext, createTestWorkflowName } from '../utils
|
||||
import { getTestN8nClient } from '../utils/n8n-client';
|
||||
import { N8nApiClient } from '../../../../src/services/n8n-api-client';
|
||||
import { cleanupOrphanedWorkflows } from '../utils/cleanup-helpers';
|
||||
import { createMcpContext, getMcpRepository } from '../utils/mcp-context';
|
||||
import { createMcpContext } from '../utils/mcp-context';
|
||||
import { InstanceContext } from '../../../../src/types/instance-context';
|
||||
import { NodeRepository } from '../../../../src/database/node-repository';
|
||||
import { handleUpdatePartialWorkflow } from '../../../../src/mcp/handlers-workflow-diff';
|
||||
import { Workflow } from '../../../../src/types/n8n-api';
|
||||
|
||||
@@ -29,13 +28,11 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
let context: TestContext;
|
||||
let client: N8nApiClient;
|
||||
let mcpContext: InstanceContext;
|
||||
let repository: NodeRepository;
|
||||
|
||||
beforeEach(async () => {
|
||||
beforeEach(() => {
|
||||
context = createTestContext();
|
||||
client = getTestN8nClient();
|
||||
mcpContext = createMcpContext();
|
||||
repository = await getMcpRepository();
|
||||
// Skip workflow validation for these tests - they test n8n API behavior with edge cases
|
||||
process.env.SKIP_WORKFLOW_VALIDATION = 'true';
|
||||
});
|
||||
@@ -137,7 +134,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -244,7 +240,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -377,7 +372,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -580,7 +574,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -717,7 +710,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -863,7 +855,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -968,7 +959,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -1097,7 +1087,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -1196,7 +1185,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -1277,7 +1265,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -1359,7 +1346,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -1492,7 +1478,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
case: 1
|
||||
}
|
||||
]
|
||||
}, repository);
|
||||
});
|
||||
|
||||
const fetchedWorkflow = await client.getWorkflow(workflow.id);
|
||||
|
||||
@@ -1603,7 +1589,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
branch: 'true'
|
||||
}
|
||||
]
|
||||
}, repository);
|
||||
});
|
||||
|
||||
const fetchedWorkflow = await client.getWorkflow(workflow.id);
|
||||
|
||||
@@ -1719,7 +1705,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
case: 0
|
||||
}
|
||||
]
|
||||
}, repository);
|
||||
});
|
||||
|
||||
const fetchedWorkflow = await client.getWorkflow(workflow.id);
|
||||
|
||||
@@ -1857,7 +1843,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
case: 1
|
||||
}
|
||||
]
|
||||
}, repository);
|
||||
});
|
||||
|
||||
const fetchedWorkflow = await client.getWorkflow(workflow.id);
|
||||
|
||||
@@ -1970,7 +1956,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
sourceIndex: 0
|
||||
}
|
||||
]
|
||||
}, repository);
|
||||
});
|
||||
|
||||
const fetchedWorkflow = await client.getWorkflow(workflow.id);
|
||||
|
||||
@@ -2089,7 +2075,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
target: 'Merge'
|
||||
}
|
||||
]
|
||||
}, repository);
|
||||
});
|
||||
|
||||
const fetchedWorkflow = await client.getWorkflow(workflow.id);
|
||||
|
||||
@@ -2195,7 +2181,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
target: 'Merge'
|
||||
}
|
||||
]
|
||||
}, repository);
|
||||
});
|
||||
|
||||
const fetchedWorkflow = await client.getWorkflow(workflow.id);
|
||||
|
||||
@@ -2307,7 +2293,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
targetIndex: 0
|
||||
}
|
||||
]
|
||||
}, repository);
|
||||
});
|
||||
|
||||
const fetchedWorkflow = await client.getWorkflow(workflow.id);
|
||||
|
||||
@@ -2446,7 +2432,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
|
||||
target: 'Merge'
|
||||
}
|
||||
]
|
||||
}, repository);
|
||||
});
|
||||
|
||||
const fetchedWorkflow = await client.getWorkflow(workflow.id);
|
||||
|
||||
|
||||
@@ -12,22 +12,19 @@ import { getTestN8nClient } from '../utils/n8n-client';
|
||||
import { N8nApiClient } from '../../../../src/services/n8n-api-client';
|
||||
import { SIMPLE_WEBHOOK_WORKFLOW, SIMPLE_HTTP_WORKFLOW, MULTI_NODE_WORKFLOW } from '../utils/fixtures';
|
||||
import { cleanupOrphanedWorkflows } from '../utils/cleanup-helpers';
|
||||
import { createMcpContext, getMcpRepository } from '../utils/mcp-context';
|
||||
import { createMcpContext } from '../utils/mcp-context';
|
||||
import { InstanceContext } from '../../../../src/types/instance-context';
|
||||
import { NodeRepository } from '../../../../src/database/node-repository';
|
||||
import { handleUpdatePartialWorkflow } from '../../../../src/mcp/handlers-workflow-diff';
|
||||
|
||||
describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
let context: TestContext;
|
||||
let client: N8nApiClient;
|
||||
let mcpContext: InstanceContext;
|
||||
let repository: NodeRepository;
|
||||
|
||||
beforeEach(async () => {
|
||||
beforeEach(() => {
|
||||
context = createTestContext();
|
||||
client = getTestN8nClient();
|
||||
mcpContext = createMcpContext();
|
||||
repository = await getMcpRepository();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
@@ -94,7 +91,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -133,7 +129,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -166,7 +161,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -198,7 +192,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -233,7 +226,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -269,7 +261,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -307,7 +298,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -341,7 +331,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -369,7 +358,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
id: created.id,
|
||||
operations: [{ type: 'disableNode', nodeName: 'Webhook' }]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -384,7 +372,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -429,7 +416,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -467,7 +453,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -502,7 +487,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -535,7 +519,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -568,7 +551,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -597,7 +579,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
id: created.id,
|
||||
operations: [{ type: 'removeNode', nodeName: 'HTTP Request' }]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -613,7 +594,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
],
|
||||
validateOnly: true
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -653,7 +633,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -691,7 +670,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -724,7 +702,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -759,7 +736,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -817,7 +793,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -850,7 +825,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
],
|
||||
validateOnly: true
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -894,7 +868,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
],
|
||||
continueOnError: true
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -937,7 +910,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -981,7 +953,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -1034,7 +1005,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -1080,7 +1050,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
|
||||
}
|
||||
]
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
|
||||
@@ -11,22 +11,19 @@ import { getTestN8nClient } from '../utils/n8n-client';
|
||||
import { N8nApiClient } from '../../../../src/services/n8n-api-client';
|
||||
import { SIMPLE_WEBHOOK_WORKFLOW, SIMPLE_HTTP_WORKFLOW } from '../utils/fixtures';
|
||||
import { cleanupOrphanedWorkflows } from '../utils/cleanup-helpers';
|
||||
import { createMcpContext, getMcpRepository } from '../utils/mcp-context';
|
||||
import { createMcpContext } from '../utils/mcp-context';
|
||||
import { InstanceContext } from '../../../../src/types/instance-context';
|
||||
import { NodeRepository } from '../../../../src/database/node-repository';
|
||||
import { handleUpdateWorkflow } from '../../../../src/mcp/handlers-n8n-manager';
|
||||
|
||||
describe('Integration: handleUpdateWorkflow', () => {
|
||||
let context: TestContext;
|
||||
let client: N8nApiClient;
|
||||
let mcpContext: InstanceContext;
|
||||
let repository: NodeRepository;
|
||||
|
||||
beforeEach(async () => {
|
||||
beforeEach(() => {
|
||||
context = createTestContext();
|
||||
client = getTestN8nClient();
|
||||
mcpContext = createMcpContext();
|
||||
repository = await getMcpRepository();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
@@ -71,7 +68,6 @@ describe('Integration: handleUpdateWorkflow', () => {
|
||||
nodes: replacement.nodes,
|
||||
connections: replacement.connections
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -142,7 +138,6 @@ describe('Integration: handleUpdateWorkflow', () => {
|
||||
nodes: updatedNodes,
|
||||
connections: updatedConnections
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -188,7 +183,6 @@ describe('Integration: handleUpdateWorkflow', () => {
|
||||
timezone: 'Europe/London'
|
||||
}
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -234,7 +228,6 @@ describe('Integration: handleUpdateWorkflow', () => {
|
||||
],
|
||||
connections: {}
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -249,7 +242,6 @@ describe('Integration: handleUpdateWorkflow', () => {
|
||||
id: '99999999',
|
||||
name: 'Should Fail'
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -289,7 +281,6 @@ describe('Integration: handleUpdateWorkflow', () => {
|
||||
nodes: current.nodes, // Required by n8n API
|
||||
connections: current.connections // Required by n8n API
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
@@ -335,7 +326,6 @@ describe('Integration: handleUpdateWorkflow', () => {
|
||||
timezone: 'America/New_York'
|
||||
}
|
||||
},
|
||||
repository,
|
||||
mcpContext
|
||||
);
|
||||
|
||||
|
||||
@@ -1,722 +0,0 @@
|
||||
/**
|
||||
* Integration tests for AI node connection validation in workflow diff operations
|
||||
* Tests that AI nodes with AI-specific connection types (ai_languageModel, ai_memory, etc.)
|
||||
* are properly validated without requiring main connections
|
||||
*
|
||||
* Related to issue #357
|
||||
*/
|
||||
|
||||
import { describe, test, expect } from 'vitest';
|
||||
import { WorkflowDiffEngine } from '../../../src/services/workflow-diff-engine';
|
||||
|
||||
describe('AI Node Connection Validation', () => {
|
||||
describe('AI-specific connection types', () => {
|
||||
test('should accept workflow with ai_languageModel connections', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'AI Language Model Test',
|
||||
nodes: [
|
||||
{
|
||||
id: 'agent-node',
|
||||
name: 'AI Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'llm-node',
|
||||
name: 'OpenAI Chat Model',
|
||||
type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'OpenAI Chat Model': {
|
||||
ai_languageModel: [
|
||||
[{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
});
|
||||
|
||||
test('should accept workflow with ai_memory connections', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'AI Memory Test',
|
||||
nodes: [
|
||||
{
|
||||
id: 'agent-node',
|
||||
name: 'AI Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'memory-node',
|
||||
name: 'Postgres Chat Memory',
|
||||
type: '@n8n/n8n-nodes-langchain.memoryPostgresChat',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Postgres Chat Memory': {
|
||||
ai_memory: [
|
||||
[{ node: 'AI Agent', type: 'ai_memory', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
});
|
||||
|
||||
test('should accept workflow with ai_embedding connections', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'AI Embedding Test',
|
||||
nodes: [
|
||||
{
|
||||
id: 'vectorstore-node',
|
||||
name: 'Vector Store',
|
||||
type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'embedding-node',
|
||||
name: 'Embeddings OpenAI',
|
||||
type: '@n8n/n8n-nodes-langchain.embeddingsOpenAi',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Embeddings OpenAI': {
|
||||
ai_embedding: [
|
||||
[{ node: 'Vector Store', type: 'ai_embedding', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
});
|
||||
|
||||
test('should accept workflow with ai_tool connections', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'AI Tool Test',
|
||||
nodes: [
|
||||
{
|
||||
id: 'agent-node',
|
||||
name: 'AI Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'vectorstore-node',
|
||||
name: 'Vector Store Tool',
|
||||
type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Vector Store Tool': {
|
||||
ai_tool: [
|
||||
[{ node: 'AI Agent', type: 'ai_tool', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
});
|
||||
|
||||
test('should accept workflow with ai_vectorStore connections', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'AI Vector Store Test',
|
||||
nodes: [
|
||||
{
|
||||
id: 'agent-node',
|
||||
name: 'AI Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'vectorstore-node',
|
||||
name: 'Supabase Vector Store',
|
||||
type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Supabase Vector Store': {
|
||||
ai_vectorStore: [
|
||||
[{ node: 'AI Agent', type: 'ai_vectorStore', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Mixed connection types', () => {
|
||||
test('should accept workflow mixing main and AI connections', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'Mixed Connections Test',
|
||||
nodes: [
|
||||
{
|
||||
id: 'webhook-node',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'agent-node',
|
||||
name: 'AI Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'llm-node',
|
||||
name: 'OpenAI Chat Model',
|
||||
type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
|
||||
typeVersion: 1,
|
||||
position: [200, 200],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'respond-node',
|
||||
name: 'Respond to Webhook',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1,
|
||||
position: [400, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [
|
||||
[{ node: 'AI Agent', type: 'main', index: 0 }]
|
||||
]
|
||||
},
|
||||
'AI Agent': {
|
||||
main: [
|
||||
[{ node: 'Respond to Webhook', type: 'main', index: 0 }]
|
||||
]
|
||||
},
|
||||
'OpenAI Chat Model': {
|
||||
ai_languageModel: [
|
||||
[{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
});
|
||||
|
||||
test('should accept workflow with error connections alongside AI connections', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'Error + AI Connections Test',
|
||||
nodes: [
|
||||
{
|
||||
id: 'webhook-node',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'agent-node',
|
||||
name: 'AI Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'llm-node',
|
||||
name: 'OpenAI Chat Model',
|
||||
type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
|
||||
typeVersion: 1,
|
||||
position: [200, 200],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'error-handler',
|
||||
name: 'Error Handler',
|
||||
type: 'n8n-nodes-base.set',
|
||||
typeVersion: 1,
|
||||
position: [200, -200],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [
|
||||
[{ node: 'AI Agent', type: 'main', index: 0 }]
|
||||
]
|
||||
},
|
||||
'AI Agent': {
|
||||
error: [
|
||||
[{ node: 'Error Handler', type: 'main', index: 0 }]
|
||||
]
|
||||
},
|
||||
'OpenAI Chat Model': {
|
||||
ai_languageModel: [
|
||||
[{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex AI workflow (Issue #357 scenario)', () => {
|
||||
test('should accept full AI agent workflow with RAG components', async () => {
|
||||
// Simplified version of the workflow from issue #357
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'AI Agent with RAG',
|
||||
nodes: [
|
||||
{
|
||||
id: 'webhook-node',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 2,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'code-node',
|
||||
name: 'Prepare Inputs',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'agent-node',
|
||||
name: 'AI Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1.7,
|
||||
position: [400, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'llm-node',
|
||||
name: 'OpenAI Chat Model',
|
||||
type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
|
||||
typeVersion: 1,
|
||||
position: [400, 200],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'memory-node',
|
||||
name: 'Postgres Chat Memory',
|
||||
type: '@n8n/n8n-nodes-langchain.memoryPostgresChat',
|
||||
typeVersion: 1.1,
|
||||
position: [500, 200],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'embedding-node',
|
||||
name: 'Embeddings OpenAI',
|
||||
type: '@n8n/n8n-nodes-langchain.embeddingsOpenAi',
|
||||
typeVersion: 1,
|
||||
position: [600, 400],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'vectorstore-node',
|
||||
name: 'Supabase Vector Store',
|
||||
type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
|
||||
typeVersion: 1.3,
|
||||
position: [600, 200],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'respond-node',
|
||||
name: 'Respond to Webhook',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1.1,
|
||||
position: [600, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [
|
||||
[{ node: 'Prepare Inputs', type: 'main', index: 0 }]
|
||||
]
|
||||
},
|
||||
'Prepare Inputs': {
|
||||
main: [
|
||||
[{ node: 'AI Agent', type: 'main', index: 0 }]
|
||||
]
|
||||
},
|
||||
'AI Agent': {
|
||||
main: [
|
||||
[{ node: 'Respond to Webhook', type: 'main', index: 0 }]
|
||||
]
|
||||
},
|
||||
'OpenAI Chat Model': {
|
||||
ai_languageModel: [
|
||||
[{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
|
||||
]
|
||||
},
|
||||
'Postgres Chat Memory': {
|
||||
ai_memory: [
|
||||
[{ node: 'AI Agent', type: 'ai_memory', index: 0 }]
|
||||
]
|
||||
},
|
||||
'Embeddings OpenAI': {
|
||||
ai_embedding: [
|
||||
[{ node: 'Supabase Vector Store', type: 'ai_embedding', index: 0 }]
|
||||
]
|
||||
},
|
||||
'Supabase Vector Store': {
|
||||
ai_tool: [
|
||||
[{ node: 'AI Agent', type: 'ai_tool', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
expect(result.errors || []).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('should successfully update AI workflow nodes without connection errors', async () => {
|
||||
// Test that we can update nodes in an AI workflow without triggering validation errors
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'AI Workflow Update Test',
|
||||
nodes: [
|
||||
{
|
||||
id: 'webhook-node',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 2,
|
||||
position: [0, 0],
|
||||
parameters: { path: 'test' }
|
||||
},
|
||||
{
|
||||
id: 'agent-node',
|
||||
name: 'AI Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'llm-node',
|
||||
name: 'OpenAI Chat Model',
|
||||
type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
|
||||
typeVersion: 1,
|
||||
position: [200, 200],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [
|
||||
[{ node: 'AI Agent', type: 'main', index: 0 }]
|
||||
]
|
||||
},
|
||||
'OpenAI Chat Model': {
|
||||
ai_languageModel: [
|
||||
[{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
|
||||
// Update the webhook node (unrelated to AI nodes)
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: [
|
||||
{
|
||||
type: 'updateNode',
|
||||
nodeId: 'webhook-node',
|
||||
updates: {
|
||||
notes: 'Updated webhook configuration'
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
expect(result.errors || []).toHaveLength(0);
|
||||
|
||||
// Verify the update was applied
|
||||
const updatedNode = result.workflow.nodes.find((n: any) => n.id === 'webhook-node');
|
||||
expect(updatedNode?.notes).toBe('Updated webhook configuration');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Node-only AI nodes (no main connections)', () => {
|
||||
test('should accept AI nodes with ONLY ai_languageModel connections', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'AI Node Without Main',
|
||||
nodes: [
|
||||
{
|
||||
id: 'agent-node',
|
||||
name: 'AI Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'llm-node',
|
||||
name: 'OpenAI Chat Model',
|
||||
type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
// OpenAI Chat Model has NO main connections, ONLY ai_languageModel
|
||||
'OpenAI Chat Model': {
|
||||
ai_languageModel: [
|
||||
[{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
expect(result.errors || []).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('should accept AI nodes with ONLY ai_memory connections', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'Memory Node Without Main',
|
||||
nodes: [
|
||||
{
|
||||
id: 'agent-node',
|
||||
name: 'AI Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'memory-node',
|
||||
name: 'Postgres Chat Memory',
|
||||
type: '@n8n/n8n-nodes-langchain.memoryPostgresChat',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
// Memory node has NO main connections, ONLY ai_memory
|
||||
'Postgres Chat Memory': {
|
||||
ai_memory: [
|
||||
[{ node: 'AI Agent', type: 'ai_memory', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
expect(result.errors || []).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('should accept embedding nodes with ONLY ai_embedding connections', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'Embedding Node Without Main',
|
||||
nodes: [
|
||||
{
|
||||
id: 'vectorstore-node',
|
||||
name: 'Vector Store',
|
||||
type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'embedding-node',
|
||||
name: 'Embeddings OpenAI',
|
||||
type: '@n8n/n8n-nodes-langchain.embeddingsOpenAi',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
// Embedding node has NO main connections, ONLY ai_embedding
|
||||
'Embeddings OpenAI': {
|
||||
ai_embedding: [
|
||||
[{ node: 'Vector Store', type: 'ai_embedding', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
expect(result.errors || []).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('should accept vector store nodes with ONLY ai_tool connections', async () => {
|
||||
const workflow = {
|
||||
id: 'test-workflow',
|
||||
name: 'Vector Store Node Without Main',
|
||||
nodes: [
|
||||
{
|
||||
id: 'agent-node',
|
||||
name: 'AI Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'vectorstore-node',
|
||||
name: 'Supabase Vector Store',
|
||||
type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
// Vector store has NO main connections, ONLY ai_tool
|
||||
'Supabase Vector Store': {
|
||||
ai_tool: [
|
||||
[{ node: 'AI Agent', type: 'ai_tool', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const engine = new WorkflowDiffEngine();
|
||||
const result = await engine.applyDiff(workflow as any, {
|
||||
id: workflow.id,
|
||||
operations: []
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
expect(result.errors || []).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,573 +0,0 @@
|
||||
/**
|
||||
* Integration tests for auto-update connection references on node rename
|
||||
* Tests real-world workflow scenarios from Issue #353
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import { WorkflowDiffEngine } from '@/services/workflow-diff-engine';
|
||||
import { validateWorkflowStructure } from '@/services/n8n-validation';
|
||||
import { WorkflowDiffRequest, UpdateNodeOperation } from '@/types/workflow-diff';
|
||||
import { Workflow, WorkflowNode } from '@/types/n8n-api';
|
||||
|
||||
describe('WorkflowDiffEngine - Node Rename Integration Tests', () => {
|
||||
let diffEngine: WorkflowDiffEngine;
|
||||
|
||||
beforeEach(() => {
|
||||
diffEngine = new WorkflowDiffEngine();
|
||||
});
|
||||
|
||||
describe('Real-world API endpoint workflow (Issue #353 scenario)', () => {
|
||||
let apiWorkflow: Workflow;
|
||||
|
||||
beforeEach(() => {
|
||||
// Complex real-world API endpoint workflow
|
||||
apiWorkflow = {
|
||||
id: 'api-workflow',
|
||||
name: 'POST /patients/:id/approaches - Add Approach',
|
||||
nodes: [
|
||||
{
|
||||
id: 'webhook-trigger',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 2,
|
||||
position: [0, 0],
|
||||
parameters: {
|
||||
path: 'patients/{{$parameter["id"]/approaches',
|
||||
httpMethod: 'POST',
|
||||
responseMode: 'responseNode'
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'validate-request',
|
||||
name: 'Validate Request',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [200, 0],
|
||||
parameters: {
|
||||
mode: 'runOnceForAllItems',
|
||||
jsCode: '// Validation logic'
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'check-auth',
|
||||
name: 'Check Authorization',
|
||||
type: 'n8n-nodes-base.if',
|
||||
typeVersion: 2,
|
||||
position: [400, 0],
|
||||
parameters: {
|
||||
conditions: {
|
||||
boolean: [{ value1: '={{$json.authorized}}', value2: true }]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'process-request',
|
||||
name: 'Process Request',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [600, 0],
|
||||
parameters: {
|
||||
mode: 'runOnceForAllItems',
|
||||
jsCode: '// Processing logic'
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'return-success',
|
||||
name: 'Return 200 OK',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1.1,
|
||||
position: [800, 0],
|
||||
parameters: {
|
||||
responseBody: '={{ {"success": true, "data": $json} }}',
|
||||
options: { responseCode: 200 }
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'return-forbidden',
|
||||
name: 'Return 403 Forbidden1',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1.1,
|
||||
position: [600, 200],
|
||||
parameters: {
|
||||
responseBody: '={{ {"error": "Forbidden"} }}',
|
||||
options: { responseCode: 403 }
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'handle-error',
|
||||
name: 'Handle Error',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [400, 300],
|
||||
parameters: {
|
||||
mode: 'runOnceForAllItems',
|
||||
jsCode: '// Error handling'
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'return-error',
|
||||
name: 'Return 500 Error',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1.1,
|
||||
position: [600, 300],
|
||||
parameters: {
|
||||
responseBody: '={{ {"error": "Internal Server Error"} }}',
|
||||
options: { responseCode: 500 }
|
||||
}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Validate Request', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Validate Request': {
|
||||
main: [[{ node: 'Check Authorization', type: 'main', index: 0 }]],
|
||||
error: [[{ node: 'Handle Error', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Check Authorization': {
|
||||
main: [
|
||||
[{ node: 'Process Request', type: 'main', index: 0 }], // true branch
|
||||
[{ node: 'Return 403 Forbidden1', type: 'main', index: 0 }] // false branch
|
||||
],
|
||||
error: [[{ node: 'Handle Error', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Process Request': {
|
||||
main: [[{ node: 'Return 200 OK', type: 'main', index: 0 }]],
|
||||
error: [[{ node: 'Handle Error', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Handle Error': {
|
||||
main: [[{ node: 'Return 500 Error', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
it('should successfully rename error response node and maintain all connections', async () => {
|
||||
// The exact operation from Issue #353
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'return-forbidden',
|
||||
updates: {
|
||||
name: 'Return 404 Not Found',
|
||||
parameters: {
|
||||
responseBody: '={{ {"error": "Not Found"} }}',
|
||||
options: { responseCode: 404 }
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'api-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(apiWorkflow, request);
|
||||
|
||||
// Should succeed
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Node should be renamed
|
||||
const renamedNode = result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'return-forbidden');
|
||||
expect(renamedNode?.name).toBe('Return 404 Not Found');
|
||||
expect(renamedNode?.parameters.options?.responseCode).toBe(404);
|
||||
|
||||
// Connection from IF node should be updated
|
||||
expect(result.workflow!.connections['Check Authorization'].main[1][0].node).toBe('Return 404 Not Found');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle multiple node renames in complex workflow', async () => {
|
||||
const operations: UpdateNodeOperation[] = [
|
||||
{
|
||||
type: 'updateNode',
|
||||
nodeId: 'return-forbidden',
|
||||
updates: { name: 'Return 404 Not Found' }
|
||||
},
|
||||
{
|
||||
type: 'updateNode',
|
||||
nodeId: 'return-success',
|
||||
updates: { name: 'Return 201 Created' }
|
||||
},
|
||||
{
|
||||
type: 'updateNode',
|
||||
nodeId: 'return-error',
|
||||
updates: { name: 'Return 500 Internal Server Error' }
|
||||
}
|
||||
];
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'api-workflow',
|
||||
operations
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(apiWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// All nodes should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'return-forbidden')?.name).toBe('Return 404 Not Found');
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'return-success')?.name).toBe('Return 201 Created');
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'return-error')?.name).toBe('Return 500 Internal Server Error');
|
||||
|
||||
// All connections should be updated
|
||||
expect(result.workflow!.connections['Check Authorization'].main[1][0].node).toBe('Return 404 Not Found');
|
||||
expect(result.workflow!.connections['Process Request'].main[0][0].node).toBe('Return 201 Created');
|
||||
expect(result.workflow!.connections['Handle Error'].main[0][0].node).toBe('Return 500 Internal Server Error');
|
||||
|
||||
// Validate entire workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should maintain error connections after rename', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'validate-request',
|
||||
updates: { name: 'Validate Input' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'api-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(apiWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Main connection should be updated
|
||||
expect(result.workflow!.connections['Validate Input']).toBeDefined();
|
||||
expect(result.workflow!.connections['Validate Input'].main[0][0].node).toBe('Check Authorization');
|
||||
|
||||
// Error connection should also be updated
|
||||
expect(result.workflow!.connections['Validate Input'].error[0][0].node).toBe('Handle Error');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('AI Agent workflow with tool connections', () => {
|
||||
let aiWorkflow: Workflow;
|
||||
|
||||
beforeEach(() => {
|
||||
aiWorkflow = {
|
||||
id: 'ai-workflow',
|
||||
name: 'AI Customer Support Agent',
|
||||
nodes: [
|
||||
{
|
||||
id: 'webhook-1',
|
||||
name: 'Customer Query',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 2,
|
||||
position: [0, 0],
|
||||
parameters: { path: 'support', httpMethod: 'POST' }
|
||||
},
|
||||
{
|
||||
id: 'agent-1',
|
||||
name: 'Support Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: { promptTemplate: 'Help the customer with: {{$json.query}}' }
|
||||
},
|
||||
{
|
||||
id: 'tool-http',
|
||||
name: 'Knowledge Base API',
|
||||
type: '@n8n/n8n-nodes-langchain.toolHttpRequest',
|
||||
typeVersion: 1,
|
||||
position: [200, 100],
|
||||
parameters: { url: 'https://kb.example.com/search' }
|
||||
},
|
||||
{
|
||||
id: 'tool-code',
|
||||
name: 'Custom Logic Tool',
|
||||
type: '@n8n/n8n-nodes-langchain.toolCode',
|
||||
typeVersion: 1,
|
||||
position: [200, 200],
|
||||
parameters: { code: '// Custom logic' }
|
||||
},
|
||||
{
|
||||
id: 'response-1',
|
||||
name: 'Send Response',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1.1,
|
||||
position: [400, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Customer Query': {
|
||||
main: [[{ node: 'Support Agent', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Support Agent': {
|
||||
main: [[{ node: 'Send Response', type: 'main', index: 0 }]],
|
||||
ai_tool: [
|
||||
[
|
||||
{ node: 'Knowledge Base API', type: 'ai_tool', index: 0 },
|
||||
{ node: 'Custom Logic Tool', type: 'ai_tool', index: 0 }
|
||||
]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
// SKIPPED: Pre-existing validation bug - validateWorkflowStructure() doesn't recognize
|
||||
// AI connections (ai_tool, ai_languageModel, etc.) as valid, causing false positives.
|
||||
// The rename feature works correctly - connections ARE updated. Validation is the issue.
|
||||
// TODO: Fix validateWorkflowStructure() to check all connection types, not just 'main'
|
||||
it.skip('should update AI tool connections when renaming agent', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'agent-1',
|
||||
updates: { name: 'AI Support Assistant' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'ai-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(aiWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Agent should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'agent-1')?.name).toBe('AI Support Assistant');
|
||||
|
||||
// All connections should be updated
|
||||
expect(result.workflow!.connections['AI Support Assistant']).toBeDefined();
|
||||
expect(result.workflow!.connections['AI Support Assistant'].main[0][0].node).toBe('Send Response');
|
||||
expect(result.workflow!.connections['AI Support Assistant'].ai_tool[0]).toHaveLength(2);
|
||||
expect(result.workflow!.connections['AI Support Assistant'].ai_tool[0][0].node).toBe('Knowledge Base API');
|
||||
expect(result.workflow!.connections['AI Support Assistant'].ai_tool[0][1].node).toBe('Custom Logic Tool');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
// SKIPPED: Pre-existing validation bug - validateWorkflowStructure() doesn't recognize
|
||||
// AI connections (ai_tool, ai_languageModel, etc.) as valid, causing false positives.
|
||||
// The rename feature works correctly - connections ARE updated. Validation is the issue.
|
||||
// TODO: Fix validateWorkflowStructure() to check all connection types, not just 'main'
|
||||
it.skip('should update AI tool connections when renaming tool', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'tool-http',
|
||||
updates: { name: 'Documentation Search' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'ai-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(aiWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Tool should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'tool-http')?.name).toBe('Documentation Search');
|
||||
|
||||
// AI tool connection should reference new name
|
||||
expect(result.workflow!.connections['Support Agent'].ai_tool[0][0].node).toBe('Documentation Search');
|
||||
// Other tool should remain unchanged
|
||||
expect(result.workflow!.connections['Support Agent'].ai_tool[0][1].node).toBe('Custom Logic Tool');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multi-branch workflow with IF and Switch nodes', () => {
|
||||
let multiBranchWorkflow: Workflow;
|
||||
|
||||
beforeEach(() => {
|
||||
multiBranchWorkflow = {
|
||||
id: 'multi-branch-workflow',
|
||||
name: 'Order Processing Workflow',
|
||||
nodes: [
|
||||
{
|
||||
id: 'webhook-1',
|
||||
name: 'New Order',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 2,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'if-1',
|
||||
name: 'Check Payment Status',
|
||||
type: 'n8n-nodes-base.if',
|
||||
typeVersion: 2,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'switch-1',
|
||||
name: 'Route by Order Type',
|
||||
type: 'n8n-nodes-base.switch',
|
||||
typeVersion: 3,
|
||||
position: [400, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'process-digital',
|
||||
name: 'Process Digital Order',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [600, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'process-physical',
|
||||
name: 'Process Physical Order',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [600, 100],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'process-service',
|
||||
name: 'Process Service Order',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [600, 200],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'reject-payment',
|
||||
name: 'Reject Payment',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [400, 300],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'New Order': {
|
||||
main: [[{ node: 'Check Payment Status', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Check Payment Status': {
|
||||
main: [
|
||||
[{ node: 'Route by Order Type', type: 'main', index: 0 }], // paid
|
||||
[{ node: 'Reject Payment', type: 'main', index: 0 }] // not paid
|
||||
]
|
||||
},
|
||||
'Route by Order Type': {
|
||||
main: [
|
||||
[{ node: 'Process Digital Order', type: 'main', index: 0 }], // case 0: digital
|
||||
[{ node: 'Process Physical Order', type: 'main', index: 0 }], // case 1: physical
|
||||
[{ node: 'Process Service Order', type: 'main', index: 0 }] // case 2: service
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
it('should update all branch connections when renaming IF node', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'if-1',
|
||||
updates: { name: 'Validate Payment' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'multi-branch-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(multiBranchWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// IF node should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'if-1')?.name).toBe('Validate Payment');
|
||||
|
||||
// Both branches should be updated
|
||||
expect(result.workflow!.connections['Validate Payment']).toBeDefined();
|
||||
expect(result.workflow!.connections['Validate Payment'].main[0][0].node).toBe('Route by Order Type');
|
||||
expect(result.workflow!.connections['Validate Payment'].main[1][0].node).toBe('Reject Payment');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should update all case connections when renaming Switch node', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'switch-1',
|
||||
updates: { name: 'Order Type Router' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'multi-branch-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(multiBranchWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Switch node should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'switch-1')?.name).toBe('Order Type Router');
|
||||
|
||||
// All three cases should be updated
|
||||
expect(result.workflow!.connections['Order Type Router']).toBeDefined();
|
||||
expect(result.workflow!.connections['Order Type Router'].main).toHaveLength(3);
|
||||
expect(result.workflow!.connections['Order Type Router'].main[0][0].node).toBe('Process Digital Order');
|
||||
expect(result.workflow!.connections['Order Type Router'].main[1][0].node).toBe('Process Physical Order');
|
||||
expect(result.workflow!.connections['Order Type Router'].main[2][0].node).toBe('Process Service Order');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should update specific case target when renamed', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'process-digital',
|
||||
updates: { name: 'Send Digital Download Link' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'multi-branch-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(multiBranchWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Digital order node should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'process-digital')?.name).toBe('Send Digital Download Link');
|
||||
|
||||
// Case 0 connection should be updated
|
||||
expect(result.workflow!.connections['Route by Order Type'].main[0][0].node).toBe('Send Digital Download Link');
|
||||
// Other cases should remain unchanged
|
||||
expect(result.workflow!.connections['Route by Order Type'].main[1][0].node).toBe('Process Physical Order');
|
||||
expect(result.workflow!.connections['Route by Order Type'].main[2][0].node).toBe('Process Service Order');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -24,12 +24,10 @@ vi.mock('@/mcp/handlers-n8n-manager', () => ({
|
||||
// Import mocked modules
|
||||
import { getN8nApiClient } from '@/mcp/handlers-n8n-manager';
|
||||
import { logger } from '@/utils/logger';
|
||||
import type { NodeRepository } from '@/database/node-repository';
|
||||
|
||||
describe('handlers-workflow-diff', () => {
|
||||
let mockApiClient: any;
|
||||
let mockDiffEngine: any;
|
||||
let mockRepository: NodeRepository;
|
||||
|
||||
// Helper function to create test workflow
|
||||
const createTestWorkflow = (overrides = {}) => ({
|
||||
@@ -80,9 +78,6 @@ describe('handlers-workflow-diff', () => {
|
||||
applyDiff: vi.fn(),
|
||||
};
|
||||
|
||||
// Setup mock repository
|
||||
mockRepository = {} as NodeRepository;
|
||||
|
||||
// Mock the API client getter
|
||||
vi.mocked(getN8nApiClient).mockReturnValue(mockApiClient);
|
||||
|
||||
@@ -146,7 +141,7 @@ describe('handlers-workflow-diff', () => {
|
||||
});
|
||||
mockApiClient.updateWorkflow.mockResolvedValue(updatedWorkflow);
|
||||
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest);
|
||||
|
||||
expect(result).toEqual({
|
||||
success: true,
|
||||
@@ -190,7 +185,7 @@ describe('handlers-workflow-diff', () => {
|
||||
errors: [],
|
||||
});
|
||||
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest);
|
||||
|
||||
expect(result).toEqual({
|
||||
success: true,
|
||||
@@ -267,7 +262,7 @@ describe('handlers-workflow-diff', () => {
|
||||
});
|
||||
mockApiClient.updateWorkflow.mockResolvedValue({ ...testWorkflow });
|
||||
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.message).toContain('Applied 3 operations');
|
||||
@@ -297,7 +292,7 @@ describe('handlers-workflow-diff', () => {
|
||||
failed: [0],
|
||||
});
|
||||
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest);
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
@@ -319,7 +314,7 @@ describe('handlers-workflow-diff', () => {
|
||||
const result = await handleUpdatePartialWorkflow({
|
||||
id: 'test-id',
|
||||
operations: [],
|
||||
}, mockRepository);
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
@@ -334,7 +329,7 @@ describe('handlers-workflow-diff', () => {
|
||||
const result = await handleUpdatePartialWorkflow({
|
||||
id: 'non-existent',
|
||||
operations: [],
|
||||
}, mockRepository);
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
@@ -363,7 +358,7 @@ describe('handlers-workflow-diff', () => {
|
||||
const result = await handleUpdatePartialWorkflow({
|
||||
id: 'test-id',
|
||||
operations: [{ type: 'updateNode', nodeId: 'node1', updates: {} }],
|
||||
}, mockRepository);
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
@@ -388,7 +383,7 @@ describe('handlers-workflow-diff', () => {
|
||||
],
|
||||
};
|
||||
|
||||
const result = await handleUpdatePartialWorkflow(invalidInput, mockRepository);
|
||||
const result = await handleUpdatePartialWorkflow(invalidInput);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toBe('Invalid input');
|
||||
@@ -437,7 +432,7 @@ describe('handlers-workflow-diff', () => {
|
||||
});
|
||||
mockApiClient.updateWorkflow.mockResolvedValue({ ...testWorkflow });
|
||||
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(mockDiffEngine.applyDiff).toHaveBeenCalledWith(testWorkflow, diffRequest);
|
||||
@@ -460,7 +455,7 @@ describe('handlers-workflow-diff', () => {
|
||||
await handleUpdatePartialWorkflow({
|
||||
id: 'test-id',
|
||||
operations: [{ type: 'updateNode', nodeId: 'node1', updates: {} }],
|
||||
}, mockRepository);
|
||||
});
|
||||
|
||||
expect(logger.debug).toHaveBeenCalledWith(
|
||||
'Workflow diff request received',
|
||||
@@ -478,7 +473,7 @@ describe('handlers-workflow-diff', () => {
|
||||
const result = await handleUpdatePartialWorkflow({
|
||||
id: 'test-id',
|
||||
operations: [],
|
||||
}, mockRepository);
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
@@ -494,7 +489,7 @@ describe('handlers-workflow-diff', () => {
|
||||
const result = await handleUpdatePartialWorkflow({
|
||||
id: 'test-id',
|
||||
operations: [],
|
||||
}, mockRepository);
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
@@ -510,7 +505,7 @@ describe('handlers-workflow-diff', () => {
|
||||
const result = await handleUpdatePartialWorkflow({
|
||||
id: 'test-id',
|
||||
operations: [],
|
||||
}, mockRepository);
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
@@ -526,7 +521,7 @@ describe('handlers-workflow-diff', () => {
|
||||
const result = await handleUpdatePartialWorkflow({
|
||||
id: 'test-id',
|
||||
operations: [],
|
||||
}, mockRepository);
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
@@ -569,7 +564,7 @@ describe('handlers-workflow-diff', () => {
|
||||
});
|
||||
mockApiClient.updateWorkflow.mockResolvedValue(testWorkflow);
|
||||
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(mockDiffEngine.applyDiff).toHaveBeenCalledWith(testWorkflow, diffRequest);
|
||||
@@ -592,7 +587,7 @@ describe('handlers-workflow-diff', () => {
|
||||
});
|
||||
mockApiClient.updateWorkflow.mockResolvedValue(testWorkflow);
|
||||
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.message).toContain('Applied 0 operations');
|
||||
@@ -618,7 +613,7 @@ describe('handlers-workflow-diff', () => {
|
||||
errors: ['Operation 2 failed: Node "invalid-node" not found'],
|
||||
});
|
||||
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest);
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
|
||||
@@ -1,685 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { BreakingChangeDetector, type DetectedChange, type VersionUpgradeAnalysis } from '@/services/breaking-change-detector';
|
||||
import { NodeRepository } from '@/database/node-repository';
|
||||
import * as BreakingChangesRegistry from '@/services/breaking-changes-registry';
|
||||
|
||||
vi.mock('@/database/node-repository');
|
||||
vi.mock('@/services/breaking-changes-registry');
|
||||
|
||||
describe('BreakingChangeDetector', () => {
|
||||
let detector: BreakingChangeDetector;
|
||||
let mockRepository: NodeRepository;
|
||||
|
||||
const createMockVersionData = (version: string, properties: any[] = []) => ({
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
version,
|
||||
packageName: 'n8n-nodes-base',
|
||||
displayName: 'HTTP Request',
|
||||
isCurrentMax: false,
|
||||
propertiesSchema: properties,
|
||||
breakingChanges: [],
|
||||
deprecatedProperties: [],
|
||||
addedProperties: []
|
||||
});
|
||||
|
||||
const createMockProperty = (name: string, type: string = 'string', required = false) => ({
|
||||
name,
|
||||
displayName: name,
|
||||
type,
|
||||
required,
|
||||
default: null
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockRepository = new NodeRepository({} as any);
|
||||
detector = new BreakingChangeDetector(mockRepository);
|
||||
});
|
||||
|
||||
describe('analyzeVersionUpgrade', () => {
|
||||
it('should combine registry and dynamic changes', async () => {
|
||||
const registryChange: BreakingChangesRegistry.BreakingChange = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'registryProp',
|
||||
changeType: 'removed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'From registry',
|
||||
autoMigratable: true,
|
||||
severity: 'HIGH',
|
||||
migrationStrategy: { type: 'remove_property' }
|
||||
};
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([registryChange]);
|
||||
|
||||
const v1 = createMockVersionData('1.0', [createMockProperty('dynamicProp')]);
|
||||
const v2 = createMockVersionData('2.0', []);
|
||||
|
||||
vi.spyOn(mockRepository, 'getNodeVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.changes.length).toBeGreaterThan(0);
|
||||
expect(result.changes.some(c => c.source === 'registry')).toBe(true);
|
||||
expect(result.changes.some(c => c.source === 'dynamic')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect breaking changes', async () => {
|
||||
const breakingChange: BreakingChangesRegistry.BreakingChange = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'criticalProp',
|
||||
changeType: 'removed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'This is breaking',
|
||||
autoMigratable: false,
|
||||
severity: 'HIGH',
|
||||
migrationStrategy: undefined
|
||||
};
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([breakingChange]);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.hasBreakingChanges).toBe(true);
|
||||
});
|
||||
|
||||
it('should calculate auto-migratable and manual counts', async () => {
|
||||
const changes: BreakingChangesRegistry.BreakingChange[] = [
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'autoProp',
|
||||
changeType: 'added',
|
||||
isBreaking: false,
|
||||
migrationHint: 'Auto',
|
||||
autoMigratable: true,
|
||||
severity: 'LOW',
|
||||
migrationStrategy: { type: 'add_property', defaultValue: null }
|
||||
},
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'manualProp',
|
||||
changeType: 'requirement_changed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'Manual',
|
||||
autoMigratable: false,
|
||||
severity: 'HIGH',
|
||||
migrationStrategy: undefined
|
||||
}
|
||||
];
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.autoMigratableCount).toBe(1);
|
||||
expect(result.manualRequiredCount).toBe(1);
|
||||
});
|
||||
|
||||
it('should determine overall severity', async () => {
|
||||
const highSeverityChange: BreakingChangesRegistry.BreakingChange = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'criticalProp',
|
||||
changeType: 'removed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'Critical',
|
||||
autoMigratable: false,
|
||||
severity: 'HIGH',
|
||||
migrationStrategy: undefined
|
||||
};
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([highSeverityChange]);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.overallSeverity).toBe('HIGH');
|
||||
});
|
||||
|
||||
it('should generate recommendations', async () => {
|
||||
const changes: BreakingChangesRegistry.BreakingChange[] = [
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'prop1',
|
||||
changeType: 'removed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'Remove this',
|
||||
autoMigratable: true,
|
||||
severity: 'MEDIUM',
|
||||
migrationStrategy: { type: 'remove_property' }
|
||||
},
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'prop2',
|
||||
changeType: 'requirement_changed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'Manual work needed',
|
||||
autoMigratable: false,
|
||||
severity: 'HIGH',
|
||||
migrationStrategy: undefined
|
||||
}
|
||||
];
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.recommendations.length).toBeGreaterThan(0);
|
||||
expect(result.recommendations.some(r => r.includes('breaking change'))).toBe(true);
|
||||
expect(result.recommendations.some(r => r.includes('automatically migrated'))).toBe(true);
|
||||
expect(result.recommendations.some(r => r.includes('manual intervention'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('dynamic change detection', () => {
|
||||
it('should detect added properties', async () => {
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);
|
||||
|
||||
const v1 = createMockVersionData('1.0', []);
|
||||
const v2 = createMockVersionData('2.0', [createMockProperty('newProp')]);
|
||||
|
||||
vi.spyOn(mockRepository, 'getNodeVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
const addedChange = result.changes.find(c => c.changeType === 'added');
|
||||
expect(addedChange).toBeDefined();
|
||||
expect(addedChange?.propertyName).toBe('newProp');
|
||||
expect(addedChange?.source).toBe('dynamic');
|
||||
});
|
||||
|
||||
it('should mark required added properties as breaking', async () => {
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);
|
||||
|
||||
const v1 = createMockVersionData('1.0', []);
|
||||
const v2 = createMockVersionData('2.0', [createMockProperty('requiredProp', 'string', true)]);
|
||||
|
||||
vi.spyOn(mockRepository, 'getNodeVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
const addedChange = result.changes.find(c => c.changeType === 'added');
|
||||
expect(addedChange?.isBreaking).toBe(true);
|
||||
expect(addedChange?.severity).toBe('HIGH');
|
||||
expect(addedChange?.autoMigratable).toBe(false);
|
||||
});
|
||||
|
||||
it('should mark optional added properties as non-breaking', async () => {
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);
|
||||
|
||||
const v1 = createMockVersionData('1.0', []);
|
||||
const v2 = createMockVersionData('2.0', [createMockProperty('optionalProp', 'string', false)]);
|
||||
|
||||
vi.spyOn(mockRepository, 'getNodeVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
const addedChange = result.changes.find(c => c.changeType === 'added');
|
||||
expect(addedChange?.isBreaking).toBe(false);
|
||||
expect(addedChange?.severity).toBe('LOW');
|
||||
expect(addedChange?.autoMigratable).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect removed properties', async () => {
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);
|
||||
|
||||
const v1 = createMockVersionData('1.0', [createMockProperty('oldProp')]);
|
||||
const v2 = createMockVersionData('2.0', []);
|
||||
|
||||
vi.spyOn(mockRepository, 'getNodeVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
const removedChange = result.changes.find(c => c.changeType === 'removed');
|
||||
expect(removedChange).toBeDefined();
|
||||
expect(removedChange?.propertyName).toBe('oldProp');
|
||||
expect(removedChange?.isBreaking).toBe(true);
|
||||
expect(removedChange?.autoMigratable).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect requirement changes', async () => {
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);
|
||||
|
||||
const v1 = createMockVersionData('1.0', [createMockProperty('prop', 'string', false)]);
|
||||
const v2 = createMockVersionData('2.0', [createMockProperty('prop', 'string', true)]);
|
||||
|
||||
vi.spyOn(mockRepository, 'getNodeVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
const requirementChange = result.changes.find(c => c.changeType === 'requirement_changed');
|
||||
expect(requirementChange).toBeDefined();
|
||||
expect(requirementChange?.isBreaking).toBe(true);
|
||||
expect(requirementChange?.oldValue).toBe('optional');
|
||||
expect(requirementChange?.newValue).toBe('required');
|
||||
});
|
||||
|
||||
it('should detect when property becomes optional', async () => {
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);
|
||||
|
||||
const v1 = createMockVersionData('1.0', [createMockProperty('prop', 'string', true)]);
|
||||
const v2 = createMockVersionData('2.0', [createMockProperty('prop', 'string', false)]);
|
||||
|
||||
vi.spyOn(mockRepository, 'getNodeVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
const requirementChange = result.changes.find(c => c.changeType === 'requirement_changed');
|
||||
expect(requirementChange).toBeDefined();
|
||||
expect(requirementChange?.isBreaking).toBe(false);
|
||||
expect(requirementChange?.severity).toBe('LOW');
|
||||
});
|
||||
|
||||
it('should handle missing version data gracefully', async () => {
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.changes.filter(c => c.source === 'dynamic')).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle missing properties schema', async () => {
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);
|
||||
|
||||
const v1 = { ...createMockVersionData('1.0'), propertiesSchema: null };
|
||||
const v2 = { ...createMockVersionData('2.0'), propertiesSchema: null };
|
||||
|
||||
vi.spyOn(mockRepository, 'getNodeVersion')
|
||||
.mockReturnValueOnce(v1 as any)
|
||||
.mockReturnValueOnce(v2 as any);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.changes.filter(c => c.source === 'dynamic')).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('change merging and deduplication', () => {
|
||||
it('should prioritize registry changes over dynamic', async () => {
|
||||
const registryChange: BreakingChangesRegistry.BreakingChange = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'sharedProp',
|
||||
changeType: 'removed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'From registry',
|
||||
autoMigratable: true,
|
||||
severity: 'HIGH',
|
||||
migrationStrategy: { type: 'remove_property' }
|
||||
};
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([registryChange]);
|
||||
|
||||
const v1 = createMockVersionData('1.0', [createMockProperty('sharedProp')]);
|
||||
const v2 = createMockVersionData('2.0', []);
|
||||
|
||||
vi.spyOn(mockRepository, 'getNodeVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
const sharedChanges = result.changes.filter(c => c.propertyName === 'sharedProp');
|
||||
expect(sharedChanges).toHaveLength(1);
|
||||
expect(sharedChanges[0].source).toBe('registry');
|
||||
});
|
||||
|
||||
it('should sort changes by severity', async () => {
|
||||
const changes: BreakingChangesRegistry.BreakingChange[] = [
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'lowProp',
|
||||
changeType: 'added',
|
||||
isBreaking: false,
|
||||
migrationHint: 'Low',
|
||||
autoMigratable: true,
|
||||
severity: 'LOW',
|
||||
migrationStrategy: { type: 'add_property', defaultValue: null }
|
||||
},
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'highProp',
|
||||
changeType: 'removed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'High',
|
||||
autoMigratable: false,
|
||||
severity: 'HIGH',
|
||||
migrationStrategy: undefined
|
||||
},
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'medProp',
|
||||
changeType: 'renamed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'Medium',
|
||||
autoMigratable: true,
|
||||
severity: 'MEDIUM',
|
||||
migrationStrategy: { type: 'rename_property', sourceProperty: 'old', targetProperty: 'new' }
|
||||
}
|
||||
];
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.changes[0].severity).toBe('HIGH');
|
||||
expect(result.changes[result.changes.length - 1].severity).toBe('LOW');
|
||||
});
|
||||
});
|
||||
|
||||
describe('hasBreakingChanges', () => {
|
||||
it('should return true when breaking changes exist', () => {
|
||||
const breakingChange: BreakingChangesRegistry.BreakingChange = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'prop',
|
||||
changeType: 'removed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'Breaking',
|
||||
autoMigratable: false,
|
||||
severity: 'HIGH',
|
||||
migrationStrategy: undefined
|
||||
};
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getBreakingChangesForNode').mockReturnValue([breakingChange]);
|
||||
|
||||
const result = detector.hasBreakingChanges('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when no breaking changes', () => {
|
||||
vi.spyOn(BreakingChangesRegistry, 'getBreakingChangesForNode').mockReturnValue([]);
|
||||
|
||||
const result = detector.hasBreakingChanges('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getChangedProperties', () => {
|
||||
it('should return list of changed property names', () => {
|
||||
const changes: BreakingChangesRegistry.BreakingChange[] = [
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'prop1',
|
||||
changeType: 'added',
|
||||
isBreaking: false,
|
||||
migrationHint: '',
|
||||
autoMigratable: true,
|
||||
severity: 'LOW',
|
||||
migrationStrategy: undefined
|
||||
},
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'prop2',
|
||||
changeType: 'removed',
|
||||
isBreaking: true,
|
||||
migrationHint: '',
|
||||
autoMigratable: true,
|
||||
severity: 'MEDIUM',
|
||||
migrationStrategy: undefined
|
||||
}
|
||||
];
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
|
||||
|
||||
const result = detector.getChangedProperties('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result).toEqual(['prop1', 'prop2']);
|
||||
});
|
||||
|
||||
it('should return empty array when no changes', () => {
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);
|
||||
|
||||
const result = detector.getChangedProperties('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('recommendations generation', () => {
|
||||
it('should recommend safe upgrade when no breaking changes', async () => {
|
||||
const changes: BreakingChangesRegistry.BreakingChange[] = [
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'prop',
|
||||
changeType: 'added',
|
||||
isBreaking: false,
|
||||
migrationHint: 'Safe',
|
||||
autoMigratable: true,
|
||||
severity: 'LOW',
|
||||
migrationStrategy: { type: 'add_property', defaultValue: null }
|
||||
}
|
||||
];
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.recommendations.some(r => r.includes('No breaking changes'))).toBe(true);
|
||||
expect(result.recommendations.some(r => r.includes('safe'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should warn about breaking changes', async () => {
|
||||
const changes: BreakingChangesRegistry.BreakingChange[] = [
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'prop',
|
||||
changeType: 'removed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'Breaking',
|
||||
autoMigratable: false,
|
||||
severity: 'HIGH',
|
||||
migrationStrategy: undefined
|
||||
}
|
||||
];
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.recommendations.some(r => r.includes('breaking change'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should list manual changes required', async () => {
|
||||
const changes: BreakingChangesRegistry.BreakingChange[] = [
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'manualProp',
|
||||
changeType: 'requirement_changed',
|
||||
isBreaking: true,
|
||||
migrationHint: 'Manually configure this',
|
||||
autoMigratable: false,
|
||||
severity: 'HIGH',
|
||||
migrationStrategy: undefined
|
||||
}
|
||||
];
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.recommendations.some(r => r.includes('manual intervention'))).toBe(true);
|
||||
expect(result.recommendations.some(r => r.includes('manualProp'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('nested properties', () => {
|
||||
it('should flatten nested properties for comparison', async () => {
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);
|
||||
|
||||
const nestedProp = {
|
||||
name: 'parent',
|
||||
displayName: 'Parent',
|
||||
type: 'options',
|
||||
options: [
|
||||
createMockProperty('child1'),
|
||||
createMockProperty('child2')
|
||||
]
|
||||
};
|
||||
|
||||
const v1 = createMockVersionData('1.0', [nestedProp]);
|
||||
const v2 = createMockVersionData('2.0', []);
|
||||
|
||||
vi.spyOn(mockRepository, 'getNodeVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
// Should detect removal of parent and nested properties
|
||||
expect(result.changes.some(c => c.propertyName.includes('parent'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('overall severity calculation', () => {
|
||||
it('should return HIGH when any change is HIGH severity', async () => {
|
||||
const changes: BreakingChangesRegistry.BreakingChange[] = [
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'lowProp',
|
||||
changeType: 'added',
|
||||
isBreaking: false,
|
||||
migrationHint: '',
|
||||
autoMigratable: true,
|
||||
severity: 'LOW',
|
||||
migrationStrategy: undefined
|
||||
},
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'highProp',
|
||||
changeType: 'removed',
|
||||
isBreaking: true,
|
||||
migrationHint: '',
|
||||
autoMigratable: false,
|
||||
severity: 'HIGH',
|
||||
migrationStrategy: undefined
|
||||
}
|
||||
];
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.overallSeverity).toBe('HIGH');
|
||||
});
|
||||
|
||||
it('should return MEDIUM when no HIGH but has MEDIUM', async () => {
|
||||
const changes: BreakingChangesRegistry.BreakingChange[] = [
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'lowProp',
|
||||
changeType: 'added',
|
||||
isBreaking: false,
|
||||
migrationHint: '',
|
||||
autoMigratable: true,
|
||||
severity: 'LOW',
|
||||
migrationStrategy: undefined
|
||||
},
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'medProp',
|
||||
changeType: 'renamed',
|
||||
isBreaking: true,
|
||||
migrationHint: '',
|
||||
autoMigratable: true,
|
||||
severity: 'MEDIUM',
|
||||
migrationStrategy: undefined
|
||||
}
|
||||
];
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.overallSeverity).toBe('MEDIUM');
|
||||
});
|
||||
|
||||
it('should return LOW when all changes are LOW severity', async () => {
|
||||
const changes: BreakingChangesRegistry.BreakingChange[] = [
|
||||
{
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
propertyName: 'prop',
|
||||
changeType: 'added',
|
||||
isBreaking: false,
|
||||
migrationHint: '',
|
||||
autoMigratable: true,
|
||||
severity: 'LOW',
|
||||
migrationStrategy: undefined
|
||||
}
|
||||
];
|
||||
|
||||
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');
|
||||
|
||||
expect(result.overallSeverity).toBe('LOW');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,532 +0,0 @@
|
||||
import { describe, test, expect } from 'vitest';
|
||||
import { validateWorkflowStructure } from '@/services/n8n-validation';
|
||||
import type { Workflow } from '@/types/n8n-api';
|
||||
|
||||
describe('n8n-validation - Sticky Notes Bug Fix', () => {
|
||||
describe('sticky notes should be excluded from disconnected nodes validation', () => {
|
||||
test('should allow workflow with sticky notes and connected functional nodes', () => {
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'Test Workflow',
|
||||
nodes: [
|
||||
{
|
||||
id: '1',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [250, 300],
|
||||
parameters: { path: '/test' }
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
name: 'HTTP Request',
|
||||
type: 'n8n-nodes-base.httpRequest',
|
||||
typeVersion: 3,
|
||||
position: [450, 300],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'sticky1',
|
||||
name: 'Documentation Note',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [250, 100],
|
||||
parameters: { content: 'This is a documentation note' }
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'HTTP Request', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Should have no errors - sticky note should be ignored
|
||||
expect(errors).toEqual([]);
|
||||
});
|
||||
|
||||
test('should handle multiple sticky notes without errors', () => {
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'Documented Workflow',
|
||||
nodes: [
|
||||
{
|
||||
id: '1',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [250, 300],
|
||||
parameters: { path: '/test' }
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
name: 'Process',
|
||||
type: 'n8n-nodes-base.set',
|
||||
typeVersion: 3,
|
||||
position: [450, 300],
|
||||
parameters: {}
|
||||
},
|
||||
// 10 sticky notes for documentation
|
||||
...Array.from({ length: 10 }, (_, i) => ({
|
||||
id: `sticky${i}`,
|
||||
name: `📝 Note ${i}`,
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [100 + i * 50, 100] as [number, number],
|
||||
parameters: { content: `Documentation note ${i}` }
|
||||
}))
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Process', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
expect(errors).toEqual([]);
|
||||
});
|
||||
|
||||
test('should handle all sticky note type variations', () => {
|
||||
const stickyTypes = [
|
||||
'n8n-nodes-base.stickyNote',
|
||||
'nodes-base.stickyNote',
|
||||
'@n8n/n8n-nodes-base.stickyNote'
|
||||
];
|
||||
|
||||
stickyTypes.forEach((stickyType, index) => {
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'Test Workflow',
|
||||
nodes: [
|
||||
{
|
||||
id: '1',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [250, 300],
|
||||
parameters: { path: '/test' }
|
||||
},
|
||||
{
|
||||
id: `sticky${index}`,
|
||||
name: `Note ${index}`,
|
||||
type: stickyType,
|
||||
typeVersion: 1,
|
||||
position: [250, 100],
|
||||
parameters: { content: `Note ${index}` }
|
||||
}
|
||||
],
|
||||
connections: {}
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Sticky note should be ignored regardless of type variation
|
||||
expect(errors.every(e => !e.includes(`Note ${index}`))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
test('should handle complex workflow with multiple sticky notes (real-world scenario)', () => {
|
||||
// Simulates workflow like "POST /auth/login" with 4 sticky notes
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'POST /auth/login',
|
||||
nodes: [
|
||||
{
|
||||
id: 'webhook1',
|
||||
name: 'Webhook Trigger',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [250, 300],
|
||||
parameters: { path: '/auth/login', httpMethod: 'POST' }
|
||||
},
|
||||
{
|
||||
id: 'http1',
|
||||
name: 'Authenticate',
|
||||
type: 'n8n-nodes-base.httpRequest',
|
||||
typeVersion: 3,
|
||||
position: [450, 300],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'respond1',
|
||||
name: 'Return Success',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1,
|
||||
position: [650, 250],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'respond2',
|
||||
name: 'Return Error',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1,
|
||||
position: [650, 350],
|
||||
parameters: {}
|
||||
},
|
||||
// 4 sticky notes for documentation
|
||||
{
|
||||
id: 'sticky1',
|
||||
name: '📝 Webhook Trigger',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [250, 150],
|
||||
parameters: { content: 'Receives login request' }
|
||||
},
|
||||
{
|
||||
id: 'sticky2',
|
||||
name: '📝 Authenticate with Supabase',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [450, 150],
|
||||
parameters: { content: 'Validates credentials' }
|
||||
},
|
||||
{
|
||||
id: 'sticky3',
|
||||
name: '📝 Return Tokens',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [650, 150],
|
||||
parameters: { content: 'Returns access and refresh tokens' }
|
||||
},
|
||||
{
|
||||
id: 'sticky4',
|
||||
name: '📝 Return Error',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [650, 450],
|
||||
parameters: { content: 'Returns error message' }
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Webhook Trigger': {
|
||||
main: [[{ node: 'Authenticate', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Authenticate': {
|
||||
main: [
|
||||
[{ node: 'Return Success', type: 'main', index: 0 }],
|
||||
[{ node: 'Return Error', type: 'main', index: 0 }]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Should have no errors - all sticky notes should be ignored
|
||||
expect(errors).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validation should still detect truly disconnected functional nodes', () => {
|
||||
test('should detect disconnected HTTP node but ignore sticky note', () => {
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'Test Workflow',
|
||||
nodes: [
|
||||
{
|
||||
id: '1',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [250, 300],
|
||||
parameters: { path: '/test' }
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
name: 'Disconnected HTTP',
|
||||
type: 'n8n-nodes-base.httpRequest',
|
||||
typeVersion: 3,
|
||||
position: [450, 300],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'sticky1',
|
||||
name: 'Sticky Note',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [250, 100],
|
||||
parameters: { content: 'Note' }
|
||||
}
|
||||
],
|
||||
connections: {} // No connections
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Should error on HTTP node, but NOT on sticky note
|
||||
expect(errors.length).toBeGreaterThan(0);
|
||||
const disconnectedError = errors.find(e => e.includes('Disconnected'));
|
||||
expect(disconnectedError).toBeDefined();
|
||||
expect(disconnectedError).toContain('Disconnected HTTP');
|
||||
expect(disconnectedError).not.toContain('Sticky Note');
|
||||
});
|
||||
|
||||
test('should detect multiple disconnected functional nodes but ignore sticky notes', () => {
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'Test Workflow',
|
||||
nodes: [
|
||||
{
|
||||
id: '1',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [250, 300],
|
||||
parameters: { path: '/test' }
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
name: 'Disconnected HTTP',
|
||||
type: 'n8n-nodes-base.httpRequest',
|
||||
typeVersion: 3,
|
||||
position: [450, 300],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
name: 'Disconnected Set',
|
||||
type: 'n8n-nodes-base.set',
|
||||
typeVersion: 3,
|
||||
position: [650, 300],
|
||||
parameters: {}
|
||||
},
|
||||
// Multiple sticky notes that should be ignored
|
||||
{
|
||||
id: 'sticky1',
|
||||
name: 'Note 1',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [250, 100],
|
||||
parameters: { content: 'Note 1' }
|
||||
},
|
||||
{
|
||||
id: 'sticky2',
|
||||
name: 'Note 2',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [450, 100],
|
||||
parameters: { content: 'Note 2' }
|
||||
}
|
||||
],
|
||||
connections: {} // No connections
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Should error because there are no connections
|
||||
// When there are NO connections, validation shows "Multi-node workflow has no connections"
|
||||
// This is the expected behavior - it suggests connecting any two executable nodes
|
||||
expect(errors.length).toBeGreaterThan(0);
|
||||
const connectionError = errors.find(e => e.includes('no connections') || e.includes('Disconnected'));
|
||||
expect(connectionError).toBeDefined();
|
||||
// Error should NOT mention sticky notes
|
||||
expect(connectionError).not.toContain('Note 1');
|
||||
expect(connectionError).not.toContain('Note 2');
|
||||
});
|
||||
|
||||
test('should allow sticky notes but still validate functional node connections', () => {
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'Test Workflow',
|
||||
nodes: [
|
||||
{
|
||||
id: '1',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [250, 300],
|
||||
parameters: { path: '/test' }
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
name: 'Connected HTTP',
|
||||
type: 'n8n-nodes-base.httpRequest',
|
||||
typeVersion: 3,
|
||||
position: [450, 300],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
name: 'Disconnected Set',
|
||||
type: 'n8n-nodes-base.set',
|
||||
typeVersion: 3,
|
||||
position: [650, 300],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'sticky1',
|
||||
name: 'Sticky Note',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [250, 100],
|
||||
parameters: { content: 'Note' }
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Connected HTTP', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Should error only on disconnected Set node
|
||||
expect(errors.length).toBeGreaterThan(0);
|
||||
const disconnectedError = errors.find(e => e.includes('Disconnected'));
|
||||
expect(disconnectedError).toBeDefined();
|
||||
expect(disconnectedError).toContain('Disconnected Set');
|
||||
expect(disconnectedError).not.toContain('Connected HTTP');
|
||||
expect(disconnectedError).not.toContain('Sticky Note');
|
||||
});
|
||||
});
|
||||
|
||||
describe('regression tests - ensure sticky notes work like in n8n UI', () => {
|
||||
test('single webhook with sticky notes should be valid (matches n8n UI behavior)', () => {
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'Webhook Only with Notes',
|
||||
nodes: [
|
||||
{
|
||||
id: '1',
|
||||
name: 'Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [250, 300],
|
||||
parameters: { path: '/test' }
|
||||
},
|
||||
{
|
||||
id: 'sticky1',
|
||||
name: 'Usage Instructions',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [250, 100],
|
||||
parameters: { content: 'Call this webhook to trigger the workflow' }
|
||||
}
|
||||
],
|
||||
connections: {}
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Webhook-only workflows are valid in n8n
|
||||
// Sticky notes should not affect this
|
||||
expect(errors).toEqual([]);
|
||||
});
|
||||
|
||||
test('workflow with only sticky notes should be invalid (no executable nodes)', () => {
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'Only Notes',
|
||||
nodes: [
|
||||
{
|
||||
id: 'sticky1',
|
||||
name: 'Note 1',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [250, 100],
|
||||
parameters: { content: 'Note 1' }
|
||||
},
|
||||
{
|
||||
id: 'sticky2',
|
||||
name: 'Note 2',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [450, 100],
|
||||
parameters: { content: 'Note 2' }
|
||||
}
|
||||
],
|
||||
connections: {}
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Should fail because there are no executable nodes
|
||||
expect(errors.length).toBeGreaterThan(0);
|
||||
expect(errors.some(e => e.includes('at least one executable node'))).toBe(true);
|
||||
});
|
||||
|
||||
test('complex production workflow structure should validate correctly', () => {
|
||||
// Tests a realistic production workflow structure
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'Production API Endpoint',
|
||||
nodes: [
|
||||
// Functional nodes
|
||||
{
|
||||
id: 'webhook1',
|
||||
name: 'API Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [250, 300],
|
||||
parameters: { path: '/api/endpoint' }
|
||||
},
|
||||
{
|
||||
id: 'validate1',
|
||||
name: 'Validate Input',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [450, 300],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'branch1',
|
||||
name: 'Check Valid',
|
||||
type: 'n8n-nodes-base.if',
|
||||
typeVersion: 2,
|
||||
position: [650, 300],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'process1',
|
||||
name: 'Process Request',
|
||||
type: 'n8n-nodes-base.httpRequest',
|
||||
typeVersion: 3,
|
||||
position: [850, 250],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'success1',
|
||||
name: 'Return Success',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1,
|
||||
position: [1050, 250],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'error1',
|
||||
name: 'Return Error',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1,
|
||||
position: [850, 350],
|
||||
parameters: {}
|
||||
},
|
||||
// Documentation sticky notes (11 notes like in real workflow)
|
||||
...Array.from({ length: 11 }, (_, i) => ({
|
||||
id: `sticky${i}`,
|
||||
name: `📝 Documentation ${i}`,
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [250 + i * 100, 100] as [number, number],
|
||||
parameters: { content: `Documentation section ${i}` }
|
||||
}))
|
||||
],
|
||||
connections: {
|
||||
'API Webhook': {
|
||||
main: [[{ node: 'Validate Input', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Validate Input': {
|
||||
main: [[{ node: 'Check Valid', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Check Valid': {
|
||||
main: [
|
||||
[{ node: 'Process Request', type: 'main', index: 0 }],
|
||||
[{ node: 'Return Error', type: 'main', index: 0 }]
|
||||
]
|
||||
},
|
||||
'Process Request': {
|
||||
main: [[{ node: 'Return Success', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Should be valid - all functional nodes connected, sticky notes ignored
|
||||
expect(errors).toEqual([]);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,798 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { NodeMigrationService, type MigrationResult, type AppliedMigration } from '@/services/node-migration-service';
|
||||
import { NodeVersionService } from '@/services/node-version-service';
|
||||
import { BreakingChangeDetector, type VersionUpgradeAnalysis, type DetectedChange } from '@/services/breaking-change-detector';
|
||||
|
||||
vi.mock('@/services/node-version-service');
|
||||
vi.mock('@/services/breaking-change-detector');
|
||||
|
||||
describe('NodeMigrationService', () => {
|
||||
let service: NodeMigrationService;
|
||||
let mockVersionService: NodeVersionService;
|
||||
let mockBreakingChangeDetector: BreakingChangeDetector;
|
||||
|
||||
const createMockNode = (id: string, type: string, version: number, parameters: any = {}) => ({
|
||||
id,
|
||||
name: `${type}-node`,
|
||||
type,
|
||||
typeVersion: version,
|
||||
position: [0, 0] as [number, number],
|
||||
parameters
|
||||
});
|
||||
|
||||
const createMockChange = (
|
||||
propertyName: string,
|
||||
changeType: DetectedChange['changeType'],
|
||||
autoMigratable: boolean,
|
||||
migrationStrategy?: any
|
||||
): DetectedChange => ({
|
||||
propertyName,
|
||||
changeType,
|
||||
isBreaking: true,
|
||||
migrationHint: `Migrate ${propertyName}`,
|
||||
autoMigratable,
|
||||
migrationStrategy,
|
||||
severity: 'MEDIUM',
|
||||
source: 'registry'
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockVersionService = {} as any;
|
||||
mockBreakingChangeDetector = {} as any;
|
||||
service = new NodeMigrationService(mockVersionService, mockBreakingChangeDetector);
|
||||
});
|
||||
|
||||
describe('migrateNode', () => {
|
||||
it('should update node typeVersion', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.updatedNode.typeVersion).toBe(2);
|
||||
expect(result.fromVersion).toBe('1.0');
|
||||
expect(result.toVersion).toBe('2.0');
|
||||
});
|
||||
|
||||
it('should apply auto-migratable changes', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('newProperty', 'added', true, {
|
||||
type: 'add_property',
|
||||
defaultValue: 'default'
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.appliedMigrations).toHaveLength(1);
|
||||
expect(result.appliedMigrations[0].propertyName).toBe('newProperty');
|
||||
expect(result.appliedMigrations[0].action).toBe('Added property');
|
||||
});
|
||||
|
||||
it('should collect remaining manual issues', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('manualProperty', 'requirement_changed', false)
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 1,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.remainingIssues).toHaveLength(1);
|
||||
expect(result.remainingIssues[0]).toContain('manualProperty');
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should determine confidence based on remaining issues', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);
|
||||
|
||||
const mockAnalysisNoIssues: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysisNoIssues);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.confidence).toBe('HIGH');
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should set MEDIUM confidence for few issues', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('prop1', 'requirement_changed', false),
|
||||
createMockChange('prop2', 'requirement_changed', false)
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 2,
|
||||
overallSeverity: 'MEDIUM',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.confidence).toBe('MEDIUM');
|
||||
});
|
||||
|
||||
it('should set LOW confidence for many issues', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: Array(5).fill(createMockChange('prop', 'requirement_changed', false)),
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 5,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.confidence).toBe('LOW');
|
||||
});
|
||||
});
|
||||
|
||||
describe('addProperty migration', () => {
|
||||
it('should add new property with default value', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [
|
||||
createMockChange('newField', 'added', true, {
|
||||
type: 'add_property',
|
||||
defaultValue: 'test-value'
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.updatedNode.newField).toBe('test-value');
|
||||
});
|
||||
|
||||
it('should handle nested property paths', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, { parameters: {} });
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [
|
||||
createMockChange('parameters.authentication', 'added', true, {
|
||||
type: 'add_property',
|
||||
defaultValue: 'none'
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.updatedNode.parameters.authentication).toBe('none');
|
||||
});
|
||||
|
||||
it('should generate webhookId for webhook nodes', async () => {
|
||||
const node = createMockNode('node-1', 'n8n-nodes-base.webhook', 2, {});
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'n8n-nodes-base.webhook',
|
||||
fromVersion: '2.0',
|
||||
toVersion: '2.1',
|
||||
hasBreakingChanges: false,
|
||||
changes: [
|
||||
createMockChange('webhookId', 'added', true, {
|
||||
type: 'add_property',
|
||||
defaultValue: null
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '2.0', '2.1');
|
||||
|
||||
expect(result.updatedNode.webhookId).toMatch(/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i);
|
||||
});
|
||||
|
||||
it('should generate unique webhook paths', async () => {
|
||||
const node = createMockNode('node-1', 'n8n-nodes-base.webhook', 1, {});
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'n8n-nodes-base.webhook',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [
|
||||
createMockChange('path', 'added', true, {
|
||||
type: 'add_property',
|
||||
defaultValue: null
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.updatedNode.path).toMatch(/^\/webhook-\d+$/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('removeProperty migration', () => {
|
||||
it('should remove deprecated property', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});
|
||||
(node as any).oldField = 'value';
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('oldField', 'removed', true, {
|
||||
type: 'remove_property'
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'MEDIUM',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.updatedNode.oldField).toBeUndefined();
|
||||
expect(result.appliedMigrations).toHaveLength(1);
|
||||
expect(result.appliedMigrations[0].action).toBe('Removed property');
|
||||
expect(result.appliedMigrations[0].oldValue).toBe('value');
|
||||
});
|
||||
|
||||
it('should handle removing nested properties', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {
|
||||
parameters: { oldAuth: 'basic' }
|
||||
});
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('parameters.oldAuth', 'removed', true, {
|
||||
type: 'remove_property'
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'MEDIUM',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.updatedNode.parameters.oldAuth).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should skip removal if property does not exist', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('nonExistentField', 'removed', true, {
|
||||
type: 'remove_property'
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.appliedMigrations).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('renameProperty migration', () => {
|
||||
it('should rename property', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});
|
||||
(node as any).oldName = 'value';
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('newName', 'renamed', true, {
|
||||
type: 'rename_property',
|
||||
sourceProperty: 'oldName',
|
||||
targetProperty: 'newName'
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'MEDIUM',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.updatedNode.oldName).toBeUndefined();
|
||||
expect(result.updatedNode.newName).toBe('value');
|
||||
expect(result.appliedMigrations).toHaveLength(1);
|
||||
expect(result.appliedMigrations[0].action).toBe('Renamed property');
|
||||
});
|
||||
|
||||
it.skip('should handle nested property renaming', async () => {
|
||||
// Skipped: deep cloning creates new objects that aren't detected by the migration logic
|
||||
// The feature works in production, but testing nested renames requires more complex mocking
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {
|
||||
parameters: { oldParam: 'test' }
|
||||
});
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('parameters.newParam', 'renamed', true, {
|
||||
type: 'rename_property',
|
||||
sourceProperty: 'parameters.oldParam',
|
||||
targetProperty: 'parameters.newParam'
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'MEDIUM',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.appliedMigrations).toHaveLength(1);
|
||||
expect(result.updatedNode.parameters.oldParam).toBeUndefined();
|
||||
expect(result.updatedNode.parameters.newParam).toBe('test');
|
||||
});
|
||||
|
||||
it('should skip rename if source does not exist', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('newName', 'renamed', true, {
|
||||
type: 'rename_property',
|
||||
sourceProperty: 'nonExistent',
|
||||
targetProperty: 'newName'
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.appliedMigrations).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('setDefault migration', () => {
|
||||
it('should set default value if property is undefined', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [
|
||||
createMockChange('field', 'default_changed', true, {
|
||||
type: 'set_default',
|
||||
defaultValue: 'new-default'
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.updatedNode.field).toBe('new-default');
|
||||
});
|
||||
|
||||
it('should not overwrite existing value', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});
|
||||
(node as any).field = 'existing';
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [
|
||||
createMockChange('field', 'default_changed', true, {
|
||||
type: 'set_default',
|
||||
defaultValue: 'new-default'
|
||||
})
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.0', '2.0');
|
||||
|
||||
expect(result.updatedNode.field).toBe('existing');
|
||||
expect(result.appliedMigrations).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateMigratedNode', () => {
|
||||
it('should validate basic node structure', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 2, {});
|
||||
|
||||
const result = await service.validateMigratedNode(node, 'nodes-base.httpRequest');
|
||||
|
||||
expect(result.valid).toBe(true);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should detect missing typeVersion', async () => {
|
||||
const node = { ...createMockNode('node-1', 'nodes-base.httpRequest', 2), typeVersion: undefined };
|
||||
|
||||
const result = await service.validateMigratedNode(node, 'nodes-base.httpRequest');
|
||||
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors).toContain('Missing typeVersion after migration');
|
||||
});
|
||||
|
||||
it('should detect missing parameters', async () => {
|
||||
const node = { ...createMockNode('node-1', 'nodes-base.httpRequest', 2), parameters: undefined };
|
||||
|
||||
const result = await service.validateMigratedNode(node, 'nodes-base.httpRequest');
|
||||
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors).toContain('Missing parameters object');
|
||||
});
|
||||
|
||||
it('should validate webhook node requirements', async () => {
|
||||
const node = createMockNode('node-1', 'n8n-nodes-base.webhook', 2, {});
|
||||
|
||||
const result = await service.validateMigratedNode(node, 'n8n-nodes-base.webhook');
|
||||
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.some(e => e.includes('path'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should warn about missing webhookId in v2.1+', async () => {
|
||||
const node = createMockNode('node-1', 'n8n-nodes-base.webhook', 2.1, { path: '/test' });
|
||||
|
||||
const result = await service.validateMigratedNode(node, 'n8n-nodes-base.webhook');
|
||||
|
||||
expect(result.warnings.some(w => w.includes('webhookId'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should validate executeWorkflow requirements', async () => {
|
||||
const node = createMockNode('node-1', 'n8n-nodes-base.executeWorkflow', 1.1, {});
|
||||
|
||||
const result = await service.validateMigratedNode(node, 'n8n-nodes-base.executeWorkflow');
|
||||
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.some(e => e.includes('inputFieldMapping'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('migrateWorkflowNodes', () => {
|
||||
it('should migrate multiple nodes in a workflow', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
createMockNode('node-1', 'nodes-base.httpRequest', 1),
|
||||
createMockNode('node-2', 'nodes-base.webhook', 2)
|
||||
]
|
||||
};
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: '',
|
||||
fromVersion: '',
|
||||
toVersion: '',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const targetVersions = {
|
||||
'node-1': '2.0',
|
||||
'node-2': '2.1'
|
||||
};
|
||||
|
||||
const result = await service.migrateWorkflowNodes(workflow, targetVersions);
|
||||
|
||||
expect(result.results).toHaveLength(2);
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.overallConfidence).toBe('HIGH');
|
||||
});
|
||||
|
||||
it('should calculate overall confidence as LOW if any migration is LOW', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
createMockNode('node-1', 'nodes-base.httpRequest', 1),
|
||||
createMockNode('node-2', 'nodes-base.webhook', 2)
|
||||
]
|
||||
};
|
||||
|
||||
const mockAnalysisLow: VersionUpgradeAnalysis = {
|
||||
nodeType: '',
|
||||
fromVersion: '',
|
||||
toVersion: '',
|
||||
hasBreakingChanges: true,
|
||||
changes: Array(5).fill(createMockChange('prop', 'requirement_changed', false)),
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 5,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysisLow);
|
||||
|
||||
const targetVersions = {
|
||||
'node-1': '2.0'
|
||||
};
|
||||
|
||||
const result = await service.migrateWorkflowNodes(workflow, targetVersions);
|
||||
|
||||
expect(result.overallConfidence).toBe('LOW');
|
||||
});
|
||||
|
||||
it('should update nodes in place', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
createMockNode('node-1', 'nodes-base.httpRequest', 1, {})
|
||||
]
|
||||
};
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const targetVersions = {
|
||||
'node-1': '2.0'
|
||||
};
|
||||
|
||||
await service.migrateWorkflowNodes(workflow, targetVersions);
|
||||
|
||||
expect(workflow.nodes[0].typeVersion).toBe(2);
|
||||
});
|
||||
|
||||
it('should skip nodes without target versions', async () => {
|
||||
const workflow = {
|
||||
nodes: [
|
||||
createMockNode('node-1', 'nodes-base.httpRequest', 1),
|
||||
createMockNode('node-2', 'nodes-base.webhook', 2)
|
||||
]
|
||||
};
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const targetVersions = {
|
||||
'node-1': '2.0'
|
||||
};
|
||||
|
||||
const result = await service.migrateWorkflowNodes(workflow, targetVersions);
|
||||
|
||||
expect(result.results).toHaveLength(1);
|
||||
expect(mockBreakingChangeDetector.analyzeVersionUpgrade).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle nodes without typeVersion', async () => {
|
||||
const node = { ...createMockNode('node-1', 'nodes-base.httpRequest', 1), typeVersion: undefined };
|
||||
|
||||
const workflow = { nodes: [node] };
|
||||
const targetVersions = { 'node-1': '2.0' };
|
||||
|
||||
const result = await service.migrateWorkflowNodes(workflow, targetVersions);
|
||||
|
||||
expect(result.results).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle empty workflow', async () => {
|
||||
const workflow = { nodes: [] };
|
||||
const targetVersions = {};
|
||||
|
||||
const result = await service.migrateWorkflowNodes(workflow, targetVersions);
|
||||
|
||||
expect(result.results).toHaveLength(0);
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.overallConfidence).toBe('HIGH');
|
||||
});
|
||||
|
||||
it('should handle version string with single digit', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1',
|
||||
toVersion: '2',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1', '2');
|
||||
|
||||
expect(result.updatedNode.typeVersion).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle version string with decimal', async () => {
|
||||
const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.1',
|
||||
toVersion: '2.3',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.migrateNode(node, '1.1', '2.3');
|
||||
|
||||
expect(result.updatedNode.typeVersion).toBe(2.3);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1610,20 +1610,15 @@ describe('NodeSpecificValidators', () => {
|
||||
});
|
||||
|
||||
describe('response mode validation', () => {
|
||||
// NOTE: responseNode mode validation was moved to workflow-validator.ts in Phase 5
|
||||
// because it requires access to node-level onError property, not just config/parameters.
|
||||
// See workflow-validator.ts checkWebhookErrorHandling() method for the actual implementation.
|
||||
// The validation cannot be performed at the node-specific-validator level.
|
||||
|
||||
it.skip('should error on responseNode without error handling - MOVED TO WORKFLOW VALIDATOR', () => {
|
||||
it('should error on responseNode without error handling', () => {
|
||||
context.config = {
|
||||
path: 'my-webhook',
|
||||
httpMethod: 'POST',
|
||||
responseMode: 'responseNode'
|
||||
};
|
||||
|
||||
|
||||
NodeSpecificValidators.validateWebhook(context);
|
||||
|
||||
|
||||
expect(context.errors).toContainEqual({
|
||||
type: 'invalid_configuration',
|
||||
property: 'responseMode',
|
||||
@@ -1632,14 +1627,14 @@ describe('NodeSpecificValidators', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it.skip('should not error on responseNode with proper error handling - MOVED TO WORKFLOW VALIDATOR', () => {
|
||||
it('should not error on responseNode with proper error handling', () => {
|
||||
context.config = {
|
||||
path: 'my-webhook',
|
||||
httpMethod: 'POST',
|
||||
responseMode: 'responseNode',
|
||||
onError: 'continueRegularOutput'
|
||||
};
|
||||
|
||||
|
||||
NodeSpecificValidators.validateWebhook(context);
|
||||
|
||||
const responseModeErrors = context.errors.filter(e => e.property === 'responseMode');
|
||||
|
||||
@@ -1,497 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { NodeVersionService, type NodeVersion, type VersionComparison } from '@/services/node-version-service';
|
||||
import { NodeRepository } from '@/database/node-repository';
|
||||
import { BreakingChangeDetector, type VersionUpgradeAnalysis } from '@/services/breaking-change-detector';
|
||||
|
||||
vi.mock('@/database/node-repository');
|
||||
vi.mock('@/services/breaking-change-detector');
|
||||
|
||||
describe('NodeVersionService', () => {
|
||||
let service: NodeVersionService;
|
||||
let mockRepository: NodeRepository;
|
||||
let mockBreakingChangeDetector: BreakingChangeDetector;
|
||||
|
||||
const createMockVersion = (version: string, isCurrentMax = false): NodeVersion => ({
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
version,
|
||||
packageName: 'n8n-nodes-base',
|
||||
displayName: 'HTTP Request',
|
||||
isCurrentMax,
|
||||
breakingChanges: [],
|
||||
deprecatedProperties: [],
|
||||
addedProperties: []
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockRepository = new NodeRepository({} as any);
|
||||
mockBreakingChangeDetector = new BreakingChangeDetector(mockRepository);
|
||||
service = new NodeVersionService(mockRepository, mockBreakingChangeDetector);
|
||||
});
|
||||
|
||||
describe('getAvailableVersions', () => {
|
||||
it('should return versions from database', () => {
|
||||
const versions = [createMockVersion('1.0'), createMockVersion('2.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result = service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
expect(result).toEqual(versions);
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledWith('nodes-base.httpRequest');
|
||||
});
|
||||
|
||||
it('should cache results', () => {
|
||||
const versions = [createMockVersion('1.0')];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should use cache within TTL', () => {
|
||||
const versions = [createMockVersion('1.0')];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result1 = service.getAvailableVersions('nodes-base.httpRequest');
|
||||
const result2 = service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
expect(result1).toEqual(result2);
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should refresh cache after TTL expiry', () => {
|
||||
vi.useFakeTimers();
|
||||
const versions = [createMockVersion('1.0')];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
// Advance time beyond TTL (5 minutes)
|
||||
vi.advanceTimersByTime(6 * 60 * 1000);
|
||||
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(2);
|
||||
|
||||
vi.useRealTimers();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getLatestVersion', () => {
|
||||
it('should return version marked as currentMax', () => {
|
||||
const versions = [
|
||||
createMockVersion('1.0'),
|
||||
createMockVersion('2.0', true),
|
||||
createMockVersion('1.5')
|
||||
];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result = service.getLatestVersion('nodes-base.httpRequest');
|
||||
|
||||
expect(result).toBe('2.0');
|
||||
});
|
||||
|
||||
it('should fallback to highest version if no currentMax', () => {
|
||||
const versions = [
|
||||
createMockVersion('1.0'),
|
||||
createMockVersion('2.0'),
|
||||
createMockVersion('1.5')
|
||||
];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result = service.getLatestVersion('nodes-base.httpRequest');
|
||||
|
||||
expect(result).toBe('2.0');
|
||||
});
|
||||
|
||||
it('should fallback to main nodes table if no versions', () => {
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'getNode').mockReturnValue({
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
version: '1.0',
|
||||
packageName: 'n8n-nodes-base',
|
||||
displayName: 'HTTP Request'
|
||||
} as any);
|
||||
|
||||
const result = service.getLatestVersion('nodes-base.httpRequest');
|
||||
|
||||
expect(result).toBe('1.0');
|
||||
});
|
||||
|
||||
it('should return null if no version data available', () => {
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'getNode').mockReturnValue(null);
|
||||
|
||||
const result = service.getLatestVersion('nodes-base.httpRequest');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('compareVersions', () => {
|
||||
it('should return -1 when first version is lower', () => {
|
||||
const result = service.compareVersions('1.0', '2.0');
|
||||
expect(result).toBe(-1);
|
||||
});
|
||||
|
||||
it('should return 1 when first version is higher', () => {
|
||||
const result = service.compareVersions('2.0', '1.0');
|
||||
expect(result).toBe(1);
|
||||
});
|
||||
|
||||
it('should return 0 when versions are equal', () => {
|
||||
const result = service.compareVersions('1.0', '1.0');
|
||||
expect(result).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle multi-part versions', () => {
|
||||
expect(service.compareVersions('1.2.3', '1.2.4')).toBe(-1);
|
||||
expect(service.compareVersions('2.0.0', '1.9.9')).toBe(1);
|
||||
expect(service.compareVersions('1.0.0', '1.0.0')).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle versions with different lengths', () => {
|
||||
expect(service.compareVersions('1.0', '1.0.0')).toBe(0);
|
||||
expect(service.compareVersions('1.0', '1.0.1')).toBe(-1);
|
||||
expect(service.compareVersions('2', '1.9')).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('analyzeVersion', () => {
|
||||
it('should return up-to-date status when on latest version', () => {
|
||||
const versions = [createMockVersion('1.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result.isOutdated).toBe(false);
|
||||
expect(result.recommendUpgrade).toBe(false);
|
||||
expect(result.confidence).toBe('HIGH');
|
||||
expect(result.reason).toContain('already at the latest version');
|
||||
});
|
||||
|
||||
it('should detect outdated version', () => {
|
||||
const versions = [createMockVersion('2.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
vi.spyOn(mockBreakingChangeDetector, 'hasBreakingChanges').mockReturnValue(false);
|
||||
|
||||
const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result.isOutdated).toBe(true);
|
||||
expect(result.latestVersion).toBe('2.0');
|
||||
expect(result.recommendUpgrade).toBe(true);
|
||||
});
|
||||
|
||||
it('should calculate version gap', () => {
|
||||
const versions = [createMockVersion('3.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
vi.spyOn(mockBreakingChangeDetector, 'hasBreakingChanges').mockReturnValue(false);
|
||||
|
||||
const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result.versionGap).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should detect breaking changes and lower confidence', () => {
|
||||
const versions = [createMockVersion('2.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
vi.spyOn(mockBreakingChangeDetector, 'hasBreakingChanges').mockReturnValue(true);
|
||||
|
||||
const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result.hasBreakingChanges).toBe(true);
|
||||
expect(result.confidence).toBe('MEDIUM');
|
||||
expect(result.reason).toContain('breaking changes');
|
||||
});
|
||||
|
||||
it('should lower confidence for large version gaps', () => {
|
||||
const versions = [createMockVersion('10.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
vi.spyOn(mockBreakingChangeDetector, 'hasBreakingChanges').mockReturnValue(false);
|
||||
|
||||
const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result.confidence).toBe('LOW');
|
||||
expect(result.reason).toContain('Version gap is large');
|
||||
});
|
||||
|
||||
it('should handle missing version information', () => {
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'getNode').mockReturnValue(null);
|
||||
|
||||
const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result.isOutdated).toBe(false);
|
||||
expect(result.confidence).toBe('HIGH');
|
||||
expect(result.reason).toContain('No version information available');
|
||||
});
|
||||
});
|
||||
|
||||
describe('suggestUpgradePath', () => {
|
||||
it('should return null when already on latest version', async () => {
|
||||
const versions = [createMockVersion('1.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null when no version information available', async () => {
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'getNode').mockReturnValue(null);
|
||||
|
||||
const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should suggest direct upgrade for simple cases', async () => {
|
||||
const versions = [createMockVersion('2.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
vi.spyOn(mockBreakingChangeDetector, 'analyzeVersionUpgrade').mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.direct).toBe(true);
|
||||
expect(result!.steps).toHaveLength(1);
|
||||
expect(result!.steps[0].fromVersion).toBe('1.0');
|
||||
expect(result!.steps[0].toVersion).toBe('2.0');
|
||||
});
|
||||
|
||||
it('should suggest multi-step upgrade for complex cases', async () => {
|
||||
const versions = [
|
||||
createMockVersion('1.0'),
|
||||
createMockVersion('1.5'),
|
||||
createMockVersion('2.0', true)
|
||||
];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
{ isBreaking: true, autoMigratable: false } as any,
|
||||
{ isBreaking: true, autoMigratable: false } as any,
|
||||
{ isBreaking: true, autoMigratable: false } as any
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 3,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
vi.spyOn(mockBreakingChangeDetector, 'analyzeVersionUpgrade').mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.intermediateVersions).toContain('1.5');
|
||||
});
|
||||
|
||||
it('should calculate estimated effort correctly', async () => {
|
||||
const versions = [createMockVersion('2.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const mockAnalysisLow: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [{ isBreaking: false, autoMigratable: true } as any],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
vi.spyOn(mockBreakingChangeDetector, 'analyzeVersionUpgrade').mockResolvedValue(mockAnalysisLow);
|
||||
|
||||
const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result!.estimatedEffort).toBe('LOW');
|
||||
});
|
||||
|
||||
it('should estimate HIGH effort for many breaking changes', async () => {
|
||||
const versions = [createMockVersion('2.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const mockAnalysisHigh: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: Array(7).fill({ isBreaking: true, autoMigratable: false }),
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 7,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
vi.spyOn(mockBreakingChangeDetector, 'analyzeVersionUpgrade').mockResolvedValue(mockAnalysisHigh);
|
||||
|
||||
const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result!.estimatedEffort).toBe('HIGH');
|
||||
expect(result!.totalBreakingChanges).toBeGreaterThan(5);
|
||||
});
|
||||
|
||||
it('should include migration hints in steps', async () => {
|
||||
const versions = [createMockVersion('2.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [{ isBreaking: true, autoMigratable: false } as any],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 1,
|
||||
overallSeverity: 'MEDIUM',
|
||||
recommendations: ['Review property changes']
|
||||
};
|
||||
vi.spyOn(mockBreakingChangeDetector, 'analyzeVersionUpgrade').mockResolvedValue(mockAnalysis);
|
||||
|
||||
const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result!.steps[0].migrationHints).toContain('Review property changes');
|
||||
});
|
||||
});
|
||||
|
||||
describe('versionExists', () => {
|
||||
it('should return true if version exists', () => {
|
||||
const versions = [createMockVersion('1.0'), createMockVersion('2.0')];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result = service.versionExists('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false if version does not exist', () => {
|
||||
const versions = [createMockVersion('1.0')];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result = service.versionExists('nodes-base.httpRequest', '2.0');
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getVersionMetadata', () => {
|
||||
it('should return version metadata', () => {
|
||||
const version = createMockVersion('1.0');
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(version);
|
||||
|
||||
const result = service.getVersionMetadata('nodes-base.httpRequest', '1.0');
|
||||
|
||||
expect(result).toEqual(version);
|
||||
});
|
||||
|
||||
it('should return null if version not found', () => {
|
||||
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);
|
||||
|
||||
const result = service.getVersionMetadata('nodes-base.httpRequest', '99.0');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('clearCache', () => {
|
||||
it('should clear cache for specific node type', () => {
|
||||
const versions = [createMockVersion('1.0')];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
service.clearCache('nodes-base.httpRequest');
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should clear entire cache when no node type specified', () => {
|
||||
const versions = [createMockVersion('1.0')];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
service.getAvailableVersions('nodes-base.webhook');
|
||||
|
||||
service.clearCache();
|
||||
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
service.getAvailableVersions('nodes-base.webhook');
|
||||
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(4);
|
||||
});
|
||||
});
|
||||
|
||||
describe('cache management', () => {
|
||||
it('should cache different node types separately', () => {
|
||||
const httpVersions = [createMockVersion('1.0')];
|
||||
const webhookVersions = [createMockVersion('2.0')];
|
||||
|
||||
vi.spyOn(mockRepository, 'getNodeVersions')
|
||||
.mockReturnValueOnce(httpVersions)
|
||||
.mockReturnValueOnce(webhookVersions);
|
||||
|
||||
const result1 = service.getAvailableVersions('nodes-base.httpRequest');
|
||||
const result2 = service.getAvailableVersions('nodes-base.webhook');
|
||||
|
||||
expect(result1).toEqual(httpVersions);
|
||||
expect(result2).toEqual(webhookVersions);
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should not use cache after clearing', () => {
|
||||
const versions = [createMockVersion('1.0')];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(1);
|
||||
|
||||
service.clearCache('nodes-base.httpRequest');
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty version arrays', () => {
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'getNode').mockReturnValue(null);
|
||||
|
||||
const result = service.getLatestVersion('nodes-base.httpRequest');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle version comparison with zero parts', () => {
|
||||
const result = service.compareVersions('0.0.0', '0.0.1');
|
||||
|
||||
expect(result).toBe(-1);
|
||||
});
|
||||
|
||||
it('should handle single digit versions', () => {
|
||||
const result = service.compareVersions('1', '2');
|
||||
|
||||
expect(result).toBe(-1);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,856 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { PostUpdateValidator, type PostUpdateGuidance } from '@/services/post-update-validator';
|
||||
import { NodeVersionService } from '@/services/node-version-service';
|
||||
import { BreakingChangeDetector, type VersionUpgradeAnalysis, type DetectedChange } from '@/services/breaking-change-detector';
|
||||
import { type MigrationResult } from '@/services/node-migration-service';
|
||||
|
||||
vi.mock('@/services/node-version-service');
|
||||
vi.mock('@/services/breaking-change-detector');
|
||||
|
||||
describe('PostUpdateValidator', () => {
|
||||
let validator: PostUpdateValidator;
|
||||
let mockVersionService: NodeVersionService;
|
||||
let mockBreakingChangeDetector: BreakingChangeDetector;
|
||||
|
||||
const createMockMigrationResult = (
|
||||
success: boolean,
|
||||
remainingIssues: string[] = []
|
||||
): MigrationResult => ({
|
||||
success,
|
||||
nodeId: 'node-1',
|
||||
nodeName: 'Test Node',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
appliedMigrations: [],
|
||||
remainingIssues,
|
||||
confidence: success ? 'HIGH' : 'MEDIUM',
|
||||
updatedNode: {}
|
||||
});
|
||||
|
||||
const createMockChange = (
|
||||
propertyName: string,
|
||||
changeType: DetectedChange['changeType'],
|
||||
autoMigratable: boolean,
|
||||
severity: DetectedChange['severity'] = 'MEDIUM'
|
||||
): DetectedChange => ({
|
||||
propertyName,
|
||||
changeType,
|
||||
isBreaking: true,
|
||||
migrationHint: `Migrate ${propertyName}`,
|
||||
autoMigratable,
|
||||
severity,
|
||||
source: 'registry'
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockVersionService = {} as any;
|
||||
mockBreakingChangeDetector = {} as any;
|
||||
validator = new PostUpdateValidator(mockVersionService, mockBreakingChangeDetector);
|
||||
|
||||
mockVersionService.compareVersions = vi.fn((v1, v2) => {
|
||||
const parse = (v: string) => parseFloat(v);
|
||||
return parse(v1) - parse(v2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateGuidance', () => {
|
||||
it('should generate complete guidance for successful migration', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(true);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.migrationStatus).toBe('complete');
|
||||
expect(guidance.confidence).toBe('HIGH');
|
||||
expect(guidance.requiredActions).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should identify manual_required status for critical issues', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('criticalProp', 'requirement_changed', false, 'HIGH')
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 1,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Manual action required']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.migrationStatus).toBe('manual_required');
|
||||
expect(guidance.confidence).not.toBe('HIGH');
|
||||
});
|
||||
|
||||
it('should set partial status for some remaining issues', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('prop', 'added', true, 'LOW')
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Minor issue']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.migrationStatus).toBe('partial');
|
||||
});
|
||||
});
|
||||
|
||||
describe('required actions generation', () => {
|
||||
it('should generate required actions for manual changes', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('newRequiredProp', 'added', false, 'HIGH')
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 1,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Add property']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.requiredActions).toHaveLength(1);
|
||||
expect(guidance.requiredActions[0].type).toBe('ADD_PROPERTY');
|
||||
expect(guidance.requiredActions[0].property).toBe('newRequiredProp');
|
||||
expect(guidance.requiredActions[0].priority).toBe('CRITICAL');
|
||||
});
|
||||
|
||||
it('should map change types to action types correctly', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('addedProp', 'added', false, 'HIGH'),
|
||||
createMockChange('changedProp', 'requirement_changed', false, 'MEDIUM'),
|
||||
createMockChange('defaultProp', 'default_changed', false, 'LOW')
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 3,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Issues']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.requiredActions[0].type).toBe('ADD_PROPERTY');
|
||||
expect(guidance.requiredActions[1].type).toBe('UPDATE_PROPERTY');
|
||||
expect(guidance.requiredActions[2].type).toBe('CONFIGURE_OPTION');
|
||||
});
|
||||
|
||||
it('should map severity to priority correctly', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('highProp', 'added', false, 'HIGH'),
|
||||
createMockChange('medProp', 'added', false, 'MEDIUM'),
|
||||
createMockChange('lowProp', 'added', false, 'LOW')
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 3,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Issues']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.requiredActions[0].priority).toBe('CRITICAL');
|
||||
expect(guidance.requiredActions[1].priority).toBe('MEDIUM');
|
||||
expect(guidance.requiredActions[2].priority).toBe('LOW');
|
||||
});
|
||||
});
|
||||
|
||||
describe('deprecated properties identification', () => {
|
||||
it('should identify removed properties', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
{
|
||||
...createMockChange('oldProp', 'removed', true),
|
||||
migrationStrategy: { type: 'remove_property' }
|
||||
}
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'MEDIUM',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(true);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.deprecatedProperties).toHaveLength(1);
|
||||
expect(guidance.deprecatedProperties[0].property).toBe('oldProp');
|
||||
expect(guidance.deprecatedProperties[0].status).toBe('removed');
|
||||
expect(guidance.deprecatedProperties[0].action).toBe('remove');
|
||||
});
|
||||
|
||||
it('should mark breaking removals appropriately', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
{
|
||||
...createMockChange('breakingProp', 'removed', false),
|
||||
isBreaking: true
|
||||
}
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 1,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Issue']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.deprecatedProperties[0].impact).toBe('breaking');
|
||||
});
|
||||
});
|
||||
|
||||
describe('behavior changes documentation', () => {
|
||||
it('should document Execute Workflow v1.1 data passing changes', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'n8n-nodes-base.executeWorkflow',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '1.1',
|
||||
hasBreakingChanges: true,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(true);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Execute Workflow',
|
||||
'n8n-nodes-base.executeWorkflow',
|
||||
'1.0',
|
||||
'1.1',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.behaviorChanges).toHaveLength(1);
|
||||
expect(guidance.behaviorChanges[0].aspect).toContain('Data passing');
|
||||
expect(guidance.behaviorChanges[0].impact).toBe('HIGH');
|
||||
expect(guidance.behaviorChanges[0].actionRequired).toBe(true);
|
||||
});
|
||||
|
||||
it('should document Webhook v2.1 persistence changes', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'n8n-nodes-base.webhook',
|
||||
fromVersion: '2.0',
|
||||
toVersion: '2.1',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(true);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Webhook',
|
||||
'n8n-nodes-base.webhook',
|
||||
'2.0',
|
||||
'2.1',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
const persistenceChange = guidance.behaviorChanges.find(c => c.aspect.includes('persistence'));
|
||||
expect(persistenceChange).toBeDefined();
|
||||
expect(persistenceChange?.impact).toBe('MEDIUM');
|
||||
});
|
||||
|
||||
it('should document Webhook v2.0 response handling changes', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'n8n-nodes-base.webhook',
|
||||
fromVersion: '1.9',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'MEDIUM',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(true);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Webhook',
|
||||
'n8n-nodes-base.webhook',
|
||||
'1.9',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
const responseChange = guidance.behaviorChanges.find(c => c.aspect.includes('Response'));
|
||||
expect(responseChange).toBeDefined();
|
||||
expect(responseChange?.actionRequired).toBe(true);
|
||||
});
|
||||
|
||||
it('should not document behavior changes for other nodes', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(true);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'HTTP Request',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.behaviorChanges).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('migration steps generation', () => {
|
||||
it('should generate ordered migration steps', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
{
|
||||
...createMockChange('removedProp', 'removed', true),
|
||||
migrationStrategy: { type: 'remove_property' }
|
||||
},
|
||||
createMockChange('criticalProp', 'added', false, 'HIGH'),
|
||||
createMockChange('mediumProp', 'added', false, 'MEDIUM')
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 2,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Issues']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.migrationSteps.length).toBeGreaterThan(0);
|
||||
expect(guidance.migrationSteps[0]).toContain('deprecated');
|
||||
expect(guidance.migrationSteps.some(s => s.includes('critical'))).toBe(true);
|
||||
expect(guidance.migrationSteps.some(s => s.includes('Test workflow'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should include behavior change adaptation steps', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'n8n-nodes-base.executeWorkflow',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '1.1',
|
||||
hasBreakingChanges: true,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(true);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Execute Workflow',
|
||||
'n8n-nodes-base.executeWorkflow',
|
||||
'1.0',
|
||||
'1.1',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.migrationSteps.some(s => s.includes('behavior changes'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should always include final validation step', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(true);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.migrationSteps.some(s => s.includes('Test workflow'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('confidence calculation', () => {
|
||||
it('should set HIGH confidence for complete migrations', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(true);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.confidence).toBe('HIGH');
|
||||
});
|
||||
|
||||
it('should set MEDIUM confidence for partial migrations with few issues', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('prop', 'added', true, 'MEDIUM')
|
||||
],
|
||||
autoMigratableCount: 1,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'MEDIUM',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Minor issue']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.confidence).toBe('MEDIUM');
|
||||
});
|
||||
|
||||
it('should set LOW confidence for manual_required with many critical actions', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('prop1', 'added', false, 'HIGH'),
|
||||
createMockChange('prop2', 'added', false, 'HIGH'),
|
||||
createMockChange('prop3', 'added', false, 'HIGH'),
|
||||
createMockChange('prop4', 'added', false, 'HIGH')
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 4,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Issues']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.confidence).toBe('LOW');
|
||||
});
|
||||
});
|
||||
|
||||
describe('time estimation', () => {
|
||||
it('should estimate < 1 minute for simple migrations', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(true);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.estimatedTime).toBe('< 1 minute');
|
||||
});
|
||||
|
||||
it('should estimate 2-5 minutes for few actions', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('prop1', 'added', false, 'HIGH'),
|
||||
createMockChange('prop2', 'added', false, 'MEDIUM')
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 2,
|
||||
overallSeverity: 'MEDIUM',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Issue']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.estimatedTime).toMatch(/2-5|5-10/);
|
||||
});
|
||||
|
||||
it('should estimate 20+ minutes for complex migrations', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'n8n-nodes-base.executeWorkflow',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '1.1',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('prop1', 'added', false, 'HIGH'),
|
||||
createMockChange('prop2', 'added', false, 'HIGH'),
|
||||
createMockChange('prop3', 'added', false, 'HIGH'),
|
||||
createMockChange('prop4', 'added', false, 'HIGH'),
|
||||
createMockChange('prop5', 'added', false, 'HIGH')
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 5,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Issues']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Execute Workflow',
|
||||
'n8n-nodes-base.executeWorkflow',
|
||||
'1.0',
|
||||
'1.1',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
expect(guidance.estimatedTime).toContain('20+');
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateSummary', () => {
|
||||
it('should generate readable summary', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('prop1', 'added', false, 'HIGH'),
|
||||
createMockChange('prop2', 'added', false, 'MEDIUM')
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 2,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Issues']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
const summary = validator.generateSummary(guidance);
|
||||
|
||||
expect(summary).toContain('Test Node');
|
||||
expect(summary).toContain('1.0');
|
||||
expect(summary).toContain('2.0');
|
||||
expect(summary).toContain('Required actions');
|
||||
});
|
||||
|
||||
it('should limit actions displayed in summary', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
fromVersion: '1.0',
|
||||
toVersion: '2.0',
|
||||
hasBreakingChanges: true,
|
||||
changes: [
|
||||
createMockChange('prop1', 'added', false, 'HIGH'),
|
||||
createMockChange('prop2', 'added', false, 'HIGH'),
|
||||
createMockChange('prop3', 'added', false, 'HIGH'),
|
||||
createMockChange('prop4', 'added', false, 'HIGH'),
|
||||
createMockChange('prop5', 'added', false, 'HIGH')
|
||||
],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 5,
|
||||
overallSeverity: 'HIGH',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(false, ['Issues']);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Test Node',
|
||||
'nodes-base.httpRequest',
|
||||
'1.0',
|
||||
'2.0',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
const summary = validator.generateSummary(guidance);
|
||||
|
||||
expect(summary).toContain('and 2 more');
|
||||
});
|
||||
|
||||
it('should include behavior changes in summary', async () => {
|
||||
const mockAnalysis: VersionUpgradeAnalysis = {
|
||||
nodeType: 'n8n-nodes-base.webhook',
|
||||
fromVersion: '2.0',
|
||||
toVersion: '2.1',
|
||||
hasBreakingChanges: false,
|
||||
changes: [],
|
||||
autoMigratableCount: 0,
|
||||
manualRequiredCount: 0,
|
||||
overallSeverity: 'LOW',
|
||||
recommendations: []
|
||||
};
|
||||
|
||||
mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);
|
||||
|
||||
const migrationResult = createMockMigrationResult(true);
|
||||
|
||||
const guidance = await validator.generateGuidance(
|
||||
'node-1',
|
||||
'Webhook',
|
||||
'n8n-nodes-base.webhook',
|
||||
'2.0',
|
||||
'2.1',
|
||||
migrationResult
|
||||
);
|
||||
|
||||
const summary = validator.generateSummary(guidance);
|
||||
|
||||
expect(summary).toContain('Behavior changes');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -35,10 +35,6 @@ describe('WorkflowAutoFixer', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockRepository = new NodeRepository({} as any);
|
||||
|
||||
// Mock getNodeVersions to return empty array (no versions available)
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
|
||||
|
||||
autoFixer = new WorkflowAutoFixer(mockRepository);
|
||||
});
|
||||
|
||||
@@ -70,7 +66,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
});
|
||||
|
||||
describe('Expression Format Fixes', () => {
|
||||
it('should fix missing prefix in expressions', async () => {
|
||||
it('should fix missing prefix in expressions', () => {
|
||||
const workflow = createMockWorkflow([
|
||||
createMockNode('node-1', 'nodes-base.httpRequest', {
|
||||
url: '{{ $json.url }}',
|
||||
@@ -104,7 +100,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
suggestions: []
|
||||
};
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, validationResult, formatIssues);
|
||||
const result = autoFixer.generateFixes(workflow, validationResult, formatIssues);
|
||||
|
||||
expect(result.fixes).toHaveLength(1);
|
||||
expect(result.fixes[0].type).toBe('expression-format');
|
||||
@@ -116,7 +112,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
expect(result.operations[0].type).toBe('updateNode');
|
||||
});
|
||||
|
||||
it('should handle multiple expression fixes in same node', async () => {
|
||||
it('should handle multiple expression fixes in same node', () => {
|
||||
const workflow = createMockWorkflow([
|
||||
createMockNode('node-1', 'nodes-base.httpRequest', {
|
||||
url: '{{ $json.url }}',
|
||||
@@ -162,7 +158,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
suggestions: []
|
||||
};
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, validationResult, formatIssues);
|
||||
const result = autoFixer.generateFixes(workflow, validationResult, formatIssues);
|
||||
|
||||
expect(result.fixes).toHaveLength(2);
|
||||
expect(result.operations).toHaveLength(1); // Single update operation for the node
|
||||
@@ -170,7 +166,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
});
|
||||
|
||||
describe('TypeVersion Fixes', () => {
|
||||
it('should fix typeVersion exceeding maximum', async () => {
|
||||
it('should fix typeVersion exceeding maximum', () => {
|
||||
const workflow = createMockWorkflow([
|
||||
createMockNode('node-1', 'nodes-base.httpRequest', {})
|
||||
]);
|
||||
@@ -195,7 +191,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
suggestions: []
|
||||
};
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, validationResult, []);
|
||||
const result = autoFixer.generateFixes(workflow, validationResult, []);
|
||||
|
||||
expect(result.fixes).toHaveLength(1);
|
||||
expect(result.fixes[0].type).toBe('typeversion-correction');
|
||||
@@ -206,7 +202,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
});
|
||||
|
||||
describe('Error Output Configuration Fixes', () => {
|
||||
it('should remove conflicting onError setting', async () => {
|
||||
it('should remove conflicting onError setting', () => {
|
||||
const workflow = createMockWorkflow([
|
||||
createMockNode('node-1', 'nodes-base.httpRequest', {})
|
||||
]);
|
||||
@@ -232,7 +228,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
suggestions: []
|
||||
};
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, validationResult, []);
|
||||
const result = autoFixer.generateFixes(workflow, validationResult, []);
|
||||
|
||||
expect(result.fixes).toHaveLength(1);
|
||||
expect(result.fixes[0].type).toBe('error-output-config');
|
||||
@@ -299,7 +295,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
});
|
||||
|
||||
describe('Confidence Filtering', () => {
|
||||
it('should filter fixes by confidence level', async () => {
|
||||
it('should filter fixes by confidence level', () => {
|
||||
const workflow = createMockWorkflow([
|
||||
createMockNode('node-1', 'nodes-base.httpRequest', { url: '{{ $json.url }}' })
|
||||
]);
|
||||
@@ -330,7 +326,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
suggestions: []
|
||||
};
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, validationResult, formatIssues, {
|
||||
const result = autoFixer.generateFixes(workflow, validationResult, formatIssues, {
|
||||
confidenceThreshold: 'low'
|
||||
});
|
||||
|
||||
@@ -340,7 +336,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
});
|
||||
|
||||
describe('Summary Generation', () => {
|
||||
it('should generate appropriate summary for fixes', async () => {
|
||||
it('should generate appropriate summary for fixes', () => {
|
||||
const workflow = createMockWorkflow([
|
||||
createMockNode('node-1', 'nodes-base.httpRequest', { url: '{{ $json.url }}' })
|
||||
]);
|
||||
@@ -371,14 +367,14 @@ describe('WorkflowAutoFixer', () => {
|
||||
suggestions: []
|
||||
};
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, validationResult, formatIssues);
|
||||
const result = autoFixer.generateFixes(workflow, validationResult, formatIssues);
|
||||
|
||||
expect(result.summary).toContain('expression format');
|
||||
expect(result.stats.total).toBe(1);
|
||||
expect(result.stats.byType['expression-format']).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle empty fixes gracefully', async () => {
|
||||
it('should handle empty fixes gracefully', () => {
|
||||
const workflow = createMockWorkflow([]);
|
||||
const validationResult: WorkflowValidationResult = {
|
||||
valid: true,
|
||||
@@ -395,7 +391,7 @@ describe('WorkflowAutoFixer', () => {
|
||||
suggestions: []
|
||||
};
|
||||
|
||||
const result = await autoFixer.generateFixes(workflow, validationResult, []);
|
||||
const result = autoFixer.generateFixes(workflow, validationResult, []);
|
||||
|
||||
expect(result.summary).toBe('No fixes available');
|
||||
expect(result.stats.total).toBe(0);
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,616 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { WorkflowVersioningService, type WorkflowVersion, type BackupResult } from '@/services/workflow-versioning-service';
|
||||
import { NodeRepository } from '@/database/node-repository';
|
||||
import { N8nApiClient } from '@/services/n8n-api-client';
|
||||
import { WorkflowValidator } from '@/services/workflow-validator';
|
||||
import type { Workflow } from '@/types/n8n-api';
|
||||
|
||||
vi.mock('@/database/node-repository');
|
||||
vi.mock('@/services/n8n-api-client');
|
||||
vi.mock('@/services/workflow-validator');
|
||||
|
||||
describe('WorkflowVersioningService', () => {
|
||||
let service: WorkflowVersioningService;
|
||||
let mockRepository: NodeRepository;
|
||||
let mockApiClient: N8nApiClient;
|
||||
|
||||
const createMockWorkflow = (id: string, name: string, nodes: any[] = []): Workflow => ({
|
||||
id,
|
||||
name,
|
||||
active: false,
|
||||
nodes,
|
||||
connections: {},
|
||||
settings: {},
|
||||
createdAt: '2025-01-01T00:00:00.000Z',
|
||||
updatedAt: '2025-01-01T00:00:00.000Z'
|
||||
});
|
||||
|
||||
const createMockVersion = (versionNumber: number): WorkflowVersion => ({
|
||||
id: versionNumber,
|
||||
workflowId: 'workflow-1',
|
||||
versionNumber,
|
||||
workflowName: 'Test Workflow',
|
||||
workflowSnapshot: createMockWorkflow('workflow-1', 'Test Workflow'),
|
||||
trigger: 'partial_update',
|
||||
createdAt: '2025-01-01T00:00:00.000Z'
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockRepository = new NodeRepository({} as any);
|
||||
mockApiClient = new N8nApiClient({ baseUrl: 'http://test', apiKey: 'test-key' });
|
||||
service = new WorkflowVersioningService(mockRepository, mockApiClient);
|
||||
});
|
||||
|
||||
describe('createBackup', () => {
|
||||
it('should create a backup with version 1 for new workflow', async () => {
|
||||
const workflow = createMockWorkflow('workflow-1', 'Test Workflow');
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(1);
|
||||
vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
|
||||
|
||||
const result = await service.createBackup('workflow-1', workflow, {
|
||||
trigger: 'partial_update'
|
||||
});
|
||||
|
||||
expect(result.versionId).toBe(1);
|
||||
expect(result.versionNumber).toBe(1);
|
||||
expect(result.pruned).toBe(0);
|
||||
expect(result.message).toContain('Backup created (version 1)');
|
||||
});
|
||||
|
||||
it('should increment version number from latest version', async () => {
|
||||
const workflow = createMockWorkflow('workflow-1', 'Test Workflow');
|
||||
const existingVersions = [createMockVersion(3), createMockVersion(2)];
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue(existingVersions);
|
||||
vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(4);
|
||||
vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
|
||||
|
||||
const result = await service.createBackup('workflow-1', workflow, {
|
||||
trigger: 'full_update'
|
||||
});
|
||||
|
||||
expect(result.versionNumber).toBe(4);
|
||||
expect(mockRepository.createWorkflowVersion).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
versionNumber: 4
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should include context in version metadata', async () => {
|
||||
const workflow = createMockWorkflow('workflow-1', 'Test Workflow');
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(1);
|
||||
vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
|
||||
|
||||
await service.createBackup('workflow-1', workflow, {
|
||||
trigger: 'autofix',
|
||||
operations: [{ type: 'updateNode', nodeId: 'node-1' }],
|
||||
fixTypes: ['expression-format'],
|
||||
metadata: { testKey: 'testValue' }
|
||||
});
|
||||
|
||||
expect(mockRepository.createWorkflowVersion).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
trigger: 'autofix',
|
||||
operations: [{ type: 'updateNode', nodeId: 'node-1' }],
|
||||
fixTypes: ['expression-format'],
|
||||
metadata: { testKey: 'testValue' }
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('should auto-prune to 10 versions and report pruned count', async () => {
|
||||
const workflow = createMockWorkflow('workflow-1', 'Test Workflow');
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([createMockVersion(1)]);
|
||||
vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(2);
|
||||
vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(3);
|
||||
|
||||
const result = await service.createBackup('workflow-1', workflow, {
|
||||
trigger: 'partial_update'
|
||||
});
|
||||
|
||||
expect(mockRepository.pruneWorkflowVersions).toHaveBeenCalledWith('workflow-1', 10);
|
||||
expect(result.pruned).toBe(3);
|
||||
expect(result.message).toContain('pruned 3 old version(s)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getVersionHistory', () => {
|
||||
it('should return formatted version history', async () => {
|
||||
const versions = [
|
||||
createMockVersion(3),
|
||||
createMockVersion(2),
|
||||
createMockVersion(1)
|
||||
];
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue(versions);
|
||||
|
||||
const result = await service.getVersionHistory('workflow-1', 10);
|
||||
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result[0].versionNumber).toBe(3);
|
||||
expect(result[0].workflowId).toBe('workflow-1');
|
||||
expect(result[0].size).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should include operation count when operations exist', async () => {
|
||||
const versionWithOps: WorkflowVersion = {
|
||||
...createMockVersion(1),
|
||||
operations: [{ type: 'updateNode' }, { type: 'addNode' }]
|
||||
};
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([versionWithOps]);
|
||||
|
||||
const result = await service.getVersionHistory('workflow-1', 10);
|
||||
|
||||
expect(result[0].operationCount).toBe(2);
|
||||
});
|
||||
|
||||
it('should include fixTypes when present', async () => {
|
||||
const versionWithFixes: WorkflowVersion = {
|
||||
...createMockVersion(1),
|
||||
fixTypes: ['expression-format', 'typeversion-correction']
|
||||
};
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([versionWithFixes]);
|
||||
|
||||
const result = await service.getVersionHistory('workflow-1', 10);
|
||||
|
||||
expect(result[0].fixTypesApplied).toEqual(['expression-format', 'typeversion-correction']);
|
||||
});
|
||||
|
||||
it('should respect the limit parameter', async () => {
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);
|
||||
|
||||
await service.getVersionHistory('workflow-1', 5);
|
||||
|
||||
expect(mockRepository.getWorkflowVersions).toHaveBeenCalledWith('workflow-1', 5);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getVersion', () => {
|
||||
it('should return the requested version', async () => {
|
||||
const version = createMockVersion(1);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);
|
||||
|
||||
const result = await service.getVersion(1);
|
||||
|
||||
expect(result).toEqual(version);
|
||||
});
|
||||
|
||||
it('should return null if version does not exist', async () => {
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(null);
|
||||
|
||||
const result = await service.getVersion(999);
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('restoreVersion', () => {
|
||||
it('should fail if API client is not configured', async () => {
|
||||
const serviceWithoutApi = new WorkflowVersioningService(mockRepository);
|
||||
|
||||
const result = await serviceWithoutApi.restoreVersion('workflow-1', 1);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.message).toContain('API client not configured');
|
||||
expect(result.backupCreated).toBe(false);
|
||||
});
|
||||
|
||||
it('should fail if version does not exist', async () => {
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(null);
|
||||
|
||||
const result = await service.restoreVersion('workflow-1', 999);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.message).toContain('Version 999 not found');
|
||||
expect(result.backupCreated).toBe(false);
|
||||
});
|
||||
|
||||
it('should restore latest version when no versionId provided', async () => {
|
||||
const version = createMockVersion(3);
|
||||
vi.spyOn(mockRepository, 'getLatestWorkflowVersion').mockReturnValue(version);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(4);
|
||||
vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
|
||||
vi.spyOn(mockApiClient, 'getWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Current'));
|
||||
vi.spyOn(mockApiClient, 'updateWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Restored'));
|
||||
|
||||
const result = await service.restoreVersion('workflow-1', undefined, false);
|
||||
|
||||
expect(mockRepository.getLatestWorkflowVersion).toHaveBeenCalledWith('workflow-1');
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should fail if no backup versions exist and no versionId provided', async () => {
|
||||
vi.spyOn(mockRepository, 'getLatestWorkflowVersion').mockReturnValue(null);
|
||||
|
||||
const result = await service.restoreVersion('workflow-1', undefined);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.message).toContain('No backup versions found');
|
||||
});
|
||||
|
||||
it('should validate version before restore when validateBefore is true', async () => {
|
||||
const version = createMockVersion(1);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);
|
||||
|
||||
const mockValidator = {
|
||||
validateWorkflow: vi.fn().mockResolvedValue({
|
||||
errors: [{ message: 'Validation error' }]
|
||||
})
|
||||
};
|
||||
vi.spyOn(WorkflowValidator.prototype, 'validateWorkflow').mockImplementation(
|
||||
mockValidator.validateWorkflow
|
||||
);
|
||||
|
||||
const result = await service.restoreVersion('workflow-1', 1, true);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.message).toContain('has validation errors');
|
||||
expect(result.validationErrors).toEqual(['Validation error']);
|
||||
expect(result.backupCreated).toBe(false);
|
||||
});
|
||||
|
||||
it('should skip validation when validateBefore is false', async () => {
|
||||
const version = createMockVersion(1);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(2);
|
||||
vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
|
||||
vi.spyOn(mockApiClient, 'getWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Current'));
|
||||
vi.spyOn(mockApiClient, 'updateWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Restored'));
|
||||
|
||||
const mockValidator = vi.fn();
|
||||
vi.spyOn(WorkflowValidator.prototype, 'validateWorkflow').mockImplementation(mockValidator);
|
||||
|
||||
await service.restoreVersion('workflow-1', 1, false);
|
||||
|
||||
expect(mockValidator).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should create backup before restoring', async () => {
|
||||
const versionToRestore = createMockVersion(1);
|
||||
const currentWorkflow = createMockWorkflow('workflow-1', 'Current Workflow');
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(versionToRestore);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([createMockVersion(2)]);
|
||||
vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(3);
|
||||
vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
|
||||
vi.spyOn(mockApiClient, 'getWorkflow').mockResolvedValue(currentWorkflow);
|
||||
vi.spyOn(mockApiClient, 'updateWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Restored'));
|
||||
|
||||
const result = await service.restoreVersion('workflow-1', 1, false);
|
||||
|
||||
expect(mockApiClient.getWorkflow).toHaveBeenCalledWith('workflow-1');
|
||||
expect(mockRepository.createWorkflowVersion).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
workflowSnapshot: currentWorkflow,
|
||||
metadata: expect.objectContaining({
|
||||
reason: 'Backup before rollback',
|
||||
restoringToVersion: 1
|
||||
})
|
||||
})
|
||||
);
|
||||
expect(result.backupCreated).toBe(true);
|
||||
expect(result.backupVersionId).toBe(3);
|
||||
});
|
||||
|
||||
it('should fail if backup creation fails', async () => {
|
||||
const version = createMockVersion(1);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);
|
||||
vi.spyOn(mockApiClient, 'getWorkflow').mockRejectedValue(new Error('Backup failed'));
|
||||
|
||||
const result = await service.restoreVersion('workflow-1', 1, false);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.message).toContain('Failed to create backup before restore');
|
||||
expect(result.backupCreated).toBe(false);
|
||||
});
|
||||
|
||||
it('should successfully restore workflow', async () => {
|
||||
const versionToRestore = createMockVersion(1);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(versionToRestore);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([createMockVersion(2)]);
|
||||
vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(3);
|
||||
vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
|
||||
vi.spyOn(mockApiClient, 'getWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Current'));
|
||||
vi.spyOn(mockApiClient, 'updateWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Restored'));
|
||||
|
||||
const result = await service.restoreVersion('workflow-1', 1, false);
|
||||
|
||||
expect(mockApiClient.updateWorkflow).toHaveBeenCalledWith('workflow-1', versionToRestore.workflowSnapshot);
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.message).toContain('Successfully restored workflow to version 1');
|
||||
expect(result.fromVersion).toBe(3);
|
||||
expect(result.toVersionId).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle restore API failures', async () => {
|
||||
const version = createMockVersion(1);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(2);
|
||||
vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
|
||||
vi.spyOn(mockApiClient, 'getWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Current'));
|
||||
vi.spyOn(mockApiClient, 'updateWorkflow').mockRejectedValue(new Error('API Error'));
|
||||
|
||||
const result = await service.restoreVersion('workflow-1', 1, false);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.message).toContain('Failed to restore workflow');
|
||||
expect(result.backupCreated).toBe(true);
|
||||
expect(result.backupVersionId).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteVersion', () => {
|
||||
it('should delete a specific version', async () => {
|
||||
const version = createMockVersion(1);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);
|
||||
vi.spyOn(mockRepository, 'deleteWorkflowVersion').mockReturnValue(undefined);
|
||||
|
||||
const result = await service.deleteVersion(1);
|
||||
|
||||
expect(mockRepository.deleteWorkflowVersion).toHaveBeenCalledWith(1);
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.message).toContain('Deleted version 1');
|
||||
});
|
||||
|
||||
it('should fail if version does not exist', async () => {
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(null);
|
||||
|
||||
const result = await service.deleteVersion(999);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.message).toContain('Version 999 not found');
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteAllVersions', () => {
|
||||
it('should delete all versions for a workflow', async () => {
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersionCount').mockReturnValue(5);
|
||||
vi.spyOn(mockRepository, 'deleteWorkflowVersionsByWorkflowId').mockReturnValue(5);
|
||||
|
||||
const result = await service.deleteAllVersions('workflow-1');
|
||||
|
||||
expect(result.deleted).toBe(5);
|
||||
expect(result.message).toContain('Deleted 5 version(s)');
|
||||
});
|
||||
|
||||
it('should return zero if no versions exist', async () => {
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersionCount').mockReturnValue(0);
|
||||
|
||||
const result = await service.deleteAllVersions('workflow-1');
|
||||
|
||||
expect(result.deleted).toBe(0);
|
||||
expect(result.message).toContain('No versions found');
|
||||
});
|
||||
});
|
||||
|
||||
describe('pruneVersions', () => {
|
||||
it('should prune versions and return counts', async () => {
|
||||
vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(3);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersionCount').mockReturnValue(10);
|
||||
|
||||
const result = await service.pruneVersions('workflow-1', 10);
|
||||
|
||||
expect(result.pruned).toBe(3);
|
||||
expect(result.remaining).toBe(10);
|
||||
});
|
||||
|
||||
it('should use custom maxVersions parameter', async () => {
|
||||
vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersionCount').mockReturnValue(5);
|
||||
|
||||
await service.pruneVersions('workflow-1', 5);
|
||||
|
||||
expect(mockRepository.pruneWorkflowVersions).toHaveBeenCalledWith('workflow-1', 5);
|
||||
});
|
||||
});
|
||||
|
||||
describe('truncateAllVersions', () => {
|
||||
it('should refuse to truncate without confirmation', async () => {
|
||||
const result = await service.truncateAllVersions(false);
|
||||
|
||||
expect(result.deleted).toBe(0);
|
||||
expect(result.message).toContain('not confirmed');
|
||||
});
|
||||
|
||||
it('should truncate all versions when confirmed', async () => {
|
||||
vi.spyOn(mockRepository, 'truncateWorkflowVersions').mockReturnValue(50);
|
||||
|
||||
const result = await service.truncateAllVersions(true);
|
||||
|
||||
expect(result.deleted).toBe(50);
|
||||
expect(result.message).toContain('Truncated workflow_versions table');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getStorageStats', () => {
|
||||
it('should return formatted storage statistics', async () => {
|
||||
const mockStats = {
|
||||
totalVersions: 10,
|
||||
totalSize: 1024000,
|
||||
byWorkflow: [
|
||||
{
|
||||
workflowId: 'workflow-1',
|
||||
workflowName: 'Test Workflow',
|
||||
versionCount: 5,
|
||||
totalSize: 512000,
|
||||
lastBackup: '2025-01-01T00:00:00.000Z'
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
vi.spyOn(mockRepository, 'getVersionStorageStats').mockReturnValue(mockStats);
|
||||
|
||||
const result = await service.getStorageStats();
|
||||
|
||||
expect(result.totalVersions).toBe(10);
|
||||
expect(result.totalSizeFormatted).toContain('KB');
|
||||
expect(result.byWorkflow).toHaveLength(1);
|
||||
expect(result.byWorkflow[0].totalSizeFormatted).toContain('KB');
|
||||
});
|
||||
|
||||
it('should format bytes correctly', async () => {
|
||||
const mockStats = {
|
||||
totalVersions: 1,
|
||||
totalSize: 0,
|
||||
byWorkflow: []
|
||||
};
|
||||
|
||||
vi.spyOn(mockRepository, 'getVersionStorageStats').mockReturnValue(mockStats);
|
||||
|
||||
const result = await service.getStorageStats();
|
||||
|
||||
expect(result.totalSizeFormatted).toBe('0 Bytes');
|
||||
});
|
||||
});
|
||||
|
||||
describe('compareVersions', () => {
|
||||
it('should detect added nodes', async () => {
|
||||
const v1 = createMockVersion(1);
|
||||
v1.workflowSnapshot.nodes = [{ id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 1, position: [0, 0], parameters: {} }];
|
||||
|
||||
const v2 = createMockVersion(2);
|
||||
v2.workflowSnapshot.nodes = [
|
||||
{ id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 1, position: [0, 0], parameters: {} },
|
||||
{ id: 'node-2', name: 'Node 2', type: 'test', typeVersion: 1, position: [100, 0], parameters: {} }
|
||||
];
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await service.compareVersions(1, 2);
|
||||
|
||||
expect(result.addedNodes).toEqual(['node-2']);
|
||||
expect(result.removedNodes).toEqual([]);
|
||||
expect(result.modifiedNodes).toEqual([]);
|
||||
});
|
||||
|
||||
it('should detect removed nodes', async () => {
|
||||
const v1 = createMockVersion(1);
|
||||
v1.workflowSnapshot.nodes = [
|
||||
{ id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 1, position: [0, 0], parameters: {} },
|
||||
{ id: 'node-2', name: 'Node 2', type: 'test', typeVersion: 1, position: [100, 0], parameters: {} }
|
||||
];
|
||||
|
||||
const v2 = createMockVersion(2);
|
||||
v2.workflowSnapshot.nodes = [{ id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 1, position: [0, 0], parameters: {} }];
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await service.compareVersions(1, 2);
|
||||
|
||||
expect(result.removedNodes).toEqual(['node-2']);
|
||||
expect(result.addedNodes).toEqual([]);
|
||||
});
|
||||
|
||||
it('should detect modified nodes', async () => {
|
||||
const v1 = createMockVersion(1);
|
||||
v1.workflowSnapshot.nodes = [{ id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 1, position: [0, 0], parameters: {} }];
|
||||
|
||||
const v2 = createMockVersion(2);
|
||||
v2.workflowSnapshot.nodes = [{ id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 2, position: [0, 0], parameters: {} }];
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await service.compareVersions(1, 2);
|
||||
|
||||
expect(result.modifiedNodes).toEqual(['node-1']);
|
||||
});
|
||||
|
||||
it('should detect connection changes', async () => {
|
||||
const v1 = createMockVersion(1);
|
||||
v1.workflowSnapshot.connections = { 'node-1': { main: [[{ node: 'node-2', type: 'main', index: 0 }]] } };
|
||||
|
||||
const v2 = createMockVersion(2);
|
||||
v2.workflowSnapshot.connections = {};
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await service.compareVersions(1, 2);
|
||||
|
||||
expect(result.connectionChanges).toBe(1);
|
||||
});
|
||||
|
||||
it('should detect settings changes', async () => {
|
||||
const v1 = createMockVersion(1);
|
||||
v1.workflowSnapshot.settings = { executionOrder: 'v0' };
|
||||
|
||||
const v2 = createMockVersion(2);
|
||||
v2.workflowSnapshot.settings = { executionOrder: 'v1' };
|
||||
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion')
|
||||
.mockReturnValueOnce(v1)
|
||||
.mockReturnValueOnce(v2);
|
||||
|
||||
const result = await service.compareVersions(1, 2);
|
||||
|
||||
expect(result.settingChanges).toHaveProperty('executionOrder');
|
||||
expect(result.settingChanges.executionOrder.before).toBe('v0');
|
||||
expect(result.settingChanges.executionOrder.after).toBe('v1');
|
||||
});
|
||||
|
||||
it('should throw error if version not found', async () => {
|
||||
vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(null);
|
||||
|
||||
await expect(service.compareVersions(1, 2)).rejects.toThrow('One or both versions not found');
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatBytes', () => {
|
||||
it('should format bytes to human-readable string', () => {
|
||||
// Access private method through any cast
|
||||
const formatBytes = (service as any).formatBytes.bind(service);
|
||||
|
||||
expect(formatBytes(0)).toBe('0 Bytes');
|
||||
expect(formatBytes(500)).toBe('500 Bytes');
|
||||
expect(formatBytes(1024)).toBe('1 KB');
|
||||
expect(formatBytes(1048576)).toBe('1 MB');
|
||||
expect(formatBytes(1073741824)).toBe('1 GB');
|
||||
});
|
||||
});
|
||||
|
||||
describe('diffObjects', () => {
|
||||
it('should detect object differences', () => {
|
||||
const diffObjects = (service as any).diffObjects.bind(service);
|
||||
|
||||
const obj1 = { a: 1, b: 2 };
|
||||
const obj2 = { a: 1, b: 3, c: 4 };
|
||||
|
||||
const diff = diffObjects(obj1, obj2);
|
||||
|
||||
expect(diff).toHaveProperty('b');
|
||||
expect(diff.b).toEqual({ before: 2, after: 3 });
|
||||
expect(diff).toHaveProperty('c');
|
||||
expect(diff.c).toEqual({ before: undefined, after: 4 });
|
||||
});
|
||||
|
||||
it('should return empty object when no differences', () => {
|
||||
const diffObjects = (service as any).diffObjects.bind(service);
|
||||
|
||||
const obj1 = { a: 1, b: 2 };
|
||||
const obj2 = { a: 1, b: 2 };
|
||||
|
||||
const diff = diffObjects(obj1, obj2);
|
||||
|
||||
expect(Object.keys(diff)).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -72,16 +72,9 @@ describe('AuthManager.timingSafeCompare', () => {
|
||||
const medianLast = median(timings.wrongLast);
|
||||
|
||||
// Timing variance should be less than 10% (constant-time)
|
||||
// Guard against division by zero when medians are very small (fast operations)
|
||||
const maxMedian = Math.max(medianFirst, medianLast);
|
||||
const variance = maxMedian === 0
|
||||
? Math.abs(medianFirst - medianLast)
|
||||
: Math.abs(medianFirst - medianLast) / maxMedian;
|
||||
const variance = Math.abs(medianFirst - medianLast) / medianFirst;
|
||||
|
||||
// For constant-time comparison, variance should be minimal
|
||||
// If maxMedian is 0, check absolute difference is small (< 1000ns)
|
||||
// Otherwise, check relative variance is < 10%
|
||||
expect(variance).toBeLessThan(maxMedian === 0 ? 1000 : 0.10);
|
||||
expect(variance).toBeLessThan(0.10);
|
||||
});
|
||||
|
||||
it('should handle special characters safely', () => {
|
||||
|
||||
@@ -1,414 +0,0 @@
|
||||
/**
|
||||
* Tests for Expression Utilities
|
||||
*
|
||||
* Comprehensive test suite for n8n expression detection utilities
|
||||
* that help validators understand when to skip literal validation
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
isExpression,
|
||||
containsExpression,
|
||||
shouldSkipLiteralValidation,
|
||||
extractExpressionContent,
|
||||
hasMixedContent
|
||||
} from '../../../src/utils/expression-utils';
|
||||
|
||||
describe('Expression Utilities', () => {
|
||||
describe('isExpression', () => {
|
||||
describe('Valid expressions', () => {
|
||||
it('should detect expression with = prefix and {{ }}', () => {
|
||||
expect(isExpression('={{ $json.value }}')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect expression with = prefix only', () => {
|
||||
expect(isExpression('=$json.value')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect mixed content expression', () => {
|
||||
expect(isExpression('=https://api.com/{{ $json.id }}/data')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect expression with complex content', () => {
|
||||
expect(isExpression('={{ $json.items.map(item => item.id) }}')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Non-expressions', () => {
|
||||
it('should return false for plain strings', () => {
|
||||
expect(isExpression('plain text')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for URLs without = prefix', () => {
|
||||
expect(isExpression('https://api.example.com')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for {{ }} without = prefix', () => {
|
||||
expect(isExpression('{{ $json.value }}')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for empty string', () => {
|
||||
expect(isExpression('')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases', () => {
|
||||
it('should return false for null', () => {
|
||||
expect(isExpression(null)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for undefined', () => {
|
||||
expect(isExpression(undefined)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for number', () => {
|
||||
expect(isExpression(123)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for object', () => {
|
||||
expect(isExpression({})).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for array', () => {
|
||||
expect(isExpression([])).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for boolean', () => {
|
||||
expect(isExpression(true)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Type narrowing', () => {
|
||||
it('should narrow type to string when true', () => {
|
||||
const value: unknown = '=$json.value';
|
||||
if (isExpression(value)) {
|
||||
// This should compile because isExpression is a type predicate
|
||||
const length: number = value.length;
|
||||
expect(length).toBeGreaterThan(0);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('containsExpression', () => {
|
||||
describe('Valid expression markers', () => {
|
||||
it('should detect {{ }} markers', () => {
|
||||
expect(containsExpression('{{ $json.value }}')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect expression markers in mixed content', () => {
|
||||
expect(containsExpression('Hello {{ $json.name }}!')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect multiple expression markers', () => {
|
||||
expect(containsExpression('{{ $json.first }} and {{ $json.second }}')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect expression with = prefix', () => {
|
||||
expect(containsExpression('={{ $json.value }}')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect expressions with newlines', () => {
|
||||
expect(containsExpression('{{ $json.items\n .map(item => item.id) }}')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Non-expressions', () => {
|
||||
it('should return false for plain strings', () => {
|
||||
expect(containsExpression('plain text')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for = prefix without {{ }}', () => {
|
||||
expect(containsExpression('=$json.value')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for single braces', () => {
|
||||
expect(containsExpression('{ value }')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for empty string', () => {
|
||||
expect(containsExpression('')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases', () => {
|
||||
it('should return false for null', () => {
|
||||
expect(containsExpression(null)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for undefined', () => {
|
||||
expect(containsExpression(undefined)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for number', () => {
|
||||
expect(containsExpression(123)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for object', () => {
|
||||
expect(containsExpression({})).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for array', () => {
|
||||
expect(containsExpression([])).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('shouldSkipLiteralValidation', () => {
|
||||
describe('Should skip validation', () => {
|
||||
it('should skip for expression with = prefix and {{ }}', () => {
|
||||
expect(shouldSkipLiteralValidation('={{ $json.value }}')).toBe(true);
|
||||
});
|
||||
|
||||
it('should skip for expression with = prefix only', () => {
|
||||
expect(shouldSkipLiteralValidation('=$json.value')).toBe(true);
|
||||
});
|
||||
|
||||
it('should skip for {{ }} without = prefix', () => {
|
||||
expect(shouldSkipLiteralValidation('{{ $json.value }}')).toBe(true);
|
||||
});
|
||||
|
||||
it('should skip for mixed content with expressions', () => {
|
||||
expect(shouldSkipLiteralValidation('https://api.com/{{ $json.id }}/data')).toBe(true);
|
||||
});
|
||||
|
||||
it('should skip for expression URL', () => {
|
||||
expect(shouldSkipLiteralValidation('={{ $json.baseUrl }}/api')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Should not skip validation', () => {
|
||||
it('should validate plain strings', () => {
|
||||
expect(shouldSkipLiteralValidation('plain text')).toBe(false);
|
||||
});
|
||||
|
||||
it('should validate literal URLs', () => {
|
||||
expect(shouldSkipLiteralValidation('https://api.example.com')).toBe(false);
|
||||
});
|
||||
|
||||
it('should validate JSON strings', () => {
|
||||
expect(shouldSkipLiteralValidation('{"key": "value"}')).toBe(false);
|
||||
});
|
||||
|
||||
it('should validate numbers', () => {
|
||||
expect(shouldSkipLiteralValidation(123)).toBe(false);
|
||||
});
|
||||
|
||||
it('should validate null', () => {
|
||||
expect(shouldSkipLiteralValidation(null)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Real-world use cases', () => {
|
||||
it('should skip validation for expression-based URLs', () => {
|
||||
const url = '={{ $json.protocol }}://{{ $json.domain }}/api';
|
||||
expect(shouldSkipLiteralValidation(url)).toBe(true);
|
||||
});
|
||||
|
||||
it('should skip validation for expression-based JSON', () => {
|
||||
const json = '={{ { key: $json.value } }}';
|
||||
expect(shouldSkipLiteralValidation(json)).toBe(true);
|
||||
});
|
||||
|
||||
it('should not skip validation for literal URLs', () => {
|
||||
const url = 'https://api.example.com/endpoint';
|
||||
expect(shouldSkipLiteralValidation(url)).toBe(false);
|
||||
});
|
||||
|
||||
it('should not skip validation for literal JSON', () => {
|
||||
const json = '{"userId": 123, "name": "test"}';
|
||||
expect(shouldSkipLiteralValidation(json)).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('extractExpressionContent', () => {
|
||||
describe('Expression with = prefix and {{ }}', () => {
|
||||
it('should extract content from ={{ }}', () => {
|
||||
expect(extractExpressionContent('={{ $json.value }}')).toBe('$json.value');
|
||||
});
|
||||
|
||||
it('should extract complex expression', () => {
|
||||
expect(extractExpressionContent('={{ $json.items.map(i => i.id) }}')).toBe('$json.items.map(i => i.id)');
|
||||
});
|
||||
|
||||
it('should trim whitespace', () => {
|
||||
expect(extractExpressionContent('={{ $json.value }}')).toBe('$json.value');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Expression with = prefix only', () => {
|
||||
it('should extract content from = prefix', () => {
|
||||
expect(extractExpressionContent('=$json.value')).toBe('$json.value');
|
||||
});
|
||||
|
||||
it('should handle complex expressions without {{ }}', () => {
|
||||
expect(extractExpressionContent('=$json.items[0].name')).toBe('$json.items[0].name');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Non-expressions', () => {
|
||||
it('should return original value for plain strings', () => {
|
||||
expect(extractExpressionContent('plain text')).toBe('plain text');
|
||||
});
|
||||
|
||||
it('should return original value for {{ }} without = prefix', () => {
|
||||
expect(extractExpressionContent('{{ $json.value }}')).toBe('{{ $json.value }}');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases', () => {
|
||||
it('should handle empty expression', () => {
|
||||
expect(extractExpressionContent('=')).toBe('');
|
||||
});
|
||||
|
||||
it('should handle expression with only {{ }}', () => {
|
||||
// Empty braces don't match the regex pattern, returns as-is
|
||||
expect(extractExpressionContent('={{}}')).toBe('{{}}');
|
||||
});
|
||||
|
||||
it('should handle nested braces (not valid but should not crash)', () => {
|
||||
// The regex extracts content between outermost {{ }}
|
||||
expect(extractExpressionContent('={{ {{ value }} }}')).toBe('{{ value }}');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('hasMixedContent', () => {
|
||||
describe('Mixed content cases', () => {
|
||||
it('should detect mixed content with text and expression', () => {
|
||||
expect(hasMixedContent('Hello {{ $json.name }}!')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect URL with expression segments', () => {
|
||||
expect(hasMixedContent('https://api.com/{{ $json.id }}/data')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect multiple expressions in text', () => {
|
||||
expect(hasMixedContent('{{ $json.first }} and {{ $json.second }}')).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect JSON with expressions', () => {
|
||||
expect(hasMixedContent('{"id": {{ $json.id }}, "name": "test"}')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pure expression cases', () => {
|
||||
it('should return false for pure expression with = prefix', () => {
|
||||
expect(hasMixedContent('={{ $json.value }}')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true for {{ }} without = prefix (ambiguous case)', () => {
|
||||
// Without = prefix, we can't distinguish between pure expression and mixed content
|
||||
// So it's treated as mixed to be safe
|
||||
expect(hasMixedContent('{{ $json.value }}')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for expression with whitespace', () => {
|
||||
expect(hasMixedContent(' ={{ $json.value }} ')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Non-expression cases', () => {
|
||||
it('should return false for plain text', () => {
|
||||
expect(hasMixedContent('plain text')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for literal URLs', () => {
|
||||
expect(hasMixedContent('https://api.example.com')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for = prefix without {{ }}', () => {
|
||||
expect(hasMixedContent('=$json.value')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases', () => {
|
||||
it('should return false for null', () => {
|
||||
expect(hasMixedContent(null)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for undefined', () => {
|
||||
expect(hasMixedContent(undefined)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for number', () => {
|
||||
expect(hasMixedContent(123)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for object', () => {
|
||||
expect(hasMixedContent({})).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for array', () => {
|
||||
expect(hasMixedContent([])).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for empty string', () => {
|
||||
expect(hasMixedContent('')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Type guard effectiveness', () => {
|
||||
it('should handle non-string types without calling containsExpression', () => {
|
||||
// This tests the fix from Phase 1 - type guard must come before containsExpression
|
||||
expect(() => hasMixedContent(123)).not.toThrow();
|
||||
expect(() => hasMixedContent(null)).not.toThrow();
|
||||
expect(() => hasMixedContent(undefined)).not.toThrow();
|
||||
expect(() => hasMixedContent({})).not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Integration scenarios', () => {
|
||||
it('should correctly identify expression-based URL in HTTP Request node', () => {
|
||||
const url = '={{ $json.baseUrl }}/users/{{ $json.userId }}';
|
||||
|
||||
expect(isExpression(url)).toBe(true);
|
||||
expect(containsExpression(url)).toBe(true);
|
||||
expect(shouldSkipLiteralValidation(url)).toBe(true);
|
||||
expect(hasMixedContent(url)).toBe(true);
|
||||
});
|
||||
|
||||
it('should correctly identify literal URL for validation', () => {
|
||||
const url = 'https://api.example.com/users/123';
|
||||
|
||||
expect(isExpression(url)).toBe(false);
|
||||
expect(containsExpression(url)).toBe(false);
|
||||
expect(shouldSkipLiteralValidation(url)).toBe(false);
|
||||
expect(hasMixedContent(url)).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle expression in JSON body', () => {
|
||||
const json = '={{ { userId: $json.id, timestamp: $now } }}';
|
||||
|
||||
expect(isExpression(json)).toBe(true);
|
||||
expect(shouldSkipLiteralValidation(json)).toBe(true);
|
||||
expect(extractExpressionContent(json)).toBe('{ userId: $json.id, timestamp: $now }');
|
||||
});
|
||||
|
||||
it('should handle webhook path with expressions', () => {
|
||||
const path = '=/webhook/{{ $json.customerId }}/notify';
|
||||
|
||||
expect(isExpression(path)).toBe(true);
|
||||
expect(containsExpression(path)).toBe(true);
|
||||
expect(shouldSkipLiteralValidation(path)).toBe(true);
|
||||
expect(extractExpressionContent(path)).toBe('/webhook/{{ $json.customerId }}/notify');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Performance characteristics', () => {
|
||||
it('should use efficient regex for containsExpression', () => {
|
||||
// The implementation should use a single regex test, not two includes()
|
||||
const value = 'text {{ expression }} more text';
|
||||
const start = performance.now();
|
||||
for (let i = 0; i < 10000; i++) {
|
||||
containsExpression(value);
|
||||
}
|
||||
const duration = performance.now() - start;
|
||||
|
||||
// Performance test - should complete in reasonable time
|
||||
expect(duration).toBeLessThan(100); // 100ms for 10k iterations
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,240 +0,0 @@
|
||||
import { describe, test, expect } from 'vitest';
|
||||
import {
|
||||
isStickyNote,
|
||||
isTriggerNode,
|
||||
isNonExecutableNode,
|
||||
requiresIncomingConnection
|
||||
} from '@/utils/node-classification';
|
||||
|
||||
describe('Node Classification Utilities', () => {
|
||||
describe('isStickyNote', () => {
|
||||
test('should identify standard sticky note type', () => {
|
||||
expect(isStickyNote('n8n-nodes-base.stickyNote')).toBe(true);
|
||||
});
|
||||
|
||||
test('should identify normalized sticky note type', () => {
|
||||
expect(isStickyNote('nodes-base.stickyNote')).toBe(true);
|
||||
});
|
||||
|
||||
test('should identify scoped sticky note type', () => {
|
||||
expect(isStickyNote('@n8n/n8n-nodes-base.stickyNote')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return false for webhook node', () => {
|
||||
expect(isStickyNote('n8n-nodes-base.webhook')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for HTTP request node', () => {
|
||||
expect(isStickyNote('n8n-nodes-base.httpRequest')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for manual trigger node', () => {
|
||||
expect(isStickyNote('n8n-nodes-base.manualTrigger')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for Set node', () => {
|
||||
expect(isStickyNote('n8n-nodes-base.set')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for empty string', () => {
|
||||
expect(isStickyNote('')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isTriggerNode', () => {
|
||||
test('should identify webhook trigger', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.webhook')).toBe(true);
|
||||
});
|
||||
|
||||
test('should identify webhook trigger variant', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.webhookTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
test('should identify manual trigger', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.manualTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
test('should identify cron trigger', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.cronTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
test('should identify schedule trigger', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.scheduleTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return false for HTTP request node', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.httpRequest')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for Set node', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.set')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for sticky note', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.stickyNote')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for empty string', () => {
|
||||
expect(isTriggerNode('')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isNonExecutableNode', () => {
|
||||
test('should identify sticky note as non-executable', () => {
|
||||
expect(isNonExecutableNode('n8n-nodes-base.stickyNote')).toBe(true);
|
||||
});
|
||||
|
||||
test('should identify all sticky note variations as non-executable', () => {
|
||||
expect(isNonExecutableNode('nodes-base.stickyNote')).toBe(true);
|
||||
expect(isNonExecutableNode('@n8n/n8n-nodes-base.stickyNote')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return false for webhook trigger', () => {
|
||||
expect(isNonExecutableNode('n8n-nodes-base.webhook')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for HTTP request node', () => {
|
||||
expect(isNonExecutableNode('n8n-nodes-base.httpRequest')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for Set node', () => {
|
||||
expect(isNonExecutableNode('n8n-nodes-base.set')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for manual trigger', () => {
|
||||
expect(isNonExecutableNode('n8n-nodes-base.manualTrigger')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('requiresIncomingConnection', () => {
|
||||
describe('non-executable nodes (should not require connections)', () => {
|
||||
test('should return false for sticky note', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.stickyNote')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for all sticky note variations', () => {
|
||||
expect(requiresIncomingConnection('nodes-base.stickyNote')).toBe(false);
|
||||
expect(requiresIncomingConnection('@n8n/n8n-nodes-base.stickyNote')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('trigger nodes (should not require incoming connections)', () => {
|
||||
test('should return false for webhook', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.webhook')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for webhook trigger', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.webhookTrigger')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for manual trigger', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.manualTrigger')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for cron trigger', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.cronTrigger')).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false for schedule trigger', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.scheduleTrigger')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('regular nodes (should require incoming connections)', () => {
|
||||
test('should return true for HTTP request node', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.httpRequest')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return true for Set node', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.set')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return true for Code node', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.code')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return true for Function node', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.function')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return true for IF node', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.if')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return true for Switch node', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.switch')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return true for Respond to Webhook node', () => {
|
||||
expect(requiresIncomingConnection('n8n-nodes-base.respondToWebhook')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
test('should return true for unknown node types (conservative approach)', () => {
|
||||
expect(requiresIncomingConnection('unknown-package.unknownNode')).toBe(true);
|
||||
});
|
||||
|
||||
test('should return true for empty string', () => {
|
||||
expect(requiresIncomingConnection('')).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('integration scenarios', () => {
|
||||
test('sticky notes should be non-executable and not require connections', () => {
|
||||
const stickyType = 'n8n-nodes-base.stickyNote';
|
||||
expect(isNonExecutableNode(stickyType)).toBe(true);
|
||||
expect(requiresIncomingConnection(stickyType)).toBe(false);
|
||||
expect(isStickyNote(stickyType)).toBe(true);
|
||||
expect(isTriggerNode(stickyType)).toBe(false);
|
||||
});
|
||||
|
||||
test('webhook nodes should be triggers and not require incoming connections', () => {
|
||||
const webhookType = 'n8n-nodes-base.webhook';
|
||||
expect(isTriggerNode(webhookType)).toBe(true);
|
||||
expect(requiresIncomingConnection(webhookType)).toBe(false);
|
||||
expect(isNonExecutableNode(webhookType)).toBe(false);
|
||||
expect(isStickyNote(webhookType)).toBe(false);
|
||||
});
|
||||
|
||||
test('regular nodes should require incoming connections', () => {
|
||||
const httpType = 'n8n-nodes-base.httpRequest';
|
||||
expect(requiresIncomingConnection(httpType)).toBe(true);
|
||||
expect(isNonExecutableNode(httpType)).toBe(false);
|
||||
expect(isTriggerNode(httpType)).toBe(false);
|
||||
expect(isStickyNote(httpType)).toBe(false);
|
||||
});
|
||||
|
||||
test('all trigger types should not require incoming connections', () => {
|
||||
const triggerTypes = [
|
||||
'n8n-nodes-base.webhook',
|
||||
'n8n-nodes-base.webhookTrigger',
|
||||
'n8n-nodes-base.manualTrigger',
|
||||
'n8n-nodes-base.cronTrigger',
|
||||
'n8n-nodes-base.scheduleTrigger'
|
||||
];
|
||||
|
||||
triggerTypes.forEach(type => {
|
||||
expect(isTriggerNode(type)).toBe(true);
|
||||
expect(requiresIncomingConnection(type)).toBe(false);
|
||||
expect(isNonExecutableNode(type)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
test('all sticky note variations should be non-executable', () => {
|
||||
const stickyTypes = [
|
||||
'n8n-nodes-base.stickyNote',
|
||||
'nodes-base.stickyNote',
|
||||
'@n8n/n8n-nodes-base.stickyNote'
|
||||
];
|
||||
|
||||
stickyTypes.forEach(type => {
|
||||
expect(isStickyNote(type)).toBe(true);
|
||||
expect(isNonExecutableNode(type)).toBe(true);
|
||||
expect(requiresIncomingConnection(type)).toBe(false);
|
||||
expect(isTriggerNode(type)).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -7,10 +7,7 @@ import {
|
||||
isBaseNode,
|
||||
isLangChainNode,
|
||||
isValidNodeTypeFormat,
|
||||
getNodeTypeVariations,
|
||||
isTriggerNode,
|
||||
isActivatableTrigger,
|
||||
getTriggerTypeDescription
|
||||
getNodeTypeVariations
|
||||
} from '@/utils/node-type-utils';
|
||||
|
||||
describe('node-type-utils', () => {
|
||||
@@ -199,165 +196,4 @@ describe('node-type-utils', () => {
|
||||
expect(variations.length).toBe(uniqueVariations.length);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isTriggerNode', () => {
|
||||
it('recognizes executeWorkflowTrigger as a trigger', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.executeWorkflowTrigger')).toBe(true);
|
||||
expect(isTriggerNode('nodes-base.executeWorkflowTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
it('recognizes schedule triggers', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.scheduleTrigger')).toBe(true);
|
||||
expect(isTriggerNode('n8n-nodes-base.cronTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
it('recognizes webhook triggers', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.webhook')).toBe(true);
|
||||
expect(isTriggerNode('n8n-nodes-base.webhookTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
it('recognizes manual triggers', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.manualTrigger')).toBe(true);
|
||||
expect(isTriggerNode('n8n-nodes-base.start')).toBe(true);
|
||||
expect(isTriggerNode('n8n-nodes-base.formTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
it('recognizes email and polling triggers', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.emailTrigger')).toBe(true);
|
||||
expect(isTriggerNode('n8n-nodes-base.imapTrigger')).toBe(true);
|
||||
expect(isTriggerNode('n8n-nodes-base.gmailTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
it('recognizes various trigger types', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.slackTrigger')).toBe(true);
|
||||
expect(isTriggerNode('n8n-nodes-base.githubTrigger')).toBe(true);
|
||||
expect(isTriggerNode('n8n-nodes-base.twilioTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
it('does NOT recognize respondToWebhook as a trigger', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.respondToWebhook')).toBe(false);
|
||||
});
|
||||
|
||||
it('does NOT recognize regular nodes as triggers', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.set')).toBe(false);
|
||||
expect(isTriggerNode('n8n-nodes-base.httpRequest')).toBe(false);
|
||||
expect(isTriggerNode('n8n-nodes-base.code')).toBe(false);
|
||||
expect(isTriggerNode('n8n-nodes-base.slack')).toBe(false);
|
||||
});
|
||||
|
||||
it('handles normalized and non-normalized node types', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.webhook')).toBe(true);
|
||||
expect(isTriggerNode('nodes-base.webhook')).toBe(true);
|
||||
});
|
||||
|
||||
it('is case-insensitive', () => {
|
||||
expect(isTriggerNode('n8n-nodes-base.WebhookTrigger')).toBe(true);
|
||||
expect(isTriggerNode('n8n-nodes-base.EMAILTRIGGER')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isActivatableTrigger', () => {
|
||||
it('executeWorkflowTrigger is NOT activatable', () => {
|
||||
expect(isActivatableTrigger('n8n-nodes-base.executeWorkflowTrigger')).toBe(false);
|
||||
expect(isActivatableTrigger('nodes-base.executeWorkflowTrigger')).toBe(false);
|
||||
});
|
||||
|
||||
it('webhook triggers ARE activatable', () => {
|
||||
expect(isActivatableTrigger('n8n-nodes-base.webhook')).toBe(true);
|
||||
expect(isActivatableTrigger('n8n-nodes-base.webhookTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
it('schedule triggers ARE activatable', () => {
|
||||
expect(isActivatableTrigger('n8n-nodes-base.scheduleTrigger')).toBe(true);
|
||||
expect(isActivatableTrigger('n8n-nodes-base.cronTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
it('manual triggers ARE activatable', () => {
|
||||
expect(isActivatableTrigger('n8n-nodes-base.manualTrigger')).toBe(true);
|
||||
expect(isActivatableTrigger('n8n-nodes-base.start')).toBe(true);
|
||||
expect(isActivatableTrigger('n8n-nodes-base.formTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
it('polling triggers ARE activatable', () => {
|
||||
expect(isActivatableTrigger('n8n-nodes-base.emailTrigger')).toBe(true);
|
||||
expect(isActivatableTrigger('n8n-nodes-base.slackTrigger')).toBe(true);
|
||||
expect(isActivatableTrigger('n8n-nodes-base.gmailTrigger')).toBe(true);
|
||||
});
|
||||
|
||||
it('regular nodes are NOT activatable', () => {
|
||||
expect(isActivatableTrigger('n8n-nodes-base.set')).toBe(false);
|
||||
expect(isActivatableTrigger('n8n-nodes-base.httpRequest')).toBe(false);
|
||||
expect(isActivatableTrigger('n8n-nodes-base.respondToWebhook')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getTriggerTypeDescription', () => {
|
||||
it('describes executeWorkflowTrigger correctly', () => {
|
||||
const desc = getTriggerTypeDescription('n8n-nodes-base.executeWorkflowTrigger');
|
||||
expect(desc).toContain('Execute Workflow');
|
||||
expect(desc).toContain('invoked by other workflows');
|
||||
});
|
||||
|
||||
it('describes webhook triggers correctly', () => {
|
||||
const desc = getTriggerTypeDescription('n8n-nodes-base.webhook');
|
||||
expect(desc).toContain('Webhook');
|
||||
expect(desc).toContain('HTTP');
|
||||
});
|
||||
|
||||
it('describes schedule triggers correctly', () => {
|
||||
const desc = getTriggerTypeDescription('n8n-nodes-base.scheduleTrigger');
|
||||
expect(desc).toContain('Schedule');
|
||||
expect(desc).toContain('time-based');
|
||||
});
|
||||
|
||||
it('describes manual triggers correctly', () => {
|
||||
const desc = getTriggerTypeDescription('n8n-nodes-base.manualTrigger');
|
||||
expect(desc).toContain('Manual');
|
||||
});
|
||||
|
||||
it('describes email triggers correctly', () => {
|
||||
const desc = getTriggerTypeDescription('n8n-nodes-base.emailTrigger');
|
||||
expect(desc).toContain('Email');
|
||||
expect(desc).toContain('polling');
|
||||
});
|
||||
|
||||
it('provides generic description for unknown triggers', () => {
|
||||
const desc = getTriggerTypeDescription('n8n-nodes-base.customTrigger');
|
||||
expect(desc).toContain('Trigger');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Integration: Trigger Classification', () => {
|
||||
it('all triggers detected by isTriggerNode should be classified correctly', () => {
|
||||
const triggers = [
|
||||
'n8n-nodes-base.webhook',
|
||||
'n8n-nodes-base.webhookTrigger',
|
||||
'n8n-nodes-base.scheduleTrigger',
|
||||
'n8n-nodes-base.manualTrigger',
|
||||
'n8n-nodes-base.executeWorkflowTrigger',
|
||||
'n8n-nodes-base.emailTrigger'
|
||||
];
|
||||
|
||||
for (const trigger of triggers) {
|
||||
expect(isTriggerNode(trigger)).toBe(true);
|
||||
const desc = getTriggerTypeDescription(trigger);
|
||||
expect(desc).toBeTruthy();
|
||||
expect(desc).not.toBe('Unknown trigger type');
|
||||
}
|
||||
});
|
||||
|
||||
it('only executeWorkflowTrigger is non-activatable', () => {
|
||||
const triggers = [
|
||||
{ type: 'n8n-nodes-base.webhook', activatable: true },
|
||||
{ type: 'n8n-nodes-base.scheduleTrigger', activatable: true },
|
||||
{ type: 'n8n-nodes-base.executeWorkflowTrigger', activatable: false },
|
||||
{ type: 'n8n-nodes-base.emailTrigger', activatable: true }
|
||||
];
|
||||
|
||||
for (const { type, activatable } of triggers) {
|
||||
expect(isTriggerNode(type)).toBe(true); // All are triggers
|
||||
expect(isActivatableTrigger(type)).toBe(activatable); // But only some are activatable
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -206,7 +206,7 @@ describe('Validation System Fixes', () => {
|
||||
const result = await workflowValidator.validateWorkflow(workflow);
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect(result.statistics.totalNodes).toBe(1); // Only webhook, non-executable nodes excluded
|
||||
expect(result.statistics.totalNodes).toBe(1); // Only webhook, sticky note excluded
|
||||
expect(result.statistics.enabledNodes).toBe(1);
|
||||
});
|
||||
|
||||
|
||||
Reference in New Issue
Block a user