Mirror of https://github.com/czlonkowski/n8n-mcp.git, synced 2026-02-06 05:23:08 +00:00

Compare commits (4 commits): claude/ver… ... fix/sessio…

| SHA1 |
|---|
| fa075ac8b0 |
| 78789cb4d5 |
| 3f0d119d18 |
| 247b4abebb |
.github/workflows/docker-build.yml (vendored, 52 changed lines)
@@ -5,6 +5,8 @@ on:
   push:
     branches:
       - main
+    tags:
+      - 'v*'
     paths-ignore:
       - '**.md'
       - '**.txt'
@@ -36,12 +38,6 @@ on:
       - 'CODE_OF_CONDUCT.md'
   workflow_dispatch:
 
-# Prevent concurrent Docker pushes across all workflows (shared with release.yml)
-# This ensures docker-build.yml and release.yml never push to 'latest' simultaneously
-concurrency:
-  group: docker-push-${{ github.ref }}
-  cancel-in-progress: false
-
 env:
   REGISTRY: ghcr.io
   IMAGE_NAME: ${{ github.repository }}
@@ -93,54 +89,16 @@ jobs:
         uses: docker/build-push-action@v5
         with:
           context: .
-          no-cache: false
+          no-cache: true
           platforms: linux/amd64,linux/arm64
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
           cache-from: type=gha
           cache-to: type=gha,mode=max
           provenance: false
 
-      - name: Verify multi-arch manifest for latest tag
-        if: github.event_name != 'pull_request' && github.ref == 'refs/heads/main'
-        run: |
-          echo "Verifying multi-arch manifest for latest tag..."
-
-          # Retry with exponential backoff (registry propagation can take time)
-          MAX_ATTEMPTS=5
-          ATTEMPT=1
-          WAIT_TIME=2
-
-          while [ $ATTEMPT -le $MAX_ATTEMPTS ]; do
-            echo "Attempt $ATTEMPT of $MAX_ATTEMPTS..."
-
-            MANIFEST=$(docker buildx imagetools inspect ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest 2>&1 || true)
-
-            # Check for both platforms
-            if echo "$MANIFEST" | grep -q "linux/amd64" && echo "$MANIFEST" | grep -q "linux/arm64"; then
-              echo "✅ Multi-arch manifest verified: both amd64 and arm64 present"
-              echo "$MANIFEST"
-              exit 0
-            fi
-
-            if [ $ATTEMPT -lt $MAX_ATTEMPTS ]; then
-              echo "⏳ Registry still propagating, waiting ${WAIT_TIME}s before retry..."
-              sleep $WAIT_TIME
-              WAIT_TIME=$((WAIT_TIME * 2)) # Exponential backoff: 2s, 4s, 8s, 16s
-            fi
-
-            ATTEMPT=$((ATTEMPT + 1))
-          done
-
-          echo "❌ ERROR: Multi-arch manifest incomplete after $MAX_ATTEMPTS attempts!"
-          echo "$MANIFEST"
-          exit 1
 
   build-railway:
     name: Build Railway Docker Image
     runs-on: ubuntu-latest
     needs: build
     permissions:
       contents: read
       packages: write
@@ -185,13 +143,11 @@ jobs:
         with:
           context: .
           file: ./Dockerfile.railway
-          no-cache: false
+          no-cache: true
           platforms: linux/amd64
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.meta-railway.outputs.tags }}
           labels: ${{ steps.meta-railway.outputs.labels }}
           cache-from: type=gha
           cache-to: type=gha,mode=max
           provenance: false
 
   # Nginx build commented out until Phase 2
.github/workflows/release.yml (vendored, 152 changed lines)
@@ -13,10 +13,9 @@ permissions:
   issues: write
   pull-requests: write
 
-# Prevent concurrent Docker pushes across all workflows (shared with docker-build.yml)
-# This ensures release.yml and docker-build.yml never push to 'latest' simultaneously
+# Prevent concurrent releases
 concurrency:
-  group: docker-push-${{ github.ref }}
+  group: release
   cancel-in-progress: false
 
 env:
@@ -112,50 +111,33 @@ jobs:
 
           echo "✅ Version $CURRENT_VERSION is valid (higher than npm version $NPM_VERSION)"
 
-  generate-release-notes:
-    name: Generate Release Notes
+  extract-changelog:
+    name: Extract Changelog
     runs-on: ubuntu-latest
     needs: detect-version-change
     if: needs.detect-version-change.outputs.version-changed == 'true'
     outputs:
-      release-notes: ${{ steps.generate.outputs.notes }}
-      has-notes: ${{ steps.generate.outputs.has-notes }}
+      release-notes: ${{ steps.extract.outputs.notes }}
+      has-notes: ${{ steps.extract.outputs.has-notes }}
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
         with:
           fetch-depth: 0 # Need full history for git log
 
-      - name: Generate release notes from commits
-        id: generate
+      - name: Extract changelog for version
+        id: extract
         run: |
-          CURRENT_VERSION="${{ needs.detect-version-change.outputs.new-version }}"
-          CURRENT_TAG="v$CURRENT_VERSION"
+          VERSION="${{ needs.detect-version-change.outputs.new-version }}"
+          CHANGELOG_FILE="docs/CHANGELOG.md"
 
-          # Get the previous tag (excluding the current tag which doesn't exist yet)
-          PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -v "^$CURRENT_TAG$" | head -1)
-
-          echo "Current version: $CURRENT_VERSION"
-          echo "Current tag: $CURRENT_TAG"
-          echo "Previous tag: $PREVIOUS_TAG"
-
-          if [ -z "$PREVIOUS_TAG" ]; then
-            echo "ℹ️ No previous tag found, this might be the first release"
-
-            # Get all commits up to current commit - use heredoc for multiline
-            NOTES=$(cat <<EOF
-### Initial Release
-
-This is the initial release of n8n-mcp v$CURRENT_VERSION.
-
-___
-
-**Release Statistics:**
-- Commit count: $(git rev-list --count HEAD)
-- First release setup
-EOF
-)
+          if [ ! -f "$CHANGELOG_FILE" ]; then
+            echo "Changelog file not found at $CHANGELOG_FILE"
+            echo "has-notes=false" >> $GITHUB_OUTPUT
+            echo "notes=No changelog entries found for version $VERSION" >> $GITHUB_OUTPUT
+            exit 0
+          fi
 
+          # Use the extracted changelog script
+          if NOTES=$(node scripts/extract-changelog.js "$VERSION" "$CHANGELOG_FILE" 2>/dev/null); then
+            echo "has-notes=true" >> $GITHUB_OUTPUT
 
            # Use heredoc to properly handle multiline content
@@ -165,32 +147,17 @@ EOF
             echo "EOF"
           } >> $GITHUB_OUTPUT
 
+          echo "✅ Successfully extracted changelog for version $VERSION"
         else
-          echo "✅ Previous tag found: $PREVIOUS_TAG"
-
-          # Generate release notes between tags
-          if NOTES=$(node scripts/generate-release-notes.js "$PREVIOUS_TAG" "HEAD" 2>/dev/null); then
-            echo "has-notes=true" >> $GITHUB_OUTPUT
-
-            # Use heredoc to properly handle multiline content
-            {
-              echo "notes<<EOF"
-              echo "$NOTES"
-              echo "EOF"
-            } >> $GITHUB_OUTPUT
-
-            echo "✅ Successfully generated release notes from $PREVIOUS_TAG to $CURRENT_TAG"
-          else
-            echo "has-notes=false" >> $GITHUB_OUTPUT
-            echo "notes=Failed to generate release notes for version $CURRENT_VERSION" >> $GITHUB_OUTPUT
-            echo "⚠️ Could not generate release notes for version $CURRENT_VERSION"
-          fi
+          echo "has-notes=false" >> $GITHUB_OUTPUT
+          echo "notes=No changelog entries found for version $VERSION" >> $GITHUB_OUTPUT
+          echo "⚠️ Could not extract changelog for version $VERSION"
         fi
 
   create-release:
     name: Create GitHub Release
     runs-on: ubuntu-latest
-    needs: [detect-version-change, generate-release-notes]
+    needs: [detect-version-change, extract-changelog]
     if: needs.detect-version-change.outputs.version-changed == 'true'
     outputs:
       release-id: ${{ steps.create.outputs.id }}
@@ -221,9 +188,9 @@ EOF
           cat > release_body.md << 'EOF'
           # Release v${{ needs.detect-version-change.outputs.new-version }}
 
-          ${{ needs.generate-release-notes.outputs.release-notes }}
+          ${{ needs.extract-changelog.outputs.release-notes }}
 
-          ___
+          ---
 
           ## Installation
@@ -469,75 +436,6 @@ EOF
           cache-from: type=gha
           cache-to: type=gha,mode=max
 
-      - name: Verify multi-arch manifest for latest tag
-        run: |
-          echo "Verifying multi-arch manifest for latest tag..."
-
-          # Retry with exponential backoff (registry propagation can take time)
-          MAX_ATTEMPTS=5
-          ATTEMPT=1
-          WAIT_TIME=2
-
-          while [ $ATTEMPT -le $MAX_ATTEMPTS ]; do
-            echo "Attempt $ATTEMPT of $MAX_ATTEMPTS..."
-
-            MANIFEST=$(docker buildx imagetools inspect ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest 2>&1 || true)
-
-            # Check for both platforms
-            if echo "$MANIFEST" | grep -q "linux/amd64" && echo "$MANIFEST" | grep -q "linux/arm64"; then
-              echo "✅ Multi-arch manifest verified: both amd64 and arm64 present"
-              echo "$MANIFEST"
-              exit 0
-            fi
-
-            if [ $ATTEMPT -lt $MAX_ATTEMPTS ]; then
-              echo "⏳ Registry still propagating, waiting ${WAIT_TIME}s before retry..."
-              sleep $WAIT_TIME
-              WAIT_TIME=$((WAIT_TIME * 2)) # Exponential backoff: 2s, 4s, 8s, 16s
-            fi
-
-            ATTEMPT=$((ATTEMPT + 1))
-          done
-
-          echo "❌ ERROR: Multi-arch manifest incomplete after $MAX_ATTEMPTS attempts!"
-          echo "$MANIFEST"
-          exit 1
-
-      - name: Verify multi-arch manifest for version tag
-        run: |
-          VERSION="${{ needs.detect-version-change.outputs.new-version }}"
-          echo "Verifying multi-arch manifest for version tag :$VERSION (without 'v' prefix)..."
-
-          # Retry with exponential backoff (registry propagation can take time)
-          MAX_ATTEMPTS=5
-          ATTEMPT=1
-          WAIT_TIME=2
-
-          while [ $ATTEMPT -le $MAX_ATTEMPTS ]; do
-            echo "Attempt $ATTEMPT of $MAX_ATTEMPTS..."
-
-            MANIFEST=$(docker buildx imagetools inspect ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:$VERSION 2>&1 || true)
-
-            # Check for both platforms
-            if echo "$MANIFEST" | grep -q "linux/amd64" && echo "$MANIFEST" | grep -q "linux/arm64"; then
-              echo "✅ Multi-arch manifest verified for $VERSION: both amd64 and arm64 present"
-              echo "$MANIFEST"
-              exit 0
-            fi
-
-            if [ $ATTEMPT -lt $MAX_ATTEMPTS ]; then
-              echo "⏳ Registry still propagating, waiting ${WAIT_TIME}s before retry..."
-              sleep $WAIT_TIME
-              WAIT_TIME=$((WAIT_TIME * 2)) # Exponential backoff: 2s, 4s, 8s, 16s
-            fi
-
-            ATTEMPT=$((ATTEMPT + 1))
-          done
-
-          echo "❌ ERROR: Multi-arch manifest incomplete for version $VERSION after $MAX_ATTEMPTS attempts!"
-          echo "$MANIFEST"
-          exit 1
 
       - name: Extract metadata for Railway image
         id: meta-railway
         uses: docker/metadata-action@v5
AGENTS.md (new file, 24 lines)
@@ -0,0 +1,24 @@
# Repository Guidelines

## Project Structure & Module Organization
The MCP integration is centered in `src/`: orchestration in `src/mcp`, engine helpers in `src/services`, and shared contracts in `src/types`. Built artifacts and the CLI wrapper land in `dist/`. Tests are grouped under `tests/` (unit, integration, e2e), while deployment aids and references live in `deploy/`, `docs/`, and `scripts/`. Template catalogs and sample data reside in `templates/` and `data/`.

## Build, Test, and Development Commands
- `npm run build` – transpile TypeScript with `tsconfig.build.json` into `dist/`.
- `npm run dev` – rebuilds sources, refreshes templates, and validates packaging.
- `npm run start:http` / `npm run start:n8n` – boot the MCP server in HTTP or n8n modes.
- `npm run test` – execute Vitest; `npm run test:integration` and `npm run test:e2e` scope coverage.
- `npm run lint` (alias `npm run typecheck`) – verify types without emitting artifacts.
- `npm run test:coverage` – generate coverage reports in `coverage/`.

## Coding Style & Naming Conventions
Code is TypeScript-first with ES modules. Follow the prevalent two-space indentation, single quotes, and trailing commas for multi-line literals. Prefer named exports, keep HTTP entry points under `src/http-server*`, and collocate helper utilities inside `src/utils`. Use `PascalCase` for classes, `camelCase` for functions and variables, and `SCREAMING_SNAKE_CASE` for shared constants. Rebuild before committing so `dist/` mirrors `src/`.
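
As a quick illustration of these conventions (the module and names below are made up for the example, not actual project files):

```typescript
// src/services/example-catalog.ts (illustrative names only)
export const MAX_RETRY_ATTEMPTS = 3; // shared constant: SCREAMING_SNAKE_CASE

export class NodeCatalog { // class: PascalCase
  private readonly entries: string[] = [];

  addEntry(nodeName: string): void { // method: camelCase
    this.entries.push(formatNodeName(nodeName));
  }
}

export function formatNodeName(raw: string): string { // named export, camelCase
  return raw.trim().toLowerCase();
}
```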

## Testing Guidelines
Vitest drives all automated testing. Place new suites beside peers using the `*.test.ts` suffix (for example, `tests/unit/session-restoration.test.ts`). Integration and end-to-end suites depend on the sample SQLite stores in `data/` and may invoke `docker/` assets; use `npm run test:integration`, `npm run test:mcp-endpoint`, or `npm run test:e2e` when validating those flows. Track coverage with `npm run test:coverage` and call out notable gaps in the PR.
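
A minimal suite in that style might look like the following (the helper under test is a stand-in, not a real project module):

```typescript
// tests/unit/format-node-name.test.ts (hypothetical example)
import { describe, expect, it } from 'vitest';

// Stand-in for a helper that would normally live in src/utils
function formatNodeName(raw: string): string {
  return raw.trim().toLowerCase();
}

describe('formatNodeName', () => {
  it('normalizes whitespace and casing', () => {
    expect(formatNodeName('  HTTP Request ')).toBe('http request');
  });
});
```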

## Commit & Pull Request Guidelines
History favors Conventional Commit prefixes (`fix:`, `feat:`, `chore:`) with optional release tags and issue links (`(#123)`). Keep subjects under 72 characters and describe breaking changes in the body. Pull requests should deliver a short summary, curl traces or screenshots for interface updates, reproduction steps, and links to issues or deployments. Run `npm run dev` plus targeted `npm run test:*` commands before requesting review.

## Environment & Configuration Tips
Copy secrets from `.env.example` when bootstrapping. Template catalogs rely on the bundled SQLite databases under `data/`; refresh them with `npm run rebuild` or `npm run fetch:templates`. For search features, run `npm run prebuild:fts5` before rebuilding so native FTS5 bindings are available.
CHANGELOG.md (2282 changed lines; diff suppressed because it is too large)
@@ -193,4 +193,3 @@ NEVER proactively create documentation files (*.md) or README files. Only create
- When the task can be divided into separate subtasks, you should spawn separate sub-agents to handle them in parallel
- Use the best sub-agent for the task as per their descriptions
- Do not use hyperbolic or dramatic language in comments and documentation
- Add to every commit and PR: Conceived by Romuald Członkowski - and then link to www.aiadvisors.pl/en. Don't add it in conversations
@@ -34,13 +34,9 @@ RUN apk add --no-cache curl su-exec && \
 # Copy runtime-only package.json
 COPY package.runtime.json package.json
 
-# Install runtime dependencies with better-sqlite3 compilation
-# Build tools (python3, make, g++) are installed, used for compilation, then removed
-# This enables native SQLite (better-sqlite3) instead of sql.js, preventing memory leaks
+# Install runtime dependencies with cache mount
 RUN --mount=type=cache,target=/root/.npm \
-    apk add --no-cache python3 make g++ && \
-    npm install --production --no-audit --no-fund && \
-    apk del python3 make g++
+    npm install --production --no-audit --no-fund
 
 # Copy built application
 COPY --from=builder /app/dist ./dist
@@ -25,20 +25,16 @@ RUN npm run build
 FROM node:22-alpine AS runtime
 WORKDIR /app
 
-# Install runtime dependencies
-RUN apk add --no-cache curl && \
+# Install system dependencies
+RUN apk add --no-cache curl python3 make g++ && \
     rm -rf /var/cache/apk/*
 
 # Copy runtime-only package.json
 COPY package.runtime.json package.json
 
-# Install production dependencies with temporary build tools
-# Build tools (python3, make, g++) enable better-sqlite3 compilation (native SQLite)
-# They are removed after installation to reduce image size and attack surface
-RUN apk add --no-cache python3 make g++ && \
-    npm install --production --no-audit --no-fund && \
-    npm cache clean --force && \
-    apk del python3 make g++
+# Install only production dependencies
+RUN npm install --production --no-audit --no-fund && \
+    npm cache clean --force
 
 # Copy built application from builder stage
 COPY --from=builder /app/dist ./dist
IMPLEMENTATION_GUIDE.md (new file, 3491 lines; diff suppressed because it is too large)
MVP_DEPLOYMENT_PLAN.md (new file, 1464 lines; diff suppressed because it is too large)
README.md (182 changed lines)
@@ -5,7 +5,7 @@
 [npm version](https://www.npmjs.com/package/n8n-mcp)
 [codecov](https://codecov.io/gh/czlonkowski/n8n-mcp)
-[Tests](https://github.com/czlonkowski/n8n-mcp/actions)
+[n8n version](https://github.com/n8n-io/n8n)
 [n8n version](https://github.com/n8n-io/n8n)
 [Docker](https://github.com/czlonkowski/n8n-mcp/pkgs/container/n8n-mcp)
 [Deploy on Railway](https://railway.com/deploy/n8n-mcp?referralCode=n8n-mcp)
@@ -284,86 +284,6 @@ environment:
   N8N_MCP_TELEMETRY_DISABLED: "true"
 ```
 
-## ⚙️ Database & Memory Configuration
-
-### Database Adapters
-
-n8n-mcp uses SQLite for storing node documentation. Two adapters are available:
-
-1. **better-sqlite3** (Default in Docker)
-   - Native C++ bindings for best performance
-   - Direct disk writes (no memory overhead)
-   - **Now enabled by default** in Docker images (v2.20.2+)
-   - Memory usage: ~100-120 MB stable
-
-2. **sql.js** (Fallback)
-   - Pure JavaScript implementation
-   - In-memory database with periodic saves
-   - Used when better-sqlite3 compilation fails
-   - Memory usage: ~150-200 MB stable
-
-### Memory Optimization (sql.js)
-
-If using the sql.js fallback, you can configure the save interval to balance data safety against memory efficiency:
-
-**Environment Variable:**
-```bash
-SQLJS_SAVE_INTERVAL_MS=5000 # Default: 5000ms (5 seconds)
-```
-
-**Usage:**
-- Controls how long to wait after database changes before saving to disk
-- Lower values = more frequent saves = higher memory churn
-- Higher values = less frequent saves = lower memory usage
-- Minimum: 100ms
-- Recommended: 5000-10000ms for production
-
-**Docker Configuration:**
-```json
-{
-  "mcpServers": {
-    "n8n-mcp": {
-      "command": "docker",
-      "args": [
-        "run",
-        "-i",
-        "--rm",
-        "--init",
-        "-e", "SQLJS_SAVE_INTERVAL_MS=10000",
-        "ghcr.io/czlonkowski/n8n-mcp:latest"
-      ]
-    }
-  }
-}
-```
-
-**docker-compose:**
-```yaml
-environment:
-  SQLJS_SAVE_INTERVAL_MS: "10000"
-```
-
-### Memory Leak Fix (v2.20.2)
-
-**Issue #330** identified a critical memory leak in long-running Docker/Kubernetes deployments:
-- **Before:** 100 MB → 2.2 GB over 72 hours (OOM kills)
-- **After:** Stable at 100-200 MB indefinitely
-
-**Fixes Applied:**
-- ✅ Docker images now use better-sqlite3 by default (eliminates the leak entirely)
-- ✅ sql.js fallback optimized (98% reduction in save frequency)
-- ✅ Removed unnecessary memory allocations (50% reduction per save)
-- ✅ Configurable save interval via `SQLJS_SAVE_INTERVAL_MS`
-
-For Kubernetes deployments with memory limits:
-```yaml
-resources:
-  requests:
-    memory: 256Mi
-  limits:
-    memory: 512Mi
-```
 
 ## 💖 Support This Project
 
 <div align="center">
@@ -501,14 +421,6 @@ Complete guide for integrating n8n-MCP with Windsurf using project rules.
 ### [Codex](./docs/CODEX_SETUP.md)
 Complete guide for integrating n8n-MCP with Codex.
 
-## 🎓 Add Claude Skills (Optional)
-
-Supercharge your n8n workflow building with specialized skills that teach AI how to build production-ready workflows!
-
-[Watch the video](https://www.youtube.com/watch?v=e6VvRqmUY2Y)
-
-Learn more: [n8n-skills repository](https://github.com/czlonkowski/n8n-skills)
 
 ## 🤖 Claude Project Setup
 
 For the best results when using n8n-MCP with Claude Projects, use these enhanced system instructions:
@@ -674,97 +586,6 @@ n8n_update_partial_workflow({id: "wf-123", operations: [{...}]})
 n8n_update_partial_workflow({id: "wf-123", operations: [{...}]})
 ```
 
-### CRITICAL: addConnection Syntax
-
-The `addConnection` operation requires **four separate string parameters**. Common mistakes cause misleading errors.
-
-❌ WRONG - Object format (fails with "Expected string, received object"):
-```json
-{
-  "type": "addConnection",
-  "connection": {
-    "source": {"nodeId": "node-1", "outputIndex": 0},
-    "destination": {"nodeId": "node-2", "inputIndex": 0}
-  }
-}
-```
-
-❌ WRONG - Combined string (fails with "Source node not found"):
-```json
-{
-  "type": "addConnection",
-  "source": "node-1:main:0",
-  "target": "node-2:main:0"
-}
-```
-
-✅ CORRECT - Four separate string parameters:
-```json
-{
-  "type": "addConnection",
-  "source": "node-id-string",
-  "target": "target-node-id-string",
-  "sourcePort": "main",
-  "targetPort": "main"
-}
-```
-
-**Reference**: [GitHub Issue #327](https://github.com/czlonkowski/n8n-mcp/issues/327)
-
-### ⚠️ CRITICAL: IF Node Multi-Output Routing
-
-IF nodes have **two outputs** (TRUE and FALSE). Use the **`branch` parameter** to route to the correct output:
-
-✅ CORRECT - Route to TRUE branch (when the condition is met):
-```json
-{
-  "type": "addConnection",
-  "source": "if-node-id",
-  "target": "success-handler-id",
-  "sourcePort": "main",
-  "targetPort": "main",
-  "branch": "true"
-}
-```
-
-✅ CORRECT - Route to FALSE branch (when the condition is NOT met):
-```json
-{
-  "type": "addConnection",
-  "source": "if-node-id",
-  "target": "failure-handler-id",
-  "sourcePort": "main",
-  "targetPort": "main",
-  "branch": "false"
-}
-```
-
-**Common Pattern** - Complete IF node routing:
-```json
-n8n_update_partial_workflow({
-  id: "workflow-id",
-  operations: [
-    {type: "addConnection", source: "If Node", target: "True Handler", sourcePort: "main", targetPort: "main", branch: "true"},
-    {type: "addConnection", source: "If Node", target: "False Handler", sourcePort: "main", targetPort: "main", branch: "false"}
-  ]
-})
-```
-
-**Note**: Without the `branch` parameter, both connections may end up on the same output, causing logic errors!
-
-### removeConnection Syntax
-
-Use the same four-parameter format:
-```json
-{
-  "type": "removeConnection",
-  "source": "source-node-id",
-  "target": "target-node-id",
-  "sourcePort": "main",
-  "targetPort": "main"
-}
-```
 
 ## Example Workflow
 
 ### Template-First Approach
@@ -981,7 +802,6 @@ These powerful tools allow you to manage n8n workflows directly from Claude. The
 - **`n8n_list_workflows`** - List workflows with filtering and pagination
 - **`n8n_validate_workflow`** - Validate workflows already in n8n by ID (NEW in v2.6.3)
 - **`n8n_autofix_workflow`** - Automatically fix common workflow errors (NEW in v2.13.0!)
-- **`n8n_workflow_versions`** - Manage workflow version history and rollback (NEW in v2.22.0!)
 
 #### Execution Management
 - **`n8n_trigger_webhook_workflow`** - Trigger workflows via webhook URL
TELEMETRY_PRUNING_GUIDE.md (new file, 623 lines)
@@ -0,0 +1,623 @@
# Telemetry Data Pruning & Aggregation Guide

## Overview

This guide provides a complete solution for managing n8n-mcp telemetry data in Supabase to stay within the 500 MB free tier limit while preserving valuable insights for product development.

## Current Situation

- **Database Size**: 265 MB / 500 MB (53% of limit)
- **Growth Rate**: 7.7 MB/day (54 MB/week)
- **Time Until Full**: ~17 days
- **Total Events**: 641,487 events + 17,247 workflows

### Storage Breakdown

| Event Type | Count | Size | % of Total |
|------------|-------|------|------------|
| `tool_sequence` | 362,704 | 96 MB | 72% |
| `tool_used` | 191,938 | 28 MB | 21% |
| `validation_details` | 36,280 | 14 MB | 11% |
| `workflow_created` | 23,213 | 4.5 MB | 3% |
| Others | ~26,000 | ~3 MB | 2% |

## Solution Strategy

**Aggregate → Delete → Retain only recent raw events**

### Expected Results

| Metric | Before | After | Improvement |
|--------|--------|-------|-------------|
| Database Size | 265 MB | ~90-120 MB | **55-65% reduction** |
| Growth Rate | 7.7 MB/day | ~2-3 MB/day | **60-70% slower** |
| Days Until Full | 17 days | **Sustainable** | Never fills |
| Free Tier Usage | 53% | ~20-25% | **75-80% headroom** |

## Implementation Steps

### Step 1: Execute the SQL Migration

Open the Supabase SQL Editor and run the entire contents of `supabase-telemetry-aggregation.sql`:

```sql
-- Copy and paste the entire supabase-telemetry-aggregation.sql file
-- Or run it directly from the file
```

This will create:
- 5 aggregation tables
- Aggregation functions
- Automated cleanup function
- Monitoring functions
- Scheduled cron job (daily at 2 AM UTC)

### Step 2: Verify Cron Job Setup

Check that the cron job was created successfully:

```sql
-- View scheduled cron jobs
SELECT
  jobid,
  schedule,
  command,
  nodename,
  nodeport,
  database,
  username,
  active
FROM cron.job
WHERE jobname = 'telemetry-daily-cleanup';
```

Expected output:
- Schedule: `0 2 * * *` (daily at 2 AM UTC)
- Active: `true`

### Step 3: Run Initial Emergency Cleanup

Get immediate space relief by running the emergency cleanup:

```sql
-- This will aggregate and delete data older than 7 days
SELECT * FROM emergency_cleanup();
```

Expected results:
```
action                            | rows_deleted | space_freed_mb
----------------------------------+--------------+----------------
Deleted non-critical events > 7d  | ~284,924     | ~52 MB
Deleted error events > 14d        | ~2,400       | ~0.5 MB
Deleted duplicate workflows       | ~8,500       | ~11 MB
TOTAL (run VACUUM separately)     | 0            | ~63.5 MB
```

### Step 4: Reclaim Disk Space

After deletion, reclaim the actual disk space:

```sql
-- Reclaim space from deleted rows
VACUUM FULL telemetry_events;
VACUUM FULL telemetry_workflows;

-- Update statistics for query optimization
ANALYZE telemetry_events;
ANALYZE telemetry_workflows;
```

**Note**: `VACUUM FULL` may take a few minutes and locks the table. Run during off-peak hours if possible.

### Step 5: Verify Results

Check the new database size:

```sql
SELECT * FROM check_database_size();
```

Expected output:
```
total_size_mb | events_size_mb | workflows_size_mb | aggregates_size_mb | percent_of_limit | days_until_full | status
--------------+----------------+-------------------+--------------------+------------------+-----------------+---------
202.5         | 85.2           | 35.8              | 12.5               | 40.5             | ~95             | HEALTHY
```

## Daily Operations (Automated)

Once set up, the system runs automatically:

1. **Daily at 2 AM UTC**: Cron job runs
2. **Aggregation**: Data older than 3 days is aggregated into summary tables
3. **Deletion**: Raw events are deleted after aggregation
4. **Cleanup**: VACUUM runs to reclaim space
5. **Retention**:
   - High-volume events: 3 days
   - Error events: 30 days
   - Aggregated insights: Forever

## Monitoring Commands

### Check Database Health

```sql
-- View current size and status
SELECT * FROM check_database_size();
```
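
The same check can also be scripted outside the SQL Editor. Below is a sketch using the `@supabase/supabase-js` client; the environment variable names and the alerting hook are placeholders, and the row shape assumes the columns returned by `check_database_size()` above:

```typescript
import { createClient } from '@supabase/supabase-js';

const supabase = createClient(
  process.env.SUPABASE_URL!,        // placeholder: your project URL
  process.env.SUPABASE_SERVICE_KEY! // placeholder: service-role key
);

async function checkTelemetryHealth(): Promise<void> {
  // Invokes the check_database_size() function created by the migration
  const { data, error } = await supabase.rpc('check_database_size');
  if (error) throw error;

  const [row] = data as Array<{
    total_size_mb: number;
    percent_of_limit: number;
    status: string;
  }>;
  console.log(
    `Telemetry DB: ${row.total_size_mb} MB (${row.percent_of_limit}% of limit), status=${row.status}`
  );

  if (row.status !== 'HEALTHY') {
    console.warn('Telemetry database needs attention'); // hook up real alerting here
  }
}

checkTelemetryHealth().catch(console.error);
```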

### View Aggregated Insights

```sql
-- Top tools used daily
SELECT
  aggregation_date,
  tool_name,
  usage_count,
  success_count,
  error_count,
  ROUND(100.0 * success_count / NULLIF(usage_count, 0), 1) as success_rate_pct
FROM telemetry_tool_usage_daily
ORDER BY aggregation_date DESC, usage_count DESC
LIMIT 50;

-- Most common tool sequences
SELECT
  aggregation_date,
  tool_sequence,
  occurrence_count,
  ROUND(avg_sequence_duration_ms, 0) as avg_duration_ms,
  ROUND(100 * success_rate, 1) as success_rate_pct
FROM telemetry_tool_patterns
ORDER BY occurrence_count DESC
LIMIT 20;

-- Error patterns over time
SELECT
  aggregation_date,
  error_type,
  error_context,
  occurrence_count,
  affected_users,
  sample_error_message
FROM telemetry_error_patterns
ORDER BY aggregation_date DESC, occurrence_count DESC
LIMIT 30;

-- Workflow creation trends
SELECT
  aggregation_date,
  complexity,
  node_count_range,
  has_trigger,
  has_webhook,
  workflow_count,
  ROUND(avg_node_count, 1) as avg_nodes
FROM telemetry_workflow_insights
ORDER BY aggregation_date DESC, workflow_count DESC
LIMIT 30;

-- Validation success rates
SELECT
  aggregation_date,
  validation_type,
  profile,
  success_count,
  failure_count,
  ROUND(100.0 * success_count / NULLIF(success_count + failure_count, 0), 1) as success_rate_pct,
  common_failure_reasons
FROM telemetry_validation_insights
ORDER BY aggregation_date DESC, (success_count + failure_count) DESC
LIMIT 30;
```

### Check Cron Job Execution History

```sql
-- View recent cron job runs
SELECT
  runid,
  jobid,
  database,
  status,
  return_message,
  start_time,
  end_time
FROM cron.job_run_details
WHERE jobid = (SELECT jobid FROM cron.job WHERE jobname = 'telemetry-daily-cleanup')
ORDER BY start_time DESC
LIMIT 10;
```

## Manual Operations

### Run Cleanup On-Demand

If you need to run cleanup outside the scheduled time:

```sql
-- Run with the default 3-day retention
SELECT * FROM run_telemetry_aggregation_and_cleanup(3);
VACUUM ANALYZE telemetry_events;

-- Or with custom retention (e.g., 5 days)
SELECT * FROM run_telemetry_aggregation_and_cleanup(5);
VACUUM ANALYZE telemetry_events;
```

### Emergency Cleanup (Critical Situations)

If the database is approaching the limit and you need immediate relief:

```sql
-- Step 1: Run emergency cleanup (7-day retention)
SELECT * FROM emergency_cleanup();

-- Step 2: Reclaim space aggressively
VACUUM FULL telemetry_events;
VACUUM FULL telemetry_workflows;
ANALYZE telemetry_events;
ANALYZE telemetry_workflows;

-- Step 3: Verify results
SELECT * FROM check_database_size();
```

### Adjust Retention Policy

To change the default 3-day retention period:

```sql
-- Update the cron job to use 5-day retention instead
SELECT cron.unschedule('telemetry-daily-cleanup');

SELECT cron.schedule(
  'telemetry-daily-cleanup',
  '0 2 * * *', -- Daily at 2 AM UTC
  $$
  SELECT run_telemetry_aggregation_and_cleanup(5); -- 5 days instead of 3
  VACUUM ANALYZE telemetry_events;
  VACUUM ANALYZE telemetry_workflows;
  $$
);
```
## Data Retention Policies

### Raw Events Retention

| Event Type | Retention | Reason |
|------------|-----------|--------|
| `tool_sequence` | 3 days | High volume, low long-term value |
| `tool_used` | 3 days | High volume, aggregated daily |
| `validation_details` | 3 days | Aggregated into insights |
| `workflow_created` | 3 days | Aggregated into patterns |
| `session_start` | 3 days | Operational data only |
| `search_query` | 3 days | Operational data only |
| `error_occurred` | **30 days** | Extended for debugging |
| `workflow_validation_failed` | 3 days | Captured in aggregates |

### Aggregated Data Retention

All aggregated data is kept **indefinitely**:
- Daily tool usage statistics
- Tool sequence patterns
- Workflow creation trends
- Error patterns and frequencies
- Validation success rates

### Workflow Retention

- **Unique workflows**: Kept indefinitely (one per unique hash)
- **Duplicate workflows**: Deleted after 3 days
- **Workflow metadata**: Aggregated into daily insights

## Intelligence Preserved

Even after aggressive pruning, you still have access to:

### Long-term Product Insights
- Which tools are most/least used over time
- Tool usage trends and adoption curves
- Common workflow patterns and complexities
- Error frequencies and types across versions
- Validation failure patterns

### Development Intelligence
- Feature adoption rates (by day/week/month)
- Pain points (high error rates, validation failures)
- User behavior patterns (tool sequences, workflow styles)
- Version comparison (changes in usage between releases)

### Recent Debugging Data
- Last 3 days of raw events for immediate issues
- Last 30 days of error events for bug tracking
- Sample error messages for each error type

## Troubleshooting

### Cron Job Not Running

Check whether the pg_cron extension is enabled:

```sql
-- Enable pg_cron
CREATE EXTENSION IF NOT EXISTS pg_cron;

-- Verify it's enabled
SELECT * FROM pg_extension WHERE extname = 'pg_cron';
```

### Aggregation Functions Failing

Check for errors in cron job execution:

```sql
-- View error messages
SELECT
  status,
  return_message,
  start_time
FROM cron.job_run_details
WHERE jobid = (SELECT jobid FROM cron.job WHERE jobname = 'telemetry-daily-cleanup')
  AND status = 'failed'
ORDER BY start_time DESC;
```

### VACUUM Not Reclaiming Space

If `VACUUM ANALYZE` isn't reclaiming enough space, use `VACUUM FULL`:

```sql
-- More aggressive space reclamation (locks the table)
VACUUM FULL telemetry_events;
```

### Database Still Growing Too Fast

Reduce the retention period further:

```sql
-- Change to 2-day retention (more aggressive)
SELECT * FROM run_telemetry_aggregation_and_cleanup(2);
```

Or delete more event types:

```sql
-- Delete additional low-value events
DELETE FROM telemetry_events
WHERE created_at < NOW() - INTERVAL '3 days'
  AND event IN ('session_start', 'search_query', 'diagnostic_completed', 'health_check_completed');
```

## Performance Considerations

### Cron Job Execution Time

The daily cleanup typically takes:
- **Aggregation**: 30-60 seconds
- **Deletion**: 15-30 seconds
- **VACUUM**: 2-5 minutes
- **Total**: ~3-7 minutes

### Query Performance

All aggregation tables have indexes on:
- Date columns (for time-series queries)
- Lookup columns (tool_name, error_type, etc.)
- User columns (for user-specific analysis)

### Lock Considerations

- `VACUUM ANALYZE`: Minimal locking, safe during operation
- `VACUUM FULL`: Locks the table, run during off-peak hours
- Aggregation functions: Read-only queries, no locking

## Customization

### Add Custom Aggregations

To track additional metrics, create new aggregation tables:

```sql
-- Example: Session duration aggregation
CREATE TABLE telemetry_session_duration_daily (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  aggregation_date DATE NOT NULL,
  avg_duration_seconds NUMERIC,
  median_duration_seconds NUMERIC,
  max_duration_seconds NUMERIC,
  session_count INTEGER,
  created_at TIMESTAMPTZ DEFAULT NOW(),
  UNIQUE(aggregation_date)
);

-- Add to the cleanup function
-- (modify run_telemetry_aggregation_and_cleanup)
```

### Modify Retention Policies

Edit the `run_telemetry_aggregation_and_cleanup` function to adjust retention by event type:

```sql
-- Keep validation_details for 7 days instead of 3
DELETE FROM telemetry_events
WHERE created_at < (NOW() - INTERVAL '7 days')
  AND event = 'validation_details';
```

### Change Cron Schedule

Adjust the execution time if needed:

```sql
-- Run at a different time (e.g., 3 AM UTC)
SELECT cron.schedule(
  'telemetry-daily-cleanup',
  '0 3 * * *', -- 3 AM instead of 2 AM
  $$ SELECT run_telemetry_aggregation_and_cleanup(3); VACUUM ANALYZE telemetry_events; $$
);

-- Run twice daily (2 AM and 2 PM)
SELECT cron.schedule(
  'telemetry-cleanup-morning',
  '0 2 * * *',
  $$ SELECT run_telemetry_aggregation_and_cleanup(3); $$
);

SELECT cron.schedule(
  'telemetry-cleanup-afternoon',
  '0 14 * * *',
  $$ SELECT run_telemetry_aggregation_and_cleanup(3); $$
);
```
## Backup & Recovery

### Before Running Emergency Cleanup

Create a backup of the aggregated data:

```sql
-- Export aggregated data to CSV or backup tables
CREATE TABLE telemetry_tool_usage_backup AS
SELECT * FROM telemetry_tool_usage_daily;

CREATE TABLE telemetry_patterns_backup AS
SELECT * FROM telemetry_tool_patterns;
```

### Restore Deleted Data

Raw event data cannot be restored after deletion. However, aggregated insights are preserved indefinitely.

To prevent accidental data loss:
1. Test cleanup functions on staging first
2. Review `check_database_size()` before running emergency cleanup
3. Start with longer retention periods (7 days) and reduce gradually
4. Monitor aggregated data quality for 1-2 weeks

## Monitoring Dashboard Queries

### Weekly Growth Report

```sql
-- Database growth over the last 7 days
SELECT
  DATE(created_at) as date,
  COUNT(*) as events_created,
  COUNT(DISTINCT event) as event_types,
  COUNT(DISTINCT user_id) as active_users,
  ROUND(SUM(pg_column_size(telemetry_events.*))::NUMERIC / 1024 / 1024, 2) as size_mb
FROM telemetry_events
WHERE created_at >= NOW() - INTERVAL '7 days'
GROUP BY DATE(created_at)
ORDER BY date DESC;
```

### Storage Efficiency Report

```sql
-- Compare raw vs aggregated storage
SELECT
  'Raw Events (last 3 days)' as category,
  COUNT(*) as row_count,
  pg_size_pretty(pg_total_relation_size('telemetry_events')) as table_size
FROM telemetry_events
WHERE created_at >= NOW() - INTERVAL '3 days'

UNION ALL

SELECT
  'Aggregated Insights (all time)',
  (SELECT COUNT(*) FROM telemetry_tool_usage_daily) +
  (SELECT COUNT(*) FROM telemetry_tool_patterns) +
  (SELECT COUNT(*) FROM telemetry_workflow_insights) +
  (SELECT COUNT(*) FROM telemetry_error_patterns) +
  (SELECT COUNT(*) FROM telemetry_validation_insights),
  pg_size_pretty(
    pg_total_relation_size('telemetry_tool_usage_daily') +
    pg_total_relation_size('telemetry_tool_patterns') +
    pg_total_relation_size('telemetry_workflow_insights') +
    pg_total_relation_size('telemetry_error_patterns') +
    pg_total_relation_size('telemetry_validation_insights')
  );
```

### Top Events by Size

```sql
-- Which event types consume the most space
SELECT
  event,
  COUNT(*) as event_count,
  pg_size_pretty(SUM(pg_column_size(telemetry_events.*))::BIGINT) as total_size,
  pg_size_pretty(AVG(pg_column_size(telemetry_events.*))::BIGINT) as avg_size_per_event,
  ROUND(100.0 * COUNT(*) / SUM(COUNT(*)) OVER (), 2) as pct_of_events
FROM telemetry_events
GROUP BY event
ORDER BY SUM(pg_column_size(telemetry_events.*)) DESC;
```

## Success Metrics

Track these metrics weekly to ensure the system is working:

### Target Metrics (After Implementation)

- ✅ Database size: **< 150 MB** (< 30% of limit)
- ✅ Growth rate: **< 3 MB/day** (sustainable)
- ✅ Raw event retention: **3 days** (configurable)
- ✅ Aggregated data: **All-time insights available**
- ✅ Cron job success rate: **> 95%**
- ✅ Query performance: **< 500ms for aggregated queries**

### Review Schedule

- **Daily**: Check `check_database_size()` status
- **Weekly**: Review aggregated insights and growth trends
- **Monthly**: Analyze cron job success rate and adjust retention if needed
- **After each release**: Compare usage patterns to the previous version

## Quick Reference

### Essential Commands

```sql
-- Check database health
SELECT * FROM check_database_size();

-- View recent aggregated insights
SELECT * FROM telemetry_tool_usage_daily ORDER BY aggregation_date DESC LIMIT 10;

-- Run manual cleanup (3-day retention)
SELECT * FROM run_telemetry_aggregation_and_cleanup(3);
VACUUM ANALYZE telemetry_events;

-- Emergency cleanup (7-day retention)
SELECT * FROM emergency_cleanup();
VACUUM FULL telemetry_events;

-- View cron job status
SELECT * FROM cron.job WHERE jobname = 'telemetry-daily-cleanup';

-- View cron execution history
SELECT * FROM cron.job_run_details
WHERE jobid = (SELECT jobid FROM cron.job WHERE jobname = 'telemetry-daily-cleanup')
ORDER BY start_time DESC LIMIT 5;
```

## Support

If you encounter issues:

1. Check the troubleshooting section above
2. Review cron job execution logs
3. Verify the pg_cron extension is enabled
4. Test aggregation functions manually
5. Check the Supabase dashboard for errors

For questions or improvements, refer to the main project documentation.
data/nodes.db (binary file not shown)
@@ -1,111 +0,0 @@
# CI Test Infrastructure - Known Issues

## Integration Test Failures for External Contributor PRs

### Issue Summary

Integration tests fail for external contributor PRs with "No response from n8n server" errors, despite the code changes being correct. This is a **test infrastructure issue**, not a code quality issue.

### Root Cause

1. **GitHub Actions Security**: External contributor PRs don't get access to repository secrets (`N8N_API_URL`, `N8N_API_KEY`, etc.)
2. **MSW Mock Server**: Mock Service Worker (MSW) is not properly intercepting HTTP requests in the CI environment
3. **Test Configuration**: Integration tests expect `http://localhost:3001/mock-api` but the mock server isn't responding

### Evidence

From CI logs (PR #343):
```
[CI-DEBUG] Global setup complete, N8N_API_URL: http://localhost:3001/mock-api
❌ No response from n8n server (repeated 60+ times across 20 tests)
```

The tests ARE using the correct mock URL, but MSW isn't intercepting the requests.

### Why This Happens

**For External PRs:**
- GitHub Actions doesn't expose repository secrets for security reasons
- This prevents malicious PRs from exfiltrating secrets
- MSW setup runs, but requests don't get intercepted in CI

**Test Configuration:**
- `.env.test` line 19: `N8N_API_URL=http://localhost:3001/mock-api`
- `.env.test` line 67: `MSW_ENABLED=true`
- CI workflow lines 75-80: Secrets are set but empty for external PRs

### Impact

- ✅ **Code Quality**: NOT affected - the actual code changes are correct
- ✅ **Local Testing**: Works fine - MSW intercepts requests locally
- ❌ **CI for External PRs**: Integration tests fail (infrastructure issue)
- ✅ **CI for Internal PRs**: Works fine (has access to secrets)

### Current Workarounds

1. **For Maintainers**: Use the `--admin` flag to merge despite failing tests when the code is verified correct
2. **For Contributors**: Run tests locally, where MSW works properly
3. **For CI**: Unit tests pass (they don't require the n8n API); integration tests fail

### Files Affected

- `tests/integration/setup/integration-setup.ts` - MSW server setup
- `tests/setup/msw-setup.ts` - MSW configuration
- `tests/mocks/n8n-api/handlers.ts` - Mock request handlers
- `.github/workflows/test.yml` - CI configuration
- `.env.test` - Test environment configuration

### Potential Solutions (Not Implemented)

1. **Separate Unit/Integration Runs**
   - Run integration tests only for internal PRs
   - Skip integration tests for external PRs
   - Rely on unit tests for external PR validation

2. **MSW CI Debugging**
   - Add extensive logging to MSW setup
   - Check whether the MSW server actually starts in CI
   - Verify request interception is working

3. **Mock Server Process**
   - Start an actual HTTP server in CI instead of MSW
   - More reliable, but adds complexity
   - Would require test infrastructure refactoring

4. **Public Test Instance**
   - Use a publicly accessible test n8n instance
   - Exposes test data, security concerns
   - Would work for external PRs

### Decision

**Status**: Documented but not fixed

**Rationale**:
- Integration test infrastructure refactoring is a separate concern from code quality
- External PRs are relatively rare compared to internal development
- Unit tests provide sufficient coverage for most changes
- Maintainers can verify integration tests locally before merging

### Testing Strategy

**For External Contributor PRs:**
1. ✅ Unit tests must pass
2. ✅ TypeScript compilation must pass
3. ✅ Build must succeed
4. ⚠️ Integration test failures are expected (infrastructure issue)
5. ✅ Maintainer verifies locally before merge

**For Internal PRs:**
1. ✅ All tests must pass (unit + integration)
2. ✅ Full CI validation

### References

- PR #343: First occurrence of this issue
- PR #345: Documented the infrastructure issue
- Issue: External PRs don't get secrets (GitHub Actions security)

### Last Updated

2025-10-21 - Documented as part of the PR #345 investigation
@@ -80,53 +80,6 @@ Remove the server:
 claude mcp remove n8n-mcp
 ```
 
-## 🎓 Add Claude Skills (Optional)
-
-Supercharge your n8n workflow building with specialized Claude Code skills! The [n8n-skills](https://github.com/czlonkowski/n8n-skills) repository provides 7 complementary skills that teach AI assistants how to build production-ready n8n workflows.
-
-### What You Get
-
-- ✅ **n8n Expression Syntax** - Correct {{}} patterns and common mistakes
-- ✅ **n8n MCP Tools Expert** - How to use n8n-mcp tools effectively
-- ✅ **n8n Workflow Patterns** - 5 proven architectural patterns
-- ✅ **n8n Validation Expert** - Interpret and fix validation errors
-- ✅ **n8n Node Configuration** - Operation-aware setup guidance
-- ✅ **n8n Code JavaScript** - Write effective JavaScript in Code nodes
-- ✅ **n8n Code Python** - Python patterns with limitation awareness
-
-### Installation
-
-**Method 1: Plugin Installation** (Recommended)
-```bash
-/plugin install czlonkowski/n8n-skills
-```
-
-**Method 2: Via Marketplace**
-```bash
-# Add as a marketplace, then browse and install
-/plugin marketplace add czlonkowski/n8n-skills
-
-# Then browse available plugins
-/plugin install
-# Select "n8n-mcp-skills" from the list
-```
-
-**Method 3: Manual Installation**
-```bash
-# 1. Clone the repository
-git clone https://github.com/czlonkowski/n8n-skills.git
-
-# 2. Copy skills to your Claude Code skills directory
-cp -r n8n-skills/skills/* ~/.claude/skills/
-
-# 3. Reload Claude Code
-# Skills will activate automatically
-```
-
-For complete installation instructions, configuration options, and usage examples, see the [n8n-skills README](https://github.com/czlonkowski/n8n-skills#-installation).
-
-Skills work seamlessly with n8n-mcp to provide expert guidance throughout the workflow building process!
 
 ## Project Instructions
 
 For optimal results, create a `CLAUDE.md` file in your project root with the instructions from the [main README's Claude Project Setup section](../README.md#-claude-project-setup).
docs/MULTI_APP_INTEGRATION.md (new file, 83 lines)
@@ -0,0 +1,83 @@
# Multi-App Integration Guide

This guide explains how session restoration works in n8n-mcp for multi-tenant deployments.

## Session Restoration: Warm Start Pattern

When a container restarts, existing client sessions are lost. The warm start pattern allows clients to seamlessly restore sessions without manual intervention.

### How It Works

1. **Client sends a request** with an existing session ID after the restart
2. **Server detects** the unknown session ID
3. **Restoration hook** is called to load the session context from your database
4. **New session created** using the restored context
5. **Current request handled** immediately through the new transport
6. **Client receives** standard MCP error `-32000` (Server not initialized)
7. **Client auto-retries** with an initialize request on the same connection
8. **Session fully restored** and the client continues normally

### Key Features

- **Zero client changes**: Standard MCP clients auto-retry on -32000
- **Single HTTP round-trip**: No extra network requests needed
- **Concurrent-safe**: Idempotency guards prevent duplicate restoration
- **Automatic cleanup**: Failed restorations clean up resources automatically

### Implementation

```typescript
import { SingleSessionHTTPServer } from 'n8n-mcp';

const server = new SingleSessionHTTPServer({
  // Hook to load session context from your storage
  onSessionNotFound: async (sessionId) => {
    const session = await database.loadSession(sessionId);
    if (!session || session.expired) {
      return null; // Reject restoration
    }
    return session.instanceContext; // Restore session
  },

  // Optional: Configure timeouts and retries
  sessionRestorationTimeout: 5000, // 5 seconds (default)
  sessionRestorationRetries: 2, // Retry on transient failures
  sessionRestorationRetryDelay: 100 // Delay between retries
});
```

### Session Lifecycle Events

Track session restoration for metrics and debugging:

```typescript
const server = new SingleSessionHTTPServer({
  sessionEvents: {
    onSessionRestored: (sessionId, context) => {
      console.log(`Session ${sessionId} restored`);
      metrics.increment('session.restored');
    }
  }
});
```

### Error Handling

The restoration hook can return three outcomes:

- **Return context**: The session is restored successfully
- **Return null/undefined**: The session is rejected (the client gets 400 Bad Request)
- **Throw an error**: Restoration failed (the client gets 500 Internal Server Error)

Timeout errors are never retried (the attempt has already taken too long).
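
A hook sketch covering all three outcomes (the `database` store is a placeholder, as in the implementation example above):

```typescript
declare const database: {
  loadSession(id: string): Promise<{ expired: boolean; instanceContext: unknown } | null>;
};

const onSessionNotFound = async (sessionId: string) => {
  let record;
  try {
    record = await database.loadSession(sessionId);
  } catch (err) {
    // Outcome 3: throw -> the client receives 500 Internal Server Error
    throw new Error(`Session store unavailable: ${String(err)}`);
  }

  if (!record || record.expired) {
    // Outcome 2: return null -> the session is rejected with 400 Bad Request
    return null;
  }

  // Outcome 1: return context -> the session is restored
  return record.instanceContext;
};
```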

### Concurrency Safety

Multiple concurrent requests for the same session ID are handled safely:

- The first request triggers restoration
- Subsequent requests reuse the restored session
- No duplicate session creation
- No race conditions

This ensures correct behavior even under high load or network retries.
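
One common way to implement such an idempotency guard is a single-flight map keyed by session ID. The sketch below shows the idea; it is not the actual n8n-mcp source:

```typescript
// Deduplicate concurrent restorations of the same session ID
const inFlight = new Map<string, Promise<unknown>>();

async function restoreOnce(
  sessionId: string,
  restore: (id: string) => Promise<unknown>
): Promise<unknown> {
  const pending = inFlight.get(sessionId);
  if (pending) return pending; // later callers await the first attempt

  const attempt = restore(sessionId).finally(() => inFlight.delete(sessionId));
  inFlight.set(sessionId, attempt);
  return attempt;
}
```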
docs/bugfix-onSessionCreated-event.md (new file, 180 lines)
@@ -0,0 +1,180 @@
# Bug Fix: onSessionCreated Event Not Firing (v2.19.0)

## Summary

Fixed a critical bug where the `onSessionCreated` lifecycle event was never emitted for sessions created during the standard MCP initialize flow, completely breaking session persistence functionality.

## Impact

- **Severity**: Critical
- **Affected Version**: v2.19.0
- **Component**: Session Persistence (Phase 3)
- **Status**: ✅ Fixed

## Root Cause

The `handleRequest()` method in `http-server-single-session.ts` had two different paths for session creation:

1. **Standard initialize flow** (lines 868-943): Created the session inline but **did not emit** the `onSessionCreated` event
2. **Manual restoration flow** (line 1048): Called `createSession()`, which **correctly emitted** the event

This inconsistency meant that:
- New sessions during normal operation were **never saved to the database**
- Only manually restored sessions triggered the save event
- Session persistence was completely broken for new sessions
- Container restarts caused all sessions to be lost

## The Fix

### Location
- **File**: `src/http-server-single-session.ts`
- **Method**: `handleRequest()`
- **Line**: After line 943 (`await server.connect(transport);`)

### Code Change

Added event emission after successfully connecting the server to the transport during the initialize flow:

```typescript
// Connect the server to the transport BEFORE handling the request
logger.info('handleRequest: Connecting server to new transport');
await server.connect(transport);

// Phase 3: Emit onSessionCreated event (REQ-4)
// Fire-and-forget: don't await or block session creation
this.emitEvent('onSessionCreated', sessionIdToUse, instanceContext).catch(eventErr => {
  logger.error('Failed to emit onSessionCreated event (non-blocking)', {
    sessionId: sessionIdToUse,
    error: eventErr instanceof Error ? eventErr.message : String(eventErr)
  });
});
```

### Why This Works

1. **Consistent with the existing pattern**: Matches the `createSession()` method pattern (line 664)
2. **Non-blocking**: Uses `.catch()` so event-handler errors don't break session creation
3. **Correct timing**: Fires after `server.connect(transport)` succeeds, ensuring the session is fully initialized
4. **Same parameters**: Passes `sessionId` and `instanceContext` just like the restoration flow

## Verification

### Test Results

Created a comprehensive test suite to verify the fix:

**Test File**: `tests/unit/session/onSessionCreated-event.test.ts`

**Test Results**:
```
✓ onSessionCreated Event - Initialize Flow
  ✓ should emit onSessionCreated event when session is created during initialize flow (1594ms)

Test Files  5 passed (5)
Tests       78 passed (78)
```

**Manual Testing**:
```typescript
const server = new SingleSessionHTTPServer({
  sessionEvents: {
    onSessionCreated: async (sessionId, context) => {
      console.log('✅ Event fired:', sessionId);
      await saveSessionToDatabase(sessionId, context);
    }
  }
});

// Result: Event fires successfully on initialize!
// ✅ Event fired: 40dcc123-46bd-4994-945e-f2dbe60e54c2
```

### Behavior After Fix

1. **Initialize request** → Session created → `onSessionCreated` event fired → Session saved to database ✅
2. **Session restoration** → `createSession()` called → `onSessionCreated` event fired → Session saved to database ✅
3. **Manual restoration** → `manuallyRestoreSession()` → Session created → Event fired ✅

All three paths now correctly emit the event.

## Backward Compatibility

✅ **Fully backward compatible**:
- No breaking changes to the API
- The event handler is optional (defaults to a no-op)
- The non-blocking implementation ensures session creation succeeds even if the handler fails
- Matches the existing behavior of the `createSession()` method
- All existing tests pass

## Related Code

### Event Emission Points

1. ✅ **Standard initialize flow**: `handleRequest()` at line ~947 (NEW - fixed)
2. ✅ **Manual restoration**: `createSession()` at line 664 (EXISTING - working)
3. ✅ **Session restoration**: calls `createSession()` indirectly (EXISTING - working)

### Other Lifecycle Events

The following events are working correctly:
- `onSessionRestored`: Fires when a session is restored from the database
- `onSessionAccessed`: Fires on every request (throttling recommended; see the sketch after this list)
- `onSessionExpired`: Fires before expired-session cleanup
- `onSessionDeleted`: Fires on manual session deletion
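
Because `onSessionAccessed` fires on every request, an unthrottled handler can flood the database with writes. A minimal throttling sketch (illustrative only; `touchSessionInDatabase` is a hypothetical helper, not part of this project):

```typescript
// Persist "last accessed" at most once per minute per session,
// even though the event fires on every request.
const lastWrite = new Map<string, number>();
const THROTTLE_MS = 60_000;

const onSessionAccessed = async (sessionId: string): Promise<void> => {
  const now = Date.now();
  if (now - (lastWrite.get(sessionId) ?? 0) < THROTTLE_MS) return; // Written recently
  lastWrite.set(sessionId, now);
  await touchSessionInDatabase(sessionId); // Hypothetical persistence call
};

// Hypothetical helper, stubbed so the sketch is self-contained
async function touchSessionInDatabase(sessionId: string): Promise<void> {
  // e.g. UPDATE mcp_sessions SET last_access = CURRENT_TIMESTAMP WHERE id = ?
}
```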

## Testing Recommendations

After applying this fix, verify session persistence works:

```typescript
// 1. Start server with session events
const engine = new N8NMCPEngine({
  sessionEvents: {
    onSessionCreated: async (sessionId, context) => {
      await database.upsertSession({ sessionId, ...context });
    }
  }
});

// 2. Client connects and initializes
// 3. Verify session saved to database
const sessions = await database.query('SELECT * FROM mcp_sessions');
expect(sessions.length).toBeGreaterThan(0);

// 4. Restart server
await engine.shutdown();
await engine.start();

// 5. Client reconnects with old session ID
// 6. Verify session restored from database
```

## Impact on n8n-mcp-backend

This fix **unblocks** the multi-tenant n8n-mcp-backend service that depends on session persistence:

- ✅ Sessions now persist across container restarts
- ✅ Users no longer need to restart Claude Desktop after backend updates
- ✅ Session continuity maintained for all users
- ✅ Production deployment viable

## Lessons Learned

1. **Consistency is critical**: Session creation should follow the same pattern everywhere
2. **Event-driven architecture**: Events must fire at all creation points, not just some
3. **Testing lifecycle events**: Integration tests must verify that events fire, not just that the code runs
4. **Documentation**: Clearly document when events should fire and where

## Files Changed

- `src/http-server-single-session.ts`: Added event emission (lines 945-952)
- `tests/unit/session/onSessionCreated-event.test.ts`: New test file
- `tests/integration/session/test-onSessionCreated-event.ts`: Manual verification test

## Build Status

- ✅ TypeScript compilation: Success
- ✅ Type checking: Success
- ✅ All unit tests: 78 passed
- ✅ Integration tests: Pass
- ✅ Backward compatibility: Verified

(Binary image file changed — diff not shown; previous size 430 KiB.)

package-lock.json (generated, 5604 lines changed)
File diff suppressed because it is too large

package.json (13 lines changed)
@@ -1,6 +1,6 @@
{
  "name": "n8n-mcp",
  "version": "2.22.5",
  "version": "2.19.5",
  "description": "Integration between n8n workflow automation and Model Context Protocol (MCP)",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
@@ -139,19 +139,18 @@
    "vitest": "^3.2.4"
  },
  "dependencies": {
    "@modelcontextprotocol/sdk": "^1.20.1",
    "@n8n/n8n-nodes-langchain": "^1.115.1",
    "@modelcontextprotocol/sdk": "^1.13.2",
    "@n8n/n8n-nodes-langchain": "^1.113.1",
    "@supabase/supabase-js": "^2.57.4",
    "dotenv": "^16.5.0",
    "express": "^5.1.0",
    "express-rate-limit": "^7.1.5",
    "lru-cache": "^11.2.1",
    "n8n": "^1.116.2",
    "n8n-core": "^1.115.1",
    "n8n-workflow": "^1.113.0",
    "n8n": "^1.114.3",
    "n8n-core": "^1.113.1",
    "n8n-workflow": "^1.111.0",
    "openai": "^4.77.0",
    "sql.js": "^1.13.0",
    "tslib": "^2.6.2",
    "uuid": "^10.0.0",
    "zod": "^3.24.1"
  },

@@ -1,8 +1,17 @@
{
  "name": "n8n-mcp-runtime",
  "version": "2.22.5",
  "version": "2.19.5",
  "description": "n8n MCP Server Runtime Dependencies Only",
  "private": true,
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "require": "./dist/index.js",
      "import": "./dist/index.js"
    }
  },
  "dependencies": {
    "@modelcontextprotocol/sdk": "^1.13.2",
    "@supabase/supabase-js": "^2.57.4",
@@ -11,7 +20,6 @@
    "dotenv": "^16.5.0",
    "lru-cache": "^11.2.1",
    "sql.js": "^1.13.0",
    "tslib": "^2.6.2",
    "uuid": "^10.0.0",
    "axios": "^1.7.7"
  },

@@ -1,121 +0,0 @@
#!/usr/bin/env node

/**
 * Generate release notes from commit messages between two tags
 * Used by GitHub Actions to create automated release notes
 */

const { execSync } = require('child_process');
const fs = require('fs');
const path = require('path');

function generateReleaseNotes(previousTag, currentTag) {
  try {
    console.log(`Generating release notes from ${previousTag} to ${currentTag}`);

    // Get commits between tags
    const gitLogCommand = `git log --pretty=format:"%H|%s|%an|%ae|%ad" --date=short --no-merges ${previousTag}..${currentTag}`;
    const commitsOutput = execSync(gitLogCommand, { encoding: 'utf8' });

    if (!commitsOutput.trim()) {
      console.log('No commits found between tags');
      return 'No changes in this release.';
    }

    const commits = commitsOutput.trim().split('\n').map(line => {
      const [hash, subject, author, email, date] = line.split('|');
      return { hash, subject, author, email, date };
    });

    // Categorize commits
    const categories = {
      'feat': { title: '✨ Features', commits: [] },
      'fix': { title: '🐛 Bug Fixes', commits: [] },
      'docs': { title: '📚 Documentation', commits: [] },
      'refactor': { title: '♻️ Refactoring', commits: [] },
      'test': { title: '🧪 Testing', commits: [] },
      'perf': { title: '⚡ Performance', commits: [] },
      'style': { title: '💅 Styling', commits: [] },
      'ci': { title: '🔧 CI/CD', commits: [] },
      'build': { title: '📦 Build', commits: [] },
      'chore': { title: '🔧 Maintenance', commits: [] },
      'other': { title: '📝 Other Changes', commits: [] }
    };

    commits.forEach(commit => {
      const subject = commit.subject.toLowerCase();
      let categorized = false;

      // Check for conventional commit prefixes
      for (const [prefix, category] of Object.entries(categories)) {
        if (prefix !== 'other' && subject.startsWith(`${prefix}:`)) {
          category.commits.push(commit);
          categorized = true;
          break;
        }
      }

      // If not categorized, put in other
      if (!categorized) {
        categories.other.commits.push(commit);
      }
    });

    // Generate release notes
    const releaseNotes = [];

    for (const [key, category] of Object.entries(categories)) {
      if (category.commits.length > 0) {
        releaseNotes.push(`### ${category.title}`);
        releaseNotes.push('');

        category.commits.forEach(commit => {
          // Clean up the subject by removing the prefix if it exists
          let cleanSubject = commit.subject;
          const colonIndex = cleanSubject.indexOf(':');
          if (colonIndex !== -1 && cleanSubject.substring(0, colonIndex).match(/^(feat|fix|docs|refactor|test|perf|style|ci|build|chore)$/)) {
            cleanSubject = cleanSubject.substring(colonIndex + 1).trim();
            // Capitalize first letter
            cleanSubject = cleanSubject.charAt(0).toUpperCase() + cleanSubject.slice(1);
          }

          releaseNotes.push(`- ${cleanSubject} (${commit.hash.substring(0, 7)})`);
        });

        releaseNotes.push('');
      }
    }

    // Add commit statistics
    const totalCommits = commits.length;
    const contributors = [...new Set(commits.map(c => c.author))];

    releaseNotes.push('---');
    releaseNotes.push('');
    releaseNotes.push(`**Release Statistics:**`);
    releaseNotes.push(`- ${totalCommits} commit${totalCommits !== 1 ? 's' : ''}`);
    releaseNotes.push(`- ${contributors.length} contributor${contributors.length !== 1 ? 's' : ''}`);

    if (contributors.length <= 5) {
      releaseNotes.push(`- Contributors: ${contributors.join(', ')}`);
    }

    return releaseNotes.join('\n');

  } catch (error) {
    console.error(`Error generating release notes: ${error.message}`);
    return `Failed to generate release notes: ${error.message}`;
  }
}

// Parse command line arguments
const previousTag = process.argv[2];
const currentTag = process.argv[3];

if (!previousTag || !currentTag) {
  console.error('Usage: generate-release-notes.js <previous-tag> <current-tag>');
  process.exit(1);
}

const releaseNotes = generateReleaseNotes(previousTag, currentTag);
console.log(releaseNotes);

@@ -1,287 +0,0 @@
#!/usr/bin/env node
/**
 * Test Workflow Versioning System
 *
 * Tests the complete workflow rollback and versioning functionality:
 * - Automatic backup creation
 * - Auto-pruning to 10 versions
 * - Version history retrieval
 * - Rollback with validation
 * - Manual pruning and cleanup
 * - Storage statistics
 */

import { NodeRepository } from '../src/database/node-repository';
import { createDatabaseAdapter } from '../src/database/database-adapter';
import { WorkflowVersioningService } from '../src/services/workflow-versioning-service';
import { logger } from '../src/utils/logger';
import { existsSync } from 'fs';
import * as path from 'path';

// Mock workflow for testing
const createMockWorkflow = (id: string, name: string, nodeCount: number = 3) => ({
  id,
  name,
  active: false,
  nodes: Array.from({ length: nodeCount }, (_, i) => ({
    id: `node-${i}`,
    name: `Node ${i}`,
    type: 'n8n-nodes-base.set',
    typeVersion: 1,
    position: [250 + i * 200, 300],
    parameters: { values: { string: [{ name: `field${i}`, value: `value${i}` }] } }
  })),
  connections: nodeCount > 1 ? {
    'node-0': { main: [[{ node: 'node-1', type: 'main', index: 0 }]] },
    ...(nodeCount > 2 && { 'node-1': { main: [[{ node: 'node-2', type: 'main', index: 0 }]] } })
  } : {},
  settings: {}
});

async function runTests() {
  console.log('🧪 Testing Workflow Versioning System\n');

  // Find database path
  const possiblePaths = [
    path.join(process.cwd(), 'data', 'nodes.db'),
    path.join(__dirname, '../../data', 'nodes.db'),
    './data/nodes.db'
  ];

  let dbPath: string | null = null;
  for (const p of possiblePaths) {
    if (existsSync(p)) {
      dbPath = p;
      break;
    }
  }

  if (!dbPath) {
    console.error('❌ Database not found. Please run npm run rebuild first.');
    process.exit(1);
  }

  console.log(`📁 Using database: ${dbPath}\n`);

  // Initialize repository
  const db = await createDatabaseAdapter(dbPath);
  const repository = new NodeRepository(db);
  const service = new WorkflowVersioningService(repository);

  const workflowId = 'test-workflow-001';
  let testsPassed = 0;
  let testsFailed = 0;

  try {
    // Test 1: Create initial backup
    console.log('📝 Test 1: Create initial backup');
    const workflow1 = createMockWorkflow(workflowId, 'Test Workflow v1', 3);
    const backup1 = await service.createBackup(workflowId, workflow1, {
      trigger: 'partial_update',
      operations: [{ type: 'addNode', node: workflow1.nodes[0] }]
    });

    if (backup1.versionId && backup1.versionNumber === 1 && backup1.pruned === 0) {
      console.log('✅ Initial backup created successfully');
      console.log(`   Version ID: ${backup1.versionId}, Version Number: ${backup1.versionNumber}`);
      testsPassed++;
    } else {
      console.log('❌ Failed to create initial backup');
      testsFailed++;
    }

    // Test 2: Create multiple backups to test auto-pruning
    console.log('\n📝 Test 2: Create 12 backups to test auto-pruning (should keep only 10)');
    for (let i = 2; i <= 12; i++) {
      const workflow = createMockWorkflow(workflowId, `Test Workflow v${i}`, 3 + i);
      await service.createBackup(workflowId, workflow, {
        trigger: i % 3 === 0 ? 'full_update' : 'partial_update',
        operations: [{ type: 'addNode', node: { id: `node-${i}` } }]
      });
    }

    const versions = await service.getVersionHistory(workflowId, 100);
    if (versions.length === 10) {
      console.log(`✅ Auto-pruning works correctly (kept exactly 10 versions)`);
      console.log(`   Latest version: ${versions[0].versionNumber}, Oldest: ${versions[9].versionNumber}`);
      testsPassed++;
    } else {
      console.log(`❌ Auto-pruning failed (expected 10 versions, got ${versions.length})`);
      testsFailed++;
    }

    // Test 3: Get version history
    console.log('\n📝 Test 3: Get version history');
    const history = await service.getVersionHistory(workflowId, 5);
    if (history.length === 5 && history[0].versionNumber > history[4].versionNumber) {
      console.log(`✅ Version history retrieved successfully (${history.length} versions)`);
      console.log('   Recent versions:');
      history.forEach(v => {
        console.log(`   - v${v.versionNumber} (${v.trigger}) - ${v.workflowName} - ${(v.size / 1024).toFixed(2)} KB`);
      });
      testsPassed++;
    } else {
      console.log('❌ Failed to get version history');
      testsFailed++;
    }

    // Test 4: Get specific version
    console.log('\n📝 Test 4: Get specific version details');
    const specificVersion = await service.getVersion(history[2].id);
    if (specificVersion && specificVersion.workflowSnapshot) {
      console.log(`✅ Retrieved version ${specificVersion.versionNumber} successfully`);
      console.log(`   Workflow name: ${specificVersion.workflowName}`);
      console.log(`   Node count: ${specificVersion.workflowSnapshot.nodes.length}`);
      console.log(`   Trigger: ${specificVersion.trigger}`);
      testsPassed++;
    } else {
      console.log('❌ Failed to get specific version');
      testsFailed++;
    }

    // Test 5: Compare two versions
    console.log('\n📝 Test 5: Compare two versions');
    if (history.length >= 2) {
      const diff = await service.compareVersions(history[0].id, history[1].id);
      console.log(`✅ Version comparison successful`);
      console.log(`   Comparing v${diff.version1Number} → v${diff.version2Number}`);
      console.log(`   Added nodes: ${diff.addedNodes.length}`);
      console.log(`   Removed nodes: ${diff.removedNodes.length}`);
      console.log(`   Modified nodes: ${diff.modifiedNodes.length}`);
      console.log(`   Connection changes: ${diff.connectionChanges}`);
      testsPassed++;
    } else {
      console.log('❌ Not enough versions to compare');
      testsFailed++;
    }

    // Test 6: Manual pruning
    console.log('\n📝 Test 6: Manual pruning (keep only 5 versions)');
    const pruneResult = await service.pruneVersions(workflowId, 5);
    if (pruneResult.pruned === 5 && pruneResult.remaining === 5) {
      console.log(`✅ Manual pruning successful`);
      console.log(`   Pruned: ${pruneResult.pruned} versions, Remaining: ${pruneResult.remaining}`);
      testsPassed++;
    } else {
      console.log(`❌ Manual pruning failed (expected 5 pruned, 5 remaining, got ${pruneResult.pruned} pruned, ${pruneResult.remaining} remaining)`);
      testsFailed++;
    }

    // Test 7: Storage statistics
    console.log('\n📝 Test 7: Storage statistics');
    const stats = await service.getStorageStats();
    if (stats.totalVersions > 0 && stats.byWorkflow.length > 0) {
      console.log(`✅ Storage stats retrieved successfully`);
      console.log(`   Total versions: ${stats.totalVersions}`);
      console.log(`   Total size: ${stats.totalSizeFormatted}`);
      console.log(`   Workflows with versions: ${stats.byWorkflow.length}`);
      stats.byWorkflow.forEach(w => {
        console.log(`   - ${w.workflowName}: ${w.versionCount} versions, ${w.totalSizeFormatted}`);
      });
      testsPassed++;
    } else {
      console.log('❌ Failed to get storage stats');
      testsFailed++;
    }

    // Test 8: Delete specific version
    console.log('\n📝 Test 8: Delete specific version');
    const versionsBeforeDelete = await service.getVersionHistory(workflowId, 100);
    const versionToDelete = versionsBeforeDelete[versionsBeforeDelete.length - 1];
    const deleteResult = await service.deleteVersion(versionToDelete.id);
    const versionsAfterDelete = await service.getVersionHistory(workflowId, 100);

    if (deleteResult.success && versionsAfterDelete.length === versionsBeforeDelete.length - 1) {
      console.log(`✅ Version deletion successful`);
      console.log(`   Deleted version ${versionToDelete.versionNumber}`);
      console.log(`   Remaining versions: ${versionsAfterDelete.length}`);
      testsPassed++;
    } else {
      console.log('❌ Failed to delete version');
      testsFailed++;
    }

    // Test 9: Test different trigger types
    console.log('\n📝 Test 9: Test different trigger types');
    const workflow2 = createMockWorkflow(workflowId, 'Test Workflow Autofix', 2);
    const backupAutofix = await service.createBackup(workflowId, workflow2, {
      trigger: 'autofix',
      fixTypes: ['expression-format', 'typeversion-correction']
    });

    const workflow3 = createMockWorkflow(workflowId, 'Test Workflow Full Update', 4);
    const backupFull = await service.createBackup(workflowId, workflow3, {
      trigger: 'full_update',
      metadata: { reason: 'Major refactoring' }
    });

    const allVersions = await service.getVersionHistory(workflowId, 100);
    const autofixVersions = allVersions.filter(v => v.trigger === 'autofix');
    const fullUpdateVersions = allVersions.filter(v => v.trigger === 'full_update');
    const partialUpdateVersions = allVersions.filter(v => v.trigger === 'partial_update');

    if (autofixVersions.length > 0 && fullUpdateVersions.length > 0 && partialUpdateVersions.length > 0) {
      console.log(`✅ All trigger types working correctly`);
      console.log(`   Partial updates: ${partialUpdateVersions.length}`);
      console.log(`   Full updates: ${fullUpdateVersions.length}`);
      console.log(`   Autofixes: ${autofixVersions.length}`);
      testsPassed++;
    } else {
      console.log('❌ Failed to create versions with different trigger types');
      testsFailed++;
    }

    // Test 10: Cleanup - Delete all versions for workflow
    console.log('\n📝 Test 10: Delete all versions for workflow');
    const deleteAllResult = await service.deleteAllVersions(workflowId);
    const versionsAfterDeleteAll = await service.getVersionHistory(workflowId, 100);

    if (deleteAllResult.deleted > 0 && versionsAfterDeleteAll.length === 0) {
      console.log(`✅ Delete all versions successful`);
      console.log(`   Deleted ${deleteAllResult.deleted} versions`);
      testsPassed++;
    } else {
      console.log('❌ Failed to delete all versions');
      testsFailed++;
    }

    // Test 11: Truncate all versions (requires confirmation)
    console.log('\n📝 Test 11: Test truncate without confirmation');
    const truncateResult1 = await service.truncateAllVersions(false);
    if (truncateResult1.deleted === 0 && truncateResult1.message.includes('not confirmed')) {
      console.log(`✅ Truncate safety check works (requires confirmation)`);
      testsPassed++;
    } else {
      console.log('❌ Truncate safety check failed');
      testsFailed++;
    }

    // Summary
    console.log('\n' + '='.repeat(60));
    console.log('📊 Test Summary');
    console.log('='.repeat(60));
    console.log(`✅ Passed: ${testsPassed}`);
    console.log(`❌ Failed: ${testsFailed}`);
    console.log(`📈 Success Rate: ${((testsPassed / (testsPassed + testsFailed)) * 100).toFixed(1)}%`);
    console.log('='.repeat(60));

    if (testsFailed === 0) {
      console.log('\n🎉 All tests passed! Workflow versioning system is working correctly.');
      process.exit(0);
    } else {
      console.log('\n⚠️ Some tests failed. Please review the implementation.');
      process.exit(1);
    }

  } catch (error: any) {
    console.error('\n❌ Test suite failed with error:', error.message);
    console.error(error.stack);
    process.exit(1);
  }
}

// Run tests
runTests().catch(error => {
  console.error('Fatal error:', error);
  process.exit(1);
});

@@ -232,45 +232,15 @@ class BetterSQLiteAdapter implements DatabaseAdapter {
 */
class SQLJSAdapter implements DatabaseAdapter {
  private saveTimer: NodeJS.Timeout | null = null;
  private saveIntervalMs: number;
  private closed = false; // Prevent multiple close() calls

  // Default save interval: 5 seconds (balance between data safety and performance)
  // Configurable via SQLJS_SAVE_INTERVAL_MS environment variable
  //
  // DATA LOSS WINDOW: Up to 5 seconds of database changes may be lost if process
  // crashes before scheduleSave() timer fires. This is acceptable because:
  // 1. close() calls saveToFile() immediately on graceful shutdown
  // 2. Docker/Kubernetes SIGTERM provides 30s for cleanup (more than enough)
  // 3. The alternative (100ms interval) caused 2.2GB memory leaks in production
  // 4. MCP server is primarily read-heavy (writes are rare)
  private static readonly DEFAULT_SAVE_INTERVAL_MS = 5000;

  constructor(private db: any, private dbPath: string) {
    // Read save interval from environment or use default
    const envInterval = process.env.SQLJS_SAVE_INTERVAL_MS;
    this.saveIntervalMs = envInterval ? parseInt(envInterval, 10) : SQLJSAdapter.DEFAULT_SAVE_INTERVAL_MS;

    // Validate interval (minimum 100ms, maximum 60000ms = 1 minute)
    if (isNaN(this.saveIntervalMs) || this.saveIntervalMs < 100 || this.saveIntervalMs > 60000) {
      logger.warn(
        `Invalid SQLJS_SAVE_INTERVAL_MS value: ${envInterval} (must be 100-60000ms), ` +
        `using default ${SQLJSAdapter.DEFAULT_SAVE_INTERVAL_MS}ms`
      );
      this.saveIntervalMs = SQLJSAdapter.DEFAULT_SAVE_INTERVAL_MS;
    }

    logger.debug(`SQLJSAdapter initialized with save interval: ${this.saveIntervalMs}ms`);

    // NOTE: No initial save scheduled here (optimization)
    // Database is either:
    // 1. Loaded from existing file (already persisted), or
    // 2. New database (will be saved on first write operation)
    // Set up auto-save on changes
    this.scheduleSave();
  }

  prepare(sql: string): PreparedStatement {
    const stmt = this.db.prepare(sql);
    // Don't schedule save on prepare - only on actual writes (via SQLJSStatement.run())
    this.scheduleSave();
    return new SQLJSStatement(stmt, () => this.scheduleSave());
  }

@@ -280,18 +250,11 @@ class SQLJSAdapter implements DatabaseAdapter {
  }

  close(): void {
    if (this.closed) {
      logger.debug('SQLJSAdapter already closed, skipping');
      return;
    }

    this.saveToFile();
    if (this.saveTimer) {
      clearTimeout(this.saveTimer);
      this.saveTimer = null;
    }
    this.db.close();
    this.closed = true;
  }

  pragma(key: string, value?: any): any {
@@ -339,31 +302,18 @@ class SQLJSAdapter implements DatabaseAdapter {
      clearTimeout(this.saveTimer);
    }

    // Save after configured interval of inactivity (default: 5000ms)
    // This debouncing reduces memory churn from frequent buffer allocations
    //
    // NOTE: Under constant write load, saves may be delayed until writes stop.
    // This is acceptable because:
    // 1. MCP server is primarily read-heavy (node lookups, searches)
    // 2. Writes are rare (only during database rebuilds)
    // 3. close() saves immediately on shutdown, flushing any pending changes
    // Save after 100ms of inactivity
    this.saveTimer = setTimeout(() => {
      this.saveToFile();
    }, this.saveIntervalMs);
    }, 100);
  }

  private saveToFile(): void {
    try {
      // Export database to Uint8Array (2-5MB typical)
      const data = this.db.export();

      // Write directly without Buffer.from() copy (saves 50% memory allocation)
      // writeFileSync accepts Uint8Array directly, no need for Buffer conversion
      fsSync.writeFileSync(this.dbPath, data);
      const buffer = Buffer.from(data);
      fsSync.writeFileSync(this.dbPath, buffer);
      logger.debug(`Database saved to ${this.dbPath}`);

      // Note: 'data' reference is automatically cleared when function exits
      // V8 GC will reclaim the Uint8Array once it's no longer referenced
    } catch (error) {
      logger.error('Failed to save database', error);
    }
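
The debounced save in `scheduleSave()` above follows a standard timer-reset pattern: each write restarts the timer, so the file is written once activity goes quiet. As a standalone illustration of that pattern (a sketch, not the adapter's exact code):

```typescript
// Illustrative debounce: writes call markDirty(); the save fires only after
// intervalMs of inactivity, and flush() forces an immediate save on shutdown.
class DebouncedSaver {
  private timer: ReturnType<typeof setTimeout> | null = null;

  constructor(
    private readonly save: () => void,
    private readonly intervalMs: number = 5000 // mirrors DEFAULT_SAVE_INTERVAL_MS
  ) {}

  markDirty(): void {
    if (this.timer) clearTimeout(this.timer); // Each new write resets the timer
    this.timer = setTimeout(() => this.save(), this.intervalMs);
  }

  flush(): void {
    if (this.timer) clearTimeout(this.timer);
    this.timer = null;
    this.save(); // Immediate save, e.g. from close() on graceful shutdown
  }
}
```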

@@ -462,501 +462,4 @@ export class NodeRepository {

    return undefined;
  }

  /**
   * VERSION MANAGEMENT METHODS
   * Methods for working with node_versions and version_property_changes tables
   */

  /**
   * Save a specific node version to the database
   */
  saveNodeVersion(versionData: {
    nodeType: string;
    version: string;
    packageName: string;
    displayName: string;
    description?: string;
    category?: string;
    isCurrentMax?: boolean;
    propertiesSchema?: any;
    operations?: any;
    credentialsRequired?: any;
    outputs?: any;
    minimumN8nVersion?: string;
    breakingChanges?: any[];
    deprecatedProperties?: string[];
    addedProperties?: string[];
    releasedAt?: Date;
  }): void {
    const stmt = this.db.prepare(`
      INSERT OR REPLACE INTO node_versions (
        node_type, version, package_name, display_name, description,
        category, is_current_max, properties_schema, operations,
        credentials_required, outputs, minimum_n8n_version,
        breaking_changes, deprecated_properties, added_properties,
        released_at
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `);

    stmt.run(
      versionData.nodeType,
      versionData.version,
      versionData.packageName,
      versionData.displayName,
      versionData.description || null,
      versionData.category || null,
      versionData.isCurrentMax ? 1 : 0,
      versionData.propertiesSchema ? JSON.stringify(versionData.propertiesSchema) : null,
      versionData.operations ? JSON.stringify(versionData.operations) : null,
      versionData.credentialsRequired ? JSON.stringify(versionData.credentialsRequired) : null,
      versionData.outputs ? JSON.stringify(versionData.outputs) : null,
      versionData.minimumN8nVersion || null,
      versionData.breakingChanges ? JSON.stringify(versionData.breakingChanges) : null,
      versionData.deprecatedProperties ? JSON.stringify(versionData.deprecatedProperties) : null,
      versionData.addedProperties ? JSON.stringify(versionData.addedProperties) : null,
      versionData.releasedAt || null
    );
  }

  /**
   * Get all available versions for a specific node type
   */
  getNodeVersions(nodeType: string): any[] {
    const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);

    const rows = this.db.prepare(`
      SELECT * FROM node_versions
      WHERE node_type = ?
      ORDER BY version DESC
    `).all(normalizedType) as any[];

    return rows.map(row => this.parseNodeVersionRow(row));
  }

  /**
   * Get the latest (current max) version for a node type
   */
  getLatestNodeVersion(nodeType: string): any | null {
    const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);

    const row = this.db.prepare(`
      SELECT * FROM node_versions
      WHERE node_type = ? AND is_current_max = 1
      LIMIT 1
    `).get(normalizedType) as any;

    if (!row) return null;
    return this.parseNodeVersionRow(row);
  }

  /**
   * Get a specific version of a node
   */
  getNodeVersion(nodeType: string, version: string): any | null {
    const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);

    const row = this.db.prepare(`
      SELECT * FROM node_versions
      WHERE node_type = ? AND version = ?
    `).get(normalizedType, version) as any;

    if (!row) return null;
    return this.parseNodeVersionRow(row);
  }

  /**
   * Save a property change between versions
   */
  savePropertyChange(changeData: {
    nodeType: string;
    fromVersion: string;
    toVersion: string;
    propertyName: string;
    changeType: 'added' | 'removed' | 'renamed' | 'type_changed' | 'requirement_changed' | 'default_changed';
    isBreaking?: boolean;
    oldValue?: string;
    newValue?: string;
    migrationHint?: string;
    autoMigratable?: boolean;
    migrationStrategy?: any;
    severity?: 'LOW' | 'MEDIUM' | 'HIGH';
  }): void {
    const stmt = this.db.prepare(`
      INSERT INTO version_property_changes (
        node_type, from_version, to_version, property_name, change_type,
        is_breaking, old_value, new_value, migration_hint, auto_migratable,
        migration_strategy, severity
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `);

    stmt.run(
      changeData.nodeType,
      changeData.fromVersion,
      changeData.toVersion,
      changeData.propertyName,
      changeData.changeType,
      changeData.isBreaking ? 1 : 0,
      changeData.oldValue || null,
      changeData.newValue || null,
      changeData.migrationHint || null,
      changeData.autoMigratable ? 1 : 0,
      changeData.migrationStrategy ? JSON.stringify(changeData.migrationStrategy) : null,
      changeData.severity || 'MEDIUM'
    );
  }

  /**
   * Get property changes between two versions
   */
  getPropertyChanges(nodeType: string, fromVersion: string, toVersion: string): any[] {
    const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);

    const rows = this.db.prepare(`
      SELECT * FROM version_property_changes
      WHERE node_type = ? AND from_version = ? AND to_version = ?
      ORDER BY severity DESC, property_name
    `).all(normalizedType, fromVersion, toVersion) as any[];

    return rows.map(row => this.parsePropertyChangeRow(row));
  }

  /**
   * Get all breaking changes for upgrading from one version to another
   * Can handle multi-step upgrades (e.g., 1.0 -> 2.0 via 1.5)
   */
  getBreakingChanges(nodeType: string, fromVersion: string, toVersion?: string): any[] {
    const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);

    let sql = `
      SELECT * FROM version_property_changes
      WHERE node_type = ? AND is_breaking = 1
    `;
    const params: any[] = [normalizedType];

    if (toVersion) {
      // Get changes between specific versions
      sql += ` AND from_version >= ? AND to_version <= ?`;
      params.push(fromVersion, toVersion);
    } else {
      // Get all breaking changes from this version onwards
      sql += ` AND from_version >= ?`;
      params.push(fromVersion);
    }

    sql += ` ORDER BY from_version, to_version, severity DESC`;

    const rows = this.db.prepare(sql).all(...params) as any[];
    return rows.map(row => this.parsePropertyChangeRow(row));
  }

  /**
   * Get auto-migratable changes for a version upgrade
   */
  getAutoMigratableChanges(nodeType: string, fromVersion: string, toVersion: string): any[] {
    const normalizedType = NodeTypeNormalizer.normalizeToFullForm(nodeType);

    const rows = this.db.prepare(`
      SELECT * FROM version_property_changes
      WHERE node_type = ?
        AND from_version = ?
        AND to_version = ?
        AND auto_migratable = 1
      ORDER BY severity DESC
    `).all(normalizedType, fromVersion, toVersion) as any[];

    return rows.map(row => this.parsePropertyChangeRow(row));
  }

  /**
   * Check if a version upgrade path exists between two versions
   */
  hasVersionUpgradePath(nodeType: string, fromVersion: string, toVersion: string): boolean {
    const versions = this.getNodeVersions(nodeType);
    if (versions.length === 0) return false;

    // Check if both versions exist
    const fromExists = versions.some(v => v.version === fromVersion);
    const toExists = versions.some(v => v.version === toVersion);

    return fromExists && toExists;
  }

  /**
   * Get count of nodes with multiple versions
   */
  getVersionedNodesCount(): number {
    const result = this.db.prepare(`
      SELECT COUNT(DISTINCT node_type) as count
      FROM node_versions
    `).get() as any;
    return result.count;
  }

  /**
   * Parse node version row from database
   */
  private parseNodeVersionRow(row: any): any {
    return {
      id: row.id,
      nodeType: row.node_type,
      version: row.version,
      packageName: row.package_name,
      displayName: row.display_name,
      description: row.description,
      category: row.category,
      isCurrentMax: Number(row.is_current_max) === 1,
      propertiesSchema: row.properties_schema ? this.safeJsonParse(row.properties_schema, []) : null,
      operations: row.operations ? this.safeJsonParse(row.operations, []) : null,
      credentialsRequired: row.credentials_required ? this.safeJsonParse(row.credentials_required, []) : null,
      outputs: row.outputs ? this.safeJsonParse(row.outputs, null) : null,
      minimumN8nVersion: row.minimum_n8n_version,
      breakingChanges: row.breaking_changes ? this.safeJsonParse(row.breaking_changes, []) : [],
      deprecatedProperties: row.deprecated_properties ? this.safeJsonParse(row.deprecated_properties, []) : [],
      addedProperties: row.added_properties ? this.safeJsonParse(row.added_properties, []) : [],
      releasedAt: row.released_at,
      createdAt: row.created_at
    };
  }

  /**
   * Parse property change row from database
   */
  private parsePropertyChangeRow(row: any): any {
    return {
      id: row.id,
      nodeType: row.node_type,
      fromVersion: row.from_version,
      toVersion: row.to_version,
      propertyName: row.property_name,
      changeType: row.change_type,
      isBreaking: Number(row.is_breaking) === 1,
      oldValue: row.old_value,
      newValue: row.new_value,
      migrationHint: row.migration_hint,
      autoMigratable: Number(row.auto_migratable) === 1,
      migrationStrategy: row.migration_strategy ? this.safeJsonParse(row.migration_strategy, null) : null,
      severity: row.severity,
      createdAt: row.created_at
    };
  }

  // ========================================
  // Workflow Versioning Methods
  // ========================================

  /**
   * Create a new workflow version (backup before modification)
   */
  createWorkflowVersion(data: {
    workflowId: string;
    versionNumber: number;
    workflowName: string;
    workflowSnapshot: any;
    trigger: 'partial_update' | 'full_update' | 'autofix';
    operations?: any[];
    fixTypes?: string[];
    metadata?: any;
  }): number {
    const stmt = this.db.prepare(`
      INSERT INTO workflow_versions (
        workflow_id, version_number, workflow_name, workflow_snapshot,
        trigger, operations, fix_types, metadata
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
    `);

    const result = stmt.run(
      data.workflowId,
      data.versionNumber,
      data.workflowName,
      JSON.stringify(data.workflowSnapshot),
      data.trigger,
      data.operations ? JSON.stringify(data.operations) : null,
      data.fixTypes ? JSON.stringify(data.fixTypes) : null,
      data.metadata ? JSON.stringify(data.metadata) : null
    );

    return result.lastInsertRowid as number;
  }

  /**
   * Get workflow versions ordered by version number (newest first)
   */
  getWorkflowVersions(workflowId: string, limit?: number): any[] {
    let sql = `
      SELECT * FROM workflow_versions
      WHERE workflow_id = ?
      ORDER BY version_number DESC
    `;

    if (limit) {
      sql += ` LIMIT ?`;
      const rows = this.db.prepare(sql).all(workflowId, limit) as any[];
      return rows.map(row => this.parseWorkflowVersionRow(row));
    }

    const rows = this.db.prepare(sql).all(workflowId) as any[];
    return rows.map(row => this.parseWorkflowVersionRow(row));
  }

  /**
   * Get a specific workflow version by ID
   */
  getWorkflowVersion(versionId: number): any | null {
    const row = this.db.prepare(`
      SELECT * FROM workflow_versions WHERE id = ?
    `).get(versionId) as any;

    if (!row) return null;
    return this.parseWorkflowVersionRow(row);
  }

  /**
   * Get the latest workflow version for a workflow
   */
  getLatestWorkflowVersion(workflowId: string): any | null {
    const row = this.db.prepare(`
      SELECT * FROM workflow_versions
      WHERE workflow_id = ?
      ORDER BY version_number DESC
      LIMIT 1
    `).get(workflowId) as any;

    if (!row) return null;
    return this.parseWorkflowVersionRow(row);
  }

  /**
   * Delete a specific workflow version
   */
  deleteWorkflowVersion(versionId: number): void {
    this.db.prepare(`
      DELETE FROM workflow_versions WHERE id = ?
    `).run(versionId);
  }

  /**
   * Delete all versions for a specific workflow
   */
  deleteWorkflowVersionsByWorkflowId(workflowId: string): number {
    const result = this.db.prepare(`
      DELETE FROM workflow_versions WHERE workflow_id = ?
    `).run(workflowId);

    return result.changes;
  }

  /**
   * Prune old workflow versions, keeping only the most recent N versions
   * Returns number of versions deleted
   */
  pruneWorkflowVersions(workflowId: string, keepCount: number): number {
    // Get all versions ordered by version_number DESC
    const versions = this.db.prepare(`
      SELECT id FROM workflow_versions
      WHERE workflow_id = ?
      ORDER BY version_number DESC
    `).all(workflowId) as any[];

    // If we have fewer versions than keepCount, no pruning needed
    if (versions.length <= keepCount) {
      return 0;
    }

    // Get IDs of versions to delete (all except the most recent keepCount)
    const idsToDelete = versions.slice(keepCount).map(v => v.id);

    if (idsToDelete.length === 0) {
      return 0;
    }

    // Delete old versions
    const placeholders = idsToDelete.map(() => '?').join(',');
    const result = this.db.prepare(`
      DELETE FROM workflow_versions WHERE id IN (${placeholders})
    `).run(...idsToDelete);

    return result.changes;
  }

  /**
   * Truncate the entire workflow_versions table
   * Returns number of rows deleted
   */
  truncateWorkflowVersions(): number {
    const result = this.db.prepare(`
      DELETE FROM workflow_versions
    `).run();

    return result.changes;
  }

  /**
   * Get count of versions for a specific workflow
   */
  getWorkflowVersionCount(workflowId: string): number {
    const result = this.db.prepare(`
      SELECT COUNT(*) as count FROM workflow_versions WHERE workflow_id = ?
    `).get(workflowId) as any;

    return result.count;
  }

  /**
   * Get storage statistics for workflow versions
   */
  getVersionStorageStats(): any {
    // Total versions
    const totalResult = this.db.prepare(`
      SELECT COUNT(*) as count FROM workflow_versions
    `).get() as any;

    // Total size (approximate - sum of JSON lengths)
    const sizeResult = this.db.prepare(`
      SELECT SUM(LENGTH(workflow_snapshot)) as total_size FROM workflow_versions
    `).get() as any;

    // Per-workflow breakdown
    const byWorkflow = this.db.prepare(`
      SELECT
        workflow_id,
        workflow_name,
        COUNT(*) as version_count,
        SUM(LENGTH(workflow_snapshot)) as total_size,
        MAX(created_at) as last_backup
      FROM workflow_versions
      GROUP BY workflow_id
      ORDER BY version_count DESC
    `).all() as any[];

    return {
      totalVersions: totalResult.count,
      totalSize: sizeResult.total_size || 0,
      byWorkflow: byWorkflow.map(row => ({
        workflowId: row.workflow_id,
        workflowName: row.workflow_name,
        versionCount: row.version_count,
        totalSize: row.total_size,
        lastBackup: row.last_backup
      }))
    };
  }

  /**
   * Parse workflow version row from database
   */
  private parseWorkflowVersionRow(row: any): any {
    return {
      id: row.id,
      workflowId: row.workflow_id,
      versionNumber: row.version_number,
      workflowName: row.workflow_name,
      workflowSnapshot: this.safeJsonParse(row.workflow_snapshot, null),
      trigger: row.trigger,
      operations: row.operations ? this.safeJsonParse(row.operations, null) : null,
      fixTypes: row.fix_types ? this.safeJsonParse(row.fix_types, null) : null,
      metadata: row.metadata ? this.safeJsonParse(row.metadata, null) : null,
      createdAt: row.created_at
    };
  }
}
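
As a usage sketch of the version-management methods above (assuming an already-initialized `NodeRepository` instance named `repo`; the node type and version strings are only examples):

```typescript
// Illustrative usage of the version-management API shown in the diff above.
const nodeType = 'n8n-nodes-base.executeWorkflow'; // example node type

const versions = repo.getNodeVersions(nodeType);    // all versions, newest first
const latest = repo.getLatestNodeVersion(nodeType); // row with is_current_max = 1

if (latest && repo.hasVersionUpgradePath(nodeType, '1.0', latest.version)) {
  const breaking = repo.getBreakingChanges(nodeType, '1.0', latest.version);
  const autoFixable = repo.getAutoMigratableChanges(nodeType, '1.0', latest.version);
  console.log(`${breaking.length} breaking change(s), ${autoFixable.length} auto-migratable`);
}
```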

@@ -145,92 +145,3 @@
-- Note: Template FTS5 tables are created conditionally at runtime if FTS5 is supported
-- See template-repository.ts initializeFTS5() method
-- Node FTS5 table (nodes_fts) is created above during schema initialization

-- Node versions table for tracking all available versions of each node
-- Enables version upgrade detection and migration
CREATE TABLE IF NOT EXISTS node_versions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  node_type TEXT NOT NULL,            -- e.g., "n8n-nodes-base.executeWorkflow"
  version TEXT NOT NULL,              -- e.g., "1.0", "1.1", "2.0"
  package_name TEXT NOT NULL,         -- e.g., "n8n-nodes-base"
  display_name TEXT NOT NULL,
  description TEXT,
  category TEXT,
  is_current_max INTEGER DEFAULT 0,   -- 1 if this is the latest version
  properties_schema TEXT,             -- JSON schema for this specific version
  operations TEXT,                    -- JSON array of operations for this version
  credentials_required TEXT,          -- JSON array of required credentials
  outputs TEXT,                       -- JSON array of output definitions
  minimum_n8n_version TEXT,           -- Minimum n8n version required (e.g., "1.0.0")
  breaking_changes TEXT,              -- JSON array of breaking changes from previous version
  deprecated_properties TEXT,         -- JSON array of removed/deprecated properties
  added_properties TEXT,              -- JSON array of newly added properties
  released_at DATETIME,               -- When this version was released
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  UNIQUE(node_type, version),
  FOREIGN KEY (node_type) REFERENCES nodes(node_type) ON DELETE CASCADE
);

-- Indexes for version queries
CREATE INDEX IF NOT EXISTS idx_version_node_type ON node_versions(node_type);
CREATE INDEX IF NOT EXISTS idx_version_current_max ON node_versions(is_current_max);
CREATE INDEX IF NOT EXISTS idx_version_composite ON node_versions(node_type, version);

-- Version property changes for detailed migration tracking
-- Records specific property-level changes between versions
CREATE TABLE IF NOT EXISTS version_property_changes (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  node_type TEXT NOT NULL,
  from_version TEXT NOT NULL,         -- Version where change occurred (e.g., "1.0")
  to_version TEXT NOT NULL,           -- Target version (e.g., "1.1")
  property_name TEXT NOT NULL,        -- Property path (e.g., "parameters.inputFieldMapping")
  change_type TEXT NOT NULL CHECK(change_type IN (
    'added',               -- Property added (may be required)
    'removed',             -- Property removed/deprecated
    'renamed',             -- Property renamed
    'type_changed',        -- Property type changed
    'requirement_changed', -- Required → Optional or vice versa
    'default_changed'      -- Default value changed
  )),
  is_breaking INTEGER DEFAULT 0,      -- 1 if this is a breaking change
  old_value TEXT,                     -- For renamed/type_changed: old property name or type
  new_value TEXT,                     -- For renamed/type_changed: new property name or type
  migration_hint TEXT,                -- Human-readable migration guidance
  auto_migratable INTEGER DEFAULT 0,  -- 1 if can be automatically migrated
  migration_strategy TEXT,            -- JSON: strategy for auto-migration
  severity TEXT CHECK(severity IN ('LOW', 'MEDIUM', 'HIGH')), -- Impact severity
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  FOREIGN KEY (node_type, from_version) REFERENCES node_versions(node_type, version) ON DELETE CASCADE
);

-- Indexes for property change queries
CREATE INDEX IF NOT EXISTS idx_prop_changes_node ON version_property_changes(node_type);
CREATE INDEX IF NOT EXISTS idx_prop_changes_versions ON version_property_changes(node_type, from_version, to_version);
CREATE INDEX IF NOT EXISTS idx_prop_changes_breaking ON version_property_changes(is_breaking);
CREATE INDEX IF NOT EXISTS idx_prop_changes_auto ON version_property_changes(auto_migratable);

-- Workflow versions table for rollback and version history tracking
-- Stores full workflow snapshots before modifications for guaranteed reversibility
-- Auto-prunes to 10 versions per workflow to prevent memory leaks
CREATE TABLE IF NOT EXISTS workflow_versions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  workflow_id TEXT NOT NULL,          -- n8n workflow ID
  version_number INTEGER NOT NULL,    -- Incremental version number (1, 2, 3...)
  workflow_name TEXT NOT NULL,        -- Workflow name at time of backup
  workflow_snapshot TEXT NOT NULL,    -- Full workflow JSON before modification
  trigger TEXT NOT NULL CHECK(trigger IN (
    'partial_update',  -- Created by n8n_update_partial_workflow
    'full_update',     -- Created by n8n_update_full_workflow
    'autofix'          -- Created by n8n_autofix_workflow
  )),
  operations TEXT,                    -- JSON array of diff operations (if partial update)
  fix_types TEXT,                     -- JSON array of fix types (if autofix)
  metadata TEXT,                      -- Additional context (JSON)
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  UNIQUE(workflow_id, version_number)
);

-- Indexes for workflow version queries
CREATE INDEX IF NOT EXISTS idx_workflow_versions_workflow_id ON workflow_versions(workflow_id);
CREATE INDEX IF NOT EXISTS idx_workflow_versions_created_at ON workflow_versions(created_at);
CREATE INDEX IF NOT EXISTS idx_workflow_versions_trigger ON workflow_versions(trigger);

File diff suppressed because it is too large
@@ -23,17 +23,6 @@ import {

dotenv.config();

/**
 * MCP tool response format with optional structured content
 */
interface MCPToolResponse {
  content: Array<{
    type: 'text';
    text: string;
  }>;
  structuredContent?: unknown;
}

let expressServer: any;
let authToken: string | null = null;

@@ -415,43 +404,16 @@ export async function startFixedHTTPServer() {

      try {
        const result = await mcpServer.executeTool(toolName, toolArgs);

        // Convert result to JSON text for content field
        let responseText = JSON.stringify(result, null, 2);

        // Build MCP-compliant response with structuredContent for validation tools
        const mcpResult: MCPToolResponse = {
          content: [
            {
              type: 'text',
              text: responseText
            }
          ]
        };

        // Add structuredContent for validation tools (they have outputSchema)
        // Apply 1MB safety limit to prevent memory issues (matches STDIO server behavior)
        if (toolName.startsWith('validate_')) {
          const resultSize = responseText.length;

          if (resultSize > 1000000) {
            // Response is too large - truncate and warn
            logger.warn(
              `Validation tool ${toolName} response is very large (${resultSize} chars). ` +
              `Truncating for HTTP transport safety.`
            );
            mcpResult.content[0].text = responseText.substring(0, 999000) +
              '\n\n[Response truncated due to size limits]';
            // Don't include structuredContent for truncated responses
          } else {
            // Normal case - include structured content for MCP protocol compliance
            mcpResult.structuredContent = result;
          }
        }

        response = {
          jsonrpc: '2.0',
          result: mcpResult,
          result: {
            content: [
              {
                type: 'text',
                text: JSON.stringify(result, null, 2)
              }
            ]
          },
          id: jsonRpcRequest.id
        };
      } catch (error) {

@@ -19,6 +19,13 @@ export {
  isInstanceContext
} from './types/instance-context';

// Session restoration types (v2.19.0)
export type {
  SessionRestoreHook,
  SessionRestorationOptions,
  SessionState
} from './types/session-restoration';

// Re-export MCP SDK types for convenience
export type {
  Tool,

@@ -9,6 +9,7 @@ import { Request, Response } from 'express';
|
||||
import { SingleSessionHTTPServer } from './http-server-single-session';
|
||||
import { logger } from './utils/logger';
|
||||
import { InstanceContext } from './types/instance-context';
|
||||
import { SessionRestoreHook, SessionState } from './types/session-restoration';
|
||||
|
||||
export interface EngineHealth {
|
||||
status: 'healthy' | 'unhealthy';
|
||||
@@ -25,6 +26,71 @@ export interface EngineHealth {
|
||||
export interface EngineOptions {
|
||||
sessionTimeout?: number;
|
||||
logLevel?: 'error' | 'warn' | 'info' | 'debug';
|
||||
|
||||
/**
|
||||
* Session restoration hook for multi-tenant persistence
|
||||
* Called when a client tries to use an unknown session ID
|
||||
* Return instance context to restore the session, or null to reject
|
||||
*
|
||||
* @security IMPORTANT: Implement rate limiting in this hook to prevent abuse.
|
||||
* Malicious clients could trigger excessive database lookups by sending random
|
||||
* session IDs. Consider using express-rate-limit or similar middleware.
|
||||
*
|
||||
* @since 2.19.0
|
||||
*/
|
||||
onSessionNotFound?: SessionRestoreHook;
|
||||
|
||||
/**
|
||||
* Maximum time to wait for session restoration (milliseconds)
|
||||
* @default 5000 (5 seconds)
|
||||
* @since 2.19.0
|
||||
*/
|
||||
sessionRestorationTimeout?: number;
|
||||
|
||||
/**
|
||||
* Session lifecycle event handlers (Phase 3 - REQ-4)
|
||||
*
|
||||
* Optional callbacks for session lifecycle events:
|
||||
* - onSessionCreated: Called when a new session is created
|
||||
* - onSessionRestored: Called when a session is restored from storage
|
||||
* - onSessionAccessed: Called on EVERY request (consider throttling!)
|
||||
* - onSessionExpired: Called when a session expires
|
||||
* - onSessionDeleted: Called when a session is manually deleted
|
||||
*
|
||||
* All handlers are fire-and-forget (non-blocking).
|
||||
* Errors are logged but don't affect session operations.
|
||||
*
|
||||
* @since 2.19.0
|
||||
*/
|
||||
sessionEvents?: {
|
||||
onSessionCreated?: (sessionId: string, instanceContext: InstanceContext) => void | Promise<void>;
|
||||
onSessionRestored?: (sessionId: string, instanceContext: InstanceContext) => void | Promise<void>;
|
||||
onSessionAccessed?: (sessionId: string) => void | Promise<void>;
|
||||
onSessionExpired?: (sessionId: string) => void | Promise<void>;
|
||||
onSessionDeleted?: (sessionId: string) => void | Promise<void>;
|
||||
};
|
||||
|
||||
/**
|
||||
* Number of retry attempts for failed session restoration (Phase 4 - REQ-7)
|
||||
*
|
||||
* When the restoration hook throws an error, the system will retry
|
||||
* up to this many times with a delay between attempts.
|
||||
*
|
||||
* Timeout errors are NOT retried (already took too long).
|
||||
* The overall timeout applies to ALL retry attempts combined.
|
||||
*
|
||||
* @default 0 (no retries, opt-in)
|
||||
* @since 2.19.0
|
||||
*/
|
||||
sessionRestorationRetries?: number;
|
||||
|
||||
/**
|
||||
* Delay between retry attempts in milliseconds (Phase 4 - REQ-7)
|
||||
*
|
||||
* @default 100 (100 milliseconds)
|
||||
* @since 2.19.0
|
||||
*/
|
||||
sessionRestorationRetryDelay?: number;
|
||||
}
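
Taken together, these options let a host application plug persistent session storage into the engine. Below is a minimal sketch of that wiring; the import path and the `loadContext`/`saveSession`/`dropSession` helpers are assumptions (not part of this diff), and the hook shape is inferred from the JSDoc above.

```typescript
// Hypothetical host-application wiring for the options above.
import { N8NMCPEngine } from 'n8n-mcp'; // import path assumed

const engine = new N8NMCPEngine({
  // Restore unknown session IDs from persistent storage (rate-limit this hook!)
  onSessionNotFound: async (sessionId) => {
    const record = await loadContext(sessionId); // your DB lookup (assumed helper)
    return record ? record.instanceContext : null; // null rejects the session
  },
  sessionRestorationTimeout: 5000, // give up after 5s (the documented default)
  sessionRestorationRetries: 2,    // opt in to retrying failed hook calls
  sessionRestorationRetryDelay: 100,
  sessionEvents: {
    // Fire-and-forget: errors here are logged, not propagated
    onSessionCreated: (id, ctx) => saveSession(id, ctx), // assumed helper
    onSessionDeleted: (id) => dropSession(id),           // assumed helper
  },
});
```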
|
||||
|
||||
export class N8NMCPEngine {
|
||||
@@ -32,7 +98,7 @@ export class N8NMCPEngine {
|
||||
private startTime: Date;
|
||||
|
||||
constructor(options: EngineOptions = {}) {
|
||||
this.server = new SingleSessionHTTPServer();
|
||||
this.server = new SingleSessionHTTPServer(options);
|
||||
this.startTime = new Date();
|
||||
|
||||
if (options.logLevel) {
|
||||
@@ -97,7 +163,7 @@ export class N8NMCPEngine {
|
||||
total: Math.round(memoryUsage.heapTotal / 1024 / 1024),
|
||||
unit: 'MB'
|
||||
},
|
||||
version: '2.3.2'
|
||||
version: '2.19.4'
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error('Health check failed:', error);
|
||||
@@ -106,7 +172,7 @@ export class N8NMCPEngine {
|
||||
uptime: 0,
|
||||
sessionActive: false,
|
||||
memoryUsage: { used: 0, total: 0, unit: 'MB' },
|
||||
version: '2.3.2'
|
||||
version: '2.19.4'
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -119,6 +185,114 @@ export class N8NMCPEngine {
|
||||
return this.server.getSessionInfo();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all active session IDs (Phase 2 - REQ-5)
|
||||
* Returns array of currently active session IDs
|
||||
*
|
||||
* @returns Array of session IDs
|
||||
* @since 2.19.0
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const engine = new N8NMCPEngine();
|
||||
* const sessionIds = engine.getActiveSessions();
|
||||
* console.log(`Active sessions: ${sessionIds.length}`);
|
||||
* ```
|
||||
*/
|
||||
getActiveSessions(): string[] {
|
||||
return this.server.getActiveSessions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get session state for a specific session (Phase 2 - REQ-5)
|
||||
* Returns session state or null if session doesn't exist
|
||||
*
|
||||
* @param sessionId - The session ID to get state for
|
||||
* @returns SessionState object or null
|
||||
* @since 2.19.0
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const state = engine.getSessionState('session-123');
|
||||
* if (state) {
|
||||
* // Save to database
|
||||
* await db.saveSession(state);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
getSessionState(sessionId: string): SessionState | null {
|
||||
return this.server.getSessionState(sessionId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all session states (Phase 2 - REQ-5)
|
||||
* Returns array of all active session states for bulk backup
|
||||
*
|
||||
* @returns Array of SessionState objects
|
||||
* @since 2.19.0
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // Periodic backup every 5 minutes
|
||||
* setInterval(async () => {
|
||||
* const states = engine.getAllSessionStates();
|
||||
* for (const state of states) {
|
||||
* await database.upsertSession(state);
|
||||
* }
|
||||
* }, 300000);
|
||||
* ```
|
||||
*/
|
||||
getAllSessionStates(): SessionState[] {
|
||||
return this.server.getAllSessionStates();
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually restore a session (Phase 2 - REQ-5)
|
||||
* Creates a session with the given ID and instance context
|
||||
*
|
||||
* @param sessionId - The session ID to restore
|
||||
* @param instanceContext - Instance configuration
|
||||
* @returns true if session was restored successfully, false otherwise
|
||||
* @since 2.19.0
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // Restore session from database
|
||||
* const session = await db.loadSession('session-123');
|
||||
* if (session) {
|
||||
* const restored = engine.restoreSession(
|
||||
* session.sessionId,
|
||||
* session.instanceContext
|
||||
* );
|
||||
* console.log(`Restored: ${restored}`);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
restoreSession(sessionId: string, instanceContext: InstanceContext): boolean {
|
||||
return this.server.manuallyRestoreSession(sessionId, instanceContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually delete a session (Phase 2 - REQ-5)
|
||||
* Removes the session and cleans up resources
|
||||
*
|
||||
* @param sessionId - The session ID to delete
|
||||
* @returns true if session was deleted, false if not found
|
||||
* @since 2.19.0
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // Delete expired session
|
||||
* const deleted = engine.deleteSession('session-123');
|
||||
* if (deleted) {
|
||||
* await db.deleteSession('session-123');
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
deleteSession(sessionId: string): boolean {
|
||||
return this.server.manuallyDeleteSession(sessionId);
|
||||
}
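
The accessors above compose into a full backup/restore round trip. The sketch below is a hypothetical illustration, assuming `SessionState` carries `sessionId` and `instanceContext` as the `restoreSession` example suggests; `SessionStore` and its methods are stand-ins for whatever persistence layer you use.

```typescript
// Hypothetical backup/restore round trip built on the accessors above.
interface SessionStore {
  upsertSession(state: SessionState): Promise<void>;
  loadAllSessions(): Promise<SessionState[]>;
}

async function backupSessions(engine: N8NMCPEngine, db: SessionStore) {
  // Snapshot every live session (e.g., on an interval or on shutdown)
  for (const state of engine.getAllSessionStates()) {
    await db.upsertSession(state);
  }
}

async function restoreSessions(engine: N8NMCPEngine, db: SessionStore) {
  // Recreate each session under its original ID after a restart
  for (const state of await db.loadAllSessions()) {
    engine.restoreSession(state.sessionId, state.instanceContext);
  }
}
```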

  /**
   * Graceful shutdown for service lifecycle
   *

@@ -31,7 +31,6 @@ import { InstanceContext, validateInstanceContext } from '../types/instance-cont
import { NodeTypeNormalizer } from '../utils/node-type-normalizer';
import { WorkflowAutoFixer, AutoFixConfig } from '../services/workflow-auto-fixer';
import { ExpressionFormatValidator, ExpressionFormatIssue } from '../services/expression-format-validator';
import { WorkflowVersioningService } from '../services/workflow-versioning-service';
import { handleUpdatePartialWorkflow } from './handlers-workflow-diff';
import { telemetry } from '../telemetry';
import {
@@ -364,7 +363,6 @@ const updateWorkflowSchema = z.object({
  nodes: z.array(z.any()).optional(),
  connections: z.record(z.any()).optional(),
  settings: z.any().optional(),
  createBackup: z.boolean().optional(),
});

const listWorkflowsSchema = z.object({
@@ -417,17 +415,6 @@ const listExecutionsSchema = z.object({
  includeData: z.boolean().optional(),
});

const workflowVersionsSchema = z.object({
  mode: z.enum(['list', 'get', 'rollback', 'delete', 'prune', 'truncate']),
  workflowId: z.string().optional(),
  versionId: z.number().optional(),
  limit: z.number().default(10).optional(),
  validateBefore: z.boolean().default(true).optional(),
  deleteAll: z.boolean().default(false).optional(),
  maxVersions: z.number().default(10).optional(),
  confirmTruncate: z.boolean().default(false).optional(),
});

// Workflow Management Handlers

export async function handleCreateWorkflow(args: unknown, context?: InstanceContext): Promise<McpToolResponse> {
@@ -695,44 +682,16 @@ export async function handleGetWorkflowMinimal(args: unknown, context?: Instance
  }
}

export async function handleUpdateWorkflow(
  args: unknown,
  repository: NodeRepository,
  context?: InstanceContext
): Promise<McpToolResponse> {
export async function handleUpdateWorkflow(args: unknown, context?: InstanceContext): Promise<McpToolResponse> {
  try {
    const client = ensureApiConfigured(context);
    const input = updateWorkflowSchema.parse(args);
    const { id, createBackup, ...updateData } = input;
    const { id, ...updateData } = input;

    // If nodes/connections are being updated, validate the structure
    if (updateData.nodes || updateData.connections) {
      // Always fetch current workflow for validation (need all fields like name)
      const current = await client.getWorkflow(id);

      // Create backup before modifying workflow (default: true)
      if (createBackup !== false) {
        try {
          const versioningService = new WorkflowVersioningService(repository, client);
          const backupResult = await versioningService.createBackup(id, current, {
            trigger: 'full_update'
          });

          logger.info('Workflow backup created', {
            workflowId: id,
            versionId: backupResult.versionId,
            versionNumber: backupResult.versionNumber,
            pruned: backupResult.pruned
          });
        } catch (error: any) {
          logger.warn('Failed to create workflow backup', {
            workflowId: id,
            error: error.message
          });
          // Continue with update even if backup fails (non-blocking)
        }
      }

      const fullWorkflow = {
        ...current,
        ...updateData
@@ -1036,7 +995,7 @@ export async function handleAutofixWorkflow(

    // Generate fixes using WorkflowAutoFixer
    const autoFixer = new WorkflowAutoFixer(repository);
    const fixResult = await autoFixer.generateFixes(
    const fixResult = autoFixer.generateFixes(
      workflow,
      validationResult,
      allFormatIssues,
@@ -1086,10 +1045,8 @@ export async function handleAutofixWorkflow(
      const updateResult = await handleUpdatePartialWorkflow(
        {
          id: workflow.id,
          operations: fixResult.operations,
          createBackup: true // Ensure backup is created with autofix metadata
          operations: fixResult.operations
        },
        repository,
        context
      );

@@ -2005,191 +1962,3 @@ export async function handleDiagnostic(request: any, context?: InstanceContext):
    data: diagnostic
  };
}

export async function handleWorkflowVersions(
  args: unknown,
  repository: NodeRepository,
  context?: InstanceContext
): Promise<McpToolResponse> {
  try {
    const input = workflowVersionsSchema.parse(args);
    const client = context ? getN8nApiClient(context) : null;
    const versioningService = new WorkflowVersioningService(repository, client || undefined);

    switch (input.mode) {
      case 'list': {
        if (!input.workflowId) {
          return {
            success: false,
            error: 'workflowId is required for list mode'
          };
        }

        const versions = await versioningService.getVersionHistory(input.workflowId, input.limit);

        return {
          success: true,
          data: {
            workflowId: input.workflowId,
            versions,
            count: versions.length,
            message: `Found ${versions.length} version(s) for workflow ${input.workflowId}`
          }
        };
      }

      case 'get': {
        if (!input.versionId) {
          return {
            success: false,
            error: 'versionId is required for get mode'
          };
        }

        const version = await versioningService.getVersion(input.versionId);

        if (!version) {
          return {
            success: false,
            error: `Version ${input.versionId} not found`
          };
        }

        return {
          success: true,
          data: version
        };
      }

      case 'rollback': {
        if (!input.workflowId) {
          return {
            success: false,
            error: 'workflowId is required for rollback mode'
          };
        }

        if (!client) {
          return {
            success: false,
            error: 'n8n API not configured. Cannot perform rollback without API access.'
          };
        }

        const result = await versioningService.restoreVersion(
          input.workflowId,
          input.versionId,
          input.validateBefore
        );

        return {
          success: result.success,
          data: result.success ? result : undefined,
          error: result.success ? undefined : result.message,
          details: result.success ? undefined : {
            validationErrors: result.validationErrors
          }
        };
      }

      case 'delete': {
        if (input.deleteAll) {
          if (!input.workflowId) {
            return {
              success: false,
              error: 'workflowId is required for deleteAll mode'
            };
          }

          const result = await versioningService.deleteAllVersions(input.workflowId);

          return {
            success: true,
            data: {
              workflowId: input.workflowId,
              deleted: result.deleted,
              message: result.message
            }
          };
        } else {
          if (!input.versionId) {
            return {
              success: false,
              error: 'versionId is required for single version delete'
            };
          }

          const result = await versioningService.deleteVersion(input.versionId);

          return {
            success: result.success,
            data: result.success ? { message: result.message } : undefined,
            error: result.success ? undefined : result.message
          };
        }
      }

      case 'prune': {
        if (!input.workflowId) {
          return {
            success: false,
            error: 'workflowId is required for prune mode'
          };
        }

        const result = await versioningService.pruneVersions(
          input.workflowId,
          input.maxVersions || 10
        );

        return {
          success: true,
          data: {
            workflowId: input.workflowId,
            pruned: result.pruned,
            remaining: result.remaining,
            message: `Pruned ${result.pruned} old version(s), ${result.remaining} version(s) remaining`
          }
        };
      }

      case 'truncate': {
        if (!input.confirmTruncate) {
          return {
            success: false,
            error: 'confirmTruncate must be true to truncate all versions. This action cannot be undone.'
          };
        }

        const result = await versioningService.truncateAllVersions(true);

        return {
          success: true,
          data: {
            deleted: result.deleted,
            message: result.message
          }
        };
      }

      default:
        return {
          success: false,
          error: `Unknown mode: ${input.mode}`
        };
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      return {
        success: false,
        error: 'Invalid input',
        details: { errors: error.errors }
      };
    }

    return {
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error occurred'
    };
  }
}

@@ -11,9 +11,6 @@ import { getN8nApiClient } from './handlers-n8n-manager';
import { N8nApiError, getUserFriendlyErrorMessage } from '../utils/n8n-errors';
import { logger } from '../utils/logger';
import { InstanceContext } from '../types/instance-context';
import { validateWorkflowStructure } from '../services/n8n-validation';
import { NodeRepository } from '../database/node-repository';
import { WorkflowVersioningService } from '../services/workflow-versioning-service';

// Zod schema for the diff request
const workflowDiffSchema = z.object({
@@ -50,14 +47,9 @@ const workflowDiffSchema = z.object({
  })),
  validateOnly: z.boolean().optional(),
  continueOnError: z.boolean().optional(),
  createBackup: z.boolean().optional(),
});

export async function handleUpdatePartialWorkflow(
  args: unknown,
  repository: NodeRepository,
  context?: InstanceContext
): Promise<McpToolResponse> {
export async function handleUpdatePartialWorkflow(args: unknown, context?: InstanceContext): Promise<McpToolResponse> {
  try {
    // Debug logging (only in debug mode)
    if (process.env.DEBUG_MCP === 'true') {
@@ -96,30 +88,6 @@ export async function handleUpdatePartialWorkflow(
      throw error;
    }

    // Create backup before modifying workflow (default: true)
    if (input.createBackup !== false && !input.validateOnly) {
      try {
        const versioningService = new WorkflowVersioningService(repository, client);
        const backupResult = await versioningService.createBackup(input.id, workflow, {
          trigger: 'partial_update',
          operations: input.operations
        });

        logger.info('Workflow backup created', {
          workflowId: input.id,
          versionId: backupResult.versionId,
          versionNumber: backupResult.versionNumber,
          pruned: backupResult.pruned
        });
      } catch (error: any) {
        logger.warn('Failed to create workflow backup', {
          workflowId: input.id,
          error: error.message
        });
        // Continue with update even if backup fails (non-blocking)
      }
    }

    // Apply diff operations
    const diffEngine = new WorkflowDiffEngine();
    const diffRequest = input as WorkflowDiffRequest;
@@ -138,7 +106,6 @@ export async function handleUpdatePartialWorkflow(
        error: 'Failed to apply diff operations',
        details: {
          errors: diffResult.errors,
          warnings: diffResult.warnings,
          operationsApplied: diffResult.operationsApplied,
          applied: diffResult.applied,
          failed: diffResult.failed
@@ -155,93 +122,10 @@ export async function handleUpdatePartialWorkflow(
        data: {
          valid: true,
          operationsToApply: input.operations.length
        },
        details: {
          warnings: diffResult.warnings
        }
      };
    }

    // Validate final workflow structure after applying all operations
    // This prevents creating workflows that pass operation-level validation
    // but fail workflow-level validation (e.g., UI can't render them)
    //
    // Validation can be skipped for specific integration tests that need to test
    // n8n API behavior with edge case workflows by setting SKIP_WORKFLOW_VALIDATION=true
    if (diffResult.workflow) {
      const structureErrors = validateWorkflowStructure(diffResult.workflow);
      if (structureErrors.length > 0) {
        const skipValidation = process.env.SKIP_WORKFLOW_VALIDATION === 'true';

        logger.warn('Workflow structure validation failed after applying diff operations', {
          workflowId: input.id,
          errors: structureErrors,
          blocking: !skipValidation
        });

        // Analyze error types to provide targeted recovery guidance
        const errorTypes = new Set<string>();
        structureErrors.forEach(err => {
          if (err.includes('operator') || err.includes('singleValue')) errorTypes.add('operator_issues');
          if (err.includes('connection') || err.includes('referenced')) errorTypes.add('connection_issues');
          if (err.includes('Missing') || err.includes('missing')) errorTypes.add('missing_metadata');
          if (err.includes('branch') || err.includes('output')) errorTypes.add('branch_mismatch');
        });

        // Build recovery guidance based on error types
        const recoverySteps = [];
        if (errorTypes.has('operator_issues')) {
          recoverySteps.push('Operator structure issue detected. Use validate_node_operation to check specific nodes.');
          recoverySteps.push('Binary operators (equals, contains, greaterThan, etc.) must NOT have singleValue:true');
          recoverySteps.push('Unary operators (isEmpty, isNotEmpty, true, false) REQUIRE singleValue:true');
        }
        if (errorTypes.has('connection_issues')) {
          recoverySteps.push('Connection validation failed. Check all node connections reference existing nodes.');
          recoverySteps.push('Use cleanStaleConnections operation to remove connections to non-existent nodes.');
        }
        if (errorTypes.has('missing_metadata')) {
          recoverySteps.push('Missing metadata detected. Ensure filter-based nodes (IF v2.2+, Switch v3.2+) have complete conditions.options.');
          recoverySteps.push('Required options: {version: 2, leftValue: "", caseSensitive: true, typeValidation: "strict"}');
        }
        if (errorTypes.has('branch_mismatch')) {
          recoverySteps.push('Branch count mismatch. Ensure Switch nodes have outputs for all rules (e.g., 3 rules = 3 output branches).');
        }

        // Add generic recovery steps if no specific guidance
        if (recoverySteps.length === 0) {
          recoverySteps.push('Review the validation errors listed above');
          recoverySteps.push('Fix issues using updateNode or cleanStaleConnections operations');
          recoverySteps.push('Run validate_workflow again to verify fixes');
        }

        const errorMessage = structureErrors.length === 1
          ? `Workflow validation failed: ${structureErrors[0]}`
          : `Workflow validation failed with ${structureErrors.length} structural issues`;

        // If validation is not skipped, return error and block the save
        if (!skipValidation) {
          return {
            success: false,
            error: errorMessage,
            details: {
              errors: structureErrors,
              errorCount: structureErrors.length,
              operationsApplied: diffResult.operationsApplied,
              applied: diffResult.applied,
              recoveryGuidance: recoverySteps,
              note: 'Operations were applied but created an invalid workflow structure. The workflow was NOT saved to n8n to prevent UI rendering errors.',
              autoSanitizationNote: 'Auto-sanitization runs on all nodes during updates to fix operator structures and add missing metadata. However, it cannot fix all issues (e.g., broken connections, branch mismatches). Use the recovery guidance above to resolve remaining issues.'
            }
          };
        }
        // Validation skipped: log warning but continue (for specific integration tests)
        logger.info('Workflow validation skipped (SKIP_WORKFLOW_VALIDATION=true): Allowing workflow with validation warnings to proceed', {
          workflowId: input.id,
          warningCount: structureErrors.length
        });
      }
    }

    // Update workflow via API
    try {
      const updatedWorkflow = await client.updateWorkflow(input.id, diffResult.workflow!);
@@ -256,8 +140,7 @@ export async function handleUpdatePartialWorkflow(
          workflowName: updatedWorkflow.name,
          applied: diffResult.applied,
          failed: diffResult.failed,
          errors: diffResult.errors,
          warnings: diffResult.warnings
          errors: diffResult.errors
        }
      };
    } catch (error) {

@@ -128,25 +128,7 @@ export class N8NDocumentationMCPServer {
    this.server = new Server(
      {
        name: 'n8n-documentation-mcp',
        version: PROJECT_VERSION,
        icons: [
          {
            src: "https://www.n8n-mcp.com/logo.png",
            mimeType: "image/png",
            sizes: ["192x192"]
          },
          {
            src: "https://www.n8n-mcp.com/logo-128.png",
            mimeType: "image/png",
            sizes: ["128x128"]
          },
          {
            src: "https://www.n8n-mcp.com/logo-48.png",
            mimeType: "image/png",
            sizes: ["48x48"]
          }
        ],
        websiteUrl: "https://n8n-mcp.com"
        version: '1.0.0',
      },
      {
        capabilities: {
@@ -285,6 +267,13 @@ export class N8NDocumentationMCPServer {
  private dbHealthChecked: boolean = false;

  private async validateDatabaseHealth(): Promise<void> {
    // CRITICAL: Skip all database validation in test mode
    // This allows session lifecycle tests to use empty :memory: databases
    if (process.env.NODE_ENV === 'test') {
      logger.debug('Skipping database validation in test mode');
      return;
    }

    if (!this.db) return;

    try {
@@ -296,18 +285,26 @@ export class N8NDocumentationMCPServer {
        throw new Error('Database is empty. Run "npm run rebuild" to populate node data.');
      }

      // Check if FTS5 table exists
      const ftsExists = this.db.prepare(`
        SELECT name FROM sqlite_master
        WHERE type='table' AND name='nodes_fts'
      `).get();
      // Check FTS5 support before attempting FTS5 queries
      // sql.js doesn't support FTS5, so we need to skip FTS5 validation for sql.js databases
      const hasFTS5 = this.db.checkFTS5Support();

      if (!ftsExists) {
        logger.warn('FTS5 table missing - search performance will be degraded. Please run: npm run rebuild');
      if (!hasFTS5) {
        logger.warn('FTS5 not supported (likely using sql.js) - search will use basic queries');
      } else {
        const ftsCount = this.db.prepare('SELECT COUNT(*) as count FROM nodes_fts').get() as { count: number };
        if (ftsCount.count === 0) {
          logger.warn('FTS5 index is empty - search will not work properly. Please run: npm run rebuild');
        // Only check FTS5 table if FTS5 is supported
        const ftsExists = this.db.prepare(`
          SELECT name FROM sqlite_master
          WHERE type='table' AND name='nodes_fts'
        `).get();

        if (!ftsExists) {
          logger.warn('FTS5 table missing - search performance will be degraded. Please run: npm run rebuild');
        } else {
          const ftsCount = this.db.prepare('SELECT COUNT(*) as count FROM nodes_fts').get() as { count: number };
          if (ftsCount.count === 0) {
            logger.warn('FTS5 index is empty - search will not work properly. Please run: npm run rebuild');
          }
        }
      }
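
The branch above relies on a `checkFTS5Support()` helper on the database adapter, whose implementation is not shown in this diff. A common way to probe for FTS5, sketched below as an assumption rather than the adapter's actual code, is to attempt to create a throwaway virtual table:

```typescript
// Hypothetical FTS5 capability probe (not the adapter's actual code):
// creating an FTS5 virtual table throws if the SQLite build lacks FTS5,
// which is the case for sql.js.
function checkFTS5Support(db: { exec(sql: string): void }): boolean {
  try {
    db.exec('CREATE VIRTUAL TABLE IF NOT EXISTS fts5_probe USING fts5(content)');
    db.exec('DROP TABLE IF EXISTS fts5_probe');
    return true;
  } catch {
    return false;
  }
}
```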

@@ -1009,10 +1006,10 @@ export class N8NDocumentationMCPServer {
        return n8nHandlers.handleGetWorkflowMinimal(args, this.instanceContext);
      case 'n8n_update_full_workflow':
        this.validateToolParams(name, args, ['id']);
        return n8nHandlers.handleUpdateWorkflow(args, this.repository!, this.instanceContext);
        return n8nHandlers.handleUpdateWorkflow(args, this.instanceContext);
      case 'n8n_update_partial_workflow':
        this.validateToolParams(name, args, ['id', 'operations']);
        return handleUpdatePartialWorkflow(args, this.repository!, this.instanceContext);
        return handleUpdatePartialWorkflow(args, this.instanceContext);
      case 'n8n_delete_workflow':
        this.validateToolParams(name, args, ['id']);
        return n8nHandlers.handleDeleteWorkflow(args, this.instanceContext);
@@ -1050,9 +1047,6 @@ export class N8NDocumentationMCPServer {
      case 'n8n_diagnostic':
        // No required parameters
        return n8nHandlers.handleDiagnostic({ params: { arguments: args } }, this.instanceContext);
      case 'n8n_workflow_versions':
        this.validateToolParams(name, args, ['mode']);
        return n8nHandlers.handleWorkflowVersions(args, this.repository!, this.instanceContext);

      default:
        throw new Error(`Unknown tool: ${name}`);
@@ -1286,13 +1280,13 @@ export class N8NDocumentationMCPServer {
        JOIN nodes_fts ON n.rowid = nodes_fts.rowid
        WHERE nodes_fts MATCH ?
        ORDER BY
          rank,
          CASE
            WHEN LOWER(n.display_name) = LOWER(?) THEN 0
            WHEN LOWER(n.display_name) LIKE LOWER(?) THEN 1
            WHEN LOWER(n.node_type) LIKE LOWER(?) THEN 2
            WHEN n.display_name = ? THEN 0
            WHEN n.display_name LIKE ? THEN 1
            WHEN n.node_type LIKE ? THEN 2
            ELSE 3
          END,
          rank,
          n.display_name
        LIMIT ?
      `).all(ftsQuery, cleanedQuery, `%${cleanedQuery}%`, `%${cleanedQuery}%`, limit) as (NodeRow & { rank: number })[];

@@ -48,7 +48,7 @@ An n8n AI Agent workflow typically consists of:
   - Manages conversation flow
   - Decides when to use tools
   - Iterates until task is complete
   - Supports fallback models for reliability
   - Supports fallback models (v2.1+)

3. **Language Model**: The AI brain
   - OpenAI GPT-4, Claude, Gemini, etc.
@@ -441,7 +441,7 @@ For real-time user experience:

### Pattern 2: Fallback Language Models

For production reliability with fallback language models:
For production reliability (requires AI Agent v2.1+):

\`\`\`typescript
n8n_update_partial_workflow({
@@ -724,7 +724,7 @@ n8n_validate_workflow({id: "workflow_id"})
  'Always validate workflows after making changes',
  'AI connections require sourceOutput parameter',
  'Streaming mode has specific constraints',
  'Fallback models require AI Agent node with fallback support'
  'Some features require specific AI Agent versions (v2.1+ for fallback)'
],
relatedTools: [
  'n8n_create_workflow',

@@ -11,8 +11,7 @@ export const validateNodeOperationDoc: ToolDocumentation = {
    tips: [
      'Profile choices: minimal (editing), runtime (execution), ai-friendly (balanced), strict (deployment)',
      'Returns fixes you can apply directly',
      'Operation-aware - knows Slack post needs text',
      'Validates operator structures for IF and Switch nodes with conditions'
      'Operation-aware - knows Slack post needs text'
    ]
  },
  full: {
@@ -72,9 +71,7 @@ export const validateNodeOperationDoc: ToolDocumentation = {
      'Validate configuration before workflow execution',
      'Debug why a node isn\'t working as expected',
      'Generate configuration fixes automatically',
      'Different validation for editing vs production',
      'Check IF/Switch operator structures (binary vs unary operators)',
      'Validate conditions.options metadata for filter-based nodes'
      'Different validation for editing vs production'
    ],
    performance: '<100ms for most nodes, <200ms for complex nodes with many conditions',
    bestPractices: [
@@ -88,10 +85,7 @@ export const validateNodeOperationDoc: ToolDocumentation = {
    pitfalls: [
      'Must include operation fields for multi-operation nodes',
      'Fixes are suggestions - review before applying',
      'Profile affects what\'s validated - minimal skips many checks',
      '**Binary vs Unary operators**: Binary operators (equals, contains, greaterThan) must NOT have singleValue:true. Unary operators (isEmpty, isNotEmpty, true, false) REQUIRE singleValue:true',
      '**IF and Switch nodes with conditions**: Must have complete conditions.options structure: {version: 2, leftValue: "", caseSensitive: true/false, typeValidation: "strict"}',
      '**Operator type field**: Must be data type (string/number/boolean/dateTime/array/object), NOT operation name (e.g., use type:"string" operation:"equals", not type:"equals")'
      'Profile affects what\'s validated - minimal skips many checks'
    ],
    relatedTools: ['validate_node_minimal for quick checks', 'get_node_essentials for valid examples', 'validate_workflow for complete workflow validation']
  }

@@ -11,8 +11,7 @@ export const validateWorkflowDoc: ToolDocumentation = {
    tips: [
      'Always validate before n8n_create_workflow to catch errors early',
      'Use options.profile="minimal" for quick checks during development',
      'AI tool connections are automatically validated for proper node references',
      'Detects operator structure issues (binary vs unary, singleValue requirements)'
      'AI tool connections are automatically validated for proper node references'
    ]
  },
  full: {
@@ -68,9 +67,7 @@ export const validateWorkflowDoc: ToolDocumentation = {
      'Use minimal profile during development, strict profile before production',
      'Pay attention to warnings - they often indicate potential runtime issues',
      'Validate after any workflow modifications, especially connection changes',
      'Check statistics to understand workflow complexity',
      '**Auto-sanitization runs during create/update**: Operator structures and missing metadata are automatically fixed when workflows are created or updated, but validation helps catch issues before they reach n8n',
      'If validation detects operator issues, they will be auto-fixed during n8n_create_workflow or n8n_update_partial_workflow'
      'Check statistics to understand workflow complexity'
    ],
    pitfalls: [
      'Large workflows (100+ nodes) may take longer to validate',

@@ -4,17 +4,15 @@ export const n8nAutofixWorkflowDoc: ToolDocumentation = {
  name: 'n8n_autofix_workflow',
  category: 'workflow_management',
  essentials: {
    description: 'Automatically fix common workflow validation errors - expression formats, typeVersions, error outputs, webhook paths, and smart version upgrades',
    description: 'Automatically fix common workflow validation errors - expression formats, typeVersions, error outputs, webhook paths',
    keyParameters: ['id', 'applyFixes'],
    example: 'n8n_autofix_workflow({id: "wf_abc123", applyFixes: false})',
    performance: 'Network-dependent (200-1500ms) - fetches, validates, and optionally updates workflow with smart migrations',
    performance: 'Network-dependent (200-1000ms) - fetches, validates, and optionally updates workflow',
    tips: [
      'Use applyFixes: false to preview changes before applying',
      'Set confidenceThreshold to control fix aggressiveness (high/medium/low)',
      'Supports expression formats, typeVersion issues, error outputs, node corrections, webhook paths, AND version upgrades',
      'High-confidence fixes (≥90%) are safe for auto-application',
      'Version upgrades include smart migration with breaking change detection',
      'Post-update guidance provides AI-friendly step-by-step instructions for manual changes'
      'Supports fixing expression formats, typeVersion issues, error outputs, node type corrections, and webhook paths',
      'High-confidence fixes (≥90%) are safe for auto-application'
    ]
  },
  full: {
@@ -41,20 +39,6 @@ The auto-fixer can resolve:
   - Sets both 'path' parameter and 'webhookId' field to the same UUID
   - Ensures webhook nodes become functional with valid endpoints
   - High confidence fix as UUID generation is deterministic
6. **Smart Version Upgrades** (NEW): Proactively upgrades nodes to their latest versions:
   - Detects outdated node versions and recommends upgrades
   - Applies smart migrations with auto-migratable property changes
   - Handles breaking changes intelligently (Execute Workflow v1.0→v1.1, Webhook v2.0→v2.1, etc.)
   - Generates UUIDs for required fields (webhookId), sets sensible defaults
   - HIGH confidence for non-breaking upgrades, MEDIUM for breaking changes with auto-migration
   - Example: Execute Workflow v1.0→v1.1 adds inputFieldMapping automatically
7. **Version Migration Guidance** (NEW): Documents complex migrations requiring manual intervention:
   - Identifies breaking changes that cannot be auto-migrated
   - Provides AI-friendly post-update guidance with step-by-step instructions
   - Lists required actions by priority (CRITICAL, HIGH, MEDIUM, LOW)
   - Documents behavior changes and their impact
   - Estimates time required for manual migration steps
   - MEDIUM/LOW confidence - requires review before applying

The tool uses a confidence-based system to ensure safe fixes:
- **High (≥90%)**: Safe to auto-apply (exact matches, known patterns)
@@ -76,7 +60,7 @@ Requires N8N_API_URL and N8N_API_KEY environment variables to be configured.`,
    fixTypes: {
      type: 'array',
      required: false,
      description: 'Types of fixes to apply. Options: ["expression-format", "typeversion-correction", "error-output-config", "node-type-correction", "webhook-missing-path", "typeversion-upgrade", "version-migration"]. Default: all types. NEW: "typeversion-upgrade" for smart version upgrades, "version-migration" for complex migration guidance.'
      description: 'Types of fixes to apply. Options: ["expression-format", "typeversion-correction", "error-output-config", "node-type-correction", "webhook-missing-path"]. Default: all types.'
    },
    confidenceThreshold: {
      type: 'string',
@@ -94,21 +78,13 @@ Requires N8N_API_URL and N8N_API_KEY environment variables to be configured.`,
- fixes: Detailed list of individual fixes with before/after values
- summary: Human-readable summary of fixes
- stats: Statistics by fix type and confidence level
- applied: Boolean indicating if fixes were applied (when applyFixes: true)
- postUpdateGuidance: (NEW) Array of AI-friendly migration guidance for version upgrades, including:
  * Required actions by priority (CRITICAL, HIGH, MEDIUM, LOW)
  * Deprecated properties to remove
  * Behavior changes and their impact
  * Step-by-step migration instructions
  * Estimated time for manual changes`,
- applied: Boolean indicating if fixes were applied (when applyFixes: true)`,
    examples: [
      'n8n_autofix_workflow({id: "wf_abc123"}) - Preview all possible fixes including version upgrades',
      'n8n_autofix_workflow({id: "wf_abc123"}) - Preview all possible fixes',
      'n8n_autofix_workflow({id: "wf_abc123", applyFixes: true}) - Apply all medium+ confidence fixes',
      'n8n_autofix_workflow({id: "wf_abc123", applyFixes: true, confidenceThreshold: "high"}) - Only apply high-confidence fixes',
      'n8n_autofix_workflow({id: "wf_abc123", fixTypes: ["expression-format"]}) - Only fix expression format issues',
      'n8n_autofix_workflow({id: "wf_abc123", fixTypes: ["webhook-missing-path"]}) - Only fix webhook path issues',
      'n8n_autofix_workflow({id: "wf_abc123", fixTypes: ["typeversion-upgrade"]}) - NEW: Only upgrade node versions with smart migrations',
      'n8n_autofix_workflow({id: "wf_abc123", fixTypes: ["typeversion-upgrade", "version-migration"]}) - NEW: Upgrade versions and provide migration guidance',
      'n8n_autofix_workflow({id: "wf_abc123", applyFixes: true, maxFixes: 10}) - Apply up to 10 fixes'
    ],
    useCases: [
@@ -118,23 +94,16 @@ Requires N8N_API_URL and N8N_API_KEY environment variables to be configured.`,
      'Cleaning up workflows before production deployment',
      'Batch fixing common issues across multiple workflows',
      'Migrating workflows between n8n instances with different versions',
      'Repairing webhook nodes that lost their path configuration',
      'Upgrading Execute Workflow nodes from v1.0 to v1.1+ with automatic inputFieldMapping',
      'Modernizing webhook nodes to v2.1+ with stable webhookId fields',
      'Proactively keeping workflows up-to-date with latest node versions',
      'Getting detailed migration guidance for complex breaking changes'
      'Repairing webhook nodes that lost their path configuration'
    ],
    performance: 'Depends on workflow size and number of issues. Preview mode: 200-500ms. Apply mode: 500-1500ms for medium workflows with version upgrades. Node similarity matching and version metadata are cached for 5 minutes for improved performance on repeated validations.',
    performance: 'Depends on workflow size and number of issues. Preview mode: 200-500ms. Apply mode: 500-1000ms for medium workflows. Node similarity matching is cached for 5 minutes for improved performance on repeated validations.',
    bestPractices: [
      'Always preview fixes first (applyFixes: false) before applying',
      'Start with high confidence threshold for production workflows',
      'Review the fix summary to understand what changed',
      'Test workflows after auto-fixing to ensure expected behavior',
      'Use fixTypes parameter to target specific issue categories',
      'Keep maxFixes reasonable to avoid too many changes at once',
      'NEW: Review postUpdateGuidance for version upgrades - contains step-by-step migration instructions',
      'NEW: Test workflows after version upgrades - behavior may change even with successful auto-migration',
      'NEW: Apply version upgrades incrementally - start with high-confidence, non-breaking upgrades'
      'Keep maxFixes reasonable to avoid too many changes at once'
    ],
    pitfalls: [
      'Some fixes may change workflow behavior - always test after fixing',
@@ -143,12 +112,7 @@ Requires N8N_API_URL and N8N_API_KEY environment variables to be configured.`,
      'Node type corrections only work for known node types in the database',
      'Cannot fix structural issues like missing nodes or invalid connections',
      'TypeVersion downgrades might remove node features added in newer versions',
      'Generated webhook paths are new UUIDs - existing webhook URLs will change',
      'NEW: Version upgrades may introduce breaking changes - review postUpdateGuidance carefully',
      'NEW: Auto-migrated properties use sensible defaults which may not match your use case',
      'NEW: Execute Workflow v1.1+ requires explicit inputFieldMapping - automatic mapping uses empty array',
      'NEW: Some breaking changes cannot be auto-migrated and require manual intervention',
      'NEW: Version history is based on registry - unknown nodes cannot be upgraded'
      'Generated webhook paths are new UUIDs - existing webhook URLs will change'
    ],
    relatedTools: [
      'n8n_validate_workflow',

@@ -11,8 +11,7 @@ export const n8nCreateWorkflowDoc: ToolDocumentation = {
    tips: [
      'Workflow created inactive',
      'Returns ID for future updates',
      'Validate first with validate_workflow',
      'Auto-sanitization fixes operator structures and missing metadata during creation'
      'Validate first with validate_workflow'
    ]
  },
  full: {
@@ -91,9 +90,7 @@ n8n_create_workflow({
      'Workflows created in INACTIVE state - must activate separately',
      'Node IDs must be unique within workflow',
      'Credentials must be configured separately in n8n',
      'Node type names must include package prefix (e.g., "n8n-nodes-base.slack")',
      '**Auto-sanitization runs on creation**: All nodes sanitized before workflow created (operator structures fixed, missing metadata added)',
      '**Auto-sanitization cannot prevent all failures**: Broken connections or invalid node configurations may still cause creation to fail'
      'Node type names must include package prefix (e.g., "n8n-nodes-base.slack")'
    ],
    relatedTools: ['validate_workflow', 'n8n_update_partial_workflow', 'n8n_trigger_webhook_workflow']
  }

@@ -17,9 +17,7 @@ export const n8nUpdatePartialWorkflowDoc: ToolDocumentation = {
      'Use continueOnError mode for best-effort bulk operations',
      'Validate with validateOnly first',
      'For AI connections, specify sourceOutput type (ai_languageModel, ai_tool, etc.)',
      'Batch AI component connections for atomic updates',
      'Auto-sanitization: ALL nodes auto-fixed during updates (operator structures, missing metadata)',
      'Node renames automatically update all connection references - no manual connection operations needed'
      'Batch AI component connections for atomic updates'
    ]
  },
  full: {
@@ -81,10 +79,6 @@ Full support for all 8 AI connection types used in n8n AI workflows:
- Multiple tools: Batch multiple \`sourceOutput: "ai_tool"\` connections to one AI Agent
- Vector retrieval: Chain ai_embedding → ai_vectorStore → ai_tool → AI Agent

**Important Notes**:
- **AI nodes do NOT require main connections**: Nodes like OpenAI Chat Model, Postgres Chat Memory, Embeddings OpenAI, and Supabase Vector Store use AI-specific connection types exclusively. They should ONLY have connections like \`ai_languageModel\`, \`ai_memory\`, \`ai_embedding\`, or \`ai_tool\` - NOT \`main\` connections.
- **Fixed in v2.21.1**: Validation now correctly recognizes AI nodes that only have AI-specific connections without requiring \`main\` connections (resolves issue #357).

**Best Practices**:
- Always specify \`sourceOutput\` for AI connections (defaults to "main" if omitted)
- Connect language model BEFORE creating/enabling AI Agent (validation requirement)
@@ -100,93 +94,7 @@ The **cleanStaleConnections** operation automatically removes broken connection
Set **continueOnError: true** to apply valid operations even if some fail. Returns detailed results showing which operations succeeded/failed. Perfect for bulk cleanup operations.

### Graceful Error Handling
Add **ignoreErrors: true** to removeConnection operations to prevent failures when connections don't exist.

## Auto-Sanitization System

### What Gets Auto-Fixed
When ANY workflow update is made, ALL nodes in the workflow are automatically sanitized to ensure complete metadata and correct structure:

1. **Operator Structure Fixes** (see the sketch after this list):
   - Binary operators (equals, contains, greaterThan, etc.) automatically have \`singleValue\` removed
   - Unary operators (isEmpty, isNotEmpty, true, false) automatically get \`singleValue: true\` added
   - Invalid operator structures (e.g., \`{type: "isNotEmpty"}\`) are corrected to \`{type: "boolean", operation: "isNotEmpty"}\`

2. **Missing Metadata Added**:
   - IF nodes with conditions get complete \`conditions.options\` structure if missing
   - Switch nodes with conditions get complete \`conditions.options\` for all rules
   - Required fields: \`{version: 2, leftValue: "", caseSensitive: true, typeValidation: "strict"}\`

### Sanitization Scope
- Runs on **ALL nodes** in the workflow, not just modified ones
- Triggered by ANY update operation (addNode, updateNode, addConnection, etc.)
- Prevents workflow corruption that would make UI unrenderable

### Limitations
Auto-sanitization CANNOT fix:
- Broken connections (connections referencing non-existent nodes) - use \`cleanStaleConnections\`
- Branch count mismatches (e.g., Switch with 3 rules but only 2 outputs) - requires manual connection fixes
- Workflows in paradoxical corrupt states (API returns corrupt data, API rejects updates) - must recreate workflow

### Recovery Guidance
If validation still fails after auto-sanitization:
1. Check error details for specific issues
2. Use \`validate_workflow\` to see all validation errors
3. For connection issues, use \`cleanStaleConnections\` operation
4. For branch mismatches, add missing output connections
5. For paradoxical corrupted workflows, create new workflow and migrate nodes

## Automatic Connection Reference Updates

When you rename a node using **updateNode**, all connection references throughout the workflow are automatically updated. Both the connection source keys and target references are updated for all connection types (main, error, ai_tool, ai_languageModel, ai_memory, etc.) and all branch configurations (IF node branches, Switch node cases, error outputs).

### Basic Example
\`\`\`javascript
// Rename a node - connections update automatically
n8n_update_partial_workflow({
  id: "wf_123",
  operations: [{
    type: "updateNode",
    nodeId: "node_abc",
    updates: { name: "Data Processor" }
  }]
});
// All incoming and outgoing connections now reference "Data Processor"
\`\`\`

### Multi-Output Node Example
\`\`\`javascript
// Rename nodes in a branching workflow
n8n_update_partial_workflow({
  id: "workflow_id",
  operations: [
    {
      type: "updateNode",
      nodeId: "if_node_id",
      updates: { name: "Value Checker" }
    },
    {
      type: "updateNode",
      nodeId: "error_node_id",
      updates: { name: "Error Handler" }
    }
  ]
});
// IF node branches and error connections automatically updated
\`\`\`

### Name Collision Protection
Attempting to rename a node to an existing name returns a clear error:
\`\`\`
Cannot rename node "Old Name" to "New Name": A node with that name already exists (id: abc123...).
Please choose a different name.
\`\`\`

### Usage Notes
- Simply rename nodes with updateNode - no manual connection operations needed
- Multiple renames in one call work atomically
- Can rename a node and add/remove connections using the new name in the same batch
- Use \`validateOnly: true\` to preview effects before applying`,
Add **ignoreErrors: true** to removeConnection operations to prevent failures when connections don't exist.`,
  parameters: {
    id: { type: 'string', required: true, description: 'Workflow ID to update' },
    operations: {
@@ -219,7 +127,7 @@ Please choose a different name.
      '// Connect memory to AI Agent\nn8n_update_partial_workflow({id: "ai3", operations: [{type: "addConnection", source: "Window Buffer Memory", target: "AI Agent", sourceOutput: "ai_memory"}]})',
      '// Connect output parser to AI Agent\nn8n_update_partial_workflow({id: "ai4", operations: [{type: "addConnection", source: "Structured Output Parser", target: "AI Agent", sourceOutput: "ai_outputParser"}]})',
      '// Complete AI Agent setup: Add language model, tools, and memory\nn8n_update_partial_workflow({id: "ai5", operations: [\n  {type: "addConnection", source: "OpenAI Chat Model", target: "AI Agent", sourceOutput: "ai_languageModel"},\n  {type: "addConnection", source: "HTTP Request Tool", target: "AI Agent", sourceOutput: "ai_tool"},\n  {type: "addConnection", source: "Code Tool", target: "AI Agent", sourceOutput: "ai_tool"},\n  {type: "addConnection", source: "Window Buffer Memory", target: "AI Agent", sourceOutput: "ai_memory"}\n]})',
      '// Add fallback model to AI Agent for reliability\nn8n_update_partial_workflow({id: "ai6", operations: [\n  {type: "addConnection", source: "OpenAI Chat Model", target: "AI Agent", sourceOutput: "ai_languageModel", targetIndex: 0},\n  {type: "addConnection", source: "Anthropic Chat Model", target: "AI Agent", sourceOutput: "ai_languageModel", targetIndex: 1}\n]})',
      '// Add fallback model to AI Agent (requires v2.1+)\nn8n_update_partial_workflow({id: "ai6", operations: [\n  {type: "addConnection", source: "OpenAI Chat Model", target: "AI Agent", sourceOutput: "ai_languageModel", targetIndex: 0},\n  {type: "addConnection", source: "Anthropic Chat Model", target: "AI Agent", sourceOutput: "ai_languageModel", targetIndex: 1}\n]})',
      '// Vector Store setup: Connect embeddings and documents\nn8n_update_partial_workflow({id: "ai7", operations: [\n  {type: "addConnection", source: "Embeddings OpenAI", target: "Pinecone Vector Store", sourceOutput: "ai_embedding"},\n  {type: "addConnection", source: "Default Data Loader", target: "Pinecone Vector Store", sourceOutput: "ai_document"}\n]})',
      '// Connect Vector Store Tool to AI Agent (retrieval setup)\nn8n_update_partial_workflow({id: "ai8", operations: [\n  {type: "addConnection", source: "Pinecone Vector Store", target: "Vector Store Tool", sourceOutput: "ai_vectorStore"},\n  {type: "addConnection", source: "Vector Store Tool", target: "AI Agent", sourceOutput: "ai_tool"}\n]})',
      '// Rewire AI Agent to use different language model\nn8n_update_partial_workflow({id: "ai9", operations: [{type: "rewireConnection", source: "AI Agent", from: "OpenAI Chat Model", to: "Anthropic Chat Model", sourceOutput: "ai_languageModel"}]})',
@@ -272,14 +180,8 @@ Please choose a different name.
      'Use "updates" property for updateNode operations: {type: "updateNode", updates: {...}}',
      'Smart parameters (branch, case) only work with IF and Switch nodes - ignored for other node types',
      'Explicit sourceIndex overrides smart parameters (branch, case) if both provided',
      '**CRITICAL**: For If nodes, ALWAYS use branch="true"/"false" instead of sourceIndex. Using sourceIndex=0 for multiple connections will put them ALL on the TRUE branch (main[0]), breaking your workflow logic!',
      '**CRITICAL**: For Switch nodes, ALWAYS use case=N instead of sourceIndex. Using same sourceIndex for multiple connections will put them on the same case output.',
      'cleanStaleConnections removes ALL broken connections - cannot be selective',
      'replaceConnections overwrites entire connections object - all previous connections lost',
      '**Auto-sanitization behavior**: Binary operators (equals, contains) automatically have singleValue removed; unary operators (isEmpty, isNotEmpty) automatically get singleValue:true added',
      '**Auto-sanitization runs on ALL nodes**: When ANY update is made, ALL nodes in the workflow are sanitized (not just modified ones)',
      '**Auto-sanitization cannot fix everything**: It fixes operator structures and missing metadata, but cannot fix broken connections or branch mismatches',
      '**Corrupted workflows beyond repair**: Workflows in paradoxical states (API returns corrupt, API rejects updates) cannot be fixed via API - must be recreated'
      'replaceConnections overwrites entire connections object - all previous connections lost'
    ],
    relatedTools: ['n8n_update_full_workflow', 'n8n_get_workflow', 'validate_workflow', 'tools_documentation']
  }

@@ -293,7 +293,7 @@ export const n8nManagementTools: ToolDefinition[] = [
          description: 'Types of fixes to apply (default: all)',
          items: {
            type: 'string',
            enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path', 'typeversion-upgrade', 'version-migration']
            enum: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path']
          }
        },
        confidenceThreshold: {
@@ -462,59 +462,5 @@ Examples:
        }
      }
    }
  },
  {
    name: 'n8n_workflow_versions',
    description: `Manage workflow version history, rollback, and cleanup. Six modes:
- list: Show version history for a workflow
- get: Get details of specific version
- rollback: Restore workflow to previous version (creates backup first)
- delete: Delete specific version or all versions for a workflow
- prune: Manually trigger pruning to keep N most recent versions
- truncate: Delete ALL versions for ALL workflows (requires confirmation)`,
    inputSchema: {
      type: 'object',
      properties: {
        mode: {
          type: 'string',
          enum: ['list', 'get', 'rollback', 'delete', 'prune', 'truncate'],
          description: 'Operation mode'
        },
        workflowId: {
          type: 'string',
          description: 'Workflow ID (required for list, rollback, delete, prune)'
        },
        versionId: {
          type: 'number',
          description: 'Version ID (required for get mode and single version delete, optional for rollback)'
        },
        limit: {
          type: 'number',
          default: 10,
          description: 'Max versions to return in list mode'
        },
        validateBefore: {
          type: 'boolean',
          default: true,
          description: 'Validate workflow structure before rollback'
        },
        deleteAll: {
          type: 'boolean',
          default: false,
          description: 'Delete all versions for workflow (delete mode only)'
        },
        maxVersions: {
          type: 'number',
          default: 10,
          description: 'Keep N most recent versions (prune mode only)'
        },
        confirmTruncate: {
          type: 'boolean',
          default: false,
          description: 'REQUIRED: Must be true to truncate all versions (truncate mode only)'
        }
      },
      required: ['mode']
    }
  }
];
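
For reference, hypothetical invocations of the `n8n_workflow_versions` tool defined above (and removed in this compare); the workflow and version IDs are placeholders, not real data:

```typescript
// Illustrative calls only - IDs are placeholders.
n8n_workflow_versions({ mode: 'list', workflowId: 'wf_abc123', limit: 5 });
n8n_workflow_versions({ mode: 'rollback', workflowId: 'wf_abc123', versionId: 42 });
n8n_workflow_versions({ mode: 'prune', workflowId: 'wf_abc123', maxVersions: 10 });
n8n_workflow_versions({ mode: 'truncate', confirmTruncate: true }); // irreversible
```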
|
||||
@@ -164,7 +164,7 @@ async function testAutofix() {
  // Step 3: Generate fixes in preview mode
  logger.info('\nStep 3: Generating fixes (preview mode)...');
  const autoFixer = new WorkflowAutoFixer();
  const previewResult = await autoFixer.generateFixes(
  const previewResult = autoFixer.generateFixes(
    testWorkflow as any,
    validationResult,
    allFormatIssues,
@@ -210,7 +210,7 @@ async function testAutofix() {
  logger.info('\n\n=== Testing Different Confidence Thresholds ===');

  for (const threshold of ['high', 'medium', 'low'] as const) {
    const result = await autoFixer.generateFixes(
    const result = autoFixer.generateFixes(
      testWorkflow as any,
      validationResult,
      allFormatIssues,
@@ -227,7 +227,7 @@ async function testAutofix() {

  const fixTypes = ['expression-format', 'typeversion-correction', 'error-output-config'] as const;
  for (const fixType of fixTypes) {
    const result = await autoFixer.generateFixes(
    const result = autoFixer.generateFixes(
      testWorkflow as any,
      validationResult,
      allFormatIssues,

@@ -173,7 +173,7 @@ async function testNodeSimilarity() {
  console.log('='.repeat(60));

  const autoFixer = new WorkflowAutoFixer(repository);
  const fixResult = await autoFixer.generateFixes(
  const fixResult = autoFixer.generateFixes(
    testWorkflow as any,
    validationResult,
    [],

@@ -87,7 +87,7 @@ async function testWebhookAutofix() {
  // Step 2: Generate fixes (preview mode)
  logger.info('\nStep 2: Generating fixes in preview mode...');

  const fixResult = await autoFixer.generateFixes(
  const fixResult = autoFixer.generateFixes(
    testWorkflow,
    validationResult,
    [], // No expression format issues to pass
@@ -1,321 +0,0 @@
/**
 * Breaking Change Detector
 *
 * Detects breaking changes between node versions by:
 * 1. Consulting the hardcoded breaking changes registry
 * 2. Dynamically comparing property schemas between versions
 * 3. Analyzing property requirement changes
 *
 * Used by the autofixer to intelligently upgrade node versions.
 */

import { NodeRepository } from '../database/node-repository';
import {
  BREAKING_CHANGES_REGISTRY,
  BreakingChange,
  getBreakingChangesForNode,
  getAllChangesForNode
} from './breaking-changes-registry';

export interface DetectedChange {
  propertyName: string;
  changeType: 'added' | 'removed' | 'renamed' | 'type_changed' | 'requirement_changed' | 'default_changed';
  isBreaking: boolean;
  oldValue?: any;
  newValue?: any;
  migrationHint: string;
  autoMigratable: boolean;
  migrationStrategy?: any;
  severity: 'LOW' | 'MEDIUM' | 'HIGH';
  source: 'registry' | 'dynamic'; // Where this change was detected
}

export interface VersionUpgradeAnalysis {
  nodeType: string;
  fromVersion: string;
  toVersion: string;
  hasBreakingChanges: boolean;
  changes: DetectedChange[];
  autoMigratableCount: number;
  manualRequiredCount: number;
  overallSeverity: 'LOW' | 'MEDIUM' | 'HIGH';
  recommendations: string[];
}

export class BreakingChangeDetector {
  constructor(private nodeRepository: NodeRepository) {}

  /**
   * Analyze a version upgrade and detect all changes
   */
  async analyzeVersionUpgrade(
    nodeType: string,
    fromVersion: string,
    toVersion: string
  ): Promise<VersionUpgradeAnalysis> {
    // Get changes from registry
    const registryChanges = this.getRegistryChanges(nodeType, fromVersion, toVersion);

    // Get dynamic changes by comparing schemas
    const dynamicChanges = this.detectDynamicChanges(nodeType, fromVersion, toVersion);

    // Merge and deduplicate changes
    const allChanges = this.mergeChanges(registryChanges, dynamicChanges);

    // Calculate statistics
    const hasBreakingChanges = allChanges.some(c => c.isBreaking);
    const autoMigratableCount = allChanges.filter(c => c.autoMigratable).length;
    const manualRequiredCount = allChanges.filter(c => !c.autoMigratable).length;

    // Determine overall severity
    const overallSeverity = this.calculateOverallSeverity(allChanges);

    // Generate recommendations
    const recommendations = this.generateRecommendations(allChanges);

    return {
      nodeType,
      fromVersion,
      toVersion,
      hasBreakingChanges,
      changes: allChanges,
      autoMigratableCount,
      manualRequiredCount,
      overallSeverity,
      recommendations
    };
  }

  /**
   * Get changes from the hardcoded registry
   */
  private getRegistryChanges(
    nodeType: string,
    fromVersion: string,
    toVersion: string
  ): DetectedChange[] {
    const registryChanges = getAllChangesForNode(nodeType, fromVersion, toVersion);

    return registryChanges.map(change => ({
      propertyName: change.propertyName,
      changeType: change.changeType,
      isBreaking: change.isBreaking,
      oldValue: change.oldValue,
      newValue: change.newValue,
      migrationHint: change.migrationHint,
      autoMigratable: change.autoMigratable,
      migrationStrategy: change.migrationStrategy,
      severity: change.severity,
      source: 'registry' as const
    }));
  }

  /**
   * Dynamically detect changes by comparing property schemas
   */
  private detectDynamicChanges(
    nodeType: string,
    fromVersion: string,
    toVersion: string
  ): DetectedChange[] {
    // Get both versions from the database
    const oldVersionData = this.nodeRepository.getNodeVersion(nodeType, fromVersion);
    const newVersionData = this.nodeRepository.getNodeVersion(nodeType, toVersion);

    if (!oldVersionData || !newVersionData) {
      return []; // Can't detect dynamic changes without version data
    }

    const changes: DetectedChange[] = [];

    // Compare properties schemas
    const oldProps = this.flattenProperties(oldVersionData.propertiesSchema || []);
    const newProps = this.flattenProperties(newVersionData.propertiesSchema || []);

    // Detect added properties
    for (const propName of Object.keys(newProps)) {
      if (!oldProps[propName]) {
        const prop = newProps[propName];
        const isRequired = prop.required === true;

        changes.push({
          propertyName: propName,
          changeType: 'added',
          isBreaking: isRequired, // Breaking if required
          newValue: prop.type || 'unknown',
          migrationHint: isRequired
            ? `Property "${propName}" is now required in v${toVersion}. Provide a value to prevent validation errors.`
            : `Property "${propName}" was added in v${toVersion}. Optional parameter, safe to ignore if not needed.`,
          autoMigratable: !isRequired, // Can auto-add with default if not required
          migrationStrategy: !isRequired
            ? {
                type: 'add_property',
                defaultValue: prop.default || null
              }
            : undefined,
          severity: isRequired ? 'HIGH' : 'LOW',
          source: 'dynamic'
        });
      }
    }

    // Detect removed properties
    for (const propName of Object.keys(oldProps)) {
      if (!newProps[propName]) {
        changes.push({
          propertyName: propName,
          changeType: 'removed',
          isBreaking: true, // Removal is always breaking
          oldValue: oldProps[propName].type || 'unknown',
          migrationHint: `Property "${propName}" was removed in v${toVersion}. Remove this property from your configuration.`,
          autoMigratable: true, // Can auto-remove
          migrationStrategy: {
            type: 'remove_property'
          },
          severity: 'MEDIUM',
          source: 'dynamic'
        });
      }
    }

    // Detect requirement changes
    for (const propName of Object.keys(newProps)) {
      if (oldProps[propName]) {
        const oldRequired = oldProps[propName].required === true;
        const newRequired = newProps[propName].required === true;

        if (oldRequired !== newRequired) {
          changes.push({
            propertyName: propName,
            changeType: 'requirement_changed',
            isBreaking: newRequired && !oldRequired, // Breaking if became required
            oldValue: oldRequired ? 'required' : 'optional',
            newValue: newRequired ? 'required' : 'optional',
            migrationHint: newRequired
              ? `Property "${propName}" is now required in v${toVersion}. Ensure a value is provided.`
              : `Property "${propName}" is now optional in v${toVersion}.`,
            autoMigratable: false, // Requirement changes need manual review
            severity: newRequired ? 'HIGH' : 'LOW',
            source: 'dynamic'
          });
        }
      }
    }

    return changes;
  }

  /**
   * Flatten nested properties into a map for easy comparison
   */
  private flattenProperties(properties: any[], prefix: string = ''): Record<string, any> {
    const flat: Record<string, any> = {};

    for (const prop of properties) {
      if (!prop.name && !prop.displayName) continue;

      const propName = prop.name || prop.displayName;
      const fullPath = prefix ? `${prefix}.${propName}` : propName;

      flat[fullPath] = prop;

      // Recursively flatten nested options
      if (prop.options && Array.isArray(prop.options)) {
        Object.assign(flat, this.flattenProperties(prop.options, fullPath));
      }
    }

    return flat;
  }

  /**
   * Merge registry and dynamic changes, avoiding duplicates
   */
  private mergeChanges(
    registryChanges: DetectedChange[],
    dynamicChanges: DetectedChange[]
  ): DetectedChange[] {
    const merged = [...registryChanges];

    // Add dynamic changes that aren't already in registry
    for (const dynamicChange of dynamicChanges) {
      const existsInRegistry = registryChanges.some(
        rc => rc.propertyName === dynamicChange.propertyName &&
              rc.changeType === dynamicChange.changeType
      );

      if (!existsInRegistry) {
        merged.push(dynamicChange);
      }
    }

    // Sort by severity (HIGH -> MEDIUM -> LOW)
    const severityOrder = { HIGH: 0, MEDIUM: 1, LOW: 2 };
    merged.sort((a, b) => severityOrder[a.severity] - severityOrder[b.severity]);

    return merged;
  }

  /**
   * Calculate overall severity of the upgrade
   */
  private calculateOverallSeverity(changes: DetectedChange[]): 'LOW' | 'MEDIUM' | 'HIGH' {
    if (changes.some(c => c.severity === 'HIGH')) return 'HIGH';
    if (changes.some(c => c.severity === 'MEDIUM')) return 'MEDIUM';
    return 'LOW';
  }

  /**
   * Generate actionable recommendations for the upgrade
   */
  private generateRecommendations(changes: DetectedChange[]): string[] {
    const recommendations: string[] = [];

    const breakingChanges = changes.filter(c => c.isBreaking);
    const autoMigratable = changes.filter(c => c.autoMigratable);
    const manualRequired = changes.filter(c => !c.autoMigratable);

    if (breakingChanges.length === 0) {
      recommendations.push('✓ No breaking changes detected. This upgrade should be safe.');
    } else {
      recommendations.push(
        `⚠ ${breakingChanges.length} breaking change(s) detected. Review carefully before applying.`
      );
    }

    if (autoMigratable.length > 0) {
      recommendations.push(
        `✓ ${autoMigratable.length} change(s) can be automatically migrated.`
      );
    }

    if (manualRequired.length > 0) {
      recommendations.push(
        `✋ ${manualRequired.length} change(s) require manual intervention.`
      );

      // List specific manual changes
      for (const change of manualRequired) {
        recommendations.push(`  - ${change.propertyName}: ${change.migrationHint}`);
      }
    }

    return recommendations;
  }

  /**
   * Quick check: does this upgrade have breaking changes?
   */
  hasBreakingChanges(nodeType: string, fromVersion: string, toVersion: string): boolean {
    const registryChanges = getBreakingChangesForNode(nodeType, fromVersion, toVersion);
    return registryChanges.length > 0;
  }

  /**
   * Get simple list of property names that changed
   */
  getChangedProperties(nodeType: string, fromVersion: string, toVersion: string): string[] {
    const registryChanges = getAllChangesForNode(nodeType, fromVersion, toVersion);
    return registryChanges.map(c => c.propertyName);
  }
}
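A minimal usage sketch for the detector above; the NodeRepository instance (nodeRepository) is assumed to be constructed elsewhere:

// Analyze a webhook node upgrade from v1.0 to v2.0.
const detector = new BreakingChangeDetector(nodeRepository); // nodeRepository assumed
const analysis = await detector.analyzeVersionUpgrade('n8n-nodes-base.webhook', '1.0', '2.0');

if (analysis.hasBreakingChanges) {
  // Registry and dynamic changes arrive merged and sorted HIGH -> MEDIUM -> LOW.
  console.log(`Overall severity: ${analysis.overallSeverity}`);
  console.log(`${analysis.autoMigratableCount} auto-migratable, ${analysis.manualRequiredCount} manual`);
  analysis.recommendations.forEach(r => console.log(r));
}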
@@ -1,315 +0,0 @@
/**
 * Breaking Changes Registry
 *
 * Central registry of known breaking changes between node versions.
 * Used by the autofixer to detect and migrate version upgrades intelligently.
 *
 * Each entry defines:
 * - Which versions are affected
 * - What properties changed
 * - Whether it's auto-migratable
 * - Migration strategies and hints
 */

export interface BreakingChange {
  nodeType: string;
  fromVersion: string;
  toVersion: string;
  propertyName: string;
  changeType: 'added' | 'removed' | 'renamed' | 'type_changed' | 'requirement_changed' | 'default_changed';
  isBreaking: boolean;
  oldValue?: string;
  newValue?: string;
  migrationHint: string;
  autoMigratable: boolean;
  migrationStrategy?: {
    type: 'add_property' | 'remove_property' | 'rename_property' | 'set_default';
    defaultValue?: any;
    sourceProperty?: string;
    targetProperty?: string;
  };
  severity: 'LOW' | 'MEDIUM' | 'HIGH';
}

/**
 * Registry of known breaking changes across all n8n nodes
 */
export const BREAKING_CHANGES_REGISTRY: BreakingChange[] = [
  // ==========================================
  // Execute Workflow Node
  // ==========================================
  {
    nodeType: 'n8n-nodes-base.executeWorkflow',
    fromVersion: '1.0',
    toVersion: '1.1',
    propertyName: 'parameters.inputFieldMapping',
    changeType: 'added',
    isBreaking: true,
    migrationHint: 'In v1.1+, the Execute Workflow node requires explicit field mapping to pass data to sub-workflows. Add an "inputFieldMapping" object with "mappings" array defining how to map fields from parent to child workflow.',
    autoMigratable: true,
    migrationStrategy: {
      type: 'add_property',
      defaultValue: {
        mappings: []
      }
    },
    severity: 'HIGH'
  },
  {
    nodeType: 'n8n-nodes-base.executeWorkflow',
    fromVersion: '1.0',
    toVersion: '1.1',
    propertyName: 'parameters.mode',
    changeType: 'requirement_changed',
    isBreaking: false,
    migrationHint: 'The "mode" parameter behavior changed in v1.1. Default is now "static" instead of "list". Ensure your workflow ID specification matches the selected mode.',
    autoMigratable: false,
    severity: 'MEDIUM'
  },

  // ==========================================
  // Webhook Node
  // ==========================================
  {
    nodeType: 'n8n-nodes-base.webhook',
    fromVersion: '2.0',
    toVersion: '2.1',
    propertyName: 'webhookId',
    changeType: 'added',
    isBreaking: true,
    migrationHint: 'In v2.1+, webhooks require a unique "webhookId" field in addition to the path. This ensures webhook persistence across workflow updates. A UUID will be auto-generated if not provided.',
    autoMigratable: true,
    migrationStrategy: {
      type: 'add_property',
      defaultValue: null // Will be generated as UUID at runtime
    },
    severity: 'HIGH'
  },
  {
    nodeType: 'n8n-nodes-base.webhook',
    fromVersion: '1.0',
    toVersion: '2.0',
    propertyName: 'parameters.path',
    changeType: 'requirement_changed',
    isBreaking: true,
    migrationHint: 'In v2.0+, the webhook path must be explicitly defined and cannot be empty. Ensure a valid path is set.',
    autoMigratable: false,
    severity: 'HIGH'
  },
  {
    nodeType: 'n8n-nodes-base.webhook',
    fromVersion: '1.0',
    toVersion: '2.0',
    propertyName: 'parameters.responseMode',
    changeType: 'added',
    isBreaking: false,
    migrationHint: 'v2.0 introduces a "responseMode" parameter to control how the webhook responds. Default is "onReceived" (immediate response). Use "lastNode" to wait for workflow completion.',
    autoMigratable: true,
    migrationStrategy: {
      type: 'add_property',
      defaultValue: 'onReceived'
    },
    severity: 'LOW'
  },

  // ==========================================
  // HTTP Request Node
  // ==========================================
  {
    nodeType: 'n8n-nodes-base.httpRequest',
    fromVersion: '4.1',
    toVersion: '4.2',
    propertyName: 'parameters.sendBody',
    changeType: 'requirement_changed',
    isBreaking: false,
    migrationHint: 'In v4.2+, "sendBody" must be explicitly set to true for POST/PUT/PATCH requests to include a body. Previous versions had implicit body sending.',
    autoMigratable: true,
    migrationStrategy: {
      type: 'add_property',
      defaultValue: true
    },
    severity: 'MEDIUM'
  },

  // ==========================================
  // Code Node (JavaScript)
  // ==========================================
  {
    nodeType: 'n8n-nodes-base.code',
    fromVersion: '1.0',
    toVersion: '2.0',
    propertyName: 'parameters.mode',
    changeType: 'added',
    isBreaking: false,
    migrationHint: 'v2.0 introduces execution modes: "runOnceForAllItems" (default) and "runOnceForEachItem". The default mode processes all items at once, which may differ from v1.0 behavior.',
    autoMigratable: true,
    migrationStrategy: {
      type: 'add_property',
      defaultValue: 'runOnceForAllItems'
    },
    severity: 'MEDIUM'
  },

  // ==========================================
  // Schedule Trigger Node
  // ==========================================
  {
    nodeType: 'n8n-nodes-base.scheduleTrigger',
    fromVersion: '1.0',
    toVersion: '1.1',
    propertyName: 'parameters.rule.interval',
    changeType: 'type_changed',
    isBreaking: true,
    oldValue: 'string',
    newValue: 'array',
    migrationHint: 'In v1.1+, the interval parameter changed from a single string to an array of interval objects. Convert your single interval to an array format: [{field: "hours", value: 1}]',
    autoMigratable: false,
    severity: 'HIGH'
  },

  // ==========================================
  // Error Handling (Global Change)
  // ==========================================
  {
    nodeType: '*', // Applies to all nodes
    fromVersion: '1.0',
    toVersion: '2.0',
    propertyName: 'continueOnFail',
    changeType: 'removed',
    isBreaking: false,
    migrationHint: 'The "continueOnFail" property is deprecated. Use "onError" instead with value "continueErrorOutput" or "continueRegularOutput".',
    autoMigratable: true,
    migrationStrategy: {
      type: 'rename_property',
      sourceProperty: 'continueOnFail',
      targetProperty: 'onError',
      defaultValue: 'continueErrorOutput'
    },
    severity: 'MEDIUM'
  }
];

/**
 * Get breaking changes for a specific node type and version upgrade
 */
export function getBreakingChangesForNode(
  nodeType: string,
  fromVersion: string,
  toVersion: string
): BreakingChange[] {
  return BREAKING_CHANGES_REGISTRY.filter(change => {
    // Match exact node type or wildcard (*)
    const nodeMatches = change.nodeType === nodeType || change.nodeType === '*';

    // Check if version range matches
    const versionMatches =
      compareVersions(fromVersion, change.fromVersion) >= 0 &&
      compareVersions(toVersion, change.toVersion) <= 0;

    return nodeMatches && versionMatches && change.isBreaking;
  });
}

/**
 * Get all changes (breaking and non-breaking) for a version upgrade
 */
export function getAllChangesForNode(
  nodeType: string,
  fromVersion: string,
  toVersion: string
): BreakingChange[] {
  return BREAKING_CHANGES_REGISTRY.filter(change => {
    const nodeMatches = change.nodeType === nodeType || change.nodeType === '*';
    const versionMatches =
      compareVersions(fromVersion, change.fromVersion) >= 0 &&
      compareVersions(toVersion, change.toVersion) <= 0;

    return nodeMatches && versionMatches;
  });
}

/**
 * Get auto-migratable changes for a version upgrade
 */
export function getAutoMigratableChanges(
  nodeType: string,
  fromVersion: string,
  toVersion: string
): BreakingChange[] {
  return getAllChangesForNode(nodeType, fromVersion, toVersion).filter(
    change => change.autoMigratable
  );
}

/**
 * Check if a specific node has known breaking changes for a version upgrade
 */
export function hasBreakingChanges(
  nodeType: string,
  fromVersion: string,
  toVersion: string
): boolean {
  return getBreakingChangesForNode(nodeType, fromVersion, toVersion).length > 0;
}

/**
 * Get migration hints for a version upgrade
 */
export function getMigrationHints(
  nodeType: string,
  fromVersion: string,
  toVersion: string
): string[] {
  const changes = getAllChangesForNode(nodeType, fromVersion, toVersion);
  return changes.map(change => change.migrationHint);
}

/**
 * Simple version comparison
 * Returns: -1 if v1 < v2, 0 if equal, 1 if v1 > v2
 */
function compareVersions(v1: string, v2: string): number {
  const parts1 = v1.split('.').map(Number);
  const parts2 = v2.split('.').map(Number);

  for (let i = 0; i < Math.max(parts1.length, parts2.length); i++) {
    const p1 = parts1[i] || 0;
    const p2 = parts2[i] || 0;

    if (p1 < p2) return -1;
    if (p1 > p2) return 1;
  }

  return 0;
}

/**
 * Get nodes with known version migrations
 */
export function getNodesWithVersionMigrations(): string[] {
  const nodeTypes = new Set<string>();

  BREAKING_CHANGES_REGISTRY.forEach(change => {
    if (change.nodeType !== '*') {
      nodeTypes.add(change.nodeType);
    }
  });

  return Array.from(nodeTypes);
}

/**
 * Get all versions tracked for a specific node
 */
export function getTrackedVersionsForNode(nodeType: string): string[] {
  const versions = new Set<string>();

  BREAKING_CHANGES_REGISTRY
    .filter(change => change.nodeType === nodeType || change.nodeType === '*')
    .forEach(change => {
      versions.add(change.fromVersion);
      versions.add(change.toVersion);
    });

  return Array.from(versions).sort((a, b) => compareVersions(a, b));
}
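To illustrate the range matching above: an entry applies when the requested fromVersion is at or above the entry's fromVersion and the requested toVersion is at or below the entry's toVersion. A quick sketch against the registry as defined:

// Breaking changes only: for webhook 1.0 -> 2.0 this returns the
// parameters.path entry; responseMode and the wildcard continueOnFail
// entry are excluded because they are marked non-breaking.
const breaking = getBreakingChangesForNode('n8n-nodes-base.webhook', '1.0', '2.0');

// All changes (breaking or not) feed the hints, so this list covers
// parameters.path, parameters.responseMode, and continueOnFail.
const hints = getMigrationHints('n8n-nodes-base.webhook', '1.0', '2.0');
hints.forEach(h => console.log('-', h));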
@@ -5,8 +5,6 @@
 * Provides helpful suggestions and identifies missing or misconfigured properties.
 */

import { shouldSkipLiteralValidation } from '../utils/expression-utils.js';

export interface ValidationResult {
  valid: boolean;
  errors: ValidationError[];
@@ -383,16 +381,13 @@ export class ConfigValidator {
  ): void {
    // URL validation
    if (config.url && typeof config.url === 'string') {
      // Skip validation for expressions - they will be evaluated at runtime
      if (!shouldSkipLiteralValidation(config.url)) {
        if (!config.url.startsWith('http://') && !config.url.startsWith('https://')) {
          errors.push({
            type: 'invalid_value',
            property: 'url',
            message: 'URL must start with http:// or https://',
            fix: 'Add https:// to the beginning of your URL'
          });
        }
      if (!config.url.startsWith('http://') && !config.url.startsWith('https://')) {
        errors.push({
          type: 'invalid_value',
          property: 'url',
          message: 'URL must start with http:// or https://',
          fix: 'Add https:// to the beginning of your URL'
        });
      }
    }

@@ -422,19 +417,15 @@ export class ConfigValidator {

    // JSON body validation
    if (config.sendBody && config.contentType === 'json' && config.jsonBody) {
      // Skip validation for expressions - they will be evaluated at runtime
      if (!shouldSkipLiteralValidation(config.jsonBody)) {
        try {
          JSON.parse(config.jsonBody);
        } catch (e) {
          const errorMsg = e instanceof Error ? e.message : 'Unknown parsing error';
          errors.push({
            type: 'invalid_value',
            property: 'jsonBody',
            message: `jsonBody contains invalid JSON: ${errorMsg}`,
            fix: 'Fix JSON syntax error and ensure valid JSON format'
          });
        }
      try {
        JSON.parse(config.jsonBody);
      } catch (e) {
        errors.push({
          type: 'invalid_value',
          property: 'jsonBody',
          message: 'jsonBody contains invalid JSON',
          fix: 'Ensure jsonBody contains valid JSON syntax'
        });
      }
    }
  }
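The hunks above wrap both checks in shouldSkipLiteralValidation so that n8n expressions are not validated as literal values. The sketch below shows the intended effect; treating '='-prefixed values as expressions is an assumption drawn from the comments, since the helper's exact rules live in ../utils/expression-utils:

// Only the literal URL should be flagged; the expression resolves at runtime.
const candidates = ['example.com/path', '={{ $json.baseUrl }}/v1'];
const flagged = candidates.filter(url =>
  !url.startsWith('=') && // assumed expression marker, skipped by the validator
  !url.startsWith('http://') && !url.startsWith('https://')
);
console.log(flagged); // ['example.com/path']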
@@ -401,59 +401,7 @@ export class EnhancedConfigValidator extends ConfigValidator {
    config: Record<string, any>,
    result: EnhancedValidationResult
  ): void {
    const url = String(config.url || '');
    const options = config.options || {};

    // 1. Suggest alwaysOutputData for better error handling (node-level property)
    // Note: We can't check if it exists (it's node-level, not in parameters),
    // but we can suggest it as a best practice
    if (!result.suggestions.some(s => typeof s === 'string' && s.includes('alwaysOutputData'))) {
      result.suggestions.push(
        'Consider adding alwaysOutputData: true at node level (not in parameters) for better error handling. ' +
        'This ensures the node produces output even when HTTP requests fail, allowing downstream error handling.'
      );
    }

    // 2. Suggest responseFormat for API endpoints
    const lowerUrl = url.toLowerCase();
    const isApiEndpoint =
      // Subdomain patterns (api.example.com)
      /^https?:\/\/api\./i.test(url) ||
      // Path patterns with word boundaries to prevent false positives like "therapist", "restaurant"
      /\/api[\/\?]|\/api$/i.test(url) ||
      /\/rest[\/\?]|\/rest$/i.test(url) ||
      // Known API service domains
      lowerUrl.includes('supabase.co') ||
      lowerUrl.includes('firebase') ||
      lowerUrl.includes('googleapis.com') ||
      // Versioned API paths (e.g., example.com/v1, example.com/v2)
      /\.com\/v\d+/i.test(url);

    if (isApiEndpoint && !options.response?.response?.responseFormat) {
      result.suggestions.push(
        'API endpoints should explicitly set options.response.response.responseFormat to "json" or "text" ' +
        'to prevent confusion about response parsing. Example: ' +
        '{ "options": { "response": { "response": { "responseFormat": "json" } } } }'
      );
    }

    // 3. Enhanced URL protocol validation for expressions
    if (url && url.startsWith('=')) {
      // Expression-based URL - check for common protocol issues
      const expressionContent = url.slice(1); // Remove = prefix
      const lowerExpression = expressionContent.toLowerCase();

      // Check for missing protocol in expression (case-insensitive)
      if (expressionContent.startsWith('www.') ||
          (expressionContent.includes('{{') && !lowerExpression.includes('http'))) {
        result.warnings.push({
          type: 'invalid_value',
          property: 'url',
          message: 'URL expression appears to be missing http:// or https:// protocol',
          suggestion: 'Include protocol in your expression. Example: ={{ "https://" + $json.domain + ".com" }}'
        });
      }
    }
    // Examples removed - validation provides error messages and fixes instead
  }

  /**
@@ -518,15 +466,6 @@ export class EnhancedConfigValidator extends ConfigValidator {
    return Array.from(seen.values());
  }

  /**
   * Check if a warning should be filtered out (hardcoded credentials shown only in strict mode)
   */
  private static shouldFilterCredentialWarning(warning: ValidationWarning): boolean {
    return warning.type === 'security' &&
      warning.message !== undefined &&
      warning.message.includes('Hardcoded nodeCredentialType');
  }

  /**
   * Apply profile-based filtering to validation results
   */
@@ -539,13 +478,9 @@ export class EnhancedConfigValidator extends ConfigValidator {
        // Only keep missing required errors
        result.errors = result.errors.filter(e => e.type === 'missing_required');
        // Keep ONLY critical warnings (security and deprecated)
        // But filter out hardcoded credential type warnings (only show in strict mode)
        result.warnings = result.warnings.filter(w => {
          if (this.shouldFilterCredentialWarning(w)) {
            return false;
          }
          return w.type === 'security' || w.type === 'deprecated';
        });
        result.warnings = result.warnings.filter(w =>
          w.type === 'security' || w.type === 'deprecated'
        );
        result.suggestions = [];
        break;

@@ -558,10 +493,6 @@ export class EnhancedConfigValidator extends ConfigValidator {
        );
        // Keep security and deprecated warnings, REMOVE property visibility warnings
        result.warnings = result.warnings.filter(w => {
          // Filter out hardcoded credential type warnings (only show in strict mode)
          if (this.shouldFilterCredentialWarning(w)) {
            return false;
          }
          if (w.type === 'security' || w.type === 'deprecated') return true;
          // FILTER OUT property visibility warnings (too noisy)
          if (w.type === 'inefficient' && w.message && w.message.includes('not visible')) {
@@ -587,10 +518,6 @@ export class EnhancedConfigValidator extends ConfigValidator {
        // Current behavior - balanced for AI agents
        // Filter out noise but keep helpful warnings
        result.warnings = result.warnings.filter(w => {
          // Filter out hardcoded credential type warnings (only show in strict mode)
          if (this.shouldFilterCredentialWarning(w)) {
            return false;
          }
          // Keep security and deprecated warnings
          if (w.type === 'security' || w.type === 'deprecated') return true;
          // Keep missing common properties
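The word-boundary patterns in the removed isApiEndpoint heuristic above were chosen to avoid false positives; the sketch below applies a subset of the same regexes verbatim to a few URLs:

// Quick sanity check of the endpoint heuristic (patterns copied from above).
const isApiEndpoint = (url: string) =>
  /^https?:\/\/api\./i.test(url) ||
  /\/api[\/\?]|\/api$/i.test(url) ||
  /\/rest[\/\?]|\/rest$/i.test(url) ||
  /\.com\/v\d+/i.test(url);

console.log(isApiEndpoint('https://api.example.com/users'));  // true  (api. subdomain)
console.log(isApiEndpoint('https://example.com/api/users'));  // true  (/api/ path)
console.log(isApiEndpoint('https://example.com/therapist'));  // false (no bare "api" match)
console.log(isApiEndpoint('https://example.com/restaurant')); // false (no bare "rest" match)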
@@ -207,14 +207,8 @@ export class ExpressionValidator {
    expr: string,
    result: ExpressionValidationResult
  ): void {
    // Check for missing $ prefix - but exclude cases where $ is already present OR it's property access (e.g., .json)
    // The pattern now excludes:
    // - Immediately preceded by $ (e.g., $json) - handled by (?<!\$)
    // - Preceded by a dot (e.g., .json in $('Node').item.json.field) - handled by (?<!\.)
    // - Inside word characters (e.g., myJson) - handled by (?<!\w)
    // - Inside bracket notation (e.g., ['json']) - handled by (?<!\[)
    // - After opening bracket or quote (e.g., "json" or ['json'])
    const missingPrefixPattern = /(?<![.$\w['])\b(json|node|input|items|workflow|execution)\b(?!\s*[:''])/;
    // Check for missing $ prefix - but exclude cases where $ is already present
    const missingPrefixPattern = /(?<!\$)\b(json|node|input|items|workflow|execution)\b(?!\s*:)/;
    if (expr.match(missingPrefixPattern)) {
      result.warnings.push(
        'Possible missing $ prefix for variable (e.g., use $json instead of json)'
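The widened lookbehind above stops the validator from flagging property access such as .json. The comparison below uses both patterns from the hunk verbatim:

// Old vs. new pattern applied to typical n8n expressions.
const oldPattern = /(?<!\$)\b(json|node|input|items|workflow|execution)\b(?!\s*:)/;
const newPattern = /(?<![.$\w['])\b(json|node|input|items|workflow|execution)\b(?!\s*[:''])/;

const propertyAccess = "$('Webhook').item.json.field";
console.log(oldPattern.test(propertyAccess)); // true  - false positive on ".json"
console.log(newPattern.test(propertyAccess)); // false - dot lookbehind suppresses it

console.log(newPattern.test('{{ json.field }}')); // true - genuinely missing the $ prefix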
@@ -170,23 +170,10 @@ export class N8nApiClient {
    }
  }

  /**
   * Lists workflows from n8n instance.
   *
   * @param params - Query parameters for filtering and pagination
   * @returns Paginated list of workflows
   *
   * @remarks
   * This method handles two response formats for backwards compatibility:
   * - Modern (n8n v0.200.0+): {data: Workflow[], nextCursor?: string}
   * - Legacy (older versions): Workflow[] (wrapped automatically)
   *
   * @see https://github.com/czlonkowski/n8n-mcp/issues/349
   */
  async listWorkflows(params: WorkflowListParams = {}): Promise<WorkflowListResponse> {
    try {
      const response = await this.client.get('/workflows', { params });
      return this.validateListResponse<Workflow>(response.data, 'workflows');
      return response.data;
    } catch (error) {
      throw handleN8nApiError(error);
    }
@@ -204,23 +191,10 @@ export class N8nApiClient {
    }
  }

  /**
   * Lists executions from n8n instance.
   *
   * @param params - Query parameters for filtering and pagination
   * @returns Paginated list of executions
   *
   * @remarks
   * This method handles two response formats for backwards compatibility:
   * - Modern (n8n v0.200.0+): {data: Execution[], nextCursor?: string}
   * - Legacy (older versions): Execution[] (wrapped automatically)
   *
   * @see https://github.com/czlonkowski/n8n-mcp/issues/349
   */
  async listExecutions(params: ExecutionListParams = {}): Promise<ExecutionListResponse> {
    try {
      const response = await this.client.get('/executions', { params });
      return this.validateListResponse<Execution>(response.data, 'executions');
      return response.data;
    } catch (error) {
      throw handleN8nApiError(error);
    }
@@ -287,23 +261,10 @@ export class N8nApiClient {
  }

  // Credential Management
  /**
   * Lists credentials from n8n instance.
   *
   * @param params - Query parameters for filtering and pagination
   * @returns Paginated list of credentials
   *
   * @remarks
   * This method handles two response formats for backwards compatibility:
   * - Modern (n8n v0.200.0+): {data: Credential[], nextCursor?: string}
   * - Legacy (older versions): Credential[] (wrapped automatically)
   *
   * @see https://github.com/czlonkowski/n8n-mcp/issues/349
   */
  async listCredentials(params: CredentialListParams = {}): Promise<CredentialListResponse> {
    try {
      const response = await this.client.get('/credentials', { params });
      return this.validateListResponse<Credential>(response.data, 'credentials');
      return response.data;
    } catch (error) {
      throw handleN8nApiError(error);
    }
@@ -345,23 +306,10 @@ export class N8nApiClient {
  }

  // Tag Management
  /**
   * Lists tags from n8n instance.
   *
   * @param params - Query parameters for filtering and pagination
   * @returns Paginated list of tags
   *
   * @remarks
   * This method handles two response formats for backwards compatibility:
   * - Modern (n8n v0.200.0+): {data: Tag[], nextCursor?: string}
   * - Legacy (older versions): Tag[] (wrapped automatically)
   *
   * @see https://github.com/czlonkowski/n8n-mcp/issues/349
   */
  async listTags(params: TagListParams = {}): Promise<TagListResponse> {
    try {
      const response = await this.client.get('/tags', { params });
      return this.validateListResponse<Tag>(response.data, 'tags');
      return response.data;
    } catch (error) {
      throw handleN8nApiError(error);
    }
@@ -464,49 +412,4 @@ export class N8nApiClient {
      throw handleN8nApiError(error);
    }
  }

  /**
   * Validates and normalizes n8n API list responses.
   * Handles both modern format {data: [], nextCursor?: string} and legacy array format.
   *
   * @param responseData - Raw response data from n8n API
   * @param resourceType - Resource type for error messages (e.g., 'workflows', 'executions')
   * @returns Normalized response in modern format
   * @throws Error if response structure is invalid
   */
  private validateListResponse<T>(
    responseData: any,
    resourceType: string
  ): { data: T[]; nextCursor?: string | null } {
    // Validate response structure
    if (!responseData || typeof responseData !== 'object') {
      throw new Error(`Invalid response from n8n API for ${resourceType}: response is not an object`);
    }

    // Handle legacy case where API returns array directly (older n8n versions)
    if (Array.isArray(responseData)) {
      logger.warn(
        `n8n API returned array directly instead of {data, nextCursor} object for ${resourceType}. ` +
        'Wrapping in expected format for backwards compatibility.'
      );
      return {
        data: responseData,
        nextCursor: null
      };
    }

    // Validate expected format {data: [], nextCursor?: string}
    if (!Array.isArray(responseData.data)) {
      const keys = Object.keys(responseData).slice(0, 5);
      const keysPreview = keys.length < Object.keys(responseData).length
        ? `${keys.join(', ')}...`
        : keys.join(', ');
      throw new Error(
        `Invalid response from n8n API for ${resourceType}: expected {data: [], nextCursor?: string}, ` +
        `got object with keys: [${keysPreview}]`
      );
    }

    return responseData;
  }
}
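The removed validateListResponse above normalized two response shapes. A standalone sketch of the same normalization, kept here for reference:

// Standalone version of the normalization the removed helper performed.
function normalizeList<T>(raw: unknown, resource: string): { data: T[]; nextCursor?: string | null } {
  if (Array.isArray(raw)) {
    // Legacy n8n versions return a bare array; wrap it in the modern shape.
    return { data: raw as T[], nextCursor: null };
  }
  if (raw && typeof raw === 'object' && Array.isArray((raw as any).data)) {
    return raw as { data: T[]; nextCursor?: string | null };
  }
  throw new Error(`Invalid response from n8n API for ${resource}`);
}

console.log(normalizeList([{ id: '1' }], 'workflows'));              // legacy array, wrapped
console.log(normalizeList({ data: [], nextCursor: 'abc' }, 'tags')); // modern shape, passed through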
@@ -1,7 +1,5 @@
import { z } from 'zod';
import { WorkflowNode, WorkflowConnection, Workflow } from '../types/n8n-api';
import { isTriggerNode, isActivatableTrigger } from '../utils/node-type-utils';
import { isNonExecutableNode } from '../utils/node-classification';

// Zod schemas for n8n API validation

@@ -24,31 +22,17 @@ export const workflowNodeSchema = z.object({
  executeOnce: z.boolean().optional(),
});

// Connection array schema used by all connection types
const connectionArraySchema = z.array(
  z.array(
    z.object({
      node: z.string(),
      type: z.string(),
      index: z.number(),
    })
  )
);

/**
 * Workflow connection schema supporting all connection types.
 * Note: 'main' is optional because AI nodes exclusively use AI-specific
 * connection types (ai_languageModel, ai_memory, etc.) without main connections.
 */
export const workflowConnectionSchema = z.record(
  z.object({
    main: connectionArraySchema.optional(),
    error: connectionArraySchema.optional(),
    ai_tool: connectionArraySchema.optional(),
    ai_languageModel: connectionArraySchema.optional(),
    ai_memory: connectionArraySchema.optional(),
    ai_embedding: connectionArraySchema.optional(),
    ai_vectorStore: connectionArraySchema.optional(),
    main: z.array(
      z.array(
        z.object({
          node: z.string(),
          type: z.string(),
          index: z.number(),
        })
      )
    ),
  })
);
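The effect of the schema change above: AI nodes can now validate with no 'main' connection at all. For example, an AI-only connections object like the following parses cleanly, where it previously failed because 'main' was required:

// Language model feeding an agent purely via ai_languageModel.
const aiConnections = {
  'OpenAI Chat Model': {
    ai_languageModel: [[{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]]
  }
};
workflowConnectionSchema.parse(aiConnections); // passes with the widened schema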
@@ -210,14 +194,6 @@ export function validateWorkflowStructure(workflow: Partial<Workflow>): string[]
    errors.push('Workflow must have at least one node');
  }

  // Check if workflow has only non-executable nodes (sticky notes)
  if (workflow.nodes && workflow.nodes.length > 0) {
    const hasExecutableNodes = workflow.nodes.some(node => !isNonExecutableNode(node.type));
    if (!hasExecutableNodes) {
      errors.push('Workflow must have at least one executable node. Sticky notes alone cannot form a valid workflow.');
    }
  }

  if (!workflow.connections) {
    errors.push('Workflow connections are required');
  }
@@ -229,67 +205,16 @@ export function validateWorkflowStructure(workflow: Partial<Workflow>): string[]
      singleNode.type === 'n8n-nodes-base.webhookTrigger';

    if (!isWebhookOnly) {
      errors.push(`Single non-webhook node workflow is invalid. Current node: "${singleNode.name}" (${singleNode.type}). Add another node using: {type: 'addNode', node: {name: 'Process Data', type: 'n8n-nodes-base.set', typeVersion: 3.4, position: [450, 300], parameters: {}}}`);
      errors.push('Single-node workflows are only valid for webhooks. Add at least one more node and connect them. Example: Manual Trigger → Set node');
    }
  }

  // Check for disconnected nodes in multi-node workflows
  // Check for empty connections in multi-node workflows
  if (workflow.nodes && workflow.nodes.length > 1 && workflow.connections) {
    // Filter out non-executable nodes (sticky notes) when counting nodes
    const executableNodes = workflow.nodes.filter(node => !isNonExecutableNode(node.type));
    const connectionCount = Object.keys(workflow.connections).length;

    // First check: workflow has no connections at all (only check if there are multiple executable nodes)
    if (connectionCount === 0 && executableNodes.length > 1) {
      const nodeNames = executableNodes.slice(0, 2).map(n => n.name);
      errors.push(`Multi-node workflow has no connections between nodes. Add a connection using: {type: 'addConnection', source: '${nodeNames[0]}', target: '${nodeNames[1]}', sourcePort: 'main', targetPort: 'main'}`);
    } else if (connectionCount > 0 || executableNodes.length > 1) {
      // Second check: detect disconnected nodes (nodes with no incoming or outgoing connections)
      const connectedNodes = new Set<string>();

      // Collect all nodes that appear in connections (as source or target)
      Object.entries(workflow.connections).forEach(([sourceName, connection]) => {
        connectedNodes.add(sourceName); // Node has outgoing connection

        if (connection.main && Array.isArray(connection.main)) {
          connection.main.forEach((outputs) => {
            if (Array.isArray(outputs)) {
              outputs.forEach((target) => {
                connectedNodes.add(target.node); // Node has incoming connection
              });
            }
          });
        }
      });

      // Find disconnected nodes (excluding non-executable nodes and triggers)
      // Non-executable nodes (sticky notes) are UI-only and don't need connections
      // Trigger nodes only need outgoing connections
      const disconnectedNodes = workflow.nodes.filter(node => {
        // Skip non-executable nodes (sticky notes, etc.) - they're UI-only annotations
        if (isNonExecutableNode(node.type)) {
          return false;
        }

        const isConnected = connectedNodes.has(node.name);
        const isNodeTrigger = isTriggerNode(node.type);

        // Trigger nodes only need outgoing connections
        if (isNodeTrigger) {
          return !workflow.connections?.[node.name]; // Disconnected if no outgoing connections
        }

        // Regular nodes need at least one connection (incoming or outgoing)
        return !isConnected;
      });

      if (disconnectedNodes.length > 0) {
        const disconnectedList = disconnectedNodes.map(n => `"${n.name}" (${n.type})`).join(', ');
        const firstDisconnected = disconnectedNodes[0];
        const suggestedSource = workflow.nodes.find(n => connectedNodes.has(n.name))?.name || workflow.nodes[0].name;

        errors.push(`Disconnected nodes detected: ${disconnectedList}. Each node must have at least one connection. Add a connection: {type: 'addConnection', source: '${suggestedSource}', target: '${firstDisconnected.name}', sourcePort: 'main', targetPort: 'main'}`);
      }
    if (connectionCount === 0) {
      errors.push('Multi-node workflow has empty connections. Connect nodes like this: connections: { "Node1 Name": { "main": [[{ "node": "Node2 Name", "type": "main", "index": 0 }]] } }');
    }
  }

@@ -311,16 +236,6 @@ export function validateWorkflowStructure(workflow: Partial<Workflow>): string[]
    });
  }

  // Validate filter-based nodes (IF v2.2+, Switch v3.2+) have complete metadata
  if (workflow.nodes) {
    workflow.nodes.forEach((node, index) => {
      const filterErrors = validateFilterBasedNodeMetadata(node);
      if (filterErrors.length > 0) {
        errors.push(...filterErrors.map(err => `Node "${node.name}" (index ${index}): ${err}`));
      }
    });
  }

  // Validate connections
  if (workflow.connections) {
    try {
@@ -330,83 +245,6 @@ export function validateWorkflowStructure(workflow: Partial<Workflow>): string[]
    }
  }

  // Validate active workflows have activatable triggers
  // Issue #351: executeWorkflowTrigger cannot activate a workflow
  // It can only be invoked by other workflows
  if ((workflow as any).active === true && workflow.nodes && workflow.nodes.length > 0) {
    const activatableTriggers = workflow.nodes.filter(node =>
      !node.disabled && isActivatableTrigger(node.type)
    );

    const executeWorkflowTriggers = workflow.nodes.filter(node =>
      !node.disabled && node.type.toLowerCase().includes('executeworkflow')
    );

    if (activatableTriggers.length === 0 && executeWorkflowTriggers.length > 0) {
      // Workflow is active but only has executeWorkflowTrigger nodes
      const triggerNames = executeWorkflowTriggers.map(n => n.name).join(', ');
      errors.push(
        `Cannot activate workflow with only Execute Workflow Trigger nodes (${triggerNames}). ` +
        'Execute Workflow Trigger can only be invoked by other workflows, not activated. ' +
        'Either deactivate the workflow or add a webhook/schedule/polling trigger.'
      );
    }
  }

  // Validate Switch and IF node connection structures match their rules
  if (workflow.nodes && workflow.connections) {
    const switchNodes = workflow.nodes.filter(n => {
      if (n.type !== 'n8n-nodes-base.switch') return false;
      const mode = (n.parameters as any)?.mode;
      return !mode || mode === 'rules'; // Default mode is 'rules'
    });

    for (const switchNode of switchNodes) {
      const params = switchNode.parameters as any;
      const rules = params?.rules?.rules || [];
      const nodeConnections = workflow.connections[switchNode.name];

      if (rules.length > 0 && nodeConnections?.main) {
        const outputBranches = nodeConnections.main.length;

        // Switch nodes in "rules" mode need output branches matching rules count
        if (outputBranches !== rules.length) {
          const ruleNames = rules.map((r: any, i: number) =>
            r.outputKey ? `"${r.outputKey}" (index ${i})` : `Rule ${i}`
          ).join(', ');

          errors.push(
            `Switch node "${switchNode.name}" has ${rules.length} rules [${ruleNames}] ` +
            `but only ${outputBranches} output branch${outputBranches !== 1 ? 'es' : ''} in connections. ` +
            `Each rule needs its own output branch. When connecting to Switch outputs, specify sourceIndex: ` +
            rules.map((_: any, i: number) => i).join(', ') +
            ` (or use case parameter for clarity).`
          );
        }

        // Check for empty output branches (except trailing ones)
        const nonEmptyBranches = nodeConnections.main.filter((branch: any[]) => branch.length > 0).length;
        if (nonEmptyBranches < rules.length) {
          const emptyIndices = nodeConnections.main
            .map((branch: any[], i: number) => branch.length === 0 ? i : -1)
            .filter((i: number) => i !== -1 && i < rules.length);

          if (emptyIndices.length > 0) {
            const ruleInfo = emptyIndices.map((i: number) => {
              const rule = rules[i];
              return rule.outputKey ? `"${rule.outputKey}" (index ${i})` : `Rule ${i}`;
            }).join(', ');

            errors.push(
              `Switch node "${switchNode.name}" has unconnected output${emptyIndices.length !== 1 ? 's' : ''}: ${ruleInfo}. ` +
              `Add connection${emptyIndices.length !== 1 ? 's' : ''} using sourceIndex: ${emptyIndices.join(' or ')}.`
            );
          }
        }
      }
    }
  }

  // Validate that all connection references exist and use node NAMES (not IDs)
  if (workflow.nodes && workflow.connections) {
    const nodeNames = new Set(workflow.nodes.map(node => node.name));
@@ -457,171 +295,6 @@ export function hasWebhookTrigger(workflow: Workflow): boolean {
  );
}

/**
 * Validate filter-based node metadata (IF v2.2+, Switch v3.2+)
 * Returns array of error messages
 */
export function validateFilterBasedNodeMetadata(node: WorkflowNode): string[] {
  const errors: string[] = [];

  // Check if node is filter-based
  const isIFNode = node.type === 'n8n-nodes-base.if' && node.typeVersion >= 2.2;
  const isSwitchNode = node.type === 'n8n-nodes-base.switch' && node.typeVersion >= 3.2;

  if (!isIFNode && !isSwitchNode) {
    return errors; // Not a filter-based node
  }

  // Validate IF node
  if (isIFNode) {
    const conditions = (node.parameters.conditions as any);

    // Check conditions.options exists
    if (!conditions?.options) {
      errors.push(
        'Missing required "conditions.options". ' +
        'IF v2.2+ requires: {version: 2, leftValue: "", caseSensitive: true, typeValidation: "strict"}'
      );
    } else {
      // Validate required fields
      const requiredFields = {
        version: 2,
        leftValue: '',
        caseSensitive: 'boolean',
        typeValidation: 'strict'
      };

      for (const [field, expectedValue] of Object.entries(requiredFields)) {
        if (!(field in conditions.options)) {
          errors.push(
            `Missing required field "conditions.options.${field}". ` +
            `Expected value: ${typeof expectedValue === 'string' ? `"${expectedValue}"` : expectedValue}`
          );
        }
      }
    }

    // Validate operators in conditions
    if (conditions?.conditions && Array.isArray(conditions.conditions)) {
      conditions.conditions.forEach((condition: any, i: number) => {
        const operatorErrors = validateOperatorStructure(condition.operator, `conditions.conditions[${i}].operator`);
        errors.push(...operatorErrors);
      });
    }
  }

  // Validate Switch node
  if (isSwitchNode) {
    const rules = (node.parameters.rules as any);

    if (rules?.rules && Array.isArray(rules.rules)) {
      rules.rules.forEach((rule: any, ruleIndex: number) => {
        // Check rule.conditions.options
        if (!rule.conditions?.options) {
          errors.push(
            `Missing required "rules.rules[${ruleIndex}].conditions.options". ` +
            'Switch v3.2+ requires: {version: 2, leftValue: "", caseSensitive: true, typeValidation: "strict"}'
          );
        } else {
          // Validate required fields
          const requiredFields = {
            version: 2,
            leftValue: '',
            caseSensitive: 'boolean',
            typeValidation: 'strict'
          };

          for (const [field, expectedValue] of Object.entries(requiredFields)) {
            if (!(field in rule.conditions.options)) {
              errors.push(
                `Missing required field "rules.rules[${ruleIndex}].conditions.options.${field}". ` +
                `Expected value: ${typeof expectedValue === 'string' ? `"${expectedValue}"` : expectedValue}`
              );
            }
          }
        }

        // Validate operators in rule conditions
        if (rule.conditions?.conditions && Array.isArray(rule.conditions.conditions)) {
          rule.conditions.conditions.forEach((condition: any, condIndex: number) => {
            const operatorErrors = validateOperatorStructure(
              condition.operator,
              `rules.rules[${ruleIndex}].conditions.conditions[${condIndex}].operator`
            );
            errors.push(...operatorErrors);
          });
        }
      });
    }
  }

  return errors;
}

/**
 * Validate operator structure
 * Ensures operator has correct format: {type, operation, singleValue?}
 */
export function validateOperatorStructure(operator: any, path: string): string[] {
  const errors: string[] = [];

  if (!operator || typeof operator !== 'object') {
    errors.push(`${path}: operator is missing or not an object`);
    return errors;
  }

  // Check required field: type (data type, not operation name)
  if (!operator.type) {
    errors.push(
      `${path}: missing required field "type". ` +
      'Must be a data type: "string", "number", "boolean", "dateTime", "array", or "object"'
    );
  } else {
    const validTypes = ['string', 'number', 'boolean', 'dateTime', 'array', 'object'];
    if (!validTypes.includes(operator.type)) {
      errors.push(
        `${path}: invalid type "${operator.type}". ` +
        `Type must be a data type (${validTypes.join(', ')}), not an operation name. ` +
        'Did you mean to use the "operation" field?'
      );
    }
  }

  // Check required field: operation
  if (!operator.operation) {
    errors.push(
      `${path}: missing required field "operation". ` +
      'Operation specifies the comparison type (e.g., "equals", "contains", "isNotEmpty")'
    );
  }

  // Check singleValue based on operator type
  if (operator.operation) {
    const unaryOperators = ['isEmpty', 'isNotEmpty', 'true', 'false', 'isNumeric'];
    const isUnary = unaryOperators.includes(operator.operation);

    if (isUnary) {
      // Unary operators MUST have singleValue: true
      if (operator.singleValue !== true) {
        errors.push(
          `${path}: unary operator "${operator.operation}" requires "singleValue: true". ` +
          'Unary operators do not use rightValue.'
        );
      }
    } else {
      // Binary operators should NOT have singleValue: true
      if (operator.singleValue === true) {
        errors.push(
          `${path}: binary operator "${operator.operation}" should not have "singleValue: true". ` +
          'Only unary operators (isEmpty, isNotEmpty, true, false, isNumeric) need this property.'
        );
      }
    }
  }

  return errors;
}

// Get webhook URL from workflow
export function getWebhookUrl(workflow: Workflow): string | null {
  const webhookNode = workflow.nodes.find(node =>
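A short sketch of the operator rules enforced above, using validateOperatorStructure as defined:

// Unary operators must carry singleValue: true; binary ones must not.
const badOperator = { type: 'string', operation: 'isEmpty' }; // missing singleValue
console.log(validateOperatorStructure(badOperator, 'conditions.conditions[0].operator'));
// -> one error: unary operator "isEmpty" requires "singleValue: true"

const goodOperator = { type: 'string', operation: 'isEmpty', singleValue: true };
console.log(validateOperatorStructure(goodOperator, 'conditions.conditions[0].operator')); // -> []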
@@ -1,410 +0,0 @@
/**
 * Node Migration Service
 *
 * Handles smart auto-migration of node configurations during version upgrades.
 * Applies migration strategies from the breaking changes registry and detectors.
 *
 * Migration strategies:
 * - add_property: Add new required/optional properties with defaults
 * - remove_property: Remove deprecated properties
 * - rename_property: Rename properties that changed names
 * - set_default: Set default values for properties
 */

import { v4 as uuidv4 } from 'uuid';
import { BreakingChangeDetector, DetectedChange } from './breaking-change-detector';
import { NodeVersionService } from './node-version-service';

export interface MigrationResult {
  success: boolean;
  nodeId: string;
  nodeName: string;
  fromVersion: string;
  toVersion: string;
  appliedMigrations: AppliedMigration[];
  remainingIssues: string[];
  confidence: 'HIGH' | 'MEDIUM' | 'LOW';
  updatedNode: any; // The migrated node configuration
}

export interface AppliedMigration {
  propertyName: string;
  action: string;
  oldValue?: any;
  newValue?: any;
  description: string;
}

export class NodeMigrationService {
  constructor(
    private versionService: NodeVersionService,
    private breakingChangeDetector: BreakingChangeDetector
  ) {}

  /**
   * Migrate a node from its current version to a target version
   */
  async migrateNode(
    node: any,
    fromVersion: string,
    toVersion: string
  ): Promise<MigrationResult> {
    const nodeId = node.id || 'unknown';
    const nodeName = node.name || 'Unknown Node';
    const nodeType = node.type;

    // Analyze the version upgrade
    const analysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
      nodeType,
      fromVersion,
      toVersion
    );

    // Start with a copy of the node
    const migratedNode = JSON.parse(JSON.stringify(node));

    // Apply the version update
    migratedNode.typeVersion = this.parseVersion(toVersion);

    const appliedMigrations: AppliedMigration[] = [];
    const remainingIssues: string[] = [];

    // Apply auto-migratable changes
    for (const change of analysis.changes.filter(c => c.autoMigratable)) {
      const migration = this.applyMigration(migratedNode, change);

      if (migration) {
        appliedMigrations.push(migration);
      }
    }

    // Collect remaining manual issues
    for (const change of analysis.changes.filter(c => !c.autoMigratable)) {
      remainingIssues.push(
        `Manual action required for "${change.propertyName}": ${change.migrationHint}`
      );
    }

    // Determine confidence based on remaining issues
    let confidence: 'HIGH' | 'MEDIUM' | 'LOW' = 'HIGH';

    if (remainingIssues.length > 0) {
      confidence = remainingIssues.length > 3 ? 'LOW' : 'MEDIUM';
    }

    return {
      success: remainingIssues.length === 0,
      nodeId,
      nodeName,
      fromVersion,
      toVersion,
      appliedMigrations,
      remainingIssues,
      confidence,
      updatedNode: migratedNode
    };
  }

  /**
   * Apply a single migration change to a node
   */
  private applyMigration(node: any, change: DetectedChange): AppliedMigration | null {
    if (!change.migrationStrategy) return null;

    const { type, defaultValue, sourceProperty, targetProperty } = change.migrationStrategy;

    switch (type) {
      case 'add_property':
        return this.addProperty(node, change.propertyName, defaultValue, change);

      case 'remove_property':
        return this.removeProperty(node, change.propertyName, change);

      case 'rename_property':
        return this.renameProperty(node, sourceProperty!, targetProperty!, change);

      case 'set_default':
        return this.setDefault(node, change.propertyName, defaultValue, change);

      default:
        return null;
    }
  }

  /**
   * Add a new property to the node configuration
   */
  private addProperty(
    node: any,
    propertyPath: string,
    defaultValue: any,
    change: DetectedChange
  ): AppliedMigration {
    const value = this.resolveDefaultValue(propertyPath, defaultValue, node);

    // Handle nested property paths (e.g., "parameters.inputFieldMapping")
    const parts = propertyPath.split('.');
    let target = node;

    for (let i = 0; i < parts.length - 1; i++) {
      const part = parts[i];
      if (!target[part]) {
        target[part] = {};
      }
      target = target[part];
    }

    const finalKey = parts[parts.length - 1];
    target[finalKey] = value;

    return {
      propertyName: propertyPath,
      action: 'Added property',
      newValue: value,
      description: `Added "${propertyPath}" with default value`
    };
  }

  /**
   * Remove a deprecated property from the node configuration
   */
  private removeProperty(
    node: any,
    propertyPath: string,
    change: DetectedChange
  ): AppliedMigration | null {
    const parts = propertyPath.split('.');
    let target = node;

    for (let i = 0; i < parts.length - 1; i++) {
      const part = parts[i];
      if (!target[part]) return null; // Property doesn't exist
      target = target[part];
    }

    const finalKey = parts[parts.length - 1];
    const oldValue = target[finalKey];

    if (oldValue !== undefined) {
      delete target[finalKey];

      return {
        propertyName: propertyPath,
        action: 'Removed property',
        oldValue,
        description: `Removed deprecated property "${propertyPath}"`
      };
    }

    return null;
  }

  /**
   * Rename a property (move value from old name to new name)
   */
  private renameProperty(
    node: any,
    sourcePath: string,
    targetPath: string,
    change: DetectedChange
  ): AppliedMigration | null {
    // Get old value
    const sourceParts = sourcePath.split('.');
    let sourceTarget = node;

    for (let i = 0; i < sourceParts.length - 1; i++) {
      if (!sourceTarget[sourceParts[i]]) return null;
      sourceTarget = sourceTarget[sourceParts[i]];
    }

    const sourceKey = sourceParts[sourceParts.length - 1];
    const oldValue = sourceTarget[sourceKey];

    if (oldValue === undefined) return null; // Source doesn't exist

    // Set new value
    const targetParts = targetPath.split('.');
    let targetTarget = node;

    for (let i = 0; i < targetParts.length - 1; i++) {
      if (!targetTarget[targetParts[i]]) {
        targetTarget[targetParts[i]] = {};
      }
      targetTarget = targetTarget[targetParts[i]];
    }

    const targetKey = targetParts[targetParts.length - 1];
    targetTarget[targetKey] = oldValue;

    // Remove old value
    delete sourceTarget[sourceKey];

    return {
      propertyName: targetPath,
      action: 'Renamed property',
      oldValue: `${sourcePath}: ${JSON.stringify(oldValue)}`,
      newValue: `${targetPath}: ${JSON.stringify(oldValue)}`,
      description: `Renamed "${sourcePath}" to "${targetPath}"`
    };
  }

  /**
   * Set a default value for a property
   */
  private setDefault(
    node: any,
    propertyPath: string,
    defaultValue: any,
    change: DetectedChange
  ): AppliedMigration | null {
    const parts = propertyPath.split('.');
    let target = node;

    for (let i = 0; i < parts.length - 1; i++) {
      if (!target[parts[i]]) {
        target[parts[i]] = {};
      }
      target = target[parts[i]];
    }

    const finalKey = parts[parts.length - 1];

    // Only set if not already defined
    if (target[finalKey] === undefined) {
      const value = this.resolveDefaultValue(propertyPath, defaultValue, node);
      target[finalKey] = value;

      return {
        propertyName: propertyPath,
        action: 'Set default value',
        newValue: value,
        description: `Set default value for "${propertyPath}"`
      };
    }

    return null;
  }

  /**
   * Resolve default value with special handling for certain property types
   */
  private resolveDefaultValue(propertyPath: string, defaultValue: any, node: any): any {
    // Special case: webhookId needs a UUID
    if (propertyPath === 'webhookId' || propertyPath.endsWith('.webhookId')) {
      return uuidv4();
    }

    // Special case: webhook path needs a unique value
    if (propertyPath === 'path' || propertyPath.endsWith('.path')) {
      if (node.type === 'n8n-nodes-base.webhook') {
        return `/webhook-${Date.now()}`;
      }
    }

    // Return provided default or null
    return defaultValue !== null && defaultValue !== undefined ? defaultValue : null;
  }

  /**
   * Parse version string to number (for typeVersion field)
   */
  private parseVersion(version: string): number {
    const parts = version.split('.').map(Number);

    // Handle versions like "1.1" -> 1.1, "2.0" -> 2
    if (parts.length === 1) return parts[0];
    if (parts.length === 2) return parts[0] + parts[1] / 10;

    // For more complex versions, just use first number
    return parts[0];
  }

  /**
   * Validate that a migrated node is valid
   */
  async validateMigratedNode(node: any, nodeType: string): Promise<{
    valid: boolean;
    errors: string[];
    warnings: string[];
  }> {
    const errors: string[] = [];
    const warnings: string[] = [];

    // Basic validation
    if (!node.typeVersion) {
      errors.push('Missing typeVersion after migration');
    }

    if (!node.parameters) {
      errors.push('Missing parameters object');
    }

    // Check for common issues
    if (nodeType === 'n8n-nodes-base.webhook') {
      if (!node.parameters?.path) {
        errors.push('Webhook node missing required "path" parameter');
      }
      if (node.typeVersion >= 2.1 && !node.webhookId) {
        warnings.push('Webhook v2.1+ typically requires webhookId');
      }
    }

    if (nodeType === 'n8n-nodes-base.executeWorkflow') {
      if (node.typeVersion >= 1.1 && !node.parameters?.inputFieldMapping) {
        errors.push('Execute Workflow v1.1+ requires inputFieldMapping');
      }
    }

    return {
      valid: errors.length === 0,
      errors,
      warnings
    };
  }

  /**
   * Batch migrate multiple nodes in a workflow
   */
  async migrateWorkflowNodes(
    workflow: any,
    targetVersions: Record<string, string> // nodeId -> targetVersion
  ): Promise<{
    success: boolean;
    results: MigrationResult[];
    overallConfidence: 'HIGH' | 'MEDIUM' | 'LOW';
  }> {
    const results: MigrationResult[] = [];

    for (const node of workflow.nodes || []) {
      const targetVersion = targetVersions[node.id];

      if (targetVersion && node.typeVersion) {
        const currentVersion = node.typeVersion.toString();

        const result = await this.migrateNode(node, currentVersion, targetVersion);
        results.push(result);

        // Update node in place
        Object.assign(node, result.updatedNode);
      }
    }

    // Calculate overall confidence
    const confidences = results.map(r => r.confidence);
    let overallConfidence: 'HIGH' | 'MEDIUM' | 'LOW' = 'HIGH';

    if (confidences.includes('LOW')) {
      overallConfidence = 'LOW';
    } else if (confidences.includes('MEDIUM')) {
      overallConfidence = 'MEDIUM';
    }

    const success = results.every(r => r.success);

    return {
      success,
      results,
      overallConfidence
    };
  }
}
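// Usage sketch (illustrative only, not part of the original file). The node
// object and version strings below are hypothetical; the service instance is
// assumed to be wired with a version service and breaking change detector as
// in WorkflowAutoFixer's constructor further down.
export async function demoMigrateNode(migrator: NodeMigrationService): Promise<void> {
  const result = await migrator.migrateNode(
    { id: 'node-1', name: 'My Webhook', type: 'n8n-nodes-base.webhook', typeVersion: 1, parameters: {} },
    '1.0',
    '2.1'
  );
  if (!result.success) {
    console.warn(result.remainingIssues); // manual follow-ups for the caller
  }
}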
@@ -1,361 +0,0 @@
/**
 * Node Sanitizer Service
 *
 * Ensures nodes have complete metadata required by n8n UI.
 * Based on n8n AI Workflow Builder patterns:
 * - Merges node type defaults with user parameters
 * - Auto-adds required metadata for filter-based nodes (IF v2.2+, Switch v3.2+)
 * - Fixes operator structure
 * - Prevents "Could not find property option" errors
 */

import { INodeParameters } from 'n8n-workflow';
import { logger } from '../utils/logger';
import { WorkflowNode } from '../types/n8n-api';

/**
 * Sanitize a single node by adding required metadata
 */
export function sanitizeNode(node: WorkflowNode): WorkflowNode {
  const sanitized = { ...node };

  // Apply node-specific sanitization
  if (isFilterBasedNode(node.type, node.typeVersion)) {
    sanitized.parameters = sanitizeFilterBasedNode(
      sanitized.parameters as INodeParameters,
      node.type,
      node.typeVersion
    );
  }

  return sanitized;
}

/**
 * Sanitize all nodes in a workflow
 */
export function sanitizeWorkflowNodes(workflow: any): any {
  if (!workflow.nodes || !Array.isArray(workflow.nodes)) {
    return workflow;
  }

  return {
    ...workflow,
    nodes: workflow.nodes.map((node: any) => sanitizeNode(node))
  };
}

/**
 * Check if node is filter-based (IF v2.2+, Switch v3.2+)
 */
function isFilterBasedNode(nodeType: string, typeVersion: number): boolean {
  if (nodeType === 'n8n-nodes-base.if') {
    return typeVersion >= 2.2;
  }
  if (nodeType === 'n8n-nodes-base.switch') {
    return typeVersion >= 3.2;
  }
  return false;
}

/**
 * Sanitize filter-based nodes (IF v2.2+, Switch v3.2+)
 * Ensures conditions.options has complete structure
 */
function sanitizeFilterBasedNode(
  parameters: INodeParameters,
  nodeType: string,
  typeVersion: number
): INodeParameters {
  const sanitized = { ...parameters };

  // Handle IF node
  if (nodeType === 'n8n-nodes-base.if' && typeVersion >= 2.2) {
    sanitized.conditions = sanitizeFilterConditions(sanitized.conditions as any);
  }

  // Handle Switch node
  if (nodeType === 'n8n-nodes-base.switch' && typeVersion >= 3.2) {
    if (sanitized.rules && typeof sanitized.rules === 'object') {
      const rules = sanitized.rules as any;
      if (rules.rules && Array.isArray(rules.rules)) {
        rules.rules = rules.rules.map((rule: any) => ({
          ...rule,
          conditions: sanitizeFilterConditions(rule.conditions)
        }));
      }
    }
  }

  return sanitized;
}

/**
 * Sanitize filter conditions structure
 */
function sanitizeFilterConditions(conditions: any): any {
  if (!conditions || typeof conditions !== 'object') {
    return conditions;
  }

  const sanitized = { ...conditions };

  // Ensure options has complete structure
  if (!sanitized.options) {
    sanitized.options = {};
  }

  // Add required filter options metadata
  const requiredOptions = {
    version: 2,
    leftValue: '',
    caseSensitive: true,
    typeValidation: 'strict'
  };

  // Merge with existing options, preserving user values
  sanitized.options = {
    ...requiredOptions,
    ...sanitized.options
  };

  // Sanitize conditions array
  if (sanitized.conditions && Array.isArray(sanitized.conditions)) {
    sanitized.conditions = sanitized.conditions.map((condition: any) =>
      sanitizeCondition(condition)
    );
  }

  return sanitized;
}

/**
 * Sanitize a single condition
 */
function sanitizeCondition(condition: any): any {
  if (!condition || typeof condition !== 'object') {
    return condition;
  }

  const sanitized = { ...condition };

  // Ensure condition has an ID
  if (!sanitized.id) {
    sanitized.id = generateConditionId();
  }

  // Sanitize operator structure
  if (sanitized.operator) {
    sanitized.operator = sanitizeOperator(sanitized.operator);
  }

  return sanitized;
}

/**
 * Sanitize operator structure
 * Ensures operator has correct format: {type, operation, singleValue?}
 */
function sanitizeOperator(operator: any): any {
  if (!operator || typeof operator !== 'object') {
    return operator;
  }

  const sanitized = { ...operator };

  // Fix common mistake: type field used for operation name
  // WRONG: {type: "isNotEmpty"}
  // RIGHT: {type: "string", operation: "isNotEmpty"}
  if (sanitized.type && !sanitized.operation) {
    // Check if type value looks like an operation (lowercase, no dots)
    const typeValue = sanitized.type as string;
    if (isOperationName(typeValue)) {
      logger.debug(`Fixing operator structure: converting type="${typeValue}" to operation`);

      // Infer data type from operation
      const dataType = inferDataType(typeValue);
      sanitized.type = dataType;
      sanitized.operation = typeValue;
    }
  }

  // Set singleValue based on operator type
  if (sanitized.operation) {
    if (isUnaryOperator(sanitized.operation)) {
      // Unary operators require singleValue: true
      sanitized.singleValue = true;
    } else {
      // Binary operators should NOT have singleValue (or it should be false/undefined)
      // Remove it to prevent UI errors
      delete sanitized.singleValue;
    }
  }

  return sanitized;
}

/**
 * Check if string looks like an operation name (not a data type)
 */
function isOperationName(value: string): boolean {
  // Operation names are lowercase and don't contain dots
  // Data types are: string, number, boolean, dateTime, array, object
  const dataTypes = ['string', 'number', 'boolean', 'dateTime', 'array', 'object'];
  return !dataTypes.includes(value) && /^[a-z][a-zA-Z]*$/.test(value);
}

/**
 * Infer data type from operation name
 */
function inferDataType(operation: string): string {
  // Boolean operations
  const booleanOps = ['true', 'false', 'isEmpty', 'isNotEmpty'];
  if (booleanOps.includes(operation)) {
    return 'boolean';
  }

  // Number operations
  const numberOps = ['isNumeric', 'gt', 'gte', 'lt', 'lte'];
  if (numberOps.some(op => operation.includes(op))) {
    return 'number';
  }

  // Date operations
  const dateOps = ['after', 'before', 'afterDate', 'beforeDate'];
  if (dateOps.some(op => operation.includes(op))) {
    return 'dateTime';
  }

  // Default to string
  return 'string';
}

/**
 * Check if operator is unary (requires singleValue: true)
 */
function isUnaryOperator(operation: string): boolean {
  const unaryOps = [
    'isEmpty',
    'isNotEmpty',
    'true',
    'false',
    'isNumeric'
  ];
  return unaryOps.includes(operation);
}

/**
 * Generate unique condition ID
 */
function generateConditionId(): string {
  return `condition-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
}

/**
 * Validate that a node has complete metadata
 * Returns array of issues found
 */
export function validateNodeMetadata(node: WorkflowNode): string[] {
  const issues: string[] = [];

  if (!isFilterBasedNode(node.type, node.typeVersion)) {
    return issues; // Not a filter-based node
  }

  // Check IF node
  if (node.type === 'n8n-nodes-base.if') {
    const conditions = (node.parameters.conditions as any);
    if (!conditions?.options) {
      issues.push('Missing conditions.options');
    } else {
      const required = ['version', 'leftValue', 'typeValidation', 'caseSensitive'];
      for (const field of required) {
        if (!(field in conditions.options)) {
          issues.push(`Missing conditions.options.${field}`);
        }
      }
    }

    // Check operators
    if (conditions?.conditions && Array.isArray(conditions.conditions)) {
      for (let i = 0; i < conditions.conditions.length; i++) {
        const condition = conditions.conditions[i];
        const operatorIssues = validateOperator(condition.operator, `conditions.conditions[${i}].operator`);
        issues.push(...operatorIssues);
      }
    }
  }

  // Check Switch node
  if (node.type === 'n8n-nodes-base.switch') {
    const rules = (node.parameters.rules as any);
    if (rules?.rules && Array.isArray(rules.rules)) {
      for (let i = 0; i < rules.rules.length; i++) {
        const rule = rules.rules[i];
        if (!rule.conditions?.options) {
          issues.push(`Missing rules.rules[${i}].conditions.options`);
        } else {
          const required = ['version', 'leftValue', 'typeValidation', 'caseSensitive'];
          for (const field of required) {
            if (!(field in rule.conditions.options)) {
              issues.push(`Missing rules.rules[${i}].conditions.options.${field}`);
            }
          }
        }

        // Check operators
        if (rule.conditions?.conditions && Array.isArray(rule.conditions.conditions)) {
          for (let j = 0; j < rule.conditions.conditions.length; j++) {
            const condition = rule.conditions.conditions[j];
            const operatorIssues = validateOperator(
              condition.operator,
              `rules.rules[${i}].conditions.conditions[${j}].operator`
            );
            issues.push(...operatorIssues);
          }
        }
      }
    }
  }

  return issues;
}

/**
 * Validate operator structure
 */
function validateOperator(operator: any, path: string): string[] {
  const issues: string[] = [];

  if (!operator || typeof operator !== 'object') {
    issues.push(`${path}: operator is missing or not an object`);
    return issues;
  }

  if (!operator.type) {
    issues.push(`${path}: missing required field 'type'`);
  } else if (!['string', 'number', 'boolean', 'dateTime', 'array', 'object'].includes(operator.type)) {
    issues.push(`${path}: invalid type "${operator.type}" (must be data type, not operation)`);
  }

  if (!operator.operation) {
    issues.push(`${path}: missing required field 'operation'`);
  }

  // Check singleValue based on operator type
  if (operator.operation) {
    if (isUnaryOperator(operator.operation)) {
      // Unary operators MUST have singleValue: true
      if (operator.singleValue !== true) {
        issues.push(`${path}: unary operator "${operator.operation}" requires singleValue: true`);
      }
    } else {
      // Binary operators should NOT have singleValue
      if (operator.singleValue === true) {
        issues.push(`${path}: binary operator "${operator.operation}" should not have singleValue: true (only unary operators need this)`);
      }
    }
  }

  return issues;
}
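// Usage sketch (illustrative only, not part of the original file). Shows the
// sanitize-then-validate flow on a hypothetical IF v2.2+ node; after
// sanitizeNode fills conditions.options and fixes operators, the validator
// should report no remaining issues.
export function demoSanitizeIfNode(node: WorkflowNode): string[] {
  const sanitized = sanitizeNode(node);
  return validateNodeMetadata(sanitized);
}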
@@ -1038,8 +1038,15 @@ export class NodeSpecificValidators {
      delete autofix.continueOnFail;
    }

    // Note: responseNode mode validation moved to workflow-validator.ts
    // where it has access to node-level onError property (not just config/parameters)
    // Response mode validation
    if (responseMode === 'responseNode' && !config.onError && !config.continueOnFail) {
      errors.push({
        type: 'invalid_configuration',
        property: 'responseMode',
        message: 'responseNode mode requires onError: "continueRegularOutput"',
        fix: 'Set onError to ensure response is always sent'
      });
    }
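    // Illustrative (not part of the diff): a webhook config that passes the
    // check above pairs responseNode mode with an error strategy, e.g.
    //   { responseMode: 'responseNode', onError: 'continueRegularOutput' }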

    // Always output data for debugging
    if (!config.alwaysOutputData) {
@@ -1,377 +0,0 @@
/**
 * Node Version Service
 *
 * Central service for node version discovery, comparison, and upgrade path recommendation.
 * Provides caching for performance and integrates with the database and breaking change detector.
 */

import { NodeRepository } from '../database/node-repository';
import { BreakingChangeDetector } from './breaking-change-detector';

export interface NodeVersion {
  nodeType: string;
  version: string;
  packageName: string;
  displayName: string;
  isCurrentMax: boolean;
  minimumN8nVersion?: string;
  breakingChanges: any[];
  deprecatedProperties: string[];
  addedProperties: string[];
  releasedAt?: Date;
}

export interface VersionComparison {
  nodeType: string;
  currentVersion: string;
  latestVersion: string;
  isOutdated: boolean;
  versionGap: number; // How many versions behind
  hasBreakingChanges: boolean;
  recommendUpgrade: boolean;
  confidence: 'HIGH' | 'MEDIUM' | 'LOW';
  reason: string;
}

export interface UpgradePath {
  nodeType: string;
  fromVersion: string;
  toVersion: string;
  direct: boolean; // Can upgrade directly or needs intermediate steps
  intermediateVersions: string[]; // If multi-step upgrade needed
  totalBreakingChanges: number;
  autoMigratableChanges: number;
  manualRequiredChanges: number;
  estimatedEffort: 'LOW' | 'MEDIUM' | 'HIGH';
  steps: UpgradeStep[];
}

export interface UpgradeStep {
  fromVersion: string;
  toVersion: string;
  breakingChanges: number;
  migrationHints: string[];
}

/**
 * Node Version Service with caching
 */
export class NodeVersionService {
  private versionCache: Map<string, NodeVersion[]> = new Map();
  private cacheTTL: number = 5 * 60 * 1000; // 5 minutes
  private cacheTimestamps: Map<string, number> = new Map();

  constructor(
    private nodeRepository: NodeRepository,
    private breakingChangeDetector: BreakingChangeDetector
  ) {}

  /**
   * Get all available versions for a node type
   */
  getAvailableVersions(nodeType: string): NodeVersion[] {
    // Check cache first
    const cached = this.getCachedVersions(nodeType);
    if (cached) return cached;

    // Query from database
    const versions = this.nodeRepository.getNodeVersions(nodeType);

    // Cache the result
    this.cacheVersions(nodeType, versions);

    return versions;
  }

  /**
   * Get the latest available version for a node type
   */
  getLatestVersion(nodeType: string): string | null {
    const versions = this.getAvailableVersions(nodeType);

    if (versions.length === 0) {
      // Fallback to main nodes table
      const node = this.nodeRepository.getNode(nodeType);
      return node?.version || null;
    }

    // Find version marked as current max
    const maxVersion = versions.find(v => v.isCurrentMax);
    if (maxVersion) return maxVersion.version;

    // Fallback: sort and get highest
    const sorted = versions.sort((a, b) => this.compareVersions(b.version, a.version));
    return sorted[0]?.version || null;
  }

  /**
   * Compare two version strings segment by segment (returns -1, 0, or 1)
   */
  compareVersions(currentVersion: string, latestVersion: string): number {
    const parts1 = currentVersion.split('.').map(Number);
    const parts2 = latestVersion.split('.').map(Number);

    for (let i = 0; i < Math.max(parts1.length, parts2.length); i++) {
      const p1 = parts1[i] || 0;
      const p2 = parts2[i] || 0;

      if (p1 < p2) return -1;
      if (p1 > p2) return 1;
    }

    return 0;
  }

  /**
   * Analyze if a node version is outdated and should be upgraded
   */
  analyzeVersion(nodeType: string, currentVersion: string): VersionComparison {
    const latestVersion = this.getLatestVersion(nodeType);

    if (!latestVersion) {
      return {
        nodeType,
        currentVersion,
        latestVersion: currentVersion,
        isOutdated: false,
        versionGap: 0,
        hasBreakingChanges: false,
        recommendUpgrade: false,
        confidence: 'HIGH',
        reason: 'No version information available. Using current version.'
      };
    }

    const comparison = this.compareVersions(currentVersion, latestVersion);
    const isOutdated = comparison < 0;

    if (!isOutdated) {
      return {
        nodeType,
        currentVersion,
        latestVersion,
        isOutdated: false,
        versionGap: 0,
        hasBreakingChanges: false,
        recommendUpgrade: false,
        confidence: 'HIGH',
        reason: 'Node is already at the latest version.'
      };
    }

    // Calculate version gap
    const versionGap = this.calculateVersionGap(currentVersion, latestVersion);

    // Check for breaking changes
    const hasBreakingChanges = this.breakingChangeDetector.hasBreakingChanges(
      nodeType,
      currentVersion,
      latestVersion
    );

    // Determine upgrade recommendation and confidence
    let recommendUpgrade = true;
    let confidence: 'HIGH' | 'MEDIUM' | 'LOW' = 'HIGH';
    let reason = `Version ${latestVersion} available. `;

    if (hasBreakingChanges) {
      confidence = 'MEDIUM';
      reason += 'Contains breaking changes. Review before upgrading.';
    } else {
      reason += 'Safe to upgrade (no breaking changes detected).';
    }

    if (versionGap > 2) {
      confidence = 'LOW';
      reason += ` Version gap is large (${versionGap} versions). Consider incremental upgrade.`;
    }

    return {
      nodeType,
      currentVersion,
      latestVersion,
      isOutdated,
      versionGap,
      hasBreakingChanges,
      recommendUpgrade,
      confidence,
      reason
    };
  }
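  // Illustrative trace (not part of the original file): assuming the detector
  // reports breaking changes for a hypothetical upgrade from '1.1' to '2.1',
  // analyzeVersion above would return:
  //   { currentVersion: '1.1', latestVersion: '2.1', isOutdated: true,
  //     versionGap: 1, hasBreakingChanges: true, recommendUpgrade: true,
  //     confidence: 'MEDIUM',
  //     reason: 'Version 2.1 available. Contains breaking changes. Review before upgrading.' }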
  /**
   * Calculate the version gap (number of versions between)
   */
  private calculateVersionGap(fromVersion: string, toVersion: string): number {
    const from = fromVersion.split('.').map(Number);
    const to = toVersion.split('.').map(Number);

    // Simple gap calculation based on version numbers
    let gap = 0;

    for (let i = 0; i < Math.max(from.length, to.length); i++) {
      const f = from[i] || 0;
      const t = to[i] || 0;
      gap += Math.abs(t - f);
    }

    return gap;
  }

  /**
   * Suggest the best upgrade path for a node
   */
  async suggestUpgradePath(nodeType: string, currentVersion: string): Promise<UpgradePath | null> {
    const latestVersion = this.getLatestVersion(nodeType);

    if (!latestVersion) return null;

    const comparison = this.compareVersions(currentVersion, latestVersion);
    if (comparison >= 0) return null; // Already at latest or newer

    // Get all available versions between current and latest
    const allVersions = this.getAvailableVersions(nodeType);
    const intermediateVersions = allVersions
      .filter(v =>
        this.compareVersions(v.version, currentVersion) > 0 &&
        this.compareVersions(v.version, latestVersion) < 0
      )
      .map(v => v.version)
      .sort((a, b) => this.compareVersions(a, b));

    // Analyze the upgrade
    const analysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
      nodeType,
      currentVersion,
      latestVersion
    );

    // Determine if direct upgrade is safe
    const versionGap = this.calculateVersionGap(currentVersion, latestVersion);
    const direct = versionGap <= 1 || !analysis.hasBreakingChanges;

    // Generate upgrade steps
    const steps: UpgradeStep[] = [];

    if (direct || intermediateVersions.length === 0) {
      // Direct upgrade
      steps.push({
        fromVersion: currentVersion,
        toVersion: latestVersion,
        breakingChanges: analysis.changes.filter(c => c.isBreaking).length,
        migrationHints: analysis.recommendations
      });
    } else {
      // Multi-step upgrade through intermediate versions
      let stepFrom = currentVersion;

      for (const intermediateVersion of intermediateVersions) {
        const stepAnalysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
          nodeType,
          stepFrom,
          intermediateVersion
        );

        steps.push({
          fromVersion: stepFrom,
          toVersion: intermediateVersion,
          breakingChanges: stepAnalysis.changes.filter(c => c.isBreaking).length,
          migrationHints: stepAnalysis.recommendations
        });

        stepFrom = intermediateVersion;
      }

      // Final step to latest
      const finalStepAnalysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
        nodeType,
        stepFrom,
        latestVersion
      );

      steps.push({
        fromVersion: stepFrom,
        toVersion: latestVersion,
        breakingChanges: finalStepAnalysis.changes.filter(c => c.isBreaking).length,
        migrationHints: finalStepAnalysis.recommendations
      });
    }

    // Calculate estimated effort
    const totalBreakingChanges = steps.reduce((sum, step) => sum + step.breakingChanges, 0);
    let estimatedEffort: 'LOW' | 'MEDIUM' | 'HIGH' = 'LOW';

    if (totalBreakingChanges > 5 || steps.length > 3) {
      estimatedEffort = 'HIGH';
    } else if (totalBreakingChanges > 2 || steps.length > 1) {
      estimatedEffort = 'MEDIUM';
    }

    return {
      nodeType,
      fromVersion: currentVersion,
      toVersion: latestVersion,
      direct,
      intermediateVersions,
      totalBreakingChanges,
      autoMigratableChanges: analysis.autoMigratableCount,
      manualRequiredChanges: analysis.manualRequiredCount,
      estimatedEffort,
      steps
    };
  }

  /**
   * Check if a specific version exists for a node
   */
  versionExists(nodeType: string, version: string): boolean {
    const versions = this.getAvailableVersions(nodeType);
    return versions.some(v => v.version === version);
  }

  /**
   * Get version metadata (breaking changes, added/deprecated properties)
   */
  getVersionMetadata(nodeType: string, version: string): NodeVersion | null {
    const versionData = this.nodeRepository.getNodeVersion(nodeType, version);
    return versionData;
  }

  /**
   * Clear the version cache
   */
  clearCache(nodeType?: string): void {
    if (nodeType) {
      this.versionCache.delete(nodeType);
      this.cacheTimestamps.delete(nodeType);
    } else {
      this.versionCache.clear();
      this.cacheTimestamps.clear();
    }
  }

  /**
   * Get cached versions if still valid
   */
  private getCachedVersions(nodeType: string): NodeVersion[] | null {
    const cached = this.versionCache.get(nodeType);
    const timestamp = this.cacheTimestamps.get(nodeType);

    if (cached && timestamp) {
      const age = Date.now() - timestamp;
      if (age < this.cacheTTL) {
        return cached;
      }
    }

    return null;
  }

  /**
   * Cache versions with timestamp
   */
  private cacheVersions(nodeType: string, versions: NodeVersion[]): void {
    this.versionCache.set(nodeType, versions);
    this.cacheTimestamps.set(nodeType, Date.now());
  }
}
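// Usage sketch (illustrative only, not part of the original file). The node
// type and version are hypothetical; the result shape follows UpgradePath above.
export async function demoSuggestPath(service: NodeVersionService): Promise<void> {
  const path = await service.suggestUpgradePath('n8n-nodes-base.webhook', '1.0');
  if (path) {
    console.log(
      `${path.fromVersion} -> ${path.toVersion} in ${path.steps.length} step(s), effort: ${path.estimatedEffort}`
    );
  }
}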
@@ -1,423 +0,0 @@
/**
 * Post-Update Validator
 *
 * Generates comprehensive, AI-friendly migration reports after node version upgrades.
 * Provides actionable guidance for AI agents on what manual steps are needed.
 *
 * Validation includes:
 * - New required properties
 * - Deprecated/removed properties
 * - Behavior changes
 * - Step-by-step migration instructions
 */

import { BreakingChangeDetector, DetectedChange } from './breaking-change-detector';
import { MigrationResult } from './node-migration-service';
import { NodeVersionService } from './node-version-service';

export interface PostUpdateGuidance {
  nodeId: string;
  nodeName: string;
  nodeType: string;
  oldVersion: string;
  newVersion: string;
  migrationStatus: 'complete' | 'partial' | 'manual_required';
  requiredActions: RequiredAction[];
  deprecatedProperties: DeprecatedProperty[];
  behaviorChanges: BehaviorChange[];
  migrationSteps: string[];
  confidence: 'HIGH' | 'MEDIUM' | 'LOW';
  estimatedTime: string; // e.g., "5 minutes", "15 minutes"
}

export interface RequiredAction {
  type: 'ADD_PROPERTY' | 'UPDATE_PROPERTY' | 'CONFIGURE_OPTION' | 'REVIEW_CONFIGURATION';
  property: string;
  reason: string;
  suggestedValue?: any;
  currentValue?: any;
  documentation?: string;
  priority: 'CRITICAL' | 'HIGH' | 'MEDIUM' | 'LOW';
}

export interface DeprecatedProperty {
  property: string;
  status: 'removed' | 'deprecated';
  replacement?: string;
  action: 'remove' | 'replace' | 'ignore';
  impact: 'breaking' | 'warning';
}

export interface BehaviorChange {
  aspect: string; // e.g., "data passing", "webhook handling"
  oldBehavior: string;
  newBehavior: string;
  impact: 'HIGH' | 'MEDIUM' | 'LOW';
  actionRequired: boolean;
  recommendation: string;
}

export class PostUpdateValidator {
  constructor(
    private versionService: NodeVersionService,
    private breakingChangeDetector: BreakingChangeDetector
  ) {}

  /**
   * Generate comprehensive post-update guidance for a migrated node
   */
  async generateGuidance(
    nodeId: string,
    nodeName: string,
    nodeType: string,
    oldVersion: string,
    newVersion: string,
    migrationResult: MigrationResult
  ): Promise<PostUpdateGuidance> {
    // Analyze the version upgrade
    const analysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
      nodeType,
      oldVersion,
      newVersion
    );

    // Determine migration status
    const migrationStatus = this.determineMigrationStatus(migrationResult, analysis.changes);

    // Generate required actions
    const requiredActions = this.generateRequiredActions(
      migrationResult,
      analysis.changes,
      nodeType
    );

    // Identify deprecated properties
    const deprecatedProperties = this.identifyDeprecatedProperties(analysis.changes);

    // Document behavior changes
    const behaviorChanges = this.documentBehaviorChanges(nodeType, oldVersion, newVersion);

    // Generate step-by-step migration instructions
    const migrationSteps = this.generateMigrationSteps(
      requiredActions,
      deprecatedProperties,
      behaviorChanges
    );

    // Calculate confidence and estimated time
    const confidence = this.calculateConfidence(requiredActions, migrationStatus);
    const estimatedTime = this.estimateTime(requiredActions, behaviorChanges);

    return {
      nodeId,
      nodeName,
      nodeType,
      oldVersion,
      newVersion,
      migrationStatus,
      requiredActions,
      deprecatedProperties,
      behaviorChanges,
      migrationSteps,
      confidence,
      estimatedTime
    };
  }

  /**
   * Determine the migration status based on results and changes
   */
  private determineMigrationStatus(
    migrationResult: MigrationResult,
    changes: DetectedChange[]
  ): 'complete' | 'partial' | 'manual_required' {
    if (migrationResult.remainingIssues.length === 0) {
      return 'complete';
    }

    const criticalIssues = changes.filter(c => c.isBreaking && !c.autoMigratable);

    if (criticalIssues.length > 0) {
      return 'manual_required';
    }

    return 'partial';
  }

  /**
   * Generate actionable required actions for the AI agent
   */
  private generateRequiredActions(
    migrationResult: MigrationResult,
    changes: DetectedChange[],
    nodeType: string
  ): RequiredAction[] {
    const actions: RequiredAction[] = [];

    // Actions from remaining issues (not auto-migrated)
    const manualChanges = changes.filter(c => !c.autoMigratable);

    for (const change of manualChanges) {
      actions.push({
        type: this.mapChangeTypeToActionType(change.changeType),
        property: change.propertyName,
        reason: change.migrationHint,
        suggestedValue: change.newValue,
        currentValue: change.oldValue,
        documentation: this.getPropertyDocumentation(nodeType, change.propertyName),
        priority: this.mapSeverityToPriority(change.severity)
      });
    }

    return actions;
  }

  /**
   * Identify deprecated or removed properties
   */
  private identifyDeprecatedProperties(changes: DetectedChange[]): DeprecatedProperty[] {
    const deprecated: DeprecatedProperty[] = [];

    for (const change of changes) {
      if (change.changeType === 'removed') {
        deprecated.push({
          property: change.propertyName,
          status: 'removed',
          replacement: change.migrationStrategy?.targetProperty,
          action: change.autoMigratable ? 'remove' : 'replace',
          impact: change.isBreaking ? 'breaking' : 'warning'
        });
      }
    }

    return deprecated;
  }

  /**
   * Document behavior changes for specific nodes
   */
  private documentBehaviorChanges(
    nodeType: string,
    oldVersion: string,
    newVersion: string
  ): BehaviorChange[] {
    const changes: BehaviorChange[] = [];

    // Execute Workflow node behavior changes
    if (nodeType === 'n8n-nodes-base.executeWorkflow') {
      if (this.versionService.compareVersions(oldVersion, '1.1') < 0 &&
          this.versionService.compareVersions(newVersion, '1.1') >= 0) {
        changes.push({
          aspect: 'Data passing to sub-workflows',
          oldBehavior: 'Automatic data passing - all data from parent workflow automatically available',
          newBehavior: 'Explicit field mapping required - must define inputFieldMapping to pass specific fields',
          impact: 'HIGH',
          actionRequired: true,
          recommendation: 'Define inputFieldMapping with specific field mappings between parent and child workflows. Review data dependencies.'
        });
      }
    }

    // Webhook node behavior changes
    if (nodeType === 'n8n-nodes-base.webhook') {
      if (this.versionService.compareVersions(oldVersion, '2.1') < 0 &&
          this.versionService.compareVersions(newVersion, '2.1') >= 0) {
        changes.push({
          aspect: 'Webhook persistence',
          oldBehavior: 'Webhook URL changes on workflow updates',
          newBehavior: 'Stable webhook URL via webhookId field',
          impact: 'MEDIUM',
          actionRequired: false,
          recommendation: 'Webhook URLs now remain stable across workflow updates. Update external systems if needed.'
        });
      }

      if (this.versionService.compareVersions(oldVersion, '2.0') < 0 &&
          this.versionService.compareVersions(newVersion, '2.0') >= 0) {
        changes.push({
          aspect: 'Response handling',
          oldBehavior: 'Automatic response after webhook trigger',
          newBehavior: 'Configurable response mode (onReceived vs lastNode)',
          impact: 'MEDIUM',
          actionRequired: true,
          recommendation: 'Review responseMode setting. Use "onReceived" for immediate responses or "lastNode" to wait for workflow completion.'
        });
      }
    }

    return changes;
  }

  /**
   * Generate step-by-step migration instructions for AI agents
   */
  private generateMigrationSteps(
    requiredActions: RequiredAction[],
    deprecatedProperties: DeprecatedProperty[],
    behaviorChanges: BehaviorChange[]
  ): string[] {
    const steps: string[] = [];
    let stepNumber = 1;

    // Start with deprecations
    if (deprecatedProperties.length > 0) {
      steps.push(`Step ${stepNumber++}: Remove deprecated properties`);
      for (const dep of deprecatedProperties) {
        steps.push(`  - Remove "${dep.property}" ${dep.replacement ? `(use "${dep.replacement}" instead)` : ''}`);
      }
    }

    // Then critical actions
    const criticalActions = requiredActions.filter(a => a.priority === 'CRITICAL');
    if (criticalActions.length > 0) {
      steps.push(`Step ${stepNumber++}: Address critical configuration requirements`);
      for (const action of criticalActions) {
        steps.push(`  - ${action.property}: ${action.reason}`);
        if (action.suggestedValue !== undefined) {
          steps.push(`    Suggested value: ${JSON.stringify(action.suggestedValue)}`);
        }
      }
    }

    // High priority actions
    const highActions = requiredActions.filter(a => a.priority === 'HIGH');
    if (highActions.length > 0) {
      steps.push(`Step ${stepNumber++}: Configure required properties`);
      for (const action of highActions) {
        steps.push(`  - ${action.property}: ${action.reason}`);
      }
    }

    // Behavior change adaptations
    const actionRequiredChanges = behaviorChanges.filter(c => c.actionRequired);
    if (actionRequiredChanges.length > 0) {
      steps.push(`Step ${stepNumber++}: Adapt to behavior changes`);
      for (const change of actionRequiredChanges) {
        steps.push(`  - ${change.aspect}: ${change.recommendation}`);
      }
    }

    // Medium/Low priority actions
    const otherActions = requiredActions.filter(a => a.priority === 'MEDIUM' || a.priority === 'LOW');
    if (otherActions.length > 0) {
      steps.push(`Step ${stepNumber++}: Review optional configurations`);
      for (const action of otherActions) {
        steps.push(`  - ${action.property}: ${action.reason}`);
      }
    }

    // Final validation step
    steps.push(`Step ${stepNumber}: Test workflow execution`);
    steps.push('  - Validate all node configurations');
    steps.push('  - Run a test execution');
    steps.push('  - Verify expected behavior');

    return steps;
  }
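  // Illustrative output (not part of the original file; property names are
  // hypothetical): with one deprecated property and one critical action, the
  // method above yields lines like:
  //   Step 1: Remove deprecated properties
  //     - Remove "oldProperty" (use "newProperty" instead)
  //   Step 2: Address critical configuration requirements
  //     - inputFieldMapping: <migration hint from the detector>
  //   Step 3: Test workflow execution
  //     - Validate all node configurations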
  /**
   * Map change type to action type
   */
  private mapChangeTypeToActionType(
    changeType: string
  ): 'ADD_PROPERTY' | 'UPDATE_PROPERTY' | 'CONFIGURE_OPTION' | 'REVIEW_CONFIGURATION' {
    switch (changeType) {
      case 'added':
        return 'ADD_PROPERTY';
      case 'requirement_changed':
      case 'type_changed':
        return 'UPDATE_PROPERTY';
      case 'default_changed':
        return 'CONFIGURE_OPTION';
      default:
        return 'REVIEW_CONFIGURATION';
    }
  }

  /**
   * Map severity to priority
   */
  private mapSeverityToPriority(
    severity: 'LOW' | 'MEDIUM' | 'HIGH'
  ): 'CRITICAL' | 'HIGH' | 'MEDIUM' | 'LOW' {
    if (severity === 'HIGH') return 'CRITICAL';
    return severity;
  }

  /**
   * Get documentation for a property (placeholder - would integrate with node docs)
   */
  private getPropertyDocumentation(nodeType: string, propertyName: string): string {
    // In future, this would fetch from node documentation
    return `See n8n documentation for ${nodeType} - ${propertyName}`;
  }

  /**
   * Calculate overall confidence in the migration
   */
  private calculateConfidence(
    requiredActions: RequiredAction[],
    migrationStatus: 'complete' | 'partial' | 'manual_required'
  ): 'HIGH' | 'MEDIUM' | 'LOW' {
    if (migrationStatus === 'complete') return 'HIGH';

    const criticalActions = requiredActions.filter(a => a.priority === 'CRITICAL');

    if (migrationStatus === 'manual_required' || criticalActions.length > 3) {
      return 'LOW';
    }

    return 'MEDIUM';
  }

  /**
   * Estimate time required for manual migration steps
   */
  private estimateTime(
    requiredActions: RequiredAction[],
    behaviorChanges: BehaviorChange[]
  ): string {
    const criticalCount = requiredActions.filter(a => a.priority === 'CRITICAL').length;
    const highCount = requiredActions.filter(a => a.priority === 'HIGH').length;
    const behaviorCount = behaviorChanges.filter(c => c.actionRequired).length;

    const totalComplexity = criticalCount * 5 + highCount * 3 + behaviorCount * 2;

    if (totalComplexity === 0) return '< 1 minute';
    if (totalComplexity <= 5) return '2-5 minutes';
    if (totalComplexity <= 10) return '5-10 minutes';
    if (totalComplexity <= 20) return '10-20 minutes';
    return '20+ minutes';
  }

  /**
   * Generate a human-readable summary for logging/display
   */
  generateSummary(guidance: PostUpdateGuidance): string {
    const lines: string[] = [];

    lines.push(`Node "${guidance.nodeName}" upgraded from v${guidance.oldVersion} to v${guidance.newVersion}`);
    lines.push(`Status: ${guidance.migrationStatus.toUpperCase()}`);
    lines.push(`Confidence: ${guidance.confidence}`);
    lines.push(`Estimated time: ${guidance.estimatedTime}`);

    if (guidance.requiredActions.length > 0) {
      lines.push(`\nRequired actions: ${guidance.requiredActions.length}`);
      for (const action of guidance.requiredActions.slice(0, 3)) {
        lines.push(`  - [${action.priority}] ${action.property}: ${action.reason}`);
      }
      if (guidance.requiredActions.length > 3) {
        lines.push(`  ... and ${guidance.requiredActions.length - 3} more`);
      }
    }

    if (guidance.behaviorChanges.length > 0) {
      lines.push(`\nBehavior changes: ${guidance.behaviorChanges.length}`);
      for (const change of guidance.behaviorChanges) {
        lines.push(`  - ${change.aspect}: ${change.newBehavior}`);
      }
    }

    return lines.join('\n');
  }
}
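// Usage sketch (illustrative only, not part of the original file). The ids,
// names, and versions are hypothetical; migrationResult would come from
// NodeMigrationService.migrateNode.
export async function demoGuidance(
  validator: PostUpdateValidator,
  migrationResult: MigrationResult
): Promise<void> {
  const guidance = await validator.generateGuidance(
    'node-1', 'My Webhook', 'n8n-nodes-base.webhook', '1.0', '2.1', migrationResult
  );
  console.log(validator.generateSummary(guidance));
}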
@@ -16,10 +16,6 @@ import {
} from '../types/workflow-diff';
import { WorkflowNode, Workflow } from '../types/n8n-api';
import { Logger } from '../utils/logger';
import { NodeVersionService } from './node-version-service';
import { BreakingChangeDetector } from './breaking-change-detector';
import { NodeMigrationService } from './node-migration-service';
import { PostUpdateValidator, PostUpdateGuidance } from './post-update-validator';

const logger = new Logger({ prefix: '[WorkflowAutoFixer]' });

@@ -29,9 +25,7 @@ export type FixType =
  | 'typeversion-correction'
  | 'error-output-config'
  | 'node-type-correction'
  | 'webhook-missing-path'
  | 'typeversion-upgrade' // NEW: Proactive version upgrades
  | 'version-migration'; // NEW: Smart version migrations with breaking changes
  | 'webhook-missing-path';

export interface AutoFixConfig {
  applyFixes: boolean;
@@ -59,7 +53,6 @@ export interface AutoFixResult {
    byType: Record<FixType, number>;
    byConfidence: Record<FixConfidenceLevel, number>;
  };
  postUpdateGuidance?: PostUpdateGuidance[]; // NEW: AI-friendly migration guidance
}

export interface NodeFormatIssue extends ExpressionFormatIssue {
@@ -98,34 +91,25 @@ export class WorkflowAutoFixer {
    maxFixes: 50
  };
  private similarityService: NodeSimilarityService | null = null;
  private versionService: NodeVersionService | null = null;
  private breakingChangeDetector: BreakingChangeDetector | null = null;
  private migrationService: NodeMigrationService | null = null;
  private postUpdateValidator: PostUpdateValidator | null = null;

  constructor(repository?: NodeRepository) {
    if (repository) {
      this.similarityService = new NodeSimilarityService(repository);
      this.breakingChangeDetector = new BreakingChangeDetector(repository);
      this.versionService = new NodeVersionService(repository, this.breakingChangeDetector);
      this.migrationService = new NodeMigrationService(this.versionService, this.breakingChangeDetector);
      this.postUpdateValidator = new PostUpdateValidator(this.versionService, this.breakingChangeDetector);
    }
  }

  /**
   * Generate fix operations from validation results
   */
  async generateFixes(
  generateFixes(
    workflow: Workflow,
    validationResult: WorkflowValidationResult,
    formatIssues: ExpressionFormatIssue[] = [],
    config: Partial<AutoFixConfig> = {}
  ): Promise<AutoFixResult> {
  ): AutoFixResult {
    const fullConfig = { ...this.defaultConfig, ...config };
    const operations: WorkflowDiffOperation[] = [];
    const fixes: FixOperation[] = [];
    const postUpdateGuidance: PostUpdateGuidance[] = [];

    // Create a map for quick node lookup
    const nodeMap = new Map<string, WorkflowNode>();
@@ -159,16 +143,6 @@ export class WorkflowAutoFixer {
      this.processWebhookPathFixes(validationResult, nodeMap, operations, fixes);
    }

    // NEW: Process version upgrades (HIGH/MEDIUM confidence)
    if (!fullConfig.fixTypes || fullConfig.fixTypes.includes('typeversion-upgrade')) {
      await this.processVersionUpgradeFixes(workflow, nodeMap, operations, fixes, postUpdateGuidance);
    }

    // NEW: Process version migrations with breaking changes (MEDIUM/LOW confidence)
    if (!fullConfig.fixTypes || fullConfig.fixTypes.includes('version-migration')) {
      await this.processVersionMigrationFixes(workflow, nodeMap, operations, fixes, postUpdateGuidance);
    }

    // Filter by confidence threshold
    const filteredFixes = this.filterByConfidence(fixes, fullConfig.confidenceThreshold);
    const filteredOperations = this.filterOperationsByFixes(operations, filteredFixes, fixes);
@@ -185,8 +159,7 @@ export class WorkflowAutoFixer {
      operations: limitedOperations,
      fixes: limitedFixes,
      summary,
      stats,
      postUpdateGuidance: postUpdateGuidance.length > 0 ? postUpdateGuidance : undefined
      stats
    };
  }

@@ -605,9 +578,7 @@ export class WorkflowAutoFixer {
        'typeversion-correction': 0,
        'error-output-config': 0,
        'node-type-correction': 0,
        'webhook-missing-path': 0,
        'typeversion-upgrade': 0,
        'version-migration': 0
        'webhook-missing-path': 0
      },
      byConfidence: {
        'high': 0,
@@ -650,186 +621,10 @@ export class WorkflowAutoFixer {
      parts.push(`${stats.byType['webhook-missing-path']} webhook ${stats.byType['webhook-missing-path'] === 1 ? 'path' : 'paths'}`);
    }

    if (stats.byType['typeversion-upgrade'] > 0) {
      parts.push(`${stats.byType['typeversion-upgrade']} version ${stats.byType['typeversion-upgrade'] === 1 ? 'upgrade' : 'upgrades'}`);
    }
    if (stats.byType['version-migration'] > 0) {
      parts.push(`${stats.byType['version-migration']} version ${stats.byType['version-migration'] === 1 ? 'migration' : 'migrations'}`);
    }

    if (parts.length === 0) {
      return `Fixed ${stats.total} ${stats.total === 1 ? 'issue' : 'issues'}`;
    }

    return `Fixed ${parts.join(', ')}`;
  }

  /**
   * Process version upgrade fixes (proactive upgrades to latest versions)
   * HIGH confidence for non-breaking upgrades, MEDIUM for upgrades with auto-migratable changes
   */
  private async processVersionUpgradeFixes(
    workflow: Workflow,
    nodeMap: Map<string, WorkflowNode>,
    operations: WorkflowDiffOperation[],
    fixes: FixOperation[],
    postUpdateGuidance: PostUpdateGuidance[]
  ): Promise<void> {
    if (!this.versionService || !this.migrationService || !this.postUpdateValidator) {
      logger.warn('Version services not initialized. Skipping version upgrade fixes.');
      return;
    }

    for (const node of workflow.nodes) {
      if (!node.typeVersion || !node.type) continue;

      const currentVersion = node.typeVersion.toString();
      const analysis = this.versionService.analyzeVersion(node.type, currentVersion);

      // Only upgrade if outdated and recommended
      if (!analysis.isOutdated || !analysis.recommendUpgrade) continue;

      // Skip if confidence is too low
      if (analysis.confidence === 'LOW') continue;

      const latestVersion = analysis.latestVersion;

      // Attempt migration
      try {
        const migrationResult = await this.migrationService.migrateNode(
          node,
          currentVersion,
          latestVersion
        );

        // Create fix operation
        fixes.push({
          node: node.name,
          field: 'typeVersion',
          type: 'typeversion-upgrade',
          before: currentVersion,
          after: latestVersion,
          confidence: analysis.hasBreakingChanges ? 'medium' : 'high',
          description: `Upgrade ${node.name} from v${currentVersion} to v${latestVersion}. ${analysis.reason}`
        });

        // Create update operation
        const operation: UpdateNodeOperation = {
          type: 'updateNode',
          nodeId: node.id,
          updates: {
            typeVersion: parseFloat(latestVersion),
            parameters: migrationResult.updatedNode.parameters,
            ...(migrationResult.updatedNode.webhookId && { webhookId: migrationResult.updatedNode.webhookId })
          }
        };
        operations.push(operation);

        // Generate post-update guidance
        const guidance = await this.postUpdateValidator.generateGuidance(
          node.id,
          node.name,
          node.type,
          currentVersion,
          latestVersion,
          migrationResult
        );

        postUpdateGuidance.push(guidance);

        logger.info(`Generated version upgrade fix for ${node.name}: ${currentVersion} → ${latestVersion}`, {
          appliedMigrations: migrationResult.appliedMigrations.length,
          remainingIssues: migrationResult.remainingIssues.length
        });
      } catch (error) {
        logger.error(`Failed to process version upgrade for ${node.name}`, { error });
      }
    }
  }

  /**
   * Process version migration fixes (handle breaking changes with smart migrations)
   * MEDIUM/LOW confidence for migrations requiring manual intervention
   */
  private async processVersionMigrationFixes(
    workflow: Workflow,
    nodeMap: Map<string, WorkflowNode>,
    operations: WorkflowDiffOperation[],
    fixes: FixOperation[],
    postUpdateGuidance: PostUpdateGuidance[]
  ): Promise<void> {
    // This method handles migrations that weren't covered by typeversion-upgrade
    // Focuses on nodes with complex breaking changes that need manual review

    if (!this.versionService || !this.breakingChangeDetector || !this.postUpdateValidator) {
      logger.warn('Version services not initialized. Skipping version migration fixes.');
      return;
    }

    for (const node of workflow.nodes) {
      if (!node.typeVersion || !node.type) continue;

      const currentVersion = node.typeVersion.toString();
      const latestVersion = this.versionService.getLatestVersion(node.type);

      if (!latestVersion || currentVersion === latestVersion) continue;

      // Check if this has breaking changes
      const hasBreaking = this.breakingChangeDetector.hasBreakingChanges(
        node.type,
        currentVersion,
        latestVersion
      );

      if (!hasBreaking) continue; // Already handled by typeversion-upgrade

      // Analyze the migration
      const analysis = await this.breakingChangeDetector.analyzeVersionUpgrade(
        node.type,
        currentVersion,
latestVersion
|
||||
);
|
||||
|
||||
// Only proceed if there are non-auto-migratable changes
|
||||
if (analysis.autoMigratableCount === analysis.changes.length) continue;
|
||||
|
||||
// Generate guidance for manual migration
|
||||
const guidance = await this.postUpdateValidator.generateGuidance(
|
||||
node.id,
|
||||
node.name,
|
||||
node.type,
|
||||
currentVersion,
|
||||
latestVersion,
|
||||
{
|
||||
success: false,
|
||||
nodeId: node.id,
|
||||
nodeName: node.name,
|
||||
fromVersion: currentVersion,
|
||||
toVersion: latestVersion,
|
||||
appliedMigrations: [],
|
||||
remainingIssues: analysis.recommendations,
|
||||
confidence: analysis.overallSeverity === 'HIGH' ? 'LOW' : 'MEDIUM',
|
||||
updatedNode: node
|
||||
}
|
||||
);
|
||||
|
||||
// Create a fix entry (won't be auto-applied, just documented)
|
||||
fixes.push({
|
||||
node: node.name,
|
||||
field: 'typeVersion',
|
||||
type: 'version-migration',
|
||||
before: currentVersion,
|
||||
after: latestVersion,
|
||||
confidence: guidance.confidence === 'HIGH' ? 'medium' : 'low',
|
||||
description: `Version migration required: ${node.name} v${currentVersion} → v${latestVersion}. ${analysis.manualRequiredCount} manual action(s) required.`
|
||||
});
|
||||
|
||||
postUpdateGuidance.push(guidance);
|
||||
|
||||
logger.info(`Documented version migration for ${node.name}`, {
|
||||
breakingChanges: analysis.changes.filter(c => c.isBreaking).length,
|
||||
manualRequired: analysis.manualRequiredCount
|
||||
});
|
||||
}
|
||||
}
|
||||
}
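
The hunks above wire the two version fix types into the auto-fix pipeline. A minimal sketch of how a caller might drive them, assuming a `generateFixes(workflow, validationResult, config)` entry point (the method name is not visible in these hunks); the `fixTypes`, `confidenceThreshold`, and result fields come from the diff itself:

```typescript
// Hedged sketch: generateFixes and the constructor arguments are assumptions.
const fixer = new WorkflowAutoFixer(/* repository + version services */);

const result = await fixer.generateFixes(workflow, validationResult, {
  fixTypes: ['typeversion-upgrade', 'version-migration'],
  confidenceThreshold: 'medium', // LOW-confidence upgrades are skipped anyway
});

console.log(result.summary); // e.g. "Fixed 2 version upgrades, 1 version migration"
for (const guidance of result.postUpdateGuidance ?? []) {
  console.log(guidance);     // per-node manual follow-up steps
}
```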
@@ -31,16 +31,10 @@ import {
import { Workflow, WorkflowNode, WorkflowConnection } from '../types/n8n-api';
import { Logger } from '../utils/logger';
import { validateWorkflowNode, validateWorkflowConnections } from './n8n-validation';
import { sanitizeNode, sanitizeWorkflowNodes } from './node-sanitizer';

const logger = new Logger({ prefix: '[WorkflowDiffEngine]' });

export class WorkflowDiffEngine {
  // Track node name changes during operations for connection reference updates
  private renameMap: Map<string, string> = new Map();
  // Track warnings during operation processing
  private warnings: WorkflowDiffValidationError[] = [];

  /**
   * Apply diff operations to a workflow
   */
@@ -49,10 +43,6 @@ export class WorkflowDiffEngine {
    request: WorkflowDiffRequest
  ): Promise<WorkflowDiffResult> {
    try {
      // Reset tracking for this diff operation
      this.renameMap.clear();
      this.warnings = [];

      // Clone workflow to avoid modifying original
      const workflowCopy = JSON.parse(JSON.stringify(workflow));

@@ -103,12 +93,6 @@
        }
      }

      // Update connection references after all node renames (even in continueOnError mode)
      if (this.renameMap.size > 0 && appliedIndices.length > 0) {
        this.updateConnectionReferences(workflowCopy);
        logger.debug(`Auto-updated ${this.renameMap.size} node name references in connections (continueOnError mode)`);
      }

      // If validateOnly flag is set, return success without applying
      if (request.validateOnly) {
        return {
@@ -117,7 +101,6 @@
            ? 'Validation successful. All operations are valid.'
            : `Validation completed with ${errors.length} errors.`,
          errors: errors.length > 0 ? errors : undefined,
          warnings: this.warnings.length > 0 ? this.warnings : undefined,
          applied: appliedIndices,
          failed: failedIndices
        };
@@ -130,7 +113,6 @@
        operationsApplied: appliedIndices.length,
        message: `Applied ${appliedIndices.length} operations, ${failedIndices.length} failed (continueOnError mode)`,
        errors: errors.length > 0 ? errors : undefined,
        warnings: this.warnings.length > 0 ? this.warnings : undefined,
        applied: appliedIndices,
        failed: failedIndices
      };
@@ -164,12 +146,6 @@
        }
      }

      // Update connection references after all node renames
      if (this.renameMap.size > 0) {
        this.updateConnectionReferences(workflowCopy);
        logger.debug(`Auto-updated ${this.renameMap.size} node name references in connections`);
      }

      // Pass 2: Validate and apply other operations (connections, metadata)
      for (const { operation, index } of otherOperations) {
        const error = this.validateOperation(workflowCopy, operation);
@@ -198,13 +174,6 @@
        }
      }

      // Sanitize ALL nodes in the workflow after operations are applied
      // This ensures existing invalid nodes (e.g., binary operators with singleValue: true)
      // are fixed automatically when any update is made to the workflow
      workflowCopy.nodes = workflowCopy.nodes.map((node: WorkflowNode) => sanitizeNode(node));

      logger.debug('Applied full-workflow sanitization to all nodes');

      // If validateOnly flag is set, return success without applying
      if (request.validateOnly) {
        return {
@@ -218,8 +187,7 @@
        success: true,
        workflow: workflowCopy,
        operationsApplied,
        message: `Successfully applied ${operationsApplied} operations (${nodeOperations.length} node ops, ${otherOperations.length} other ops)`,
        warnings: this.warnings.length > 0 ? this.warnings : undefined
        message: `Successfully applied ${operationsApplied} operations (${nodeOperations.length} node ops, ${otherOperations.length} other ops)`
      };
    }
    } catch (error) {
@@ -377,23 +345,6 @@
    if (!node) {
      return this.formatNodeNotFoundError(workflow, operation.nodeId || operation.nodeName || '', 'updateNode');
    }

    // Check for name collision if renaming
    if (operation.updates.name && operation.updates.name !== node.name) {
      const normalizedNewName = this.normalizeNodeName(operation.updates.name);
      const normalizedCurrentName = this.normalizeNodeName(node.name);

      // Only check collision if the names are actually different after normalization
      if (normalizedNewName !== normalizedCurrentName) {
        const collision = workflow.nodes.find(n =>
          n.id !== node.id && this.normalizeNodeName(n.name) === normalizedNewName
        );
        if (collision) {
          return `Cannot rename node "${node.name}" to "${operation.updates.name}": A node with that name already exists (id: ${collision.id.substring(0, 8)}...). Please choose a different name.`;
        }
      }
    }

    return null;
  }

@@ -576,10 +527,7 @@
      executeOnce: operation.node.executeOnce
    };

    // Sanitize node to ensure complete metadata (filter options, operator structure, etc.)
    const sanitizedNode = sanitizeNode(newNode);

    workflow.nodes.push(sanitizedNode);
    workflow.nodes.push(newNode);
  }

  private applyRemoveNode(workflow: Workflow, operation: RemoveNodeOperation): void {
@@ -620,24 +568,10 @@
    const node = this.findNode(workflow, operation.nodeId, operation.nodeName);
    if (!node) return;

    // Track node renames for connection reference updates
    if (operation.updates.name && operation.updates.name !== node.name) {
      const oldName = node.name;
      const newName = operation.updates.name;
      this.renameMap.set(oldName, newName);
      logger.debug(`Tracking rename: "${oldName}" → "${newName}"`);
    }

    // Apply updates using dot notation
    Object.entries(operation.updates).forEach(([path, value]) => {
      this.setNestedProperty(node, path, value);
    });

    // Sanitize node after updates to ensure metadata is complete
    const sanitized = sanitizeNode(node);

    // Update the node in-place
    Object.assign(node, sanitized);
  }

  private applyMoveNode(workflow: Workflow, operation: MoveNodeOperation): void {
@@ -691,24 +625,6 @@
      sourceIndex = operation.case;
    }

    // Validation: Warn if using sourceIndex with If/Switch nodes without smart parameters
    if (sourceNode && operation.sourceIndex !== undefined && operation.branch === undefined && operation.case === undefined) {
      if (sourceNode.type === 'n8n-nodes-base.if') {
        this.warnings.push({
          operation: -1, // Not tied to specific operation index in request
          message: `Connection to If node "${operation.source}" uses sourceIndex=${operation.sourceIndex}. ` +
            `Consider using branch="true" or branch="false" for better clarity. ` +
            `If node outputs: main[0]=TRUE branch, main[1]=FALSE branch.`
        });
      } else if (sourceNode.type === 'n8n-nodes-base.switch') {
        this.warnings.push({
          operation: -1, // Not tied to specific operation index in request
          message: `Connection to Switch node "${operation.source}" uses sourceIndex=${operation.sourceIndex}. ` +
            `Consider using case=N for better clarity (case=0 for first output, case=1 for second, etc.).`
        });
      }
    }

    return { sourceOutput, sourceIndex };
  }

@@ -964,59 +880,6 @@
    workflow.connections = operation.connections;
  }

  /**
   * Update all connection references when nodes are renamed.
   * This method is called after node operations to ensure connection integrity.
   *
   * Updates:
   * - Connection object keys (source node names)
   * - Connection target.node values (target node names)
   * - All output types (main, error, ai_tool, ai_languageModel, etc.)
   *
   * @param workflow - The workflow to update
   */
  private updateConnectionReferences(workflow: Workflow): void {
    if (this.renameMap.size === 0) return;

    logger.debug(`Updating connection references for ${this.renameMap.size} renamed nodes`);

    // Create a mapping of all renames (old → new)
    const renames = new Map(this.renameMap);

    // Step 1: Update connection object keys (source node names)
    const updatedConnections: WorkflowConnection = {};
    for (const [sourceName, outputs] of Object.entries(workflow.connections)) {
      // Check if this source node was renamed
      const newSourceName = renames.get(sourceName) || sourceName;
      updatedConnections[newSourceName] = outputs;
    }

    // Step 2: Update target node references within connections
    for (const [sourceName, outputs] of Object.entries(updatedConnections)) {
      // Iterate through all output types (main, error, ai_tool, ai_languageModel, etc.)
      for (const [outputType, connections] of Object.entries(outputs)) {
        // connections is Array<Array<{node, type, index}>>
        for (let outputIndex = 0; outputIndex < connections.length; outputIndex++) {
          const connectionsAtIndex = connections[outputIndex];
          for (let connIndex = 0; connIndex < connectionsAtIndex.length; connIndex++) {
            const connection = connectionsAtIndex[connIndex];
            // Check if target node was renamed
            if (renames.has(connection.node)) {
              // Capture the old name before overwriting it, so the log shows the actual change
              const oldTargetName = connection.node;
              const newTargetName = renames.get(connection.node)!;
              connection.node = newTargetName;
              logger.debug(`Updated connection: ${sourceName}[${outputType}][${outputIndex}][${connIndex}].node: "${oldTargetName}" → "${newTargetName}"`);
            }
          }
        }
      }
    }

    // Replace workflow connections with updated connections
    workflow.connections = updatedConnections;

    logger.info(`Auto-updated ${this.renameMap.size} node name references in connections`);
  }

  // Helper methods

  /**

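Because of the rename tracking above (`renameMap` plus `updateConnectionReferences`), callers never patch connections by hand after a rename. A hedged sketch, assuming the entry point documented as "Apply diff operations to a workflow" is named `applyDiff` and that `WorkflowDiffRequest` carries an `operations` array:

```typescript
// Sketch only: WorkflowDiffRequest fields beyond `operations`/`validateOnly` are assumptions.
const engine = new WorkflowDiffEngine();

const result = await engine.applyDiff(workflow, {
  operations: [
    { type: 'updateNode', nodeId: 'abc123', updates: { name: 'Fetch Orders' } },
  ],
} as WorkflowDiffRequest);

if (result.success) {
  // Connection keys and target.node values that referenced the old name
  // now point at 'Fetch Orders'; non-blocking issues surface in warnings.
  console.log(result.message, result.warnings ?? []);
}
```
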
@@ -11,8 +11,6 @@ import { NodeSimilarityService, NodeSuggestion } from './node-similarity-service
import { NodeTypeNormalizer } from '../utils/node-type-normalizer';
import { Logger } from '../utils/logger';
import { validateAISpecificNodes, hasAINodes } from './ai-node-validator';
import { isTriggerNode } from '../utils/node-type-utils';
import { isNonExecutableNode } from '../utils/node-classification';
const logger = new Logger({ prefix: '[WorkflowValidator]' });

interface WorkflowNode {
@@ -87,8 +85,17 @@ export class WorkflowValidator {
    this.similarityService = new NodeSimilarityService(nodeRepository);
  }

  // Note: isStickyNote logic moved to shared utility: src/utils/node-classification.ts
  // Use isNonExecutableNode(node.type) instead
  /**
   * Check if a node is a Sticky Note or other non-executable node
   */
  private isStickyNote(node: WorkflowNode): boolean {
    const stickyNoteTypes = [
      'n8n-nodes-base.stickyNote',
      'nodes-base.stickyNote',
      '@n8n/n8n-nodes-base.stickyNote'
    ];
    return stickyNoteTypes.includes(node.type);
  }

  /**
   * Validate a complete workflow
@@ -139,7 +146,7 @@
    }

    // Update statistics after null check (exclude sticky notes from counts)
    const executableNodes = Array.isArray(workflow.nodes) ? workflow.nodes.filter(n => !isNonExecutableNode(n.type)) : [];
    const executableNodes = Array.isArray(workflow.nodes) ? workflow.nodes.filter(n => !this.isStickyNote(n)) : [];
    result.statistics.totalNodes = executableNodes.length;
    result.statistics.enabledNodes = executableNodes.filter(n => !n.disabled).length;

@@ -319,8 +326,16 @@
      nodeIds.add(node.id);
    }

    // Count trigger nodes using shared trigger detection
    const triggerNodes = workflow.nodes.filter(n => isTriggerNode(n.type));
    // Count trigger nodes - normalize type names first
    const triggerNodes = workflow.nodes.filter(n => {
      const normalizedType = NodeTypeNormalizer.normalizeToFullForm(n.type);
      const lowerType = normalizedType.toLowerCase();
      return lowerType.includes('trigger') ||
        (lowerType.includes('webhook') && !lowerType.includes('respond')) ||
        normalizedType === 'nodes-base.start' ||
        normalizedType === 'nodes-base.manualTrigger' ||
        normalizedType === 'nodes-base.formTrigger';
    });
    result.statistics.triggerNodes = triggerNodes.length;

    // Check for at least one trigger node
@@ -341,7 +356,7 @@
    profile: string
  ): Promise<void> {
    for (const node of workflow.nodes) {
      if (node.disabled || isNonExecutableNode(node.type)) continue;
      if (node.disabled || this.isStickyNote(node)) continue;

      try {
        // Validate node name length
@@ -617,12 +632,16 @@

    // Check for orphaned nodes (exclude sticky notes)
    for (const node of workflow.nodes) {
      if (node.disabled || isNonExecutableNode(node.type)) continue;
      if (node.disabled || this.isStickyNote(node)) continue;

      // Use shared trigger detection function for consistency
      const isNodeTrigger = isTriggerNode(node.type);
      const normalizedType = NodeTypeNormalizer.normalizeToFullForm(node.type);
      const isTrigger = normalizedType.toLowerCase().includes('trigger') ||
        normalizedType.toLowerCase().includes('webhook') ||
        normalizedType === 'nodes-base.start' ||
        normalizedType === 'nodes-base.manualTrigger' ||
        normalizedType === 'nodes-base.formTrigger';

      if (!connectedNodes.has(node.name) && !isNodeTrigger) {
      if (!connectedNodes.has(node.name) && !isTrigger) {
        result.warnings.push({
          type: 'warning',
          nodeId: node.id,
@@ -858,7 +877,7 @@

    // Build node type map (exclude sticky notes)
    workflow.nodes.forEach(node => {
      if (!isNonExecutableNode(node.type)) {
      if (!this.isStickyNote(node)) {
        nodeTypeMap.set(node.name, node.type);
      }
    });
@@ -926,7 +945,7 @@

    // Check from all executable nodes (exclude sticky notes)
    for (const node of workflow.nodes) {
      if (!isNonExecutableNode(node.type) && !visited.has(node.name)) {
      if (!this.isStickyNote(node) && !visited.has(node.name)) {
        if (hasCycleDFS(node.name)) return true;
      }
    }
@@ -945,7 +964,7 @@
    const nodeNames = workflow.nodes.map(n => n.name);

    for (const node of workflow.nodes) {
      if (node.disabled || isNonExecutableNode(node.type)) continue;
      if (node.disabled || this.isStickyNote(node)) continue;

      // Skip expression validation for langchain nodes
      // They have AI-specific validators and different expression rules
@@ -1092,7 +1111,7 @@

    // Check node-level error handling properties for ALL executable nodes
    for (const node of workflow.nodes) {
      if (!isNonExecutableNode(node.type)) {
      if (!this.isStickyNote(node)) {
        this.checkNodeErrorHandling(node, workflow, result);
      }
    }
@@ -1273,15 +1292,6 @@

  /**
   * Check node-level error handling configuration for a single node
   *
   * Validates error handling properties (onError, continueOnFail, retryOnFail)
   * and provides warnings for error-prone nodes (HTTP, webhooks, databases)
   * that lack proper error handling. Delegates webhook-specific validation
   * to checkWebhookErrorHandling() for clearer logic.
   *
   * @param node - The workflow node to validate
   * @param workflow - The complete workflow for context
   * @param result - Validation result to add errors/warnings to
   */
  private checkNodeErrorHandling(
    node: WorkflowNode,
@@ -1492,8 +1502,12 @@
        message: 'HTTP Request node without error handling. Consider adding "onError: \'continueRegularOutput\'" for non-critical requests or "retryOnFail: true" for transient failures.'
      });
    } else if (normalizedType.includes('webhook')) {
      // Delegate to specialized webhook validation helper
      this.checkWebhookErrorHandling(node, normalizedType, result);
      result.warnings.push({
        type: 'warning',
        nodeId: node.id,
        nodeName: node.name,
        message: 'Webhook node without error handling. Consider adding "onError: \'continueRegularOutput\'" to prevent workflow failures from blocking webhook responses.'
      });
    } else if (errorProneNodeTypes.some(db => normalizedType.includes(db) && ['postgres', 'mysql', 'mongodb'].includes(db))) {
      result.warnings.push({
        type: 'warning',
@@ -1584,52 +1598,6 @@

  }

  /**
   * Check webhook-specific error handling requirements
   *
   * Webhooks have special error handling requirements:
   * - respondToWebhook nodes (response nodes) don't need error handling
   * - Webhook nodes with responseNode mode REQUIRE onError to ensure responses
   * - Regular webhook nodes should have error handling to prevent blocking
   *
   * @param node - The webhook node to check
   * @param normalizedType - Normalized node type for comparison
   * @param result - Validation result to add errors/warnings to
   */
  private checkWebhookErrorHandling(
    node: WorkflowNode,
    normalizedType: string,
    result: WorkflowValidationResult
  ): void {
    // respondToWebhook nodes are response nodes (endpoints), not triggers
    // They're the END of execution, not controllers of flow - skip error handling check
    if (normalizedType.includes('respondtowebhook')) {
      return;
    }

    // Check for responseNode mode specifically
    // responseNode mode requires onError to ensure response is sent even on error
    if (node.parameters?.responseMode === 'responseNode') {
      if (!node.onError && !node.continueOnFail) {
        result.errors.push({
          type: 'error',
          nodeId: node.id,
          nodeName: node.name,
          message: 'responseNode mode requires onError: "continueRegularOutput"'
        });
      }
      return;
    }

    // Regular webhook nodes without responseNode mode
    result.warnings.push({
      type: 'warning',
      nodeId: node.id,
      nodeName: node.name,
      message: 'Webhook node without error handling. Consider adding "onError: \'continueRegularOutput\'" to prevent workflow failures from blocking webhook responses.'
    });
  }

  /**
   * Generate error handling suggestions based on all nodes
   */

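For reference, a node configuration that satisfies the webhook checks above: with `responseMode: 'responseNode'` the validator requires `onError`, and plain webhook nodes get a warning without it. Field values here are illustrative only:

```typescript
const webhookNode = {
  id: 'wh-1',                        // illustrative id/name/position
  name: 'Incoming Webhook',
  type: 'n8n-nodes-base.webhook',
  typeVersion: 2,
  position: [0, 0],
  parameters: { path: 'orders', responseMode: 'responseNode' },
  // Required by checkWebhookErrorHandling() when responseMode is 'responseNode';
  // also avoids the generic "Webhook node without error handling" warning.
  onError: 'continueRegularOutput',
};
```
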
@@ -1,460 +0,0 @@
/**
 * Workflow Versioning Service
 *
 * Provides workflow backup, versioning, rollback, and cleanup capabilities.
 * Automatically prunes to 10 versions per workflow to prevent memory leaks.
 */

import { NodeRepository } from '../database/node-repository';
import { N8nApiClient } from './n8n-api-client';
import { WorkflowValidator } from './workflow-validator';
import { EnhancedConfigValidator } from './enhanced-config-validator';

export interface WorkflowVersion {
  id: number;
  workflowId: string;
  versionNumber: number;
  workflowName: string;
  workflowSnapshot: any;
  trigger: 'partial_update' | 'full_update' | 'autofix';
  operations?: any[];
  fixTypes?: string[];
  metadata?: any;
  createdAt: string;
}

export interface VersionInfo {
  id: number;
  workflowId: string;
  versionNumber: number;
  workflowName: string;
  trigger: string;
  operationCount?: number;
  fixTypesApplied?: string[];
  createdAt: string;
  size: number; // Size in bytes
}

export interface RestoreResult {
  success: boolean;
  message: string;
  workflowId: string;
  fromVersion?: number;
  toVersionId: number;
  backupCreated: boolean;
  backupVersionId?: number;
  validationErrors?: string[];
}

export interface BackupResult {
  versionId: number;
  versionNumber: number;
  pruned: number;
  message: string;
}

export interface StorageStats {
  totalVersions: number;
  totalSize: number;
  totalSizeFormatted: string;
  byWorkflow: WorkflowStorageInfo[];
}

export interface WorkflowStorageInfo {
  workflowId: string;
  workflowName: string;
  versionCount: number;
  totalSize: number;
  totalSizeFormatted: string;
  lastBackup: string;
}

export interface VersionDiff {
  versionId1: number;
  versionId2: number;
  version1Number: number;
  version2Number: number;
  addedNodes: string[];
  removedNodes: string[];
  modifiedNodes: string[];
  connectionChanges: number;
  settingChanges: any;
}

/**
 * Workflow Versioning Service
 */
export class WorkflowVersioningService {
  private readonly DEFAULT_MAX_VERSIONS = 10;

  constructor(
    private nodeRepository: NodeRepository,
    private apiClient?: N8nApiClient
  ) {}

  /**
   * Create backup before modification
   * Automatically prunes to 10 versions after backup creation
   */
  async createBackup(
    workflowId: string,
    workflow: any,
    context: {
      trigger: 'partial_update' | 'full_update' | 'autofix';
      operations?: any[];
      fixTypes?: string[];
      metadata?: any;
    }
  ): Promise<BackupResult> {
    // Get current max version number
    const versions = this.nodeRepository.getWorkflowVersions(workflowId, 1);
    const nextVersion = versions.length > 0 ? versions[0].versionNumber + 1 : 1;

    // Create new version
    const versionId = this.nodeRepository.createWorkflowVersion({
      workflowId,
      versionNumber: nextVersion,
      workflowName: workflow.name || 'Unnamed Workflow',
      workflowSnapshot: workflow,
      trigger: context.trigger,
      operations: context.operations,
      fixTypes: context.fixTypes,
      metadata: context.metadata
    });

    // Auto-prune to keep max 10 versions
    const pruned = this.nodeRepository.pruneWorkflowVersions(
      workflowId,
      this.DEFAULT_MAX_VERSIONS
    );

    return {
      versionId,
      versionNumber: nextVersion,
      pruned,
      message: pruned > 0
        ? `Backup created (version ${nextVersion}), pruned ${pruned} old version(s)`
        : `Backup created (version ${nextVersion})`
    };
  }

  /**
   * Get version history for a workflow
   */
  async getVersionHistory(workflowId: string, limit: number = 10): Promise<VersionInfo[]> {
    const versions = this.nodeRepository.getWorkflowVersions(workflowId, limit);

    return versions.map(v => ({
      id: v.id,
      workflowId: v.workflowId,
      versionNumber: v.versionNumber,
      workflowName: v.workflowName,
      trigger: v.trigger,
      operationCount: v.operations ? v.operations.length : undefined,
      fixTypesApplied: v.fixTypes || undefined,
      createdAt: v.createdAt,
      size: JSON.stringify(v.workflowSnapshot).length
    }));
  }

  /**
   * Get a specific workflow version
   */
  async getVersion(versionId: number): Promise<WorkflowVersion | null> {
    return this.nodeRepository.getWorkflowVersion(versionId);
  }

  /**
   * Restore workflow to a previous version
   * Creates backup of current state before restoring
   */
  async restoreVersion(
    workflowId: string,
    versionId?: number,
    validateBefore: boolean = true
  ): Promise<RestoreResult> {
    if (!this.apiClient) {
      return {
        success: false,
        message: 'API client not configured - cannot restore workflow',
        workflowId,
        toVersionId: versionId || 0,
        backupCreated: false
      };
    }

    // Get the version to restore
    let versionToRestore: WorkflowVersion | null = null;

    if (versionId) {
      versionToRestore = this.nodeRepository.getWorkflowVersion(versionId);
    } else {
      // Get latest backup
      versionToRestore = this.nodeRepository.getLatestWorkflowVersion(workflowId);
    }

    if (!versionToRestore) {
      return {
        success: false,
        message: versionId
          ? `Version ${versionId} not found`
          : `No backup versions found for workflow ${workflowId}`,
        workflowId,
        toVersionId: versionId || 0,
        backupCreated: false
      };
    }

    // Validate workflow structure if requested
    if (validateBefore) {
      const validator = new WorkflowValidator(this.nodeRepository, EnhancedConfigValidator);
      const validationResult = await validator.validateWorkflow(
        versionToRestore.workflowSnapshot,
        {
          validateNodes: true,
          validateConnections: true,
          validateExpressions: false,
          profile: 'runtime'
        }
      );

      if (validationResult.errors.length > 0) {
        return {
          success: false,
          message: `Cannot restore - version ${versionToRestore.versionNumber} has validation errors`,
          workflowId,
          toVersionId: versionToRestore.id,
          backupCreated: false,
          validationErrors: validationResult.errors.map(e => e.message || 'Unknown error')
        };
      }
    }

    // Create backup of current workflow before restoring
    let backupResult: BackupResult | undefined;
    try {
      const currentWorkflow = await this.apiClient.getWorkflow(workflowId);
      backupResult = await this.createBackup(workflowId, currentWorkflow, {
        trigger: 'partial_update',
        metadata: {
          reason: 'Backup before rollback',
          restoringToVersion: versionToRestore.versionNumber
        }
      });
    } catch (error: any) {
      return {
        success: false,
        message: `Failed to create backup before restore: ${error.message}`,
        workflowId,
        toVersionId: versionToRestore.id,
        backupCreated: false
      };
    }

    // Restore the workflow
    try {
      await this.apiClient.updateWorkflow(workflowId, versionToRestore.workflowSnapshot);

      return {
        success: true,
        message: `Successfully restored workflow to version ${versionToRestore.versionNumber}`,
        workflowId,
        fromVersion: backupResult.versionNumber,
        toVersionId: versionToRestore.id,
        backupCreated: true,
        backupVersionId: backupResult.versionId
      };
    } catch (error: any) {
      return {
        success: false,
        message: `Failed to restore workflow: ${error.message}`,
        workflowId,
        toVersionId: versionToRestore.id,
        backupCreated: true,
        backupVersionId: backupResult.versionId
      };
    }
  }

  /**
   * Delete a specific version
   */
  async deleteVersion(versionId: number): Promise<{ success: boolean; message: string }> {
    const version = this.nodeRepository.getWorkflowVersion(versionId);

    if (!version) {
      return {
        success: false,
        message: `Version ${versionId} not found`
      };
    }

    this.nodeRepository.deleteWorkflowVersion(versionId);

    return {
      success: true,
      message: `Deleted version ${version.versionNumber} for workflow ${version.workflowId}`
    };
  }

  /**
   * Delete all versions for a workflow
   */
  async deleteAllVersions(workflowId: string): Promise<{ deleted: number; message: string }> {
    const count = this.nodeRepository.getWorkflowVersionCount(workflowId);

    if (count === 0) {
      return {
        deleted: 0,
        message: `No versions found for workflow ${workflowId}`
      };
    }

    const deleted = this.nodeRepository.deleteWorkflowVersionsByWorkflowId(workflowId);

    return {
      deleted,
      message: `Deleted ${deleted} version(s) for workflow ${workflowId}`
    };
  }

  /**
   * Manually trigger pruning for a workflow
   */
  async pruneVersions(
    workflowId: string,
    maxVersions: number = 10
  ): Promise<{ pruned: number; remaining: number }> {
    const pruned = this.nodeRepository.pruneWorkflowVersions(workflowId, maxVersions);
    const remaining = this.nodeRepository.getWorkflowVersionCount(workflowId);

    return { pruned, remaining };
  }

  /**
   * Truncate entire workflow_versions table
   * Requires explicit confirmation
   */
  async truncateAllVersions(confirm: boolean): Promise<{ deleted: number; message: string }> {
    if (!confirm) {
      return {
        deleted: 0,
        message: 'Truncate operation not confirmed - no action taken'
      };
    }

    const deleted = this.nodeRepository.truncateWorkflowVersions();

    return {
      deleted,
      message: `Truncated workflow_versions table - deleted ${deleted} version(s)`
    };
  }

  /**
   * Get storage statistics
   */
  async getStorageStats(): Promise<StorageStats> {
    const stats = this.nodeRepository.getVersionStorageStats();

    return {
      totalVersions: stats.totalVersions,
      totalSize: stats.totalSize,
      totalSizeFormatted: this.formatBytes(stats.totalSize),
      byWorkflow: stats.byWorkflow.map((w: any) => ({
        workflowId: w.workflowId,
        workflowName: w.workflowName,
        versionCount: w.versionCount,
        totalSize: w.totalSize,
        totalSizeFormatted: this.formatBytes(w.totalSize),
        lastBackup: w.lastBackup
      }))
    };
  }

  /**
   * Compare two versions
   */
  async compareVersions(versionId1: number, versionId2: number): Promise<VersionDiff> {
    const v1 = this.nodeRepository.getWorkflowVersion(versionId1);
    const v2 = this.nodeRepository.getWorkflowVersion(versionId2);

    if (!v1 || !v2) {
      throw new Error(`One or both versions not found: ${versionId1}, ${versionId2}`);
    }

    // Compare nodes
    const nodes1 = new Set<string>(v1.workflowSnapshot.nodes?.map((n: any) => n.id as string) || []);
    const nodes2 = new Set<string>(v2.workflowSnapshot.nodes?.map((n: any) => n.id as string) || []);

    const addedNodes: string[] = [...nodes2].filter(id => !nodes1.has(id));
    const removedNodes: string[] = [...nodes1].filter(id => !nodes2.has(id));
    const commonNodes = [...nodes1].filter(id => nodes2.has(id));

    // Check for modified nodes
    const modifiedNodes: string[] = [];
    for (const nodeId of commonNodes) {
      const node1 = v1.workflowSnapshot.nodes?.find((n: any) => n.id === nodeId);
      const node2 = v2.workflowSnapshot.nodes?.find((n: any) => n.id === nodeId);

      if (JSON.stringify(node1) !== JSON.stringify(node2)) {
        modifiedNodes.push(nodeId);
      }
    }

    // Compare connections
    const conn1Str = JSON.stringify(v1.workflowSnapshot.connections || {});
    const conn2Str = JSON.stringify(v2.workflowSnapshot.connections || {});
    const connectionChanges = conn1Str !== conn2Str ? 1 : 0;

    // Compare settings
    const settings1 = v1.workflowSnapshot.settings || {};
    const settings2 = v2.workflowSnapshot.settings || {};
    const settingChanges = this.diffObjects(settings1, settings2);

    return {
      versionId1,
      versionId2,
      version1Number: v1.versionNumber,
      version2Number: v2.versionNumber,
      addedNodes,
      removedNodes,
      modifiedNodes,
      connectionChanges,
      settingChanges
    };
  }

  /**
   * Format bytes to human-readable string
   */
  private formatBytes(bytes: number): string {
    if (bytes === 0) return '0 Bytes';

    const k = 1024;
    const sizes = ['Bytes', 'KB', 'MB', 'GB'];
    const i = Math.floor(Math.log(bytes) / Math.log(k));

    return Math.round((bytes / Math.pow(k, i)) * 100) / 100 + ' ' + sizes[i];
  }

  /**
   * Simple object diff
   */
  private diffObjects(obj1: any, obj2: any): any {
    const changes: any = {};

    const allKeys = new Set([...Object.keys(obj1), ...Object.keys(obj2)]);

    for (const key of allKeys) {
      if (JSON.stringify(obj1[key]) !== JSON.stringify(obj2[key])) {
        changes[key] = {
          before: obj1[key],
          after: obj2[key]
        };
      }
    }

    return changes;
  }
}
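
A hedged usage sketch against the API surface defined above; constructing `nodeRepository` and `apiClient` is out of scope here:

```typescript
const versioning = new WorkflowVersioningService(nodeRepository, apiClient);

// Snapshot before an autofix run; pruning to DEFAULT_MAX_VERSIONS (10) is automatic.
const backup = await versioning.createBackup('wf-123', workflow, {
  trigger: 'autofix',
  fixTypes: ['typeversion-upgrade'],
});
console.log(backup.message); // "Backup created (version N)..."

// Roll back to the latest snapshot; validateBefore defaults to true.
const restore = await versioning.restoreVersion('wf-123');
if (!restore.success) {
  console.error(restore.message, restore.validationErrors ?? []);
}
```
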
242
src/types/session-restoration.ts
Normal file
@@ -0,0 +1,242 @@
/**
 * Session Restoration Types
 *
 * Defines types for session persistence and restoration functionality.
 * Enables multi-tenant backends to restore sessions after container restarts.
 *
 * @since 2.19.0
 */

import { InstanceContext } from './instance-context';

/**
 * Session restoration hook callback
 *
 * Called when a client tries to use an unknown session ID.
 * The backend can load session state from external storage (database, Redis, etc.)
 * and return the instance context to recreate the session.
 *
 * @param sessionId - The session ID that was not found in memory
 * @returns Instance context to restore the session, or null if session should not be restored
 *
 * @example
 * ```typescript
 * const engine = new N8NMCPEngine({
 *   onSessionNotFound: async (sessionId) => {
 *     // Load from database
 *     const session = await db.loadSession(sessionId);
 *     if (!session || session.expired) return null;
 *     return session.instanceContext;
 *   }
 * });
 * ```
 */
export type SessionRestoreHook = (sessionId: string) => Promise<InstanceContext | null>;

/**
 * Session restoration configuration options
 *
 * @since 2.19.0
 */
export interface SessionRestorationOptions {
  /**
   * Session timeout in milliseconds
   * After this period of inactivity, sessions are expired and cleaned up
   * @default 1800000 (30 minutes)
   */
  sessionTimeout?: number;

  /**
   * Maximum time to wait for session restoration hook to complete
   * If the hook takes longer than this, the request will fail with 408 Request Timeout
   * @default 5000 (5 seconds)
   */
  sessionRestorationTimeout?: number;

  /**
   * Hook called when a client tries to use an unknown session ID
   * Return instance context to restore the session, or null to reject
   *
   * @param sessionId - The session ID that was not found
   * @returns Instance context for restoration, or null
   *
   * Error handling:
   * - Hook throws exception → 500 Internal Server Error
   * - Hook times out → 408 Request Timeout
   * - Hook returns null → 400 Bad Request (session not found)
   * - Hook returns invalid context → 400 Bad Request (invalid context)
   */
  onSessionNotFound?: SessionRestoreHook;

  /**
   * Number of retry attempts for failed session restoration
   *
   * When the restoration hook throws an error, the system will retry
   * up to this many times with a delay between attempts.
   *
   * Timeout errors are NOT retried (already took too long).
   *
   * Note: The overall timeout (sessionRestorationTimeout) applies to
   * ALL retry attempts combined, not per attempt.
   *
   * @default 0 (no retries)
   * @example
   * ```typescript
   * const engine = new N8NMCPEngine({
   *   onSessionNotFound: async (id) => db.loadSession(id),
   *   sessionRestorationRetries: 2, // Retry up to 2 times
   *   sessionRestorationRetryDelay: 100 // 100ms between retries
   * });
   * ```
   * @since 2.19.0
   */
  sessionRestorationRetries?: number;

  /**
   * Delay between retry attempts in milliseconds
   *
   * @default 100 (100 milliseconds)
   * @since 2.19.0
   */
  sessionRestorationRetryDelay?: number;
}

/**
 * Session state for persistence
 * Contains all information needed to restore a session after restart
 *
 * @since 2.19.0
 */
export interface SessionState {
  /**
   * Unique session identifier
   */
  sessionId: string;

  /**
   * Instance-specific configuration
   * Contains n8n API credentials and instance ID
   */
  instanceContext: InstanceContext;

  /**
   * When the session was created
   */
  createdAt: Date;

  /**
   * Last time the session was accessed
   * Used for TTL-based expiration
   */
  lastAccess: Date;

  /**
   * When the session will expire
   * Calculated from lastAccess + sessionTimeout
   */
  expiresAt: Date;

  /**
   * Optional metadata for application-specific use
   */
  metadata?: Record<string, any>;
}
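
A small sketch of building a `SessionState` row for storage; `instanceContext` comes from the live session, and `saveSession` is a hypothetical persistence helper, not part of this module:

```typescript
const state: SessionState = {
  sessionId: 'sess-42',
  instanceContext,
  createdAt: new Date(),
  lastAccess: new Date(),
  expiresAt: new Date(Date.now() + 30 * 60 * 1000), // lastAccess + sessionTimeout
  metadata: { tenant: 'acme' },                      // application-specific
};
await saveSession(state.sessionId, JSON.stringify(state)); // hypothetical helper
```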

/**
 * Session lifecycle event handlers
 *
 * These callbacks are called at various points in the session lifecycle.
 * All callbacks are optional and should not throw errors.
 *
 * ⚠️ Performance Note: onSessionAccessed is called on EVERY request.
 * Consider implementing throttling if you need database updates.
 *
 * @example
 * ```typescript
 * import throttle from 'lodash.throttle';
 *
 * const engine = new N8NMCPEngine({
 *   sessionEvents: {
 *     onSessionCreated: async (sessionId, context) => {
 *       await db.saveSession(sessionId, context);
 *     },
 *     onSessionAccessed: throttle(async (sessionId) => {
 *       await db.updateLastAccess(sessionId);
 *     }, 60000) // Max once per minute per session
 *   }
 * });
 * ```
 *
 * @since 2.19.0
 */
export interface SessionLifecycleEvents {
  /**
   * Called when a new session is created (not restored)
   *
   * Use cases:
   * - Save session to database for persistence
   * - Track session creation metrics
   * - Initialize session-specific resources
   *
   * @param sessionId - The newly created session ID
   * @param instanceContext - The instance context for this session
   */
  onSessionCreated?: (sessionId: string, instanceContext: InstanceContext) => void | Promise<void>;

  /**
   * Called when a session is restored from external storage
   *
   * Use cases:
   * - Track session restoration metrics
   * - Log successful recovery after restart
   * - Update database restoration timestamp
   *
   * @param sessionId - The restored session ID
   * @param instanceContext - The restored instance context
   */
  onSessionRestored?: (sessionId: string, instanceContext: InstanceContext) => void | Promise<void>;

  /**
   * Called on EVERY request that uses an existing session
   *
   * ⚠️ HIGH FREQUENCY: This event fires for every MCP tool call.
   * For a busy session, this could be 100+ calls per minute.
   *
   * Recommended: Implement throttling if you need database updates
   *
   * Use cases:
   * - Update session last_access timestamp (throttled)
   * - Track session activity metrics
   * - Extend session TTL in database
   *
   * @param sessionId - The session ID that was accessed
   */
  onSessionAccessed?: (sessionId: string) => void | Promise<void>;

  /**
   * Called when a session expires due to inactivity
   *
   * Called during cleanup cycle (every 5 minutes) BEFORE session removal.
   * This allows you to perform cleanup operations before the session is gone.
   *
   * Use cases:
   * - Delete session from database
   * - Log session expiration metrics
   * - Cleanup session-specific resources
   *
   * @param sessionId - The session ID that expired
   */
  onSessionExpired?: (sessionId: string) => void | Promise<void>;

  /**
   * Called when a session is manually deleted
   *
   * Use cases:
   * - Delete session from database
   * - Cascade delete related data
   * - Log manual session termination
   *
   * @param sessionId - The session ID that was deleted
   */
  onSessionDeleted?: (sessionId: string) => void | Promise<void>;
}
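
Pulling the pieces of this file together, a hedged end-to-end sketch in the style of the JSDoc examples above; `store` is a hypothetical persistence layer:

```typescript
const engine = new N8NMCPEngine({
  sessionTimeout: 30 * 60 * 1000,   // documented default: 30 minutes
  sessionRestorationTimeout: 5000,  // overall budget across all retry attempts
  sessionRestorationRetries: 2,
  sessionRestorationRetryDelay: 100,
  onSessionNotFound: async (sessionId) => {
    const state = await store.load(sessionId); // SessionState | null, assumed helper
    if (!state || state.expiresAt <= new Date()) return null; // reject expired sessions
    return state.instanceContext;
  },
  sessionEvents: {
    onSessionCreated: (id, ctx) => store.save(id, ctx),
    onSessionExpired: (id) => store.delete(id),
    onSessionDeleted: (id) => store.delete(id),
  },
});
```
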
@@ -170,7 +170,6 @@ export interface WorkflowDiffResult {
  success: boolean;
  workflow?: any; // Updated workflow if successful
  errors?: WorkflowDiffValidationError[];
  warnings?: WorkflowDiffValidationError[]; // Non-blocking warnings (e.g., parameter suggestions)
  operationsApplied?: number;
  message?: string;
  applied?: number[]; // Indices of successfully applied operations (when continueOnError is true)

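Given those fields, a small sketch of reporting a continueOnError-style result (the `operation`/`message` fields on `WorkflowDiffValidationError` follow the usage shown in the engine diff above):

```typescript
function reportDiffResult(result: WorkflowDiffResult): void {
  console.log(result.message ?? (result.success ? 'ok' : 'failed'));
  for (const w of result.warnings ?? []) {
    console.warn(`op ${w.operation}: ${w.message}`); // non-blocking suggestions
  }
  for (const e of result.errors ?? []) {
    console.error(`op ${e.operation}: ${e.message}`);
  }
  console.log(`applied ${result.applied?.length ?? result.operationsApplied ?? 0} operation(s)`);
}
```
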
@@ -1,109 +0,0 @@
/**
 * Utility functions for detecting and handling n8n expressions
 */

/**
 * Detects if a value is an n8n expression
 *
 * n8n expressions can be:
 * - Pure expression: `={{ $json.value }}`
 * - Mixed content: `=https://api.com/{{ $json.id }}/data`
 * - Prefix-only: `=$json.value`
 *
 * @param value - The value to check
 * @returns true if the value is an expression (starts with =)
 */
export function isExpression(value: unknown): value is string {
  return typeof value === 'string' && value.startsWith('=');
}

/**
 * Detects if a string contains n8n expression syntax {{ }}
 *
 * This checks for expression markers within the string,
 * regardless of whether it has the = prefix.
 *
 * @param value - The value to check
 * @returns true if the value contains {{ }} markers
 */
export function containsExpression(value: unknown): boolean {
  if (typeof value !== 'string') {
    return false;
  }
  // Use single regex for better performance than two includes()
  return /\{\{.*\}\}/s.test(value);
}

/**
 * Detects if a value should skip literal validation
 *
 * This is the main utility to use before validating values like URLs, JSON, etc.
 * It returns true if:
 * - The value is an expression (starts with =)
 * - OR the value contains expression markers {{ }}
 *
 * @param value - The value to check
 * @returns true if validation should be skipped
 */
export function shouldSkipLiteralValidation(value: unknown): boolean {
  return isExpression(value) || containsExpression(value);
}

/**
 * Extracts the expression content from a value
 *
 * If value is `={{ $json.value }}`, returns `$json.value`
 * If value is `=$json.value`, returns `$json.value`
 * If value is not an expression, returns the original value
 *
 * @param value - The value to extract from
 * @returns The expression content or original value
 */
export function extractExpressionContent(value: string): string {
  if (!isExpression(value)) {
    return value;
  }

  const withoutPrefix = value.substring(1); // Remove =

  // Check if it's wrapped in {{ }}
  const match = withoutPrefix.match(/^\{\{(.+)\}\}$/s);
  if (match) {
    return match[1].trim();
  }

  return withoutPrefix;
}

/**
 * Checks if a value is a mixed content expression
 *
 * Mixed content has both literal text and expressions:
 * - `Hello {{ $json.name }}!`
 * - `https://api.com/{{ $json.id }}/data`
 *
 * @param value - The value to check
 * @returns true if the value has mixed content
 */
export function hasMixedContent(value: unknown): boolean {
  // Type guard first to avoid calling containsExpression on non-strings
  if (typeof value !== 'string') {
    return false;
  }

  if (!containsExpression(value)) {
    return false;
  }

  // If it's wrapped entirely in {{ }}, it's not mixed
  const trimmed = value.trim();
  if (trimmed.startsWith('={{') && trimmed.endsWith('}}')) {
    // Check if there's only one pair of {{ }}
    const count = (trimmed.match(/\{\{/g) || []).length;
    if (count === 1) {
      return false;
    }
  }

  return true;
}
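
The deleted helpers above are easiest to read through their expected outputs; each line below follows directly from the implementations (assuming the functions are imported):

```typescript
isExpression('={{ $json.value }}');                      // true  - has the '=' prefix
isExpression('{{ $json.value }}');                       // false - no prefix
containsExpression('Hello {{ $json.name }}!');           // true  - {{ }} markers present
shouldSkipLiteralValidation('=https://{{ $json.h }}/x'); // true  - skip URL/JSON checks
extractExpressionContent('={{ $json.value }}');          // '$json.value'
extractExpressionContent('=$json.value');                // '$json.value'
hasMixedContent('=https://api.com/{{ $json.id }}/data'); // true  - literal text + expression
hasMixedContent('={{ $json.value }}');                   // false - one fully wrapped expression
```
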
@@ -1,121 +0,0 @@
/**
 * Node Classification Utilities
 *
 * Provides shared classification logic for workflow nodes.
 * Used by validators to consistently identify node types across the codebase.
 *
 * This module centralizes node type classification to ensure consistent behavior
 * between WorkflowValidator and n8n-validation.ts, preventing bugs like sticky
 * notes being incorrectly flagged as disconnected nodes.
 */

import { isTriggerNode as isTriggerNodeImpl } from './node-type-utils';

/**
 * Check if a node type is a sticky note (documentation-only node)
 *
 * Sticky notes are UI-only annotation nodes that:
 * - Do not participate in workflow execution
 * - Never have connections (by design)
 * - Should be excluded from connection validation
 * - Serve purely as visual documentation in the workflow canvas
 *
 * Example sticky note types:
 * - 'n8n-nodes-base.stickyNote' (standard format)
 * - 'nodes-base.stickyNote' (normalized format)
 * - '@n8n/n8n-nodes-base.stickyNote' (scoped format)
 *
 * @param nodeType - The node type to check (e.g., 'n8n-nodes-base.stickyNote')
 * @returns true if the node is a sticky note, false otherwise
 */
export function isStickyNote(nodeType: string): boolean {
  const stickyNoteTypes = [
    'n8n-nodes-base.stickyNote',
    'nodes-base.stickyNote',
    '@n8n/n8n-nodes-base.stickyNote'
  ];
  return stickyNoteTypes.includes(nodeType);
}

/**
 * Check if a node type is a trigger node
 *
 * This function delegates to the comprehensive trigger detection implementation
 * in node-type-utils.ts which supports 200+ trigger types using flexible
 * pattern matching instead of a hardcoded list.
 *
 * Trigger nodes:
 * - Start workflow execution
 * - Only need outgoing connections (no incoming connections required)
 * - Include webhooks, manual triggers, schedule triggers, email triggers, etc.
 * - Are the entry points for workflow execution
 *
 * Examples:
 * - Webhooks: Listen for HTTP requests
 * - Manual triggers: Started manually by user
 * - Schedule/Cron triggers: Run on a schedule
 * - Execute Workflow Trigger: Invoked by other workflows
 *
 * @param nodeType - The node type to check
 * @returns true if the node is a trigger, false otherwise
 */
export function isTriggerNode(nodeType: string): boolean {
  return isTriggerNodeImpl(nodeType);
}

/**
 * Check if a node type is non-executable (UI-only)
 *
 * Non-executable nodes:
 * - Do not participate in workflow execution
 * - Serve documentation/annotation purposes only
 * - Should be excluded from all execution-related validation
 * - Should be excluded from statistics like "total executable nodes"
 * - Should be excluded from connection validation
 *
 * Currently includes: sticky notes
 *
 * Future: May include other annotation/comment nodes if n8n adds them
 *
 * @param nodeType - The node type to check
 * @returns true if the node is non-executable, false otherwise
 */
export function isNonExecutableNode(nodeType: string): boolean {
  return isStickyNote(nodeType);
  // Future: Add other non-executable node types here
  // Example: || isCommentNode(nodeType) || isAnnotationNode(nodeType)
}

/**
 * Check if a node type requires incoming connections
 *
 * Most nodes require at least one incoming connection to receive data,
 * but there are two categories of exceptions:
 *
 * 1. Trigger nodes: Only need outgoing connections
 *    - They start workflow execution
 *    - They generate their own data
 *    - Examples: webhook, manualTrigger, scheduleTrigger
 *
 * 2. Non-executable nodes: Don't need any connections
 *    - They are UI-only annotations
 *    - They don't participate in execution
 *    - Examples: stickyNote
 *
 * @param nodeType - The node type to check
 * @returns true if the node requires incoming connections, false otherwise
 */
export function requiresIncomingConnection(nodeType: string): boolean {
  // Non-executable nodes don't need any connections
  if (isNonExecutableNode(nodeType)) {
    return false;
  }

  // Trigger nodes only need outgoing connections
  if (isTriggerNode(nodeType)) {
    return false;
  }

  // Regular nodes need incoming connections
  return true;
}
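
Expected classification results for the helpers above, following directly from the implementations:

```typescript
isStickyNote('n8n-nodes-base.stickyNote');                // true  (all three type formats match)
isNonExecutableNode('nodes-base.stickyNote');             // true  (currently sticky notes only)
requiresIncomingConnection('n8n-nodes-base.stickyNote');  // false - UI-only annotation
requiresIncomingConnection('n8n-nodes-base.webhook');     // false - trigger, generates its own data
requiresIncomingConnection('n8n-nodes-base.httpRequest'); // true  - regular node needs input
```
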
@@ -141,115 +141,3 @@ export function getNodeTypeVariations(type: string): string[] {
  // Remove duplicates while preserving order
  return [...new Set(variations)];
}

/**
 * Check if a node is ANY type of trigger (including executeWorkflowTrigger)
 *
 * This function determines if a node can start a workflow execution.
 * Returns true for:
 * - Webhook triggers (webhook, webhookTrigger)
 * - Time-based triggers (schedule, cron)
 * - Poll-based triggers (emailTrigger, slackTrigger, etc.)
 * - Manual triggers (manualTrigger, start, formTrigger)
 * - Sub-workflow triggers (executeWorkflowTrigger)
 *
 * Used for: Disconnection validation (triggers don't need incoming connections)
 *
 * @param nodeType - The node type to check (e.g., "n8n-nodes-base.executeWorkflowTrigger")
 * @returns true if node is any type of trigger
 */
export function isTriggerNode(nodeType: string): boolean {
  const normalized = normalizeNodeType(nodeType);
  const lowerType = normalized.toLowerCase();

  // Check for trigger pattern in node type name
  if (lowerType.includes('trigger')) {
    return true;
  }

  // Check for webhook nodes (excluding respondToWebhook which is NOT a trigger)
  if (lowerType.includes('webhook') && !lowerType.includes('respond')) {
    return true;
  }

  // Check for specific trigger types that don't have 'trigger' in their name
  const specificTriggers = [
    'nodes-base.start',
    'nodes-base.manualTrigger',
    'nodes-base.formTrigger'
  ];

  return specificTriggers.includes(normalized);
}

/**
 * Check if a node is an ACTIVATABLE trigger (excludes executeWorkflowTrigger)
 *
 * This function determines if a node can be used to activate a workflow.
 * Returns true for:
 * - Webhook triggers (webhook, webhookTrigger)
 * - Time-based triggers (schedule, cron)
 * - Poll-based triggers (emailTrigger, slackTrigger, etc.)
 * - Manual triggers (manualTrigger, start, formTrigger)
 *
 * Returns FALSE for:
 * - executeWorkflowTrigger (can only be invoked by other workflows)
 *
 * Used for: Activation validation (active workflows need activatable triggers)
 *
 * @param nodeType - The node type to check
 * @returns true if node can activate a workflow
 */
export function isActivatableTrigger(nodeType: string): boolean {
  const normalized = normalizeNodeType(nodeType);
  const lowerType = normalized.toLowerCase();

  // executeWorkflowTrigger cannot activate a workflow (invoked by other workflows)
  if (lowerType.includes('executeworkflow')) {
    return false;
  }

  // All other triggers can activate workflows
  return isTriggerNode(nodeType);
}

/**
 * Get human-readable description of trigger type
 *
 * @param nodeType - The node type
 * @returns Description of what triggers this node
 */
export function getTriggerTypeDescription(nodeType: string): string {
  const normalized = normalizeNodeType(nodeType);
  const lowerType = normalized.toLowerCase();

  if (lowerType.includes('executeworkflow')) {
    return 'Execute Workflow Trigger (invoked by other workflows)';
  }

  if (lowerType.includes('webhook')) {
    return 'Webhook Trigger (HTTP requests)';
  }

  if (lowerType.includes('schedule') || lowerType.includes('cron')) {
    return 'Schedule Trigger (time-based)';
  }

  if (lowerType.includes('manual') || normalized === 'nodes-base.start') {
    return 'Manual Trigger (manual execution)';
  }

  if (lowerType.includes('email') || lowerType.includes('imap') || lowerType.includes('gmail')) {
    return 'Email Trigger (polling)';
  }

  if (lowerType.includes('form')) {
    return 'Form Trigger (form submissions)';
  }

  if (lowerType.includes('trigger')) {
    return 'Trigger (event-based)';
  }

  return 'Unknown trigger type';
}
752
supabase-telemetry-aggregation.sql
Normal file
@@ -0,0 +1,752 @@
-- ============================================================================
-- N8N-MCP Telemetry Aggregation & Automated Pruning System
-- ============================================================================
-- Purpose: Create aggregation tables and automated cleanup to maintain
--          database under 500MB free tier limit while preserving insights
--
-- Strategy: Aggregate → Delete → Retain only recent raw events
-- Expected savings: ~120 MB (from 265 MB → ~145 MB steady state)
-- ============================================================================

-- ============================================================================
-- PART 1: AGGREGATION TABLES
-- ============================================================================

-- Daily tool usage summary (replaces 96 MB of tool_sequence raw data)
CREATE TABLE IF NOT EXISTS telemetry_tool_usage_daily (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  aggregation_date DATE NOT NULL,
  user_id TEXT NOT NULL,
  tool_name TEXT NOT NULL,
  usage_count INTEGER NOT NULL DEFAULT 0,
  success_count INTEGER NOT NULL DEFAULT 0,
  error_count INTEGER NOT NULL DEFAULT 0,
  avg_execution_time_ms NUMERIC,
  total_execution_time_ms BIGINT,
  created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  UNIQUE(aggregation_date, user_id, tool_name)
);

CREATE INDEX idx_tool_usage_daily_date ON telemetry_tool_usage_daily(aggregation_date DESC);
CREATE INDEX idx_tool_usage_daily_tool ON telemetry_tool_usage_daily(tool_name);
CREATE INDEX idx_tool_usage_daily_user ON telemetry_tool_usage_daily(user_id);

COMMENT ON TABLE telemetry_tool_usage_daily IS 'Daily aggregation of tool usage replacing raw tool_used and tool_sequence events. Saves ~95% storage.';
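
-- Illustrative example (commented out; not part of the migration): the rollup
-- answers the same questions as the raw events, e.g. last week's top tools:
--
--   SELECT tool_name,
--          SUM(usage_count) AS uses,
--          ROUND(SUM(success_count)::numeric / NULLIF(SUM(usage_count), 0), 3) AS success_rate
--   FROM telemetry_tool_usage_daily
--   WHERE aggregation_date >= CURRENT_DATE - 7
--   GROUP BY tool_name
--   ORDER BY uses DESC
--   LIMIT 10;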

-- Tool sequence patterns (replaces individual sequences with pattern analysis)
CREATE TABLE IF NOT EXISTS telemetry_tool_patterns (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  aggregation_date DATE NOT NULL,
  tool_sequence TEXT[] NOT NULL,  -- Array of tool names in order
  sequence_hash TEXT NOT NULL,    -- Hash of the sequence for grouping
  occurrence_count INTEGER NOT NULL DEFAULT 1,
  avg_sequence_duration_ms NUMERIC,
  success_rate NUMERIC,           -- 0.0 to 1.0
  common_errors JSONB,            -- {"error_type": count}
  created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  UNIQUE(aggregation_date, sequence_hash)
);

CREATE INDEX idx_tool_patterns_date ON telemetry_tool_patterns(aggregation_date DESC);
CREATE INDEX idx_tool_patterns_hash ON telemetry_tool_patterns(sequence_hash);

COMMENT ON TABLE telemetry_tool_patterns IS 'Common tool usage patterns aggregated daily. Identifies workflows and AI behavior patterns.';

-- Workflow insights (aggregates workflow_created events)
CREATE TABLE IF NOT EXISTS telemetry_workflow_insights (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  aggregation_date DATE NOT NULL,
  complexity TEXT,                -- simple/medium/complex
  node_count_range TEXT,          -- 1-5, 6-10, 11-20, 21+
  has_trigger BOOLEAN,
  has_webhook BOOLEAN,
  common_node_types TEXT[],       -- Top node types used
  workflow_count INTEGER NOT NULL DEFAULT 0,
  avg_node_count NUMERIC,
  created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  UNIQUE(aggregation_date, complexity, node_count_range, has_trigger, has_webhook)
);

CREATE INDEX idx_workflow_insights_date ON telemetry_workflow_insights(aggregation_date DESC);
CREATE INDEX idx_workflow_insights_complexity ON telemetry_workflow_insights(complexity);

COMMENT ON TABLE telemetry_workflow_insights IS 'Daily workflow creation patterns. Shows adoption trends without storing duplicate workflows.';

-- Error patterns (keeps error intelligence, deletes raw error events)
CREATE TABLE IF NOT EXISTS telemetry_error_patterns (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  aggregation_date DATE NOT NULL,
  error_type TEXT NOT NULL,
  error_context TEXT,             -- e.g., 'validation', 'workflow_execution', 'node_operation'
  occurrence_count INTEGER NOT NULL DEFAULT 1,
  affected_users INTEGER NOT NULL DEFAULT 0,
  first_seen TIMESTAMPTZ,
  last_seen TIMESTAMPTZ,
  sample_error_message TEXT,      -- Keep one representative message
  created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  UNIQUE(aggregation_date, error_type, error_context)
);

CREATE INDEX idx_error_patterns_date ON telemetry_error_patterns(aggregation_date DESC);
CREATE INDEX idx_error_patterns_type ON telemetry_error_patterns(error_type);

COMMENT ON TABLE telemetry_error_patterns IS 'Error patterns over time. Preserves debugging insights while pruning raw error events.';
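
-- Illustrative example (commented out): with this rollup, a week-over-week
-- regression check no longer needs the raw error_occurred rows:
--
--   SELECT error_type, SUM(occurrence_count) AS hits_last_7d
--   FROM telemetry_error_patterns
--   WHERE aggregation_date >= CURRENT_DATE - 7
--   GROUP BY error_type
--   ORDER BY hits_last_7d DESC
--   LIMIT 10;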

-- Validation insights (aggregates validation_details)
CREATE TABLE IF NOT EXISTS telemetry_validation_insights (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  aggregation_date DATE NOT NULL,
  validation_type TEXT,           -- 'node', 'workflow', 'expression'
  profile TEXT,                   -- 'minimal', 'runtime', 'ai-friendly', 'strict'
  success_count INTEGER NOT NULL DEFAULT 0,
  failure_count INTEGER NOT NULL DEFAULT 0,
  common_failure_reasons JSONB,   -- {"reason": count}
  avg_validation_time_ms NUMERIC,
  created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  UNIQUE(aggregation_date, validation_type, profile)
);

CREATE INDEX idx_validation_insights_date ON telemetry_validation_insights(aggregation_date DESC);
CREATE INDEX idx_validation_insights_type ON telemetry_validation_insights(validation_type);

COMMENT ON TABLE telemetry_validation_insights IS 'Validation success/failure patterns. Shows where users struggle without storing every validation event.';

-- ============================================================================
-- PART 2: AGGREGATION FUNCTIONS
-- ============================================================================

-- Function to aggregate tool usage data
CREATE OR REPLACE FUNCTION aggregate_tool_usage(cutoff_date TIMESTAMPTZ)
RETURNS INTEGER AS $$
DECLARE
  rows_aggregated INTEGER;
BEGIN
  -- Aggregate tool_used events
  INSERT INTO telemetry_tool_usage_daily (
    aggregation_date,
    user_id,
    tool_name,
    usage_count,
    success_count,
    error_count,
    avg_execution_time_ms,
    total_execution_time_ms
  )
  SELECT
    DATE(created_at) as aggregation_date,
    user_id,
    properties->>'toolName' as tool_name,
    COUNT(*) as usage_count,
    COUNT(*) FILTER (WHERE (properties->>'success')::boolean = true) as success_count,
    COUNT(*) FILTER (WHERE (properties->>'success')::boolean = false OR properties->>'error' IS NOT NULL) as error_count,
    AVG((properties->>'executionTime')::numeric) as avg_execution_time_ms,
    SUM((properties->>'executionTime')::numeric) as total_execution_time_ms
  FROM telemetry_events
  WHERE event = 'tool_used'
    AND created_at < cutoff_date
    AND properties->>'toolName' IS NOT NULL
  GROUP BY DATE(created_at), user_id, properties->>'toolName'
  ON CONFLICT (aggregation_date, user_id, tool_name)
  DO UPDATE SET
    usage_count = telemetry_tool_usage_daily.usage_count + EXCLUDED.usage_count,
    success_count = telemetry_tool_usage_daily.success_count + EXCLUDED.success_count,
    error_count = telemetry_tool_usage_daily.error_count + EXCLUDED.error_count,
    total_execution_time_ms = telemetry_tool_usage_daily.total_execution_time_ms + EXCLUDED.total_execution_time_ms,
    avg_execution_time_ms = (telemetry_tool_usage_daily.total_execution_time_ms + EXCLUDED.total_execution_time_ms) /
                            (telemetry_tool_usage_daily.usage_count + EXCLUDED.usage_count),
    updated_at = NOW();

  GET DIAGNOSTICS rows_aggregated = ROW_COUNT;

  RAISE NOTICE 'Aggregated % rows from tool_used events', rows_aggregated;
  RETURN rows_aggregated;
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION aggregate_tool_usage IS 'Aggregates tool_used events into daily summaries before deletion';
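
-- Illustrative example (commented out): the function can be exercised on its
-- own before wiring up the cron job; it returns the number of daily rows
-- inserted or updated:
--
--   SELECT aggregate_tool_usage(NOW() - INTERVAL '3 days');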

-- Function to aggregate tool sequence patterns
CREATE OR REPLACE FUNCTION aggregate_tool_patterns(cutoff_date TIMESTAMPTZ)
RETURNS INTEGER AS $$
DECLARE
  rows_aggregated INTEGER;
BEGIN
  INSERT INTO telemetry_tool_patterns (
    aggregation_date,
    tool_sequence,
    sequence_hash,
    occurrence_count,
    avg_sequence_duration_ms,
    success_rate
  )
  SELECT
    DATE(created_at) as aggregation_date,
    (properties->>'toolSequence')::text[] as tool_sequence,
    md5(array_to_string((properties->>'toolSequence')::text[], ',')) as sequence_hash,
    COUNT(*) as occurrence_count,
    AVG((properties->>'duration')::numeric) as avg_sequence_duration_ms,
    AVG(CASE WHEN (properties->>'success')::boolean THEN 1.0 ELSE 0.0 END) as success_rate
  FROM telemetry_events
  WHERE event = 'tool_sequence'
    AND created_at < cutoff_date
    AND properties->>'toolSequence' IS NOT NULL
  GROUP BY DATE(created_at), (properties->>'toolSequence')::text[]
  ON CONFLICT (aggregation_date, sequence_hash)
  DO UPDATE SET
    occurrence_count = telemetry_tool_patterns.occurrence_count + EXCLUDED.occurrence_count,
    avg_sequence_duration_ms = (
      (telemetry_tool_patterns.avg_sequence_duration_ms * telemetry_tool_patterns.occurrence_count +
       EXCLUDED.avg_sequence_duration_ms * EXCLUDED.occurrence_count) /
      (telemetry_tool_patterns.occurrence_count + EXCLUDED.occurrence_count)
    ),
    success_rate = (
      (telemetry_tool_patterns.success_rate * telemetry_tool_patterns.occurrence_count +
       EXCLUDED.success_rate * EXCLUDED.occurrence_count) /
      (telemetry_tool_patterns.occurrence_count + EXCLUDED.occurrence_count)
    ),
    updated_at = NOW();

  GET DIAGNOSTICS rows_aggregated = ROW_COUNT;

  RAISE NOTICE 'Aggregated % rows from tool_sequence events', rows_aggregated;
  RETURN rows_aggregated;
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION aggregate_tool_patterns IS 'Aggregates tool_sequence events into pattern analysis before deletion';

-- Function to aggregate workflow insights
CREATE OR REPLACE FUNCTION aggregate_workflow_insights(cutoff_date TIMESTAMPTZ)
RETURNS INTEGER AS $$
DECLARE
  rows_aggregated INTEGER;
BEGIN
  INSERT INTO telemetry_workflow_insights (
    aggregation_date,
    complexity,
    node_count_range,
    has_trigger,
    has_webhook,
    common_node_types,
    workflow_count,
    avg_node_count
  )
  SELECT
    DATE(created_at) as aggregation_date,
    properties->>'complexity' as complexity,
    CASE
      WHEN (properties->>'nodeCount')::int BETWEEN 1 AND 5 THEN '1-5'
      WHEN (properties->>'nodeCount')::int BETWEEN 6 AND 10 THEN '6-10'
      WHEN (properties->>'nodeCount')::int BETWEEN 11 AND 20 THEN '11-20'
      ELSE '21+'
    END as node_count_range,
    (properties->>'hasTrigger')::boolean as has_trigger,
    (properties->>'hasWebhook')::boolean as has_webhook,
    ARRAY[]::text[] as common_node_types, -- Will be populated separately if needed
    COUNT(*) as workflow_count,
    AVG((properties->>'nodeCount')::numeric) as avg_node_count
  FROM telemetry_events
  WHERE event = 'workflow_created'
    AND created_at < cutoff_date
  GROUP BY
    DATE(created_at),
    properties->>'complexity',
    node_count_range,
    (properties->>'hasTrigger')::boolean,
    (properties->>'hasWebhook')::boolean
  ON CONFLICT (aggregation_date, complexity, node_count_range, has_trigger, has_webhook)
  DO UPDATE SET
    workflow_count = telemetry_workflow_insights.workflow_count + EXCLUDED.workflow_count,
    avg_node_count = (
      (telemetry_workflow_insights.avg_node_count * telemetry_workflow_insights.workflow_count +
       EXCLUDED.avg_node_count * EXCLUDED.workflow_count) /
      (telemetry_workflow_insights.workflow_count + EXCLUDED.workflow_count)
    ),
    updated_at = NOW();

  GET DIAGNOSTICS rows_aggregated = ROW_COUNT;

  RAISE NOTICE 'Aggregated % rows from workflow_created events', rows_aggregated;
  RETURN rows_aggregated;
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION aggregate_workflow_insights IS 'Aggregates workflow_created events into pattern insights before deletion';
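
-- Illustrative example (commented out): adoption trend by complexity, straight
-- from the rollup:
--
--   SELECT aggregation_date, complexity, SUM(workflow_count) AS workflows
--   FROM telemetry_workflow_insights
--   GROUP BY aggregation_date, complexity
--   ORDER BY aggregation_date DESC;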

-- Function to aggregate error patterns
CREATE OR REPLACE FUNCTION aggregate_error_patterns(cutoff_date TIMESTAMPTZ)
RETURNS INTEGER AS $$
DECLARE
  rows_aggregated INTEGER;
BEGIN
  INSERT INTO telemetry_error_patterns (
    aggregation_date,
    error_type,
    error_context,
    occurrence_count,
    affected_users,
    first_seen,
    last_seen,
    sample_error_message
  )
  SELECT
    DATE(created_at) as aggregation_date,
    properties->>'errorType' as error_type,
    properties->>'context' as error_context,
    COUNT(*) as occurrence_count,
    COUNT(DISTINCT user_id) as affected_users,
    MIN(created_at) as first_seen,
    MAX(created_at) as last_seen,
    (ARRAY_AGG(properties->>'message' ORDER BY created_at DESC))[1] as sample_error_message
  FROM telemetry_events
  WHERE event = 'error_occurred'
    AND created_at < cutoff_date
  GROUP BY DATE(created_at), properties->>'errorType', properties->>'context'
  ON CONFLICT (aggregation_date, error_type, error_context)
  DO UPDATE SET
    occurrence_count = telemetry_error_patterns.occurrence_count + EXCLUDED.occurrence_count,
    affected_users = GREATEST(telemetry_error_patterns.affected_users, EXCLUDED.affected_users),
    first_seen = LEAST(telemetry_error_patterns.first_seen, EXCLUDED.first_seen),
    last_seen = GREATEST(telemetry_error_patterns.last_seen, EXCLUDED.last_seen),
    updated_at = NOW();

  GET DIAGNOSTICS rows_aggregated = ROW_COUNT;

  RAISE NOTICE 'Aggregated % rows from error_occurred events', rows_aggregated;
  RETURN rows_aggregated;
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION aggregate_error_patterns IS 'Aggregates error_occurred events into pattern analysis before deletion';

-- Function to aggregate validation insights
CREATE OR REPLACE FUNCTION aggregate_validation_insights(cutoff_date TIMESTAMPTZ)
RETURNS INTEGER AS $$
DECLARE
  rows_aggregated INTEGER;
BEGIN
  INSERT INTO telemetry_validation_insights (
    aggregation_date,
    validation_type,
    profile,
    success_count,
    failure_count,
    common_failure_reasons,
    avg_validation_time_ms
  )
  -- Failure reasons are counted in a sub-aggregation first: a jsonb_object_agg
  -- over a COUNT(*) cannot be nested inside a single aggregate call
  WITH base AS (
    SELECT
      DATE(created_at) AS aggregation_date,
      properties->>'validationType' AS validation_type,
      properties->>'profile' AS profile,
      (properties->>'success')::boolean AS success,
      COALESCE(properties->>'failureReason', 'unknown') AS failure_reason,
      (properties->>'validationTime')::numeric AS validation_time
    FROM telemetry_events
    WHERE event = 'validation_details'
      AND created_at < cutoff_date
  ),
  failure_reasons AS (
    SELECT aggregation_date, validation_type, profile,
           jsonb_object_agg(failure_reason, reason_count) AS common_failure_reasons
    FROM (
      SELECT aggregation_date, validation_type, profile, failure_reason,
             COUNT(*) AS reason_count
      FROM base
      WHERE success = false
      GROUP BY aggregation_date, validation_type, profile, failure_reason
    ) counts
    GROUP BY aggregation_date, validation_type, profile
  )
  SELECT
    b.aggregation_date,
    b.validation_type,
    b.profile,
    COUNT(*) FILTER (WHERE b.success = true) AS success_count,
    COUNT(*) FILTER (WHERE b.success = false) AS failure_count,
    fr.common_failure_reasons,
    AVG(b.validation_time) AS avg_validation_time_ms
  FROM base b
  LEFT JOIN failure_reasons fr
    ON fr.aggregation_date = b.aggregation_date
   AND fr.validation_type IS NOT DISTINCT FROM b.validation_type
   AND fr.profile IS NOT DISTINCT FROM b.profile
  GROUP BY b.aggregation_date, b.validation_type, b.profile, fr.common_failure_reasons
  ON CONFLICT (aggregation_date, validation_type, profile)
  DO UPDATE SET
    success_count = telemetry_validation_insights.success_count + EXCLUDED.success_count,
    failure_count = telemetry_validation_insights.failure_count + EXCLUDED.failure_count,
    updated_at = NOW();

  GET DIAGNOSTICS rows_aggregated = ROW_COUNT;

  RAISE NOTICE 'Aggregated % rows from validation_details events', rows_aggregated;
  RETURN rows_aggregated;
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION aggregate_validation_insights IS 'Aggregates validation_details events into insights before deletion';

-- ============================================================================
-- PART 3: MASTER AGGREGATION & CLEANUP FUNCTION
-- ============================================================================

CREATE OR REPLACE FUNCTION run_telemetry_aggregation_and_cleanup(
  retention_days INTEGER DEFAULT 3
)
RETURNS TABLE(
  event_type TEXT,
  rows_aggregated INTEGER,
  rows_deleted INTEGER,
  space_freed_mb NUMERIC
) AS $$
DECLARE
  cutoff_date TIMESTAMPTZ;
  total_before BIGINT;
  total_after BIGINT;
  agg_count INTEGER;
  del_count INTEGER;
BEGIN
  cutoff_date := NOW() - (retention_days || ' days')::INTERVAL;

  RAISE NOTICE 'Starting aggregation and cleanup for data older than %', cutoff_date;

  -- Get table size before cleanup
  SELECT pg_total_relation_size('telemetry_events') INTO total_before;

  -- ========================================================================
  -- STEP 1: AGGREGATE DATA BEFORE DELETION
  -- ========================================================================

  -- Tool usage aggregation
  SELECT aggregate_tool_usage(cutoff_date) INTO agg_count;
  SELECT COUNT(*) INTO del_count FROM telemetry_events
  WHERE event = 'tool_used' AND created_at < cutoff_date;

  event_type := 'tool_used';
  rows_aggregated := agg_count;
  rows_deleted := del_count;
  RETURN NEXT;

  -- Tool patterns aggregation
  SELECT aggregate_tool_patterns(cutoff_date) INTO agg_count;
  SELECT COUNT(*) INTO del_count FROM telemetry_events
  WHERE event = 'tool_sequence' AND created_at < cutoff_date;

  event_type := 'tool_sequence';
  rows_aggregated := agg_count;
  rows_deleted := del_count;
  RETURN NEXT;

  -- Workflow insights aggregation
  SELECT aggregate_workflow_insights(cutoff_date) INTO agg_count;
  SELECT COUNT(*) INTO del_count FROM telemetry_events
  WHERE event = 'workflow_created' AND created_at < cutoff_date;

  event_type := 'workflow_created';
  rows_aggregated := agg_count;
  rows_deleted := del_count;
  RETURN NEXT;

  -- Error patterns aggregation
  SELECT aggregate_error_patterns(cutoff_date) INTO agg_count;
  SELECT COUNT(*) INTO del_count FROM telemetry_events
  WHERE event = 'error_occurred' AND created_at < cutoff_date;

  event_type := 'error_occurred';
  rows_aggregated := agg_count;
  rows_deleted := del_count;
  RETURN NEXT;

  -- Validation insights aggregation
  SELECT aggregate_validation_insights(cutoff_date) INTO agg_count;
  SELECT COUNT(*) INTO del_count FROM telemetry_events
  WHERE event = 'validation_details' AND created_at < cutoff_date;

  event_type := 'validation_details';
  rows_aggregated := agg_count;
  rows_deleted := del_count;
  RETURN NEXT;

  -- ========================================================================
  -- STEP 2: DELETE OLD RAW EVENTS (now that they're aggregated)
  -- ========================================================================

  DELETE FROM telemetry_events
  WHERE created_at < cutoff_date
    AND event IN (
      'tool_used',
      'tool_sequence',
      'workflow_created',
      'validation_details',
      'session_start',
      'search_query',
      'diagnostic_completed',
      'health_check_completed'
    );

  -- Keep error_occurred for 30 days (extended retention for debugging)
  DELETE FROM telemetry_events
  WHERE created_at < (NOW() - INTERVAL '30 days')
    AND event = 'error_occurred';

  -- ========================================================================
  -- STEP 3: CLEAN UP OLD WORKFLOWS (keep only unique patterns)
  -- ========================================================================

  -- Delete duplicate workflows older than retention period
  WITH workflow_duplicates AS (
    SELECT id
    FROM (
      SELECT id,
             ROW_NUMBER() OVER (
               PARTITION BY workflow_hash
               ORDER BY created_at DESC
             ) as rn
      FROM telemetry_workflows
      WHERE created_at < cutoff_date
    ) sub
    WHERE rn > 1
  )
  DELETE FROM telemetry_workflows
  WHERE id IN (SELECT id FROM workflow_duplicates);

  GET DIAGNOSTICS del_count = ROW_COUNT;

  event_type := 'duplicate_workflows';
  rows_aggregated := 0;
  rows_deleted := del_count;
  RETURN NEXT;

  -- ========================================================================
  -- STEP 4: VACUUM TO RECLAIM SPACE
  -- ========================================================================

  -- Note: VACUUM cannot be run inside a function, must be run separately
  -- The cron job will handle this

  -- Get table size after cleanup
  SELECT pg_total_relation_size('telemetry_events') INTO total_after;

  -- Summary row
  event_type := 'TOTAL_SPACE_FREED';
  rows_aggregated := 0;
  rows_deleted := 0;
  space_freed_mb := ROUND((total_before - total_after)::NUMERIC / 1024 / 1024, 2);
  RETURN NEXT;

  RAISE NOTICE 'Cleanup complete. Space freed: % MB', space_freed_mb;
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION run_telemetry_aggregation_and_cleanup IS 'Master function to aggregate data and delete old events. Run daily via cron.';

-- ============================================================================
-- PART 4: SUPABASE CRON JOB SETUP
-- ============================================================================

-- Enable pg_cron extension (if not already enabled)
CREATE EXTENSION IF NOT EXISTS pg_cron;

-- Schedule daily cleanup at 2 AM UTC (low traffic time)
-- This will aggregate data older than 3 days and then delete it
SELECT cron.schedule(
  'telemetry-daily-cleanup',
  '0 2 * * *',  -- Every day at 2 AM UTC
  'SELECT run_telemetry_aggregation_and_cleanup(3)'
);

-- VACUUM cannot run inside a transaction block, and a multi-statement cron
-- command executes as one implicit transaction, so schedule the vacuum as a
-- separate single-statement job after the cleanup has finished
SELECT cron.schedule(
  'telemetry-daily-vacuum',
  '30 2 * * *',  -- Every day at 2:30 AM UTC
  'VACUUM ANALYZE telemetry_events, telemetry_workflows'
);

COMMENT ON EXTENSION pg_cron IS 'Cron job scheduler for automated telemetry cleanup';
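
-- Illustrative example (commented out; assumes a pg_cron version that exposes
-- jobname in cron.job, as current Supabase instances do): verify that both
-- jobs are registered, and use cron.unschedule('<jobname>') to re-schedule one.
--
--   SELECT jobid, jobname, schedule, active
--   FROM cron.job
--   WHERE jobname LIKE 'telemetry-%';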

-- ============================================================================
-- PART 5: MONITORING & ALERTING
-- ============================================================================

-- Function to check database size and alert if approaching limit
CREATE OR REPLACE FUNCTION check_database_size()
RETURNS TABLE(
  total_size_mb NUMERIC,
  events_size_mb NUMERIC,
  workflows_size_mb NUMERIC,
  aggregates_size_mb NUMERIC,
  percent_of_limit NUMERIC,
  days_until_full NUMERIC,
  status TEXT
) AS $$
DECLARE
  db_size BIGINT;
  events_size BIGINT;
  workflows_size BIGINT;
  agg_size BIGINT;
  limit_mb CONSTANT NUMERIC := 500;  -- Free tier limit
  growth_rate_mb_per_day NUMERIC;
BEGIN
  -- Get current sizes
  SELECT pg_database_size(current_database()) INTO db_size;
  SELECT pg_total_relation_size('telemetry_events') INTO events_size;
  SELECT pg_total_relation_size('telemetry_workflows') INTO workflows_size;

  SELECT COALESCE(
    pg_total_relation_size('telemetry_tool_usage_daily') +
    pg_total_relation_size('telemetry_tool_patterns') +
    pg_total_relation_size('telemetry_workflow_insights') +
    pg_total_relation_size('telemetry_error_patterns') +
    pg_total_relation_size('telemetry_validation_insights'),
    0
  ) INTO agg_size;

  total_size_mb := ROUND(db_size::NUMERIC / 1024 / 1024, 2);
  events_size_mb := ROUND(events_size::NUMERIC / 1024 / 1024, 2);
  workflows_size_mb := ROUND(workflows_size::NUMERIC / 1024 / 1024, 2);
  aggregates_size_mb := ROUND(agg_size::NUMERIC / 1024 / 1024, 2);
  percent_of_limit := ROUND((total_size_mb / limit_mb) * 100, 1);

  -- Estimate growth rate (simple 7-day average)
  SELECT ROUND(
    (SELECT COUNT(*) FROM telemetry_events WHERE created_at > NOW() - INTERVAL '7 days')::NUMERIC
    * (pg_column_size(telemetry_events.*))::NUMERIC
    / 7 / 1024 / 1024, 2
  ) INTO growth_rate_mb_per_day
  FROM telemetry_events LIMIT 1;

  IF growth_rate_mb_per_day > 0 THEN
    days_until_full := ROUND((limit_mb - total_size_mb) / growth_rate_mb_per_day, 0);
  ELSE
    days_until_full := NULL;
  END IF;

  -- Determine status
  IF percent_of_limit >= 90 THEN
    status := 'CRITICAL - Immediate action required';
  ELSIF percent_of_limit >= 75 THEN
    status := 'WARNING - Monitor closely';
  ELSIF percent_of_limit >= 50 THEN
    status := 'CAUTION - Plan optimization';
  ELSE
    status := 'HEALTHY';
  END IF;

  RETURN NEXT;
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION check_database_size IS 'Monitor database size and growth. Run daily or on-demand.';

-- ============================================================================
-- PART 6: EMERGENCY CLEANUP (ONE-TIME USE)
-- ============================================================================

-- Emergency function to immediately free up space (use if critical)
CREATE OR REPLACE FUNCTION emergency_cleanup()
RETURNS TABLE(
  action TEXT,
  rows_deleted INTEGER,
  space_freed_mb NUMERIC
) AS $$
DECLARE
  size_before BIGINT;
  size_after BIGINT;
  del_count INTEGER;
BEGIN
  SELECT pg_total_relation_size('telemetry_events') INTO size_before;

  -- Aggregate everything older than 7 days
  PERFORM run_telemetry_aggregation_and_cleanup(7);

  -- Delete all non-critical events older than 7 days
  DELETE FROM telemetry_events
  WHERE created_at < NOW() - INTERVAL '7 days'
    AND event NOT IN ('error_occurred', 'workflow_validation_failed');

  GET DIAGNOSTICS del_count = ROW_COUNT;

  action := 'Deleted non-critical events > 7 days';
  rows_deleted := del_count;
  RETURN NEXT;

  -- Delete error events older than 14 days
  DELETE FROM telemetry_events
  WHERE created_at < NOW() - INTERVAL '14 days'
    AND event = 'error_occurred';

  GET DIAGNOSTICS del_count = ROW_COUNT;

  action := 'Deleted error events > 14 days';
  rows_deleted := del_count;
  RETURN NEXT;

  -- Delete duplicate workflows
  WITH workflow_duplicates AS (
    SELECT id
    FROM (
      SELECT id,
             ROW_NUMBER() OVER (
               PARTITION BY workflow_hash
               ORDER BY created_at DESC
             ) as rn
      FROM telemetry_workflows
    ) sub
    WHERE rn > 1
  )
  DELETE FROM telemetry_workflows
  WHERE id IN (SELECT id FROM workflow_duplicates);

  GET DIAGNOSTICS del_count = ROW_COUNT;

  action := 'Deleted duplicate workflows';
  rows_deleted := del_count;
  RETURN NEXT;

  -- VACUUM will be run separately
  SELECT pg_total_relation_size('telemetry_events') INTO size_after;

  action := 'TOTAL (run VACUUM separately)';
  rows_deleted := 0;
  space_freed_mb := ROUND((size_before - size_after)::NUMERIC / 1024 / 1024, 2);
  RETURN NEXT;

  RAISE NOTICE 'Emergency cleanup complete. Run VACUUM FULL for maximum space recovery.';
END;
$$ LANGUAGE plpgsql;

COMMENT ON FUNCTION emergency_cleanup IS 'Emergency cleanup when database is near capacity. Run once, then VACUUM.';

-- ============================================================================
-- USAGE INSTRUCTIONS
-- ============================================================================

/*

SETUP (Run once):
1. Execute this entire script in Supabase SQL Editor
2. Verify cron jobs are scheduled:
   SELECT * FROM cron.job;
3. Run initial monitoring:
   SELECT * FROM check_database_size();

DAILY OPERATIONS (Automatic):
- Cleanup job runs daily at 2 AM UTC
- Aggregates data older than 3 days
- Deletes raw events after aggregation
- Separate job vacuums tables at 2:30 AM UTC to reclaim space

MONITORING:
-- Check current database health
SELECT * FROM check_database_size();

-- View aggregated insights
SELECT * FROM telemetry_tool_usage_daily ORDER BY aggregation_date DESC LIMIT 100;
SELECT * FROM telemetry_tool_patterns ORDER BY occurrence_count DESC LIMIT 20;
SELECT * FROM telemetry_error_patterns ORDER BY occurrence_count DESC LIMIT 20;

MANUAL CLEANUP (if needed):
-- Run cleanup manually (3-day retention)
SELECT * FROM run_telemetry_aggregation_and_cleanup(3);
VACUUM ANALYZE telemetry_events;

-- Emergency cleanup (7-day retention)
SELECT * FROM emergency_cleanup();
VACUUM FULL telemetry_events;
VACUUM FULL telemetry_workflows;

TUNING:
-- Adjust retention period (e.g., 5 days instead of 3) by re-registering the
-- cleanup job; the separate vacuum job does not need to change
SELECT cron.schedule(
  'telemetry-daily-cleanup',
  '0 2 * * *',
  'SELECT run_telemetry_aggregation_and_cleanup(5)'
);

EXPECTED RESULTS:
- Initial run: ~120 MB space freed (265 MB → ~145 MB)
- Steady state: ~90-120 MB total database size
- Growth rate: ~2-3 MB/day (down from 7.7 MB/day)
- Headroom: 70-80% of free tier limit available

*/
961
telemetry-pruning-analysis.md
Normal file
@@ -0,0 +1,961 @@
# n8n-MCP Telemetry Database Pruning Strategy

**Analysis Date:** 2025-10-10
**Current Database Size:** 265 MB (telemetry_events: 199 MB, telemetry_workflows: 66 MB)
**Free Tier Limit:** 500 MB
**Projected 4-Week Size:** 609 MB (exceeds limit by 109 MB)

---

## Executive Summary

**Critical Finding:** At current growth rate (56.75% of data from last 7 days), we will exceed the 500 MB free tier limit in approximately 2 weeks. Implementing a 7-day retention policy can immediately save 36.5 MB (37.6%) and prevent database overflow.

**Key Insights:**
- 641,487 event records consuming 199 MB
- 17,247 workflow records consuming 66 MB
- Daily growth rate: ~7-8 MB/day for events
- 43.25% of data is older than 7 days but provides diminishing value

**Immediate Action Required:** Implement automated pruning to maintain the database under 500 MB.

---

## 1. Current State Assessment

### Database Size and Distribution

| Table | Rows | Current Size | % From Last 7 Days | Bytes/Row |
|-------|------|--------------|--------------------|-----------|
| telemetry_events | 641,487 | 199 MB | 56.66% | 325 |
| telemetry_workflows | 17,247 | 66 MB | 60.09% | 4,013 |
| **TOTAL** | **658,734** | **265 MB** | **56.75%** | **403** |

### Event Type Distribution

| Event Type | Count | % of Total | Storage | Avg Props Size | Oldest Event |
|------------|-------|-----------|---------|----------------|--------------|
| tool_sequence | 362,170 | 56.4% | 67 MB | 194 bytes | 2025-09-26 |
| tool_used | 191,659 | 29.9% | 14 MB | 77 bytes | 2025-09-26 |
| validation_details | 36,266 | 5.7% | 11 MB | 329 bytes | 2025-09-26 |
| workflow_created | 23,151 | 3.6% | 2.6 MB | 115 bytes | 2025-09-26 |
| session_start | 12,575 | 2.0% | 1.2 MB | 101 bytes | 2025-09-26 |
| workflow_validation_failed | 9,739 | 1.5% | 314 KB | 33 bytes | 2025-09-26 |
| error_occurred | 4,935 | 0.8% | 626 KB | 130 bytes | 2025-09-26 |
| search_query | 974 | 0.2% | 106 KB | 112 bytes | 2025-09-26 |
| Other | 18 | <0.1% | 5 KB | Various | Recent |

### Growth Pattern Analysis

**Daily Data Accumulation (Last 15 Days):**

| Date | Events/Day | Daily Size | Cumulative Size |
|------|-----------|------------|-----------------|
| 2025-10-10 | 28,457 | 4.3 MB | 97 MB |
| 2025-10-09 | 54,717 | 8.2 MB | 93 MB |
| 2025-10-08 | 52,901 | 7.9 MB | 85 MB |
| 2025-10-07 | 52,538 | 8.1 MB | 77 MB |
| 2025-10-06 | 51,401 | 7.8 MB | 69 MB |
| 2025-10-05 | 50,528 | 7.9 MB | 61 MB |

**Average Daily Growth:** ~7.7 MB/day
**Weekly Growth:** ~54 MB/week
**Projected to hit 500 MB limit:** ~17 days (late October 2025)
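
The projection can be re-checked at any time; a minimal sketch (payload bytes only, so it understates true growth because index and row-header overhead are excluded):

```sql
-- Approximate average daily growth over the trailing week (events table only)
SELECT pg_size_pretty((SUM(pg_column_size(t.*)) / 7)::bigint) AS avg_daily_growth
FROM telemetry_events t
WHERE t.created_at >= NOW() - INTERVAL '7 days';
```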

### Workflow Data Distribution

| Complexity | Count | % | Avg Nodes | Avg JSON Size | Estimated Size |
|-----------|-------|---|-----------|---------------|----------------|
| Simple | 12,923 | 74.9% | 5.48 | 2,122 bytes | 20 MB |
| Medium | 3,708 | 21.5% | 13.93 | 4,458 bytes | 12 MB |
| Complex | 616 | 3.6% | 26.62 | 7,909 bytes | 3.2 MB |

**Key Finding:** No duplicate workflow hashes found - each workflow is unique (good data quality).

---
## 2. Data Value Classification

### TIER 1: Critical - Keep Indefinitely

**Error Patterns (error_occurred)**
- **Why:** Essential for identifying systemic issues and regression detection
- **Volume:** 4,935 events (626 KB)
- **Recommendation:** Keep all errors with aggregated summaries for older data
- **Retention:** Detailed errors 30 days, aggregated stats indefinitely

**Tool Usage Statistics (Aggregated)**
- **Why:** Product analytics and feature prioritization
- **Recommendation:** Aggregate daily/weekly summaries after 14 days
- **Keep:** Summary tables with tool usage counts, success rates, and average duration (a sketch follows below)
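
A minimal sketch of such a weekly summary, assuming the `telemetry_tool_usage_daily` rollup created in `supabase-telemetry-aggregation.sql` (the Phase 3 tables below would work the same way):

```sql
-- Weekly rollup derived from the daily aggregates
SELECT date_trunc('week', aggregation_date) AS week,
       tool_name,
       SUM(usage_count) AS uses,
       ROUND(SUM(success_count)::numeric / NULLIF(SUM(usage_count), 0), 3) AS success_rate
FROM telemetry_tool_usage_daily
GROUP BY 1, 2
ORDER BY week DESC, uses DESC;
```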

### TIER 2: High Value - Keep 30 Days

**Validation Details (validation_details)**
- **Current:** 36,266 events, 11 MB, avg 329 bytes
- **Why:** Important for understanding validation issues during current development cycle
- **Value Period:** 30 days (covers current version development)
- **After 30d:** Aggregate to summary stats (validation success rate by node type)

**Workflow Creation Patterns (workflow_created)**
- **Current:** 23,151 events, 2.6 MB
- **Why:** Track feature adoption and workflow patterns
- **Value Period:** 30 days for detailed analysis
- **After 30d:** Keep aggregated metrics only

### TIER 3: Medium Value - Keep 14 Days

**Session Data (session_start)**
- **Current:** 12,575 events, 1.2 MB
- **Why:** User engagement tracking
- **Value Period:** 14 days sufficient for engagement analysis
- **Pruning Impact:** 497 KB saved (40% reduction)

**Workflow Validation Failures (workflow_validation_failed)**
- **Current:** 9,739 events, 314 KB
- **Why:** Tracks validation patterns but less detailed than validation_details
- **Value Period:** 14 days
- **Pruning Impact:** 170 KB saved (54% reduction)

### TIER 4: Short-Term Value - Keep 7 Days

**Tool Sequences (tool_sequence)**
- **Current:** 362,170 events, 67 MB (largest table!)
- **Why:** Tracks multi-tool workflows but extremely high volume
- **Value Period:** 7 days for recent pattern analysis
- **Pruning Impact:** 29 MB saved (43% reduction) - HIGHEST IMPACT
- **Rationale:** Tool usage patterns stabilize quickly; older sequences provide diminishing returns

**Tool Usage Events (tool_used)**
- **Current:** 191,659 events, 14 MB
- **Why:** Individual tool executions - can be aggregated
- **Value Period:** 7 days detailed, then aggregate
- **Pruning Impact:** 6.2 MB saved (44% reduction)

**Search Queries (search_query)**
- **Current:** 974 events, 106 KB
- **Why:** Low volume, useful for understanding search patterns
- **Value Period:** 7 days sufficient
- **Pruning Impact:** Minimal (~1 KB)

### TIER 5: Ephemeral - Keep 3 Days

**Diagnostic/Health Checks (diagnostic_completed, health_check_completed)**
- **Current:** 17 events, ~2.5 KB
- **Why:** Operational health checks, only current state matters
- **Value Period:** 3 days
- **Pruning Impact:** Negligible but good hygiene

### Workflow Data Retention Strategy

**telemetry_workflows Table (66 MB):**
- **Simple workflows (5-6 nodes):** Keep 7 days → Save 11 MB
- **Medium workflows (13-14 nodes):** Keep 14 days → Save 6.7 MB
- **Complex workflows (26+ nodes):** Keep 30 days → Save 1.9 MB
- **Total Workflow Savings:** 19.6 MB with tiered retention

**Rationale:** Complex workflows are rarer and more valuable for understanding advanced use cases.

---

## 3. Pruning Recommendations with Space Savings

### Strategy A: Conservative 14-Day Retention (Recommended for Initial Implementation)

| Action | Records Deleted | Space Saved | Risk Level |
|--------|----------------|-------------|------------|
| Delete tool_sequence > 14d | 0 | 0 MB | None - all recent |
| Delete tool_used > 14d | 0 | 0 MB | None - all recent |
| Delete validation_details > 14d | 4,259 | 1.2 MB | Low |
| Delete session_start > 14d | 0 | 0 MB | None - all recent |
| Delete workflows > 14d | 1 | <1 KB | None |
| **TOTAL** | **4,260** | **1.2 MB** | **Low** |

**Assessment:** Minimal immediate impact because almost all data is newer than 14 days. Not sufficient to prevent overflow.

### Strategy B: Aggressive 7-Day Retention (RECOMMENDED)

| Action | Records Deleted | Space Saved | Risk Level |
|--------|----------------|-------------|------------|
| Delete tool_sequence > 7d | 155,389 | 29 MB | Low - pattern data |
| Delete tool_used > 7d | 82,827 | 6.2 MB | Low - usage metrics |
| Delete validation_details > 7d | 17,465 | 5.4 MB | Medium - debugging data |
| Delete workflow_created > 7d | 9,106 | 1.0 MB | Low - creation events |
| Delete session_start > 7d | 5,664 | 497 KB | Low - session data |
| Delete error_occurred > 7d | 2,321 | 206 KB | Medium - error history |
| Delete workflow_validation_failed > 7d | 5,269 | 170 KB | Low - validation events |
| Delete workflows > 7d (simple) | 5,146 | 11 MB | Low - simple workflows |
| Delete workflows > 7d (medium) | 1,506 | 6.7 MB | Medium - medium workflows |
| Delete workflows > 7d (complex) | 231 | 1.9 MB | High - complex workflows |
| **TOTAL** | **284,924** | **62.1 MB** | **Medium** |

**New Database Size:** 265 MB - 62.1 MB = **202.9 MB (40.6% of the 500 MB limit)**
**Buffer:** 297 MB remaining (~38 days at current growth rate)

### Strategy C: Hybrid Tiered Retention (OPTIMAL LONG-TERM)

| Event Type | Retention Period | Records Deleted | Space Saved |
|-----------|------------------|----------------|-------------|
| tool_sequence | 7 days | 155,389 | 29 MB |
| tool_used | 7 days | 82,827 | 6.2 MB |
| validation_details | 14 days | 4,259 | 1.2 MB |
| workflow_created | 14 days | 3 | <1 KB |
| session_start | 7 days | 5,664 | 497 KB |
| error_occurred | 30 days (keep all) | 0 | 0 MB |
| workflow_validation_failed | 7 days | 5,269 | 170 KB |
| search_query | 7 days | 10 | 1 KB |
| Workflows (simple) | 7 days | 5,146 | 11 MB |
| Workflows (medium) | 14 days | 0 | 0 MB |
| Workflows (complex) | 30 days (keep all) | 0 | 0 MB |
| **TOTAL** | **Various** | **258,567** | **48.1 MB** |

**New Database Size:** 265 MB - 48.1 MB = **216.9 MB (43.4% of the 500 MB limit)**
**Buffer:** 283 MB remaining (~36 days at current growth rate)

---

## 4. Additional Optimization Opportunities

### Optimization 1: Properties Field Compression

**Finding:** validation_details events have bloated properties (avg 329 bytes, max 9 KB)

```sql
-- Identify large validation_details records
SELECT id, user_id, created_at, pg_column_size(properties) as size_bytes
FROM telemetry_events
WHERE event = 'validation_details'
  AND pg_column_size(properties) > 1000
ORDER BY size_bytes DESC;
-- Result: 417 records > 1KB, 2 records > 5KB
```

**Recommendation:** Truncate verbose error messages in validation_details after 7 days (a sketch follows below)
- Keep error types and counts
- Remove full stack traces and detailed messages
- Estimated savings: 2-3 MB
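
A hypothetical sketch of the truncation (the `errorMessage` key is an assumption here; adjust it to whichever property actually carries the verbose text):

```sql
-- Trim oversized messages in old validation_details rows, keeping the first
-- 200 characters as a representative sample
UPDATE telemetry_events
SET properties = jsonb_set(
      properties,
      '{errorMessage}',  -- assumed key; not confirmed against the event schema
      to_jsonb(left(properties->>'errorMessage', 200))
    )
WHERE event = 'validation_details'
  AND created_at < NOW() - INTERVAL '7 days'
  AND properties ? 'errorMessage'
  AND length(properties->>'errorMessage') > 200
  AND pg_column_size(properties) > 1000;
```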

### Optimization 2: Remove Redundant tool_sequence Data

**Finding:** tool_sequence properties contain mostly null values

```sql
-- Analysis shows all tool_sequence.properties->>'tools' are null
-- 362,170 records storing null in properties field
```

**Recommendation:**
1. Investigate why tool_sequence properties are empty (a verification query is sketched below)
2. If by design, reduce properties field size or use a flag
3. Potential savings: 10-15 MB if properties field is eliminated
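
A quick check for step 1 (illustrative; it counts how many of these rows actually carry any payload):

```sql
SELECT COUNT(*) AS total_rows,
       COUNT(*) FILTER (WHERE properties IS NULL OR properties = '{}'::jsonb) AS empty_properties
FROM telemetry_events
WHERE event = 'tool_sequence';
```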

### Optimization 3: Workflow Deduplication by Hash

**Finding:** No duplicate workflow_hash values found (good!)

**Recommendation:** Continue using workflow_hash for future deduplication if needed. No action required.

### Optimization 4: Dead Row Cleanup

**Finding:** telemetry_workflows has 1,591 dead rows (9.5% overhead)

```sql
-- Run VACUUM to reclaim space
VACUUM FULL telemetry_workflows;
-- Expected savings: ~6-7 MB
```

**Recommendation:** Schedule weekly VACUUM operations

### Optimization 5: Index Optimization

**Current indexes consume space but improve query performance**

```sql
-- Check index sizes
SELECT
  schemaname, relname, indexrelname,
  pg_size_pretty(pg_relation_size(indexrelid)) as index_size
FROM pg_stat_user_indexes
WHERE schemaname = 'public'
ORDER BY pg_relation_size(indexrelid) DESC;
```

**Recommendation:** Review whether all indexes are still necessary after the pruning strategy is implemented

---

## 5. Implementation Strategy

### Phase 1: Immediate Emergency Pruning (Day 1)

**Goal:** Free up 60+ MB immediately to prevent overflow

```sql
-- EMERGENCY PRUNING: Delete data older than 7 days
BEGIN;

-- Record the counts that are about to be deleted
SELECT
  event,
  COUNT(*) FILTER (WHERE created_at < NOW() - INTERVAL '7 days') as to_delete
FROM telemetry_events
GROUP BY event;

-- Delete old events
DELETE FROM telemetry_events
WHERE created_at < NOW() - INTERVAL '7 days';
-- Expected: ~278,051 rows deleted, ~36.5 MB saved

-- Delete old simple workflows
DELETE FROM telemetry_workflows
WHERE created_at < NOW() - INTERVAL '7 days'
  AND complexity = 'simple';
-- Expected: ~5,146 rows deleted, ~11 MB saved

-- Verify new size
SELECT
  schemaname, relname,
  pg_size_pretty(pg_total_relation_size(schemaname||'.'||relname)) AS size
FROM pg_stat_user_tables
WHERE schemaname = 'public';

COMMIT;

-- Clean up dead rows (outside the transaction)
VACUUM FULL telemetry_events;
VACUUM FULL telemetry_workflows;
```

**Expected Result:** Database size reduced to ~210-220 MB (55-60% buffer remaining)

### Phase 2: Implement Automated Retention Policy (Week 1)

**Create a scheduled Supabase Edge Function or pg_cron job**

```sql
-- Create retention policy function
CREATE OR REPLACE FUNCTION apply_retention_policy()
RETURNS void AS $$
BEGIN
  -- Tier 4: 7-day retention for high-volume events
  DELETE FROM telemetry_events
  WHERE created_at < NOW() - INTERVAL '7 days'
    AND event IN ('tool_sequence', 'tool_used', 'session_start',
                  'workflow_validation_failed', 'search_query');

  -- Tier 3: 14-day retention for medium-value events
  DELETE FROM telemetry_events
  WHERE created_at < NOW() - INTERVAL '14 days'
    AND event IN ('validation_details', 'workflow_created');

  -- Tier 1: 30-day retention for errors (keep longer)
  DELETE FROM telemetry_events
  WHERE created_at < NOW() - INTERVAL '30 days'
    AND event = 'error_occurred';

  -- Workflow retention by complexity
  DELETE FROM telemetry_workflows
  WHERE created_at < NOW() - INTERVAL '7 days'
    AND complexity = 'simple';

  DELETE FROM telemetry_workflows
  WHERE created_at < NOW() - INTERVAL '14 days'
    AND complexity = 'medium';

  DELETE FROM telemetry_workflows
  WHERE created_at < NOW() - INTERVAL '30 days'
    AND complexity = 'complex';

  -- Note: VACUUM cannot run inside a function (it would execute within the
  -- function's transaction), so it is scheduled as a separate job below
END;
$$ LANGUAGE plpgsql;

-- Schedule daily execution (using pg_cron extension)
SELECT cron.schedule('retention-policy', '0 2 * * *', 'SELECT apply_retention_policy()');

-- Reclaim space in a separate single-statement job, since VACUUM must run
-- outside a transaction block
SELECT cron.schedule('retention-vacuum', '30 2 * * *',
  'VACUUM telemetry_events, telemetry_workflows');
```

### Phase 3: Create Aggregation Tables (Week 2)

**Preserve insights while deleting raw data**

```sql
-- Daily tool usage summary
CREATE TABLE IF NOT EXISTS telemetry_daily_tool_stats (
  date DATE NOT NULL,
  tool TEXT NOT NULL,
  usage_count INTEGER NOT NULL,
  unique_users INTEGER NOT NULL,
  avg_duration_ms NUMERIC,
  error_count INTEGER DEFAULT 0,
  created_at TIMESTAMPTZ DEFAULT NOW(),
  PRIMARY KEY (date, tool)
);

-- Daily validation summary
CREATE TABLE IF NOT EXISTS telemetry_daily_validation_stats (
  date DATE NOT NULL,
  node_type TEXT,
  total_validations INTEGER NOT NULL,
  failed_validations INTEGER NOT NULL,
  success_rate NUMERIC,
  common_errors JSONB,
  created_at TIMESTAMPTZ DEFAULT NOW(),
  PRIMARY KEY (date, node_type)
);

-- Aggregate function to run before pruning
CREATE OR REPLACE FUNCTION aggregate_before_pruning()
RETURNS void AS $$
BEGIN
  -- Aggregate tool usage for data about to be deleted
  -- (COALESCE: tool is part of the primary key, so it must not be NULL)
  INSERT INTO telemetry_daily_tool_stats (date, tool, usage_count, unique_users, avg_duration_ms)
  SELECT
    DATE(created_at) as date,
    COALESCE(properties->>'tool', 'unknown') as tool,
    COUNT(*) as usage_count,
    COUNT(DISTINCT user_id) as unique_users,
    AVG((properties->>'duration')::numeric) as avg_duration_ms
  FROM telemetry_events
  WHERE event = 'tool_used'
    AND created_at < NOW() - INTERVAL '7 days'
    AND created_at >= NOW() - INTERVAL '8 days'
  GROUP BY DATE(created_at), COALESCE(properties->>'tool', 'unknown')
  ON CONFLICT (date, tool) DO NOTHING;

  -- Aggregate validation stats (COALESCE for the same reason as above)
  INSERT INTO telemetry_daily_validation_stats (date, node_type, total_validations, failed_validations)
  SELECT
    DATE(created_at) as date,
    COALESCE(properties->>'nodeType', 'unknown') as node_type,
    COUNT(*) as total_validations,
    COUNT(*) FILTER (WHERE properties->>'valid' = 'false') as failed_validations
  FROM telemetry_events
  WHERE event = 'validation_details'
    AND created_at < NOW() - INTERVAL '14 days'
    AND created_at >= NOW() - INTERVAL '15 days'
  GROUP BY DATE(created_at), COALESCE(properties->>'nodeType', 'unknown')
  ON CONFLICT (date, node_type) DO NOTHING;
END;
$$ LANGUAGE plpgsql;

-- Update the cron job to aggregate before pruning; unschedule the Phase 2
-- 'retention-policy' job first so the deletes do not run twice
SELECT cron.unschedule('retention-policy');
SELECT cron.schedule('aggregate-then-prune', '0 2 * * *',
  'SELECT aggregate_before_pruning(); SELECT apply_retention_policy();');
```

### Phase 4: Monitoring and Alerting (Week 2)

**Create size monitoring function**

```sql
CREATE OR REPLACE FUNCTION check_database_size()
RETURNS TABLE(
  total_size_mb NUMERIC,
  limit_mb NUMERIC,
  percent_used NUMERIC,
  days_until_full NUMERIC
) AS $$
DECLARE
  current_size_bytes BIGINT;
  growth_rate_bytes_per_day NUMERIC;
BEGIN
  -- Get current size
  SELECT SUM(pg_total_relation_size(schemaname||'.'||relname))
  INTO current_size_bytes
  FROM pg_stat_user_tables
  WHERE schemaname = 'public';

  -- Calculate 7-day growth rate
  SELECT
    (COUNT(*) FILTER (WHERE created_at >= NOW() - INTERVAL '7 days')) *
    AVG(pg_column_size(properties)) * (1.0/7)
  INTO growth_rate_bytes_per_day
  FROM telemetry_events;

  RETURN QUERY
  SELECT
    ROUND((current_size_bytes / 1024.0 / 1024.0)::numeric, 2) as total_size_mb,
    500.0 as limit_mb,
    ROUND((current_size_bytes / 1024.0 / 1024.0 / 500.0 * 100)::numeric, 2) as percent_used,
    ROUND((((500.0 * 1024 * 1024) - current_size_bytes) / NULLIF(growth_rate_bytes_per_day, 0))::numeric, 1) as days_until_full;
END;
$$ LANGUAGE plpgsql;

-- Alert function (integrate with external monitoring)
CREATE OR REPLACE FUNCTION alert_if_size_critical()
RETURNS void AS $$
DECLARE
  size_pct NUMERIC;
BEGIN
  SELECT percent_used INTO size_pct FROM check_database_size();

  IF size_pct > 90 THEN
    -- Log critical alert
    INSERT INTO telemetry_events (user_id, event, properties)
    VALUES ('system', 'database_size_critical',
            json_build_object('percent_used', size_pct, 'timestamp', NOW())::jsonb);
  END IF;
END;
$$ LANGUAGE plpgsql;
```
---
|
||||
|
||||
## 6. Priority Order for Implementation
|
||||
|
||||
### Priority 1: URGENT (Day 1)
|
||||
1. **Execute Emergency Pruning** - Delete data older than 7 days
|
||||
- Impact: 47.5 MB saved immediately
|
||||
- Risk: Low - data already analyzed
|
||||
- SQL: Provided in Phase 1
|
||||
|
||||
### Priority 2: HIGH (Week 1)
|
||||
2. **Implement Automated Retention Policy**
|
||||
- Impact: Prevents future overflow
|
||||
- Risk: Low with proper testing
|
||||
- Implementation: Phase 2 function
|
||||
|
||||
3. **Run VACUUM FULL**
|
||||
- Impact: 6-7 MB reclaimed from dead rows
|
||||
- Risk: Low but locks tables briefly
|
||||
- Command: `VACUUM FULL telemetry_workflows;`
|
||||
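
Before running `VACUUM FULL`, it can help to confirm the dead-row bloat is actually there; a quick check against PostgreSQL's statistics view:

```sql
-- Estimate dead-row bloat on the telemetry tables
SELECT relname, n_live_tup, n_dead_tup,
       ROUND(100.0 * n_dead_tup / NULLIF(n_live_tup + n_dead_tup, 0), 1) AS dead_pct
FROM pg_stat_user_tables
WHERE relname LIKE 'telemetry%'
ORDER BY n_dead_tup DESC;
```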

### Priority 3: MEDIUM (Week 2)
4. **Create Aggregation Tables**
   - Impact: Preserves insights, enables longer-term pruning
   - Risk: Low - additive only
   - Implementation: Phase 3 tables and functions

5. **Implement Monitoring**
   - Impact: Prevents future surprises
   - Risk: None
   - Implementation: Phase 4 monitoring functions

### Priority 4: LOW (Month 1)
6. **Optimize Properties Fields**
   - Impact: 2-3 MB additional savings
   - Risk: Medium - requires code changes
   - Action: Truncate verbose error messages

7. **Investigate tool_sequence null properties**
   - Impact: 10-15 MB potential savings
   - Risk: Medium - requires application changes
   - Action: Code review and optimization

---

## 7. Risk Assessment

### Strategy B (7-Day Retention): Risks and Mitigations

| Risk | Likelihood | Impact | Mitigation |
|------|-----------|---------|------------|
| Loss of debugging data for old issues | Medium | Medium | Keep error_occurred for 30 days; aggregate validation stats |
| Unable to analyze long-term trends | Low | Low | Implement aggregation tables before pruning |
| Accidental deletion of critical data | Low | High | Test on staging; implement backups; add rollback capability |
| Performance impact during deletion | Medium | Low | Run during off-peak hours (2 AM UTC) |
| VACUUM locks table briefly | Low | Low | Schedule during low-usage window |

### Strategy C (Hybrid Tiered): Risks and Mitigations

| Risk | Likelihood | Impact | Mitigation |
|------|-----------|---------|------------|
| Complex logic leads to bugs | Medium | Medium | Thorough testing; monitoring; gradual rollout |
| Different retention per event type confusing | Low | Low | Document clearly; add comments in code |
| Tiered approach still insufficient | Low | High | Monitor growth; adjust retention if needed |

---

## 8. Monitoring Metrics

### Key Metrics to Track Post-Implementation

1. **Database Size Trend**
   ```sql
   SELECT * FROM check_database_size();
   ```
   - Target: Stay under 300 MB (60% of limit)
   - Alert threshold: 90% (450 MB)

2. **Daily Growth Rate**
   ```sql
   SELECT
     DATE(created_at) as date,
     COUNT(*) as events,
     pg_size_pretty(SUM(pg_column_size(properties))::bigint) as daily_size
   FROM telemetry_events
   WHERE created_at >= NOW() - INTERVAL '7 days'
   GROUP BY DATE(created_at)
   ORDER BY date DESC;
   ```
   - Target: < 8 MB/day average
   - Alert threshold: > 12 MB/day sustained

3. **Retention Policy Execution**
   ```sql
   -- Add logging to retention policy function
   CREATE TABLE retention_policy_log (
     executed_at TIMESTAMPTZ DEFAULT NOW(),
     events_deleted INTEGER,
     workflows_deleted INTEGER,
     space_reclaimed_mb NUMERIC
   );
   ```
   - Monitor: Daily successful execution (a scheduling sketch that populates this log follows this list)
   - Alert: If job fails or deletes 0 rows unexpectedly

4. **Data Availability Check**
   ```sql
   -- Ensure sufficient data for analysis
   SELECT
     event,
     COUNT(*) as available_records,
     MIN(created_at) as oldest_record,
     MAX(created_at) as newest_record
   FROM telemetry_events
   GROUP BY event;
   ```
   - Target: 7 days of data always available
   - Alert: If oldest_record > 8 days ago (retention policy failing)
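
The `retention_policy_log` table under metric 3 still needs something to write to it. A minimal sketch, assuming the `apply_retention_policy()` function from Script 2 below and the pg_cron extension; the job name and column mapping are illustrative:

```sql
-- Run the policy nightly and record a summary row per run
SELECT cron.schedule('retention-policy-logged', '0 2 * * *', $cmd$
  INSERT INTO retention_policy_log (events_deleted, workflows_deleted)
  SELECT
    COALESCE(SUM(records_deleted) FILTER (WHERE action NOT LIKE 'delete_workflows%'), 0),
    COALESCE(SUM(records_deleted) FILTER (WHERE action LIKE 'delete_workflows%'), 0)
  FROM apply_retention_policy();
$cmd$);
```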

---

## 9. Recommended Action Plan

### Immediate Actions (Today)

**Step 1:** Execute emergency pruning
```sql
-- Backup first (optional but recommended)
-- Create a copy of current stats
CREATE TABLE telemetry_events_stats_backup AS
SELECT event, COUNT(*), MIN(created_at), MAX(created_at)
FROM telemetry_events
GROUP BY event;

-- Execute pruning
DELETE FROM telemetry_events WHERE created_at < NOW() - INTERVAL '7 days';
DELETE FROM telemetry_workflows WHERE created_at < NOW() - INTERVAL '7 days' AND complexity = 'simple';
VACUUM FULL telemetry_events;
VACUUM FULL telemetry_workflows;
```

**Step 2:** Verify results
```sql
SELECT * FROM check_database_size();
```

**Expected outcome:** Database size ~210-220 MB (58-60% buffer remaining)

### Week 1 Actions

**Step 3:** Implement automated retention policy
- Create retention policy function (Phase 2 code)
- Test function on staging/development environment
- Schedule daily execution via pg_cron

**Step 4:** Set up monitoring
- Create monitoring functions (Phase 4 code)
- Configure alerts for size thresholds
- Document escalation procedures
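
One way to wire the Phase 4 alert into the schedule (a sketch, assuming pg_cron; the hourly cadence is a suggestion, not a requirement):

```sql
-- Check size hourly; alert_if_size_critical() logs an event above 90%
SELECT cron.schedule('size-alert-check', '0 * * * *',
  'SELECT alert_if_size_critical();');
```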

### Week 2 Actions

**Step 5:** Create aggregation tables
- Implement summary tables (Phase 3 code)
- Backfill historical aggregations if needed
- Update retention policy to aggregate before pruning

**Step 6:** Optimize and tune
- Review query performance post-pruning
- Adjust retention periods if needed based on actual usage
- Document any issues or improvements

### Monthly Maintenance

**Step 7:** Regular review
- Monthly review of database growth trends
- Quarterly review of retention policy effectiveness
- Adjust retention periods based on product needs

---

## 10. SQL Execution Scripts

### Script 1: Emergency Pruning (Run First)

```sql
-- ============================================
-- EMERGENCY PRUNING SCRIPT
-- Expected savings: ~50 MB
-- Execution time: 2-5 minutes
-- ============================================

BEGIN;

-- Create backup of current state
CREATE TABLE IF NOT EXISTS pruning_audit (
  executed_at TIMESTAMPTZ DEFAULT NOW(),
  action TEXT,
  records_affected INTEGER,
  size_before_mb NUMERIC,
  size_after_mb NUMERIC
);

-- Record size before
INSERT INTO pruning_audit (action, size_before_mb)
SELECT 'before_pruning',
  pg_total_relation_size('telemetry_events')::numeric / 1024 / 1024;

-- Delete old events (keep last 7 days)
WITH deleted AS (
  DELETE FROM telemetry_events
  WHERE created_at < NOW() - INTERVAL '7 days'
  RETURNING *
)
INSERT INTO pruning_audit (action, records_affected)
SELECT 'delete_events_7d', COUNT(*) FROM deleted;

-- Delete old simple workflows (keep last 7 days)
WITH deleted AS (
  DELETE FROM telemetry_workflows
  WHERE created_at < NOW() - INTERVAL '7 days'
  AND complexity = 'simple'
  RETURNING *
)
INSERT INTO pruning_audit (action, records_affected)
SELECT 'delete_workflows_simple_7d', COUNT(*) FROM deleted;

-- Record size after
UPDATE pruning_audit
SET size_after_mb = pg_total_relation_size('telemetry_events')::numeric / 1024 / 1024
WHERE action = 'before_pruning';

COMMIT;

-- Cleanup dead space
VACUUM FULL telemetry_events;
VACUUM FULL telemetry_workflows;

-- Verify results
SELECT * FROM pruning_audit ORDER BY executed_at DESC LIMIT 5;
SELECT * FROM check_database_size();
```
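
If the single large DELETE above holds locks for too long on a busy instance, the same pruning can be done in batches. A sketch (requires PostgreSQL 11+, which allows COMMIT inside a DO block; the 10,000-row batch size is a hypothetical starting point):

```sql
DO $$
DECLARE
  rows_deleted INTEGER;
BEGIN
  LOOP
    -- Delete one batch of expired events by physical row id
    DELETE FROM telemetry_events
    WHERE ctid IN (
      SELECT ctid FROM telemetry_events
      WHERE created_at < NOW() - INTERVAL '7 days'
      LIMIT 10000
    );
    GET DIAGNOSTICS rows_deleted = ROW_COUNT;
    EXIT WHEN rows_deleted = 0;
    COMMIT;  -- release locks between batches
  END LOOP;
END $$;
```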

### Script 2: Create Retention Policy (Run After Testing)

```sql
-- ============================================
-- AUTOMATED RETENTION POLICY
-- Schedule: Daily at 2 AM UTC
-- ============================================

CREATE OR REPLACE FUNCTION apply_retention_policy()
RETURNS TABLE(
  action TEXT,
  records_deleted INTEGER,
  execution_time_ms INTEGER
) AS $$
DECLARE
  start_time TIMESTAMPTZ;
  end_time TIMESTAMPTZ;
  deleted_count INTEGER;
BEGIN
  -- Tier 4: 7-day retention (high volume, low long-term value)
  start_time := clock_timestamp();

  DELETE FROM telemetry_events
  WHERE created_at < NOW() - INTERVAL '7 days'
  AND event IN ('tool_sequence', 'tool_used', 'session_start',
    'workflow_validation_failed', 'search_query');
  GET DIAGNOSTICS deleted_count = ROW_COUNT;

  end_time := clock_timestamp();
  action := 'delete_tier4_7d';
  records_deleted := deleted_count;
  execution_time_ms := EXTRACT(MILLISECONDS FROM (end_time - start_time))::INTEGER;
  RETURN NEXT;

  -- Tier 3: 14-day retention (medium value)
  start_time := clock_timestamp();

  DELETE FROM telemetry_events
  WHERE created_at < NOW() - INTERVAL '14 days'
  AND event IN ('validation_details', 'workflow_created');
  GET DIAGNOSTICS deleted_count = ROW_COUNT;

  end_time := clock_timestamp();
  action := 'delete_tier3_14d';
  records_deleted := deleted_count;
  execution_time_ms := EXTRACT(MILLISECONDS FROM (end_time - start_time))::INTEGER;
  RETURN NEXT;

  -- Tier 1: 30-day retention (errors - keep longer)
  start_time := clock_timestamp();

  DELETE FROM telemetry_events
  WHERE created_at < NOW() - INTERVAL '30 days'
  AND event = 'error_occurred';
  GET DIAGNOSTICS deleted_count = ROW_COUNT;

  end_time := clock_timestamp();
  action := 'delete_errors_30d';
  records_deleted := deleted_count;
  execution_time_ms := EXTRACT(MILLISECONDS FROM (end_time - start_time))::INTEGER;
  RETURN NEXT;

  -- Workflow pruning by complexity
  start_time := clock_timestamp();

  DELETE FROM telemetry_workflows
  WHERE created_at < NOW() - INTERVAL '7 days'
  AND complexity = 'simple';
  GET DIAGNOSTICS deleted_count = ROW_COUNT;

  end_time := clock_timestamp();
  action := 'delete_workflows_simple_7d';
  records_deleted := deleted_count;
  execution_time_ms := EXTRACT(MILLISECONDS FROM (end_time - start_time))::INTEGER;
  RETURN NEXT;

  start_time := clock_timestamp();

  DELETE FROM telemetry_workflows
  WHERE created_at < NOW() - INTERVAL '14 days'
  AND complexity = 'medium';
  GET DIAGNOSTICS deleted_count = ROW_COUNT;

  end_time := clock_timestamp();
  action := 'delete_workflows_medium_14d';
  records_deleted := deleted_count;
  execution_time_ms := EXTRACT(MILLISECONDS FROM (end_time - start_time))::INTEGER;
  RETURN NEXT;

  start_time := clock_timestamp();

  DELETE FROM telemetry_workflows
  WHERE created_at < NOW() - INTERVAL '30 days'
  AND complexity = 'complex';
  GET DIAGNOSTICS deleted_count = ROW_COUNT;

  end_time := clock_timestamp();
  action := 'delete_workflows_complex_30d';
  records_deleted := deleted_count;
  execution_time_ms := EXTRACT(MILLISECONDS FROM (end_time - start_time))::INTEGER;
  RETURN NEXT;

  -- Note: VACUUM cannot run inside a PL/pgSQL function - PostgreSQL refuses
  -- to execute it within a transaction block. Run it as a separate step
  -- instead, e.g. a second pg_cron job shortly after this one:
  --   SELECT cron.schedule('vacuum-telemetry', '15 2 * * *',
  --     'VACUUM telemetry_events; VACUUM telemetry_workflows;');
END;
$$ LANGUAGE plpgsql;

-- Test the function manually first. Note this is a real run, not a dry run:
-- it deletes every row that matches the retention tiers above.
SELECT * FROM apply_retention_policy();

-- After testing, schedule with pg_cron
-- Requires pg_cron extension: CREATE EXTENSION IF NOT EXISTS pg_cron;
-- SELECT cron.schedule('retention-policy', '0 2 * * *', 'SELECT apply_retention_policy()');
```

### Script 3: Create Monitoring Dashboard

```sql
-- ============================================
-- MONITORING QUERIES
-- Run these regularly to track database health
-- ============================================

-- Query 1: Current database size and projections
SELECT
  'Current Size' as metric,
  pg_size_pretty(SUM(pg_total_relation_size(schemaname||'.'||relname))) as value
FROM pg_stat_user_tables
WHERE schemaname = 'public'
UNION ALL
SELECT
  'Free Tier Limit' as metric,
  '500 MB' as value
UNION ALL
SELECT
  'Percent Used' as metric,
  CONCAT(
    ROUND(
      (SUM(pg_total_relation_size(schemaname||'.'||relname))::numeric /
      (500.0 * 1024 * 1024) * 100),
      2
    ),
    '%'
  ) as value
FROM pg_stat_user_tables
WHERE schemaname = 'public';

-- Query 2: Data age distribution
SELECT
  event,
  COUNT(*) as total_records,
  MIN(created_at) as oldest_record,
  MAX(created_at) as newest_record,
  ROUND(EXTRACT(EPOCH FROM (MAX(created_at) - MIN(created_at))) / 86400, 2) as age_days
FROM telemetry_events
GROUP BY event
ORDER BY total_records DESC;

-- Query 3: Daily growth tracking (last 7 days)
SELECT
  DATE(created_at) as date,
  COUNT(*) as daily_events,
  pg_size_pretty(SUM(pg_column_size(properties))::bigint) as daily_data_size,
  COUNT(DISTINCT user_id) as active_users
FROM telemetry_events
WHERE created_at >= NOW() - INTERVAL '7 days'
GROUP BY DATE(created_at)
ORDER BY date DESC;

-- Query 4: Retention policy effectiveness
-- Read from the retention_policy_log table (section 8) instead of calling
-- apply_retention_policy() here - invoking the function from a monitoring
-- query would actually delete data (and it returns no executed_at column).
SELECT
  DATE(executed_at) as execution_date,
  events_deleted,
  workflows_deleted,
  space_reclaimed_mb
FROM retention_policy_log
ORDER BY execution_date DESC;
```

---

## Conclusion

**Immediate Action Required:** Implement Strategy B (7-day retention) now; at the current growth rate the database will hit the 500 MB limit within two weeks.

**Long-Term Strategy:** Transition to Strategy C (Hybrid Tiered Retention) with automated aggregation to balance data preservation with storage constraints.

**Expected Outcomes:**
- Immediate: 50+ MB saved (26% reduction)
- Ongoing: Database stabilized at 200-220 MB (40-44% of limit)
- Buffer: 30-40 days before limit with current growth rate
- Risk: Low with proper testing and monitoring

**Success Metrics:**
1. Database size < 300 MB consistently
2. 7+ days of detailed event data always available
3. No impact on product analytics capabilities
4. Automated retention policy runs daily without errors

---

**Analysis completed:** 2025-10-10
**Next review date:** 2025-11-10 (monthly check)
**Escalation:** If database exceeds 400 MB, consider upgrading to paid tier or implementing more aggressive pruning

@@ -205,20 +205,9 @@ describe.skipIf(!dbExists)('Database Content Validation', () => {

  it('MUST have FTS5 index properly ranked', () => {
    const results = db.prepare(`
      SELECT
        n.node_type,
        rank
      FROM nodes n
      JOIN nodes_fts ON n.rowid = nodes_fts.rowid
      SELECT node_type, rank FROM nodes_fts
      WHERE nodes_fts MATCH 'webhook'
      ORDER BY
        CASE
          WHEN LOWER(n.display_name) = LOWER('webhook') THEN 0
          WHEN LOWER(n.display_name) LIKE LOWER('%webhook%') THEN 1
          WHEN LOWER(n.node_type) LIKE LOWER('%webhook%') THEN 2
          ELSE 3
        END,
        rank
      ORDER BY rank
      LIMIT 5
    `).all();

@@ -226,7 +215,7 @@ describe.skipIf(!dbExists)('Database Content Validation', () => {
      'CRITICAL: FTS5 ranking not working. Search quality will be degraded.'
    ).toBeGreaterThan(0);

    // Exact match should be in top results (using production boosting logic with CASE-first ordering)
    // Exact match should be in top results
    const topNodes = results.slice(0, 3).map((r: any) => r.node_type);
    expect(topNodes,
      'WARNING: Exact match "nodes-base.webhook" not in top 3 ranked results'

@@ -136,25 +136,14 @@ describe('Node FTS5 Search Integration Tests', () => {
  describe('FTS5 Search Quality', () => {
    it('should rank exact matches higher', () => {
      const results = db.prepare(`
        SELECT
          n.node_type,
          rank
        FROM nodes n
        JOIN nodes_fts ON n.rowid = nodes_fts.rowid
        SELECT node_type, rank FROM nodes_fts
        WHERE nodes_fts MATCH 'webhook'
        ORDER BY
          CASE
            WHEN LOWER(n.display_name) = LOWER('webhook') THEN 0
            WHEN LOWER(n.display_name) LIKE LOWER('%webhook%') THEN 1
            WHEN LOWER(n.node_type) LIKE LOWER('%webhook%') THEN 2
            ELSE 3
          END,
          rank
        ORDER BY rank
        LIMIT 10
      `).all();

      expect(results.length).toBeGreaterThan(0);
      // Exact match should be in top results (using production boosting logic with CASE-first ordering)
      // Exact match should be in top results
      const topResults = results.slice(0, 3).map((r: any) => r.node_type);
      expect(topResults).toContain('nodes-base.webhook');
    });

@@ -1,321 +0,0 @@
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
import { promises as fs } from 'fs';
import * as path from 'path';
import * as os from 'os';

/**
 * Integration tests for sql.js memory leak fix (Issue #330)
 *
 * These tests verify that the SQLJSAdapter optimizations:
 * 1. Use configurable save intervals (default 5000ms)
 * 2. Don't trigger saves on read-only operations
 * 3. Batch multiple rapid writes into single save
 * 4. Clean up resources properly
 *
 * Note: These tests use actual sql.js adapter behavior patterns
 * to verify the fix works under realistic load.
 */

describe('SQLJSAdapter Memory Leak Prevention (Issue #330)', () => {
  let tempDbPath: string;

  beforeEach(async () => {
    // Create temporary database file path
    const tempDir = os.tmpdir();
    tempDbPath = path.join(tempDir, `test-sqljs-${Date.now()}.db`);
  });

  afterEach(async () => {
    // Cleanup temporary file
    try {
      await fs.unlink(tempDbPath);
    } catch (error) {
      // File might not exist, ignore error
    }
  });

  describe('Save Interval Configuration', () => {
    it('should respect SQLJS_SAVE_INTERVAL_MS environment variable', () => {
      const originalEnv = process.env.SQLJS_SAVE_INTERVAL_MS;

      try {
        // Set custom interval
        process.env.SQLJS_SAVE_INTERVAL_MS = '10000';

        // Verify parsing logic
        const envInterval = process.env.SQLJS_SAVE_INTERVAL_MS;
        const interval = envInterval ? parseInt(envInterval, 10) : 5000;

        expect(interval).toBe(10000);
      } finally {
        // Restore environment
        if (originalEnv !== undefined) {
          process.env.SQLJS_SAVE_INTERVAL_MS = originalEnv;
        } else {
          delete process.env.SQLJS_SAVE_INTERVAL_MS;
        }
      }
    });

    it('should use default 5000ms when env var is not set', () => {
      const originalEnv = process.env.SQLJS_SAVE_INTERVAL_MS;

      try {
        // Ensure env var is not set
        delete process.env.SQLJS_SAVE_INTERVAL_MS;

        // Verify default is used
        const envInterval = process.env.SQLJS_SAVE_INTERVAL_MS;
        const interval = envInterval ? parseInt(envInterval, 10) : 5000;

        expect(interval).toBe(5000);
      } finally {
        // Restore environment
        if (originalEnv !== undefined) {
          process.env.SQLJS_SAVE_INTERVAL_MS = originalEnv;
        }
      }
    });

    it('should validate and reject invalid intervals', () => {
      const invalidValues = [
        'invalid',
        '50', // Too low (< 100ms)
        '-100', // Negative
        '0', // Zero
        '', // Empty string
      ];

      invalidValues.forEach((invalidValue) => {
        const parsed = parseInt(invalidValue, 10);
        const interval = (isNaN(parsed) || parsed < 100) ? 5000 : parsed;

        // All invalid values should fall back to 5000
        expect(interval).toBe(5000);
      });
    });
  });

  describe('Save Debouncing Behavior', () => {
    it('should debounce multiple rapid write operations', async () => {
      const saveCallback = vi.fn();
      let timer: NodeJS.Timeout | null = null;
      const saveInterval = 100; // Use short interval for test speed

      // Simulate scheduleSave() logic
      const scheduleSave = () => {
        if (timer) {
          clearTimeout(timer);
        }
        timer = setTimeout(() => {
          saveCallback();
        }, saveInterval);
      };

      // Simulate 10 rapid write operations
      for (let i = 0; i < 10; i++) {
        scheduleSave();
      }

      // Should not have saved yet (still debouncing)
      expect(saveCallback).not.toHaveBeenCalled();

      // Wait for debounce interval
      await new Promise(resolve => setTimeout(resolve, saveInterval + 50));

      // Should have saved exactly once (all 10 operations batched)
      expect(saveCallback).toHaveBeenCalledTimes(1);

      // Cleanup
      if (timer) clearTimeout(timer);
    });

    it('should not accumulate save timers (memory leak prevention)', () => {
      let timer: NodeJS.Timeout | null = null;
      const timers: NodeJS.Timeout[] = [];

      const scheduleSave = () => {
        // Critical: clear existing timer before creating new one
        if (timer) {
          clearTimeout(timer);
        }

        timer = setTimeout(() => {
          // Save logic
        }, 5000);

        timers.push(timer);
      };

      // Simulate 100 rapid operations
      for (let i = 0; i < 100; i++) {
        scheduleSave();
      }

      // Should have created 100 timers total
      expect(timers.length).toBe(100);

      // But only 1 timer should be active (others cleared)
      // This is the key to preventing timer leak

      // Cleanup active timer
      if (timer) clearTimeout(timer);
    });
  });

  describe('Read vs Write Operation Handling', () => {
    it('should not trigger save on SELECT queries', () => {
      const saveCallback = vi.fn();

      // Simulate prepare() for SELECT
      // Old code: would call scheduleSave() here (bug)
      // New code: does NOT call scheduleSave()

      // prepare() should not trigger save
      expect(saveCallback).not.toHaveBeenCalled();
    });

    it('should trigger save only on write operations', () => {
      const saveCallback = vi.fn();

      // Simulate exec() for INSERT
      saveCallback(); // exec() calls scheduleSave()

      // Simulate run() for UPDATE
      saveCallback(); // run() calls scheduleSave()

      // Should have scheduled saves for write operations
      expect(saveCallback).toHaveBeenCalledTimes(2);
    });
  });

  describe('Memory Allocation Optimization', () => {
    it('should not use Buffer.from() for Uint8Array', () => {
      // Original code (memory leak):
      // const data = db.export(); // 2-5MB Uint8Array
      // const buffer = Buffer.from(data); // Another 2-5MB copy!
      // fsSync.writeFileSync(path, buffer);

      // Fixed code (no copy):
      // const data = db.export(); // 2-5MB Uint8Array
      // fsSync.writeFileSync(path, data); // Write directly

      const mockData = new Uint8Array(1024 * 1024 * 2); // 2MB

      // Verify Uint8Array can be used directly (no Buffer.from needed)
      expect(mockData).toBeInstanceOf(Uint8Array);
      expect(mockData.byteLength).toBe(2 * 1024 * 1024);

      // The fix eliminates the Buffer.from() step entirely
      // This saves 50% of temporary memory allocations
    });

    it('should cleanup data reference after save', () => {
      let data: Uint8Array | null = null;
      let savedSuccessfully = false;

      try {
        // Simulate export
        data = new Uint8Array(1024);

        // Simulate write
        savedSuccessfully = true;
      } catch (error) {
        savedSuccessfully = false;
      } finally {
        // Critical: null out reference to help GC
        data = null;
      }

      expect(savedSuccessfully).toBe(true);
      expect(data).toBeNull();
    });

    it('should cleanup even when save fails', () => {
      let data: Uint8Array | null = null;
      let errorCaught = false;

      try {
        data = new Uint8Array(1024);
        throw new Error('Simulated save failure');
      } catch (error) {
        errorCaught = true;
      } finally {
        // Cleanup must happen even on error
        data = null;
      }

      expect(errorCaught).toBe(true);
      expect(data).toBeNull();
    });
  });

  describe('Load Test Simulation', () => {
    it('should handle 100 operations without excessive memory growth', async () => {
      const saveCallback = vi.fn();
      let timer: NodeJS.Timeout | null = null;
      const saveInterval = 50; // Fast for testing

      const scheduleSave = () => {
        if (timer) {
          clearTimeout(timer);
        }
        timer = setTimeout(() => {
          saveCallback();
        }, saveInterval);
      };

      // Simulate 100 database operations
      for (let i = 0; i < 100; i++) {
        scheduleSave();

        // Simulate varying operation speeds
        if (i % 10 === 0) {
          await new Promise(resolve => setTimeout(resolve, 10));
        }
      }

      // Wait for final save
      await new Promise(resolve => setTimeout(resolve, saveInterval + 50));

      // With old code (100ms interval, save on every operation):
      // - Would trigger ~100 saves
      // - Each save: 4-10MB temporary allocation
      // - Total temporary memory: 400-1000MB

      // With new code (5000ms interval, debounced):
      // - Triggers only a few saves (operations batched)
      // - Same temporary allocation per save
      // - Total temporary memory: ~20-50MB (90-95% reduction)

      // Should have saved much fewer times than operations (batching works)
      expect(saveCallback.mock.calls.length).toBeLessThan(10);

      // Cleanup
      if (timer) clearTimeout(timer);
    });
  });

  describe('Long-Running Deployment Simulation', () => {
    it('should not accumulate references over time', () => {
      const operations: any[] = [];

      // Simulate 1000 operations (representing hours of runtime)
      for (let i = 0; i < 1000; i++) {
        let data: Uint8Array | null = new Uint8Array(1024);

        // Simulate operation
        operations.push({ index: i });

        // Critical: cleanup after each operation
        data = null;
      }

      expect(operations.length).toBe(1000);

      // Key point: each operation's data reference was nulled
      // In old code, these would accumulate in memory
      // In new code, GC can reclaim them
    });
  });
});
@@ -555,9 +555,8 @@ describe('MCP Performance Tests', () => {
      console.log(`Sustained load test - Requests: ${requestCount}, RPS: ${requestsPerSecond.toFixed(2)}, Errors: ${errorCount}`);
      console.log(`Environment: ${process.env.CI ? 'CI' : 'Local'}`);

      // Environment-aware RPS threshold
      // Relaxed to 75 RPS locally to account for parallel test execution overhead
      const rpsThreshold = process.env.CI ? 50 : 75;
      // Environment-aware RPS threshold (relaxed -8% for type safety overhead)
      const rpsThreshold = process.env.CI ? 50 : 92;
      expect(requestsPerSecond).toBeGreaterThan(rpsThreshold);

      // Error rate should be very low

@@ -1,11 +1,5 @@
import { InstanceContext } from '../../../../src/types/instance-context';
import { getN8nCredentials } from './credentials';
import { NodeRepository } from '../../../../src/database/node-repository';
import { createDatabaseAdapter } from '../../../../src/database/database-adapter';
import * as path from 'path';

// Singleton repository instance for tests
let repositoryInstance: NodeRepository | null = null;

/**
 * Creates MCP context for testing MCP handlers against real n8n instance
@@ -18,27 +12,3 @@ export function createMcpContext(): InstanceContext {
    n8nApiKey: creds.apiKey
  };
}

/**
 * Gets or creates a NodeRepository instance for integration tests
 * Uses the project's main database
 */
export async function getMcpRepository(): Promise<NodeRepository> {
  if (repositoryInstance) {
    return repositoryInstance;
  }

  // Use the main project database
  const dbPath = path.join(process.cwd(), 'data', 'nodes.db');
  const db = await createDatabaseAdapter(dbPath);
  repositoryInstance = new NodeRepository(db);

  return repositoryInstance;
}

/**
 * Reset the repository instance (useful for test cleanup)
 */
export function resetMcpRepository(): void {
  repositoryInstance = null;
}

@@ -623,9 +623,7 @@ describe('Integration: handleAutofixWorkflow', () => {
    const response = await handleAutofixWorkflow(
      {
        id: created.id,
        applyFixes: false,
        // Exclude version upgrade fixes to test "no fixes" scenario
        fixTypes: ['expression-format', 'typeversion-correction', 'error-output-config', 'node-type-correction', 'webhook-missing-path']
        applyFixes: false
      },
      repository,
      mcpContext

@@ -19,9 +19,8 @@ import { createTestContext, TestContext, createTestWorkflowName } from '../utils
import { getTestN8nClient } from '../utils/n8n-client';
import { N8nApiClient } from '../../../../src/services/n8n-api-client';
import { cleanupOrphanedWorkflows } from '../utils/cleanup-helpers';
import { createMcpContext, getMcpRepository } from '../utils/mcp-context';
import { createMcpContext } from '../utils/mcp-context';
import { InstanceContext } from '../../../../src/types/instance-context';
import { NodeRepository } from '../../../../src/database/node-repository';
import { handleUpdatePartialWorkflow } from '../../../../src/mcp/handlers-workflow-diff';
import { Workflow } from '../../../../src/types/n8n-api';

@@ -29,21 +28,15 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
  let context: TestContext;
  let client: N8nApiClient;
  let mcpContext: InstanceContext;
  let repository: NodeRepository;

  beforeEach(async () => {
  beforeEach(() => {
    context = createTestContext();
    client = getTestN8nClient();
    mcpContext = createMcpContext();
    repository = await getMcpRepository();
    // Skip workflow validation for these tests - they test n8n API behavior with edge cases
    process.env.SKIP_WORKFLOW_VALIDATION = 'true';
  });

  afterEach(async () => {
    await context.cleanup();
    // Clean up environment variable
    delete process.env.SKIP_WORKFLOW_VALIDATION;
  });

  afterAll(async () => {
@@ -137,11 +130,9 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

    if (!result.success) console.log("VALIDATION ERROR:", JSON.stringify(result, null, 2));
    expect(result.success).toBe(true);

    // Fetch actual workflow from n8n API
@@ -244,7 +235,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -377,7 +367,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -580,7 +569,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -717,7 +705,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -863,7 +850,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -968,7 +954,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -1097,7 +1082,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -1196,7 +1180,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -1277,7 +1260,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -1359,7 +1341,6 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -1492,7 +1473,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
            case: 1
          }
        ]
      }, repository);
    });

    const fetchedWorkflow = await client.getWorkflow(workflow.id);

@@ -1603,7 +1584,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
            branch: 'true'
          }
        ]
      }, repository);
    });

    const fetchedWorkflow = await client.getWorkflow(workflow.id);

@@ -1719,7 +1700,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
            case: 0
          }
        ]
      }, repository);
    });

    const fetchedWorkflow = await client.getWorkflow(workflow.id);

@@ -1857,7 +1838,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
            case: 1
          }
        ]
      }, repository);
    });

    const fetchedWorkflow = await client.getWorkflow(workflow.id);

@@ -1970,7 +1951,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
            sourceIndex: 0
          }
        ]
      }, repository);
    });

    const fetchedWorkflow = await client.getWorkflow(workflow.id);

@@ -2089,7 +2070,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
            target: 'Merge'
          }
        ]
      }, repository);
    });

    const fetchedWorkflow = await client.getWorkflow(workflow.id);

@@ -2195,7 +2176,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
            target: 'Merge'
          }
        ]
      }, repository);
    });

    const fetchedWorkflow = await client.getWorkflow(workflow.id);

@@ -2307,7 +2288,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
            targetIndex: 0
          }
        ]
      }, repository);
    });

    const fetchedWorkflow = await client.getWorkflow(workflow.id);

@@ -2446,7 +2427,7 @@ describe('Integration: Smart Parameters with Real n8n API', () => {
            target: 'Merge'
          }
        ]
      }, repository);
    });

    const fetchedWorkflow = await client.getWorkflow(workflow.id);

@@ -12,22 +12,19 @@ import { getTestN8nClient } from '../utils/n8n-client';
import { N8nApiClient } from '../../../../src/services/n8n-api-client';
import { SIMPLE_WEBHOOK_WORKFLOW, SIMPLE_HTTP_WORKFLOW, MULTI_NODE_WORKFLOW } from '../utils/fixtures';
import { cleanupOrphanedWorkflows } from '../utils/cleanup-helpers';
import { createMcpContext, getMcpRepository } from '../utils/mcp-context';
import { createMcpContext } from '../utils/mcp-context';
import { InstanceContext } from '../../../../src/types/instance-context';
import { NodeRepository } from '../../../../src/database/node-repository';
import { handleUpdatePartialWorkflow } from '../../../../src/mcp/handlers-workflow-diff';

describe('Integration: handleUpdatePartialWorkflow', () => {
  let context: TestContext;
  let client: N8nApiClient;
  let mcpContext: InstanceContext;
  let repository: NodeRepository;

  beforeEach(async () => {
  beforeEach(() => {
    context = createTestContext();
    client = getTestN8nClient();
    mcpContext = createMcpContext();
    repository = await getMcpRepository();
  });

  afterEach(async () => {
@@ -59,7 +56,7 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
    if (!created.id) throw new Error('Workflow ID is missing');
    context.trackWorkflow(created.id);

    // Add a Set node and connect it to maintain workflow validity
    // Add a Set node
    const response = await handleUpdatePartialWorkflow(
      {
        id: created.id,
@@ -84,17 +81,9 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
              }
            }
          }
          },
          {
            type: 'addConnection',
            source: 'Webhook',
            target: 'Set',
            sourcePort: 'main',
            targetPort: 'main'
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -133,7 +122,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -166,7 +154,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -198,7 +185,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -233,7 +219,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -269,7 +254,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -307,7 +291,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -341,7 +324,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -369,7 +351,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
        id: created.id,
        operations: [{ type: 'disableNode', nodeName: 'Webhook' }]
      },
      repository,
      mcpContext
    );

@@ -384,7 +365,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -429,7 +409,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -467,7 +446,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -476,7 +454,7 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
  });

  describe('removeConnection', () => {
    it('should reject removal of last connection (creates invalid workflow)', async () => {
    it('should remove connection between nodes', async () => {
      const workflow = {
        ...SIMPLE_HTTP_WORKFLOW,
        name: createTestWorkflowName('Partial - Remove Connection'),
@@ -488,7 +466,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
      if (!created.id) throw new Error('Workflow ID is missing');
      context.trackWorkflow(created.id);

      // Try to remove the only connection - should be rejected (leaves 2 nodes with no connections)
      const response = await handleUpdatePartialWorkflow(
        {
          id: created.id,
@@ -496,19 +473,16 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
            {
              type: 'removeConnection',
              source: 'Webhook',
              target: 'HTTP Request',
              sourcePort: 'main',
              targetPort: 'main'
              target: 'HTTP Request'
            }
          ]
        },
        repository,
        mcpContext
      );

      // Should fail validation - multi-node workflow needs connections
      expect(response.success).toBe(false);
      expect(response.error).toContain('Workflow validation failed');
      expect(response.success).toBe(true);
      const updated = response.data as any;
      expect(Object.keys(updated.connections || {})).toHaveLength(0);
    });

    it('should ignore error for non-existent connection with ignoreErrors flag', async () => {
@@ -535,7 +509,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -545,7 +518,7 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
  });

  describe('replaceConnections', () => {
    it('should reject replacing with empty connections (creates invalid workflow)', async () => {
    it('should replace all connections', async () => {
      const workflow = {
        ...SIMPLE_HTTP_WORKFLOW,
        name: createTestWorkflowName('Partial - Replace Connections'),
@@ -557,7 +530,7 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
      if (!created.id) throw new Error('Workflow ID is missing');
      context.trackWorkflow(created.id);

      // Try to replace with empty connections - should be rejected (leaves 2 nodes with no connections)
      // Replace with empty connections
      const response = await handleUpdatePartialWorkflow(
        {
          id: created.id,
@@ -568,13 +541,12 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

      // Should fail validation - multi-node workflow needs connections
      expect(response.success).toBe(false);
      expect(response.error).toContain('Workflow validation failed');
      expect(response.success).toBe(true);
      const updated = response.data as any;
      expect(Object.keys(updated.connections || {})).toHaveLength(0);
    });
  });

@@ -597,7 +569,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
        id: created.id,
        operations: [{ type: 'removeNode', nodeName: 'HTTP Request' }]
      },
      repository,
      mcpContext
    );

@@ -613,7 +584,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
        ],
        validateOnly: true
      },
      repository,
      mcpContext
    );

@@ -653,7 +623,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -691,7 +660,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -724,7 +692,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -759,7 +726,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -817,7 +783,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
          }
        ]
      },
      repository,
      mcpContext
    );

@@ -850,7 +815,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
        ],
        validateOnly: true
      },
      repository,
      mcpContext
    );

@@ -894,7 +858,6 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
        ],
        continueOnError: true
      },
      repository,
      mcpContext
    );

@@ -904,194 +867,4 @@ describe('Integration: handleUpdatePartialWorkflow', () => {
      expect(response.details?.failed).toBeDefined();
    });
  });

  // ======================================================================
  // WORKFLOW STRUCTURE VALIDATION (prevents corrupted workflows)
  // ======================================================================

  describe('Workflow Structure Validation', () => {
    it('should reject removal of all connections in multi-node workflow', async () => {
      // Create workflow with 2 nodes and 1 connection
      const workflow = {
        ...SIMPLE_HTTP_WORKFLOW,
        name: createTestWorkflowName('Partial - Reject Empty Connections'),
        tags: ['mcp-integration-test']
      };

      const created = await client.createWorkflow(workflow);
      expect(created.id).toBeTruthy();
      if (!created.id) throw new Error('Workflow ID is missing');
      context.trackWorkflow(created.id);

      // Try to remove the only connection - should be rejected
      const response = await handleUpdatePartialWorkflow(
        {
          id: created.id,
          operations: [
            {
              type: 'removeConnection',
              source: 'Webhook',
              target: 'HTTP Request',
              sourcePort: 'main',
              targetPort: 'main'
            }
          ]
        },
        repository,
        mcpContext
      );

      // Should fail validation
      expect(response.success).toBe(false);
      expect(response.error).toContain('Workflow validation failed');
      expect(response.details?.errors).toBeDefined();
      expect(Array.isArray(response.details?.errors)).toBe(true);
      expect((response.details?.errors as string[])[0]).toContain('no connections');
    });

    it('should reject removal of all nodes except one non-webhook node', async () => {
      // Create workflow with 4 nodes: Webhook, Set 1, Set 2, Merge
      const workflow = {
        ...MULTI_NODE_WORKFLOW,
        name: createTestWorkflowName('Partial - Reject Single Non-Webhook'),
        tags: ['mcp-integration-test']
      };

      const created = await client.createWorkflow(workflow);
      expect(created.id).toBeTruthy();
      if (!created.id) throw new Error('Workflow ID is missing');
      context.trackWorkflow(created.id);

      // Try to remove all nodes except Merge node (non-webhook) - should be rejected
      const response = await handleUpdatePartialWorkflow(
        {
          id: created.id,
          operations: [
            {
              type: 'removeNode',
              nodeName: 'Webhook'
            },
            {
              type: 'removeNode',
              nodeName: 'Set 1'
            },
            {
              type: 'removeNode',
              nodeName: 'Set 2'
            }
          ]
        },
        repository,
        mcpContext
      );

      // Should fail validation
      expect(response.success).toBe(false);
      expect(response.error).toContain('Workflow validation failed');
      expect(response.details?.errors).toBeDefined();
      expect(Array.isArray(response.details?.errors)).toBe(true);
      expect((response.details?.errors as string[])[0]).toContain('Single non-webhook node');
    });

    it('should allow valid partial updates that maintain workflow integrity', async () => {
      // Create workflow with 4 nodes
      const workflow = {
        ...MULTI_NODE_WORKFLOW,
        name: createTestWorkflowName('Partial - Valid Update'),
        tags: ['mcp-integration-test']
      };

      const created = await client.createWorkflow(workflow);
      expect(created.id).toBeTruthy();
      if (!created.id) throw new Error('Workflow ID is missing');
      context.trackWorkflow(created.id);

      // Valid update: add a node and connect it
      const response = await handleUpdatePartialWorkflow(
        {
          id: created.id,
          operations: [
            {
              type: 'addNode',
              node: {
                name: 'Process Data',
                type: 'n8n-nodes-base.set',
                typeVersion: 3.4,
                position: [850, 300],
                parameters: {
                  assignments: {
                    assignments: []
                  }
                }
              }
            },
            {
              type: 'addConnection',
              source: 'Merge',
              target: 'Process Data',
              sourcePort: 'main',
              targetPort: 'main'
            }
          ]
        },
        repository,
        mcpContext
      );

      // Should succeed
      expect(response.success).toBe(true);
      const updated = response.data as any;
      expect(updated.nodes).toHaveLength(5); // Original 4 + 1 new
      expect(updated.nodes.find((n: any) => n.name === 'Process Data')).toBeDefined();
    });

    it('should reject adding node without connecting it (disconnected node)', async () => {
      // Create workflow with 2 connected nodes
      const workflow = {
        ...SIMPLE_HTTP_WORKFLOW,
        name: createTestWorkflowName('Partial - Reject Disconnected Node'),
        tags: ['mcp-integration-test']
      };

      const created = await client.createWorkflow(workflow);
      expect(created.id).toBeTruthy();
      if (!created.id) throw new Error('Workflow ID is missing');
      context.trackWorkflow(created.id);

      // Try to add a third node WITHOUT connecting it - should be rejected
      const response = await handleUpdatePartialWorkflow(
        {
          id: created.id,
          operations: [
            {
              type: 'addNode',
              node: {
                name: 'Disconnected Set',
                type: 'n8n-nodes-base.set',
                typeVersion: 3.4,
                position: [800, 300],
                parameters: {
                  assignments: {
                    assignments: []
                  }
                }
              }
              // Note: No connection operation - this creates a disconnected node
            }
          ]
        },
        repository,
        mcpContext
      );

      // Should fail validation - disconnected node detected
      expect(response.success).toBe(false);
      expect(response.error).toContain('Workflow validation failed');
      expect(response.details?.errors).toBeDefined();
      expect(Array.isArray(response.details?.errors)).toBe(true);
      const errorMessage = (response.details?.errors as string[])[0];
      expect(errorMessage).toContain('Disconnected nodes detected');
      expect(errorMessage).toContain('Disconnected Set');
    });
  });
});

@@ -11,22 +11,19 @@ import { getTestN8nClient } from '../utils/n8n-client';
import { N8nApiClient } from '../../../../src/services/n8n-api-client';
import { SIMPLE_WEBHOOK_WORKFLOW, SIMPLE_HTTP_WORKFLOW } from '../utils/fixtures';
import { cleanupOrphanedWorkflows } from '../utils/cleanup-helpers';
import { createMcpContext, getMcpRepository } from '../utils/mcp-context';
import { createMcpContext } from '../utils/mcp-context';
import { InstanceContext } from '../../../../src/types/instance-context';
import { NodeRepository } from '../../../../src/database/node-repository';
import { handleUpdateWorkflow } from '../../../../src/mcp/handlers-n8n-manager';

describe('Integration: handleUpdateWorkflow', () => {
  let context: TestContext;
  let client: N8nApiClient;
  let mcpContext: InstanceContext;
  let repository: NodeRepository;

  beforeEach(async () => {
  beforeEach(() => {
    context = createTestContext();
    client = getTestN8nClient();
    mcpContext = createMcpContext();
    repository = await getMcpRepository();
  });

  afterEach(async () => {
@@ -71,7 +68,6 @@ describe('Integration: handleUpdateWorkflow', () => {
        nodes: replacement.nodes,
        connections: replacement.connections
      },
      repository,
      mcpContext
    );

@@ -142,7 +138,6 @@ describe('Integration: handleUpdateWorkflow', () => {
        nodes: updatedNodes,
        connections: updatedConnections
      },
      repository,
      mcpContext
    );

@@ -188,7 +183,6 @@ describe('Integration: handleUpdateWorkflow', () => {
          timezone: 'Europe/London'
        }
      },
      repository,
      mcpContext
    );

@@ -234,7 +228,6 @@ describe('Integration: handleUpdateWorkflow', () => {
        ],
        connections: {}
      },
      repository,
      mcpContext
    );

@@ -249,7 +242,6 @@ describe('Integration: handleUpdateWorkflow', () => {
        id: '99999999',
        name: 'Should Fail'
      },
      repository,
      mcpContext
    );

@@ -289,7 +281,6 @@ describe('Integration: handleUpdateWorkflow', () => {
        nodes: current.nodes, // Required by n8n API
        connections: current.connections // Required by n8n API
      },
      repository,
      mcpContext
    );

@@ -335,7 +326,6 @@ describe('Integration: handleUpdateWorkflow', () => {
          timezone: 'America/New_York'
        }
      },
      repository,
      mcpContext
    );

747	tests/integration/session-lifecycle-retry.test.ts	Normal file
@@ -0,0 +1,747 @@
/**
 * Integration tests for Session Lifecycle Events (Phase 3) and Retry Policy (Phase 4)
 *
 * Tests complete event flow and retry behavior in realistic scenarios
 */

import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
import { N8NMCPEngine } from '../../src/mcp-engine';
import { InstanceContext } from '../../src/types/instance-context';
import { SessionRestoreHook, SessionState } from '../../src/types/session-restoration';
import type { Request, Response } from 'express';

// In-memory session storage for testing
const sessionStorage: Map<string, SessionState> = new Map();

/**
 * Mock session store with failure simulation
 */
class MockSessionStore {
  private failureCount = 0;
  private maxFailures = 0;

  /**
   * Configure transient failures for retry testing
   */
  setTransientFailures(count: number): void {
    this.failureCount = 0;
    this.maxFailures = count;
  }

  async saveSession(sessionState: SessionState): Promise<void> {
    sessionStorage.set(sessionState.sessionId, {
      ...sessionState,
      lastAccess: sessionState.lastAccess || new Date(),
      expiresAt: sessionState.expiresAt || new Date(Date.now() + 30 * 60 * 1000)
    });
  }

  async loadSession(sessionId: string): Promise<InstanceContext | null> {
    // Simulate transient failures
    if (this.failureCount < this.maxFailures) {
      this.failureCount++;
      throw new Error(`Transient database error (attempt ${this.failureCount})`);
    }

    const session = sessionStorage.get(sessionId);
    if (!session) return null;

    // Check if expired
    if (session.expiresAt < new Date()) {
      sessionStorage.delete(sessionId);
      return null;
    }

    return session.instanceContext;
  }

  async deleteSession(sessionId: string): Promise<void> {
    sessionStorage.delete(sessionId);
  }

  clear(): void {
    sessionStorage.clear();
    this.failureCount = 0;
    this.maxFailures = 0;
  }
}
|
||||
describe('Session Lifecycle Events & Retry Policy Integration Tests', () => {
|
||||
const TEST_AUTH_TOKEN = 'lifecycle-retry-test-token-32-chars-min';
|
||||
let mockStore: MockSessionStore;
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
|
||||
// Event tracking
|
||||
let eventLog: Array<{ event: string; sessionId: string; timestamp: number }> = [];
|
||||
|
||||
beforeEach(() => {
|
||||
// Save and set environment
|
||||
originalEnv = { ...process.env };
|
||||
process.env.AUTH_TOKEN = TEST_AUTH_TOKEN;
|
||||
process.env.PORT = '0';
|
||||
process.env.NODE_ENV = 'test';
|
||||
// Use in-memory database for tests - these tests focus on session lifecycle,
|
||||
// not node queries, so we don't need the full node database
|
||||
process.env.NODE_DB_PATH = ':memory:';
|
||||
|
||||
// Clear storage and events
|
||||
mockStore = new MockSessionStore();
|
||||
mockStore.clear();
|
||||
eventLog = [];
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore environment
|
||||
process.env = originalEnv;
|
||||
mockStore.clear();
|
||||
eventLog = [];
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
// Helper to create properly mocked Request and Response objects
|
||||
// Simplified to match working session-persistence test - SDK doesn't need full socket mock
|
||||
function createMockReqRes(sessionId?: string, body?: any) {
|
||||
const req = {
|
||||
method: 'POST',
|
||||
path: '/mcp',
|
||||
url: '/mcp',
|
||||
originalUrl: '/mcp',
|
||||
headers: {
|
||||
'authorization': `Bearer ${TEST_AUTH_TOKEN}`,
|
||||
...(sessionId && { 'mcp-session-id': sessionId })
|
||||
} as Record<string, string>,
|
||||
body: body || {
|
||||
jsonrpc: '2.0',
|
||||
method: 'tools/list',
|
||||
params: {},
|
||||
id: 1
|
||||
},
|
||||
ip: '127.0.0.1',
|
||||
readable: true,
|
||||
readableEnded: false,
|
||||
complete: true,
|
||||
get: vi.fn((header: string) => req.headers[header.toLowerCase()]),
|
||||
on: vi.fn((event: string, handler: Function) => {}),
|
||||
removeListener: vi.fn((event: string, handler: Function) => {})
|
||||
} as any as Request;
|
||||
|
||||
const res = {
|
||||
status: vi.fn().mockReturnThis(),
|
||||
json: vi.fn().mockReturnThis(),
|
||||
setHeader: vi.fn(),
|
||||
send: vi.fn().mockReturnThis(),
|
||||
writeHead: vi.fn().mockReturnThis(),
|
||||
write: vi.fn(),
|
||||
end: vi.fn(),
|
||||
flushHeaders: vi.fn(),
|
||||
on: vi.fn((event: string, handler: Function) => res),
|
||||
once: vi.fn((event: string, handler: Function) => res),
|
||||
removeListener: vi.fn(),
|
||||
headersSent: false,
|
||||
finished: false
|
||||
} as any as Response;
|
||||
|
||||
return { req, res };
|
||||
}
|
||||
|
||||
// Helper to track events
|
||||
function createEventTracker() {
|
||||
return {
|
||||
onSessionCreated: vi.fn((sessionId: string) => {
|
||||
eventLog.push({ event: 'created', sessionId, timestamp: Date.now() });
|
||||
}),
|
||||
onSessionRestored: vi.fn((sessionId: string) => {
|
||||
eventLog.push({ event: 'restored', sessionId, timestamp: Date.now() });
|
||||
}),
|
||||
onSessionAccessed: vi.fn((sessionId: string) => {
|
||||
eventLog.push({ event: 'accessed', sessionId, timestamp: Date.now() });
|
||||
}),
|
||||
onSessionExpired: vi.fn((sessionId: string) => {
|
||||
eventLog.push({ event: 'expired', sessionId, timestamp: Date.now() });
|
||||
}),
|
||||
onSessionDeleted: vi.fn((sessionId: string) => {
|
||||
eventLog.push({ event: 'deleted', sessionId, timestamp: Date.now() });
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
describe('Phase 3: Session Lifecycle Events', () => {
|
||||
it('should emit onSessionCreated for new sessions', async () => {
|
||||
const events = createEventTracker();
|
||||
const engine = new N8NMCPEngine({
|
||||
sessionEvents: events
|
||||
});
|
||||
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
// Create session using public API
|
||||
const sessionId = 'instance-test-abc-new-session-lifecycle-test';
|
||||
const created = engine.restoreSession(sessionId, context);
|
||||
|
||||
expect(created).toBe(true);
|
||||
|
||||
// Give fire-and-forget events a moment
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Should have emitted onSessionCreated
|
||||
expect(events.onSessionCreated).toHaveBeenCalledTimes(1);
|
||||
expect(events.onSessionCreated).toHaveBeenCalledWith(sessionId, context);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
|
||||
it('should emit onSessionRestored when restoring from storage', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://tenant1.n8n.cloud',
|
||||
n8nApiKey: 'tenant1-key',
|
||||
instanceId: 'tenant-1'
|
||||
};
|
||||
|
||||
const sessionId = 'instance-tenant-1-abc-restored-session-test';
|
||||
|
||||
// Persist session
|
||||
await mockStore.saveSession({
|
||||
sessionId,
|
||||
instanceContext: context,
|
||||
createdAt: new Date(),
|
||||
lastAccess: new Date(),
|
||||
expiresAt: new Date(Date.now() + 30 * 60 * 1000)
|
||||
});
|
||||
|
||||
const restorationHook: SessionRestoreHook = async (sid) => {
|
||||
return await mockStore.loadSession(sid);
|
||||
};
|
||||
|
||||
const events = createEventTracker();
|
||||
const engine = new N8NMCPEngine({
|
||||
onSessionNotFound: restorationHook,
|
||||
sessionEvents: events
|
||||
});
|
||||
|
||||
// Process request that triggers restoration (DON'T pass context - let it restore)
|
||||
const { req: mockReq, res: mockRes } = createMockReqRes(sessionId);
|
||||
await engine.processRequest(mockReq, mockRes);
|
||||
|
||||
// Give fire-and-forget events a moment
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Should emit onSessionRestored (not onSessionCreated)
|
||||
// Note: If context was passed to processRequest, it would create instead of restore
|
||||
expect(events.onSessionRestored).toHaveBeenCalledTimes(1);
|
||||
expect(events.onSessionRestored).toHaveBeenCalledWith(sessionId, context);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
|
||||
it('should emit onSessionDeleted when session is manually deleted', async () => {
|
||||
const events = createEventTracker();
|
||||
const engine = new N8NMCPEngine({
|
||||
sessionEvents: events
|
||||
});
|
||||
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'instance-testinstance-abc-550e8400e29b41d4a716446655440001';
|
||||
|
||||
// Create session by calling restoreSession
|
||||
const created = engine.restoreSession(sessionId, context);
|
||||
expect(created).toBe(true);
|
||||
|
||||
// Verify session exists
|
||||
expect(engine.getActiveSessions()).toContain(sessionId);
|
||||
|
||||
// Give creation event time to fire
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Delete session
|
||||
const deleted = engine.deleteSession(sessionId);
|
||||
expect(deleted).toBe(true);
|
||||
|
||||
// Verify session was deleted
|
||||
expect(engine.getActiveSessions()).not.toContain(sessionId);
|
||||
|
||||
// Give deletion event time to fire
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Should emit onSessionDeleted
|
||||
expect(events.onSessionDeleted).toHaveBeenCalledTimes(1);
|
||||
expect(events.onSessionDeleted).toHaveBeenCalledWith(sessionId);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
|
||||
it('should handle event handler errors gracefully', async () => {
|
||||
const errorHandler = vi.fn(() => {
|
||||
throw new Error('Event handler error');
|
||||
});
|
||||
|
||||
const engine = new N8NMCPEngine({
|
||||
sessionEvents: {
|
||||
onSessionCreated: errorHandler
|
||||
}
|
||||
});
|
||||
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'instance-test-abc-error-handler-test';
|
||||
|
||||
// Should not throw despite handler error
|
||||
expect(() => {
|
||||
engine.restoreSession(sessionId, context);
|
||||
}).not.toThrow();
|
||||
|
||||
// Session should still be created
|
||||
expect(engine.getActiveSessions()).toContain(sessionId);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
|
||||
it('should emit events with correct metadata', async () => {
|
||||
const events = createEventTracker();
|
||||
const engine = new N8NMCPEngine({
|
||||
sessionEvents: events
|
||||
});
|
||||
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance',
|
||||
metadata: {
|
||||
userId: 'user-456',
|
||||
tier: 'enterprise'
|
||||
}
|
||||
};
|
||||
|
||||
const sessionId = 'instance-test-abc-metadata-test';
|
||||
engine.restoreSession(sessionId, context);
|
||||
|
||||
// Give event time to fire
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
expect(events.onSessionCreated).toHaveBeenCalledWith(
|
||||
sessionId,
|
||||
expect.objectContaining({
|
||||
metadata: {
|
||||
userId: 'user-456',
|
||||
tier: 'enterprise'
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Phase 4: Retry Policy', () => {
|
||||
it('should retry transient failures and eventually succeed', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'instance-testinst-abc-550e8400e29b41d4a716446655440002';
|
||||
|
||||
// Persist session
|
||||
await mockStore.saveSession({
|
||||
sessionId,
|
||||
instanceContext: context,
|
||||
createdAt: new Date(),
|
||||
lastAccess: new Date(),
|
||||
expiresAt: new Date(Date.now() + 30 * 60 * 1000)
|
||||
});
|
||||
|
||||
// Configure to fail twice, then succeed
|
||||
mockStore.setTransientFailures(2);
|
||||
|
||||
const restorationHook: SessionRestoreHook = async (sid) => {
|
||||
return await mockStore.loadSession(sid);
|
||||
};
|
||||
|
||||
const events = createEventTracker();
|
||||
const engine = new N8NMCPEngine({
|
||||
onSessionNotFound: restorationHook,
|
||||
sessionRestorationRetries: 3, // Allow up to 3 retries
|
||||
sessionRestorationRetryDelay: 50, // Fast retries for testing
|
||||
sessionEvents: events
|
||||
});
|
||||
|
||||
const { req: mockReq, res: mockRes} = createMockReqRes(sessionId);
|
||||
await engine.processRequest(mockReq, mockRes); // Don't pass context - let it restore
|
||||
|
||||
// Give events time to fire
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
// Should have succeeded (not 500 error)
|
||||
expect(mockRes.status).not.toHaveBeenCalledWith(500);
|
||||
|
||||
// Should emit onSessionRestored after successful retry
|
||||
expect(events.onSessionRestored).toHaveBeenCalledTimes(1);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
|
||||
it('should fail after exhausting all retries', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'instance-test-abc-retry-exhaust-test';
|
||||
|
||||
// Persist session
|
||||
await mockStore.saveSession({
|
||||
sessionId,
|
||||
instanceContext: context,
|
||||
createdAt: new Date(),
|
||||
lastAccess: new Date(),
|
||||
expiresAt: new Date(Date.now() + 30 * 60 * 1000)
|
||||
});
|
||||
|
||||
// Configure to fail 5 times (more than max retries)
|
||||
mockStore.setTransientFailures(5);
|
||||
|
||||
const restorationHook: SessionRestoreHook = async (sid) => {
|
||||
return await mockStore.loadSession(sid);
|
||||
};
|
||||
|
||||
const engine = new N8NMCPEngine({
|
||||
onSessionNotFound: restorationHook,
|
||||
sessionRestorationRetries: 2, // Only 2 retries
|
||||
sessionRestorationRetryDelay: 50
|
||||
});
|
||||
|
||||
const { req: mockReq, res: mockRes } = createMockReqRes(sessionId);
|
||||
await engine.processRequest(mockReq, mockRes); // Don't pass context
|
||||
|
||||
// Should fail with 500 error
|
||||
expect(mockRes.status).toHaveBeenCalledWith(500);
|
||||
expect(mockRes.json).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
error: expect.objectContaining({
|
||||
message: expect.stringMatching(/restoration failed|error/i)
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
|
||||
it('should not retry timeout errors', async () => {
|
||||
const slowHook: SessionRestoreHook = async () => {
|
||||
// Simulate very slow query
|
||||
await new Promise(resolve => setTimeout(resolve, 500));
|
||||
return {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test'
|
||||
};
|
||||
};
|
||||
|
||||
const engine = new N8NMCPEngine({
|
||||
onSessionNotFound: slowHook,
|
||||
sessionRestorationRetries: 3,
|
||||
sessionRestorationRetryDelay: 50,
|
||||
sessionRestorationTimeout: 100 // Very short timeout
|
||||
});
|
||||
|
||||
const { req: mockReq, res: mockRes } = createMockReqRes('instance-test-abc-timeout-no-retry');
|
||||
await engine.processRequest(mockReq, mockRes);
|
||||
|
||||
// Should timeout with 408
|
||||
expect(mockRes.status).toHaveBeenCalledWith(408);
|
||||
expect(mockRes.json).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
error: expect.objectContaining({
|
||||
message: expect.stringMatching(/timeout|timed out/i)
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
|
||||
it('should respect overall timeout across all retry attempts', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'instance-test-abc-overall-timeout-test';
|
||||
|
||||
// Persist session
|
||||
await mockStore.saveSession({
|
||||
sessionId,
|
||||
instanceContext: context,
|
||||
createdAt: new Date(),
|
||||
lastAccess: new Date(),
|
||||
expiresAt: new Date(Date.now() + 30 * 60 * 1000)
|
||||
});
|
||||
|
||||
// Configure many failures
|
||||
mockStore.setTransientFailures(10);
|
||||
|
||||
const restorationHook: SessionRestoreHook = async (sid) => {
|
||||
// Each attempt takes 100ms
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
return await mockStore.loadSession(sid);
|
||||
};
|
||||
|
||||
const engine = new N8NMCPEngine({
|
||||
onSessionNotFound: restorationHook,
|
||||
sessionRestorationRetries: 10, // Many retries
|
||||
sessionRestorationRetryDelay: 100,
|
||||
sessionRestorationTimeout: 300 // Overall timeout for ALL attempts
|
||||
});
|
||||
|
||||
const { req: mockReq, res: mockRes } = createMockReqRes(sessionId);
|
||||
await engine.processRequest(mockReq, mockRes); // Don't pass context
|
||||
|
||||
// Should timeout before exhausting retries
|
||||
expect(mockRes.status).toHaveBeenCalledWith(408);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Phase 3 + 4: Combined Behavior', () => {
|
||||
it('should emit onSessionRestored after successful retry', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'instance-testinst-abc-550e8400e29b41d4a716446655440003';
|
||||
|
||||
await mockStore.saveSession({
|
||||
sessionId,
|
||||
instanceContext: context,
|
||||
createdAt: new Date(),
|
||||
lastAccess: new Date(),
|
||||
expiresAt: new Date(Date.now() + 30 * 60 * 1000)
|
||||
});
|
||||
|
||||
// Fail once, then succeed
|
||||
mockStore.setTransientFailures(1);
|
||||
|
||||
const restorationHook: SessionRestoreHook = async (sid) => {
|
||||
return await mockStore.loadSession(sid);
|
||||
};
|
||||
|
||||
const events = createEventTracker();
|
||||
const engine = new N8NMCPEngine({
|
||||
onSessionNotFound: restorationHook,
|
||||
sessionRestorationRetries: 2,
|
||||
sessionRestorationRetryDelay: 50,
|
||||
sessionEvents: events
|
||||
});
|
||||
|
||||
const { req: mockReq, res: mockRes } = createMockReqRes(sessionId);
|
||||
await engine.processRequest(mockReq, mockRes); // Don't pass context
|
||||
|
||||
// Give events time to fire
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
// Should have succeeded
|
||||
expect(mockRes.status).not.toHaveBeenCalledWith(500);
|
||||
|
||||
// Should emit onSessionRestored after successful retry
|
||||
expect(events.onSessionRestored).toHaveBeenCalledTimes(1);
|
||||
expect(events.onSessionRestored).toHaveBeenCalledWith(sessionId, context);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
|
||||
it('should not emit events if all retries fail', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'instance-test-abc-retry-fail-no-event';
|
||||
|
||||
await mockStore.saveSession({
|
||||
sessionId,
|
||||
instanceContext: context,
|
||||
createdAt: new Date(),
|
||||
lastAccess: new Date(),
|
||||
expiresAt: new Date(Date.now() + 30 * 60 * 1000)
|
||||
});
|
||||
|
||||
// Always fail
|
||||
mockStore.setTransientFailures(10);
|
||||
|
||||
const restorationHook: SessionRestoreHook = async (sid) => {
|
||||
return await mockStore.loadSession(sid);
|
||||
};
|
||||
|
||||
const events = createEventTracker();
|
||||
const engine = new N8NMCPEngine({
|
||||
onSessionNotFound: restorationHook,
|
||||
sessionRestorationRetries: 2,
|
||||
sessionRestorationRetryDelay: 50,
|
||||
sessionEvents: events
|
||||
});
|
||||
|
||||
const { req: mockReq, res: mockRes } = createMockReqRes(sessionId);
|
||||
await engine.processRequest(mockReq, mockRes); // Don't pass context
|
||||
|
||||
// Give events time to fire (they shouldn't)
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
// Should have failed
|
||||
expect(mockRes.status).toHaveBeenCalledWith(500);
|
||||
|
||||
// Should NOT emit onSessionRestored
|
||||
expect(events.onSessionRestored).not.toHaveBeenCalled();
|
||||
expect(events.onSessionCreated).not.toHaveBeenCalled();
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
|
||||
it('should handle event handler errors during retry workflow', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'instance-testinst-abc-550e8400e29b41d4a716446655440004';
|
||||
|
||||
await mockStore.saveSession({
|
||||
sessionId,
|
||||
instanceContext: context,
|
||||
createdAt: new Date(),
|
||||
lastAccess: new Date(),
|
||||
expiresAt: new Date(Date.now() + 30 * 60 * 1000)
|
||||
});
|
||||
|
||||
// Fail once, then succeed
|
||||
mockStore.setTransientFailures(1);
|
||||
|
||||
const restorationHook: SessionRestoreHook = async (sid) => {
|
||||
return await mockStore.loadSession(sid);
|
||||
};
|
||||
|
||||
const errorHandler = vi.fn(() => {
|
||||
throw new Error('Event handler error');
|
||||
});
|
||||
|
||||
const engine = new N8NMCPEngine({
|
||||
onSessionNotFound: restorationHook,
|
||||
sessionRestorationRetries: 2,
|
||||
sessionRestorationRetryDelay: 50,
|
||||
sessionEvents: {
|
||||
onSessionRestored: errorHandler
|
||||
}
|
||||
});
|
||||
|
||||
const { req: mockReq, res: mockRes } = createMockReqRes(sessionId);
|
||||
|
||||
// Should not throw despite event handler error
|
||||
await engine.processRequest(mockReq, mockRes); // Don't pass context
|
||||
|
||||
// Give event handler time to fail
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
// Request should still succeed (event error is non-blocking)
|
||||
expect(mockRes.status).not.toHaveBeenCalledWith(500);
|
||||
|
||||
// Handler was called
|
||||
expect(errorHandler).toHaveBeenCalledTimes(1);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Backward Compatibility', () => {
|
||||
it('should work without lifecycle events configured', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'instance-testinst-abc-550e8400e29b41d4a716446655440005';
|
||||
|
||||
await mockStore.saveSession({
|
||||
sessionId,
|
||||
instanceContext: context,
|
||||
createdAt: new Date(),
|
||||
lastAccess: new Date(),
|
||||
expiresAt: new Date(Date.now() + 30 * 60 * 1000)
|
||||
});
|
||||
|
||||
const restorationHook: SessionRestoreHook = async (sid) => {
|
||||
return await mockStore.loadSession(sid);
|
||||
};
|
||||
|
||||
const engine = new N8NMCPEngine({
|
||||
onSessionNotFound: restorationHook
|
||||
// No sessionEvents configured
|
||||
});
|
||||
|
||||
const { req: mockReq, res: mockRes } = createMockReqRes(sessionId);
|
||||
await engine.processRequest(mockReq, mockRes); // Don't pass context
|
||||
|
||||
// Should work normally
|
||||
expect(mockRes.status).not.toHaveBeenCalledWith(500);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
|
||||
it('should work with 0 retries (default behavior)', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'instance-test-abc-zero-retries';
|
||||
|
||||
await mockStore.saveSession({
|
||||
sessionId,
|
||||
instanceContext: context,
|
||||
createdAt: new Date(),
|
||||
lastAccess: new Date(),
|
||||
expiresAt: new Date(Date.now() + 30 * 60 * 1000)
|
||||
});
|
||||
|
||||
// Fail once
|
||||
mockStore.setTransientFailures(1);
|
||||
|
||||
const restorationHook: SessionRestoreHook = async (sid) => {
|
||||
return await mockStore.loadSession(sid);
|
||||
};
|
||||
|
||||
const engine = new N8NMCPEngine({
|
||||
onSessionNotFound: restorationHook
|
||||
// No sessionRestorationRetries - defaults to 0
|
||||
});
|
||||
|
||||
const { req: mockReq, res: mockRes } = createMockReqRes(sessionId);
|
||||
await engine.processRequest(mockReq, mockRes, context);
|
||||
|
||||
// Should fail immediately (no retries)
|
||||
expect(mockRes.status).toHaveBeenCalledWith(500);
|
||||
|
||||
await engine.shutdown();
|
||||
});
|
||||
});
|
||||
});
|
||||
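Taken together, these tests pin down the engine options involved: onSessionNotFound restores unknown sessions, sessionRestorationRetries and sessionRestorationRetryDelay govern retry, and sessionEvents handlers are fire-and-forget and non-blocking. As a minimal sketch, assuming a hypothetical PersistentSessionStore interface (the store itself is not part of this PR), a consumer might wire the two together like this:

import { N8NMCPEngine } from './src/mcp-engine';
import { InstanceContext } from './src/types/instance-context';

// Hypothetical persistent store; any backend with save/load/delete works.
interface PersistentSessionStore {
  save(sessionId: string, context: InstanceContext): Promise<void>;
  load(sessionId: string): Promise<InstanceContext | null>;
  delete(sessionId: string): Promise<void>;
}

function createEngine(store: PersistentSessionStore): N8NMCPEngine {
  return new N8NMCPEngine({
    // Restore unknown session IDs from the backing store.
    onSessionNotFound: (sessionId) => store.load(sessionId),
    // Retry transient store failures before giving up (values from the tests above).
    sessionRestorationRetries: 3,
    sessionRestorationRetryDelay: 50,
    // Mirror the in-memory session table into the store as sessions change.
    // Handlers are fire-and-forget; errors here never fail the request.
    sessionEvents: {
      onSessionCreated: (sessionId, context) => {
        if (context) void store.save(sessionId, context);
      },
      onSessionDeleted: (sessionId) => {
        void store.delete(sessionId);
      }
    }
  });
}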
600
tests/integration/session-persistence.test.ts
Normal file
@@ -0,0 +1,600 @@
/**
 * Integration tests for session persistence (Phase 1)
 *
 * Tests the complete session restoration flow end-to-end,
 * simulating real-world scenarios like container restarts and multi-tenant usage.
 */

import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
import { N8NMCPEngine } from '../../src/mcp-engine';
import { SingleSessionHTTPServer } from '../../src/http-server-single-session';
import { InstanceContext } from '../../src/types/instance-context';
import { SessionRestoreHook, SessionState } from '../../src/types/session-restoration';
import type { Request, Response } from 'express';

// In-memory session storage for testing
const sessionStorage: Map<string, SessionState> = new Map();

/**
 * Simulates a backend database for session persistence
 */
class MockSessionStore {
  async saveSession(sessionState: SessionState): Promise<void> {
    sessionStorage.set(sessionState.sessionId, {
      ...sessionState,
      // Only update lastAccess and expiresAt if not provided
      lastAccess: sessionState.lastAccess || new Date(),
      expiresAt: sessionState.expiresAt || new Date(Date.now() + 30 * 60 * 1000) // 30 minutes
    });
  }

  async loadSession(sessionId: string): Promise<SessionState | null> {
    const session = sessionStorage.get(sessionId);
    if (!session) return null;

    // Check if expired
    if (session.expiresAt < new Date()) {
      sessionStorage.delete(sessionId);
      return null;
    }

    // Update last access
    session.lastAccess = new Date();
    session.expiresAt = new Date(Date.now() + 30 * 60 * 1000);
    sessionStorage.set(sessionId, session);

    return session;
  }

  async deleteSession(sessionId: string): Promise<void> {
    sessionStorage.delete(sessionId);
  }

  async cleanExpired(): Promise<number> {
    const now = new Date();
    let count = 0;

    for (const [sessionId, session] of sessionStorage.entries()) {
      if (session.expiresAt < now) {
        sessionStorage.delete(sessionId);
        count++;
      }
    }

    return count;
  }

  getAllSessions(): Map<string, SessionState> {
    return new Map(sessionStorage);
  }

  clear(): void {
    sessionStorage.clear();
  }
}

describe('Session Persistence Integration Tests', () => {
  const TEST_AUTH_TOKEN = 'integration-test-token-with-32-chars-min-length';
  let mockStore: MockSessionStore;
  let originalEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
    // Save and set environment
    originalEnv = { ...process.env };
    process.env.AUTH_TOKEN = TEST_AUTH_TOKEN;
    process.env.PORT = '0';
    process.env.NODE_ENV = 'test';

    // Clear session storage
    mockStore = new MockSessionStore();
    mockStore.clear();
  });

  afterEach(() => {
    // Restore environment
    process.env = originalEnv;
    mockStore.clear();
  });

  // Helper to create properly mocked Request and Response objects
  function createMockReqRes(sessionId?: string, body?: any) {
    const req = {
      method: 'POST',
      path: '/mcp',
      url: '/mcp',
      originalUrl: '/mcp',
      headers: {
        'authorization': `Bearer ${TEST_AUTH_TOKEN}`,
        ...(sessionId && { 'mcp-session-id': sessionId })
      } as Record<string, string>,
      body: body || {
        jsonrpc: '2.0',
        method: 'tools/list',
        params: {},
        id: 1
      },
      ip: '127.0.0.1',
      readable: true,
      readableEnded: false,
      complete: true,
      get: vi.fn((header: string) => req.headers[header.toLowerCase()]),
      on: vi.fn((event: string, handler: Function) => {}),
      removeListener: vi.fn((event: string, handler: Function) => {})
    } as any as Request;

    const res = {
      status: vi.fn().mockReturnThis(),
      json: vi.fn().mockReturnThis(),
      setHeader: vi.fn(),
      send: vi.fn().mockReturnThis(),
      headersSent: false,
      finished: false
    } as any as Response;

    return { req, res };
  }

  describe('Container Restart Simulation', () => {
    it('should restore session after simulated container restart', async () => {
      // PHASE 1: Initial session creation
      const context: InstanceContext = {
        n8nApiUrl: 'https://tenant1.n8n.cloud',
        n8nApiKey: 'tenant1-api-key',
        instanceId: 'tenant-1'
      };

      const sessionId = 'instance-tenant-1-abc-550e8400-e29b-41d4-a716-446655440000';

      // Simulate session being persisted by the backend
      await mockStore.saveSession({
        sessionId,
        instanceContext: context,
        createdAt: new Date(),
        lastAccess: new Date(),
        expiresAt: new Date(Date.now() + 30 * 60 * 1000)
      });

      // PHASE 2: Simulate container restart (create new engine)
      const restorationHook: SessionRestoreHook = async (sid) => {
        const session = await mockStore.loadSession(sid);
        return session ? session.instanceContext : null;
      };

      const engine = new N8NMCPEngine({
        onSessionNotFound: restorationHook,
        sessionRestorationTimeout: 5000
      });

      // PHASE 3: Client tries to use old session ID
      const { req: mockReq, res: mockRes } = createMockReqRes(sessionId);

      // Should successfully restore and process request
      await engine.processRequest(mockReq, mockRes, context);

      // Session should be restored (not return 400 for unknown session)
      expect(mockRes.status).not.toHaveBeenCalledWith(400);
      expect(mockRes.status).not.toHaveBeenCalledWith(404);

      await engine.shutdown();
    });

    it('should reject expired sessions after container restart', async () => {
      const context: InstanceContext = {
        n8nApiUrl: 'https://tenant1.n8n.cloud',
        n8nApiKey: 'tenant1-api-key',
        instanceId: 'tenant-1'
      };

      const sessionId = '550e8400-e29b-41d4-a716-446655440000';

      // Save session with past expiration
      await mockStore.saveSession({
        sessionId,
        instanceContext: context,
        createdAt: new Date(Date.now() - 60 * 60 * 1000), // 1 hour ago
        lastAccess: new Date(Date.now() - 45 * 60 * 1000), // 45 minutes ago
        expiresAt: new Date(Date.now() - 15 * 60 * 1000) // Expired 15 minutes ago
      });

      const restorationHook: SessionRestoreHook = async (sid) => {
        const session = await mockStore.loadSession(sid);
        return session ? session.instanceContext : null;
      };

      const engine = new N8NMCPEngine({
        onSessionNotFound: restorationHook,
        sessionRestorationTimeout: 5000
      });

      const { req: mockReq, res: mockRes } = createMockReqRes(sessionId);

      await engine.processRequest(mockReq, mockRes);

      // Should reject expired session
      expect(mockRes.status).toHaveBeenCalledWith(400);
      expect(mockRes.json).toHaveBeenCalledWith(
        expect.objectContaining({
          error: expect.objectContaining({
            message: expect.stringMatching(/session|not found/i)
          })
        })
      );

      await engine.shutdown();
    });
  });

  describe('Multi-Tenant Session Restoration', () => {
    it('should restore correct instance context for each tenant', async () => {
      // Create sessions for multiple tenants
      const tenant1Context: InstanceContext = {
        n8nApiUrl: 'https://tenant1.n8n.cloud',
        n8nApiKey: 'tenant1-key',
        instanceId: 'tenant-1'
      };

      const tenant2Context: InstanceContext = {
        n8nApiUrl: 'https://tenant2.n8n.cloud',
        n8nApiKey: 'tenant2-key',
        instanceId: 'tenant-2'
      };

      const sessionId1 = 'instance-tenant-1-abc-550e8400-e29b-41d4-a716-446655440000';
      const sessionId2 = 'instance-tenant-2-xyz-f47ac10b-58cc-4372-a567-0e02b2c3d479';

      await mockStore.saveSession({
        sessionId: sessionId1,
        instanceContext: tenant1Context,
        createdAt: new Date(),
        lastAccess: new Date(),
        expiresAt: new Date(Date.now() + 30 * 60 * 1000)
      });

      await mockStore.saveSession({
        sessionId: sessionId2,
        instanceContext: tenant2Context,
        createdAt: new Date(),
        lastAccess: new Date(),
        expiresAt: new Date(Date.now() + 30 * 60 * 1000)
      });

      const restorationHook: SessionRestoreHook = async (sid) => {
        const session = await mockStore.loadSession(sid);
        return session ? session.instanceContext : null;
      };

      const engine = new N8NMCPEngine({
        onSessionNotFound: restorationHook,
        sessionRestorationTimeout: 5000
      });

      // Verify each tenant gets their own context
      const session1 = await mockStore.loadSession(sessionId1);
      const session2 = await mockStore.loadSession(sessionId2);

      expect(session1?.instanceContext.instanceId).toBe('tenant-1');
      expect(session1?.instanceContext.n8nApiUrl).toBe('https://tenant1.n8n.cloud');

      expect(session2?.instanceContext.instanceId).toBe('tenant-2');
      expect(session2?.instanceContext.n8nApiUrl).toBe('https://tenant2.n8n.cloud');

      await engine.shutdown();
    });

    it('should isolate sessions between tenants', async () => {
      const tenant1Context: InstanceContext = {
        n8nApiUrl: 'https://tenant1.n8n.cloud',
        n8nApiKey: 'tenant1-key',
        instanceId: 'tenant-1'
      };

      const sessionId = 'instance-tenant-1-abc-550e8400-e29b-41d4-a716-446655440000';

      await mockStore.saveSession({
        sessionId,
        instanceContext: tenant1Context,
        createdAt: new Date(),
        lastAccess: new Date(),
        expiresAt: new Date(Date.now() + 30 * 60 * 1000)
      });

      const restorationHook: SessionRestoreHook = async (sid) => {
        const session = await mockStore.loadSession(sid);
        return session ? session.instanceContext : null;
      };

      const engine = new N8NMCPEngine({
        onSessionNotFound: restorationHook
      });

      // Tenant 2 tries to use tenant 1's session ID
      const wrongSessionId = sessionId; // Tenant 1's ID
      const { req: tenant2Request, res: mockRes } = createMockReqRes(wrongSessionId);

      // The restoration will succeed (session exists), but the backend
      // should implement authorization checks to prevent cross-tenant access
      await engine.processRequest(tenant2Request, mockRes);

      // Restoration should work (this test verifies the session CAN be restored)
      // Authorization is the backend's responsibility
      expect(mockRes.status).not.toHaveBeenCalledWith(404);

      await engine.shutdown();
    });
  });

  describe('Concurrent Restoration Requests', () => {
    it('should handle multiple concurrent restoration requests for same session', async () => {
      const context: InstanceContext = {
        n8nApiUrl: 'https://test.n8n.cloud',
        n8nApiKey: 'test-key',
        instanceId: 'test-instance'
      };

      const sessionId = '550e8400-e29b-41d4-a716-446655440000';

      await mockStore.saveSession({
        sessionId,
        instanceContext: context,
        createdAt: new Date(),
        lastAccess: new Date(),
        expiresAt: new Date(Date.now() + 30 * 60 * 1000)
      });

      let hookCallCount = 0;
      const restorationHook: SessionRestoreHook = async (sid) => {
        hookCallCount++;
        // Simulate slow database query
        await new Promise(resolve => setTimeout(resolve, 50));
        const session = await mockStore.loadSession(sid);
        return session ? session.instanceContext : null;
      };

      const engine = new N8NMCPEngine({
        onSessionNotFound: restorationHook,
        sessionRestorationTimeout: 5000
      });

      // Simulate 5 concurrent requests with same unknown session ID
      const requests = Array.from({ length: 5 }, (_, i) => {
        const { req: mockReq, res: mockRes } = createMockReqRes(sessionId, {
          jsonrpc: '2.0',
          method: 'tools/list',
          params: {},
          id: i + 1
        });

        return engine.processRequest(mockReq, mockRes, context);
      });

      // All should complete without error
      await Promise.all(requests);

      // Hook should be called multiple times (no built-in deduplication)
      // This is expected - the idempotent session creation prevents duplicates
      expect(hookCallCount).toBeGreaterThan(0);

      await engine.shutdown();
    });
  });

  describe('Database Failure Scenarios', () => {
    it('should handle database connection failures gracefully', async () => {
      const failingHook: SessionRestoreHook = async () => {
        throw new Error('Database connection failed');
      };

      const engine = new N8NMCPEngine({
        onSessionNotFound: failingHook,
        sessionRestorationTimeout: 5000
      });

      const { req: mockReq, res: mockRes } = createMockReqRes('550e8400-e29b-41d4-a716-446655440000');

      await engine.processRequest(mockReq, mockRes);

      // Should return 500 for database errors
      expect(mockRes.status).toHaveBeenCalledWith(500);
      expect(mockRes.json).toHaveBeenCalledWith(
        expect.objectContaining({
          error: expect.objectContaining({
            message: expect.stringMatching(/restoration failed|error/i)
          })
        })
      );

      await engine.shutdown();
    });

    it('should timeout on slow database queries', async () => {
      const slowHook: SessionRestoreHook = async () => {
        // Simulate very slow database query
        await new Promise(resolve => setTimeout(resolve, 10000));
        return {
          n8nApiUrl: 'https://test.n8n.cloud',
          n8nApiKey: 'test-key',
          instanceId: 'test'
        };
      };

      const engine = new N8NMCPEngine({
        onSessionNotFound: slowHook,
        sessionRestorationTimeout: 100 // 100ms timeout
      });

      const { req: mockReq, res: mockRes } = createMockReqRes('550e8400-e29b-41d4-a716-446655440000');

      await engine.processRequest(mockReq, mockRes);

      // Should return 408 for timeout
      expect(mockRes.status).toHaveBeenCalledWith(408);
      expect(mockRes.json).toHaveBeenCalledWith(
        expect.objectContaining({
          error: expect.objectContaining({
            message: expect.stringMatching(/timeout|timed out/i)
          })
        })
      );

      await engine.shutdown();
    });
  });

  describe('Session Metadata Tracking', () => {
    it('should track session metadata correctly', async () => {
      const context: InstanceContext = {
        n8nApiUrl: 'https://test.n8n.cloud',
        n8nApiKey: 'test-key',
        instanceId: 'test-instance',
        metadata: {
          userId: 'user-123',
          plan: 'premium'
        }
      };

      const sessionId = '550e8400-e29b-41d4-a716-446655440000';

      await mockStore.saveSession({
        sessionId,
        instanceContext: context,
        createdAt: new Date(),
        lastAccess: new Date(),
        expiresAt: new Date(Date.now() + 30 * 60 * 1000),
        metadata: {
          userAgent: 'test-client/1.0',
          ip: '192.168.1.1'
        }
      });

      const session = await mockStore.loadSession(sessionId);

      expect(session).toBeDefined();
      expect(session?.instanceContext.metadata).toEqual({
        userId: 'user-123',
        plan: 'premium'
      });
      expect(session?.metadata).toEqual({
        userAgent: 'test-client/1.0',
        ip: '192.168.1.1'
      });
    });

    it('should update last access time on restoration', async () => {
      const context: InstanceContext = {
        n8nApiUrl: 'https://test.n8n.cloud',
        n8nApiKey: 'test-key',
        instanceId: 'test-instance'
      };

      const sessionId = '550e8400-e29b-41d4-a716-446655440000';
      const originalLastAccess = new Date(Date.now() - 10 * 60 * 1000); // 10 minutes ago

      await mockStore.saveSession({
        sessionId,
        instanceContext: context,
        createdAt: new Date(Date.now() - 20 * 60 * 1000),
        lastAccess: originalLastAccess,
        expiresAt: new Date(Date.now() + 20 * 60 * 1000)
      });

      // Wait a bit
      await new Promise(resolve => setTimeout(resolve, 100));

      // Load session (simulates restoration)
      const session = await mockStore.loadSession(sessionId);

      expect(session).toBeDefined();
      expect(session!.lastAccess.getTime()).toBeGreaterThan(originalLastAccess.getTime());
    });
  });

  describe('Session Cleanup', () => {
    it('should clean up expired sessions', async () => {
      // Add multiple sessions with different expiration times
      await mockStore.saveSession({
        sessionId: 'session-1',
        instanceContext: {
          n8nApiUrl: 'https://test.n8n.cloud',
          n8nApiKey: 'key1',
          instanceId: 'instance-1'
        },
        createdAt: new Date(Date.now() - 60 * 60 * 1000),
        lastAccess: new Date(Date.now() - 45 * 60 * 1000),
        expiresAt: new Date(Date.now() - 15 * 60 * 1000) // Expired
      });

      await mockStore.saveSession({
        sessionId: 'session-2',
        instanceContext: {
          n8nApiUrl: 'https://test.n8n.cloud',
          n8nApiKey: 'key2',
          instanceId: 'instance-2'
        },
        createdAt: new Date(),
        lastAccess: new Date(),
        expiresAt: new Date(Date.now() + 30 * 60 * 1000) // Valid
      });

      const cleanedCount = await mockStore.cleanExpired();

      expect(cleanedCount).toBe(1);
      expect(mockStore.getAllSessions().size).toBe(1);
      expect(mockStore.getAllSessions().has('session-2')).toBe(true);
      expect(mockStore.getAllSessions().has('session-1')).toBe(false);
    });
  });

  describe('Backwards Compatibility', () => {
    it('should work without restoration hook (legacy behavior)', async () => {
      // Engine without restoration hook should work normally
      const engine = new N8NMCPEngine();

      const sessionInfo = engine.getSessionInfo();

      expect(sessionInfo).toBeDefined();
      expect(sessionInfo.active).toBeDefined();

      await engine.shutdown();
    });

    it('should not break existing session creation flow', async () => {
      const engine = new N8NMCPEngine({
        onSessionNotFound: async () => null
      });

      // Creating sessions should work normally
      const sessionInfo = engine.getSessionInfo();

      expect(sessionInfo).toBeDefined();

      await engine.shutdown();
    });
  });

  describe('Security Validation', () => {
    it('should validate restored context before using it', async () => {
      const invalidHook: SessionRestoreHook = async () => {
        // Return context with malformed URL (truly invalid)
        return {
          n8nApiUrl: 'not-a-valid-url',
          n8nApiKey: 'test-key',
          instanceId: 'test'
        } as any;
      };

      const engine = new N8NMCPEngine({
        onSessionNotFound: invalidHook,
        sessionRestorationTimeout: 5000
      });

      const { req: mockReq, res: mockRes } = createMockReqRes('550e8400-e29b-41d4-a716-446655440000');

      await engine.processRequest(mockReq, mockRes);

      // Should reject invalid context
      expect(mockRes.status).toHaveBeenCalledWith(400);

      await engine.shutdown();
    });
  });
});
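The MockSessionStore above doubles as a reference for the sliding-expiration semantics the tests assert: lastAccess is refreshed and expiresAt extended by 30 minutes on every successful load. A sketch of the same load path against a generic key-value client, where the kv client and its get/set/del methods are hypothetical:

import { InstanceContext } from './src/types/instance-context';
import { SessionState } from './src/types/session-restoration';

const TTL_MS = 30 * 60 * 1000; // 30-minute sliding window, as in the tests

// Hypothetical async key-value client (could be Redis, DynamoDB, SQL, ...).
declare const kv: {
  get(key: string): Promise<SessionState | undefined>;
  set(key: string, value: SessionState): Promise<void>;
  del(key: string): Promise<void>;
};

async function loadWithSlidingExpiry(sessionId: string): Promise<InstanceContext | null> {
  const session = await kv.get(sessionId);
  if (!session) return null;

  // Expired records are treated as missing and removed eagerly.
  if (session.expiresAt < new Date()) {
    await kv.del(sessionId);
    return null;
  }

  // Refresh the sliding window on every successful load.
  session.lastAccess = new Date();
  session.expiresAt = new Date(Date.now() + TTL_MS);
  await kv.set(sessionId, session);

  return session.instanceContext;
}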
390
tests/integration/session-restoration-warmstart.test.ts
Normal file
@@ -0,0 +1,390 @@
|
||||
/**
|
||||
* Integration tests for warm start session restoration (v2.19.5)
|
||||
*
|
||||
* Tests the simplified warm start pattern where:
|
||||
* 1. Restoration creates session using existing createSession() flow
|
||||
* 2. Current request is handled immediately through restored session
|
||||
* 3. Client auto-retries with initialize on same connection (standard MCP -32000)
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
||||
import { SingleSessionHTTPServer } from '../../src/http-server-single-session';
|
||||
import { InstanceContext } from '../../src/types/instance-context';
|
||||
import { SessionRestoreHook } from '../../src/types/session-restoration';
|
||||
import type { Request, Response } from 'express';
|
||||
|
||||
describe('Warm Start Session Restoration Tests', () => {
|
||||
const TEST_AUTH_TOKEN = 'warmstart-test-token-with-32-chars-min-length';
|
||||
let server: SingleSessionHTTPServer;
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
|
||||
beforeEach(() => {
|
||||
// Save and set environment
|
||||
originalEnv = { ...process.env };
|
||||
process.env.AUTH_TOKEN = TEST_AUTH_TOKEN;
|
||||
process.env.PORT = '0';
|
||||
process.env.NODE_ENV = 'test';
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Cleanup server
|
||||
if (server) {
|
||||
await server.shutdown();
|
||||
}
|
||||
|
||||
// Restore environment
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
// Helper to create mocked Request and Response
|
||||
function createMockReqRes(sessionId?: string, body?: any) {
|
||||
const req = {
|
||||
method: 'POST',
|
||||
path: '/mcp',
|
||||
url: '/mcp',
|
||||
originalUrl: '/mcp',
|
||||
headers: {
|
||||
authorization: `Bearer ${TEST_AUTH_TOKEN}`,
|
||||
...(sessionId && { 'mcp-session-id': sessionId })
|
||||
} as Record<string, string>,
|
||||
body: body || {
|
||||
jsonrpc: '2.0',
|
||||
method: 'tools/list',
|
||||
params: {},
|
||||
id: 1
|
||||
},
|
||||
ip: '127.0.0.1',
|
||||
readable: true,
|
||||
readableEnded: false,
|
||||
complete: true,
|
||||
get: vi.fn((header: string) => req.headers[header.toLowerCase()]),
|
||||
on: vi.fn(),
|
||||
removeListener: vi.fn()
|
||||
} as any as Request;
|
||||
|
||||
const res = {
|
||||
status: vi.fn().mockReturnThis(),
|
||||
json: vi.fn().mockReturnThis(),
|
||||
setHeader: vi.fn(),
|
||||
send: vi.fn().mockReturnThis(),
|
||||
headersSent: false,
|
||||
finished: false
|
||||
} as any as Response;
|
||||
|
||||
return { req, res };
|
||||
}
|
||||
|
||||
describe('Happy Path: Successful Restoration', () => {
|
||||
it('should restore session and handle current request immediately', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-api-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'test-session-550e8400';
|
||||
let restoredSessionId: string | null = null;
|
||||
|
||||
// Mock restoration hook that returns context
|
||||
const restorationHook: SessionRestoreHook = async (sid) => {
|
||||
restoredSessionId = sid;
|
||||
return context;
|
||||
};
|
||||
|
||||
server = new SingleSessionHTTPServer({
|
||||
onSessionNotFound: restorationHook,
|
||||
sessionRestorationTimeout: 5000
|
||||
});
|
||||
|
||||
// Start server
|
||||
await server.start();
|
||||
|
||||
// Client sends request with unknown session ID
|
||||
const { req, res } = createMockReqRes(sessionId);
|
||||
|
||||
// Handle request
|
||||
await server.handleRequest(req, res, context);
|
||||
|
||||
// Verify restoration hook was called
|
||||
expect(restoredSessionId).toBe(sessionId);
|
||||
|
||||
// Verify response was handled (not rejected with 400/404)
|
||||
// A successful restoration should not return these error codes
|
||||
expect(res.status).not.toHaveBeenCalledWith(400);
|
||||
expect(res.status).not.toHaveBeenCalledWith(404);
|
||||
|
||||
// Verify a response was sent (either success or -32000 for initialization)
|
||||
expect(res.json).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should emit onSessionRestored event after successful restoration', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-api-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'test-session-550e8400';
|
||||
let restoredEventFired = false;
|
||||
let restoredEventSessionId: string | null = null;
|
||||
|
||||
const restorationHook: SessionRestoreHook = async () => context;
|
||||
|
||||
server = new SingleSessionHTTPServer({
|
||||
onSessionNotFound: restorationHook,
|
||||
sessionEvents: {
|
||||
onSessionRestored: (sid, ctx) => {
|
||||
restoredEventFired = true;
|
||||
restoredEventSessionId = sid;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
await server.start();
|
||||
|
||||
const { req, res } = createMockReqRes(sessionId);
|
||||
await server.handleRequest(req, res, context);
|
||||
|
||||
// Wait for async event
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
expect(restoredEventFired).toBe(true);
|
||||
expect(restoredEventSessionId).toBe(sessionId);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Failure Cleanup', () => {
|
||||
it('should clean up session when restoration fails', async () => {
|
||||
const sessionId = 'test-session-550e8400';
|
||||
|
||||
// Mock failing restoration hook
|
||||
const failingHook: SessionRestoreHook = async () => {
|
||||
throw new Error('Database connection failed');
|
||||
};
|
||||
|
||||
server = new SingleSessionHTTPServer({
|
||||
onSessionNotFound: failingHook,
|
||||
sessionRestorationTimeout: 5000
|
||||
});
|
||||
|
||||
await server.start();
|
||||
|
||||
const { req, res } = createMockReqRes(sessionId);
|
||||
await server.handleRequest(req, res);
|
||||
|
||||
// Verify error response
|
||||
expect(res.status).toHaveBeenCalledWith(500);
|
||||
|
||||
// Verify session was NOT created (cleanup happened)
|
||||
const activeSessions = server.getActiveSessions();
|
||||
expect(activeSessions).not.toContain(sessionId);
|
||||
});
|
||||
|
||||
it('should clean up session when restoration times out', async () => {
|
||||
const sessionId = 'test-session-550e8400';
|
||||
|
||||
// Mock slow restoration hook
|
||||
const slowHook: SessionRestoreHook = async () => {
|
||||
await new Promise(resolve => setTimeout(resolve, 10000)); // 10 seconds
|
||||
return {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test'
|
||||
};
|
||||
};
|
||||
|
||||
server = new SingleSessionHTTPServer({
|
||||
onSessionNotFound: slowHook,
|
||||
sessionRestorationTimeout: 100 // 100ms timeout
|
||||
});
|
||||
|
||||
await server.start();
|
||||
|
||||
const { req, res } = createMockReqRes(sessionId);
|
||||
await server.handleRequest(req, res);
|
||||
|
||||
// Verify timeout response
|
||||
expect(res.status).toHaveBeenCalledWith(408);
|
||||
|
||||
// Verify session was cleaned up
|
||||
const activeSessions = server.getActiveSessions();
|
||||
expect(activeSessions).not.toContain(sessionId);
|
||||
});
|
||||
|
||||
it('should clean up session when restored context is invalid', async () => {
|
||||
const sessionId = 'test-session-550e8400';
|
||||
|
||||
// Mock hook returning invalid context
|
||||
const invalidHook: SessionRestoreHook = async () => {
|
||||
return {
|
||||
n8nApiUrl: 'not-a-valid-url', // Invalid URL format
|
||||
n8nApiKey: 'test-key',
|
||||
instanceId: 'test'
|
||||
} as any;
|
||||
};
|
||||
|
||||
server = new SingleSessionHTTPServer({
|
||||
onSessionNotFound: invalidHook,
|
||||
sessionRestorationTimeout: 5000
|
||||
});
|
||||
|
||||
await server.start();
|
||||
|
||||
const { req, res } = createMockReqRes(sessionId);
|
||||
await server.handleRequest(req, res);
|
||||
|
||||
// Verify validation error response
|
||||
expect(res.status).toHaveBeenCalledWith(400);
|
||||
|
||||
// Verify session was NOT created
|
||||
const activeSessions = server.getActiveSessions();
|
||||
expect(activeSessions).not.toContain(sessionId);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Concurrent Idempotency', () => {
|
||||
it('should handle concurrent restoration attempts for same session idempotently', async () => {
|
||||
const context: InstanceContext = {
|
||||
n8nApiUrl: 'https://test.n8n.cloud',
|
||||
n8nApiKey: 'test-api-key',
|
||||
instanceId: 'test-instance'
|
||||
};
|
||||
|
||||
const sessionId = 'test-session-550e8400';
|
||||
let hookCallCount = 0;
|
||||
|
||||
// Mock restoration hook with slow query
|
||||
const restorationHook: SessionRestoreHook = async () => {
|
||||
hookCallCount++;
|
||||
// Simulate slow database query
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
return context;
|
||||
};
|
||||
|
||||
server = new SingleSessionHTTPServer({
|
||||
onSessionNotFound: restorationHook,
|
||||
sessionRestorationTimeout: 5000
|
||||
});
|
||||
|
||||
await server.start();
|
||||
|
||||
// Send 5 concurrent requests with same unknown session ID
|
||||
const requests = Array.from({ length: 5 }, (_, i) => {
|
||||
const { req, res } = createMockReqRes(sessionId, {
|
||||
jsonrpc: '2.0',
|
||||
method: 'tools/list',
|
||||
params: {},
|
||||
id: i + 1
|
||||
});
|
||||
return server.handleRequest(req, res, context);
|
||||
});
|
||||
|
||||
      // All should complete without error (no unhandled rejections)
      const results = await Promise.allSettled(requests);

      // All requests should complete (either fulfilled or rejected)
      expect(results.length).toBe(5);

      // Hook should be called at least once (possibly more for concurrent requests)
      expect(hookCallCount).toBeGreaterThan(0);

      // None of the requests should fail with server errors (500)
      // They may return -32000 for initialization, but that's expected
      results.forEach((result, i) => {
        if (result.status === 'rejected') {
          // Unexpected rejection - fail the test
          throw new Error(`Request ${i} failed unexpectedly: ${result.reason}`);
        }
      });
    });

    it('should reuse already-restored session for concurrent requests', async () => {
      const context: InstanceContext = {
        n8nApiUrl: 'https://test.n8n.cloud',
        n8nApiKey: 'test-api-key',
        instanceId: 'test-instance'
      };

      const sessionId = 'test-session-550e8400';
      let hookCallCount = 0;

      // Track restoration attempts
      const restorationHook: SessionRestoreHook = async () => {
        hookCallCount++;
        return context;
      };

      server = new SingleSessionHTTPServer({
        onSessionNotFound: restorationHook,
        sessionRestorationTimeout: 5000
      });

      await server.start();

      // First request triggers restoration
      const { req: req1, res: res1 } = createMockReqRes(sessionId);
      await server.handleRequest(req1, res1, context);

      // Verify hook was called for first request
      expect(hookCallCount).toBe(1);

      // Second request with same session ID
      const { req: req2, res: res2 } = createMockReqRes(sessionId);
      await server.handleRequest(req2, res2, context);

      // If session was reused, hook should not be called again
      // (or called again if session wasn't fully initialized yet)
      // Either way, both requests should complete without errors
      expect(res1.json).toHaveBeenCalled();
      expect(res2.json).toHaveBeenCalled();
    });
  });

  describe('Restoration Hook Edge Cases', () => {
    it('should handle restoration hook returning null (session rejected)', async () => {
      const sessionId = 'test-session-550e8400';

      // Hook explicitly rejects restoration
      const rejectingHook: SessionRestoreHook = async () => null;

      server = new SingleSessionHTTPServer({
        onSessionNotFound: rejectingHook,
        sessionRestorationTimeout: 5000
      });

      await server.start();

      const { req, res } = createMockReqRes(sessionId);
      await server.handleRequest(req, res);

      // Verify rejection response
      expect(res.status).toHaveBeenCalledWith(400);

      // Verify session was NOT created
      expect(server.getActiveSessions()).not.toContain(sessionId);
    });

    it('should handle restoration hook returning undefined (session rejected)', async () => {
      const sessionId = 'test-session-550e8400';

      // Hook returns undefined
      const undefinedHook: SessionRestoreHook = async () => undefined as any;

      server = new SingleSessionHTTPServer({
        onSessionNotFound: undefinedHook,
        sessionRestorationTimeout: 5000
      });

      await server.start();

      const { req, res } = createMockReqRes(sessionId);
      await server.handleRequest(req, res);

      // Verify rejection response
      expect(res.status).toHaveBeenCalledWith(400);

      // Verify session was NOT created
      expect(server.getActiveSessions()).not.toContain(sessionId);
    });
  });
});
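For reference, a production onSessionNotFound hook would typically look up the session's InstanceContext in a persistent store rather than returning a fixed value. A minimal sketch consistent with the contract the tests above pin down (the store is a stand-in, not part of this repo):

// Sketch only: a Map stands in for whatever persistence layer (Redis, a DB)
// a deployment would use; the null-return semantics match the edge-case tests.
import { InstanceContext } from '../../../src/types/instance-context';

const sessionStore = new Map<string, InstanceContext>();

const restoreFromStore = async (sessionId: string): Promise<InstanceContext | null> => {
  const context = sessionStore.get(sessionId);
  // Returning null (or undefined) rejects restoration: the server responds
  // with 400 and creates no session, as asserted above.
  return context ?? null;
};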
138
tests/integration/session/test-onSessionCreated-event.ts
Normal file
@@ -0,0 +1,138 @@
/**
 * Test to verify that onSessionCreated event is fired during standard initialize flow
 * This test addresses the bug reported in v2.19.0 where the event was not fired
 * for sessions created during the initialize request.
 */

import { SingleSessionHTTPServer } from '../../../src/http-server-single-session';
import { InstanceContext } from '../../../src/types/instance-context';

// Mock environment setup
process.env.AUTH_TOKEN = 'test-token-for-n8n-testing-minimum-32-chars';
process.env.NODE_ENV = 'test';
process.env.PORT = '3456'; // Use different port to avoid conflicts

async function testOnSessionCreatedEvent() {
  console.log('\n🧪 Test: onSessionCreated Event Firing During Initialize\n');
  console.log('━'.repeat(60));

  let eventFired = false;
  let capturedSessionId: string | undefined;
  let capturedContext: InstanceContext | undefined;

  // Create server with onSessionCreated handler
  const server = new SingleSessionHTTPServer({
    sessionEvents: {
      onSessionCreated: async (sessionId: string, instanceContext?: InstanceContext) => {
        console.log('✅ onSessionCreated event fired!');
        console.log(`   Session ID: ${sessionId}`);
        console.log(`   Context: ${instanceContext ? 'Present' : 'Not provided'}`);
        eventFired = true;
        capturedSessionId = sessionId;
        capturedContext = instanceContext;
      }
    }
  });

  try {
    // Start the HTTP server
    console.log('\n📡 Starting HTTP server...');
    await server.start();
    console.log('✅ Server started\n');

    // Wait a moment for server to be ready
    await new Promise(resolve => setTimeout(resolve, 500));

    // Simulate an MCP initialize request
    console.log('📤 Simulating MCP initialize request...');

    const port = parseInt(process.env.PORT || '3456');
    const fetch = (await import('node-fetch')).default;

    const response = await fetch(`http://localhost:${port}/mcp`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer test-token-for-n8n-testing-minimum-32-chars',
        'Accept': 'application/json, text/event-stream'
      },
      body: JSON.stringify({
        jsonrpc: '2.0',
        method: 'initialize',
        params: {
          protocolVersion: '2024-11-05',
          capabilities: {},
          clientInfo: {
            name: 'test-client',
            version: '1.0.0'
          }
        },
        id: 1
      })
    });

    const result = await response.json() as any;

    console.log('📥 Response received:', response.status);
    console.log('   Response body:', JSON.stringify(result, null, 2));

    // Wait a moment for event to be processed
    await new Promise(resolve => setTimeout(resolve, 1000));

    // Verify results
    console.log('\n🔍 Verification:');
    console.log('━'.repeat(60));

    if (eventFired) {
      console.log('✅ SUCCESS: onSessionCreated event was fired');
      console.log(`   Captured Session ID: ${capturedSessionId}`);
      console.log(`   Context provided: ${capturedContext !== undefined}`);

      // Verify session is in active sessions list
      const activeSessions = server.getActiveSessions();
      console.log(`\n📊 Active sessions count: ${activeSessions.length}`);

      if (activeSessions.length > 0) {
        console.log('✅ Session registered in active sessions list');
        console.log(`   Session IDs: ${activeSessions.join(', ')}`);
      } else {
        console.log('❌ No active sessions found');
      }

      // Check if captured session ID is in active sessions
      if (capturedSessionId && activeSessions.includes(capturedSessionId)) {
        console.log('✅ Event session ID matches active session');
      } else {
        console.log('⚠️ Event session ID not found in active sessions');
      }

      console.log('\n🎉 TEST PASSED: Bug is fixed!');
      console.log('━'.repeat(60));

    } else {
      console.log('❌ FAILURE: onSessionCreated event was NOT fired');
      console.log('━'.repeat(60));
      console.log('\n💔 TEST FAILED: Bug still exists');
    }

    // Cleanup
    await server.shutdown();

    return eventFired;

  } catch (error) {
    console.error('\n❌ Test error:', error);
    await server.shutdown();
    return false;
  }
}

// Run the test
testOnSessionCreatedEvent()
  .then(success => {
    process.exit(success ? 0 : 1);
  })
  .catch(error => {
    console.error('Unhandled error:', error);
    process.exit(1);
  });
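The initialize payload above is the standard MCP handshake. For repeated manual tests it can be factored into a small helper; a sketch (the helper and its interface are ours, not part of the repo):

// Hypothetical helper that builds the JSON-RPC initialize body used above.
interface InitializeRequest {
  jsonrpc: '2.0';
  method: 'initialize';
  params: {
    protocolVersion: string;
    capabilities: Record<string, unknown>;
    clientInfo: { name: string; version: string };
  };
  id: number;
}

function buildInitializeRequest(id = 1): InitializeRequest {
  return {
    jsonrpc: '2.0',
    method: 'initialize',
    params: {
      protocolVersion: '2024-11-05',
      capabilities: {},
      clientInfo: { name: 'test-client', version: '1.0.0' }
    },
    id
  };
}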
@@ -1,722 +0,0 @@
/**
 * Integration tests for AI node connection validation in workflow diff operations
 * Tests that AI nodes with AI-specific connection types (ai_languageModel, ai_memory, etc.)
 * are properly validated without requiring main connections
 *
 * Related to issue #357
 */

import { describe, test, expect } from 'vitest';
import { WorkflowDiffEngine } from '../../../src/services/workflow-diff-engine';

describe('AI Node Connection Validation', () => {
  describe('AI-specific connection types', () => {
    test('should accept workflow with ai_languageModel connections', async () => {
      const workflow = {
        id: 'test-workflow',
        name: 'AI Language Model Test',
        nodes: [
          {
            id: 'agent-node',
            name: 'AI Agent',
            type: '@n8n/n8n-nodes-langchain.agent',
            typeVersion: 1,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'llm-node',
            name: 'OpenAI Chat Model',
            type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          }
        ],
        connections: {
          'OpenAI Chat Model': {
            ai_languageModel: [
              [{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
    });

    test('should accept workflow with ai_memory connections', async () => {
      const workflow = {
        id: 'test-workflow',
        name: 'AI Memory Test',
        nodes: [
          {
            id: 'agent-node',
            name: 'AI Agent',
            type: '@n8n/n8n-nodes-langchain.agent',
            typeVersion: 1,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'memory-node',
            name: 'Postgres Chat Memory',
            type: '@n8n/n8n-nodes-langchain.memoryPostgresChat',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          }
        ],
        connections: {
          'Postgres Chat Memory': {
            ai_memory: [
              [{ node: 'AI Agent', type: 'ai_memory', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
    });

    test('should accept workflow with ai_embedding connections', async () => {
      const workflow = {
        id: 'test-workflow',
        name: 'AI Embedding Test',
        nodes: [
          {
            id: 'vectorstore-node',
            name: 'Vector Store',
            type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
            typeVersion: 1,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'embedding-node',
            name: 'Embeddings OpenAI',
            type: '@n8n/n8n-nodes-langchain.embeddingsOpenAi',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          }
        ],
        connections: {
          'Embeddings OpenAI': {
            ai_embedding: [
              [{ node: 'Vector Store', type: 'ai_embedding', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
    });

    test('should accept workflow with ai_tool connections', async () => {
      const workflow = {
        id: 'test-workflow',
        name: 'AI Tool Test',
        nodes: [
          {
            id: 'agent-node',
            name: 'AI Agent',
            type: '@n8n/n8n-nodes-langchain.agent',
            typeVersion: 1,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'vectorstore-node',
            name: 'Vector Store Tool',
            type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          }
        ],
        connections: {
          'Vector Store Tool': {
            ai_tool: [
              [{ node: 'AI Agent', type: 'ai_tool', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
    });

    test('should accept workflow with ai_vectorStore connections', async () => {
      const workflow = {
        id: 'test-workflow',
        name: 'AI Vector Store Test',
        nodes: [
          {
            id: 'agent-node',
            name: 'AI Agent',
            type: '@n8n/n8n-nodes-langchain.agent',
            typeVersion: 1,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'vectorstore-node',
            name: 'Supabase Vector Store',
            type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          }
        ],
        connections: {
          'Supabase Vector Store': {
            ai_vectorStore: [
              [{ node: 'AI Agent', type: 'ai_vectorStore', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
    });
  });

  describe('Mixed connection types', () => {
    test('should accept workflow mixing main and AI connections', async () => {
      const workflow = {
        id: 'test-workflow',
        name: 'Mixed Connections Test',
        nodes: [
          {
            id: 'webhook-node',
            name: 'Webhook',
            type: 'n8n-nodes-base.webhook',
            typeVersion: 1,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'agent-node',
            name: 'AI Agent',
            type: '@n8n/n8n-nodes-langchain.agent',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          },
          {
            id: 'llm-node',
            name: 'OpenAI Chat Model',
            type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
            typeVersion: 1,
            position: [200, 200],
            parameters: {}
          },
          {
            id: 'respond-node',
            name: 'Respond to Webhook',
            type: 'n8n-nodes-base.respondToWebhook',
            typeVersion: 1,
            position: [400, 0],
            parameters: {}
          }
        ],
        connections: {
          'Webhook': {
            main: [
              [{ node: 'AI Agent', type: 'main', index: 0 }]
            ]
          },
          'AI Agent': {
            main: [
              [{ node: 'Respond to Webhook', type: 'main', index: 0 }]
            ]
          },
          'OpenAI Chat Model': {
            ai_languageModel: [
              [{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
    });

    test('should accept workflow with error connections alongside AI connections', async () => {
      const workflow = {
        id: 'test-workflow',
        name: 'Error + AI Connections Test',
        nodes: [
          {
            id: 'webhook-node',
            name: 'Webhook',
            type: 'n8n-nodes-base.webhook',
            typeVersion: 1,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'agent-node',
            name: 'AI Agent',
            type: '@n8n/n8n-nodes-langchain.agent',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          },
          {
            id: 'llm-node',
            name: 'OpenAI Chat Model',
            type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
            typeVersion: 1,
            position: [200, 200],
            parameters: {}
          },
          {
            id: 'error-handler',
            name: 'Error Handler',
            type: 'n8n-nodes-base.set',
            typeVersion: 1,
            position: [200, -200],
            parameters: {}
          }
        ],
        connections: {
          'Webhook': {
            main: [
              [{ node: 'AI Agent', type: 'main', index: 0 }]
            ]
          },
          'AI Agent': {
            error: [
              [{ node: 'Error Handler', type: 'main', index: 0 }]
            ]
          },
          'OpenAI Chat Model': {
            ai_languageModel: [
              [{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
    });
  });

  describe('Complex AI workflow (Issue #357 scenario)', () => {
    test('should accept full AI agent workflow with RAG components', async () => {
      // Simplified version of the workflow from issue #357
      const workflow = {
        id: 'test-workflow',
        name: 'AI Agent with RAG',
        nodes: [
          {
            id: 'webhook-node',
            name: 'Webhook',
            type: 'n8n-nodes-base.webhook',
            typeVersion: 2,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'code-node',
            name: 'Prepare Inputs',
            type: 'n8n-nodes-base.code',
            typeVersion: 2,
            position: [200, 0],
            parameters: {}
          },
          {
            id: 'agent-node',
            name: 'AI Agent',
            type: '@n8n/n8n-nodes-langchain.agent',
            typeVersion: 1.7,
            position: [400, 0],
            parameters: {}
          },
          {
            id: 'llm-node',
            name: 'OpenAI Chat Model',
            type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
            typeVersion: 1,
            position: [400, 200],
            parameters: {}
          },
          {
            id: 'memory-node',
            name: 'Postgres Chat Memory',
            type: '@n8n/n8n-nodes-langchain.memoryPostgresChat',
            typeVersion: 1.1,
            position: [500, 200],
            parameters: {}
          },
          {
            id: 'embedding-node',
            name: 'Embeddings OpenAI',
            type: '@n8n/n8n-nodes-langchain.embeddingsOpenAi',
            typeVersion: 1,
            position: [600, 400],
            parameters: {}
          },
          {
            id: 'vectorstore-node',
            name: 'Supabase Vector Store',
            type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
            typeVersion: 1.3,
            position: [600, 200],
            parameters: {}
          },
          {
            id: 'respond-node',
            name: 'Respond to Webhook',
            type: 'n8n-nodes-base.respondToWebhook',
            typeVersion: 1.1,
            position: [600, 0],
            parameters: {}
          }
        ],
        connections: {
          'Webhook': {
            main: [
              [{ node: 'Prepare Inputs', type: 'main', index: 0 }]
            ]
          },
          'Prepare Inputs': {
            main: [
              [{ node: 'AI Agent', type: 'main', index: 0 }]
            ]
          },
          'AI Agent': {
            main: [
              [{ node: 'Respond to Webhook', type: 'main', index: 0 }]
            ]
          },
          'OpenAI Chat Model': {
            ai_languageModel: [
              [{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
            ]
          },
          'Postgres Chat Memory': {
            ai_memory: [
              [{ node: 'AI Agent', type: 'ai_memory', index: 0 }]
            ]
          },
          'Embeddings OpenAI': {
            ai_embedding: [
              [{ node: 'Supabase Vector Store', type: 'ai_embedding', index: 0 }]
            ]
          },
          'Supabase Vector Store': {
            ai_tool: [
              [{ node: 'AI Agent', type: 'ai_tool', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
      expect(result.errors || []).toHaveLength(0);
    });

    test('should successfully update AI workflow nodes without connection errors', async () => {
      // Test that we can update nodes in an AI workflow without triggering validation errors
      const workflow = {
        id: 'test-workflow',
        name: 'AI Workflow Update Test',
        nodes: [
          {
            id: 'webhook-node',
            name: 'Webhook',
            type: 'n8n-nodes-base.webhook',
            typeVersion: 2,
            position: [0, 0],
            parameters: { path: 'test' }
          },
          {
            id: 'agent-node',
            name: 'AI Agent',
            type: '@n8n/n8n-nodes-langchain.agent',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          },
          {
            id: 'llm-node',
            name: 'OpenAI Chat Model',
            type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
            typeVersion: 1,
            position: [200, 200],
            parameters: {}
          }
        ],
        connections: {
          'Webhook': {
            main: [
              [{ node: 'AI Agent', type: 'main', index: 0 }]
            ]
          },
          'OpenAI Chat Model': {
            ai_languageModel: [
              [{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();

      // Update the webhook node (unrelated to AI nodes)
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: [
          {
            type: 'updateNode',
            nodeId: 'webhook-node',
            updates: {
              notes: 'Updated webhook configuration'
            }
          }
        ]
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
      expect(result.errors || []).toHaveLength(0);

      // Verify the update was applied
      const updatedNode = result.workflow.nodes.find((n: any) => n.id === 'webhook-node');
      expect(updatedNode?.notes).toBe('Updated webhook configuration');
    });
  });

  describe('Node-only AI nodes (no main connections)', () => {
    test('should accept AI nodes with ONLY ai_languageModel connections', async () => {
      const workflow = {
        id: 'test-workflow',
        name: 'AI Node Without Main',
        nodes: [
          {
            id: 'agent-node',
            name: 'AI Agent',
            type: '@n8n/n8n-nodes-langchain.agent',
            typeVersion: 1,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'llm-node',
            name: 'OpenAI Chat Model',
            type: '@n8n/n8n-nodes-langchain.lmChatOpenAi',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          }
        ],
        connections: {
          // OpenAI Chat Model has NO main connections, ONLY ai_languageModel
          'OpenAI Chat Model': {
            ai_languageModel: [
              [{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
      expect(result.errors || []).toHaveLength(0);
    });

    test('should accept AI nodes with ONLY ai_memory connections', async () => {
      const workflow = {
        id: 'test-workflow',
        name: 'Memory Node Without Main',
        nodes: [
          {
            id: 'agent-node',
            name: 'AI Agent',
            type: '@n8n/n8n-nodes-langchain.agent',
            typeVersion: 1,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'memory-node',
            name: 'Postgres Chat Memory',
            type: '@n8n/n8n-nodes-langchain.memoryPostgresChat',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          }
        ],
        connections: {
          // Memory node has NO main connections, ONLY ai_memory
          'Postgres Chat Memory': {
            ai_memory: [
              [{ node: 'AI Agent', type: 'ai_memory', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
      expect(result.errors || []).toHaveLength(0);
    });

    test('should accept embedding nodes with ONLY ai_embedding connections', async () => {
      const workflow = {
        id: 'test-workflow',
        name: 'Embedding Node Without Main',
        nodes: [
          {
            id: 'vectorstore-node',
            name: 'Vector Store',
            type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
            typeVersion: 1,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'embedding-node',
            name: 'Embeddings OpenAI',
            type: '@n8n/n8n-nodes-langchain.embeddingsOpenAi',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          }
        ],
        connections: {
          // Embedding node has NO main connections, ONLY ai_embedding
          'Embeddings OpenAI': {
            ai_embedding: [
              [{ node: 'Vector Store', type: 'ai_embedding', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
      expect(result.errors || []).toHaveLength(0);
    });

    test('should accept vector store nodes with ONLY ai_tool connections', async () => {
      const workflow = {
        id: 'test-workflow',
        name: 'Vector Store Node Without Main',
        nodes: [
          {
            id: 'agent-node',
            name: 'AI Agent',
            type: '@n8n/n8n-nodes-langchain.agent',
            typeVersion: 1,
            position: [0, 0],
            parameters: {}
          },
          {
            id: 'vectorstore-node',
            name: 'Supabase Vector Store',
            type: '@n8n/n8n-nodes-langchain.vectorStoreSupabase',
            typeVersion: 1,
            position: [200, 0],
            parameters: {}
          }
        ],
        connections: {
          // Vector store has NO main connections, ONLY ai_tool
          'Supabase Vector Store': {
            ai_tool: [
              [{ node: 'AI Agent', type: 'ai_tool', index: 0 }]
            ]
          }
        }
      };

      const engine = new WorkflowDiffEngine();
      const result = await engine.applyDiff(workflow as any, {
        id: workflow.id,
        operations: []
      });

      expect(result.success).toBe(true);
      expect(result.workflow).toBeDefined();
      expect(result.errors || []).toHaveLength(0);
    });
  });
});
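All of these fixtures share n8n's connections shape: source node name, then connection type, then an array of output slots, each holding an array of targets. An illustrative TypeScript sketch of that shape (type names are ours):

// Illustrative types for the connections map used in the fixtures above.
interface ConnectionTarget {
  node: string;  // target node *name*, which is why renames must rewrite these
  type: string;  // 'main', 'error', or an AI type such as 'ai_languageModel'
  index: number; // target input index
}

// sourceNodeName -> connectionType -> outputSlot -> targets
type ConnectionsMap = Record<string, Record<string, ConnectionTarget[][]>>;

const example: ConnectionsMap = {
  'OpenAI Chat Model': {
    ai_languageModel: [[{ node: 'AI Agent', type: 'ai_languageModel', index: 0 }]]
  }
};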
@@ -1,573 +0,0 @@
/**
 * Integration tests for auto-update connection references on node rename
 * Tests real-world workflow scenarios from Issue #353
 */

import { describe, it, expect, beforeEach } from 'vitest';
import { WorkflowDiffEngine } from '@/services/workflow-diff-engine';
import { validateWorkflowStructure } from '@/services/n8n-validation';
import { WorkflowDiffRequest, UpdateNodeOperation } from '@/types/workflow-diff';
import { Workflow, WorkflowNode } from '@/types/n8n-api';

describe('WorkflowDiffEngine - Node Rename Integration Tests', () => {
  let diffEngine: WorkflowDiffEngine;

  beforeEach(() => {
    diffEngine = new WorkflowDiffEngine();
  });

  describe('Real-world API endpoint workflow (Issue #353 scenario)', () => {
    let apiWorkflow: Workflow;

    beforeEach(() => {
      // Complex real-world API endpoint workflow
      apiWorkflow = {
        id: 'api-workflow',
        name: 'POST /patients/:id/approaches - Add Approach',
        nodes: [
          {
            id: 'webhook-trigger',
            name: 'Webhook',
            type: 'n8n-nodes-base.webhook',
            typeVersion: 2,
            position: [0, 0],
            parameters: {
              path: 'patients/{{$parameter["id"]}}/approaches',
httpMethod: 'POST',
|
||||
responseMode: 'responseNode'
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'validate-request',
|
||||
name: 'Validate Request',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [200, 0],
|
||||
parameters: {
|
||||
mode: 'runOnceForAllItems',
|
||||
jsCode: '// Validation logic'
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'check-auth',
|
||||
name: 'Check Authorization',
|
||||
type: 'n8n-nodes-base.if',
|
||||
typeVersion: 2,
|
||||
position: [400, 0],
|
||||
parameters: {
|
||||
conditions: {
|
||||
boolean: [{ value1: '={{$json.authorized}}', value2: true }]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'process-request',
|
||||
name: 'Process Request',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [600, 0],
|
||||
parameters: {
|
||||
mode: 'runOnceForAllItems',
|
||||
jsCode: '// Processing logic'
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'return-success',
|
||||
name: 'Return 200 OK',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1.1,
|
||||
position: [800, 0],
|
||||
parameters: {
|
||||
responseBody: '={{ {"success": true, "data": $json} }}',
|
||||
options: { responseCode: 200 }
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'return-forbidden',
|
||||
name: 'Return 403 Forbidden1',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1.1,
|
||||
position: [600, 200],
|
||||
parameters: {
|
||||
responseBody: '={{ {"error": "Forbidden"} }}',
|
||||
options: { responseCode: 403 }
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'handle-error',
|
||||
name: 'Handle Error',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [400, 300],
|
||||
parameters: {
|
||||
mode: 'runOnceForAllItems',
|
||||
jsCode: '// Error handling'
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'return-error',
|
||||
name: 'Return 500 Error',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1.1,
|
||||
position: [600, 300],
|
||||
parameters: {
|
||||
responseBody: '={{ {"error": "Internal Server Error"} }}',
|
||||
options: { responseCode: 500 }
|
||||
}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Webhook': {
|
||||
main: [[{ node: 'Validate Request', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Validate Request': {
|
||||
main: [[{ node: 'Check Authorization', type: 'main', index: 0 }]],
|
||||
error: [[{ node: 'Handle Error', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Check Authorization': {
|
||||
main: [
|
||||
[{ node: 'Process Request', type: 'main', index: 0 }], // true branch
|
||||
[{ node: 'Return 403 Forbidden1', type: 'main', index: 0 }] // false branch
|
||||
],
|
||||
error: [[{ node: 'Handle Error', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Process Request': {
|
||||
main: [[{ node: 'Return 200 OK', type: 'main', index: 0 }]],
|
||||
error: [[{ node: 'Handle Error', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Handle Error': {
|
||||
main: [[{ node: 'Return 500 Error', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
it('should successfully rename error response node and maintain all connections', async () => {
|
||||
// The exact operation from Issue #353
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'return-forbidden',
|
||||
updates: {
|
||||
name: 'Return 404 Not Found',
|
||||
parameters: {
|
||||
responseBody: '={{ {"error": "Not Found"} }}',
|
||||
options: { responseCode: 404 }
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'api-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(apiWorkflow, request);
|
||||
|
||||
// Should succeed
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Node should be renamed
|
||||
const renamedNode = result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'return-forbidden');
|
||||
expect(renamedNode?.name).toBe('Return 404 Not Found');
|
||||
expect(renamedNode?.parameters.options?.responseCode).toBe(404);
|
||||
|
||||
// Connection from IF node should be updated
|
||||
expect(result.workflow!.connections['Check Authorization'].main[1][0].node).toBe('Return 404 Not Found');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle multiple node renames in complex workflow', async () => {
|
||||
const operations: UpdateNodeOperation[] = [
|
||||
{
|
||||
type: 'updateNode',
|
||||
nodeId: 'return-forbidden',
|
||||
updates: { name: 'Return 404 Not Found' }
|
||||
},
|
||||
{
|
||||
type: 'updateNode',
|
||||
nodeId: 'return-success',
|
||||
updates: { name: 'Return 201 Created' }
|
||||
},
|
||||
{
|
||||
type: 'updateNode',
|
||||
nodeId: 'return-error',
|
||||
updates: { name: 'Return 500 Internal Server Error' }
|
||||
}
|
||||
];
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'api-workflow',
|
||||
operations
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(apiWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// All nodes should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'return-forbidden')?.name).toBe('Return 404 Not Found');
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'return-success')?.name).toBe('Return 201 Created');
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'return-error')?.name).toBe('Return 500 Internal Server Error');
|
||||
|
||||
// All connections should be updated
|
||||
expect(result.workflow!.connections['Check Authorization'].main[1][0].node).toBe('Return 404 Not Found');
|
||||
expect(result.workflow!.connections['Process Request'].main[0][0].node).toBe('Return 201 Created');
|
||||
expect(result.workflow!.connections['Handle Error'].main[0][0].node).toBe('Return 500 Internal Server Error');
|
||||
|
||||
// Validate entire workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should maintain error connections after rename', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'validate-request',
|
||||
updates: { name: 'Validate Input' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'api-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(apiWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Main connection should be updated
|
||||
expect(result.workflow!.connections['Validate Input']).toBeDefined();
|
||||
expect(result.workflow!.connections['Validate Input'].main[0][0].node).toBe('Check Authorization');
|
||||
|
||||
// Error connection should also be updated
|
||||
expect(result.workflow!.connections['Validate Input'].error[0][0].node).toBe('Handle Error');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('AI Agent workflow with tool connections', () => {
|
||||
let aiWorkflow: Workflow;
|
||||
|
||||
beforeEach(() => {
|
||||
aiWorkflow = {
|
||||
id: 'ai-workflow',
|
||||
name: 'AI Customer Support Agent',
|
||||
nodes: [
|
||||
{
|
||||
id: 'webhook-1',
|
||||
name: 'Customer Query',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 2,
|
||||
position: [0, 0],
|
||||
parameters: { path: 'support', httpMethod: 'POST' }
|
||||
},
|
||||
{
|
||||
id: 'agent-1',
|
||||
name: 'Support Agent',
|
||||
type: '@n8n/n8n-nodes-langchain.agent',
|
||||
typeVersion: 1,
|
||||
position: [200, 0],
|
||||
parameters: { promptTemplate: 'Help the customer with: {{$json.query}}' }
|
||||
},
|
||||
{
|
||||
id: 'tool-http',
|
||||
name: 'Knowledge Base API',
|
||||
type: '@n8n/n8n-nodes-langchain.toolHttpRequest',
|
||||
typeVersion: 1,
|
||||
position: [200, 100],
|
||||
parameters: { url: 'https://kb.example.com/search' }
|
||||
},
|
||||
{
|
||||
id: 'tool-code',
|
||||
name: 'Custom Logic Tool',
|
||||
type: '@n8n/n8n-nodes-langchain.toolCode',
|
||||
typeVersion: 1,
|
||||
position: [200, 200],
|
||||
parameters: { code: '// Custom logic' }
|
||||
},
|
||||
{
|
||||
id: 'response-1',
|
||||
name: 'Send Response',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1.1,
|
||||
position: [400, 0],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'Customer Query': {
|
||||
main: [[{ node: 'Support Agent', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Support Agent': {
|
||||
main: [[{ node: 'Send Response', type: 'main', index: 0 }]],
|
||||
ai_tool: [
|
||||
[
|
||||
{ node: 'Knowledge Base API', type: 'ai_tool', index: 0 },
|
||||
{ node: 'Custom Logic Tool', type: 'ai_tool', index: 0 }
|
||||
]
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
// SKIPPED: Pre-existing validation bug - validateWorkflowStructure() doesn't recognize
|
||||
// AI connections (ai_tool, ai_languageModel, etc.) as valid, causing false positives.
|
||||
// The rename feature works correctly - connections ARE updated. Validation is the issue.
|
||||
// TODO: Fix validateWorkflowStructure() to check all connection types, not just 'main'
|
||||
it.skip('should update AI tool connections when renaming agent', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'agent-1',
|
||||
updates: { name: 'AI Support Assistant' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'ai-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(aiWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Agent should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'agent-1')?.name).toBe('AI Support Assistant');
|
||||
|
||||
// All connections should be updated
|
||||
expect(result.workflow!.connections['AI Support Assistant']).toBeDefined();
|
||||
expect(result.workflow!.connections['AI Support Assistant'].main[0][0].node).toBe('Send Response');
|
||||
expect(result.workflow!.connections['AI Support Assistant'].ai_tool[0]).toHaveLength(2);
|
||||
expect(result.workflow!.connections['AI Support Assistant'].ai_tool[0][0].node).toBe('Knowledge Base API');
|
||||
expect(result.workflow!.connections['AI Support Assistant'].ai_tool[0][1].node).toBe('Custom Logic Tool');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
// SKIPPED: Pre-existing validation bug - validateWorkflowStructure() doesn't recognize
|
||||
// AI connections (ai_tool, ai_languageModel, etc.) as valid, causing false positives.
|
||||
// The rename feature works correctly - connections ARE updated. Validation is the issue.
|
||||
// TODO: Fix validateWorkflowStructure() to check all connection types, not just 'main'
|
||||
it.skip('should update AI tool connections when renaming tool', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'tool-http',
|
||||
updates: { name: 'Documentation Search' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'ai-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(aiWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Tool should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'tool-http')?.name).toBe('Documentation Search');
|
||||
|
||||
// AI tool connection should reference new name
|
||||
expect(result.workflow!.connections['Support Agent'].ai_tool[0][0].node).toBe('Documentation Search');
|
||||
// Other tool should remain unchanged
|
||||
expect(result.workflow!.connections['Support Agent'].ai_tool[0][1].node).toBe('Custom Logic Tool');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multi-branch workflow with IF and Switch nodes', () => {
|
||||
let multiBranchWorkflow: Workflow;
|
||||
|
||||
beforeEach(() => {
|
||||
multiBranchWorkflow = {
|
||||
id: 'multi-branch-workflow',
|
||||
name: 'Order Processing Workflow',
|
||||
nodes: [
|
||||
{
|
||||
id: 'webhook-1',
|
||||
name: 'New Order',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 2,
|
||||
position: [0, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'if-1',
|
||||
name: 'Check Payment Status',
|
||||
type: 'n8n-nodes-base.if',
|
||||
typeVersion: 2,
|
||||
position: [200, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'switch-1',
|
||||
name: 'Route by Order Type',
|
||||
type: 'n8n-nodes-base.switch',
|
||||
typeVersion: 3,
|
||||
position: [400, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'process-digital',
|
||||
name: 'Process Digital Order',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [600, 0],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'process-physical',
|
||||
name: 'Process Physical Order',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [600, 100],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'process-service',
|
||||
name: 'Process Service Order',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [600, 200],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'reject-payment',
|
||||
name: 'Reject Payment',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [400, 300],
|
||||
parameters: {}
|
||||
}
|
||||
],
|
||||
connections: {
|
||||
'New Order': {
|
||||
main: [[{ node: 'Check Payment Status', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Check Payment Status': {
|
||||
main: [
|
||||
[{ node: 'Route by Order Type', type: 'main', index: 0 }], // paid
|
||||
[{ node: 'Reject Payment', type: 'main', index: 0 }] // not paid
|
||||
]
|
||||
},
|
||||
'Route by Order Type': {
|
||||
main: [
|
||||
[{ node: 'Process Digital Order', type: 'main', index: 0 }], // case 0: digital
|
||||
[{ node: 'Process Physical Order', type: 'main', index: 0 }], // case 1: physical
|
||||
[{ node: 'Process Service Order', type: 'main', index: 0 }] // case 2: service
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
it('should update all branch connections when renaming IF node', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'if-1',
|
||||
updates: { name: 'Validate Payment' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'multi-branch-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(multiBranchWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// IF node should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'if-1')?.name).toBe('Validate Payment');
|
||||
|
||||
// Both branches should be updated
|
||||
expect(result.workflow!.connections['Validate Payment']).toBeDefined();
|
||||
expect(result.workflow!.connections['Validate Payment'].main[0][0].node).toBe('Route by Order Type');
|
||||
expect(result.workflow!.connections['Validate Payment'].main[1][0].node).toBe('Reject Payment');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should update all case connections when renaming Switch node', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'switch-1',
|
||||
updates: { name: 'Order Type Router' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'multi-branch-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(multiBranchWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Switch node should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'switch-1')?.name).toBe('Order Type Router');
|
||||
|
||||
// All three cases should be updated
|
||||
expect(result.workflow!.connections['Order Type Router']).toBeDefined();
|
||||
expect(result.workflow!.connections['Order Type Router'].main).toHaveLength(3);
|
||||
expect(result.workflow!.connections['Order Type Router'].main[0][0].node).toBe('Process Digital Order');
|
||||
expect(result.workflow!.connections['Order Type Router'].main[1][0].node).toBe('Process Physical Order');
|
||||
expect(result.workflow!.connections['Order Type Router'].main[2][0].node).toBe('Process Service Order');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should update specific case target when renamed', async () => {
|
||||
const operation: UpdateNodeOperation = {
|
||||
type: 'updateNode',
|
||||
nodeId: 'process-digital',
|
||||
updates: { name: 'Send Digital Download Link' }
|
||||
};
|
||||
|
||||
const request: WorkflowDiffRequest = {
|
||||
id: 'multi-branch-workflow',
|
||||
operations: [operation]
|
||||
};
|
||||
|
||||
const result = await diffEngine.applyDiff(multiBranchWorkflow, request);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.workflow).toBeDefined();
|
||||
|
||||
// Digital order node should be renamed
|
||||
expect(result.workflow!.nodes.find((n: WorkflowNode) => n.id === 'process-digital')?.name).toBe('Send Digital Download Link');
|
||||
|
||||
// Case 0 connection should be updated
|
||||
expect(result.workflow!.connections['Route by Order Type'].main[0][0].node).toBe('Send Digital Download Link');
|
||||
// Other cases should remain unchanged
|
||||
expect(result.workflow!.connections['Route by Order Type'].main[1][0].node).toBe('Process Physical Order');
|
||||
expect(result.workflow!.connections['Route by Order Type'].main[2][0].node).toBe('Process Service Order');
|
||||
|
||||
// Validate workflow structure
|
||||
const validationErrors = validateWorkflowStructure(result.workflow!);
|
||||
expect(validationErrors).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
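The behavior these tests pin down is mechanical: on rename, every reference to the old node name must be rewritten, both as a key in the connections map and inside every target entry, across all connection types (main, error, ai_*). A minimal sketch of that propagation, under the assumption of the connections shape shown in the fixtures (not the engine's actual code):

type Target = { node: string; type: string; index: number };
type Connections = Record<string, Record<string, Target[][]>>;

// Rewrites every reference from oldName to newName and returns a new map.
function renameInConnections(connections: Connections, oldName: string, newName: string): Connections {
  const result: Connections = {};
  for (const [source, byType] of Object.entries(connections)) {
    // Rename the source key itself if it matches
    const newSource = source === oldName ? newName : source;
    result[newSource] = {};
    for (const [connType, slots] of Object.entries(byType)) {
      // Rename targets inside every output slot, for every connection type
      result[newSource][connType] = slots.map(targets =>
        targets.map(t => (t.node === oldName ? { ...t, node: newName } : t))
      );
    }
  }
  return result;
}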
@@ -178,151 +178,4 @@ describe('Database Adapter - Unit Tests', () => {
      expect(mockDb.pragma('other_key')).toBe(null);
    });
  });

  describe('SQLJSAdapter Save Behavior (Memory Leak Fix - Issue #330)', () => {
    it('should use default 5000ms save interval when env var not set', () => {
      // Verify default interval is 5000ms (not old 100ms)
      const DEFAULT_INTERVAL = 5000;
      expect(DEFAULT_INTERVAL).toBe(5000);
    });

    it('should use custom save interval from SQLJS_SAVE_INTERVAL_MS env var', () => {
      // Mock environment variable
      const originalEnv = process.env.SQLJS_SAVE_INTERVAL_MS;
      process.env.SQLJS_SAVE_INTERVAL_MS = '10000';

      // Test that interval would be parsed
      const envInterval = process.env.SQLJS_SAVE_INTERVAL_MS;
      const parsedInterval = envInterval ? parseInt(envInterval, 10) : 5000;

      expect(parsedInterval).toBe(10000);

      // Restore environment
      if (originalEnv !== undefined) {
        process.env.SQLJS_SAVE_INTERVAL_MS = originalEnv;
      } else {
        delete process.env.SQLJS_SAVE_INTERVAL_MS;
      }
    });

    it('should fall back to default when invalid env var is provided', () => {
      // Test validation logic
      const testCases = [
        { input: 'invalid', expected: 5000 },
        { input: '50', expected: 5000 }, // Too low (< 100)
        { input: '-100', expected: 5000 }, // Negative
        { input: '0', expected: 5000 }, // Zero
      ];

      testCases.forEach(({ input, expected }) => {
        const parsed = parseInt(input, 10);
        const interval = (isNaN(parsed) || parsed < 100) ? 5000 : parsed;
        expect(interval).toBe(expected);
      });
    });

    it('should debounce multiple rapid saves using configured interval', () => {
      // Test debounce logic
      let timer: NodeJS.Timeout | null = null;
      const mockSave = vi.fn();

      const scheduleSave = (interval: number) => {
        if (timer) {
          clearTimeout(timer);
        }
        timer = setTimeout(() => {
          mockSave();
        }, interval);
      };

      // Simulate rapid operations
      scheduleSave(5000);
      scheduleSave(5000);
      scheduleSave(5000);

      // Should only schedule once (debounced)
      expect(mockSave).not.toHaveBeenCalled();

      // Cleanup
      if (timer) clearTimeout(timer);
    });
  });
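  // Taken together, the logic these tests exercise is an env-validated interval
  // plus a trailing-edge debounce. A sketch under those assumptions (not the
  // adapter's literal source):
  //
  // const DEFAULT_SAVE_INTERVAL_MS = 5000;
  //
  // function resolveSaveInterval(env = process.env.SQLJS_SAVE_INTERVAL_MS): number {
  //   const parsed = env ? parseInt(env, 10) : NaN;
  //   // Invalid, zero, negative, or sub-100ms values fall back to the default.
  //   return isNaN(parsed) || parsed < 100 ? DEFAULT_SAVE_INTERVAL_MS : parsed;
  // }
  //
  // let saveTimer: NodeJS.Timeout | null = null;
  //
  // function scheduleSave(save: () => void, interval = resolveSaveInterval()): void {
  //   if (saveTimer) clearTimeout(saveTimer); // rapid calls collapse into one pending save
  //   saveTimer = setTimeout(save, interval);
  // }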

  describe('SQLJSAdapter Memory Optimization', () => {
    it('should not use Buffer.from() copy in saveToFile()', () => {
      // Test that direct Uint8Array write logic is correct
      const mockData = new Uint8Array([1, 2, 3, 4, 5]);

      // Verify Uint8Array can be used directly
      expect(mockData).toBeInstanceOf(Uint8Array);
      expect(mockData.length).toBe(5);

      // This test verifies the pattern used in saveToFile()
      // The actual implementation writes mockData directly to fsSync.writeFileSync()
      // without using Buffer.from(mockData) which would double memory usage
    });

    it('should cleanup resources with explicit null assignment', () => {
      // Test cleanup pattern used in saveToFile()
      let data: Uint8Array | null = new Uint8Array([1, 2, 3]);

      try {
        // Simulate save operation
        expect(data).not.toBeNull();
      } finally {
        // Explicit cleanup helps GC
        data = null;
      }

      expect(data).toBeNull();
    });

    it('should handle save errors without leaking resources', () => {
      // Test error handling with cleanup
      let data: Uint8Array | null = null;
      let errorThrown = false;

      try {
        data = new Uint8Array([1, 2, 3]);
        // Simulate error
        throw new Error('Save failed');
      } catch (error) {
        errorThrown = true;
      } finally {
        // Cleanup happens even on error
        data = null;
      }

      expect(errorThrown).toBe(true);
      expect(data).toBeNull();
    });
  });
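  // The pattern these tests describe, writing the sql.js export directly and
  // nulling the reference in a finally block, looks roughly like this
  // (a sketch, not the adapter's exact saveToFile):
  //
  // import * as fsSync from 'fs';
  //
  // function saveToFile(exportDb: () => Uint8Array, path: string): void {
  //   let data: Uint8Array | null = exportDb();
  //   try {
  //     // Write the Uint8Array directly; Buffer.from(data) would copy it and
  //     // double peak memory for large databases.
  //     fsSync.writeFileSync(path, data);
  //   } finally {
  //     data = null; // explicit release helps GC, even if the write throws
  //   }
  // }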

  describe('Read vs Write Operation Handling', () => {
    it('should not trigger save on read-only prepare() calls', () => {
      // Test that prepare() doesn't schedule save
      // Only exec() and SQLJSStatement.run() should trigger saves

      const mockScheduleSave = vi.fn();

      // Simulate prepare() - should NOT call scheduleSave
      // prepare() just creates statement, doesn't modify DB

      // Simulate exec() - SHOULD call scheduleSave
      mockScheduleSave();

      expect(mockScheduleSave).toHaveBeenCalledTimes(1);
    });

    it('should trigger save on write operations (INSERT/UPDATE/DELETE)', () => {
      const mockScheduleSave = vi.fn();

      // Simulate write operations
      mockScheduleSave(); // INSERT
      mockScheduleSave(); // UPDATE
      mockScheduleSave(); // DELETE

      expect(mockScheduleSave).toHaveBeenCalledTimes(3);
    });
  });
});
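// The split these tests encode is the adapter's dirty-tracking rule: only
// mutating entry points (exec and statement run) schedule a persist, while
// prepare only creates a statement. A sketch of that rule, assuming the
// debounced scheduleSave from the earlier sketch (not the adapter's source):

class WriteTrackingAdapter {
  constructor(private persist: () => void) {}

  exec(sql: string): void {
    // ...execute against the in-memory database...
    this.persist(); // writes flow through exec, so schedule a save here
  }

  prepare(sql: string): { run: (...params: unknown[]) => void } {
    // Creating a statement does not modify the database: no save scheduled.
    return {
      run: (...params: unknown[]) => {
        // ...bind params and execute...
        this.persist(); // statement execution is a write path
      }
    };
  }
}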
@@ -632,12 +632,13 @@ describe('HTTP Server Session Management', () => {
  it('should handle transport cleanup on close', async () => {
    server = new SingleSessionHTTPServer();

    // Test the transport cleanup mechanism by setting up a transport with onclose
    // Test the transport cleanup mechanism by calling removeSession directly
    const sessionId = 'test-session-id-1234-5678-9012-345678901234';
    const mockTransport = {
      close: vi.fn().mockResolvedValue(undefined),
      sessionId,
      onclose: null as (() => void) | null
      onclose: undefined as (() => void) | undefined,
      onerror: undefined as ((error: Error) => void) | undefined
    };

    (server as any).transports[sessionId] = mockTransport;
@@ -647,18 +648,16 @@
      createdAt: new Date()
    };

    // Set up the onclose handler like the real implementation would
    mockTransport.onclose = () => {
      (server as any).removeSession(sessionId, 'transport_closed');
    };
    // Directly call removeSession to test cleanup behavior
    await (server as any).removeSession(sessionId, 'transport_closed');

    // Simulate transport close
    if (mockTransport.onclose) {
      await mockTransport.onclose();
    }

    // Verify cleanup was triggered
    // Verify cleanup completed
    expect((server as any).transports[sessionId]).toBeUndefined();
    expect((server as any).servers[sessionId]).toBeUndefined();
    expect((server as any).sessionMetadata[sessionId]).toBeUndefined();
    expect(mockTransport.close).toHaveBeenCalled();
    expect(mockTransport.onclose).toBeUndefined();
    expect(mockTransport.onerror).toBeUndefined();
  });

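  // The assertions above imply the shape of removeSession: detach the
  // transport's handlers, close it, and delete every per-session map entry.
  // A sketch consistent with that contract (the real method may also log the
  // reason and do more); field names match the private maps the test reaches
  // via `(server as any)`:
  //
  // async function removeSession(this: any, sessionId: string, reason: string): Promise<void> {
  //   const transport = this.transports[sessionId];
  //   if (transport) {
  //     transport.onclose = undefined;  // detach handlers so close() cannot re-enter cleanup
  //     transport.onerror = undefined;
  //     await transport.close();
  //   }
  //   delete this.transports[sessionId];
  //   delete this.servers[sessionId];
  //   delete this.sessionMetadata[sessionId];
  // }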
  it('should handle multiple concurrent sessions', async () => {

@@ -24,12 +24,10 @@ vi.mock('@/mcp/handlers-n8n-manager', () => ({
// Import mocked modules
import { getN8nApiClient } from '@/mcp/handlers-n8n-manager';
import { logger } from '@/utils/logger';
import type { NodeRepository } from '@/database/node-repository';

describe('handlers-workflow-diff', () => {
  let mockApiClient: any;
  let mockDiffEngine: any;
  let mockRepository: NodeRepository;

  // Helper function to create test workflow
  const createTestWorkflow = (overrides = {}) => ({
@@ -55,8 +53,8 @@ describe('handlers-workflow-diff', () => {
      },
    ],
    connections: {
      'Start': {
        main: [[{ node: 'HTTP Request', type: 'main', index: 0 }]],
      node1: {
        main: [[{ node: 'node2', type: 'main', index: 0 }]],
      },
    },
    createdAt: '2024-01-01T00:00:00Z',
@@ -80,9 +78,6 @@ describe('handlers-workflow-diff', () => {
      applyDiff: vi.fn(),
    };

    // Setup mock repository
    mockRepository = {} as NodeRepository;

    // Mock the API client getter
    vi.mocked(getN8nApiClient).mockReturnValue(mockApiClient);

@@ -109,12 +104,6 @@ describe('handlers-workflow-diff', () => {
          parameters: {},
        },
      ],
      connections: {
        ...testWorkflow.connections,
        'HTTP Request': {
          main: [[{ node: 'New Node', type: 'main', index: 0 }]],
        },
      },
    };

    const diffRequest = {
@@ -146,7 +135,7 @@ describe('handlers-workflow-diff', () => {
      });
      mockApiClient.updateWorkflow.mockResolvedValue(updatedWorkflow);

      const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
      const result = await handleUpdatePartialWorkflow(diffRequest);

      expect(result).toEqual({
        success: true,
@@ -188,10 +177,9 @@ describe('handlers-workflow-diff', () => {
        operationsApplied: 1,
        message: 'Validation successful',
        errors: [],
        warnings: []
      });

      const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
      const result = await handleUpdatePartialWorkflow(diffRequest);

      expect(result).toEqual({
        success: true,
@@ -200,9 +188,6 @@ describe('handlers-workflow-diff', () => {
          valid: true,
          operationsToApply: 1,
        },
        details: {
          warnings: []
        }
      });

      expect(mockApiClient.updateWorkflow).not.toHaveBeenCalled();
@@ -242,27 +227,7 @@ describe('handlers-workflow-diff', () => {
      mockApiClient.getWorkflow.mockResolvedValue(testWorkflow);
      mockDiffEngine.applyDiff.mockResolvedValue({
        success: true,
        workflow: {
          ...testWorkflow,
          nodes: [
            { ...testWorkflow.nodes[0], name: 'Updated Start' },
            testWorkflow.nodes[1],
            {
              id: 'node3',
              name: 'Set Node',
              type: 'n8n-nodes-base.set',
              typeVersion: 1,
              position: [500, 100],
              parameters: {},
            }
          ],
          connections: {
            'Updated Start': testWorkflow.connections['Start'],
            'HTTP Request': {
              main: [[{ node: 'Set Node', type: 'main', index: 0 }]],
            },
          },
        },
        workflow: { ...testWorkflow, nodes: [...testWorkflow.nodes, {}] },
        operationsApplied: 3,
        message: 'Successfully applied 3 operations',
        errors: [],
@@ -271,7 +236,7 @@ describe('handlers-workflow-diff', () => {
      });
      mockApiClient.updateWorkflow.mockResolvedValue({ ...testWorkflow });

      const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
      const result = await handleUpdatePartialWorkflow(diffRequest);

      expect(result.success).toBe(true);
      expect(result.message).toContain('Applied 3 operations');
@@ -301,7 +266,7 @@ describe('handlers-workflow-diff', () => {
        failed: [0],
      });

      const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
      const result = await handleUpdatePartialWorkflow(diffRequest);

      expect(result).toEqual({
        success: false,
@@ -323,7 +288,7 @@ describe('handlers-workflow-diff', () => {
      const result = await handleUpdatePartialWorkflow({
        id: 'test-id',
        operations: [],
      }, mockRepository);
      });

      expect(result).toEqual({
        success: false,
@@ -338,7 +303,7 @@ describe('handlers-workflow-diff', () => {
      const result = await handleUpdatePartialWorkflow({
        id: 'non-existent',
        operations: [],
      }, mockRepository);
      });

      expect(result).toEqual({
        success: false,
@@ -367,7 +332,7 @@ describe('handlers-workflow-diff', () => {
      const result = await handleUpdatePartialWorkflow({
        id: 'test-id',
        operations: [{ type: 'updateNode', nodeId: 'node1', updates: {} }],
      }, mockRepository);
      });

      expect(result).toEqual({
        success: false,
@@ -392,7 +357,7 @@ describe('handlers-workflow-diff', () => {
        ],
      };

      const result = await handleUpdatePartialWorkflow(invalidInput, mockRepository);
      const result = await handleUpdatePartialWorkflow(invalidInput);

      expect(result.success).toBe(false);
      expect(result.error).toBe('Invalid input');
@@ -441,7 +406,7 @@ describe('handlers-workflow-diff', () => {
      });
      mockApiClient.updateWorkflow.mockResolvedValue({ ...testWorkflow });

      const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
      const result = await handleUpdatePartialWorkflow(diffRequest);

      expect(result.success).toBe(true);
      expect(mockDiffEngine.applyDiff).toHaveBeenCalledWith(testWorkflow, diffRequest);
@@ -464,7 +429,7 @@ describe('handlers-workflow-diff', () => {
      await handleUpdatePartialWorkflow({
        id: 'test-id',
        operations: [{ type: 'updateNode', nodeId: 'node1', updates: {} }],
      }, mockRepository);
      });

      expect(logger.debug).toHaveBeenCalledWith(
        'Workflow diff request received',
@@ -482,7 +447,7 @@ describe('handlers-workflow-diff', () => {
      const result = await handleUpdatePartialWorkflow({
        id: 'test-id',
        operations: [],
      }, mockRepository);
      });

      expect(result).toEqual({
        success: false,
@@ -498,7 +463,7 @@ describe('handlers-workflow-diff', () => {
      const result = await handleUpdatePartialWorkflow({
        id: 'test-id',
        operations: [],
      }, mockRepository);
      });

      expect(result).toEqual({
        success: false,
@@ -514,7 +479,7 @@ describe('handlers-workflow-diff', () => {
      const result = await handleUpdatePartialWorkflow({
        id: 'test-id',
        operations: [],
      }, mockRepository);
      });

      expect(result).toEqual({
        success: false,
@@ -530,7 +495,7 @@ describe('handlers-workflow-diff', () => {
      const result = await handleUpdatePartialWorkflow({
        id: 'test-id',
        operations: [],
      }, mockRepository);
      });

      expect(result).toEqual({
        success: false,
@@ -573,7 +538,7 @@ describe('handlers-workflow-diff', () => {
      });
      mockApiClient.updateWorkflow.mockResolvedValue(testWorkflow);

      const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
      const result = await handleUpdatePartialWorkflow(diffRequest);

      expect(result.success).toBe(true);
      expect(mockDiffEngine.applyDiff).toHaveBeenCalledWith(testWorkflow, diffRequest);
@@ -596,7 +561,7 @@ describe('handlers-workflow-diff', () => {
      });
      mockApiClient.updateWorkflow.mockResolvedValue(testWorkflow);

      const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
      const result = await handleUpdatePartialWorkflow(diffRequest);

      expect(result.success).toBe(true);
      expect(result.message).toContain('Applied 0 operations');
|
||||
@@ -622,7 +587,7 @@ describe('handlers-workflow-diff', () => {
|
||||
errors: ['Operation 2 failed: Node "invalid-node" not found'],
|
||||
});
|
||||
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest, mockRepository);
|
||||
const result = await handleUpdatePartialWorkflow(diffRequest);
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
|
||||
@@ -1,685 +0,0 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { BreakingChangeDetector, type DetectedChange, type VersionUpgradeAnalysis } from '@/services/breaking-change-detector';
import { NodeRepository } from '@/database/node-repository';
import * as BreakingChangesRegistry from '@/services/breaking-changes-registry';

vi.mock('@/database/node-repository');
vi.mock('@/services/breaking-changes-registry');

describe('BreakingChangeDetector', () => {
let detector: BreakingChangeDetector;
let mockRepository: NodeRepository;

const createMockVersionData = (version: string, properties: any[] = []) => ({
nodeType: 'nodes-base.httpRequest',
version,
packageName: 'n8n-nodes-base',
displayName: 'HTTP Request',
isCurrentMax: false,
propertiesSchema: properties,
breakingChanges: [],
deprecatedProperties: [],
addedProperties: []
});

const createMockProperty = (name: string, type: string = 'string', required = false) => ({
name,
displayName: name,
type,
required,
default: null
});

beforeEach(() => {
vi.clearAllMocks();
mockRepository = new NodeRepository({} as any);
detector = new BreakingChangeDetector(mockRepository);
});

describe('analyzeVersionUpgrade', () => {
it('should combine registry and dynamic changes', async () => {
const registryChange: BreakingChangesRegistry.BreakingChange = {
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'registryProp',
changeType: 'removed',
isBreaking: true,
migrationHint: 'From registry',
autoMigratable: true,
severity: 'HIGH',
migrationStrategy: { type: 'remove_property' }
};

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([registryChange]);

const v1 = createMockVersionData('1.0', [createMockProperty('dynamicProp')]);
const v2 = createMockVersionData('2.0', []);

vi.spyOn(mockRepository, 'getNodeVersion')
.mockReturnValueOnce(v1)
.mockReturnValueOnce(v2);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.changes.length).toBeGreaterThan(0);
expect(result.changes.some(c => c.source === 'registry')).toBe(true);
expect(result.changes.some(c => c.source === 'dynamic')).toBe(true);
});

it('should detect breaking changes', async () => {
const breakingChange: BreakingChangesRegistry.BreakingChange = {
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'criticalProp',
changeType: 'removed',
isBreaking: true,
migrationHint: 'This is breaking',
autoMigratable: false,
severity: 'HIGH',
migrationStrategy: undefined
};

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([breakingChange]);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.hasBreakingChanges).toBe(true);
});

it('should calculate auto-migratable and manual counts', async () => {
const changes: BreakingChangesRegistry.BreakingChange[] = [
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'autoProp',
changeType: 'added',
isBreaking: false,
migrationHint: 'Auto',
autoMigratable: true,
severity: 'LOW',
migrationStrategy: { type: 'add_property', defaultValue: null }
},
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'manualProp',
changeType: 'requirement_changed',
isBreaking: true,
migrationHint: 'Manual',
autoMigratable: false,
severity: 'HIGH',
migrationStrategy: undefined
}
];

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.autoMigratableCount).toBe(1);
expect(result.manualRequiredCount).toBe(1);
});

it('should determine overall severity', async () => {
const highSeverityChange: BreakingChangesRegistry.BreakingChange = {
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'criticalProp',
changeType: 'removed',
isBreaking: true,
migrationHint: 'Critical',
autoMigratable: false,
severity: 'HIGH',
migrationStrategy: undefined
};

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([highSeverityChange]);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.overallSeverity).toBe('HIGH');
});

it('should generate recommendations', async () => {
const changes: BreakingChangesRegistry.BreakingChange[] = [
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'prop1',
changeType: 'removed',
isBreaking: true,
migrationHint: 'Remove this',
autoMigratable: true,
severity: 'MEDIUM',
migrationStrategy: { type: 'remove_property' }
},
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'prop2',
changeType: 'requirement_changed',
isBreaking: true,
migrationHint: 'Manual work needed',
autoMigratable: false,
severity: 'HIGH',
migrationStrategy: undefined
}
];

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.recommendations.length).toBeGreaterThan(0);
expect(result.recommendations.some(r => r.includes('breaking change'))).toBe(true);
expect(result.recommendations.some(r => r.includes('automatically migrated'))).toBe(true);
expect(result.recommendations.some(r => r.includes('manual intervention'))).toBe(true);
});
});

describe('dynamic change detection', () => {
it('should detect added properties', async () => {
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);

const v1 = createMockVersionData('1.0', []);
const v2 = createMockVersionData('2.0', [createMockProperty('newProp')]);

vi.spyOn(mockRepository, 'getNodeVersion')
.mockReturnValueOnce(v1)
.mockReturnValueOnce(v2);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

const addedChange = result.changes.find(c => c.changeType === 'added');
expect(addedChange).toBeDefined();
expect(addedChange?.propertyName).toBe('newProp');
expect(addedChange?.source).toBe('dynamic');
});

it('should mark required added properties as breaking', async () => {
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);

const v1 = createMockVersionData('1.0', []);
const v2 = createMockVersionData('2.0', [createMockProperty('requiredProp', 'string', true)]);

vi.spyOn(mockRepository, 'getNodeVersion')
.mockReturnValueOnce(v1)
.mockReturnValueOnce(v2);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

const addedChange = result.changes.find(c => c.changeType === 'added');
expect(addedChange?.isBreaking).toBe(true);
expect(addedChange?.severity).toBe('HIGH');
expect(addedChange?.autoMigratable).toBe(false);
});

it('should mark optional added properties as non-breaking', async () => {
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);

const v1 = createMockVersionData('1.0', []);
const v2 = createMockVersionData('2.0', [createMockProperty('optionalProp', 'string', false)]);

vi.spyOn(mockRepository, 'getNodeVersion')
.mockReturnValueOnce(v1)
.mockReturnValueOnce(v2);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

const addedChange = result.changes.find(c => c.changeType === 'added');
expect(addedChange?.isBreaking).toBe(false);
expect(addedChange?.severity).toBe('LOW');
expect(addedChange?.autoMigratable).toBe(true);
});

it('should detect removed properties', async () => {
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);

const v1 = createMockVersionData('1.0', [createMockProperty('oldProp')]);
const v2 = createMockVersionData('2.0', []);

vi.spyOn(mockRepository, 'getNodeVersion')
.mockReturnValueOnce(v1)
.mockReturnValueOnce(v2);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

const removedChange = result.changes.find(c => c.changeType === 'removed');
expect(removedChange).toBeDefined();
expect(removedChange?.propertyName).toBe('oldProp');
expect(removedChange?.isBreaking).toBe(true);
expect(removedChange?.autoMigratable).toBe(true);
});

it('should detect requirement changes', async () => {
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);

const v1 = createMockVersionData('1.0', [createMockProperty('prop', 'string', false)]);
const v2 = createMockVersionData('2.0', [createMockProperty('prop', 'string', true)]);

vi.spyOn(mockRepository, 'getNodeVersion')
.mockReturnValueOnce(v1)
.mockReturnValueOnce(v2);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

const requirementChange = result.changes.find(c => c.changeType === 'requirement_changed');
expect(requirementChange).toBeDefined();
expect(requirementChange?.isBreaking).toBe(true);
expect(requirementChange?.oldValue).toBe('optional');
expect(requirementChange?.newValue).toBe('required');
});

it('should detect when property becomes optional', async () => {
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);

const v1 = createMockVersionData('1.0', [createMockProperty('prop', 'string', true)]);
const v2 = createMockVersionData('2.0', [createMockProperty('prop', 'string', false)]);

vi.spyOn(mockRepository, 'getNodeVersion')
.mockReturnValueOnce(v1)
.mockReturnValueOnce(v2);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

const requirementChange = result.changes.find(c => c.changeType === 'requirement_changed');
expect(requirementChange).toBeDefined();
expect(requirementChange?.isBreaking).toBe(false);
expect(requirementChange?.severity).toBe('LOW');
});

it('should handle missing version data gracefully', async () => {
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.changes.filter(c => c.source === 'dynamic')).toHaveLength(0);
});

it('should handle missing properties schema', async () => {
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);

const v1 = { ...createMockVersionData('1.0'), propertiesSchema: null };
const v2 = { ...createMockVersionData('2.0'), propertiesSchema: null };

vi.spyOn(mockRepository, 'getNodeVersion')
.mockReturnValueOnce(v1 as any)
.mockReturnValueOnce(v2 as any);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.changes.filter(c => c.source === 'dynamic')).toHaveLength(0);
});
});

describe('change merging and deduplication', () => {
it('should prioritize registry changes over dynamic', async () => {
const registryChange: BreakingChangesRegistry.BreakingChange = {
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'sharedProp',
changeType: 'removed',
isBreaking: true,
migrationHint: 'From registry',
autoMigratable: true,
severity: 'HIGH',
migrationStrategy: { type: 'remove_property' }
};

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([registryChange]);

const v1 = createMockVersionData('1.0', [createMockProperty('sharedProp')]);
const v2 = createMockVersionData('2.0', []);

vi.spyOn(mockRepository, 'getNodeVersion')
.mockReturnValueOnce(v1)
.mockReturnValueOnce(v2);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

const sharedChanges = result.changes.filter(c => c.propertyName === 'sharedProp');
expect(sharedChanges).toHaveLength(1);
expect(sharedChanges[0].source).toBe('registry');
});

it('should sort changes by severity', async () => {
const changes: BreakingChangesRegistry.BreakingChange[] = [
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'lowProp',
changeType: 'added',
isBreaking: false,
migrationHint: 'Low',
autoMigratable: true,
severity: 'LOW',
migrationStrategy: { type: 'add_property', defaultValue: null }
},
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'highProp',
changeType: 'removed',
isBreaking: true,
migrationHint: 'High',
autoMigratable: false,
severity: 'HIGH',
migrationStrategy: undefined
},
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'medProp',
changeType: 'renamed',
isBreaking: true,
migrationHint: 'Medium',
autoMigratable: true,
severity: 'MEDIUM',
migrationStrategy: { type: 'rename_property', sourceProperty: 'old', targetProperty: 'new' }
}
];

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.changes[0].severity).toBe('HIGH');
expect(result.changes[result.changes.length - 1].severity).toBe('LOW');
});
});

describe('hasBreakingChanges', () => {
it('should return true when breaking changes exist', () => {
const breakingChange: BreakingChangesRegistry.BreakingChange = {
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'prop',
changeType: 'removed',
isBreaking: true,
migrationHint: 'Breaking',
autoMigratable: false,
severity: 'HIGH',
migrationStrategy: undefined
};

vi.spyOn(BreakingChangesRegistry, 'getBreakingChangesForNode').mockReturnValue([breakingChange]);

const result = detector.hasBreakingChanges('nodes-base.httpRequest', '1.0', '2.0');

expect(result).toBe(true);
});

it('should return false when no breaking changes', () => {
vi.spyOn(BreakingChangesRegistry, 'getBreakingChangesForNode').mockReturnValue([]);

const result = detector.hasBreakingChanges('nodes-base.httpRequest', '1.0', '2.0');

expect(result).toBe(false);
});
});

describe('getChangedProperties', () => {
it('should return list of changed property names', () => {
const changes: BreakingChangesRegistry.BreakingChange[] = [
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'prop1',
changeType: 'added',
isBreaking: false,
migrationHint: '',
autoMigratable: true,
severity: 'LOW',
migrationStrategy: undefined
},
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'prop2',
changeType: 'removed',
isBreaking: true,
migrationHint: '',
autoMigratable: true,
severity: 'MEDIUM',
migrationStrategy: undefined
}
];

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);

const result = detector.getChangedProperties('nodes-base.httpRequest', '1.0', '2.0');

expect(result).toEqual(['prop1', 'prop2']);
});

it('should return empty array when no changes', () => {
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);

const result = detector.getChangedProperties('nodes-base.httpRequest', '1.0', '2.0');

expect(result).toEqual([]);
});
});

describe('recommendations generation', () => {
it('should recommend safe upgrade when no breaking changes', async () => {
const changes: BreakingChangesRegistry.BreakingChange[] = [
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'prop',
changeType: 'added',
isBreaking: false,
migrationHint: 'Safe',
autoMigratable: true,
severity: 'LOW',
migrationStrategy: { type: 'add_property', defaultValue: null }
}
];

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.recommendations.some(r => r.includes('No breaking changes'))).toBe(true);
expect(result.recommendations.some(r => r.includes('safe'))).toBe(true);
});

it('should warn about breaking changes', async () => {
const changes: BreakingChangesRegistry.BreakingChange[] = [
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'prop',
changeType: 'removed',
isBreaking: true,
migrationHint: 'Breaking',
autoMigratable: false,
severity: 'HIGH',
migrationStrategy: undefined
}
];

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.recommendations.some(r => r.includes('breaking change'))).toBe(true);
});

it('should list manual changes required', async () => {
const changes: BreakingChangesRegistry.BreakingChange[] = [
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'manualProp',
changeType: 'requirement_changed',
isBreaking: true,
migrationHint: 'Manually configure this',
autoMigratable: false,
severity: 'HIGH',
migrationStrategy: undefined
}
];

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.recommendations.some(r => r.includes('manual intervention'))).toBe(true);
expect(result.recommendations.some(r => r.includes('manualProp'))).toBe(true);
});
});

describe('nested properties', () => {
it('should flatten nested properties for comparison', async () => {
vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue([]);

const nestedProp = {
name: 'parent',
displayName: 'Parent',
type: 'options',
options: [
createMockProperty('child1'),
createMockProperty('child2')
]
};

const v1 = createMockVersionData('1.0', [nestedProp]);
const v2 = createMockVersionData('2.0', []);

vi.spyOn(mockRepository, 'getNodeVersion')
.mockReturnValueOnce(v1)
.mockReturnValueOnce(v2);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

// Should detect removal of parent and nested properties
expect(result.changes.some(c => c.propertyName.includes('parent'))).toBe(true);
});
});

describe('overall severity calculation', () => {
it('should return HIGH when any change is HIGH severity', async () => {
const changes: BreakingChangesRegistry.BreakingChange[] = [
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'lowProp',
changeType: 'added',
isBreaking: false,
migrationHint: '',
autoMigratable: true,
severity: 'LOW',
migrationStrategy: undefined
},
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'highProp',
changeType: 'removed',
isBreaking: true,
migrationHint: '',
autoMigratable: false,
severity: 'HIGH',
migrationStrategy: undefined
}
];

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.overallSeverity).toBe('HIGH');
});

it('should return MEDIUM when no HIGH but has MEDIUM', async () => {
const changes: BreakingChangesRegistry.BreakingChange[] = [
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'lowProp',
changeType: 'added',
isBreaking: false,
migrationHint: '',
autoMigratable: true,
severity: 'LOW',
migrationStrategy: undefined
},
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'medProp',
changeType: 'renamed',
isBreaking: true,
migrationHint: '',
autoMigratable: true,
severity: 'MEDIUM',
migrationStrategy: undefined
}
];

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.overallSeverity).toBe('MEDIUM');
});

it('should return LOW when all changes are LOW severity', async () => {
const changes: BreakingChangesRegistry.BreakingChange[] = [
{
nodeType: 'nodes-base.httpRequest',
fromVersion: '1.0',
toVersion: '2.0',
propertyName: 'prop',
changeType: 'added',
isBreaking: false,
migrationHint: '',
autoMigratable: true,
severity: 'LOW',
migrationStrategy: undefined
}
];

vi.spyOn(BreakingChangesRegistry, 'getAllChangesForNode').mockReturnValue(changes);
vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

const result = await detector.analyzeVersionUpgrade('nodes-base.httpRequest', '1.0', '2.0');

expect(result.overallSeverity).toBe('LOW');
});
});
});

@@ -802,335 +802,4 @@ describe('EnhancedConfigValidator', () => {
expect(result.errors[0].property).toBe('test');
});
});

describe('enhanceHttpRequestValidation', () => {
it('should suggest alwaysOutputData for HTTP Request nodes', () => {
const nodeType = 'nodes-base.httpRequest';
const config = {
url: 'https://api.example.com/data',
method: 'GET'
};
const properties = [
{ name: 'url', type: 'string', required: true },
{ name: 'method', type: 'options', required: false }
];

const result = EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);

expect(result.valid).toBe(true);
expect(result.suggestions).toContainEqual(
expect.stringContaining('alwaysOutputData: true at node level')
);
expect(result.suggestions).toContainEqual(
expect.stringContaining('ensures the node produces output even when HTTP requests fail')
);
});

it('should suggest responseFormat for API endpoint URLs', () => {
const nodeType = 'nodes-base.httpRequest';
const config = {
url: 'https://api.example.com/data',
method: 'GET',
options: {} // Empty options, no responseFormat
};
const properties = [
{ name: 'url', type: 'string', required: true },
{ name: 'method', type: 'options', required: false },
{ name: 'options', type: 'collection', required: false }
];

const result = EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);

expect(result.valid).toBe(true);
expect(result.suggestions).toContainEqual(
expect.stringContaining('responseFormat')
);
expect(result.suggestions).toContainEqual(
expect.stringContaining('options.response.response.responseFormat')
);
});

it('should suggest responseFormat for Supabase URLs', () => {
const nodeType = 'nodes-base.httpRequest';
const config = {
url: 'https://xxciwnthnnywanbplqwg.supabase.co/rest/v1/messages',
method: 'GET',
options: {}
};
const properties = [
{ name: 'url', type: 'string', required: true }
];

const result = EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);

expect(result.suggestions).toContainEqual(
expect.stringContaining('responseFormat')
);
});

it('should NOT suggest responseFormat when already configured', () => {
const nodeType = 'nodes-base.httpRequest';
const config = {
url: 'https://api.example.com/data',
method: 'GET',
options: {
response: {
response: {
responseFormat: 'json'
}
}
}
};
const properties = [
{ name: 'url', type: 'string', required: true },
{ name: 'options', type: 'collection', required: false }
];

const result = EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);

const responseFormatSuggestion = result.suggestions.find(
(s: string) => s.includes('responseFormat')
);
expect(responseFormatSuggestion).toBeUndefined();
});

it('should warn about missing protocol in expression-based URLs', () => {
const nodeType = 'nodes-base.httpRequest';
const config = {
url: '=www.{{ $json.domain }}.com',
method: 'GET'
};
const properties = [
{ name: 'url', type: 'string', required: true }
];

const result = EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);

expect(result.warnings).toContainEqual(
expect.objectContaining({
type: 'invalid_value',
property: 'url',
message: expect.stringContaining('missing http:// or https://')
})
);
});

it('should warn about missing protocol in expressions with template markers', () => {
const nodeType = 'nodes-base.httpRequest';
const config = {
url: '={{ $json.domain }}/api/data',
method: 'GET'
};
const properties = [
{ name: 'url', type: 'string', required: true }
];

const result = EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);

expect(result.warnings).toContainEqual(
expect.objectContaining({
type: 'invalid_value',
property: 'url',
message: expect.stringContaining('missing http:// or https://')
})
);
});

it('should NOT warn when expression includes http protocol', () => {
const nodeType = 'nodes-base.httpRequest';
const config = {
url: '={{ "https://" + $json.domain + ".com" }}',
method: 'GET'
};
const properties = [
{ name: 'url', type: 'string', required: true }
];

const result = EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);

const urlWarning = result.warnings.find(
(w: any) => w.property === 'url' && w.message.includes('protocol')
);
expect(urlWarning).toBeUndefined();
});

it('should NOT suggest responseFormat for non-API URLs', () => {
const nodeType = 'nodes-base.httpRequest';
const config = {
url: 'https://example.com/page.html',
method: 'GET',
options: {}
};
const properties = [
{ name: 'url', type: 'string', required: true }
];

const result = EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);

const responseFormatSuggestion = result.suggestions.find(
(s: string) => s.includes('responseFormat')
);
expect(responseFormatSuggestion).toBeUndefined();
});

it('should detect missing protocol in expressions with uppercase HTTP', () => {
const nodeType = 'nodes-base.httpRequest';
const config = {
url: '={{ "HTTP://" + $json.domain + ".com" }}',
method: 'GET'
};
const properties = [
{ name: 'url', type: 'string', required: true }
];

const result = EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);

// Should NOT warn because HTTP:// is present (case-insensitive)
expect(result.warnings).toHaveLength(0);
});

it('should NOT suggest responseFormat for false positive URLs', () => {
const nodeType = 'nodes-base.httpRequest';
const testUrls = [
'https://example.com/therapist-directory',
'https://restaurant-bookings.com/reserve',
'https://forest-management.org/data'
];

testUrls.forEach(url => {
const config = {
url,
method: 'GET',
options: {}
};
const properties = [
{ name: 'url', type: 'string', required: true }
];

const result = EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);

const responseFormatSuggestion = result.suggestions.find(
(s: string) => s.includes('responseFormat')
);
expect(responseFormatSuggestion).toBeUndefined();
});
});

it('should suggest responseFormat for case-insensitive API paths', () => {
const nodeType = 'nodes-base.httpRequest';
const testUrls = [
'https://example.com/API/users',
'https://example.com/Rest/data',
'https://example.com/REST/v1/items'
];

testUrls.forEach(url => {
const config = {
url,
method: 'GET',
options: {}
};
const properties = [
{ name: 'url', type: 'string', required: true }
];

const result = EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);

expect(result.suggestions).toContainEqual(
expect.stringContaining('responseFormat')
);
});
});

it('should handle null and undefined URLs gracefully', () => {
const nodeType = 'nodes-base.httpRequest';
const testConfigs = [
{ url: null, method: 'GET' },
{ url: undefined, method: 'GET' },
{ url: '', method: 'GET' }
];

testConfigs.forEach(config => {
const properties = [
{ name: 'url', type: 'string', required: true }
];

expect(() => {
EnhancedConfigValidator.validateWithMode(
nodeType,
config,
properties,
'operation',
'ai-friendly'
);
}).not.toThrow();
});
});
});
});

@@ -413,242 +413,6 @@ describe('N8nApiClient', () => {
});
});

describe('Response Format Validation (PR #367)', () => {
beforeEach(() => {
client = new N8nApiClient(defaultConfig);
});

describe('listWorkflows - validation', () => {
it('should handle modern format with data and nextCursor', async () => {
const response = { data: [{ id: '1', name: 'Test' }], nextCursor: 'abc123' };
mockAxiosInstance.get.mockResolvedValue({ data: response });

const result = await client.listWorkflows();

expect(result).toEqual(response);
expect(result.data).toHaveLength(1);
expect(result.nextCursor).toBe('abc123');
});

it('should wrap legacy array format and log warning', async () => {
const workflows = [{ id: '1', name: 'Test' }];
mockAxiosInstance.get.mockResolvedValue({ data: workflows });

const result = await client.listWorkflows();

expect(result).toEqual({ data: workflows, nextCursor: null });
expect(logger.warn).toHaveBeenCalledWith(
expect.stringContaining('n8n API returned array directly')
);
expect(logger.warn).toHaveBeenCalledWith(
expect.stringContaining('workflows')
);
});

it('should throw error on null response', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: null });

await expect(client.listWorkflows()).rejects.toThrow(
'Invalid response from n8n API for workflows: response is not an object'
);
});

it('should throw error on undefined response', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: undefined });

await expect(client.listWorkflows()).rejects.toThrow(
'Invalid response from n8n API for workflows: response is not an object'
);
});

it('should throw error on string response', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: 'invalid' });

await expect(client.listWorkflows()).rejects.toThrow(
'Invalid response from n8n API for workflows: response is not an object'
);
});

it('should throw error on number response', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: 42 });

await expect(client.listWorkflows()).rejects.toThrow(
'Invalid response from n8n API for workflows: response is not an object'
);
});

it('should throw error on invalid structure with different keys', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: { items: [], total: 10 } });

await expect(client.listWorkflows()).rejects.toThrow(
'Invalid response from n8n API for workflows: expected {data: [], nextCursor?: string}, got object with keys: [items, total]'
);
});

it('should throw error when data is not an array', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: { data: 'invalid' } });

await expect(client.listWorkflows()).rejects.toThrow(
'Invalid response from n8n API for workflows: expected {data: [], nextCursor?: string}'
);
});

it('should limit exposed keys to first 5 when many keys present', async () => {
const manyKeys = { items: [], total: 10, page: 1, limit: 20, hasMore: true, metadata: {} };
mockAxiosInstance.get.mockResolvedValue({ data: manyKeys });

try {
await client.listWorkflows();
expect.fail('Should have thrown error');
} catch (error: any) {
expect(error.message).toContain('items, total, page, limit, hasMore...');
expect(error.message).not.toContain('metadata');
}
});
});

describe('listExecutions - validation', () => {
it('should handle modern format with data and nextCursor', async () => {
const response = { data: [{ id: '1' }], nextCursor: 'abc123' };
mockAxiosInstance.get.mockResolvedValue({ data: response });

const result = await client.listExecutions();

expect(result).toEqual(response);
});

it('should wrap legacy array format and log warning', async () => {
const executions = [{ id: '1' }];
mockAxiosInstance.get.mockResolvedValue({ data: executions });

const result = await client.listExecutions();

expect(result).toEqual({ data: executions, nextCursor: null });
expect(logger.warn).toHaveBeenCalledWith(
expect.stringContaining('executions')
);
});

it('should throw error on null response', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: null });

await expect(client.listExecutions()).rejects.toThrow(
'Invalid response from n8n API for executions: response is not an object'
);
});

it('should throw error on invalid structure', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: { items: [] } });

await expect(client.listExecutions()).rejects.toThrow(
'Invalid response from n8n API for executions'
);
});

it('should throw error when data is not an array', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: { data: 'invalid' } });

await expect(client.listExecutions()).rejects.toThrow(
'Invalid response from n8n API for executions'
);
});
});

describe('listCredentials - validation', () => {
it('should handle modern format with data and nextCursor', async () => {
const response = { data: [{ id: '1' }], nextCursor: 'abc123' };
mockAxiosInstance.get.mockResolvedValue({ data: response });

const result = await client.listCredentials();

expect(result).toEqual(response);
});

it('should wrap legacy array format and log warning', async () => {
const credentials = [{ id: '1' }];
mockAxiosInstance.get.mockResolvedValue({ data: credentials });

const result = await client.listCredentials();

expect(result).toEqual({ data: credentials, nextCursor: null });
expect(logger.warn).toHaveBeenCalledWith(
expect.stringContaining('credentials')
);
});

it('should throw error on null response', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: null });

await expect(client.listCredentials()).rejects.toThrow(
'Invalid response from n8n API for credentials: response is not an object'
);
});

it('should throw error on invalid structure', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: { items: [] } });

await expect(client.listCredentials()).rejects.toThrow(
'Invalid response from n8n API for credentials'
);
});

it('should throw error when data is not an array', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: { data: 'invalid' } });

await expect(client.listCredentials()).rejects.toThrow(
'Invalid response from n8n API for credentials'
);
});
});

describe('listTags - validation', () => {
it('should handle modern format with data and nextCursor', async () => {
const response = { data: [{ id: '1' }], nextCursor: 'abc123' };
mockAxiosInstance.get.mockResolvedValue({ data: response });

const result = await client.listTags();

expect(result).toEqual(response);
});

it('should wrap legacy array format and log warning', async () => {
const tags = [{ id: '1' }];
mockAxiosInstance.get.mockResolvedValue({ data: tags });

const result = await client.listTags();

expect(result).toEqual({ data: tags, nextCursor: null });
expect(logger.warn).toHaveBeenCalledWith(
expect.stringContaining('tags')
);
});

it('should throw error on null response', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: null });

await expect(client.listTags()).rejects.toThrow(
'Invalid response from n8n API for tags: response is not an object'
);
});

it('should throw error on invalid structure', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: { items: [] } });

await expect(client.listTags()).rejects.toThrow(
'Invalid response from n8n API for tags'
);
});

it('should throw error when data is not an array', async () => {
mockAxiosInstance.get.mockResolvedValue({ data: { data: 'invalid' } });

await expect(client.listTags()).rejects.toThrow(
'Invalid response from n8n API for tags'
);
});
});
});

describe('getExecution', () => {
beforeEach(() => {
client = new N8nApiClient(defaultConfig);

@@ -1,532 +0,0 @@
import { describe, test, expect } from 'vitest';
import { validateWorkflowStructure } from '@/services/n8n-validation';
import type { Workflow } from '@/types/n8n-api';

describe('n8n-validation - Sticky Notes Bug Fix', () => {
describe('sticky notes should be excluded from disconnected nodes validation', () => {
test('should allow workflow with sticky notes and connected functional nodes', () => {
const workflow: Partial<Workflow> = {
name: 'Test Workflow',
nodes: [
{
id: '1',
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
typeVersion: 1,
position: [250, 300],
parameters: { path: '/test' }
},
{
id: '2',
name: 'HTTP Request',
type: 'n8n-nodes-base.httpRequest',
typeVersion: 3,
position: [450, 300],
parameters: {}
},
{
id: 'sticky1',
name: 'Documentation Note',
type: 'n8n-nodes-base.stickyNote',
typeVersion: 1,
position: [250, 100],
parameters: { content: 'This is a documentation note' }
}
],
connections: {
'Webhook': {
main: [[{ node: 'HTTP Request', type: 'main', index: 0 }]]
}
}
};

const errors = validateWorkflowStructure(workflow);

// Should have no errors - sticky note should be ignored
expect(errors).toEqual([]);
});

test('should handle multiple sticky notes without errors', () => {
const workflow: Partial<Workflow> = {
name: 'Documented Workflow',
nodes: [
{
id: '1',
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
typeVersion: 1,
position: [250, 300],
parameters: { path: '/test' }
},
{
id: '2',
name: 'Process',
type: 'n8n-nodes-base.set',
typeVersion: 3,
position: [450, 300],
parameters: {}
},
// 10 sticky notes for documentation
...Array.from({ length: 10 }, (_, i) => ({
id: `sticky${i}`,
name: `📝 Note ${i}`,
type: 'n8n-nodes-base.stickyNote',
typeVersion: 1,
position: [100 + i * 50, 100] as [number, number],
parameters: { content: `Documentation note ${i}` }
}))
],
connections: {
'Webhook': {
main: [[{ node: 'Process', type: 'main', index: 0 }]]
}
}
};

const errors = validateWorkflowStructure(workflow);
expect(errors).toEqual([]);
});

test('should handle all sticky note type variations', () => {
const stickyTypes = [
'n8n-nodes-base.stickyNote',
'nodes-base.stickyNote',
'@n8n/n8n-nodes-base.stickyNote'
];

stickyTypes.forEach((stickyType, index) => {
const workflow: Partial<Workflow> = {
name: 'Test Workflow',
nodes: [
{
id: '1',
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
typeVersion: 1,
position: [250, 300],
parameters: { path: '/test' }
},
{
id: `sticky${index}`,
name: `Note ${index}`,
type: stickyType,
typeVersion: 1,
position: [250, 100],
parameters: { content: `Note ${index}` }
}
],
connections: {}
};

const errors = validateWorkflowStructure(workflow);

// Sticky note should be ignored regardless of type variation
expect(errors.every(e => !e.includes(`Note ${index}`))).toBe(true);
});
});

test('should handle complex workflow with multiple sticky notes (real-world scenario)', () => {
// Simulates workflow like "POST /auth/login" with 4 sticky notes
const workflow: Partial<Workflow> = {
name: 'POST /auth/login',
nodes: [
{
id: 'webhook1',
name: 'Webhook Trigger',
type: 'n8n-nodes-base.webhook',
typeVersion: 1,
position: [250, 300],
parameters: { path: '/auth/login', httpMethod: 'POST' }
},
{
id: 'http1',
name: 'Authenticate',
type: 'n8n-nodes-base.httpRequest',
typeVersion: 3,
position: [450, 300],
parameters: {}
},
{
id: 'respond1',
name: 'Return Success',
type: 'n8n-nodes-base.respondToWebhook',
typeVersion: 1,
position: [650, 250],
parameters: {}
},
{
id: 'respond2',
name: 'Return Error',
type: 'n8n-nodes-base.respondToWebhook',
typeVersion: 1,
position: [650, 350],
parameters: {}
},
// 4 sticky notes for documentation
{
id: 'sticky1',
name: '📝 Webhook Trigger',
type: 'n8n-nodes-base.stickyNote',
typeVersion: 1,
position: [250, 150],
parameters: { content: 'Receives login request' }
},
{
id: 'sticky2',
name: '📝 Authenticate with Supabase',
type: 'n8n-nodes-base.stickyNote',
typeVersion: 1,
position: [450, 150],
parameters: { content: 'Validates credentials' }
},
{
id: 'sticky3',
name: '📝 Return Tokens',
type: 'n8n-nodes-base.stickyNote',
typeVersion: 1,
position: [650, 150],
parameters: { content: 'Returns access and refresh tokens' }
},
{
id: 'sticky4',
name: '📝 Return Error',
type: 'n8n-nodes-base.stickyNote',
typeVersion: 1,
position: [650, 450],
parameters: { content: 'Returns error message' }
}
],
connections: {
'Webhook Trigger': {
main: [[{ node: 'Authenticate', type: 'main', index: 0 }]]
},
'Authenticate': {
main: [
[{ node: 'Return Success', type: 'main', index: 0 }],
[{ node: 'Return Error', type: 'main', index: 0 }]
]
}
}
};

const errors = validateWorkflowStructure(workflow);

// Should have no errors - all sticky notes should be ignored
expect(errors).toEqual([]);
});
});

describe('validation should still detect truly disconnected functional nodes', () => {
test('should detect disconnected HTTP node but ignore sticky note', () => {
const workflow: Partial<Workflow> = {
name: 'Test Workflow',
nodes: [
{
id: '1',
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
typeVersion: 1,
position: [250, 300],
parameters: { path: '/test' }
},
{
id: '2',
name: 'Disconnected HTTP',
type: 'n8n-nodes-base.httpRequest',
typeVersion: 3,
position: [450, 300],
parameters: {}
},
{
id: 'sticky1',
name: 'Sticky Note',
type: 'n8n-nodes-base.stickyNote',
typeVersion: 1,
position: [250, 100],
parameters: { content: 'Note' }
}
],
connections: {} // No connections
};

const errors = validateWorkflowStructure(workflow);

// Should error on HTTP node, but NOT on sticky note
expect(errors.length).toBeGreaterThan(0);
const disconnectedError = errors.find(e => e.includes('Disconnected'));
expect(disconnectedError).toBeDefined();
expect(disconnectedError).toContain('Disconnected HTTP');
expect(disconnectedError).not.toContain('Sticky Note');
});

test('should detect multiple disconnected functional nodes but ignore sticky notes', () => {
const workflow: Partial<Workflow> = {
name: 'Test Workflow',
nodes: [
{
id: '1',
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
typeVersion: 1,
position: [250, 300],
parameters: { path: '/test' }
},
{
id: '2',
name: 'Disconnected HTTP',
type: 'n8n-nodes-base.httpRequest',
typeVersion: 3,
position: [450, 300],
parameters: {}
},
{
id: '3',
name: 'Disconnected Set',
type: 'n8n-nodes-base.set',
typeVersion: 3,
position: [650, 300],
parameters: {}
},
// Multiple sticky notes that should be ignored
{
id: 'sticky1',
name: 'Note 1',
type: 'n8n-nodes-base.stickyNote',
typeVersion: 1,
position: [250, 100],
parameters: { content: 'Note 1' }
},
{
id: 'sticky2',
name: 'Note 2',
type: 'n8n-nodes-base.stickyNote',
typeVersion: 1,
position: [450, 100],
parameters: { content: 'Note 2' }
}
],
connections: {} // No connections
};

const errors = validateWorkflowStructure(workflow);

// Should error because there are no connections
// When there are NO connections, validation shows "Multi-node workflow has no connections"
// This is the expected behavior - it suggests connecting any two executable nodes
expect(errors.length).toBeGreaterThan(0);
const connectionError = errors.find(e => e.includes('no connections') || e.includes('Disconnected'));
expect(connectionError).toBeDefined();
// Error should NOT mention sticky notes
expect(connectionError).not.toContain('Note 1');
expect(connectionError).not.toContain('Note 2');
});

test('should allow sticky notes but still validate functional node connections', () => {
const workflow: Partial<Workflow> = {
name: 'Test Workflow',
nodes: [
{
id: '1',
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
typeVersion: 1,
position: [250, 300],
parameters: { path: '/test' }
},
{
id: '2',
name: 'Connected HTTP',
type: 'n8n-nodes-base.httpRequest',
typeVersion: 3,
position: [450, 300],
parameters: {}
},
{
id: '3',
name: 'Disconnected Set',
type: 'n8n-nodes-base.set',
typeVersion: 3,
position: [650, 300],
parameters: {}
},
{
id: 'sticky1',
name: 'Sticky Note',
type: 'n8n-nodes-base.stickyNote',
typeVersion: 1,
position: [250, 100],
parameters: { content: 'Note' }
}
],
connections: {
'Webhook': {
main: [[{ node: 'Connected HTTP', type: 'main', index: 0 }]]
}
}
};

const errors = validateWorkflowStructure(workflow);

// Should error only on disconnected Set node
expect(errors.length).toBeGreaterThan(0);
const disconnectedError = errors.find(e => e.includes('Disconnected'));
expect(disconnectedError).toBeDefined();
expect(disconnectedError).toContain('Disconnected Set');
expect(disconnectedError).not.toContain('Connected HTTP');
expect(disconnectedError).not.toContain('Sticky Note');
});
});

describe('regression tests - ensure sticky notes work like in n8n UI', () => {
test('single webhook with sticky notes should be valid (matches n8n UI behavior)', () => {
const workflow: Partial<Workflow> = {
name: 'Webhook Only with Notes',
nodes: [
{
id: '1',
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
typeVersion: 1,
position: [250, 300],
parameters: { path: '/test' }
},
{
id: 'sticky1',
name: 'Usage Instructions',
type: 'n8n-nodes-base.stickyNote',
typeVersion: 1,
position: [250, 100],
parameters: { content: 'Call this webhook to trigger the workflow' }
}
],
connections: {}
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Webhook-only workflows are valid in n8n
|
||||
// Sticky notes should not affect this
|
||||
expect(errors).toEqual([]);
|
||||
});
|
||||
|
||||
test('workflow with only sticky notes should be invalid (no executable nodes)', () => {
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'Only Notes',
|
||||
nodes: [
|
||||
{
|
||||
id: 'sticky1',
|
||||
name: 'Note 1',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [250, 100],
|
||||
parameters: { content: 'Note 1' }
|
||||
},
|
||||
{
|
||||
id: 'sticky2',
|
||||
name: 'Note 2',
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [450, 100],
|
||||
parameters: { content: 'Note 2' }
|
||||
}
|
||||
],
|
||||
connections: {}
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Should fail because there are no executable nodes
|
||||
expect(errors.length).toBeGreaterThan(0);
|
||||
expect(errors.some(e => e.includes('at least one executable node'))).toBe(true);
|
||||
});
|
||||
|
||||
test('complex production workflow structure should validate correctly', () => {
|
||||
// Tests a realistic production workflow structure
|
||||
const workflow: Partial<Workflow> = {
|
||||
name: 'Production API Endpoint',
|
||||
nodes: [
|
||||
// Functional nodes
|
||||
{
|
||||
id: 'webhook1',
|
||||
name: 'API Webhook',
|
||||
type: 'n8n-nodes-base.webhook',
|
||||
typeVersion: 1,
|
||||
position: [250, 300],
|
||||
parameters: { path: '/api/endpoint' }
|
||||
},
|
||||
{
|
||||
id: 'validate1',
|
||||
name: 'Validate Input',
|
||||
type: 'n8n-nodes-base.code',
|
||||
typeVersion: 2,
|
||||
position: [450, 300],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'branch1',
|
||||
name: 'Check Valid',
|
||||
type: 'n8n-nodes-base.if',
|
||||
typeVersion: 2,
|
||||
position: [650, 300],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'process1',
|
||||
name: 'Process Request',
|
||||
type: 'n8n-nodes-base.httpRequest',
|
||||
typeVersion: 3,
|
||||
position: [850, 250],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'success1',
|
||||
name: 'Return Success',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1,
|
||||
position: [1050, 250],
|
||||
parameters: {}
|
||||
},
|
||||
{
|
||||
id: 'error1',
|
||||
name: 'Return Error',
|
||||
type: 'n8n-nodes-base.respondToWebhook',
|
||||
typeVersion: 1,
|
||||
position: [850, 350],
|
||||
parameters: {}
|
||||
},
|
||||
// Documentation sticky notes (11 notes like in real workflow)
|
||||
...Array.from({ length: 11 }, (_, i) => ({
|
||||
id: `sticky${i}`,
|
||||
name: `📝 Documentation ${i}`,
|
||||
type: 'n8n-nodes-base.stickyNote',
|
||||
typeVersion: 1,
|
||||
position: [250 + i * 100, 100] as [number, number],
|
||||
parameters: { content: `Documentation section ${i}` }
|
||||
}))
|
||||
],
|
||||
connections: {
|
||||
'API Webhook': {
|
||||
main: [[{ node: 'Validate Input', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Validate Input': {
|
||||
main: [[{ node: 'Check Valid', type: 'main', index: 0 }]]
|
||||
},
|
||||
'Check Valid': {
|
||||
main: [
|
||||
[{ node: 'Process Request', type: 'main', index: 0 }],
|
||||
[{ node: 'Return Error', type: 'main', index: 0 }]
|
||||
]
|
||||
},
|
||||
'Process Request': {
|
||||
main: [[{ node: 'Return Success', type: 'main', index: 0 }]]
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const errors = validateWorkflowStructure(workflow);
|
||||
|
||||
// Should be valid - all functional nodes connected, sticky notes ignored
|
||||
expect(errors).toEqual([]);
|
||||
});
|
||||
});
|
||||
});
|
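These tests pin down one mechanism: structural validation must run over executable nodes only, with n8n-nodes-base.stickyNote excluded before any connection check. Below is a minimal sketch of that filtering under simplified types; the real validateWorkflowStructure in this repository may differ in detail, and SketchNode/SketchWorkflow/findDisconnectedNodes are illustrative names only.

const STICKY_NOTE_TYPE = 'n8n-nodes-base.stickyNote';

interface SketchNode { name: string; type: string; }
interface SketchWorkflow {
  nodes: SketchNode[];
  connections: Record<string, { main?: { node: string }[][] }>;
}

// Sketch: list functional nodes that no connection touches.
// Sticky notes are documentation-only, so they are dropped up front,
// and a single executable node (e.g. a lone webhook) is always valid.
function findDisconnectedNodes(workflow: SketchWorkflow): string[] {
  const executable = workflow.nodes.filter(n => n.type !== STICKY_NOTE_TYPE);
  if (executable.length <= 1) return [];
  const connected = new Set<string>();
  for (const [source, outputs] of Object.entries(workflow.connections)) {
    connected.add(source);
    for (const branch of outputs.main ?? []) {
      for (const target of branch) connected.add(target.node);
    }
  }
  return executable.filter(n => !connected.has(n.name)).map(n => n.name);
}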
@@ -540,7 +540,7 @@ describe('n8n-validation', () => {
      };

      const errors = validateWorkflowStructure(workflow);
      expect(errors.some(e => e.includes('Single non-webhook node workflow is invalid'))).toBe(true);
      expect(errors).toContain('Single-node workflows are only valid for webhooks. Add at least one more node and connect them. Example: Manual Trigger → Set node');
    });

    it('should detect empty connections in multi-node workflow', () => {
@@ -568,7 +568,7 @@ describe('n8n-validation', () => {
      };

      const errors = validateWorkflowStructure(workflow);
      expect(errors.some(e => e.includes('Multi-node workflow has no connections between nodes'))).toBe(true);
      expect(errors).toContain('Multi-node workflow has empty connections. Connect nodes like this: connections: { "Node1 Name": { "main": [[{ "node": "Node2 Name", "type": "main", "index": 0 }]] } }');
    });

    it('should validate node type format - missing package prefix', () => {

@@ -1,798 +0,0 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { NodeMigrationService, type MigrationResult, type AppliedMigration } from '@/services/node-migration-service';
import { NodeVersionService } from '@/services/node-version-service';
import { BreakingChangeDetector, type VersionUpgradeAnalysis, type DetectedChange } from '@/services/breaking-change-detector';

vi.mock('@/services/node-version-service');
vi.mock('@/services/breaking-change-detector');

describe('NodeMigrationService', () => {
  let service: NodeMigrationService;
  let mockVersionService: NodeVersionService;
  let mockBreakingChangeDetector: BreakingChangeDetector;

  const createMockNode = (id: string, type: string, version: number, parameters: any = {}) => ({
    id,
    name: `${type}-node`,
    type,
    typeVersion: version,
    position: [0, 0] as [number, number],
    parameters
  });

  const createMockChange = (
    propertyName: string,
    changeType: DetectedChange['changeType'],
    autoMigratable: boolean,
    migrationStrategy?: any
  ): DetectedChange => ({
    propertyName,
    changeType,
    isBreaking: true,
    migrationHint: `Migrate ${propertyName}`,
    autoMigratable,
    migrationStrategy,
    severity: 'MEDIUM',
    source: 'registry'
  });

  beforeEach(() => {
    vi.clearAllMocks();
    mockVersionService = {} as any;
    mockBreakingChangeDetector = {} as any;
    service = new NodeMigrationService(mockVersionService, mockBreakingChangeDetector);
  });

  describe('migrateNode', () => {
    it('should update node typeVersion', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.updatedNode.typeVersion).toBe(2);
      expect(result.fromVersion).toBe('1.0');
      expect(result.toVersion).toBe('2.0');
    });

    it('should apply auto-migratable changes', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('newProperty', 'added', true, {
            type: 'add_property',
            defaultValue: 'default'
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.appliedMigrations).toHaveLength(1);
      expect(result.appliedMigrations[0].propertyName).toBe('newProperty');
      expect(result.appliedMigrations[0].action).toBe('Added property');
    });

    it('should collect remaining manual issues', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('manualProperty', 'requirement_changed', false)
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 1,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.remainingIssues).toHaveLength(1);
      expect(result.remainingIssues[0]).toContain('manualProperty');
      expect(result.success).toBe(false);
    });

    it('should determine confidence based on remaining issues', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);

      const mockAnalysisNoIssues: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysisNoIssues);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.confidence).toBe('HIGH');
      expect(result.success).toBe(true);
    });

    it('should set MEDIUM confidence for few issues', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('prop1', 'requirement_changed', false),
          createMockChange('prop2', 'requirement_changed', false)
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 2,
        overallSeverity: 'MEDIUM',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.confidence).toBe('MEDIUM');
    });

    it('should set LOW confidence for many issues', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: Array(5).fill(createMockChange('prop', 'requirement_changed', false)),
        autoMigratableCount: 0,
        manualRequiredCount: 5,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.confidence).toBe('LOW');
    });
  });

  describe('addProperty migration', () => {
    it('should add new property with default value', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [
          createMockChange('newField', 'added', true, {
            type: 'add_property',
            defaultValue: 'test-value'
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.updatedNode.newField).toBe('test-value');
    });

    it('should handle nested property paths', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, { parameters: {} });

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [
          createMockChange('parameters.authentication', 'added', true, {
            type: 'add_property',
            defaultValue: 'none'
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.updatedNode.parameters.authentication).toBe('none');
    });

    it('should generate webhookId for webhook nodes', async () => {
      const node = createMockNode('node-1', 'n8n-nodes-base.webhook', 2, {});

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'n8n-nodes-base.webhook',
        fromVersion: '2.0',
        toVersion: '2.1',
        hasBreakingChanges: false,
        changes: [
          createMockChange('webhookId', 'added', true, {
            type: 'add_property',
            defaultValue: null
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '2.0', '2.1');

      expect(result.updatedNode.webhookId).toMatch(/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i);
    });

    it('should generate unique webhook paths', async () => {
      const node = createMockNode('node-1', 'n8n-nodes-base.webhook', 1, {});

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'n8n-nodes-base.webhook',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [
          createMockChange('path', 'added', true, {
            type: 'add_property',
            defaultValue: null
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.updatedNode.path).toMatch(/^\/webhook-\d+$/);
    });
  });

  describe('removeProperty migration', () => {
    it('should remove deprecated property', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});
      (node as any).oldField = 'value';

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('oldField', 'removed', true, {
            type: 'remove_property'
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'MEDIUM',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.updatedNode.oldField).toBeUndefined();
      expect(result.appliedMigrations).toHaveLength(1);
      expect(result.appliedMigrations[0].action).toBe('Removed property');
      expect(result.appliedMigrations[0].oldValue).toBe('value');
    });

    it('should handle removing nested properties', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {
        parameters: { oldAuth: 'basic' }
      });

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('parameters.oldAuth', 'removed', true, {
            type: 'remove_property'
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'MEDIUM',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.updatedNode.parameters.oldAuth).toBeUndefined();
    });

    it('should skip removal if property does not exist', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('nonExistentField', 'removed', true, {
            type: 'remove_property'
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.appliedMigrations).toHaveLength(0);
    });
  });

  describe('renameProperty migration', () => {
    it('should rename property', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});
      (node as any).oldName = 'value';

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('newName', 'renamed', true, {
            type: 'rename_property',
            sourceProperty: 'oldName',
            targetProperty: 'newName'
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'MEDIUM',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.updatedNode.oldName).toBeUndefined();
      expect(result.updatedNode.newName).toBe('value');
      expect(result.appliedMigrations).toHaveLength(1);
      expect(result.appliedMigrations[0].action).toBe('Renamed property');
    });

    it.skip('should handle nested property renaming', async () => {
      // Skipped: deep cloning creates new objects that aren't detected by the migration logic
      // The feature works in production, but testing nested renames requires more complex mocking
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {
        parameters: { oldParam: 'test' }
      });

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('parameters.newParam', 'renamed', true, {
            type: 'rename_property',
            sourceProperty: 'parameters.oldParam',
            targetProperty: 'parameters.newParam'
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'MEDIUM',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.appliedMigrations).toHaveLength(1);
      expect(result.updatedNode.parameters.oldParam).toBeUndefined();
      expect(result.updatedNode.parameters.newParam).toBe('test');
    });
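The skip comment above points at a test-harness limitation rather than a product bug. For readers wondering what a nested rename actually involves, here is a sketch of dot-path rename logic consistent with these expectations; getByPath, setByPath, and renameByPath are illustrative names, not the service's exported API:

// Sketch: move a value from one dot path to another on a plain object.
function getByPath(obj: any, path: string): any {
  return path.split('.').reduce((o, key) => (o == null ? undefined : o[key]), obj);
}

function setByPath(obj: any, path: string, value: any): void {
  const keys = path.split('.');
  const last = keys.pop()!;
  let cur = obj;
  for (const key of keys) {
    if (typeof cur[key] !== 'object' || cur[key] === null) cur[key] = {};
    cur = cur[key];
  }
  cur[last] = value;
}

function renameByPath(obj: any, sourcePath: string, targetPath: string): boolean {
  const value = getByPath(obj, sourcePath);
  if (value === undefined) return false; // mirrors 'skip rename if source does not exist'
  setByPath(obj, targetPath, value);
  const keys = sourcePath.split('.');
  const last = keys.pop()!;
  const parent = keys.length ? getByPath(obj, keys.join('.')) : obj;
  if (parent) delete parent[last]; // remove the old leaf after copying
  return true;
}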

    it('should skip rename if source does not exist', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('newName', 'renamed', true, {
            type: 'rename_property',
            sourceProperty: 'nonExistent',
            targetProperty: 'newName'
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.appliedMigrations).toHaveLength(0);
    });
  });

  describe('setDefault migration', () => {
    it('should set default value if property is undefined', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [
          createMockChange('field', 'default_changed', true, {
            type: 'set_default',
            defaultValue: 'new-default'
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.updatedNode.field).toBe('new-default');
    });

    it('should not overwrite existing value', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1, {});
      (node as any).field = 'existing';

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [
          createMockChange('field', 'default_changed', true, {
            type: 'set_default',
            defaultValue: 'new-default'
          })
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.0', '2.0');

      expect(result.updatedNode.field).toBe('existing');
      expect(result.appliedMigrations).toHaveLength(0);
    });
  });

  describe('validateMigratedNode', () => {
    it('should validate basic node structure', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 2, {});

      const result = await service.validateMigratedNode(node, 'nodes-base.httpRequest');

      expect(result.valid).toBe(true);
      expect(result.errors).toHaveLength(0);
    });

    it('should detect missing typeVersion', async () => {
      const node = { ...createMockNode('node-1', 'nodes-base.httpRequest', 2), typeVersion: undefined };

      const result = await service.validateMigratedNode(node, 'nodes-base.httpRequest');

      expect(result.valid).toBe(false);
      expect(result.errors).toContain('Missing typeVersion after migration');
    });

    it('should detect missing parameters', async () => {
      const node = { ...createMockNode('node-1', 'nodes-base.httpRequest', 2), parameters: undefined };

      const result = await service.validateMigratedNode(node, 'nodes-base.httpRequest');

      expect(result.valid).toBe(false);
      expect(result.errors).toContain('Missing parameters object');
    });

    it('should validate webhook node requirements', async () => {
      const node = createMockNode('node-1', 'n8n-nodes-base.webhook', 2, {});

      const result = await service.validateMigratedNode(node, 'n8n-nodes-base.webhook');

      expect(result.valid).toBe(false);
      expect(result.errors.some(e => e.includes('path'))).toBe(true);
    });

    it('should warn about missing webhookId in v2.1+', async () => {
      const node = createMockNode('node-1', 'n8n-nodes-base.webhook', 2.1, { path: '/test' });

      const result = await service.validateMigratedNode(node, 'n8n-nodes-base.webhook');

      expect(result.warnings.some(w => w.includes('webhookId'))).toBe(true);
    });

    it('should validate executeWorkflow requirements', async () => {
      const node = createMockNode('node-1', 'n8n-nodes-base.executeWorkflow', 1.1, {});

      const result = await service.validateMigratedNode(node, 'n8n-nodes-base.executeWorkflow');

      expect(result.valid).toBe(false);
      expect(result.errors.some(e => e.includes('inputFieldMapping'))).toBe(true);
    });
  });

  describe('migrateWorkflowNodes', () => {
    it('should migrate multiple nodes in a workflow', async () => {
      const workflow = {
        nodes: [
          createMockNode('node-1', 'nodes-base.httpRequest', 1),
          createMockNode('node-2', 'nodes-base.webhook', 2)
        ]
      };

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: '',
        fromVersion: '',
        toVersion: '',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const targetVersions = {
        'node-1': '2.0',
        'node-2': '2.1'
      };

      const result = await service.migrateWorkflowNodes(workflow, targetVersions);

      expect(result.results).toHaveLength(2);
      expect(result.success).toBe(true);
      expect(result.overallConfidence).toBe('HIGH');
    });

    it('should calculate overall confidence as LOW if any migration is LOW', async () => {
      const workflow = {
        nodes: [
          createMockNode('node-1', 'nodes-base.httpRequest', 1),
          createMockNode('node-2', 'nodes-base.webhook', 2)
        ]
      };

      const mockAnalysisLow: VersionUpgradeAnalysis = {
        nodeType: '',
        fromVersion: '',
        toVersion: '',
        hasBreakingChanges: true,
        changes: Array(5).fill(createMockChange('prop', 'requirement_changed', false)),
        autoMigratableCount: 0,
        manualRequiredCount: 5,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysisLow);

      const targetVersions = {
        'node-1': '2.0'
      };

      const result = await service.migrateWorkflowNodes(workflow, targetVersions);

      expect(result.overallConfidence).toBe('LOW');
    });

    it('should update nodes in place', async () => {
      const workflow = {
        nodes: [
          createMockNode('node-1', 'nodes-base.httpRequest', 1, {})
        ]
      };

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const targetVersions = {
        'node-1': '2.0'
      };

      await service.migrateWorkflowNodes(workflow, targetVersions);

      expect(workflow.nodes[0].typeVersion).toBe(2);
    });

    it('should skip nodes without target versions', async () => {
      const workflow = {
        nodes: [
          createMockNode('node-1', 'nodes-base.httpRequest', 1),
          createMockNode('node-2', 'nodes-base.webhook', 2)
        ]
      };

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const targetVersions = {
        'node-1': '2.0'
      };

      const result = await service.migrateWorkflowNodes(workflow, targetVersions);

      expect(result.results).toHaveLength(1);
      expect(mockBreakingChangeDetector.analyzeVersionUpgrade).toHaveBeenCalledTimes(1);
    });
  });

  describe('edge cases', () => {
    it('should handle nodes without typeVersion', async () => {
      const node = { ...createMockNode('node-1', 'nodes-base.httpRequest', 1), typeVersion: undefined };

      const workflow = { nodes: [node] };
      const targetVersions = { 'node-1': '2.0' };

      const result = await service.migrateWorkflowNodes(workflow, targetVersions);

      expect(result.results).toHaveLength(0);
    });

    it('should handle empty workflow', async () => {
      const workflow = { nodes: [] };
      const targetVersions = {};

      const result = await service.migrateWorkflowNodes(workflow, targetVersions);

      expect(result.results).toHaveLength(0);
      expect(result.success).toBe(true);
      expect(result.overallConfidence).toBe('HIGH');
    });

    it('should handle version string with single digit', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1',
        toVersion: '2',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1', '2');

      expect(result.updatedNode.typeVersion).toBe(2);
    });

    it('should handle version string with decimal', async () => {
      const node = createMockNode('node-1', 'nodes-base.httpRequest', 1);

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.1',
        toVersion: '2.3',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const result = await service.migrateNode(node, '1.1', '2.3');

      expect(result.updatedNode.typeVersion).toBe(2.3);
    });
  });
});
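One detail worth making explicit from the workflow-level tests above: overall confidence behaves as the weakest per-node confidence, with HIGH as the default for an empty workflow. A sketch of that aggregation, assuming a hypothetical helper name:

type Confidence = 'HIGH' | 'MEDIUM' | 'LOW';

// Worst-of aggregation: any LOW drags the whole migration to LOW,
// otherwise any MEDIUM yields MEDIUM; an empty list stays HIGH.
function aggregateConfidence(results: { confidence: Confidence }[]): Confidence {
  if (results.some(r => r.confidence === 'LOW')) return 'LOW';
  if (results.some(r => r.confidence === 'MEDIUM')) return 'MEDIUM';
  return 'HIGH';
}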
@@ -1,461 +0,0 @@
/**
 * Node Sanitizer Tests
 * Tests for auto-adding required metadata to filter-based nodes
 */

import { describe, it, expect } from 'vitest';
import { sanitizeNode, validateNodeMetadata } from '../../../src/services/node-sanitizer';
import { WorkflowNode } from '../../../src/types/n8n-api';

describe('Node Sanitizer', () => {
  describe('sanitizeNode', () => {
    it('should add complete filter options to IF v2.2 node', () => {
      const node: WorkflowNode = {
        id: 'test-if',
        name: 'IF Node',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.2,
        position: [0, 0],
        parameters: {
          conditions: {
            conditions: [
              {
                id: 'condition1',
                leftValue: '={{ $json.value }}',
                rightValue: '',
                operator: {
                  type: 'string',
                  operation: 'isNotEmpty'
                }
              }
            ]
          }
        }
      };

      const sanitized = sanitizeNode(node);

      // Check that options were added
      expect(sanitized.parameters.conditions).toHaveProperty('options');
      const options = (sanitized.parameters.conditions as any).options;

      expect(options).toEqual({
        version: 2,
        leftValue: '',
        caseSensitive: true,
        typeValidation: 'strict'
      });
    });

    it('should preserve existing options while adding missing fields', () => {
      const node: WorkflowNode = {
        id: 'test-if-partial',
        name: 'IF Node Partial',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.2,
        position: [0, 0],
        parameters: {
          conditions: {
            options: {
              caseSensitive: false // User-provided value
            },
            conditions: []
          }
        }
      };

      const sanitized = sanitizeNode(node);
      const options = (sanitized.parameters.conditions as any).options;

      // Should preserve user value
      expect(options.caseSensitive).toBe(false);

      // Should add missing fields
      expect(options.version).toBe(2);
      expect(options.leftValue).toBe('');
      expect(options.typeValidation).toBe('strict');
    });

    it('should fix invalid operator structure (type field misuse)', () => {
      const node: WorkflowNode = {
        id: 'test-if-bad-operator',
        name: 'IF Bad Operator',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.2,
        position: [0, 0],
        parameters: {
          conditions: {
            conditions: [
              {
                id: 'condition1',
                leftValue: '={{ $json.value }}',
                rightValue: '',
                operator: {
                  type: 'isNotEmpty' // WRONG: type should be data type, not operation
                }
              }
            ]
          }
        }
      };

      const sanitized = sanitizeNode(node);
      const condition = (sanitized.parameters.conditions as any).conditions[0];

      // Should fix operator structure
      expect(condition.operator.type).toBe('boolean'); // Inferred data type (isEmpty/isNotEmpty are boolean ops)
      expect(condition.operator.operation).toBe('isNotEmpty'); // Moved to operation field
    });

    it('should add singleValue for unary operators', () => {
      const node: WorkflowNode = {
        id: 'test-if-unary',
        name: 'IF Unary',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.2,
        position: [0, 0],
        parameters: {
          conditions: {
            conditions: [
              {
                id: 'condition1',
                leftValue: '={{ $json.value }}',
                rightValue: '',
                operator: {
                  type: 'string',
                  operation: 'isNotEmpty'
                  // Missing singleValue
                }
              }
            ]
          }
        }
      };

      const sanitized = sanitizeNode(node);
      const condition = (sanitized.parameters.conditions as any).conditions[0];

      expect(condition.operator.singleValue).toBe(true);
    });

    it('should sanitize Switch v3.2 node rules', () => {
      const node: WorkflowNode = {
        id: 'test-switch',
        name: 'Switch Node',
        type: 'n8n-nodes-base.switch',
        typeVersion: 3.2,
        position: [0, 0],
        parameters: {
          mode: 'rules',
          rules: {
            rules: [
              {
                outputKey: 'audio',
                conditions: {
                  conditions: [
                    {
                      id: 'cond1',
                      leftValue: '={{ $json.fileType }}',
                      rightValue: 'audio',
                      operator: {
                        type: 'string',
                        operation: 'equals'
                      }
                    }
                  ]
                }
              }
            ]
          }
        }
      };

      const sanitized = sanitizeNode(node);
      const rule = (sanitized.parameters.rules as any).rules[0];

      // Check that options were added to rule conditions
      expect(rule.conditions).toHaveProperty('options');
      expect(rule.conditions.options).toEqual({
        version: 2,
        leftValue: '',
        caseSensitive: true,
        typeValidation: 'strict'
      });
    });

    it('should not modify non-filter nodes', () => {
      const node: WorkflowNode = {
        id: 'test-http',
        name: 'HTTP Request',
        type: 'n8n-nodes-base.httpRequest',
        typeVersion: 4.2,
        position: [0, 0],
        parameters: {
          method: 'GET',
          url: 'https://example.com'
        }
      };

      const sanitized = sanitizeNode(node);

      // Should return unchanged
      expect(sanitized).toEqual(node);
    });

    it('should not modify old IF versions', () => {
      const node: WorkflowNode = {
        id: 'test-if-old',
        name: 'Old IF',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.0, // Pre-filter version
        position: [0, 0],
        parameters: {
          conditions: []
        }
      };

      const sanitized = sanitizeNode(node);

      // Should return unchanged
      expect(sanitized).toEqual(node);
    });

    it('should remove singleValue from binary operators like "equals"', () => {
      const node: WorkflowNode = {
        id: 'test-if-binary',
        name: 'IF Binary Operator',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.2,
        position: [0, 0],
        parameters: {
          conditions: {
            conditions: [
              {
                id: 'condition1',
                leftValue: '={{ $json.value }}',
                rightValue: 'test',
                operator: {
                  type: 'string',
                  operation: 'equals',
                  singleValue: true // WRONG: equals is binary, not unary
                }
              }
            ]
          }
        }
      };

      const sanitized = sanitizeNode(node);
      const condition = (sanitized.parameters.conditions as any).conditions[0];

      // Should remove singleValue from binary operator
      expect(condition.operator.singleValue).toBeUndefined();
      expect(condition.operator.type).toBe('string');
      expect(condition.operator.operation).toBe('equals');
    });
  });

  describe('validateNodeMetadata', () => {
    it('should detect missing conditions.options', () => {
      const node: WorkflowNode = {
        id: 'test',
        name: 'IF Missing Options',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.2,
        position: [0, 0],
        parameters: {
          conditions: {
            conditions: []
            // Missing options
          }
        }
      };

      const issues = validateNodeMetadata(node);

      expect(issues.length).toBeGreaterThan(0);
      expect(issues[0]).toBe('Missing conditions.options');
    });

    it('should detect missing operator.type', () => {
      const node: WorkflowNode = {
        id: 'test',
        name: 'IF Bad Operator',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.2,
        position: [0, 0],
        parameters: {
          conditions: {
            options: {
              version: 2,
              leftValue: '',
              caseSensitive: true,
              typeValidation: 'strict'
            },
            conditions: [
              {
                id: 'cond1',
                leftValue: '={{ $json.value }}',
                rightValue: '',
                operator: {
                  operation: 'equals'
                  // Missing type
                }
              }
            ]
          }
        }
      };

      const issues = validateNodeMetadata(node);

      expect(issues.length).toBeGreaterThan(0);
      expect(issues.some(issue => issue.includes("missing required field 'type'"))).toBe(true);
    });

    it('should detect invalid operator.type value', () => {
      const node: WorkflowNode = {
        id: 'test',
        name: 'IF Invalid Type',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.2,
        position: [0, 0],
        parameters: {
          conditions: {
            options: {
              version: 2,
              leftValue: '',
              caseSensitive: true,
              typeValidation: 'strict'
            },
            conditions: [
              {
                id: 'cond1',
                leftValue: '={{ $json.value }}',
                rightValue: '',
                operator: {
                  type: 'isNotEmpty', // WRONG: operation name, not data type
                  operation: 'isNotEmpty'
                }
              }
            ]
          }
        }
      };

      const issues = validateNodeMetadata(node);

      expect(issues.some(issue => issue.includes('invalid type "isNotEmpty"'))).toBe(true);
    });

    it('should detect missing singleValue for unary operators', () => {
      const node: WorkflowNode = {
        id: 'test',
        name: 'IF Missing SingleValue',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.2,
        position: [0, 0],
        parameters: {
          conditions: {
            options: {
              version: 2,
              leftValue: '',
              caseSensitive: true,
              typeValidation: 'strict'
            },
            conditions: [
              {
                id: 'cond1',
                leftValue: '={{ $json.value }}',
                rightValue: '',
                operator: {
                  type: 'string',
                  operation: 'isNotEmpty'
                  // Missing singleValue: true
                }
              }
            ]
          }
        }
      };

      const issues = validateNodeMetadata(node);

      expect(issues.length).toBeGreaterThan(0);
      expect(issues.some(issue => issue.includes('requires singleValue: true'))).toBe(true);
    });

    it('should detect singleValue on binary operators', () => {
      const node: WorkflowNode = {
        id: 'test',
        name: 'IF Binary with SingleValue',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.2,
        position: [0, 0],
        parameters: {
          conditions: {
            options: {
              version: 2,
              leftValue: '',
              caseSensitive: true,
              typeValidation: 'strict'
            },
            conditions: [
              {
                id: 'cond1',
                leftValue: '={{ $json.value }}',
                rightValue: 'test',
                operator: {
                  type: 'string',
                  operation: 'equals',
                  singleValue: true // WRONG: equals is binary
                }
              }
            ]
          }
        }
      };

      const issues = validateNodeMetadata(node);

      expect(issues.length).toBeGreaterThan(0);
      expect(issues.some(issue => issue.includes('should not have singleValue: true'))).toBe(true);
    });

    it('should return empty array for valid node', () => {
      const node: WorkflowNode = {
        id: 'test',
        name: 'Valid IF',
        type: 'n8n-nodes-base.if',
        typeVersion: 2.2,
        position: [0, 0],
        parameters: {
          conditions: {
            options: {
              version: 2,
              leftValue: '',
              caseSensitive: true,
              typeValidation: 'strict'
            },
            conditions: [
              {
                id: 'cond1',
                leftValue: '={{ $json.value }}',
                rightValue: '',
                operator: {
                  type: 'string',
                  operation: 'isNotEmpty',
                  singleValue: true
                }
              }
            ]
          }
        }
      };

      const issues = validateNodeMetadata(node);

      expect(issues).toEqual([]);
    });
  });
});
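The sanitizer tests above all revolve around one merge rule: user-supplied option fields win, and missing fields are back-filled with fixed defaults. A sketch of that rule, assuming the defaults the expectations spell out; the actual node-sanitizer may implement it differently:

const FILTER_OPTION_DEFAULTS = {
  version: 2,
  leftValue: '',
  caseSensitive: true,
  typeValidation: 'strict'
} as const;

// Spread order matters: defaults first, user values second, so a
// user-provided { caseSensitive: false } survives while the other
// fields are filled in - exactly what the partial-options test checks.
function withFilterDefaults(existing: Record<string, unknown> = {}): Record<string, unknown> {
  return { ...FILTER_OPTION_DEFAULTS, ...existing };
}

// withFilterDefaults({ caseSensitive: false })
// -> { version: 2, leftValue: '', caseSensitive: false, typeValidation: 'strict' }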
@@ -1610,12 +1610,7 @@ describe('NodeSpecificValidators', () => {
    });

    describe('response mode validation', () => {
      // NOTE: responseNode mode validation was moved to workflow-validator.ts in Phase 5
      // because it requires access to node-level onError property, not just config/parameters.
      // See workflow-validator.ts checkWebhookErrorHandling() method for the actual implementation.
      // The validation cannot be performed at the node-specific-validator level.

      it.skip('should error on responseNode without error handling - MOVED TO WORKFLOW VALIDATOR', () => {
      it('should error on responseNode without error handling', () => {
        context.config = {
          path: 'my-webhook',
          httpMethod: 'POST',
@@ -1632,7 +1627,7 @@ describe('NodeSpecificValidators', () => {
      });
    });

      it.skip('should not error on responseNode with proper error handling - MOVED TO WORKFLOW VALIDATOR', () => {
      it('should not error on responseNode with proper error handling', () => {
        context.config = {
          path: 'my-webhook',
          httpMethod: 'POST',

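The NOTE in this hunk explains why the check had to move: per-node config validators never see node-level properties such as onError. At the workflow level the whole node is available, so a check along these lines becomes possible. This is a sketch of the idea only; checkWebhookErrorHandling() in workflow-validator.ts is the authoritative implementation, and CheckedNode is an assumed shape:

interface CheckedNode {
  name: string;
  type: string;
  parameters: { responseMode?: string };
  onError?: string; // node-level property, invisible to config-only validators
}

// Sketch: a webhook answering via a Respond to Webhook node should declare
// error handling, otherwise a mid-workflow failure leaves the caller hanging.
function checkWebhookErrorHandling(nodes: CheckedNode[]): string[] {
  return nodes
    .filter(n => n.type === 'n8n-nodes-base.webhook')
    .filter(n => n.parameters.responseMode === 'responseNode')
    .filter(n => !n.onError)
    .map(n => `Webhook "${n.name}" uses responseNode mode without error handling`);
}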
||||
@@ -1,497 +0,0 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { NodeVersionService, type NodeVersion, type VersionComparison } from '@/services/node-version-service';
|
||||
import { NodeRepository } from '@/database/node-repository';
|
||||
import { BreakingChangeDetector, type VersionUpgradeAnalysis } from '@/services/breaking-change-detector';
|
||||
|
||||
vi.mock('@/database/node-repository');
|
||||
vi.mock('@/services/breaking-change-detector');
|
||||
|
||||
describe('NodeVersionService', () => {
|
||||
let service: NodeVersionService;
|
||||
let mockRepository: NodeRepository;
|
||||
let mockBreakingChangeDetector: BreakingChangeDetector;
|
||||
|
||||
const createMockVersion = (version: string, isCurrentMax = false): NodeVersion => ({
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
version,
|
||||
packageName: 'n8n-nodes-base',
|
||||
displayName: 'HTTP Request',
|
||||
isCurrentMax,
|
||||
breakingChanges: [],
|
||||
deprecatedProperties: [],
|
||||
addedProperties: []
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockRepository = new NodeRepository({} as any);
|
||||
mockBreakingChangeDetector = new BreakingChangeDetector(mockRepository);
|
||||
service = new NodeVersionService(mockRepository, mockBreakingChangeDetector);
|
||||
});
|
||||
|
||||
describe('getAvailableVersions', () => {
|
||||
it('should return versions from database', () => {
|
||||
const versions = [createMockVersion('1.0'), createMockVersion('2.0', true)];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result = service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
expect(result).toEqual(versions);
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledWith('nodes-base.httpRequest');
|
||||
});
|
||||
|
||||
it('should cache results', () => {
|
||||
const versions = [createMockVersion('1.0')];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should use cache within TTL', () => {
|
||||
const versions = [createMockVersion('1.0')];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result1 = service.getAvailableVersions('nodes-base.httpRequest');
|
||||
const result2 = service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
expect(result1).toEqual(result2);
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should refresh cache after TTL expiry', () => {
|
||||
vi.useFakeTimers();
|
||||
const versions = [createMockVersion('1.0')];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
// Advance time beyond TTL (5 minutes)
|
||||
vi.advanceTimersByTime(6 * 60 * 1000);
|
||||
|
||||
service.getAvailableVersions('nodes-base.httpRequest');
|
||||
|
||||
expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(2);
|
||||
|
||||
vi.useRealTimers();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getLatestVersion', () => {
|
||||
it('should return version marked as currentMax', () => {
|
||||
const versions = [
|
||||
createMockVersion('1.0'),
|
||||
createMockVersion('2.0', true),
|
||||
createMockVersion('1.5')
|
||||
];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result = service.getLatestVersion('nodes-base.httpRequest');
|
||||
|
||||
expect(result).toBe('2.0');
|
||||
});
|
||||
|
||||
it('should fallback to highest version if no currentMax', () => {
|
||||
const versions = [
|
||||
createMockVersion('1.0'),
|
||||
createMockVersion('2.0'),
|
||||
createMockVersion('1.5')
|
||||
];
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
|
||||
|
||||
const result = service.getLatestVersion('nodes-base.httpRequest');
|
||||
|
||||
expect(result).toBe('2.0');
|
||||
});
|
||||
|
||||
it('should fallback to main nodes table if no versions', () => {
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'getNode').mockReturnValue({
|
||||
nodeType: 'nodes-base.httpRequest',
|
||||
version: '1.0',
|
||||
packageName: 'n8n-nodes-base',
|
||||
displayName: 'HTTP Request'
|
||||
} as any);
|
||||
|
||||
const result = service.getLatestVersion('nodes-base.httpRequest');
|
||||
|
||||
expect(result).toBe('1.0');
|
||||
});
|
||||
|
||||
it('should return null if no version data available', () => {
|
||||
vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
|
||||
vi.spyOn(mockRepository, 'getNode').mockReturnValue(null);
|
||||
|
||||
const result = service.getLatestVersion('nodes-base.httpRequest');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('compareVersions', () => {
|
||||
it('should return -1 when first version is lower', () => {
|
||||
const result = service.compareVersions('1.0', '2.0');
|
||||
expect(result).toBe(-1);
|
||||
});
|
||||
|
||||
it('should return 1 when first version is higher', () => {
|
||||
const result = service.compareVersions('2.0', '1.0');
|
||||
expect(result).toBe(1);
|
||||
});
|
||||
|
||||
it('should return 0 when versions are equal', () => {
|
||||
const result = service.compareVersions('1.0', '1.0');
|
||||
expect(result).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle multi-part versions', () => {
|
||||
expect(service.compareVersions('1.2.3', '1.2.4')).toBe(-1);
|
||||
expect(service.compareVersions('2.0.0', '1.9.9')).toBe(1);
|
||||
expect(service.compareVersions('1.0.0', '1.0.0')).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle versions with different lengths', () => {
|
||||
expect(service.compareVersions('1.0', '1.0.0')).toBe(0);
|
||||
expect(service.compareVersions('1.0', '1.0.1')).toBe(-1);
|
||||
expect(service.compareVersions('2', '1.9')).toBe(1);
|
||||
});
|
||||
});

  describe('analyzeVersion', () => {
    it('should return up-to-date status when on latest version', () => {
      const versions = [createMockVersion('1.0', true)];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');

      expect(result.isOutdated).toBe(false);
      expect(result.recommendUpgrade).toBe(false);
      expect(result.confidence).toBe('HIGH');
      expect(result.reason).toContain('already at the latest version');
    });

    it('should detect outdated version', () => {
      const versions = [createMockVersion('2.0', true)];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
      vi.spyOn(mockBreakingChangeDetector, 'hasBreakingChanges').mockReturnValue(false);

      const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');

      expect(result.isOutdated).toBe(true);
      expect(result.latestVersion).toBe('2.0');
      expect(result.recommendUpgrade).toBe(true);
    });

    it('should calculate version gap', () => {
      const versions = [createMockVersion('3.0', true)];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
      vi.spyOn(mockBreakingChangeDetector, 'hasBreakingChanges').mockReturnValue(false);

      const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');

      expect(result.versionGap).toBeGreaterThan(0);
    });

    it('should detect breaking changes and lower confidence', () => {
      const versions = [createMockVersion('2.0', true)];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
      vi.spyOn(mockBreakingChangeDetector, 'hasBreakingChanges').mockReturnValue(true);

      const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');

      expect(result.hasBreakingChanges).toBe(true);
      expect(result.confidence).toBe('MEDIUM');
      expect(result.reason).toContain('breaking changes');
    });

    it('should lower confidence for large version gaps', () => {
      const versions = [createMockVersion('10.0', true)];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);
      vi.spyOn(mockBreakingChangeDetector, 'hasBreakingChanges').mockReturnValue(false);

      const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');

      expect(result.confidence).toBe('LOW');
      expect(result.reason).toContain('Version gap is large');
    });

    it('should handle missing version information', () => {
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
      vi.spyOn(mockRepository, 'getNode').mockReturnValue(null);

      const result = service.analyzeVersion('nodes-base.httpRequest', '1.0');

      expect(result.isOutdated).toBe(false);
      expect(result.confidence).toBe('HIGH');
      expect(result.reason).toContain('No version information available');
    });
  });
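  // Taken together, these cases pin down the confidence heuristic: HIGH when the
  // node is already current or no version data exists, MEDIUM once breaking
  // changes are detected, and LOW when the version gap grows large.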

  describe('suggestUpgradePath', () => {
    it('should return null when already on latest version', async () => {
      const versions = [createMockVersion('1.0', true)];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');

      expect(result).toBeNull();
    });

    it('should return null when no version information available', async () => {
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
      vi.spyOn(mockRepository, 'getNode').mockReturnValue(null);

      const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');

      expect(result).toBeNull();
    });

    it('should suggest direct upgrade for simple cases', async () => {
      const versions = [createMockVersion('2.0', true)];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };
      vi.spyOn(mockBreakingChangeDetector, 'analyzeVersionUpgrade').mockResolvedValue(mockAnalysis);

      const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');

      expect(result).not.toBeNull();
      expect(result!.direct).toBe(true);
      expect(result!.steps).toHaveLength(1);
      expect(result!.steps[0].fromVersion).toBe('1.0');
      expect(result!.steps[0].toVersion).toBe('2.0');
    });

    it('should suggest multi-step upgrade for complex cases', async () => {
      const versions = [
        createMockVersion('1.0'),
        createMockVersion('1.5'),
        createMockVersion('2.0', true)
      ];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          { isBreaking: true, autoMigratable: false } as any,
          { isBreaking: true, autoMigratable: false } as any,
          { isBreaking: true, autoMigratable: false } as any
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 3,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      vi.spyOn(mockBreakingChangeDetector, 'analyzeVersionUpgrade').mockResolvedValue(mockAnalysis);

      const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');

      expect(result).not.toBeNull();
      expect(result!.intermediateVersions).toContain('1.5');
    });

    it('should calculate estimated effort correctly', async () => {
      const versions = [createMockVersion('2.0', true)];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      const mockAnalysisLow: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [{ isBreaking: false, autoMigratable: true } as any],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };
      vi.spyOn(mockBreakingChangeDetector, 'analyzeVersionUpgrade').mockResolvedValue(mockAnalysisLow);

      const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');

      expect(result!.estimatedEffort).toBe('LOW');
    });

    it('should estimate HIGH effort for many breaking changes', async () => {
      const versions = [createMockVersion('2.0', true)];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      const mockAnalysisHigh: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: Array(7).fill({ isBreaking: true, autoMigratable: false }),
        autoMigratableCount: 0,
        manualRequiredCount: 7,
        overallSeverity: 'HIGH',
        recommendations: []
      };
      vi.spyOn(mockBreakingChangeDetector, 'analyzeVersionUpgrade').mockResolvedValue(mockAnalysisHigh);

      const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');

      expect(result!.estimatedEffort).toBe('HIGH');
      expect(result!.totalBreakingChanges).toBeGreaterThan(5);
    });

    it('should include migration hints in steps', async () => {
      const versions = [createMockVersion('2.0', true)];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [{ isBreaking: true, autoMigratable: false } as any],
        autoMigratableCount: 0,
        manualRequiredCount: 1,
        overallSeverity: 'MEDIUM',
        recommendations: ['Review property changes']
      };
      vi.spyOn(mockBreakingChangeDetector, 'analyzeVersionUpgrade').mockResolvedValue(mockAnalysis);

      const result = await service.suggestUpgradePath('nodes-base.httpRequest', '1.0');

      expect(result!.steps[0].migrationHints).toContain('Review property changes');
    });
  });
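  // The effort expectations above suggest a simple heuristic: LOW when every
  // change is auto-migratable, HIGH once the count of manual breaking changes
  // exceeds roughly five, with known intermediate versions folded into the
  // suggested path for complex upgrades.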

  describe('versionExists', () => {
    it('should return true if version exists', () => {
      const versions = [createMockVersion('1.0'), createMockVersion('2.0')];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      const result = service.versionExists('nodes-base.httpRequest', '1.0');

      expect(result).toBe(true);
    });

    it('should return false if version does not exist', () => {
      const versions = [createMockVersion('1.0')];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      const result = service.versionExists('nodes-base.httpRequest', '2.0');

      expect(result).toBe(false);
    });
  });

  describe('getVersionMetadata', () => {
    it('should return version metadata', () => {
      const version = createMockVersion('1.0');
      vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(version);

      const result = service.getVersionMetadata('nodes-base.httpRequest', '1.0');

      expect(result).toEqual(version);
    });

    it('should return null if version not found', () => {
      vi.spyOn(mockRepository, 'getNodeVersion').mockReturnValue(null);

      const result = service.getVersionMetadata('nodes-base.httpRequest', '99.0');

      expect(result).toBeNull();
    });
  });

  describe('clearCache', () => {
    it('should clear cache for specific node type', () => {
      const versions = [createMockVersion('1.0')];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      service.getAvailableVersions('nodes-base.httpRequest');
      service.clearCache('nodes-base.httpRequest');
      service.getAvailableVersions('nodes-base.httpRequest');

      expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(2);
    });

    it('should clear entire cache when no node type specified', () => {
      const versions = [createMockVersion('1.0')];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      service.getAvailableVersions('nodes-base.httpRequest');
      service.getAvailableVersions('nodes-base.webhook');

      service.clearCache();

      service.getAvailableVersions('nodes-base.httpRequest');
      service.getAvailableVersions('nodes-base.webhook');

      expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(4);
    });
  });

  describe('cache management', () => {
    it('should cache different node types separately', () => {
      const httpVersions = [createMockVersion('1.0')];
      const webhookVersions = [createMockVersion('2.0')];

      vi.spyOn(mockRepository, 'getNodeVersions')
        .mockReturnValueOnce(httpVersions)
        .mockReturnValueOnce(webhookVersions);

      const result1 = service.getAvailableVersions('nodes-base.httpRequest');
      const result2 = service.getAvailableVersions('nodes-base.webhook');

      expect(result1).toEqual(httpVersions);
      expect(result2).toEqual(webhookVersions);
      expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(2);
    });

    it('should not use cache after clearing', () => {
      const versions = [createMockVersion('1.0')];
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue(versions);

      service.getAvailableVersions('nodes-base.httpRequest');
      expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(1);

      service.clearCache('nodes-base.httpRequest');
      service.getAvailableVersions('nodes-base.httpRequest');

      expect(mockRepository.getNodeVersions).toHaveBeenCalledTimes(2);
    });
  });

  describe('edge cases', () => {
    it('should handle empty version arrays', () => {
      vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
      vi.spyOn(mockRepository, 'getNode').mockReturnValue(null);

      const result = service.getLatestVersion('nodes-base.httpRequest');

      expect(result).toBeNull();
    });

    it('should handle version comparison with zero parts', () => {
      const result = service.compareVersions('0.0.0', '0.0.1');

      expect(result).toBe(-1);
    });

    it('should handle single digit versions', () => {
      const result = service.compareVersions('1', '2');

      expect(result).toBe(-1);
    });
  });
});
@@ -1,856 +0,0 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { PostUpdateValidator, type PostUpdateGuidance } from '@/services/post-update-validator';
import { NodeVersionService } from '@/services/node-version-service';
import { BreakingChangeDetector, type VersionUpgradeAnalysis, type DetectedChange } from '@/services/breaking-change-detector';
import { type MigrationResult } from '@/services/node-migration-service';

vi.mock('@/services/node-version-service');
vi.mock('@/services/breaking-change-detector');

describe('PostUpdateValidator', () => {
  let validator: PostUpdateValidator;
  let mockVersionService: NodeVersionService;
  let mockBreakingChangeDetector: BreakingChangeDetector;

  const createMockMigrationResult = (
    success: boolean,
    remainingIssues: string[] = []
  ): MigrationResult => ({
    success,
    nodeId: 'node-1',
    nodeName: 'Test Node',
    fromVersion: '1.0',
    toVersion: '2.0',
    appliedMigrations: [],
    remainingIssues,
    confidence: success ? 'HIGH' : 'MEDIUM',
    updatedNode: {}
  });

  const createMockChange = (
    propertyName: string,
    changeType: DetectedChange['changeType'],
    autoMigratable: boolean,
    severity: DetectedChange['severity'] = 'MEDIUM'
  ): DetectedChange => ({
    propertyName,
    changeType,
    isBreaking: true,
    migrationHint: `Migrate ${propertyName}`,
    autoMigratable,
    severity,
    source: 'registry'
  });
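  // Fixture note: every change produced by createMockChange is marked
  // isBreaking with source 'registry', so tests that need a different shape
  // spread the result and override individual fields.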

  beforeEach(() => {
    vi.clearAllMocks();
    mockVersionService = {} as any;
    mockBreakingChangeDetector = {} as any;
    validator = new PostUpdateValidator(mockVersionService, mockBreakingChangeDetector);

    mockVersionService.compareVersions = vi.fn((v1, v2) => {
      const parse = (v: string) => parseFloat(v);
      return parse(v1) - parse(v2);
    });
  });
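  // The compareVersions stub above parses whole version strings with parseFloat,
  // so '1.10' would order below '1.9'. That is acceptable here because these
  // tests only use single-dot versions such as '1.0', '1.1', '2.0' and '2.1'.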

  describe('generateGuidance', () => {
    it('should generate complete guidance for successful migration', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(true);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.migrationStatus).toBe('complete');
      expect(guidance.confidence).toBe('HIGH');
      expect(guidance.requiredActions).toHaveLength(0);
    });

    it('should identify manual_required status for critical issues', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('criticalProp', 'requirement_changed', false, 'HIGH')
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 1,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Manual action required']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.migrationStatus).toBe('manual_required');
      expect(guidance.confidence).not.toBe('HIGH');
    });

    it('should set partial status for some remaining issues', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('prop', 'added', true, 'LOW')
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Minor issue']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.migrationStatus).toBe('partial');
    });
  });

  describe('required actions generation', () => {
    it('should generate required actions for manual changes', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('newRequiredProp', 'added', false, 'HIGH')
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 1,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Add property']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.requiredActions).toHaveLength(1);
      expect(guidance.requiredActions[0].type).toBe('ADD_PROPERTY');
      expect(guidance.requiredActions[0].property).toBe('newRequiredProp');
      expect(guidance.requiredActions[0].priority).toBe('CRITICAL');
    });

    it('should map change types to action types correctly', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('addedProp', 'added', false, 'HIGH'),
          createMockChange('changedProp', 'requirement_changed', false, 'MEDIUM'),
          createMockChange('defaultProp', 'default_changed', false, 'LOW')
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 3,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Issues']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.requiredActions[0].type).toBe('ADD_PROPERTY');
      expect(guidance.requiredActions[1].type).toBe('UPDATE_PROPERTY');
      expect(guidance.requiredActions[2].type).toBe('CONFIGURE_OPTION');
    });

    it('should map severity to priority correctly', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('highProp', 'added', false, 'HIGH'),
          createMockChange('medProp', 'added', false, 'MEDIUM'),
          createMockChange('lowProp', 'added', false, 'LOW')
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 3,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Issues']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.requiredActions[0].priority).toBe('CRITICAL');
      expect(guidance.requiredActions[1].priority).toBe('MEDIUM');
      expect(guidance.requiredActions[2].priority).toBe('LOW');
    });
  });

  describe('deprecated properties identification', () => {
    it('should identify removed properties', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          {
            ...createMockChange('oldProp', 'removed', true),
            migrationStrategy: { type: 'remove_property' }
          }
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'MEDIUM',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(true);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.deprecatedProperties).toHaveLength(1);
      expect(guidance.deprecatedProperties[0].property).toBe('oldProp');
      expect(guidance.deprecatedProperties[0].status).toBe('removed');
      expect(guidance.deprecatedProperties[0].action).toBe('remove');
    });

    it('should mark breaking removals appropriately', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          {
            ...createMockChange('breakingProp', 'removed', false),
            isBreaking: true
          }
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 1,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Issue']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.deprecatedProperties[0].impact).toBe('breaking');
    });
  });

  describe('behavior changes documentation', () => {
    it('should document Execute Workflow v1.1 data passing changes', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'n8n-nodes-base.executeWorkflow',
        fromVersion: '1.0',
        toVersion: '1.1',
        hasBreakingChanges: true,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(true);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Execute Workflow',
        'n8n-nodes-base.executeWorkflow',
        '1.0',
        '1.1',
        migrationResult
      );

      expect(guidance.behaviorChanges).toHaveLength(1);
      expect(guidance.behaviorChanges[0].aspect).toContain('Data passing');
      expect(guidance.behaviorChanges[0].impact).toBe('HIGH');
      expect(guidance.behaviorChanges[0].actionRequired).toBe(true);
    });

    it('should document Webhook v2.1 persistence changes', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'n8n-nodes-base.webhook',
        fromVersion: '2.0',
        toVersion: '2.1',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(true);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Webhook',
        'n8n-nodes-base.webhook',
        '2.0',
        '2.1',
        migrationResult
      );

      const persistenceChange = guidance.behaviorChanges.find(c => c.aspect.includes('persistence'));
      expect(persistenceChange).toBeDefined();
      expect(persistenceChange?.impact).toBe('MEDIUM');
    });

    it('should document Webhook v2.0 response handling changes', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'n8n-nodes-base.webhook',
        fromVersion: '1.9',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'MEDIUM',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(true);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Webhook',
        'n8n-nodes-base.webhook',
        '1.9',
        '2.0',
        migrationResult
      );

      const responseChange = guidance.behaviorChanges.find(c => c.aspect.includes('Response'));
      expect(responseChange).toBeDefined();
      expect(responseChange?.actionRequired).toBe(true);
    });

    it('should not document behavior changes for other nodes', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(true);

      const guidance = await validator.generateGuidance(
        'node-1',
        'HTTP Request',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.behaviorChanges).toHaveLength(0);
    });
  });

  describe('migration steps generation', () => {
    it('should generate ordered migration steps', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          {
            ...createMockChange('removedProp', 'removed', true),
            migrationStrategy: { type: 'remove_property' }
          },
          createMockChange('criticalProp', 'added', false, 'HIGH'),
          createMockChange('mediumProp', 'added', false, 'MEDIUM')
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 2,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Issues']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.migrationSteps.length).toBeGreaterThan(0);
      expect(guidance.migrationSteps[0]).toContain('deprecated');
      expect(guidance.migrationSteps.some(s => s.includes('critical'))).toBe(true);
      expect(guidance.migrationSteps.some(s => s.includes('Test workflow'))).toBe(true);
    });

    it('should include behavior change adaptation steps', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'n8n-nodes-base.executeWorkflow',
        fromVersion: '1.0',
        toVersion: '1.1',
        hasBreakingChanges: true,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(true);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Execute Workflow',
        'n8n-nodes-base.executeWorkflow',
        '1.0',
        '1.1',
        migrationResult
      );

      expect(guidance.migrationSteps.some(s => s.includes('behavior changes'))).toBe(true);
    });

    it('should always include final validation step', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(true);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.migrationSteps.some(s => s.includes('Test workflow'))).toBe(true);
    });
  });

  describe('confidence calculation', () => {
    it('should set HIGH confidence for complete migrations', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(true);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.confidence).toBe('HIGH');
    });

    it('should set MEDIUM confidence for partial migrations with few issues', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('prop', 'added', true, 'MEDIUM')
        ],
        autoMigratableCount: 1,
        manualRequiredCount: 0,
        overallSeverity: 'MEDIUM',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Minor issue']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.confidence).toBe('MEDIUM');
    });

    it('should set LOW confidence for manual_required with many critical actions', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('prop1', 'added', false, 'HIGH'),
          createMockChange('prop2', 'added', false, 'HIGH'),
          createMockChange('prop3', 'added', false, 'HIGH'),
          createMockChange('prop4', 'added', false, 'HIGH')
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 4,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Issues']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.confidence).toBe('LOW');
    });
  });

  describe('time estimation', () => {
    it('should estimate < 1 minute for simple migrations', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(true);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.estimatedTime).toBe('< 1 minute');
    });

    it('should estimate 2-5 minutes for few actions', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('prop1', 'added', false, 'HIGH'),
          createMockChange('prop2', 'added', false, 'MEDIUM')
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 2,
        overallSeverity: 'MEDIUM',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Issue']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      expect(guidance.estimatedTime).toMatch(/2-5|5-10/);
    });

    it('should estimate 20+ minutes for complex migrations', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'n8n-nodes-base.executeWorkflow',
        fromVersion: '1.0',
        toVersion: '1.1',
        hasBreakingChanges: true,
        changes: [
          createMockChange('prop1', 'added', false, 'HIGH'),
          createMockChange('prop2', 'added', false, 'HIGH'),
          createMockChange('prop3', 'added', false, 'HIGH'),
          createMockChange('prop4', 'added', false, 'HIGH'),
          createMockChange('prop5', 'added', false, 'HIGH')
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 5,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Issues']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Execute Workflow',
        'n8n-nodes-base.executeWorkflow',
        '1.0',
        '1.1',
        migrationResult
      );

      expect(guidance.estimatedTime).toContain('20+');
    });
  });

  describe('generateSummary', () => {
    it('should generate readable summary', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('prop1', 'added', false, 'HIGH'),
          createMockChange('prop2', 'added', false, 'MEDIUM')
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 2,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Issues']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      const summary = validator.generateSummary(guidance);

      expect(summary).toContain('Test Node');
      expect(summary).toContain('1.0');
      expect(summary).toContain('2.0');
      expect(summary).toContain('Required actions');
    });

    it('should limit actions displayed in summary', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'nodes-base.httpRequest',
        fromVersion: '1.0',
        toVersion: '2.0',
        hasBreakingChanges: true,
        changes: [
          createMockChange('prop1', 'added', false, 'HIGH'),
          createMockChange('prop2', 'added', false, 'HIGH'),
          createMockChange('prop3', 'added', false, 'HIGH'),
          createMockChange('prop4', 'added', false, 'HIGH'),
          createMockChange('prop5', 'added', false, 'HIGH')
        ],
        autoMigratableCount: 0,
        manualRequiredCount: 5,
        overallSeverity: 'HIGH',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(false, ['Issues']);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Test Node',
        'nodes-base.httpRequest',
        '1.0',
        '2.0',
        migrationResult
      );

      const summary = validator.generateSummary(guidance);

      expect(summary).toContain('and 2 more');
    });

    it('should include behavior changes in summary', async () => {
      const mockAnalysis: VersionUpgradeAnalysis = {
        nodeType: 'n8n-nodes-base.webhook',
        fromVersion: '2.0',
        toVersion: '2.1',
        hasBreakingChanges: false,
        changes: [],
        autoMigratableCount: 0,
        manualRequiredCount: 0,
        overallSeverity: 'LOW',
        recommendations: []
      };

      mockBreakingChangeDetector.analyzeVersionUpgrade = vi.fn().mockResolvedValue(mockAnalysis);

      const migrationResult = createMockMigrationResult(true);

      const guidance = await validator.generateGuidance(
        'node-1',
        'Webhook',
        'n8n-nodes-base.webhook',
        '2.0',
        '2.1',
        migrationResult
      );

      const summary = validator.generateSummary(guidance);

      expect(summary).toContain('Behavior changes');
    });
  });
});
@@ -35,10 +35,6 @@ describe('WorkflowAutoFixer', () => {
   beforeEach(() => {
     vi.clearAllMocks();
     mockRepository = new NodeRepository({} as any);
-
-    // Mock getNodeVersions to return empty array (no versions available)
-    vi.spyOn(mockRepository, 'getNodeVersions').mockReturnValue([]);
-
     autoFixer = new WorkflowAutoFixer(mockRepository);
   });

@@ -70,7 +66,7 @@ describe('WorkflowAutoFixer', () => {
   });

   describe('Expression Format Fixes', () => {
-    it('should fix missing prefix in expressions', async () => {
+    it('should fix missing prefix in expressions', () => {
       const workflow = createMockWorkflow([
         createMockNode('node-1', 'nodes-base.httpRequest', {
           url: '{{ $json.url }}',
@@ -104,7 +100,7 @@ describe('WorkflowAutoFixer', () => {
         suggestions: []
       };

-      const result = await autoFixer.generateFixes(workflow, validationResult, formatIssues);
+      const result = autoFixer.generateFixes(workflow, validationResult, formatIssues);

       expect(result.fixes).toHaveLength(1);
       expect(result.fixes[0].type).toBe('expression-format');
@@ -116,7 +112,7 @@ describe('WorkflowAutoFixer', () => {
       expect(result.operations[0].type).toBe('updateNode');
     });

-    it('should handle multiple expression fixes in same node', async () => {
+    it('should handle multiple expression fixes in same node', () => {
       const workflow = createMockWorkflow([
         createMockNode('node-1', 'nodes-base.httpRequest', {
           url: '{{ $json.url }}',
@@ -162,7 +158,7 @@ describe('WorkflowAutoFixer', () => {
         suggestions: []
       };

-      const result = await autoFixer.generateFixes(workflow, validationResult, formatIssues);
+      const result = autoFixer.generateFixes(workflow, validationResult, formatIssues);

       expect(result.fixes).toHaveLength(2);
       expect(result.operations).toHaveLength(1); // Single update operation for the node
@@ -170,7 +166,7 @@ describe('WorkflowAutoFixer', () => {
   });

   describe('TypeVersion Fixes', () => {
-    it('should fix typeVersion exceeding maximum', async () => {
+    it('should fix typeVersion exceeding maximum', () => {
       const workflow = createMockWorkflow([
         createMockNode('node-1', 'nodes-base.httpRequest', {})
       ]);
@@ -195,7 +191,7 @@ describe('WorkflowAutoFixer', () => {
         suggestions: []
       };

-      const result = await autoFixer.generateFixes(workflow, validationResult, []);
+      const result = autoFixer.generateFixes(workflow, validationResult, []);

       expect(result.fixes).toHaveLength(1);
       expect(result.fixes[0].type).toBe('typeversion-correction');
@@ -206,7 +202,7 @@ describe('WorkflowAutoFixer', () => {
   });

   describe('Error Output Configuration Fixes', () => {
-    it('should remove conflicting onError setting', async () => {
+    it('should remove conflicting onError setting', () => {
       const workflow = createMockWorkflow([
         createMockNode('node-1', 'nodes-base.httpRequest', {})
       ]);
@@ -232,7 +228,7 @@ describe('WorkflowAutoFixer', () => {
         suggestions: []
       };

-      const result = await autoFixer.generateFixes(workflow, validationResult, []);
+      const result = autoFixer.generateFixes(workflow, validationResult, []);

       expect(result.fixes).toHaveLength(1);
       expect(result.fixes[0].type).toBe('error-output-config');
@@ -299,7 +295,7 @@ describe('WorkflowAutoFixer', () => {
   });

   describe('Confidence Filtering', () => {
-    it('should filter fixes by confidence level', async () => {
+    it('should filter fixes by confidence level', () => {
       const workflow = createMockWorkflow([
         createMockNode('node-1', 'nodes-base.httpRequest', { url: '{{ $json.url }}' })
       ]);
@@ -330,7 +326,7 @@ describe('WorkflowAutoFixer', () => {
         suggestions: []
       };

-      const result = await autoFixer.generateFixes(workflow, validationResult, formatIssues, {
+      const result = autoFixer.generateFixes(workflow, validationResult, formatIssues, {
         confidenceThreshold: 'low'
       });

@@ -340,7 +336,7 @@ describe('WorkflowAutoFixer', () => {
   });

   describe('Summary Generation', () => {
-    it('should generate appropriate summary for fixes', async () => {
+    it('should generate appropriate summary for fixes', () => {
       const workflow = createMockWorkflow([
         createMockNode('node-1', 'nodes-base.httpRequest', { url: '{{ $json.url }}' })
       ]);
@@ -371,14 +367,14 @@ describe('WorkflowAutoFixer', () => {
         suggestions: []
       };

-      const result = await autoFixer.generateFixes(workflow, validationResult, formatIssues);
+      const result = autoFixer.generateFixes(workflow, validationResult, formatIssues);

       expect(result.summary).toContain('expression format');
       expect(result.stats.total).toBe(1);
       expect(result.stats.byType['expression-format']).toBe(1);
     });

-    it('should handle empty fixes gracefully', async () => {
+    it('should handle empty fixes gracefully', () => {
       const workflow = createMockWorkflow([]);
       const validationResult: WorkflowValidationResult = {
         valid: true,
@@ -395,7 +391,7 @@ describe('WorkflowAutoFixer', () => {
         suggestions: []
       };

-      const result = await autoFixer.generateFixes(workflow, validationResult, []);
+      const result = autoFixer.generateFixes(workflow, validationResult, []);

       expect(result.summary).toBe('No fixes available');
       expect(result.stats.total).toBe(0);
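// Net effect of the hunks above: generateFixes becomes synchronous, so every
// `await autoFixer.generateFixes(...)` call drops its `await`, the enclosing
// `it(...)` callbacks drop `async`, and the beforeEach no longer stubs
// getNodeVersions.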

@@ -1418,113 +1418,6 @@ describe('WorkflowDiffEngine', () => {
      expect(result.workflow!.connections['Switch']['main'][2][0].node).toBe('Handler');
      expect(result.workflow!.connections['Switch']['main'][1]).toEqual([]);
    });

    it('should warn when using sourceIndex with If node (issue #360)', async () => {
      const addIF: any = {
        type: 'addNode',
        node: {
          name: 'Check Condition',
          type: 'n8n-nodes-base.if',
          position: [400, 300]
        }
      };

      const addSuccess: any = {
        type: 'addNode',
        node: {
          name: 'Success Handler',
          type: 'n8n-nodes-base.set',
          position: [600, 200]
        }
      };

      const addError: any = {
        type: 'addNode',
        node: {
          name: 'Error Handler',
          type: 'n8n-nodes-base.set',
          position: [600, 400]
        }
      };

      // BAD: Using sourceIndex with If node (reproduces issue #360)
      const connectSuccess: any = {
        type: 'addConnection',
        source: 'Check Condition',
        target: 'Success Handler',
        sourceIndex: 0 // Should use branch="true" instead
      };

      const connectError: any = {
        type: 'addConnection',
        source: 'Check Condition',
        target: 'Error Handler',
        sourceIndex: 0 // Should use branch="false" instead - both will end up in main[0]!
      };

      const request: WorkflowDiffRequest = {
        id: 'test-workflow',
        operations: [addIF, addSuccess, addError, connectSuccess, connectError]
      };

      const result = await diffEngine.applyDiff(baseWorkflow, request);

      expect(result.success).toBe(true);

      // Should produce warnings
      expect(result.warnings).toBeDefined();
      expect(result.warnings!.length).toBe(2);
      expect(result.warnings![0].message).toContain('Consider using branch="true" or branch="false"');
      expect(result.warnings![0].message).toContain('If node outputs: main[0]=TRUE branch, main[1]=FALSE branch');
      expect(result.warnings![1].message).toContain('Consider using branch="true" or branch="false"');

      // Both connections end up in main[0] (the bug behavior)
      expect(result.workflow!.connections['Check Condition']['main'][0].length).toBe(2);
      expect(result.workflow!.connections['Check Condition']['main'][0][0].node).toBe('Success Handler');
      expect(result.workflow!.connections['Check Condition']['main'][0][1].node).toBe('Error Handler');
    });

    it('should warn when using sourceIndex with Switch node', async () => {
      const addSwitch: any = {
        type: 'addNode',
        node: {
          name: 'Switch',
          type: 'n8n-nodes-base.switch',
          position: [400, 300]
        }
      };

      const addHandler: any = {
        type: 'addNode',
        node: {
          name: 'Handler',
          type: 'n8n-nodes-base.set',
          position: [600, 300]
        }
      };

      // BAD: Using sourceIndex with Switch node
      const connect: any = {
        type: 'addConnection',
        source: 'Switch',
        target: 'Handler',
        sourceIndex: 1 // Should use case=1 instead
      };

      const request: WorkflowDiffRequest = {
        id: 'test-workflow',
        operations: [addSwitch, addHandler, connect]
      };

      const result = await diffEngine.applyDiff(baseWorkflow, request);

      expect(result.success).toBe(true);

      // Should produce warning
      expect(result.warnings).toBeDefined();
      expect(result.warnings!.length).toBe(1);
      expect(result.warnings![0].message).toContain('Consider using case=N for better clarity');
    });
  });

  describe('AddConnection with sourceIndex (Phase 0 Fix)', () => {

File diff suppressed because it is too large
@@ -1,616 +0,0 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { WorkflowVersioningService, type WorkflowVersion, type BackupResult } from '@/services/workflow-versioning-service';
import { NodeRepository } from '@/database/node-repository';
import { N8nApiClient } from '@/services/n8n-api-client';
import { WorkflowValidator } from '@/services/workflow-validator';
import type { Workflow } from '@/types/n8n-api';

vi.mock('@/database/node-repository');
vi.mock('@/services/n8n-api-client');
vi.mock('@/services/workflow-validator');

describe('WorkflowVersioningService', () => {
  let service: WorkflowVersioningService;
  let mockRepository: NodeRepository;
  let mockApiClient: N8nApiClient;

  const createMockWorkflow = (id: string, name: string, nodes: any[] = []): Workflow => ({
    id,
    name,
    active: false,
    nodes,
    connections: {},
    settings: {},
    createdAt: '2025-01-01T00:00:00.000Z',
    updatedAt: '2025-01-01T00:00:00.000Z'
  });

  const createMockVersion = (versionNumber: number): WorkflowVersion => ({
    id: versionNumber,
    workflowId: 'workflow-1',
    versionNumber,
    workflowName: 'Test Workflow',
    workflowSnapshot: createMockWorkflow('workflow-1', 'Test Workflow'),
    trigger: 'partial_update',
    createdAt: '2025-01-01T00:00:00.000Z'
  });
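  // Fixture note: createMockVersion embeds a complete workflow snapshot, which
  // presumably is what the history tests below measure when they assert a
  // non-zero serialized size per version.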

  beforeEach(() => {
    vi.clearAllMocks();
    mockRepository = new NodeRepository({} as any);
    mockApiClient = new N8nApiClient({ baseUrl: 'http://test', apiKey: 'test-key' });
    service = new WorkflowVersioningService(mockRepository, mockApiClient);
  });

  describe('createBackup', () => {
    it('should create a backup with version 1 for new workflow', async () => {
      const workflow = createMockWorkflow('workflow-1', 'Test Workflow');

      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);
      vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(1);
      vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);

      const result = await service.createBackup('workflow-1', workflow, {
        trigger: 'partial_update'
      });

      expect(result.versionId).toBe(1);
      expect(result.versionNumber).toBe(1);
      expect(result.pruned).toBe(0);
      expect(result.message).toContain('Backup created (version 1)');
    });

    it('should increment version number from latest version', async () => {
      const workflow = createMockWorkflow('workflow-1', 'Test Workflow');
      const existingVersions = [createMockVersion(3), createMockVersion(2)];

      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue(existingVersions);
      vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(4);
      vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);

      const result = await service.createBackup('workflow-1', workflow, {
        trigger: 'full_update'
      });

      expect(result.versionNumber).toBe(4);
      expect(mockRepository.createWorkflowVersion).toHaveBeenCalledWith(
        expect.objectContaining({
          versionNumber: 4
        })
      );
    });

    it('should include context in version metadata', async () => {
      const workflow = createMockWorkflow('workflow-1', 'Test Workflow');

      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);
      vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(1);
      vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);

      await service.createBackup('workflow-1', workflow, {
        trigger: 'autofix',
        operations: [{ type: 'updateNode', nodeId: 'node-1' }],
        fixTypes: ['expression-format'],
        metadata: { testKey: 'testValue' }
      });

      expect(mockRepository.createWorkflowVersion).toHaveBeenCalledWith(
        expect.objectContaining({
          trigger: 'autofix',
          operations: [{ type: 'updateNode', nodeId: 'node-1' }],
          fixTypes: ['expression-format'],
          metadata: { testKey: 'testValue' }
        })
      );
    });

    it('should auto-prune to 10 versions and report pruned count', async () => {
      const workflow = createMockWorkflow('workflow-1', 'Test Workflow');

      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([createMockVersion(1)]);
      vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(2);
      vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(3);

      const result = await service.createBackup('workflow-1', workflow, {
        trigger: 'partial_update'
      });

      expect(mockRepository.pruneWorkflowVersions).toHaveBeenCalledWith('workflow-1', 10);
      expect(result.pruned).toBe(3);
      expect(result.message).toContain('pruned 3 old version(s)');
    });
  });
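  // As exercised above, every backup call prunes the history down to the 10
  // most recent versions, and the number of pruned versions is surfaced in the
  // result message.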
|
||||
|
||||
  describe('getVersionHistory', () => {
    it('should return formatted version history', async () => {
      const versions = [
        createMockVersion(3),
        createMockVersion(2),
        createMockVersion(1)
      ];

      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue(versions);

      const result = await service.getVersionHistory('workflow-1', 10);

      expect(result).toHaveLength(3);
      expect(result[0].versionNumber).toBe(3);
      expect(result[0].workflowId).toBe('workflow-1');
      expect(result[0].size).toBeGreaterThan(0);
    });

    it('should include operation count when operations exist', async () => {
      const versionWithOps: WorkflowVersion = {
        ...createMockVersion(1),
        operations: [{ type: 'updateNode' }, { type: 'addNode' }]
      };

      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([versionWithOps]);

      const result = await service.getVersionHistory('workflow-1', 10);

      expect(result[0].operationCount).toBe(2);
    });

    it('should include fixTypes when present', async () => {
      const versionWithFixes: WorkflowVersion = {
        ...createMockVersion(1),
        fixTypes: ['expression-format', 'typeversion-correction']
      };

      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([versionWithFixes]);

      const result = await service.getVersionHistory('workflow-1', 10);

      expect(result[0].fixTypesApplied).toEqual(['expression-format', 'typeversion-correction']);
    });

    it('should respect the limit parameter', async () => {
      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);

      await service.getVersionHistory('workflow-1', 5);

      expect(mockRepository.getWorkflowVersions).toHaveBeenCalledWith('workflow-1', 5);
    });
  });

  describe('getVersion', () => {
    it('should return the requested version', async () => {
      const version = createMockVersion(1);
      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);

      const result = await service.getVersion(1);

      expect(result).toEqual(version);
    });

    it('should return null if version does not exist', async () => {
      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(null);

      const result = await service.getVersion(999);

      expect(result).toBeNull();
    });
  });

  describe('restoreVersion', () => {
    it('should fail if API client is not configured', async () => {
      const serviceWithoutApi = new WorkflowVersioningService(mockRepository);

      const result = await serviceWithoutApi.restoreVersion('workflow-1', 1);

      expect(result.success).toBe(false);
      expect(result.message).toContain('API client not configured');
      expect(result.backupCreated).toBe(false);
    });

    it('should fail if version does not exist', async () => {
      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(null);

      const result = await service.restoreVersion('workflow-1', 999);

      expect(result.success).toBe(false);
      expect(result.message).toContain('Version 999 not found');
      expect(result.backupCreated).toBe(false);
    });

    it('should restore latest version when no versionId provided', async () => {
      const version = createMockVersion(3);
      vi.spyOn(mockRepository, 'getLatestWorkflowVersion').mockReturnValue(version);
      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);
      vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(4);
      vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
      vi.spyOn(mockApiClient, 'getWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Current'));
      vi.spyOn(mockApiClient, 'updateWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Restored'));

      const result = await service.restoreVersion('workflow-1', undefined, false);

      expect(mockRepository.getLatestWorkflowVersion).toHaveBeenCalledWith('workflow-1');
      expect(result.success).toBe(true);
    });

    it('should fail if no backup versions exist and no versionId provided', async () => {
      vi.spyOn(mockRepository, 'getLatestWorkflowVersion').mockReturnValue(null);

      const result = await service.restoreVersion('workflow-1', undefined);

      expect(result.success).toBe(false);
      expect(result.message).toContain('No backup versions found');
    });

    it('should validate version before restore when validateBefore is true', async () => {
      const version = createMockVersion(1);
      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);

      const mockValidator = {
        validateWorkflow: vi.fn().mockResolvedValue({
          errors: [{ message: 'Validation error' }]
        })
      };
      vi.spyOn(WorkflowValidator.prototype, 'validateWorkflow').mockImplementation(
        mockValidator.validateWorkflow
      );

      const result = await service.restoreVersion('workflow-1', 1, true);

      expect(result.success).toBe(false);
      expect(result.message).toContain('has validation errors');
      expect(result.validationErrors).toEqual(['Validation error']);
      expect(result.backupCreated).toBe(false);
    });

    it('should skip validation when validateBefore is false', async () => {
      const version = createMockVersion(1);
      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);
      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);
      vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(2);
      vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
      vi.spyOn(mockApiClient, 'getWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Current'));
      vi.spyOn(mockApiClient, 'updateWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Restored'));

      const mockValidator = vi.fn();
      vi.spyOn(WorkflowValidator.prototype, 'validateWorkflow').mockImplementation(mockValidator);

      await service.restoreVersion('workflow-1', 1, false);

      expect(mockValidator).not.toHaveBeenCalled();
    });

    it('should create backup before restoring', async () => {
      const versionToRestore = createMockVersion(1);
      const currentWorkflow = createMockWorkflow('workflow-1', 'Current Workflow');

      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(versionToRestore);
      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([createMockVersion(2)]);
      vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(3);
      vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
      vi.spyOn(mockApiClient, 'getWorkflow').mockResolvedValue(currentWorkflow);
      vi.spyOn(mockApiClient, 'updateWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Restored'));

      const result = await service.restoreVersion('workflow-1', 1, false);

      expect(mockApiClient.getWorkflow).toHaveBeenCalledWith('workflow-1');
      expect(mockRepository.createWorkflowVersion).toHaveBeenCalledWith(
        expect.objectContaining({
          workflowSnapshot: currentWorkflow,
          metadata: expect.objectContaining({
            reason: 'Backup before rollback',
            restoringToVersion: 1
          })
        })
      );
      expect(result.backupCreated).toBe(true);
      expect(result.backupVersionId).toBe(3);
    });

    it('should fail if backup creation fails', async () => {
      const version = createMockVersion(1);
      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);
      vi.spyOn(mockApiClient, 'getWorkflow').mockRejectedValue(new Error('Backup failed'));

      const result = await service.restoreVersion('workflow-1', 1, false);

      expect(result.success).toBe(false);
      expect(result.message).toContain('Failed to create backup before restore');
      expect(result.backupCreated).toBe(false);
    });

    it('should successfully restore workflow', async () => {
      const versionToRestore = createMockVersion(1);
      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(versionToRestore);
      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([createMockVersion(2)]);
      vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(3);
      vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
      vi.spyOn(mockApiClient, 'getWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Current'));
      vi.spyOn(mockApiClient, 'updateWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Restored'));

      const result = await service.restoreVersion('workflow-1', 1, false);

      expect(mockApiClient.updateWorkflow).toHaveBeenCalledWith('workflow-1', versionToRestore.workflowSnapshot);
      expect(result.success).toBe(true);
      expect(result.message).toContain('Successfully restored workflow to version 1');
      expect(result.fromVersion).toBe(3);
      expect(result.toVersionId).toBe(1);
    });

    it('should handle restore API failures', async () => {
      const version = createMockVersion(1);
      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);
      vi.spyOn(mockRepository, 'getWorkflowVersions').mockReturnValue([]);
      vi.spyOn(mockRepository, 'createWorkflowVersion').mockReturnValue(2);
      vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
      vi.spyOn(mockApiClient, 'getWorkflow').mockResolvedValue(createMockWorkflow('workflow-1', 'Current'));
      vi.spyOn(mockApiClient, 'updateWorkflow').mockRejectedValue(new Error('API Error'));

      const result = await service.restoreVersion('workflow-1', 1, false);

      expect(result.success).toBe(false);
      expect(result.message).toContain('Failed to restore workflow');
      expect(result.backupCreated).toBe(true);
      expect(result.backupVersionId).toBe(2);
    });
  });
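
  // Editorial sketch (not part of the original diff): the restore sequence the
  // tests above walk through — resolve the target version, back up the live
  // workflow so the rollback is itself reversible, then push the snapshot via
  // the API. The `repo`/`api` shapes mirror the mocks and are assumptions about
  // the real implementation.
  async function restoreVersionSketch(
    repo: {
      getWorkflowVersion(id: number): { workflowSnapshot: object } | null;
      createWorkflowVersion(version: object): number;
    },
    api: {
      getWorkflow(id: string): Promise<object>;
      updateWorkflow(id: string, workflow: object): Promise<object>;
    },
    workflowId: string,
    versionId: number
  ) {
    const version = repo.getWorkflowVersion(versionId);
    if (!version) {
      return { success: false, backupCreated: false, message: `Version ${versionId} not found` };
    }
    // Backup before rollback: snapshot the current live workflow first.
    const current = await api.getWorkflow(workflowId);
    const backupVersionId = repo.createWorkflowVersion({
      workflowId,
      workflowSnapshot: current,
      metadata: { reason: 'Backup before rollback', restoringToVersion: versionId }
    });
    await api.updateWorkflow(workflowId, version.workflowSnapshot);
    return {
      success: true,
      backupCreated: true,
      backupVersionId,
      message: `Successfully restored workflow to version ${versionId}`
    };
  }
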
  describe('deleteVersion', () => {
    it('should delete a specific version', async () => {
      const version = createMockVersion(1);
      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(version);
      vi.spyOn(mockRepository, 'deleteWorkflowVersion').mockReturnValue(undefined);

      const result = await service.deleteVersion(1);

      expect(mockRepository.deleteWorkflowVersion).toHaveBeenCalledWith(1);
      expect(result.success).toBe(true);
      expect(result.message).toContain('Deleted version 1');
    });

    it('should fail if version does not exist', async () => {
      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(null);

      const result = await service.deleteVersion(999);

      expect(result.success).toBe(false);
      expect(result.message).toContain('Version 999 not found');
    });
  });

  describe('deleteAllVersions', () => {
    it('should delete all versions for a workflow', async () => {
      vi.spyOn(mockRepository, 'getWorkflowVersionCount').mockReturnValue(5);
      vi.spyOn(mockRepository, 'deleteWorkflowVersionsByWorkflowId').mockReturnValue(5);

      const result = await service.deleteAllVersions('workflow-1');

      expect(result.deleted).toBe(5);
      expect(result.message).toContain('Deleted 5 version(s)');
    });

    it('should return zero if no versions exist', async () => {
      vi.spyOn(mockRepository, 'getWorkflowVersionCount').mockReturnValue(0);

      const result = await service.deleteAllVersions('workflow-1');

      expect(result.deleted).toBe(0);
      expect(result.message).toContain('No versions found');
    });
  });

  describe('pruneVersions', () => {
    it('should prune versions and return counts', async () => {
      vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(3);
      vi.spyOn(mockRepository, 'getWorkflowVersionCount').mockReturnValue(10);

      const result = await service.pruneVersions('workflow-1', 10);

      expect(result.pruned).toBe(3);
      expect(result.remaining).toBe(10);
    });

    it('should use custom maxVersions parameter', async () => {
      vi.spyOn(mockRepository, 'pruneWorkflowVersions').mockReturnValue(0);
      vi.spyOn(mockRepository, 'getWorkflowVersionCount').mockReturnValue(5);

      await service.pruneVersions('workflow-1', 5);

      expect(mockRepository.pruneWorkflowVersions).toHaveBeenCalledWith('workflow-1', 5);
    });
  });

  describe('truncateAllVersions', () => {
    it('should refuse to truncate without confirmation', async () => {
      const result = await service.truncateAllVersions(false);

      expect(result.deleted).toBe(0);
      expect(result.message).toContain('not confirmed');
    });

    it('should truncate all versions when confirmed', async () => {
      vi.spyOn(mockRepository, 'truncateWorkflowVersions').mockReturnValue(50);

      const result = await service.truncateAllVersions(true);

      expect(result.deleted).toBe(50);
      expect(result.message).toContain('Truncated workflow_versions table');
    });
  });

  describe('getStorageStats', () => {
    it('should return formatted storage statistics', async () => {
      const mockStats = {
        totalVersions: 10,
        totalSize: 1024000,
        byWorkflow: [
          {
            workflowId: 'workflow-1',
            workflowName: 'Test Workflow',
            versionCount: 5,
            totalSize: 512000,
            lastBackup: '2025-01-01T00:00:00.000Z'
          }
        ]
      };

      vi.spyOn(mockRepository, 'getVersionStorageStats').mockReturnValue(mockStats);

      const result = await service.getStorageStats();

      expect(result.totalVersions).toBe(10);
      expect(result.totalSizeFormatted).toContain('KB');
      expect(result.byWorkflow).toHaveLength(1);
      expect(result.byWorkflow[0].totalSizeFormatted).toContain('KB');
    });

    it('should format bytes correctly', async () => {
      const mockStats = {
        totalVersions: 1,
        totalSize: 0,
        byWorkflow: []
      };

      vi.spyOn(mockRepository, 'getVersionStorageStats').mockReturnValue(mockStats);

      const result = await service.getStorageStats();

      expect(result.totalSizeFormatted).toBe('0 Bytes');
    });
  });

  describe('compareVersions', () => {
    it('should detect added nodes', async () => {
      const v1 = createMockVersion(1);
      v1.workflowSnapshot.nodes = [{ id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 1, position: [0, 0], parameters: {} }];

      const v2 = createMockVersion(2);
      v2.workflowSnapshot.nodes = [
        { id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 1, position: [0, 0], parameters: {} },
        { id: 'node-2', name: 'Node 2', type: 'test', typeVersion: 1, position: [100, 0], parameters: {} }
      ];

      vi.spyOn(mockRepository, 'getWorkflowVersion')
        .mockReturnValueOnce(v1)
        .mockReturnValueOnce(v2);

      const result = await service.compareVersions(1, 2);

      expect(result.addedNodes).toEqual(['node-2']);
      expect(result.removedNodes).toEqual([]);
      expect(result.modifiedNodes).toEqual([]);
    });

    it('should detect removed nodes', async () => {
      const v1 = createMockVersion(1);
      v1.workflowSnapshot.nodes = [
        { id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 1, position: [0, 0], parameters: {} },
        { id: 'node-2', name: 'Node 2', type: 'test', typeVersion: 1, position: [100, 0], parameters: {} }
      ];

      const v2 = createMockVersion(2);
      v2.workflowSnapshot.nodes = [{ id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 1, position: [0, 0], parameters: {} }];

      vi.spyOn(mockRepository, 'getWorkflowVersion')
        .mockReturnValueOnce(v1)
        .mockReturnValueOnce(v2);

      const result = await service.compareVersions(1, 2);

      expect(result.removedNodes).toEqual(['node-2']);
      expect(result.addedNodes).toEqual([]);
    });

    it('should detect modified nodes', async () => {
      const v1 = createMockVersion(1);
      v1.workflowSnapshot.nodes = [{ id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 1, position: [0, 0], parameters: {} }];

      const v2 = createMockVersion(2);
      v2.workflowSnapshot.nodes = [{ id: 'node-1', name: 'Node 1', type: 'test', typeVersion: 2, position: [0, 0], parameters: {} }];

      vi.spyOn(mockRepository, 'getWorkflowVersion')
        .mockReturnValueOnce(v1)
        .mockReturnValueOnce(v2);

      const result = await service.compareVersions(1, 2);

      expect(result.modifiedNodes).toEqual(['node-1']);
    });

    it('should detect connection changes', async () => {
      const v1 = createMockVersion(1);
      v1.workflowSnapshot.connections = { 'node-1': { main: [[{ node: 'node-2', type: 'main', index: 0 }]] } };

      const v2 = createMockVersion(2);
      v2.workflowSnapshot.connections = {};

      vi.spyOn(mockRepository, 'getWorkflowVersion')
        .mockReturnValueOnce(v1)
        .mockReturnValueOnce(v2);

      const result = await service.compareVersions(1, 2);

      expect(result.connectionChanges).toBe(1);
    });

    it('should detect settings changes', async () => {
      const v1 = createMockVersion(1);
      v1.workflowSnapshot.settings = { executionOrder: 'v0' };

      const v2 = createMockVersion(2);
      v2.workflowSnapshot.settings = { executionOrder: 'v1' };

      vi.spyOn(mockRepository, 'getWorkflowVersion')
        .mockReturnValueOnce(v1)
        .mockReturnValueOnce(v2);

      const result = await service.compareVersions(1, 2);

      expect(result.settingChanges).toHaveProperty('executionOrder');
      expect(result.settingChanges.executionOrder.before).toBe('v0');
      expect(result.settingChanges.executionOrder.after).toBe('v1');
    });

    it('should throw error if version not found', async () => {
      vi.spyOn(mockRepository, 'getWorkflowVersion').mockReturnValue(null);

      await expect(service.compareVersions(1, 2)).rejects.toThrow('One or both versions not found');
    });
  });

  describe('formatBytes', () => {
    it('should format bytes to human-readable string', () => {
      // Access private method through any cast
      const formatBytes = (service as any).formatBytes.bind(service);

      expect(formatBytes(0)).toBe('0 Bytes');
      expect(formatBytes(500)).toBe('500 Bytes');
      expect(formatBytes(1024)).toBe('1 KB');
      expect(formatBytes(1048576)).toBe('1 MB');
      expect(formatBytes(1073741824)).toBe('1 GB');
    });
  });

  describe('diffObjects', () => {
    it('should detect object differences', () => {
      const diffObjects = (service as any).diffObjects.bind(service);

      const obj1 = { a: 1, b: 2 };
      const obj2 = { a: 1, b: 3, c: 4 };

      const diff = diffObjects(obj1, obj2);

      expect(diff).toHaveProperty('b');
      expect(diff.b).toEqual({ before: 2, after: 3 });
      expect(diff).toHaveProperty('c');
      expect(diff.c).toEqual({ before: undefined, after: 4 });
    });

    it('should return empty object when no differences', () => {
      const diffObjects = (service as any).diffObjects.bind(service);

      const obj1 = { a: 1, b: 2 };
      const obj2 = { a: 1, b: 2 };

      const diff = diffObjects(obj1, obj2);

      expect(Object.keys(diff)).toHaveLength(0);
    });
  });
});
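
The formatBytes and diffObjects tests above fully constrain those two private helpers. A minimal implementation consistent with the expectations (an editorial sketch, not the project's actual source) would be:

function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 Bytes';
  const units = ['Bytes', 'KB', 'MB', 'GB'];
  // Pick the largest unit whose threshold the value reaches (1024-based).
  const i = Math.floor(Math.log(bytes) / Math.log(1024));
  return `${parseFloat((bytes / Math.pow(1024, i)).toFixed(2))} ${units[i]}`;
}

function diffObjects(a: Record<string, unknown>, b: Record<string, unknown>) {
  const diff: Record<string, { before: unknown; after: unknown }> = {};
  for (const key of new Set([...Object.keys(a), ...Object.keys(b)])) {
    // Compare serialized values; record before/after for any key that changed.
    if (JSON.stringify(a[key]) !== JSON.stringify(b[key])) {
      diff[key] = { before: a[key], after: b[key] };
    }
  }
  return diff;
}
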
306
tests/unit/session-lifecycle-events.test.ts
Normal file
@@ -0,0 +1,306 @@
/**
 * Unit tests for Session Lifecycle Events (Phase 3 - REQ-4)
 * Tests event emission configuration and error handling
 *
 * Note: Events are fire-and-forget (non-blocking), so we test:
 * 1. Configuration works without errors
 * 2. Operations complete successfully even if handlers fail
 * 3. Handlers don't block operations
 */
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { N8NMCPEngine } from '../../src/mcp-engine';
import { InstanceContext } from '../../src/types/instance-context';

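// Editorial sketch (not part of the original diff): the fire-and-forget pattern
// these tests rely on. Handler errors are swallowed and async results are not
// awaited, so session operations never block or fail because of a handler.
function emitSessionEventSketch(
  handler: ((sessionId: string) => void | Promise<void>) | undefined,
  sessionId: string
): void {
  if (!handler) return;
  try {
    // Wrap sync and async handlers alike; rejections are caught, never rethrown.
    Promise.resolve(handler(sessionId)).catch(() => { /* log and drop */ });
  } catch {
    // Synchronous throws are swallowed too.
  }
}
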
describe('Session Lifecycle Events (Phase 3 - REQ-4)', () => {
  let engine: N8NMCPEngine;
  const testContext: InstanceContext = {
    n8nApiUrl: 'https://test.n8n.cloud',
    n8nApiKey: 'test-api-key',
    instanceId: 'test-instance'
  };

  beforeEach(() => {
    // Set required AUTH_TOKEN environment variable for testing
    process.env.AUTH_TOKEN = 'test-token-for-session-lifecycle-events-testing-32chars';
  });

  describe('onSessionCreated event', () => {
    it('should configure onSessionCreated handler without error', () => {
      const onSessionCreated = vi.fn();

      engine = new N8NMCPEngine({
        sessionEvents: { onSessionCreated }
      });

      const sessionId = 'instance-test-abc123-uuid-created-test-1';
      const result = engine.restoreSession(sessionId, testContext);

      // Session should be created successfully
      expect(result).toBe(true);
      expect(engine.getActiveSessions()).toContain(sessionId);
    });

    it('should create session successfully even with handler error', () => {
      const errorHandler = vi.fn(() => {
        throw new Error('Event handler error');
      });

      engine = new N8NMCPEngine({
        sessionEvents: { onSessionCreated: errorHandler }
      });

      const sessionId = 'instance-test-abc123-uuid-error-test';

      // Should not throw despite handler error (non-blocking)
      expect(() => {
        engine.restoreSession(sessionId, testContext);
      }).not.toThrow();

      // Session should still be created successfully
      expect(engine.getActiveSessions()).toContain(sessionId);
    });

    it('should support async handlers without blocking', () => {
      const asyncHandler = vi.fn(async () => {
        await new Promise(resolve => setTimeout(resolve, 100));
      });

      engine = new N8NMCPEngine({
        sessionEvents: { onSessionCreated: asyncHandler }
      });

      const sessionId = 'instance-test-abc123-uuid-async-test';

      // Should return immediately (non-blocking)
      const startTime = Date.now();
      engine.restoreSession(sessionId, testContext);
      const endTime = Date.now();

      // Should complete quickly (not wait for async handler)
      expect(endTime - startTime).toBeLessThan(50);
      expect(engine.getActiveSessions()).toContain(sessionId);
    });
  });

  describe('onSessionDeleted event', () => {
    it('should configure onSessionDeleted handler without error', () => {
      const onSessionDeleted = vi.fn();

      engine = new N8NMCPEngine({
        sessionEvents: { onSessionDeleted }
      });

      const sessionId = 'instance-test-abc123-uuid-deleted-test';

      // Create and delete session
      engine.restoreSession(sessionId, testContext);
      const result = engine.deleteSession(sessionId);

      // Deletion should succeed
      expect(result).toBe(true);
      expect(engine.getActiveSessions()).not.toContain(sessionId);
    });

    it('should return false when deleting a non-existent session', () => {
      const onSessionDeleted = vi.fn();

      engine = new N8NMCPEngine({
        sessionEvents: { onSessionDeleted }
      });

      // Try to delete non-existent session
      const result = engine.deleteSession('non-existent-session-id');

      // Should return false (session not found)
      expect(result).toBe(false);
    });

    it('should delete session successfully even with handler error', () => {
      const errorHandler = vi.fn(() => {
        throw new Error('Deletion event error');
      });

      engine = new N8NMCPEngine({
        sessionEvents: { onSessionDeleted: errorHandler }
      });

      const sessionId = 'instance-test-abc123-uuid-delete-error-test';

      // Create session
      engine.restoreSession(sessionId, testContext);

      // Delete should succeed despite handler error
      const deleted = engine.deleteSession(sessionId);
      expect(deleted).toBe(true);

      // Session should still be deleted
      expect(engine.getActiveSessions()).not.toContain(sessionId);
    });
  });

  describe('Multiple events configuration', () => {
    it('should support multiple events configured together', () => {
      const onSessionCreated = vi.fn();
      const onSessionDeleted = vi.fn();

      engine = new N8NMCPEngine({
        sessionEvents: {
          onSessionCreated,
          onSessionDeleted
        }
      });

      const sessionId = 'instance-test-abc123-uuid-multi-event-test';

      // Create session
      engine.restoreSession(sessionId, testContext);
      expect(engine.getActiveSessions()).toContain(sessionId);

      // Delete session
      engine.deleteSession(sessionId);
      expect(engine.getActiveSessions()).not.toContain(sessionId);
    });

    it('should handle mix of sync and async handlers', () => {
      const syncHandler = vi.fn();
      const asyncHandler = vi.fn(async () => {
        await new Promise(resolve => setTimeout(resolve, 10));
      });

      engine = new N8NMCPEngine({
        sessionEvents: {
          onSessionCreated: syncHandler,
          onSessionDeleted: asyncHandler
        }
      });

      const sessionId = 'instance-test-abc123-uuid-mixed-handlers';

      // Create session
      const startTime = Date.now();
      engine.restoreSession(sessionId, testContext);
      const createTime = Date.now();

      // Should not block for async handler
      expect(createTime - startTime).toBeLessThan(50);

      // Delete session
      engine.deleteSession(sessionId);
      const deleteTime = Date.now();

      // Should not block for async handler
      expect(deleteTime - createTime).toBeLessThan(50);
    });
  });

  describe('Event handler error behavior', () => {
    it('should not propagate errors from event handlers to caller', () => {
      const errorHandler = vi.fn(() => {
        throw new Error('Test error');
      });

      engine = new N8NMCPEngine({
        sessionEvents: {
          onSessionCreated: errorHandler
        }
      });

      const sessionId = 'instance-test-abc123-uuid-no-propagate';

      // Should not throw (non-blocking error handling)
      expect(() => {
        engine.restoreSession(sessionId, testContext);
      }).not.toThrow();

      // Session was created successfully
      expect(engine.getActiveSessions()).toContain(sessionId);
    });

    it('should allow operations to complete if event handler fails', () => {
      const errorHandler = vi.fn(() => {
        throw new Error('Handler error');
      });

      engine = new N8NMCPEngine({
        sessionEvents: {
          onSessionDeleted: errorHandler
        }
      });

      const sessionId = 'instance-test-abc123-uuid-continue-on-error';

      engine.restoreSession(sessionId, testContext);

      // Delete should succeed despite handler error
      const result = engine.deleteSession(sessionId);
      expect(result).toBe(true);

      // Session should be deleted
      expect(engine.getActiveSessions()).not.toContain(sessionId);
    });
  });

  describe('Event handler with metadata', () => {
    it('should configure handlers with metadata support', () => {
      const onSessionCreated = vi.fn();

      engine = new N8NMCPEngine({
        sessionEvents: { onSessionCreated }
      });

      const sessionId = 'instance-test-abc123-uuid-metadata-test';
      const contextWithMetadata = {
        ...testContext,
        metadata: {
          userId: 'user-456',
          tier: 'enterprise',
          region: 'us-east-1'
        }
      };

      engine.restoreSession(sessionId, contextWithMetadata);

      // Session created successfully
      expect(engine.getActiveSessions()).toContain(sessionId);

      // State includes metadata
      const state = engine.getSessionState(sessionId);
      expect(state?.metadata).toEqual({
        userId: 'user-456',
        tier: 'enterprise',
        region: 'us-east-1'
      });
    });
  });

  describe('Configuration validation', () => {
    it('should accept empty sessionEvents object', () => {
      expect(() => {
        engine = new N8NMCPEngine({
          sessionEvents: {}
        });
      }).not.toThrow();
    });

    it('should accept undefined sessionEvents', () => {
      expect(() => {
        engine = new N8NMCPEngine({
          sessionEvents: undefined
        });
      }).not.toThrow();
    });

    it('should work without sessionEvents configured', () => {
      engine = new N8NMCPEngine();

      const sessionId = 'instance-test-abc123-uuid-no-events';

      // Should work normally
      engine.restoreSession(sessionId, testContext);
      expect(engine.getActiveSessions()).toContain(sessionId);

      engine.deleteSession(sessionId);
      expect(engine.getActiveSessions()).not.toContain(sessionId);
    });
  });
});
349
tests/unit/session-management-api.test.ts
Normal file
@@ -0,0 +1,349 @@
/**
 * Unit tests for Session Management API (Phase 2 - REQ-5)
 * Tests the public API methods for session management in v2.19.0
 */
import { describe, it, expect, beforeEach } from 'vitest';
import { N8NMCPEngine } from '../../src/mcp-engine';
import { InstanceContext } from '../../src/types/instance-context';

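// Editorial sketch (not part of the original diff): a consumer-side backup and
// restore cycle built on this API. `saveToStore`/`loadFromStore` are hypothetical
// stand-ins for whatever persistence layer an embedder uses.
async function backupSessionsSketch(
  engine: N8NMCPEngine,
  saveToStore: (json: string) => Promise<void>
): Promise<void> {
  // States are plain JSON-serializable objects, as the integration tests below verify.
  await saveToStore(JSON.stringify(engine.getAllSessionStates()));
}

async function restoreSessionsSketch(
  engine: N8NMCPEngine,
  loadFromStore: () => Promise<string>
): Promise<void> {
  const states: Array<{ sessionId: string; instanceContext: InstanceContext }> =
    JSON.parse(await loadFromStore());
  for (const { sessionId, instanceContext } of states) {
    // Idempotent: returns true whether the session was created or already present.
    engine.restoreSession(sessionId, instanceContext);
  }
}
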
describe('Session Management API (Phase 2 - REQ-5)', () => {
  let engine: N8NMCPEngine;
  const testContext: InstanceContext = {
    n8nApiUrl: 'https://test.n8n.cloud',
    n8nApiKey: 'test-api-key',
    instanceId: 'test-instance'
  };

  beforeEach(() => {
    // Set required AUTH_TOKEN environment variable for testing
    process.env.AUTH_TOKEN = 'test-token-for-session-management-testing-32chars';

    // Create engine with session restoration disabled for these tests
    engine = new N8NMCPEngine({
      sessionTimeout: 30 * 60 * 1000 // 30 minutes
    });
  });

  describe('getActiveSessions()', () => {
    it('should return empty array when no sessions exist', () => {
      const sessionIds = engine.getActiveSessions();
      expect(sessionIds).toEqual([]);
    });

    it('should return session IDs after session creation via restoreSession', () => {
      // Create session using direct API (not through HTTP request)
      const sessionId = 'instance-test-abc123-uuid-session-test-1';
      engine.restoreSession(sessionId, testContext);

      const sessionIds = engine.getActiveSessions();
      expect(sessionIds.length).toBe(1);
      expect(sessionIds).toContain(sessionId);
    });

    it('should return multiple session IDs when multiple sessions exist', () => {
      // Create multiple sessions using direct API
      const sessions = [
        { id: 'instance-test1-abc123-uuid-session-1', context: { ...testContext, instanceId: 'instance-1' } },
        { id: 'instance-test2-abc123-uuid-session-2', context: { ...testContext, instanceId: 'instance-2' } }
      ];

      sessions.forEach(({ id, context }) => {
        engine.restoreSession(id, context);
      });

      const sessionIds = engine.getActiveSessions();
      expect(sessionIds.length).toBe(2);
      expect(sessionIds).toContain(sessions[0].id);
      expect(sessionIds).toContain(sessions[1].id);
    });
  });

  describe('getSessionState()', () => {
    it('should return null for non-existent session', () => {
      const state = engine.getSessionState('non-existent-session-id');
      expect(state).toBeNull();
    });

    it('should return session state for existing session', () => {
      // Create a session using direct API
      const sessionId = 'instance-test-abc123-uuid-session-state-test';
      engine.restoreSession(sessionId, testContext);

      const state = engine.getSessionState(sessionId);
      expect(state).not.toBeNull();
      expect(state).toMatchObject({
        sessionId: sessionId,
        instanceContext: expect.objectContaining({
          n8nApiUrl: testContext.n8nApiUrl,
          n8nApiKey: testContext.n8nApiKey,
          instanceId: testContext.instanceId
        }),
        createdAt: expect.any(Date),
        lastAccess: expect.any(Date),
        expiresAt: expect.any(Date)
      });
    });

    it('should include metadata in session state if available', () => {
      const contextWithMetadata: InstanceContext = {
        ...testContext,
        metadata: { userId: 'user-123', tier: 'premium' }
      };

      const sessionId = 'instance-test-abc123-uuid-metadata-test';
      engine.restoreSession(sessionId, contextWithMetadata);

      const state = engine.getSessionState(sessionId);

      expect(state?.metadata).toEqual({ userId: 'user-123', tier: 'premium' });
    });

    it('should calculate correct expiration time', () => {
      const sessionId = 'instance-test-abc123-uuid-expiry-test';
      engine.restoreSession(sessionId, testContext);

      const state = engine.getSessionState(sessionId);

      expect(state).not.toBeNull();
      if (state) {
        const expectedExpiry = new Date(state.lastAccess.getTime() + 30 * 60 * 1000);
        const actualExpiry = state.expiresAt;

        // Allow 1 second difference for test timing
        expect(Math.abs(actualExpiry.getTime() - expectedExpiry.getTime())).toBeLessThan(1000);
      }
    });
  });

  describe('getAllSessionStates()', () => {
    it('should return empty array when no sessions exist', () => {
      const states = engine.getAllSessionStates();
      expect(states).toEqual([]);
    });

    it('should return all session states', () => {
      // Create two sessions using direct API
      const session1Id = 'instance-test1-abc123-uuid-all-states-1';
      const session2Id = 'instance-test2-abc123-uuid-all-states-2';

      engine.restoreSession(session1Id, {
        ...testContext,
        instanceId: 'instance-1'
      });

      engine.restoreSession(session2Id, {
        ...testContext,
        instanceId: 'instance-2'
      });

      const states = engine.getAllSessionStates();
      expect(states.length).toBe(2);
      expect(states[0]).toMatchObject({
        sessionId: expect.any(String),
        instanceContext: expect.objectContaining({
          n8nApiUrl: testContext.n8nApiUrl
        }),
        createdAt: expect.any(Date),
        lastAccess: expect.any(Date),
        expiresAt: expect.any(Date)
      });
    });

    it('should filter out sessions without state', () => {
      // Create session using direct API
      const sessionId = 'instance-test-abc123-uuid-filter-test';
      engine.restoreSession(sessionId, testContext);

      // Get states
      const states = engine.getAllSessionStates();
      expect(states.length).toBe(1);

      // All returned states should be non-null
      states.forEach(state => {
        expect(state).not.toBeNull();
      });
    });
  });

  describe('restoreSession()', () => {
    it('should create a new session with provided ID and context', () => {
      const sessionId = 'instance-test-abc123-uuid-test-session-id';
      const result = engine.restoreSession(sessionId, testContext);

      expect(result).toBe(true);
      expect(engine.getActiveSessions()).toContain(sessionId);
    });

    it('should be idempotent - return true for existing session', () => {
      const sessionId = 'instance-test-abc123-uuid-test-session-id2';

      // First restoration
      const result1 = engine.restoreSession(sessionId, testContext);
      expect(result1).toBe(true);

      // Second restoration with same ID
      const result2 = engine.restoreSession(sessionId, testContext);
      expect(result2).toBe(true);

      // Should still only have one session
      const sessionIds = engine.getActiveSessions();
      expect(sessionIds.filter(id => id === sessionId).length).toBe(1);
    });

    it('should return false for invalid session ID format', () => {
      const invalidSessionIds = [
        '', // Empty string
        'a'.repeat(101), // Too long (101 chars, exceeds max)
        "'; DROP TABLE sessions--", // SQL injection attempt (invalid characters: ', ;, space)
        '../../../etc/passwd', // Path traversal attempt (invalid characters: ., /)
        'has spaces here', // Invalid character (space)
        'special@chars#here' // Invalid characters (@, #)
      ];

      invalidSessionIds.forEach(sessionId => {
        const result = engine.restoreSession(sessionId, testContext);
        expect(result).toBe(false);
      });
    });

    it('should accept short session IDs (relaxed for MCP proxy compatibility)', () => {
      const validShortIds = [
        'short', // 5 chars - now valid
        'a', // 1 char - now valid
        'only-nineteen-chars', // 19 chars - now valid
        '12345' // 5 digit ID - now valid
      ];

      validShortIds.forEach(sessionId => {
        const result = engine.restoreSession(sessionId, testContext);
        expect(result).toBe(true);
        expect(engine.getActiveSessions()).toContain(sessionId);
      });
    });
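
    // Editorial sketch (not part of the original diff): taken together, the two
    // ID-format tests above are consistent with a "1-100 chars, alphanumeric plus
    // hyphen/underscore" rule. This regex is an assumption for illustration, not
    // the engine's actual validation code.
    const SESSION_ID_RE = /^[A-Za-z0-9_-]{1,100}$/;
    const isValidSessionIdSketch = (id: string): boolean => SESSION_ID_RE.test(id);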

    it('should return false for invalid instance context', () => {
      const sessionId = 'instance-test-abc123-uuid-test-session-id3';
      const invalidContext = {
        n8nApiUrl: 'not-a-valid-url', // Invalid URL
        n8nApiKey: 'test-key',
        instanceId: 'test'
      } as any;

      const result = engine.restoreSession(sessionId, invalidContext);
      expect(result).toBe(false);
    });

    it('should create session that can be retrieved with getSessionState', () => {
      const sessionId = 'instance-test-abc123-uuid-test-session-id4';
      engine.restoreSession(sessionId, testContext);

      const state = engine.getSessionState(sessionId);
      expect(state).not.toBeNull();
      expect(state?.sessionId).toBe(sessionId);
      expect(state?.instanceContext).toEqual(testContext);
    });
  });

  describe('deleteSession()', () => {
    it('should return false for non-existent session', () => {
      const result = engine.deleteSession('non-existent-session-id');
      expect(result).toBe(false);
    });

    it('should delete existing session and return true', () => {
      // Create a session using direct API
      const sessionId = 'instance-test-abc123-uuid-delete-test';
      engine.restoreSession(sessionId, testContext);

      // Delete the session
      const result = engine.deleteSession(sessionId);
      expect(result).toBe(true);

      // Session should no longer exist
      expect(engine.getActiveSessions()).not.toContain(sessionId);
      expect(engine.getSessionState(sessionId)).toBeNull();
    });

    it('should return false when trying to delete already deleted session', () => {
      // Create and delete session using direct API
      const sessionId = 'instance-test-abc123-uuid-double-delete-test';
      engine.restoreSession(sessionId, testContext);

      engine.deleteSession(sessionId);

      // Try to delete again
      const result = engine.deleteSession(sessionId);
      expect(result).toBe(false);
    });
  });

  describe('Integration workflows', () => {
    it('should support periodic backup workflow', () => {
      // Create multiple sessions using direct API
      for (let i = 0; i < 3; i++) {
        const sessionId = `instance-test${i}-abc123-uuid-backup-${i}`;
        engine.restoreSession(sessionId, {
          ...testContext,
          instanceId: `instance-${i}`
        });
      }

      // Simulate periodic backup
      const states = engine.getAllSessionStates();
      expect(states.length).toBe(3);

      // Each state should be serializable
      states.forEach(state => {
        const serialized = JSON.stringify(state);
        expect(serialized).toBeTruthy();

        const deserialized = JSON.parse(serialized);
        expect(deserialized.sessionId).toBe(state.sessionId);
      });
    });

    it('should support bulk restore workflow', () => {
      const sessionData = [
        { sessionId: 'instance-test1-abc123-uuid-bulk-session-1', context: { ...testContext, instanceId: 'user-1' } },
        { sessionId: 'instance-test2-abc123-uuid-bulk-session-2', context: { ...testContext, instanceId: 'user-2' } },
        { sessionId: 'instance-test3-abc123-uuid-bulk-session-3', context: { ...testContext, instanceId: 'user-3' } }
      ];

      // Restore all sessions
      for (const { sessionId, context } of sessionData) {
        const restored = engine.restoreSession(sessionId, context);
        expect(restored).toBe(true);
      }

      // Verify all sessions exist
      const sessionIds = engine.getActiveSessions();
      expect(sessionIds.length).toBe(3);

      sessionData.forEach(({ sessionId }) => {
        expect(sessionIds).toContain(sessionId);
      });
    });

    it('should support session lifecycle workflow (create → get → delete)', () => {
      // 1. Create session using direct API
      const sessionId = 'instance-test-abc123-uuid-lifecycle-test';
      engine.restoreSession(sessionId, testContext);

      // 2. Get session state
      const state = engine.getSessionState(sessionId);
      expect(state).not.toBeNull();

      // 3. Simulate saving to database (serialization test)
      const serialized = JSON.stringify(state);
      expect(serialized).toBeTruthy();

      // 4. Delete session
      const deleted = engine.deleteSession(sessionId);
      expect(deleted).toBe(true);

      // 5. Verify deletion
      expect(engine.getSessionState(sessionId)).toBeNull();
      expect(engine.getActiveSessions()).not.toContain(sessionId);
    });
  });
});
400
tests/unit/session-restoration-retry.test.ts
Normal file
@@ -0,0 +1,400 @@
/**
 * Unit tests for Session Restoration Retry Policy (Phase 4 - REQ-7)
 * Tests retry logic for failed session restoration attempts
 */
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { N8NMCPEngine } from '../../src/mcp-engine';
import { InstanceContext } from '../../src/types/instance-context';

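// Editorial sketch (not part of the original diff): the shape of the retry policy
// under test — up to `retries` additional attempts with a fixed delay, with the
// whole sequence bounded elsewhere by sessionRestorationTimeout. Names are
// illustrative assumptions; the real logic lives inside the engine.
async function withRetriesSketch<T>(
  fn: () => Promise<T>,
  retries = 0,   // default: no retries (opt-in), matching the tests below
  delayMs = 100  // default delay between attempts, matching the tests below
): Promise<T> {
  for (let attempt = 0; ; attempt++) {
    try {
      return await fn();
    } catch (err) {
      if (attempt >= retries) throw err; // retries exhausted: propagate the error
      await new Promise(resolve => setTimeout(resolve, delayMs));
    }
  }
}
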
describe('Session Restoration Retry Policy (Phase 4 - REQ-7)', () => {
  const testContext: InstanceContext = {
    n8nApiUrl: 'https://test.n8n.cloud',
    n8nApiKey: 'test-api-key',
    instanceId: 'test-instance'
  };

  beforeEach(() => {
    // Set required AUTH_TOKEN environment variable for testing
    process.env.AUTH_TOKEN = 'test-token-for-session-restoration-retry-testing-32chars';
    vi.clearAllMocks();
  });

  describe('Default behavior (no retries)', () => {
    it('should have 0 retries by default (opt-in)', async () => {
      let callCount = 0;
      const failingHook = vi.fn(async () => {
        callCount++;
        throw new Error('Database connection failed');
      });

      const engine = new N8NMCPEngine({
        onSessionNotFound: failingHook
        // No sessionRestorationRetries specified - should default to 0
      });

      // Note: Testing retry behavior requires HTTP request simulation
      // This is tested in integration tests
      // Here we verify configuration is accepted

      expect(() => {
        const sessionId = 'instance-test-abc123-uuid-default-retry';
        engine.restoreSession(sessionId, testContext);
      }).not.toThrow();
    });

    it('should accept an explicit 0 retries configuration', () => {
      const failingHook = vi.fn(async () => {
        throw new Error('Test error');
      });

      const engine = new N8NMCPEngine({
        onSessionNotFound: failingHook,
        sessionRestorationRetries: 0 // Explicit 0 retries
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });
  });

  describe('Retry configuration', () => {
    it('should accept custom retry count', () => {
      const hook = vi.fn(async () => testContext);

      const engine = new N8NMCPEngine({
        onSessionNotFound: hook,
        sessionRestorationRetries: 3
      });

      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });

    it('should accept custom retry delay', () => {
      const hook = vi.fn(async () => testContext);

      const engine = new N8NMCPEngine({
        onSessionNotFound: hook,
        sessionRestorationRetries: 2,
        sessionRestorationRetryDelay: 200 // 200ms delay
      });

      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });

    it('should use default delay of 100ms if not specified', () => {
      const hook = vi.fn(async () => testContext);

      const engine = new N8NMCPEngine({
        onSessionNotFound: hook,
        sessionRestorationRetries: 2
        // sessionRestorationRetryDelay not specified - should default to 100ms
      });

      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });
  });

  describe('Error classification', () => {
    it('should configure retry for transient errors', () => {
      let attemptCount = 0;
      const failTwiceThenSucceed = vi.fn(async () => {
        attemptCount++;
        if (attemptCount < 3) {
          throw new Error('Transient error');
        }
        return testContext;
      });

      const engine = new N8NMCPEngine({
        onSessionNotFound: failTwiceThenSucceed,
        sessionRestorationRetries: 3
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });

    it('should not configure retry for timeout errors', () => {
      const timeoutHook = vi.fn(async () => {
        const error = new Error('Timeout error');
        error.name = 'TimeoutError';
        throw error;
      });

      const engine = new N8NMCPEngine({
        onSessionNotFound: timeoutHook,
        sessionRestorationRetries: 3,
        sessionRestorationTimeout: 100
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });
  });

  describe('Timeout interaction', () => {
    it('should configure overall timeout for all retry attempts', () => {
      const slowHook = vi.fn(async () => {
        await new Promise(resolve => setTimeout(resolve, 200));
        return testContext;
      });

      const engine = new N8NMCPEngine({
        onSessionNotFound: slowHook,
        sessionRestorationRetries: 3,
        sessionRestorationTimeout: 500 // 500ms total for all attempts
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });

    it('should use default timeout of 5000ms if not specified', () => {
      const hook = vi.fn(async () => testContext);

      const engine = new N8NMCPEngine({
        onSessionNotFound: hook,
        sessionRestorationRetries: 2
        // sessionRestorationTimeout not specified - should default to 5000ms
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });
  });

  describe('Success scenarios', () => {
    it('should succeed on first attempt if hook succeeds', () => {
      const successHook = vi.fn(async () => testContext);

      const engine = new N8NMCPEngine({
        onSessionNotFound: successHook,
        sessionRestorationRetries: 3
      });

      // Should succeed
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });

    it('should succeed after retry if hook eventually succeeds', () => {
      let attemptCount = 0;
      const retryThenSucceed = vi.fn(async () => {
        attemptCount++;
        if (attemptCount === 1) {
          throw new Error('First attempt failed');
        }
        return testContext;
      });

      const engine = new N8NMCPEngine({
        onSessionNotFound: retryThenSucceed,
        sessionRestorationRetries: 2
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });
  });

  describe('Hook validation', () => {
    it('should validate context returned by hook after retry', () => {
      let attemptCount = 0;
      const invalidAfterRetry = vi.fn(async () => {
        attemptCount++;
        if (attemptCount === 1) {
          throw new Error('First attempt failed');
        }
        // Return invalid context after retry
        return {
          n8nApiUrl: 'not-a-valid-url', // Invalid URL
          n8nApiKey: 'test-key',
          instanceId: 'test'
        } as any;
      });

      const engine = new N8NMCPEngine({
        onSessionNotFound: invalidAfterRetry,
        sessionRestorationRetries: 2
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });

    it('should handle null return from hook after retry', () => {
      let attemptCount = 0;
      const nullAfterRetry = vi.fn(async () => {
        attemptCount++;
        if (attemptCount === 1) {
          throw new Error('First attempt failed');
        }
        return null; // Session not found after retry
      });

      const engine = new N8NMCPEngine({
        onSessionNotFound: nullAfterRetry,
        sessionRestorationRetries: 2
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });
  });

  describe('Edge cases', () => {
    it('should handle exactly max retries configuration', () => {
      let attemptCount = 0;
      const failExactlyMaxTimes = vi.fn(async () => {
        attemptCount++;
        if (attemptCount <= 2) {
          throw new Error('Failing');
        }
        return testContext;
      });

      const engine = new N8NMCPEngine({
        onSessionNotFound: failExactlyMaxTimes,
        sessionRestorationRetries: 2 // Will succeed on 3rd attempt (0, 1, 2 retries)
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });

    it('should handle zero delay between retries', () => {
      const hook = vi.fn(async () => testContext);

      const engine = new N8NMCPEngine({
        onSessionNotFound: hook,
        sessionRestorationRetries: 3,
        sessionRestorationRetryDelay: 0 // No delay
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });

    it('should handle very short timeout', () => {
      const hook = vi.fn(async () => testContext);

      const engine = new N8NMCPEngine({
        onSessionNotFound: hook,
        sessionRestorationRetries: 3,
        sessionRestorationTimeout: 1 // 1ms timeout
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });
  });

  describe('Integration with lifecycle events', () => {
    it('should emit onSessionRestored after successful retry', () => {
      let attemptCount = 0;
      const retryThenSucceed = vi.fn(async () => {
        attemptCount++;
        if (attemptCount === 1) {
          throw new Error('First attempt failed');
        }
        return testContext;
      });

      const onSessionRestored = vi.fn();

      const engine = new N8NMCPEngine({
        onSessionNotFound: retryThenSucceed,
        sessionRestorationRetries: 2,
        sessionEvents: {
          onSessionRestored
        }
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });

    it('should not emit events if all retries fail', () => {
      const alwaysFail = vi.fn(async () => {
        throw new Error('Always fails');
      });

      const onSessionRestored = vi.fn();

      const engine = new N8NMCPEngine({
        onSessionNotFound: alwaysFail,
        sessionRestorationRetries: 2,
        sessionEvents: {
          onSessionRestored
        }
      });

      // Configuration accepted
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });
  });

  describe('Backward compatibility', () => {
    it('should work without retry configuration (backward compatible)', () => {
      const hook = vi.fn(async () => testContext);

      const engine = new N8NMCPEngine({
        onSessionNotFound: hook
        // No retry configuration - should work as before
      });

      // Should work
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });

    it('should work with only restoration hook configured', () => {
      const hook = vi.fn(async () => testContext);

      const engine = new N8NMCPEngine({
        onSessionNotFound: hook,
        sessionRestorationTimeout: 5000
        // No retry configuration
      });

      // Should work
      expect(() => {
        engine.restoreSession('test-session', testContext);
      }).not.toThrow();
    });
  });
});
Some files were not shown because too many files have changed in this diff