Mirror of https://github.com/czlonkowski/n8n-mcp.git
Synced 2026-01-30 06:22:04 +00:00
Fixed a critical bug where workflow mutation data was corrupted during serialization to Supabase.

The recursive toSnakeCase() function was converting nested workflow data, mangling:
- Connection keys (node names like "Webhook" → "_webhook")
- Node field names (typeVersion → type_version)

Solution: Replace recursive conversion with selective mutationToSupabaseFormat() that only converts top-level field names to snake_case while preserving nested workflow data exactly as-is.

Impact:
- 98.9% of workflow mutations had corrupted data
- Deployability rate improved from ~21% to ~68%

Changes:
- src/telemetry/batch-processor.ts: New selective converter
- tests/unit/telemetry/batch-processor.test.ts: 3 new regression tests

Conceived by Romuald Członkowski - https://www.aiadvisors.pl/en

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-authored-by: Romuald Członkowski <romualdczlonkowski@MacBook-Pro-Romuald.local>
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
committed by GitHub
parent 808088f25e
commit f10772a9d2
37 CHANGELOG.md
@@ -7,6 +7,43 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]

## [2.31.4] - 2026-01-02

### Fixed

**Workflow Data Mangled During Serialization: snake_case Conversion (Issue #517)**

Fixed a critical bug where workflow mutation data was corrupted during serialization to Supabase, making 98.9% of collected workflow data invalid for n8n API operations.

**Problem:**

The `toSnakeCase()` function in `batch-processor.ts` was applied **recursively** to the entire mutation object, including nested workflow data that should be preserved exactly as-is:

- **Connection keys mangled**: Node names like `"Webhook"` became `"_webhook"`, `"AI Agent"` became `"_a_i _agent"`
- **Node field names mangled**: n8n camelCase fields like `typeVersion`, `webhookId`, `onError` became `type_version`, `webhook_id`, `on_error`
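
For illustration, here is a minimal standalone sketch that runs a made-up workflow fragment through the removed recursive converter (the function is reproduced from the `batch-processor.ts` diff further down; the node names and fields are sample data only):

```typescript
// OLD recursive converter, as removed in this commit.
function toSnakeCase(obj: any): any {
  if (obj === null || obj === undefined) return obj;
  if (Array.isArray(obj)) return obj.map(toSnakeCase);
  if (typeof obj !== 'object') return obj;

  const result: any = {};
  for (const key in obj) {
    if (obj.hasOwnProperty(key)) {
      const snakeKey = key.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
      result[snakeKey] = toSnakeCase(obj[key]); // recursion into nested data is the bug
    }
  }
  return result;
}

// Illustrative workflow fragment (not a real telemetry record).
const workflowAfter = {
  nodes: [{ name: 'Webhook', typeVersion: 2, webhookId: 'abc123' }],
  connections: { 'AI Agent': { main: [[{ node: 'HTTP Request', type: 'main', index: 0 }]] } },
};

console.log(Object.keys(toSnakeCase(workflowAfter).connections)); // [ '_a_i _agent' ]
console.log(toSnakeCase(workflowAfter).nodes[0]);                 // { name: 'Webhook', type_version: 2, webhook_id: 'abc123' }
```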

**Root Cause:**

```javascript
// Old code - recursive conversion corrupted nested data
result[snakeKey] = toSnakeCase(obj[key]); // WRONG
```

**Solution:**

Replaced recursive `toSnakeCase()` with selective `mutationToSupabaseFormat()` that:

- Converts **only** top-level field names to snake_case (for Supabase columns)
- Preserves all nested data (workflow JSON, operations, validations) **exactly as-is**

```javascript
// New code - preserves nested workflow structure
for (const [key, value] of Object.entries(mutation)) {
  result[keyToSnakeCase(key)] = value; // Value preserved as-is
}
```
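
For comparison, a sketch of the selective approach on an abbreviated sample record; `keyToSnakeCase` mirrors the helper added in this commit, and the record shape below is illustrative only:

```typescript
const keyToSnakeCase = (key: string): string =>
  key.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);

// Abbreviated sample mutation record.
const mutation = {
  userId: 'user1',
  sessionId: 'session1',
  workflowAfter: {
    nodes: [{ name: 'Webhook', typeVersion: 2 }],
    connections: { 'AI Agent': { main: [] } },
  },
  durationMs: 100,
};

const row: Record<string, any> = {};
for (const [key, value] of Object.entries(mutation)) {
  row[keyToSnakeCase(key)] = value; // nested objects pass through unmodified
}

// row = {
//   user_id: 'user1',
//   session_id: 'session1',
//   workflow_after: { nodes: [{ name: 'Webhook', typeVersion: 2 }], connections: { 'AI Agent': { main: [] } } },
//   duration_ms: 100,
// }
```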

**Impact:**

- Workflow mutation data now maintains n8n API compatibility
- Deployability rate improved from ~21% to ~68%
- Added 3 regression tests to prevent future occurrences

## [2.31.3] - 2025-12-26

### Fixed
package.json

@@ -1,6 +1,6 @@
 {
   "name": "n8n-mcp",
-  "version": "2.31.3",
+  "version": "2.31.4",
   "description": "Integration between n8n workflow automation and Model Context Protocol (MCP)",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
src/telemetry/batch-processor.ts

@@ -9,23 +9,34 @@ import { TelemetryError, TelemetryErrorType, TelemetryCircuitBreaker } from './t
 import { logger } from '../utils/logger';

 /**
- * Convert camelCase object keys to snake_case
- * Needed because Supabase PostgREST doesn't auto-convert
- */
-function toSnakeCase(obj: any): any {
-  if (obj === null || obj === undefined) return obj;
-  if (Array.isArray(obj)) return obj.map(toSnakeCase);
-  if (typeof obj !== 'object') return obj;
-
-  const result: any = {};
-  for (const key in obj) {
-    if (obj.hasOwnProperty(key)) {
-      // Convert camelCase to snake_case
-      const snakeKey = key.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
-      // Recursively convert nested objects
-      result[snakeKey] = toSnakeCase(obj[key]);
-    }
-  }
+ * Convert camelCase key to snake_case
+ */
+function keyToSnakeCase(key: string): string {
+  return key.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
+}
+
+/**
+ * Convert WorkflowMutationRecord to Supabase-compatible format.
+ *
+ * IMPORTANT: Only converts top-level field names to snake_case.
+ * Nested workflow data (workflowBefore, workflowAfter, operations, etc.)
+ * is preserved EXACTLY as-is to maintain n8n API compatibility.
+ *
+ * The Supabase workflow_mutations table stores workflow_before and
+ * workflow_after as JSONB columns, which preserve the original structure.
+ * Only the top-level columns (user_id, session_id, etc.) require snake_case.
+ *
+ * Issue #517: Previously this used recursive conversion which mangled:
+ * - Connection keys (node names like "Webhook" → "_webhook")
+ * - Node field names (typeVersion → type_version)
+ */
+function mutationToSupabaseFormat(mutation: WorkflowMutationRecord): Record<string, any> {
+  const result: Record<string, any> = {};
+
+  for (const [key, value] of Object.entries(mutation)) {
+    result[keyToSnakeCase(key)] = value;
+  }
+
   return result;
 }

@@ -266,7 +277,7 @@ export class TelemetryBatchProcessor {
     for (const batch of batches) {
       const result = await this.executeWithRetry(async () => {
        // Convert camelCase to snake_case for Supabase
-        const snakeCaseBatch = batch.map(mutation => toSnakeCase(mutation));
+        const snakeCaseBatch = batch.map(mutation => mutationToSupabaseFormat(mutation));

        const { error } = await this.supabase!
          .from('workflow_mutations')
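
The guarantee the new regression tests (below) exercise through `flush()` can be stated as one invariant: converting a mutation record may rename only its top-level keys, and every value, including nested workflow JSON, must remain deeply equal to the original. A sketch of that check; `convert` is a stand-in parameter, since `mutationToSupabaseFormat` is module-private in `batch-processor.ts` and not exported:

```typescript
import { deepStrictEqual } from 'node:assert';

type Converter = (mutation: Record<string, any>) => Record<string, any>;

// Invariant: only top-level keys are renamed; each value is passed through untouched.
function checkPreservesNestedData(convert: Converter, mutation: Record<string, any>): void {
  const row = convert(mutation);
  for (const [key, value] of Object.entries(mutation)) {
    const snakeKey = key.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
    deepStrictEqual(row[snakeKey], value); // nested workflow data must be byte-for-byte equal
  }
}
```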
tests/unit/telemetry/batch-processor.test.ts

@@ -1,7 +1,9 @@
 import { describe, it, expect, beforeEach, vi, afterEach, beforeAll, afterAll, type MockInstance } from 'vitest';
 import { TelemetryBatchProcessor } from '../../../src/telemetry/batch-processor';
-import { TelemetryEvent, WorkflowTelemetry, TELEMETRY_CONFIG } from '../../../src/telemetry/telemetry-types';
+import { TelemetryEvent, WorkflowTelemetry, WorkflowMutationRecord, TELEMETRY_CONFIG } from '../../../src/telemetry/telemetry-types';
 import { TelemetryError, TelemetryErrorType } from '../../../src/telemetry/telemetry-error';
+import { IntentClassification, MutationToolName } from '../../../src/telemetry/mutation-types';
+import { AddNodeOperation } from '../../../src/types/workflow-diff';
 import type { SupabaseClient } from '@supabase/supabase-js';

 // Mock logger to avoid console output in tests
@@ -679,4 +681,258 @@ describe('TelemetryBatchProcessor', () => {
       expect(mockProcessExit).toHaveBeenCalledWith(0);
     });
   });
+
+  describe('Issue #517: workflow data preservation', () => {
+    // This test verifies that workflow mutation data is NOT recursively converted to snake_case
+    // Previously, the toSnakeCase function was applied recursively which caused:
+    // - Connection keys like "Webhook" to become "_webhook"
+    // - Node fields like "typeVersion" to become "type_version"
+
+    it('should preserve connection keys exactly as-is (node names)', async () => {
+      const mutation: WorkflowMutationRecord = {
+        userId: 'user1',
+        sessionId: 'session1',
+        workflowBefore: {
+          nodes: [],
+          connections: {}
+        },
+        workflowAfter: {
+          nodes: [
+            { id: '1', name: 'Webhook', type: 'n8n-nodes-base.webhook', typeVersion: 1, position: [0, 0], parameters: {} }
+          ],
+          // Connection keys are NODE NAMES - must be preserved exactly
+          connections: {
+            'Webhook': { main: [[{ node: 'AI Agent', type: 'main', index: 0 }]] },
+            'AI Agent': { main: [[{ node: 'HTTP Request', type: 'main', index: 0 }]] },
+            'HTTP Request': { main: [[{ node: 'Send Email', type: 'main', index: 0 }]] }
+          }
+        },
+        workflowHashBefore: 'hash1',
+        workflowHashAfter: 'hash2',
+        userIntent: 'Test',
+        intentClassification: IntentClassification.ADD_FUNCTIONALITY,
+        toolName: MutationToolName.UPDATE_PARTIAL,
+        operations: [],
+        operationCount: 0,
+        operationTypes: [],
+        validationImproved: null,
+        errorsResolved: 0,
+        errorsIntroduced: 0,
+        nodesAdded: 1,
+        nodesRemoved: 0,
+        nodesModified: 0,
+        connectionsAdded: 3,
+        connectionsRemoved: 0,
+        propertiesChanged: 0,
+        mutationSuccess: true,
+        durationMs: 100
+      };
+
+      let capturedData: any = null;
+      vi.mocked(mockSupabase.from).mockImplementation((table) => ({
+        insert: vi.fn().mockImplementation((data) => {
+          if (table === 'workflow_mutations') {
+            capturedData = data;
+          }
+          return Promise.resolve(createMockSupabaseResponse());
+        }),
+        url: { href: '' },
+        headers: {},
+        select: vi.fn(),
+        upsert: vi.fn(),
+        update: vi.fn(),
+        delete: vi.fn()
+      } as any));
+
+      await batchProcessor.flush(undefined, undefined, [mutation]);
+
+      expect(capturedData).toBeDefined();
+      expect(capturedData).toHaveLength(1);
+
+      const savedMutation = capturedData[0];
+
+      // Top-level keys should be snake_case for Supabase
+      expect(savedMutation).toHaveProperty('user_id');
+      expect(savedMutation).toHaveProperty('session_id');
+      expect(savedMutation).toHaveProperty('workflow_after');
+
+      // Connection keys should be preserved EXACTLY (not "_webhook", "_a_i _agent", etc.)
+      const connections = savedMutation.workflow_after.connections;
+      expect(connections).toHaveProperty('Webhook'); // NOT "_webhook"
+      expect(connections).toHaveProperty('AI Agent'); // NOT "_a_i _agent"
+      expect(connections).toHaveProperty('HTTP Request'); // NOT "_h_t_t_p _request"
+    });
+
+    it('should preserve node field names in camelCase', async () => {
+      const mutation: WorkflowMutationRecord = {
+        userId: 'user1',
+        sessionId: 'session1',
+        workflowBefore: { nodes: [], connections: {} },
+        workflowAfter: {
+          nodes: [
+            {
+              id: '1',
+              name: 'Webhook',
+              type: 'n8n-nodes-base.webhook',
+              // These fields MUST remain in camelCase for n8n API compatibility
+              typeVersion: 2,
+              webhookId: 'abc123',
+              onError: 'continueOnFail',
+              alwaysOutputData: true,
+              continueOnFail: false,
+              retryOnFail: true,
+              maxTries: 3,
+              notesInFlow: true,
+              waitBetweenTries: 1000,
+              executeOnce: false,
+              position: [100, 200],
+              parameters: {}
+            }
+          ],
+          connections: {}
+        },
+        workflowHashBefore: 'hash1',
+        workflowHashAfter: 'hash2',
+        userIntent: 'Test',
+        intentClassification: IntentClassification.ADD_FUNCTIONALITY,
+        toolName: MutationToolName.UPDATE_PARTIAL,
+        operations: [],
+        operationCount: 0,
+        operationTypes: [],
+        validationImproved: null,
+        errorsResolved: 0,
+        errorsIntroduced: 0,
+        nodesAdded: 1,
+        nodesRemoved: 0,
+        nodesModified: 0,
+        connectionsAdded: 0,
+        connectionsRemoved: 0,
+        propertiesChanged: 0,
+        mutationSuccess: true,
+        durationMs: 100
+      };
+
+      let capturedData: any = null;
+      vi.mocked(mockSupabase.from).mockImplementation((table) => ({
+        insert: vi.fn().mockImplementation((data) => {
+          if (table === 'workflow_mutations') {
+            capturedData = data;
+          }
+          return Promise.resolve(createMockSupabaseResponse());
+        }),
+        url: { href: '' },
+        headers: {},
+        select: vi.fn(),
+        upsert: vi.fn(),
+        update: vi.fn(),
+        delete: vi.fn()
+      } as any));
+
+      await batchProcessor.flush(undefined, undefined, [mutation]);
+
+      expect(capturedData).toBeDefined();
+      const savedNode = capturedData[0].workflow_after.nodes[0];
+
+      // Node fields should be preserved in camelCase (NOT snake_case)
+      expect(savedNode).toHaveProperty('typeVersion'); // NOT type_version
+      expect(savedNode).toHaveProperty('webhookId'); // NOT webhook_id
+      expect(savedNode).toHaveProperty('onError'); // NOT on_error
+      expect(savedNode).toHaveProperty('alwaysOutputData'); // NOT always_output_data
+      expect(savedNode).toHaveProperty('continueOnFail'); // NOT continue_on_fail
+      expect(savedNode).toHaveProperty('retryOnFail'); // NOT retry_on_fail
+      expect(savedNode).toHaveProperty('maxTries'); // NOT max_tries
+      expect(savedNode).toHaveProperty('notesInFlow'); // NOT notes_in_flow
+      expect(savedNode).toHaveProperty('waitBetweenTries'); // NOT wait_between_tries
+      expect(savedNode).toHaveProperty('executeOnce'); // NOT execute_once
+
+      // Verify values are preserved
+      expect(savedNode.typeVersion).toBe(2);
+      expect(savedNode.webhookId).toBe('abc123');
+      expect(savedNode.maxTries).toBe(3);
+    });
+
+    it('should convert only top-level mutation record fields to snake_case', async () => {
+      const mutation: WorkflowMutationRecord = {
+        userId: 'user1',
+        sessionId: 'session1',
+        workflowBefore: { nodes: [], connections: {} },
+        workflowAfter: { nodes: [], connections: {} },
+        workflowHashBefore: 'hash1',
+        workflowHashAfter: 'hash2',
+        workflowStructureHashBefore: 'struct1',
+        workflowStructureHashAfter: 'struct2',
+        isTrulySuccessful: true,
+        userIntent: 'Test intent',
+        intentClassification: IntentClassification.ADD_FUNCTIONALITY,
+        toolName: MutationToolName.UPDATE_PARTIAL,
+        operations: [{ type: 'addNode', node: { name: 'Test', type: 'n8n-nodes-base.set', position: [0, 0] } } as AddNodeOperation],
+        operationCount: 1,
+        operationTypes: ['addNode'],
+        validationBefore: { valid: false, errors: [] },
+        validationAfter: { valid: true, errors: [] },
+        validationImproved: true,
+        errorsResolved: 1,
+        errorsIntroduced: 0,
+        nodesAdded: 1,
+        nodesRemoved: 0,
+        nodesModified: 0,
+        connectionsAdded: 0,
+        connectionsRemoved: 0,
+        propertiesChanged: 0,
+        mutationSuccess: true,
+        mutationError: undefined,
+        durationMs: 150
+      };
+
+      let capturedData: any = null;
+      vi.mocked(mockSupabase.from).mockImplementation((table) => ({
+        insert: vi.fn().mockImplementation((data) => {
+          if (table === 'workflow_mutations') {
+            capturedData = data;
+          }
+          return Promise.resolve(createMockSupabaseResponse());
+        }),
+        url: { href: '' },
+        headers: {},
+        select: vi.fn(),
+        upsert: vi.fn(),
+        update: vi.fn(),
+        delete: vi.fn()
+      } as any));
+
+      await batchProcessor.flush(undefined, undefined, [mutation]);
+
+      expect(capturedData).toBeDefined();
+      const saved = capturedData[0];
+
+      // Top-level fields should be converted to snake_case
+      expect(saved).toHaveProperty('user_id', 'user1');
+      expect(saved).toHaveProperty('session_id', 'session1');
+      expect(saved).toHaveProperty('workflow_before');
+      expect(saved).toHaveProperty('workflow_after');
+      expect(saved).toHaveProperty('workflow_hash_before', 'hash1');
+      expect(saved).toHaveProperty('workflow_hash_after', 'hash2');
+      expect(saved).toHaveProperty('workflow_structure_hash_before', 'struct1');
+      expect(saved).toHaveProperty('workflow_structure_hash_after', 'struct2');
+      expect(saved).toHaveProperty('is_truly_successful', true);
+      expect(saved).toHaveProperty('user_intent', 'Test intent');
+      expect(saved).toHaveProperty('intent_classification');
+      expect(saved).toHaveProperty('tool_name');
+      expect(saved).toHaveProperty('operation_count', 1);
+      expect(saved).toHaveProperty('operation_types');
+      expect(saved).toHaveProperty('validation_before');
+      expect(saved).toHaveProperty('validation_after');
+      expect(saved).toHaveProperty('validation_improved', true);
+      expect(saved).toHaveProperty('errors_resolved', 1);
+      expect(saved).toHaveProperty('errors_introduced', 0);
+      expect(saved).toHaveProperty('nodes_added', 1);
+      expect(saved).toHaveProperty('nodes_removed', 0);
+      expect(saved).toHaveProperty('nodes_modified', 0);
+      expect(saved).toHaveProperty('connections_added', 0);
+      expect(saved).toHaveProperty('connections_removed', 0);
+      expect(saved).toHaveProperty('properties_changed', 0);
+      expect(saved).toHaveProperty('mutation_success', true);
+      expect(saved).toHaveProperty('duration_ms', 150);
+    });
+  });
 });