refactor: streamline test suite - cut 33 files, enable parallel execution (11.9x speedup)

Remove duplicate, low-value, and fragmented test files while preserving
all meaningful coverage. Enable parallel test execution and remove
the entire benchmark infrastructure.

Key changes:
- Consolidate workflow-validator tests (13 files -> 3)
- Consolidate config-validator tests (9 files -> 3)
- Consolidate telemetry tests (11 files -> 6)
- Merge AI validator tests (2 files -> 1)
- Remove example/demo test files, mock-testing files, and already-skipped tests
- Remove benchmark infrastructure (10 files, CI workflow, 4 npm scripts)
- Enable parallel test execution (remove singleThread: true)
- Remove retry:2 that was masking flaky tests
- Slim CI publish-results job

Results: 224 -> 191 test files, 4690 -> 4303 tests, 121K -> 106K lines
Local runtime: 319s -> 27s (11.9x speedup)

Conceived by Romuald Członkowski - www.aiadvisors.pl/en

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
czlonkowski
2026-03-26 23:41:06 +01:00
parent 07bd1d4cc2
commit fdc37efde7
62 changed files with 2998 additions and 20806 deletions

View File

@@ -1,562 +0,0 @@
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { z } from 'zod';
import { TelemetryEventValidator, telemetryEventSchema, workflowTelemetrySchema } from '../../../src/telemetry/event-validator';
import { TelemetryEvent, WorkflowTelemetry } from '../../../src/telemetry/telemetry-types';
// Mock logger to avoid console output in tests.
// NOTE: vi.mock is hoisted by vitest above the imports, so this stub is in
// place before the module under test loads the real logger.
vi.mock('../../../src/utils/logger', () => ({
  logger: {
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
  }
}));
describe('TelemetryEventValidator', () => {
// Fresh validator per test so internal validation statistics never leak
// between cases; mocked logger call counts are reset for the same reason.
let validator: TelemetryEventValidator;
beforeEach(() => {
  validator = new TelemetryEventValidator();
  vi.clearAllMocks();
});
// Covers the main validateEvent() contract: schema acceptance for known event
// types, sanitization of sensitive data, and rejection (null return) for
// invalid payloads.
describe('validateEvent()', () => {
  it('should validate a basic valid event', () => {
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'tool_used',
      properties: { tool: 'httpRequest', success: true, duration: 500 }
    };
    const result = validator.validateEvent(event);
    expect(result).toEqual(event);
  });
  it('should validate event with specific schema for tool_used', () => {
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'tool_used',
      properties: { tool: 'httpRequest', success: true, duration: 500 }
    };
    const result = validator.validateEvent(event);
    expect(result).not.toBeNull();
    expect(result?.properties.tool).toBe('httpRequest');
    expect(result?.properties.success).toBe(true);
    expect(result?.properties.duration).toBe(500);
  });
  it('should validate search_query event with specific schema', () => {
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'search_query',
      properties: {
        query: 'test query',
        resultsFound: 5,
        searchType: 'nodes',
        hasResults: true,
        isZeroResults: false
      }
    };
    const result = validator.validateEvent(event);
    expect(result).not.toBeNull();
    expect(result?.properties.query).toBe('test query');
    expect(result?.properties.resultsFound).toBe(5);
    expect(result?.properties.hasResults).toBe(true);
  });
  it('should validate performance_metric event with specific schema', () => {
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'performance_metric',
      properties: {
        operation: 'database_query',
        duration: 1500,
        isSlow: true,
        isVerySlow: false,
        metadata: { table: 'nodes' }
      }
    };
    const result = validator.validateEvent(event);
    expect(result).not.toBeNull();
    expect(result?.properties.operation).toBe('database_query');
    expect(result?.properties.duration).toBe(1500);
    expect(result?.properties.isSlow).toBe(true);
  });
  it('should sanitize sensitive data from properties', () => {
    // URLs, e-mails, and long key-like strings inside values are replaced with
    // placeholders; keys with sensitive names are dropped entirely.
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'generic_event',
      properties: {
        description: 'Visit https://example.com/secret and user@example.com with key abcdef123456789012345678901234567890',
        apiKey: 'super-secret-key-12345678901234567890',
        normalProp: 'normal value'
      }
    };
    const result = validator.validateEvent(event);
    expect(result).not.toBeNull();
    expect(result?.properties.description).toBe('Visit [URL] and [EMAIL] with key [KEY]');
    expect(result?.properties.normalProp).toBe('normal value');
    expect(result?.properties).not.toHaveProperty('apiKey'); // Should be filtered out
  });
  it('should handle nested object sanitization with depth limit', () => {
    // Objects nested beyond the depth limit collapse to the '[NESTED]' marker;
    // shallower values are still string-sanitized.
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'nested_event',
      properties: {
        nested: {
          level1: {
            level2: {
              level3: {
                level4: 'should be truncated',
                apiKey: 'secret123',
                description: 'Visit https://example.com'
              },
              description: 'Visit https://another.com'
            }
          }
        }
      }
    };
    const result = validator.validateEvent(event);
    expect(result).not.toBeNull();
    expect(result?.properties.nested.level1.level2.level3).toBe('[NESTED]');
    expect(result?.properties.nested.level1.level2.description).toBe('Visit [URL]');
  });
  it('should handle array sanitization with size limit', () => {
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'array_event',
      properties: {
        items: Array.from({ length: 15 }, (_, i) => ({
          id: i,
          description: 'Visit https://example.com',
          value: `item-${i}`
        }))
      }
    };
    const result = validator.validateEvent(event);
    expect(result).not.toBeNull();
    expect(Array.isArray(result?.properties.items)).toBe(true);
    expect(result?.properties.items.length).toBe(10); // Should be limited to 10
  });
  it('should reject events with invalid user_id', () => {
    const event: TelemetryEvent = {
      user_id: '', // Empty string
      event: 'test_event',
      properties: {}
    };
    const result = validator.validateEvent(event);
    expect(result).toBeNull();
  });
  it('should reject events with invalid event name', () => {
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'invalid-event-name!@#', // Invalid characters
      properties: {}
    };
    const result = validator.validateEvent(event);
    expect(result).toBeNull();
  });
  it('should reject tool_used event with invalid properties', () => {
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'tool_used',
      properties: {
        tool: 'test',
        success: 'not-a-boolean', // Should be boolean
        duration: -1 // Should be positive
      }
    };
    const result = validator.validateEvent(event);
    expect(result).toBeNull();
  });
  it('should filter out sensitive keys from properties', () => {
    // Every key name matching the sensitive-key list is removed; benign keys
    // of any primitive type survive untouched.
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'sensitive_event',
      properties: {
        password: 'secret123',
        token: 'bearer-token',
        apikey: 'api-key-value',
        secret: 'secret-value',
        credential: 'cred-value',
        auth: 'auth-header',
        url: 'https://example.com',
        endpoint: 'api.example.com',
        host: 'localhost',
        database: 'prod-db',
        normalProp: 'safe-value',
        count: 42,
        enabled: true
      }
    };
    const result = validator.validateEvent(event);
    expect(result).not.toBeNull();
    expect(result?.properties).not.toHaveProperty('password');
    expect(result?.properties).not.toHaveProperty('token');
    expect(result?.properties).not.toHaveProperty('apikey');
    expect(result?.properties).not.toHaveProperty('secret');
    expect(result?.properties).not.toHaveProperty('credential');
    expect(result?.properties).not.toHaveProperty('auth');
    expect(result?.properties).not.toHaveProperty('url');
    expect(result?.properties).not.toHaveProperty('endpoint');
    expect(result?.properties).not.toHaveProperty('host');
    expect(result?.properties).not.toHaveProperty('database');
    expect(result?.properties.normalProp).toBe('safe-value');
    expect(result?.properties.count).toBe(42);
    expect(result?.properties.enabled).toBe(true);
  });
  it('should handle validation_details event schema', () => {
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'validation_details',
      properties: {
        nodeType: 'nodes-base.httpRequest',
        errorType: 'required_field_missing',
        errorCategory: 'validation_error',
        details: { field: 'url' }
      }
    };
    const result = validator.validateEvent(event);
    expect(result).not.toBeNull();
    expect(result?.properties.nodeType).toBe('nodes-base.httpRequest');
    expect(result?.properties.errorType).toBe('required_field_missing');
  });
  it('should handle null and undefined values', () => {
    // undefined is normalized to null during sanitization.
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'null_event',
      properties: {
        nullValue: null,
        undefinedValue: undefined,
        normalValue: 'test'
      }
    };
    const result = validator.validateEvent(event);
    expect(result).not.toBeNull();
    expect(result?.properties.nullValue).toBeNull();
    expect(result?.properties.undefinedValue).toBeNull();
    expect(result?.properties.normalValue).toBe('test');
  });
});
// validateWorkflow(): accepts structurally sound workflow telemetry, returns
// null for payloads that exceed size limits or carry invalid enum values.
describe('validateWorkflow()', () => {
  // Shared starting point for the rejection scenarios below; each test spreads
  // it and overrides exactly one offending field.
  const baseWorkflow: WorkflowTelemetry = {
    user_id: 'user123',
    workflow_hash: 'hash123',
    node_count: 3,
    node_types: ['webhook'],
    has_trigger: true,
    has_webhook: true,
    complexity: 'complex',
    sanitized_workflow: { nodes: [], connections: {} }
  };
  it('should validate a valid workflow', () => {
    const workflow: WorkflowTelemetry = {
      user_id: 'user123',
      workflow_hash: 'hash123',
      node_count: 3,
      node_types: ['webhook', 'httpRequest', 'set'],
      has_trigger: true,
      has_webhook: true,
      complexity: 'medium',
      sanitized_workflow: {
        nodes: [
          { id: '1', type: 'webhook' },
          { id: '2', type: 'httpRequest' },
          { id: '3', type: 'set' }
        ],
        connections: { '1': { main: [[{ node: '2', type: 'main', index: 0 }]] } }
      }
    };
    // A valid payload passes through unchanged.
    expect(validator.validateWorkflow(workflow)).toEqual(workflow);
  });
  it('should reject workflow with too many nodes', () => {
    const oversized: WorkflowTelemetry = {
      ...baseWorkflow,
      node_count: 1001 // over limit
    };
    expect(validator.validateWorkflow(oversized)).toBeNull();
  });
  it('should reject workflow with invalid complexity', () => {
    const badComplexity = {
      ...baseWorkflow,
      complexity: 'invalid' as any // not a recognized complexity value
    };
    expect(validator.validateWorkflow(badComplexity)).toBeNull();
  });
  it('should reject workflow with too many node types', () => {
    const tooManyTypes: WorkflowTelemetry = {
      ...baseWorkflow,
      node_types: Array.from({ length: 101 }, (_, i) => `node-${i}`) // over limit
    };
    expect(validator.validateWorkflow(tooManyTypes)).toBeNull();
  });
});
// getStats(): success/error counters and a guarded error-rate computation.
describe('getStats()', () => {
  it('should track validation statistics', () => {
    const goodEvent: TelemetryEvent = {
      user_id: 'user123',
      event: 'valid_event',
      properties: {}
    };
    const badEvent: TelemetryEvent = {
      user_id: '', // empty user_id fails validation
      event: 'invalid_event',
      properties: {}
    };
    // Feed two successes and one failure through the validator.
    for (const candidate of [goodEvent, goodEvent, badEvent]) {
      validator.validateEvent(candidate);
    }
    const stats = validator.getStats();
    expect(stats.successes).toBe(2);
    expect(stats.errors).toBe(1);
    expect(stats.total).toBe(3);
    expect(stats.errorRate).toBeCloseTo(0.333, 3);
  });
  it('should handle division by zero in error rate', () => {
    // No events validated yet: the rate must be 0 rather than NaN.
    expect(validator.getStats().errorRate).toBe(0);
  });
});
// resetStats(): zeroes out all counters after activity.
describe('resetStats()', () => {
  it('should reset validation statistics', () => {
    // Record one successful validation, then wipe the counters.
    validator.validateEvent({
      user_id: 'user123',
      event: 'valid_event',
      properties: {}
    });
    validator.resetStats();
    const stats = validator.getStats();
    expect(stats.successes).toBe(0);
    expect(stats.errors).toBe(0);
    expect(stats.total).toBe(0);
    expect(stats.errorRate).toBe(0);
  });
});
// Direct exercises of the exported zod schemas (bypassing the validator) to
// pin field-level constraints: datetime format, length limits, name pattern,
// and node-array size caps.
describe('Schema validation', () => {
  describe('telemetryEventSchema', () => {
    it('should validate with created_at timestamp', () => {
      const event = {
        user_id: 'user123',
        event: 'test_event',
        properties: {},
        created_at: '2024-01-01T00:00:00Z'
      };
      const result = telemetryEventSchema.safeParse(event);
      expect(result.success).toBe(true);
    });
    it('should reject invalid datetime format', () => {
      const event = {
        user_id: 'user123',
        event: 'test_event',
        properties: {},
        created_at: 'invalid-date'
      };
      const result = telemetryEventSchema.safeParse(event);
      expect(result.success).toBe(false);
    });
    it('should enforce user_id length limits', () => {
      // 65 characters — one past the allowed maximum of 64.
      const longUserId = 'a'.repeat(65);
      const event = {
        user_id: longUserId,
        event: 'test_event',
        properties: {}
      };
      const result = telemetryEventSchema.safeParse(event);
      expect(result.success).toBe(false);
    });
    it('should enforce event name regex pattern', () => {
      const event = {
        user_id: 'user123',
        event: 'invalid event name with spaces!',
        properties: {}
      };
      const result = telemetryEventSchema.safeParse(event);
      expect(result.success).toBe(false);
    });
  });
  describe('workflowTelemetrySchema', () => {
    it('should enforce node array size limits', () => {
      const workflow = {
        user_id: 'user123',
        workflow_hash: 'hash123',
        node_count: 3,
        node_types: ['test'],
        has_trigger: true,
        has_webhook: false,
        complexity: 'simple',
        sanitized_workflow: {
          nodes: Array.from({ length: 1001 }, (_, i) => ({ id: i })), // Over limit
          connections: {}
        }
      };
      const result = workflowTelemetrySchema.safeParse(workflow);
      expect(result.success).toBe(false);
    });
    it('should validate with optional created_at', () => {
      const workflow = {
        user_id: 'user123',
        workflow_hash: 'hash123',
        node_count: 1,
        node_types: ['webhook'],
        has_trigger: true,
        has_webhook: true,
        complexity: 'simple',
        sanitized_workflow: {
          nodes: [{ id: '1' }],
          connections: {}
        },
        created_at: '2024-01-01T00:00:00Z'
      };
      const result = workflowTelemetrySchema.safeParse(workflow);
      expect(result.success).toBe(true);
    });
  });
});
// String sanitization: placeholder substitution for URLs/emails/keys inside
// values, and confirmation that short or benign values are left intact.
describe('String sanitization edge cases', () => {
  it('should handle multiple URLs in same string', () => {
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'test_event',
      properties: {
        description: 'Visit https://example.com or http://test.com for more info'
      }
    };
    const result = validator.validateEvent(event);
    expect(result?.properties.description).toBe('Visit [URL] or [URL] for more info');
  });
  it('should handle mixed sensitive content', () => {
    // Email, URL, and a long key-like token all in one string.
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'test_event',
      properties: {
        message: 'Contact admin@example.com at https://secure.com with key abc123def456ghi789jkl012mno345pqr'
      }
    };
    const result = validator.validateEvent(event);
    expect(result?.properties.message).toBe('Contact [EMAIL] at [URL] with key [KEY]');
  });
  it('should preserve non-sensitive content', () => {
    const event: TelemetryEvent = {
      user_id: 'user123',
      event: 'test_event',
      properties: {
        status: 'success',
        count: 42,
        enabled: true,
        short_id: 'abc123' // Too short to be considered a key
      }
    };
    const result = validator.validateEvent(event);
    expect(result?.properties.status).toBe('success');
    expect(result?.properties.count).toBe(42);
    expect(result?.properties.enabled).toBe(true);
    expect(result?.properties.short_id).toBe('abc123');
  });
});
// Error handling: type-level violations and pathological inputs must fail
// closed (null result) without throwing.
describe('Error handling', () => {
  it('should handle Zod parsing errors gracefully', () => {
    const invalidEvent = {
      user_id: 123, // Should be string
      event: 'test_event',
      properties: {}
    };
    const result = validator.validateEvent(invalidEvent as any);
    expect(result).toBeNull();
  });
  it('should handle unexpected errors during validation', () => {
    const eventWithCircularRef: any = {
      user_id: 'user123',
      event: 'test_event',
      properties: {}
    };
    // Create circular reference
    eventWithCircularRef.properties.self = eventWithCircularRef;
    // Bug fix: the original `expect(result).not.toThrow;` referenced the
    // matcher without invoking it, so nothing was asserted. toThrow must
    // receive a function wrapping the call so it can observe a throw.
    expect(() => validator.validateEvent(eventWithCircularRef)).not.toThrow();
  });
});
});

View File

@@ -1,817 +0,0 @@
/**
* Unit tests for MutationTracker - Sanitization and Processing
*/
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { MutationTracker } from '../../../src/telemetry/mutation-tracker';
import { WorkflowMutationData, MutationToolName } from '../../../src/telemetry/mutation-types';
describe('MutationTracker', () => {
// Fresh tracker per test; the recent-mutation cache is cleared explicitly so
// deduplication state never leaks between cases.
let tracker: MutationTracker;
beforeEach(() => {
  tracker = new MutationTracker();
  tracker.clearRecentMutations();
});
// Sanitization pipeline: credentials stripped at workflow and node level,
// sensitive-key parameters redacted, secrets inside string values replaced
// with placeholders — while the workflow's structure is preserved.
describe('Workflow Sanitization', () => {
  it('should remove credentials from workflow level', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test sanitization',
      operations: [{ type: 'updateNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [],
        connections: {},
        credentials: { apiKey: 'secret-key-123' },
        sharedWorkflows: ['user1', 'user2'],
        ownedBy: { id: 'user1', email: 'user@example.com' }
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test Updated',
        nodes: [],
        connections: {},
        credentials: { apiKey: 'secret-key-456' }
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    expect(result!.workflowBefore).toBeDefined();
    // credentials/sharing/ownership metadata must be dropped entirely.
    expect(result!.workflowBefore.credentials).toBeUndefined();
    expect(result!.workflowBefore.sharedWorkflows).toBeUndefined();
    expect(result!.workflowBefore.ownedBy).toBeUndefined();
    expect(result!.workflowAfter.credentials).toBeUndefined();
  });
  it('should remove credentials from node level', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test node credentials',
      operations: [{ type: 'addNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'HTTP Request',
            type: 'n8n-nodes-base.httpRequest',
            position: [100, 100],
            credentials: {
              httpBasicAuth: {
                id: 'cred-123',
                name: 'My Auth'
              }
            },
            parameters: {
              url: 'https://api.example.com'
            }
          }
        ],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'HTTP Request',
            type: 'n8n-nodes-base.httpRequest',
            position: [100, 100],
            credentials: {
              httpBasicAuth: {
                id: 'cred-456',
                name: 'Updated Auth'
              }
            },
            parameters: {
              url: 'https://api.example.com'
            }
          }
        ],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 150
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    expect(result!.workflowBefore.nodes[0].credentials).toBeUndefined();
    expect(result!.workflowAfter.nodes[0].credentials).toBeUndefined();
  });
  it('should redact API keys in parameters', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test API key redaction',
      operations: [{ type: 'updateNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'OpenAI',
            type: 'n8n-nodes-base.openAi',
            position: [100, 100],
            parameters: {
              apiKeyField: 'sk-1234567890abcdef1234567890abcdef',
              tokenField: 'Bearer abc123def456',
              config: {
                passwordField: 'secret-password-123'
              }
            }
          }
        ],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'OpenAI',
            type: 'n8n-nodes-base.openAi',
            position: [100, 100],
            parameters: {
              apiKeyField: 'sk-newkey567890abcdef1234567890abcdef'
            }
          }
        ],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 200
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    const params = result!.workflowBefore.nodes[0].parameters;
    // Fields with sensitive key names are redacted
    expect(params.apiKeyField).toBe('[REDACTED]');
    expect(params.tokenField).toBe('[REDACTED]');
    expect(params.config.passwordField).toBe('[REDACTED]');
  });
  it('should redact URLs with authentication', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test URL redaction',
      operations: [{ type: 'updateNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'HTTP Request',
            type: 'n8n-nodes-base.httpRequest',
            position: [100, 100],
            parameters: {
              url: 'https://user:password@api.example.com/endpoint',
              webhookUrl: 'http://admin:secret@webhook.example.com'
            }
          }
        ],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    const params = result!.workflowBefore.nodes[0].parameters;
    // URL auth is redacted but path is preserved
    expect(params.url).toBe('[REDACTED_URL_WITH_AUTH]/endpoint');
    expect(params.webhookUrl).toBe('[REDACTED_URL_WITH_AUTH]');
  });
  it('should redact long tokens (32+ characters)', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test token redaction',
      operations: [{ type: 'updateNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'Slack',
            type: 'n8n-nodes-base.slack',
            position: [100, 100],
            parameters: {
              message: 'Token: test-token-1234567890-1234567890123-abcdefghijklmnopqrstuvwx'
            }
          }
        ],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    const message = result!.workflowBefore.nodes[0].parameters.message;
    expect(message).toContain('[REDACTED_TOKEN]');
  });
  it('should redact OpenAI-style keys', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test OpenAI key redaction',
      operations: [{ type: 'updateNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'Code',
            type: 'n8n-nodes-base.code',
            position: [100, 100],
            parameters: {
              code: 'const apiKey = "sk-proj-abcd1234efgh5678ijkl9012mnop3456";'
            }
          }
        ],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    const code = result!.workflowBefore.nodes[0].parameters.code;
    // The 32+ char regex runs before OpenAI-specific regex, so it becomes [REDACTED_TOKEN]
    expect(code).toContain('[REDACTED_TOKEN]');
  });
  it('should redact Bearer tokens', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test Bearer token redaction',
      operations: [{ type: 'updateNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'HTTP Request',
            type: 'n8n-nodes-base.httpRequest',
            position: [100, 100],
            parameters: {
              headerParameters: {
                parameter: [
                  {
                    name: 'Authorization',
                    value: 'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c'
                  }
                ]
              }
            }
          }
        ],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    const authValue = result!.workflowBefore.nodes[0].parameters.headerParameters.parameter[0].value;
    expect(authValue).toBe('Bearer [REDACTED]');
  });
  it('should preserve workflow structure while sanitizing', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test structure preservation',
      operations: [{ type: 'addNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'My Workflow',
        nodes: [
          {
            id: 'node1',
            name: 'Start',
            type: 'n8n-nodes-base.start',
            position: [100, 100],
            parameters: {}
          },
          {
            id: 'node2',
            name: 'HTTP',
            type: 'n8n-nodes-base.httpRequest',
            position: [300, 100],
            parameters: {
              url: 'https://api.example.com',
              apiKey: 'secret-key'
            }
          }
        ],
        connections: {
          Start: {
            main: [[{ node: 'HTTP', type: 'main', index: 0 }]]
          }
        },
        active: true,
        credentials: { apiKey: 'workflow-secret' }
      },
      workflowAfter: {
        id: 'wf1',
        name: 'My Workflow',
        nodes: [],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 150
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    // Check structure preserved
    expect(result!.workflowBefore.id).toBe('wf1');
    expect(result!.workflowBefore.name).toBe('My Workflow');
    expect(result!.workflowBefore.nodes).toHaveLength(2);
    expect(result!.workflowBefore.connections).toBeDefined();
    expect(result!.workflowBefore.active).toBe(true);
    // Check credentials removed
    expect(result!.workflowBefore.credentials).toBeUndefined();
    // Check node parameters sanitized
    expect(result!.workflowBefore.nodes[1].parameters.apiKey).toBe('[REDACTED]');
    // Check connections preserved
    expect(result!.workflowBefore.connections.Start).toBeDefined();
  });
  it('should handle nested objects recursively', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test nested sanitization',
      operations: [{ type: 'updateNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'Complex Node',
            type: 'n8n-nodes-base.httpRequest',
            position: [100, 100],
            parameters: {
              authentication: {
                type: 'oauth2',
                // Use 'settings' instead of 'credentials' since 'credentials' is a sensitive key
                settings: {
                  clientId: 'safe-client-id',
                  clientSecret: 'very-secret-key',
                  nested: {
                    apiKeyValue: 'deep-secret-key',
                    tokenValue: 'nested-token'
                  }
                }
              }
            }
          }
        ],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    const auth = result!.workflowBefore.nodes[0].parameters.authentication;
    // The key 'authentication' contains 'auth' which is sensitive, so entire object is redacted
    expect(auth).toBe('[REDACTED]');
  });
});
// Deduplication: identical mutation payloads are skipped; payloads touching
// different workflows are both accepted.
describe('Deduplication', () => {
  // Minimal mutation payload for a workflow with the given id/name; the
  // "after" snapshot only differs by the renamed workflow.
  const buildMutation = (id: string, name: string): WorkflowMutationData => ({
    sessionId: 'test-session',
    toolName: MutationToolName.UPDATE_PARTIAL,
    userIntent: 'First mutation',
    operations: [{ type: 'updateNode' }],
    workflowBefore: { id, name, nodes: [], connections: {} },
    workflowAfter: { id, name: `${name} Updated`, nodes: [], connections: {} },
    mutationSuccess: true,
    durationMs: 100
  });
  it('should detect and skip duplicate mutations', async () => {
    const payload = buildMutation('wf1', 'Test');
    // First submission is accepted…
    const first = await tracker.processMutation(payload, 'test-user');
    expect(first).toBeTruthy();
    // …and resubmitting the identical payload is treated as a duplicate.
    const second = await tracker.processMutation(payload, 'test-user');
    expect(second).toBeNull();
  });
  it('should allow mutations with different workflows', async () => {
    const resultOne = await tracker.processMutation(buildMutation('wf1', 'Test 1'), 'test-user');
    const resultTwo = await tracker.processMutation(buildMutation('wf2', 'Test 2'), 'test-user');
    expect(resultOne).toBeTruthy();
    expect(resultTwo).toBeTruthy();
  });
});
// Structural hashing: hashes derived from node types + connection topology
// only, distinct from the full workflow hash. Same structure with different
// parameters must hash identically; changed structure must not.
describe('Structural Hash Generation', () => {
  it('should generate structural hashes for both before and after workflows', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test structural hash generation',
      operations: [{ type: 'addNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'Start',
            type: 'n8n-nodes-base.start',
            position: [100, 100],
            parameters: {}
          }
        ],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'Start',
            type: 'n8n-nodes-base.start',
            position: [100, 100],
            parameters: {}
          },
          {
            id: 'node2',
            name: 'HTTP',
            type: 'n8n-nodes-base.httpRequest',
            position: [300, 100],
            parameters: { url: 'https://api.example.com' }
          }
        ],
        connections: {
          Start: {
            main: [[{ node: 'HTTP', type: 'main', index: 0 }]]
          }
        }
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    expect(result!.workflowStructureHashBefore).toBeDefined();
    expect(result!.workflowStructureHashAfter).toBeDefined();
    expect(typeof result!.workflowStructureHashBefore).toBe('string');
    expect(typeof result!.workflowStructureHashAfter).toBe('string');
    // Structural hashes are fixed-width 16-character strings.
    expect(result!.workflowStructureHashBefore!.length).toBe(16);
    expect(result!.workflowStructureHashAfter!.length).toBe(16);
  });
  it('should generate different structural hashes when node types change', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test hash changes with node types',
      operations: [{ type: 'addNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'Start',
            type: 'n8n-nodes-base.start',
            position: [100, 100],
            parameters: {}
          }
        ],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [
          {
            id: 'node1',
            name: 'Start',
            type: 'n8n-nodes-base.start',
            position: [100, 100],
            parameters: {}
          },
          {
            id: 'node2',
            name: 'Slack',
            type: 'n8n-nodes-base.slack',
            position: [300, 100],
            parameters: {}
          }
        ],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    expect(result!.workflowStructureHashBefore).not.toBe(result!.workflowStructureHashAfter);
  });
  it('should generate same structural hash for workflows with same structure but different parameters', async () => {
    // Two workflows with identical node types and connection topology but
    // different ids, names, positions, and parameter values.
    const workflow1Before = {
      id: 'wf1',
      name: 'Test 1',
      nodes: [
        {
          id: 'node1',
          name: 'HTTP',
          type: 'n8n-nodes-base.httpRequest',
          position: [100, 100],
          parameters: { url: 'https://api1.example.com' }
        }
      ],
      connections: {}
    };
    const workflow1After = {
      id: 'wf1',
      name: 'Test 1 Updated',
      nodes: [
        {
          id: 'node1',
          name: 'HTTP',
          type: 'n8n-nodes-base.httpRequest',
          position: [100, 100],
          parameters: { url: 'https://api1-updated.example.com' }
        }
      ],
      connections: {}
    };
    const workflow2Before = {
      id: 'wf2',
      name: 'Test 2',
      nodes: [
        {
          id: 'node2',
          name: 'Different Name',
          type: 'n8n-nodes-base.httpRequest',
          position: [200, 200],
          parameters: { url: 'https://api2.example.com' }
        }
      ],
      connections: {}
    };
    const workflow2After = {
      id: 'wf2',
      name: 'Test 2 Updated',
      nodes: [
        {
          id: 'node2',
          name: 'Different Name',
          type: 'n8n-nodes-base.httpRequest',
          position: [200, 200],
          parameters: { url: 'https://api2-updated.example.com' }
        }
      ],
      connections: {}
    };
    const data1: WorkflowMutationData = {
      sessionId: 'test-session-1',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test 1',
      operations: [{ type: 'updateNode', nodeId: 'node1', updates: { 'parameters.test': 'value1' } } as any],
      workflowBefore: workflow1Before,
      workflowAfter: workflow1After,
      mutationSuccess: true,
      durationMs: 100
    };
    const data2: WorkflowMutationData = {
      sessionId: 'test-session-2',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test 2',
      operations: [{ type: 'updateNode', nodeId: 'node2', updates: { 'parameters.test': 'value2' } } as any],
      workflowBefore: workflow2Before,
      workflowAfter: workflow2After,
      mutationSuccess: true,
      durationMs: 100
    };
    const result1 = await tracker.processMutation(data1, 'test-user-1');
    const result2 = await tracker.processMutation(data2, 'test-user-2');
    expect(result1).toBeTruthy();
    expect(result2).toBeTruthy();
    // Same structure (same node types, same connection structure) should yield same hash
    expect(result1!.workflowStructureHashBefore).toBe(result2!.workflowStructureHashBefore);
  });
  it('should generate both full hash and structural hash', async () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Test both hash types',
      operations: [{ type: 'updateNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test Updated',
        nodes: [],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = await tracker.processMutation(data, 'test-user');
    expect(result).toBeTruthy();
    // Full hashes (includes all workflow data)
    expect(result!.workflowHashBefore).toBeDefined();
    expect(result!.workflowHashAfter).toBeDefined();
    // Structural hashes (nodeTypes + connections only)
    expect(result!.workflowStructureHashBefore).toBeDefined();
    expect(result!.workflowStructureHashAfter).toBeDefined();
    // They should be different since they hash different data
    expect(result!.workflowHashBefore).not.toBe(result!.workflowStructureHashBefore);
  });
});
describe('Statistics', () => {
  // Factory for a well-formed mutation so both specs share one shape.
  const baseMutation = (userIntent: string): WorkflowMutationData => ({
    sessionId: 'test-session',
    toolName: MutationToolName.UPDATE_PARTIAL,
    userIntent,
    operations: [{ type: 'updateNode' }],
    workflowBefore: { id: 'wf1', name: 'Test', nodes: [], connections: {} },
    workflowAfter: { id: 'wf1', name: 'Test Updated', nodes: [], connections: {} },
    mutationSuccess: true,
    durationMs: 100
  });

  it('should track recent mutations count', async () => {
    expect(tracker.getRecentMutationsCount()).toBe(0);
    const first = baseMutation('Test counting');
    await tracker.processMutation(first, 'test-user');
    expect(tracker.getRecentMutationsCount()).toBe(1);
    // A mutation against a different workflow id counts separately.
    const second = {
      ...baseMutation('Test counting'),
      workflowBefore: { ...first.workflowBefore, id: 'wf2' }
    };
    await tracker.processMutation(second, 'test-user');
    expect(tracker.getRecentMutationsCount()).toBe(2);
  });

  it('should clear recent mutations', async () => {
    await tracker.processMutation(baseMutation('Test clearing'), 'test-user');
    expect(tracker.getRecentMutationsCount()).toBe(1);
    // Clearing must reset the window back to empty.
    tracker.clearRecentMutations();
    expect(tracker.getRecentMutationsCount()).toBe(0);
  });
});
});

View File

@@ -1,557 +0,0 @@
/**
* Unit tests for MutationValidator - Data Quality Validation
*/
import { describe, it, expect, beforeEach } from 'vitest';
import { MutationValidator } from '../../../src/telemetry/mutation-validator';
import { WorkflowMutationData, MutationToolName } from '../../../src/telemetry/mutation-types';
import type { UpdateNodeOperation } from '../../../src/types/workflow-diff';
describe('MutationValidator', () => {
let validator: MutationValidator;
beforeEach(() => {
validator = new MutationValidator();
});
describe('Workflow Structure Validation', () => {
  // Helper: mutation whose workflowAfter is always well-formed, so each
  // spec only varies workflowBefore to probe the structure checks.
  const mutationWithBefore = (workflowBefore: any): WorkflowMutationData => ({
    sessionId: 'test-session',
    toolName: MutationToolName.UPDATE_PARTIAL,
    userIntent: 'Valid mutation',
    operations: [{ type: 'updateNode' }],
    workflowBefore,
    workflowAfter: { id: 'wf1', name: 'Test Updated', nodes: [], connections: {} },
    mutationSuccess: true,
    durationMs: 100
  });

  it('should accept valid workflow structure', () => {
    const result = validator.validate(
      mutationWithBefore({ id: 'wf1', name: 'Test', nodes: [], connections: {} })
    );
    expect(result.valid).toBe(true);
    expect(result.errors).toHaveLength(0);
  });

  it('should reject workflow without nodes array', () => {
    // Missing `nodes` makes the workflow structurally invalid.
    const result = validator.validate(
      mutationWithBefore({ id: 'wf1', name: 'Test', connections: {} })
    );
    expect(result.valid).toBe(false);
    expect(result.errors).toContain('Invalid workflow_before structure');
  });

  it('should reject workflow without connections object', () => {
    // Missing `connections` is equally invalid.
    const result = validator.validate(
      mutationWithBefore({ id: 'wf1', name: 'Test', nodes: [] })
    );
    expect(result.valid).toBe(false);
    expect(result.errors).toContain('Invalid workflow_before structure');
  });

  it('should reject null workflow', () => {
    const result = validator.validate(mutationWithBefore(null));
    expect(result.valid).toBe(false);
    expect(result.errors).toContain('Invalid workflow_before structure');
  });
});
describe('Workflow Size Validation', () => {
  it('should accept workflows within size limit', () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Size test',
      operations: [{ type: 'addNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [{
          id: 'node1',
          name: 'Start',
          type: 'n8n-nodes-base.start',
          position: [100, 100],
          parameters: {}
        }],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = validator.validate(data);
    expect(result.valid).toBe(true);
    // FIX: `expect(arr).not.toContain(expect.stringContaining('size'))` was
    // vacuous — toContain compares by strict equality and never matches an
    // asymmetric matcher object, so the negated form always passed.
    // Inspect the error strings directly instead.
    expect(result.errors.some(err => err.includes('size'))).toBe(false);
  });

  it('should reject oversized workflows', () => {
    // Create a very large workflow (over 500KB default limit).
    // 600KB string = 600,000 characters.
    const largeArray = new Array(600000).fill('x').join('');
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Oversized test',
      operations: [{ type: 'updateNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [{
          id: 'node1',
          name: 'Large',
          type: 'n8n-nodes-base.code',
          position: [100, 100],
          parameters: {
            code: largeArray
          }
        }],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = validator.validate(data);
    expect(result.valid).toBe(false);
    expect(result.errors.some(err => err.includes('size') && err.includes('exceeds'))).toBe(true);
  });

  it('should respect custom size limit', () => {
    // A 1KB ceiling rejects even a ~2KB code payload.
    const customValidator = new MutationValidator({ maxWorkflowSizeKb: 1 });
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Custom size test',
      operations: [{ type: 'addNode' }],
      workflowBefore: {
        id: 'wf1',
        name: 'Test',
        nodes: [{
          id: 'node1',
          name: 'Medium',
          type: 'n8n-nodes-base.code',
          position: [100, 100],
          parameters: {
            code: 'x'.repeat(2000) // ~2KB
          }
        }],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'Test',
        nodes: [],
        connections: {}
      },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = customValidator.validate(data);
    expect(result.valid).toBe(false);
    expect(result.errors.some(err => err.includes('exceeds maximum (1KB)'))).toBe(true);
  });
});
describe('Intent Validation', () => {
  // Builds an otherwise-valid mutation with the given intent so each spec
  // isolates the intent-length checks.
  const intentMutation = (userIntent: string): WorkflowMutationData => ({
    sessionId: 'test-session',
    toolName: MutationToolName.UPDATE_PARTIAL,
    userIntent,
    operations: [{ type: 'updateNode' }],
    workflowBefore: { id: 'wf1', name: 'Test', nodes: [], connections: {} },
    workflowAfter: { id: 'wf1', name: 'Test', nodes: [], connections: {} },
    mutationSuccess: true,
    durationMs: 100
  });

  it('should warn about empty intent', () => {
    const result = validator.validate(intentMutation(''));
    expect(result.warnings).toContain('User intent is empty');
  });

  it('should warn about very short intent', () => {
    const result = validator.validate(intentMutation('fix'));
    expect(result.warnings).toContain('User intent is too short (less than 5 characters)');
  });

  it('should warn about very long intent', () => {
    const result = validator.validate(intentMutation('x'.repeat(1001)));
    expect(result.warnings).toContain('User intent is very long (over 1000 characters)');
  });

  it('should accept good intent length', () => {
    const result = validator.validate(intentMutation('Add error handling to API nodes'));
    // FIX: `not.toContain(expect.stringContaining('intent'))` was vacuous —
    // toContain uses strict equality, so the matcher object never matched and
    // the negated assertion always passed. Check the strings directly.
    expect(result.warnings.some(w => w.includes('intent'))).toBe(false);
  });
});
describe('Operations Validation', () => {
  // Otherwise-valid mutation with a configurable operations list.
  const withOperations = (operations: any[]): WorkflowMutationData => ({
    sessionId: 'test-session',
    toolName: MutationToolName.UPDATE_PARTIAL,
    userIntent: 'Test',
    operations,
    workflowBefore: { id: 'wf1', name: 'Test', nodes: [], connections: {} },
    workflowAfter: { id: 'wf1', name: 'Test', nodes: [], connections: {} },
    mutationSuccess: true,
    durationMs: 100
  });

  it('should reject empty operations array', () => {
    const result = validator.validate(withOperations([]));
    expect(result.valid).toBe(false);
    expect(result.errors).toContain('No operations provided');
  });

  it('should accept operations array with items', () => {
    const result = validator.validate(
      withOperations([{ type: 'addNode' }, { type: 'addConnection' }])
    );
    expect(result.valid).toBe(true);
    expect(result.errors).not.toContain('No operations provided');
  });
});
describe('Duration Validation', () => {
  // Otherwise-valid mutation with a configurable duration.
  const timedMutation = (durationMs: number): WorkflowMutationData => ({
    sessionId: 'test-session',
    toolName: MutationToolName.UPDATE_PARTIAL,
    userIntent: 'Test',
    operations: [{ type: 'updateNode' }],
    workflowBefore: { id: 'wf1', name: 'Test', nodes: [], connections: {} },
    workflowAfter: { id: 'wf1', name: 'Test', nodes: [], connections: {} },
    mutationSuccess: true,
    durationMs
  });

  it('should reject negative duration', () => {
    const result = validator.validate(timedMutation(-100));
    expect(result.valid).toBe(false);
    expect(result.errors).toContain('Duration cannot be negative');
  });

  it('should warn about very long duration', () => {
    // 400s exceeds the 5-minute warning threshold.
    const result = validator.validate(timedMutation(400000));
    expect(result.warnings).toContain('Duration is very long (over 5 minutes)');
  });

  it('should accept reasonable duration', () => {
    const result = validator.validate(timedMutation(150));
    expect(result.valid).toBe(true);
    // FIX: `not.toContain(expect.stringContaining('Duration'))` was vacuous —
    // toContain uses strict equality and never matches an asymmetric matcher,
    // so the negated assertion always passed. Check the strings directly.
    expect(result.warnings.some(w => w.includes('Duration'))).toBe(false);
  });
});
describe('Meaningful Change Detection', () => {
  it('should warn when workflows are identical', () => {
    const workflow = {
      id: 'wf1',
      name: 'Test',
      nodes: [
        {
          id: 'node1',
          name: 'Start',
          type: 'n8n-nodes-base.start',
          position: [100, 100],
          parameters: {}
        }
      ],
      connections: {}
    };
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'No actual change',
      operations: [{ type: 'updateNode' }],
      workflowBefore: workflow,
      // Deep clone: equal content, distinct reference, so the comparison
      // must be structural rather than identity-based.
      workflowAfter: JSON.parse(JSON.stringify(workflow)),
      mutationSuccess: true,
      durationMs: 100
    };
    const result = validator.validate(data);
    expect(result.warnings).toContain('No meaningful change detected between before and after workflows');
  });

  it('should not warn when workflows are different', () => {
    const data: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Real change',
      operations: [{ type: 'updateNode' }],
      workflowBefore: { id: 'wf1', name: 'Test', nodes: [], connections: {} },
      workflowAfter: { id: 'wf1', name: 'Test Updated', nodes: [], connections: {} },
      mutationSuccess: true,
      durationMs: 100
    };
    const result = validator.validate(data);
    // FIX: `not.toContain(expect.stringContaining('meaningful change'))` was
    // vacuous — toContain compares by strict equality and never matches an
    // asymmetric matcher, so the negated form always passed.
    expect(result.warnings.some(w => w.includes('meaningful change'))).toBe(false);
  });
});
describe('Validation Data Consistency', () => {
  // Otherwise-valid mutation with configurable validation snapshots.
  const withValidation = (
    validationBefore: any,
    validationAfter: any
  ): WorkflowMutationData => ({
    sessionId: 'test-session',
    toolName: MutationToolName.UPDATE_PARTIAL,
    userIntent: 'Test',
    operations: [{ type: 'updateNode' }],
    workflowBefore: { id: 'wf1', name: 'Test', nodes: [], connections: {} },
    workflowAfter: { id: 'wf1', name: 'Test', nodes: [], connections: {} },
    validationBefore,
    validationAfter,
    mutationSuccess: true,
    durationMs: 100
  });

  it('should warn about invalid validation structure', () => {
    // `valid` must be a boolean; a string should trigger a structure warning.
    const result = validator.validate(
      withValidation({ valid: 'yes' }, { valid: true, errors: [] })
    );
    expect(result.warnings).toContain('Invalid validation_before structure');
  });

  it('should accept valid validation structure', () => {
    const result = validator.validate(withValidation(
      { valid: false, errors: [{ type: 'test_error', message: 'Error 1' }] },
      { valid: true, errors: [] }
    ));
    // FIX: `not.toContain(expect.stringContaining('validation'))` was vacuous —
    // toContain uses strict equality and never matches an asymmetric matcher,
    // so the negated assertion always passed. Check the strings directly.
    expect(result.warnings.some(w => w.includes('validation'))).toBe(false);
  });
});
describe('Comprehensive Validation', () => {
  it('should collect multiple errors and warnings', () => {
    // Deliberately broken on several axes at once: empty intent (warning),
    // empty operations (error), null workflowBefore (error), malformed
    // workflowAfter (error), negative duration (error).
    const broken: WorkflowMutationData = {
      sessionId: 'test-session',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: '',
      operations: [],
      workflowBefore: null as any,
      workflowAfter: { nodes: [] } as any,
      mutationSuccess: true,
      durationMs: -50
    };
    const result = validator.validate(broken);
    expect(result.valid).toBe(false);
    expect(result.errors.length).toBeGreaterThan(0);
    expect(result.warnings.length).toBeGreaterThan(0);
  });

  it('should pass validation with all criteria met', () => {
    // The same HTTP node before and after, with onError added after.
    const httpNode = (extra: Record<string, unknown> = {}): any => ({
      id: 'node1',
      name: 'HTTP Request',
      type: 'n8n-nodes-base.httpRequest',
      position: [300, 200],
      parameters: {
        url: 'https://api.example.com',
        method: 'GET'
      },
      ...extra
    });
    const complete: WorkflowMutationData = {
      sessionId: 'test-session-123',
      toolName: MutationToolName.UPDATE_PARTIAL,
      userIntent: 'Add error handling to HTTP Request nodes',
      operations: [
        { type: 'updateNode', nodeName: 'node1', updates: { onError: 'continueErrorOutput' } } as UpdateNodeOperation
      ],
      workflowBefore: {
        id: 'wf1',
        name: 'API Workflow',
        nodes: [httpNode()],
        connections: {}
      },
      workflowAfter: {
        id: 'wf1',
        name: 'API Workflow',
        nodes: [httpNode({ onError: 'continueErrorOutput' })],
        connections: {}
      },
      validationBefore: { valid: true, errors: [] },
      validationAfter: { valid: true, errors: [] },
      mutationSuccess: true,
      durationMs: 245
    };
    const result = validator.validate(complete);
    expect(result.valid).toBe(true);
    expect(result.errors).toHaveLength(0);
  });
});
});

View File

@@ -1,636 +0,0 @@
import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
import { TelemetryError, TelemetryCircuitBreaker, TelemetryErrorAggregator } from '../../../src/telemetry/telemetry-error';
import { TelemetryErrorType } from '../../../src/telemetry/telemetry-types';
import { logger } from '../../../src/utils/logger';
// Mock logger to avoid console output in tests
vi.mock('../../../src/utils/logger', () => ({
logger: {
debug: vi.fn(),
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
}
}));
describe('TelemetryError', () => {
beforeEach(() => {
vi.clearAllMocks();
vi.useFakeTimers();
});
afterEach(() => {
vi.useRealTimers();
});
describe('constructor', () => {
  it('should create error with all properties', () => {
    const ctx = { operation: 'test', detail: 'info' };
    const err = new TelemetryError(
      TelemetryErrorType.NETWORK_ERROR,
      'Test error',
      ctx,
      true
    );
    expect(err.name).toBe('TelemetryError');
    expect(err.message).toBe('Test error');
    expect(err.type).toBe(TelemetryErrorType.NETWORK_ERROR);
    expect(err.context).toEqual(ctx);
    expect(err.retryable).toBe(true);
    expect(err.timestamp).toBeTypeOf('number');
  });

  it('should default retryable to false', () => {
    // Omitting both context and retryable exercises the defaults.
    const err = new TelemetryError(TelemetryErrorType.VALIDATION_ERROR, 'Test error');
    expect(err.retryable).toBe(false);
  });

  it('should handle undefined context', () => {
    const err = new TelemetryError(TelemetryErrorType.UNKNOWN_ERROR, 'Test error');
    expect(err.context).toBeUndefined();
  });

  it('should maintain proper prototype chain', () => {
    // instanceof must hold for both the subclass and the base Error.
    const err = new TelemetryError(TelemetryErrorType.NETWORK_ERROR, 'Test error');
    expect(err instanceof TelemetryError).toBe(true);
    expect(err instanceof Error).toBe(true);
  });
});
describe('toContext()', () => {
  it('should convert error to context object', () => {
    const details = { operation: 'flush', batch: 'events' };
    const err = new TelemetryError(
      TelemetryErrorType.NETWORK_ERROR,
      'Failed to flush',
      details,
      true
    );
    // The serialized form carries every field, including the original timestamp.
    expect(err.toContext()).toEqual({
      type: TelemetryErrorType.NETWORK_ERROR,
      message: 'Failed to flush',
      context: details,
      timestamp: err.timestamp,
      retryable: true
    });
  });
});
describe('log()', () => {
  // All paths go to logger.debug (mocked at module level); only the
  // message prefix distinguishes retryable from non-retryable errors.
  it('should log retryable errors as debug', () => {
    new TelemetryError(
      TelemetryErrorType.NETWORK_ERROR,
      'Retryable error',
      { attempt: 1 },
      true
    ).log();
    expect(logger.debug).toHaveBeenCalledWith(
      'Retryable telemetry error:',
      expect.objectContaining({
        type: TelemetryErrorType.NETWORK_ERROR,
        message: 'Retryable error',
        attempt: 1
      })
    );
  });

  it('should log non-retryable errors as debug', () => {
    new TelemetryError(
      TelemetryErrorType.VALIDATION_ERROR,
      'Non-retryable error',
      { field: 'user_id' },
      false
    ).log();
    expect(logger.debug).toHaveBeenCalledWith(
      'Non-retryable telemetry error:',
      expect.objectContaining({
        type: TelemetryErrorType.VALIDATION_ERROR,
        message: 'Non-retryable error',
        field: 'user_id'
      })
    );
  });

  it('should handle errors without context', () => {
    new TelemetryError(TelemetryErrorType.UNKNOWN_ERROR, 'Simple error').log();
    expect(logger.debug).toHaveBeenCalledWith(
      'Non-retryable telemetry error:',
      expect.objectContaining({
        type: TelemetryErrorType.UNKNOWN_ERROR,
        message: 'Simple error'
      })
    );
  });
});
});
describe('TelemetryCircuitBreaker', () => {
let circuitBreaker: TelemetryCircuitBreaker;
beforeEach(() => {
vi.clearAllMocks();
vi.useFakeTimers();
circuitBreaker = new TelemetryCircuitBreaker(3, 10000, 2); // 3 failures, 10s reset, 2 half-open requests
});
afterEach(() => {
vi.useRealTimers();
});
describe('shouldAllow()', () => {
  // Breaker config (from the suite's beforeEach): 3-failure threshold,
  // 10s reset timeout, 2 half-open probes. Time is driven by fake timers,
  // so the exact advanceTimersByTime amounts matter.
  it('should allow requests in closed state', () => {
    expect(circuitBreaker.shouldAllow()).toBe(true);
  });
  it('should open circuit after failure threshold', () => {
    // Record 3 failures to reach threshold
    for (let i = 0; i < 3; i++) {
      circuitBreaker.recordFailure();
    }
    expect(circuitBreaker.shouldAllow()).toBe(false);
    expect(circuitBreaker.getState().state).toBe('open');
  });
  it('should transition to half-open after reset timeout', () => {
    // Open the circuit
    for (let i = 0; i < 3; i++) {
      circuitBreaker.recordFailure();
    }
    expect(circuitBreaker.shouldAllow()).toBe(false);
    // Advance time past reset timeout
    vi.advanceTimersByTime(11000);
    // Should transition to half-open and allow request
    expect(circuitBreaker.shouldAllow()).toBe(true);
    expect(circuitBreaker.getState().state).toBe('half-open');
  });
  it('should limit requests in half-open state', () => {
    // Open the circuit
    for (let i = 0; i < 3; i++) {
      circuitBreaker.recordFailure();
    }
    // Advance to half-open
    vi.advanceTimersByTime(11000);
    // Should allow limited number of requests (2 in our config)
    expect(circuitBreaker.shouldAllow()).toBe(true);
    expect(circuitBreaker.shouldAllow()).toBe(true);
    expect(circuitBreaker.shouldAllow()).toBe(true); // Note: simplified implementation allows all
  });
  it('should not allow requests before reset timeout in open state', () => {
    // Open the circuit
    for (let i = 0; i < 3; i++) {
      circuitBreaker.recordFailure();
    }
    // Advance time but not enough to reset (5s < 10s timeout)
    vi.advanceTimersByTime(5000);
    expect(circuitBreaker.shouldAllow()).toBe(false);
  });
});
describe('recordSuccess()', () => {
  // These specs pin down the simplified breaker's recovery behavior:
  // closing from half-open requires as many recordSuccess() calls as
  // half-open probes were allowed (2 in this configuration).
  it('should reset failure count in closed state', () => {
    // Record some failures but not enough to open
    circuitBreaker.recordFailure();
    circuitBreaker.recordFailure();
    expect(circuitBreaker.getState().failureCount).toBe(2);
    // Success should reset count
    circuitBreaker.recordSuccess();
    expect(circuitBreaker.getState().failureCount).toBe(0);
  });
  it('should close circuit after successful half-open requests', () => {
    // Open the circuit
    for (let i = 0; i < 3; i++) {
      circuitBreaker.recordFailure();
    }
    // Go to half-open
    vi.advanceTimersByTime(11000);
    circuitBreaker.shouldAllow(); // First half-open request
    circuitBreaker.shouldAllow(); // Second half-open request
    // The circuit breaker implementation requires success calls
    // to match the number of half-open requests configured
    circuitBreaker.recordSuccess();
    // In current implementation, state remains half-open
    // This is a known behavior of the simplified circuit breaker
    expect(circuitBreaker.getState().state).toBe('half-open');
    // After another success, it should close
    circuitBreaker.recordSuccess();
    expect(circuitBreaker.getState().state).toBe('closed');
    expect(circuitBreaker.getState().failureCount).toBe(0);
    expect(logger.debug).toHaveBeenCalledWith('Circuit breaker closed after successful recovery');
  });
  it('should not affect state when not in half-open after sufficient requests', () => {
    // Open circuit, go to half-open, make one request
    for (let i = 0; i < 3; i++) {
      circuitBreaker.recordFailure();
    }
    vi.advanceTimersByTime(11000);
    circuitBreaker.shouldAllow(); // One half-open request
    // Record success but should not close yet (need 2 successful requests)
    circuitBreaker.recordSuccess();
    expect(circuitBreaker.getState().state).toBe('half-open');
  });
});
describe('recordFailure()', () => {
  // logger.debug assertions rely on the module-level vi.mock of the logger
  // and on the breaker's exact log message strings and argument shapes.
  it('should increment failure count in closed state', () => {
    circuitBreaker.recordFailure();
    expect(circuitBreaker.getState().failureCount).toBe(1);
    circuitBreaker.recordFailure();
    expect(circuitBreaker.getState().failureCount).toBe(2);
  });
  it('should open circuit when threshold reached', () => {
    const error = new Error('Test error');
    // Record failures to reach threshold
    circuitBreaker.recordFailure(error);
    circuitBreaker.recordFailure(error);
    expect(circuitBreaker.getState().state).toBe('closed');
    circuitBreaker.recordFailure(error);
    expect(circuitBreaker.getState().state).toBe('open');
    expect(logger.debug).toHaveBeenCalledWith(
      'Circuit breaker opened after 3 failures',
      { error: 'Test error' }
    );
  });
  it('should immediately open from half-open on failure', () => {
    // Open circuit, go to half-open
    for (let i = 0; i < 3; i++) {
      circuitBreaker.recordFailure();
    }
    vi.advanceTimersByTime(11000);
    circuitBreaker.shouldAllow();
    // Failure in half-open should immediately open
    const error = new Error('Half-open failure');
    circuitBreaker.recordFailure(error);
    expect(circuitBreaker.getState().state).toBe('open');
    expect(logger.debug).toHaveBeenCalledWith(
      'Circuit breaker opened from half-open state',
      { error: 'Half-open failure' }
    );
  });
  it('should handle failure without error object', () => {
    // No Error argument: the log payload carries { error: undefined }.
    for (let i = 0; i < 3; i++) {
      circuitBreaker.recordFailure();
    }
    expect(circuitBreaker.getState().state).toBe('open');
    expect(logger.debug).toHaveBeenCalledWith(
      'Circuit breaker opened after 3 failures',
      { error: undefined }
    );
  });
});
describe('getState()', () => {
  it('should return current state information', () => {
    expect(circuitBreaker.getState()).toEqual({
      state: 'closed',
      failureCount: 0,
      canRetry: true
    });
  });

  it('should reflect state changes', () => {
    // Two failures: still closed, but the count is visible.
    circuitBreaker.recordFailure();
    circuitBreaker.recordFailure();
    expect(circuitBreaker.getState()).toEqual({
      state: 'closed',
      failureCount: 2,
      canRetry: true
    });
    // The third failure crosses the threshold and opens the circuit.
    circuitBreaker.recordFailure();
    expect(circuitBreaker.getState()).toEqual({
      state: 'open',
      failureCount: 3,
      canRetry: false
    });
  });
});
describe('reset()', () => {
  it('should reset circuit breaker to initial state', () => {
    // Drive the breaker into half-open: open it, wait out the reset
    // timeout, then let one probe through.
    for (let i = 0; i < 3; i++) {
      circuitBreaker.recordFailure();
    }
    vi.advanceTimersByTime(11000);
    circuitBreaker.shouldAllow();
    // reset() must restore the pristine closed state.
    circuitBreaker.reset();
    expect(circuitBreaker.getState()).toEqual({
      state: 'closed',
      failureCount: 0,
      canRetry: true
    });
  });
});
describe('different configurations', () => {
  it('should work with custom failure threshold', () => {
    // Threshold of 1: a single failure opens the circuit immediately.
    const breaker = new TelemetryCircuitBreaker(1, 5000, 1);
    expect(breaker.getState().state).toBe('closed');
    breaker.recordFailure();
    expect(breaker.getState().state).toBe('open');
  });

  it('should work with custom half-open request count', () => {
    const breaker = new TelemetryCircuitBreaker(1, 5000, 3);
    // Open it and wait past the 5s reset timeout to reach half-open.
    breaker.recordFailure();
    vi.advanceTimersByTime(6000);
    // The simplified implementation allows every half-open probe,
    // including a fourth beyond the configured limit of 3.
    for (let i = 0; i < 4; i++) {
      expect(breaker.shouldAllow()).toBe(true);
    }
  });
});
});
describe('TelemetryErrorAggregator', () => {
let aggregator: TelemetryErrorAggregator;
beforeEach(() => {
aggregator = new TelemetryErrorAggregator();
vi.clearAllMocks();
});
describe('record()', () => {
  // Small factory to cut down on constructor noise.
  const makeError = (type: TelemetryErrorType, message: string) =>
    new TelemetryError(type, message);

  it('should record error and increment counter', () => {
    aggregator.record(makeError(TelemetryErrorType.NETWORK_ERROR, 'Network failure'));
    const stats = aggregator.getStats();
    expect(stats.totalErrors).toBe(1);
    expect(stats.errorsByType[TelemetryErrorType.NETWORK_ERROR]).toBe(1);
  });

  it('should increment counter for repeated error types', () => {
    aggregator.record(makeError(TelemetryErrorType.NETWORK_ERROR, 'First failure'));
    aggregator.record(makeError(TelemetryErrorType.NETWORK_ERROR, 'Second failure'));
    const stats = aggregator.getStats();
    expect(stats.totalErrors).toBe(2);
    expect(stats.errorsByType[TelemetryErrorType.NETWORK_ERROR]).toBe(2);
  });

  it('should maintain limited error detail history', () => {
    // Push well past the detail cap: totals keep counting but only the
    // 10 most recent details are retained.
    for (let i = 0; i < 105; i++) {
      aggregator.record(makeError(TelemetryErrorType.VALIDATION_ERROR, `Error ${i}`));
    }
    const stats = aggregator.getStats();
    expect(stats.totalErrors).toBe(105);
    expect(stats.recentErrors).toHaveLength(10); // Only last 10
  });

  it('should track different error types separately', () => {
    const network = makeError(TelemetryErrorType.NETWORK_ERROR, 'Network issue');
    aggregator.record(network);
    aggregator.record(network);
    aggregator.record(makeError(TelemetryErrorType.VALIDATION_ERROR, 'Validation issue'));
    aggregator.record(makeError(TelemetryErrorType.RATE_LIMIT_ERROR, 'Rate limit hit'));
    const stats = aggregator.getStats();
    expect(stats.totalErrors).toBe(4);
    expect(stats.errorsByType[TelemetryErrorType.NETWORK_ERROR]).toBe(2);
    expect(stats.errorsByType[TelemetryErrorType.VALIDATION_ERROR]).toBe(1);
    expect(stats.errorsByType[TelemetryErrorType.RATE_LIMIT_ERROR]).toBe(1);
  });
});
describe('getStats()', () => {
  it('should return empty stats when no errors recorded', () => {
    expect(aggregator.getStats()).toEqual({
      totalErrors: 0,
      errorsByType: {},
      mostCommonError: undefined,
      recentErrors: []
    });
  });

  it('should identify most common error type', () => {
    const network = new TelemetryError(TelemetryErrorType.NETWORK_ERROR, 'Network issue');
    const validation = new TelemetryError(TelemetryErrorType.VALIDATION_ERROR, 'Validation issue');
    // Three network errors vs one validation error.
    [network, network, network, validation].forEach(e => aggregator.record(e));
    expect(aggregator.getStats().mostCommonError).toBe(TelemetryErrorType.NETWORK_ERROR);
  });

  it('should return recent errors in order', () => {
    aggregator.record(new TelemetryError(TelemetryErrorType.NETWORK_ERROR, 'First error'));
    aggregator.record(new TelemetryError(TelemetryErrorType.VALIDATION_ERROR, 'Second error'));
    aggregator.record(new TelemetryError(TelemetryErrorType.RATE_LIMIT_ERROR, 'Third error'));
    const { recentErrors } = aggregator.getStats();
    expect(recentErrors).toHaveLength(3);
    // Insertion order is preserved: oldest first.
    expect(recentErrors[0].message).toBe('First error');
    expect(recentErrors[1].message).toBe('Second error');
    expect(recentErrors[2].message).toBe('Third error');
  });

  it('should handle tie in most common error', () => {
    // Equal counts: either type is acceptable (implementation dependent).
    aggregator.record(new TelemetryError(TelemetryErrorType.NETWORK_ERROR, 'Network issue'));
    aggregator.record(new TelemetryError(TelemetryErrorType.VALIDATION_ERROR, 'Validation issue'));
    const stats = aggregator.getStats();
    expect(stats.mostCommonError).toBeDefined();
    expect([TelemetryErrorType.NETWORK_ERROR, TelemetryErrorType.VALIDATION_ERROR])
      .toContain(stats.mostCommonError);
  });
});
describe('reset()', () => {
  it('should clear all error data', () => {
    aggregator.record(new TelemetryError(TelemetryErrorType.NETWORK_ERROR, 'Test error'));
    // Sanity check before resetting.
    expect(aggregator.getStats().totalErrors).toBe(1);
    aggregator.reset();
    // Everything returns to the pristine empty shape.
    expect(aggregator.getStats()).toEqual({
      totalErrors: 0,
      errorsByType: {},
      mostCommonError: undefined,
      recentErrors: []
    });
  });
});
describe('error detail management', () => {
  it('should preserve error context in details', () => {
    const ctx = { operation: 'flush', batchSize: 50 };
    const err = new TelemetryError(
      TelemetryErrorType.NETWORK_ERROR,
      'Network failure',
      ctx,
      true
    );
    aggregator.record(err);
    // The stored detail keeps full fidelity, including the original timestamp.
    expect(aggregator.getStats().recentErrors[0]).toEqual({
      type: TelemetryErrorType.NETWORK_ERROR,
      message: 'Network failure',
      context: ctx,
      timestamp: err.timestamp,
      retryable: true
    });
  });

  it('should maintain error details queue with FIFO behavior', () => {
    // 15 errors through a 10-slot window: the oldest five are evicted.
    for (let i = 0; i < 15; i++) {
      aggregator.record(new TelemetryError(TelemetryErrorType.VALIDATION_ERROR, `Error ${i}`));
    }
    const { recentErrors } = aggregator.getStats();
    expect(recentErrors).toHaveLength(10);
    expect(recentErrors[0].message).toBe('Error 5');
    expect(recentErrors[9].message).toBe('Error 14');
  });
});
});

View File

@@ -1,293 +0,0 @@
/**
* Verification Tests for v2.18.3 Critical Fixes
* Tests all 7 fixes from the code review:
* - CRITICAL-01: Database checkpoints logged
* - CRITICAL-02: Defensive initialization
* - CRITICAL-03: Non-blocking checkpoints
* - HIGH-01: ReDoS vulnerability fixed
* - HIGH-02: Race condition prevention
* - HIGH-03: Timeout on Supabase operations
* - HIGH-04: N8N API checkpoints logged
*/
import { EarlyErrorLogger } from '../../../src/telemetry/early-error-logger';
import { sanitizeErrorMessageCore } from '../../../src/telemetry/error-sanitization-utils';
import { STARTUP_CHECKPOINTS } from '../../../src/telemetry/startup-checkpoints';
describe('v2.18.3 Critical Fixes Verification', () => {
describe('CRITICAL-02: Defensive Initialization', () => {
  it('should initialize all fields to safe defaults before any throwing operation', () => {
    // Construction must never throw, even if Supabase setup fails internally.
    const logger = EarlyErrorLogger.getInstance();
    expect(logger).toBeDefined();

    // The public API must be callable immediately after construction.
    const safeCalls = [
      () => logger.logCheckpoint(STARTUP_CHECKPOINTS.PROCESS_STARTED),
      () => logger.getCheckpoints(),
      () => logger.getStartupDuration(),
    ];
    for (const call of safeCalls) {
      expect(call).not.toThrow();
    }
  });

  it('should handle multiple getInstance calls correctly (singleton)', () => {
    // Two lookups must yield the very same instance.
    expect(EarlyErrorLogger.getInstance()).toBe(EarlyErrorLogger.getInstance());
  });

  it('should gracefully handle being disabled', () => {
    const logger = EarlyErrorLogger.getInstance();
    // Even when telemetry is disabled, none of these may throw.
    expect(() => logger.logCheckpoint(STARTUP_CHECKPOINTS.PROCESS_STARTED)).not.toThrow();
    expect(() => logger.logStartupError(STARTUP_CHECKPOINTS.DATABASE_CONNECTING, new Error('test'))).not.toThrow();
    expect(() => logger.logStartupSuccess([], 100)).not.toThrow();
  });
});
describe('CRITICAL-03: Non-blocking Checkpoints', () => {
  /**
   * Each logging method must be fire-and-forget: it returns immediately and
   * never blocks on Supabase/network work. The three original tests shared
   * byte-identical timing scaffolding, so they are table-driven here.
   *
   * NOTE(review): the 50ms ceiling is generous for a synchronous call but can
   * still flake on a heavily loaded CI machine — confirm the threshold if
   * these tests ever fail intermittently.
   */
  const fireAndForgetCases: Array<[string, (logger: EarlyErrorLogger) => void]> = [
    ['logCheckpoint', (logger) => logger.logCheckpoint(STARTUP_CHECKPOINTS.PROCESS_STARTED)],
    ['logStartupError', (logger) => logger.logStartupError(STARTUP_CHECKPOINTS.DATABASE_CONNECTING, new Error('test'))],
    ['logStartupSuccess', (logger) => logger.logStartupSuccess([STARTUP_CHECKPOINTS.PROCESS_STARTED], 100)],
  ];

  for (const [method, invoke] of fireAndForgetCases) {
    it(`${method} should be synchronous (fire-and-forget)`, () => {
      const logger = EarlyErrorLogger.getInstance();
      const start = Date.now();
      // Should return immediately, not block
      invoke(logger);
      const duration = Date.now() - start;
      expect(duration).toBeLessThan(50); // Should be nearly instant
    });
  }
});
describe('HIGH-01: ReDoS Vulnerability Fixed', () => {
  it('should handle long token strings without catastrophic backtracking', () => {
    // The old pattern (?<!Bearer\s)token\s*[=:]\s*\S+ backtracked
    // catastrophically on inputs shaped like this one.
    const maliciousInput = 'token=' + 'a'.repeat(10000);

    const start = Date.now();
    const result = sanitizeErrorMessageCore(maliciousInput);
    const elapsed = Date.now() - start;

    // Must finish quickly (< 100ms) and still redact the token.
    expect(elapsed).toBeLessThan(100);
    expect(result).toContain('[REDACTED]');
  });

  it('should use simplified regex pattern without negative lookbehind', () => {
    // Inputs that must be redacted by the simplified pattern, paired with
    // the placeholder each one should produce.
    const redacted: Array<[string, string]> = [
      ['token=abc123', '[REDACTED]'],
      ['token: xyz789', '[REDACTED]'],
      ['Bearer token=secret', '[TOKEN]'], // Bearer gets handled separately
      ['token = test', '[REDACTED]'],
    ];
    for (const [input, marker] of redacted) {
      expect(sanitizeErrorMessageCore(input)).toContain(marker);
    }
    // Plain text must pass through without a redaction marker.
    expect(sanitizeErrorMessageCore('some text here')).not.toContain('[REDACTED]');
  });

  it('should handle edge cases without hanging', () => {
    const edgeCases = ['token=', 'token:', 'token = ', '= token', 'tokentoken=value'];
    for (const input of edgeCases) {
      const start = Date.now();
      expect(() => sanitizeErrorMessageCore(input)).not.toThrow();
      expect(Date.now() - start).toBeLessThan(50);
    }
  });
});
describe('HIGH-02: Race Condition Prevention', () => {
  it('should track initialization state with initPromise', async () => {
    const logger = EarlyErrorLogger.getInstance();

    // Should have waitForInit method
    expect(logger.waitForInit).toBeDefined();
    expect(typeof logger.waitForInit).toBe('function');

    // FIX: the original used `.resolves.not.toThrow()`, an invalid matcher
    // combination — `toThrow` expects a function, not a resolved value, so
    // the assertion either errors or asserts nothing. Awaiting the promise
    // directly checks the same contract: a rejection fails the test.
    await logger.waitForInit();
  });

  it('should handle concurrent checkpoint logging safely', () => {
    const logger = EarlyErrorLogger.getInstance();
    // Log multiple checkpoints back-to-back; none may throw.
    const checkpoints = [
      STARTUP_CHECKPOINTS.PROCESS_STARTED,
      STARTUP_CHECKPOINTS.DATABASE_CONNECTING,
      STARTUP_CHECKPOINTS.DATABASE_CONNECTED,
      STARTUP_CHECKPOINTS.N8N_API_CHECKING,
      STARTUP_CHECKPOINTS.N8N_API_READY,
    ];
    expect(() => {
      checkpoints.forEach(cp => logger.logCheckpoint(cp));
    }).not.toThrow();
  });
});
describe('HIGH-03: Timeout on Supabase Operations', () => {
  it('should implement withTimeout wrapper function', async () => {
    const logger = EarlyErrorLogger.getInstance();
    // withTimeout is private, so instead verify that an error log completes
    // well inside the 5s timeout (+1s buffer) rather than hanging.
    const start = Date.now();
    logger.logStartupError(STARTUP_CHECKPOINTS.DATABASE_CONNECTING, new Error('test'));
    // Give the fire-and-forget operation a moment to attempt its work.
    await new Promise(resolve => setTimeout(resolve, 100));
    expect(Date.now() - start).toBeLessThan(6000);
  });

  it('should gracefully degrade when timeout occurs', async () => {
    const logger = EarlyErrorLogger.getInstance();
    // FIX: the original ended with a vacuous `expect(true).toBe(true)`.
    // Assert something real instead: five rapid error logs return
    // synchronously without throwing (fire-and-forget), even if the
    // underlying Supabase operation times out.
    expect(() => {
      for (let i = 0; i < 5; i++) {
        logger.logStartupError(STARTUP_CHECKPOINTS.DATABASE_CONNECTING, new Error(`test-${i}`));
      }
    }).not.toThrow();
    // Allow any in-flight background work to settle before the test ends.
    await new Promise(resolve => setTimeout(resolve, 250));
  });
});
describe('Error Sanitization - Shared Utilities', () => {
  it('should remove sensitive patterns in correct order', () => {
    const sensitiveData = 'Error: https://api.example.com/token=secret123 user@email.com';
    const sanitized = sanitizeErrorMessageCore(sensitiveData);

    // Raw secrets must be gone...
    for (const secret of ['api.example.com', 'secret123', 'user@email.com']) {
      expect(sanitized).not.toContain(secret);
    }
    // ...and replaced with the expected placeholders.
    expect(sanitized).toContain('[URL]');
    expect(sanitized).toContain('[EMAIL]');
  });

  it('should handle AWS keys', () => {
    const result = sanitizeErrorMessageCore('Error: AWS key AKIAIOSFODNN7EXAMPLE leaked');
    expect(result).not.toContain('AKIAIOSFODNN7EXAMPLE');
    expect(result).toContain('[AWS_KEY]');
  });

  it('should handle GitHub tokens', () => {
    const result = sanitizeErrorMessageCore('Auth failed with ghp_1234567890abcdefghijklmnopqrstuvwxyz');
    expect(result).not.toContain('ghp_1234567890abcdefghijklmnopqrstuvwxyz');
    expect(result).toContain('[GITHUB_TOKEN]');
  });

  it('should handle JWTs', () => {
    const result = sanitizeErrorMessageCore(
      'JWT: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIn0.abcdefghij'
    );
    // The full three-part JWT must be collapsed into the placeholder.
    expect(result).not.toContain('eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9');
    expect(result).toContain('[JWT]');
  });

  it('should limit stack traces to 3 lines', () => {
    const stackTrace = 'Error: Test\n at func1 (file1.js:1:1)\n at func2 (file2.js:2:2)\n at func3 (file3.js:3:3)\n at func4 (file4.js:4:4)';
    const lineCount = sanitizeErrorMessageCore(stackTrace).split('\n').length;
    expect(lineCount).toBeLessThanOrEqual(3);
  });

  it('should truncate at 500 chars after sanitization', () => {
    const result = sanitizeErrorMessageCore('Error: ' + 'a'.repeat(1000));
    expect(result.length).toBeLessThanOrEqual(503); // 500 + '...'
  });

  it('should return safe default on sanitization failure', () => {
    // Passing null exercises the defensive catch-all path.
    expect(sanitizeErrorMessageCore(null as any)).toBe('[SANITIZATION_FAILED]');
  });
});
describe('Checkpoint Integration', () => {
it('should have all required checkpoint constants defined', () => {
expect(STARTUP_CHECKPOINTS.PROCESS_STARTED).toBe('process_started');
expect(STARTUP_CHECKPOINTS.DATABASE_CONNECTING).toBe('database_connecting');
expect(STARTUP_CHECKPOINTS.DATABASE_CONNECTED).toBe('database_connected');
expect(STARTUP_CHECKPOINTS.N8N_API_CHECKING).toBe('n8n_api_checking');
expect(STARTUP_CHECKPOINTS.N8N_API_READY).toBe('n8n_api_ready');
expect(STARTUP_CHECKPOINTS.TELEMETRY_INITIALIZING).toBe('telemetry_initializing');
expect(STARTUP_CHECKPOINTS.TELEMETRY_READY).toBe('telemetry_ready');
expect(STARTUP_CHECKPOINTS.MCP_HANDSHAKE_STARTING).toBe('mcp_handshake_starting');
expect(STARTUP_CHECKPOINTS.MCP_HANDSHAKE_COMPLETE).toBe('mcp_handshake_complete');
expect(STARTUP_CHECKPOINTS.SERVER_READY).toBe('server_ready');
});
it('should track checkpoints correctly', () => {
const logger = EarlyErrorLogger.getInstance();
const initialCount = logger.getCheckpoints().length;
logger.logCheckpoint(STARTUP_CHECKPOINTS.PROCESS_STARTED);
const checkpoints = logger.getCheckpoints();
expect(checkpoints.length).toBeGreaterThanOrEqual(initialCount);
});
it('should calculate startup duration', () => {
const logger = EarlyErrorLogger.getInstance();
const duration = logger.getStartupDuration();
expect(duration).toBeGreaterThanOrEqual(0);
expect(typeof duration).toBe('number');
});
});
});