refactor: streamline test suite - cut 33 files, enable parallel execution (11.9x speedup)

Remove duplicate, low-value, and fragmented test files while preserving
all meaningful coverage. Enable parallel test execution and remove
the entire benchmark infrastructure.

Key changes:
- Consolidate workflow-validator tests (13 files -> 3)
- Consolidate config-validator tests (9 files -> 3)
- Consolidate telemetry tests (11 files -> 6)
- Merge AI validator tests (2 files -> 1)
- Remove example/demo test files, mock-testing files, and already-skipped tests
- Remove benchmark infrastructure (10 files, CI workflow, 4 npm scripts)
- Enable parallel test execution (remove singleThread: true)
- Remove the `retry: 2` test-runner setting that was masking flaky tests
- Slim CI publish-results job

Results: 224 -> 191 test files, 4690 -> 4303 tests, 121K -> 106K lines
Local runtime: 319s -> 27s (11.9x speedup)

Conceived by Romuald Członkowski - www.aiadvisors.pl/en

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
czlonkowski
2026-03-26 23:41:06 +01:00
parent 07bd1d4cc2
commit fdc37efde7
62 changed files with 2998 additions and 20806 deletions

View File

@@ -702,4 +702,244 @@ describe('WorkflowValidator - Loop Node Validation', () => {
expect(result).toBeDefined();
});
});
// ─── Loop Output Edge Cases (absorbed from loop-output-edge-cases) ──
describe('Nodes without outputs', () => {
  it('should handle nodes with null outputs gracefully', async () => {
    // Repository reports a node whose output metadata is entirely absent.
    mockNodeRepository.getNode.mockReturnValue({
      nodeType: 'nodes-base.httpRequest',
      outputs: null,
      outputNames: null,
      properties: [],
    });

    const httpNode = { id: '1', name: 'HTTP Request', type: 'n8n-nodes-base.httpRequest', position: [100, 100], parameters: { url: 'https://example.com' } };
    const setNode = { id: '2', name: 'Set', type: 'n8n-nodes-base.set', position: [300, 100], parameters: {} };
    const workflow = {
      name: 'No Outputs',
      nodes: [httpNode, setNode],
      connections: { 'HTTP Request': { main: [[{ node: 'Set', type: 'main', index: 0 }]] } },
    };

    const result = await validator.validateWorkflow(workflow as any);

    expect(result).toBeDefined();
    // Missing output metadata must not produce output-related errors
    // (connection errors are excluded from the filter on purpose).
    expect(
      result.errors.filter(e => e.message?.includes('output') && !e.message?.includes('Connection'))
    ).toHaveLength(0);
  });

  it('should handle nodes with empty outputs array', async () => {
    // Repository reports a node that declares zero outputs.
    mockNodeRepository.getNode.mockReturnValue({
      nodeType: 'nodes-base.customNode',
      outputs: [],
      outputNames: [],
      properties: [],
    });

    // Single node wired back to itself — a self-loop.
    const selfLoopWorkflow = {
      name: 'Empty Outputs',
      nodes: [{ id: '1', name: 'Custom Node', type: 'n8n-nodes-base.customNode', position: [100, 100], parameters: {} }],
      connections: { 'Custom Node': { main: [[{ node: 'Custom Node', type: 'main', index: 0 }]] } },
    };

    const result = await validator.validateWorkflow(selfLoopWorkflow as any);

    // Exactly one self-referencing warning is expected for the self-loop.
    expect(result.warnings.filter(w => w.message?.includes('self-referencing'))).toHaveLength(1);
  });
});
describe('Invalid connection indices', () => {
  it('should handle very large connection indices', async () => {
    // Node declares only two outputs…
    mockNodeRepository.getNode.mockReturnValue({
      nodeType: 'nodes-base.switch',
      outputs: [{ displayName: 'Output 1' }, { displayName: 'Output 2' }],
      properties: [],
    });

    // …but the connection claims output index 999. Validation must not throw.
    const oversizedIndexWorkflow = {
      name: 'Large Index',
      nodes: [
        { id: '1', name: 'Switch', type: 'n8n-nodes-base.switch', position: [100, 100], parameters: {} },
        { id: '2', name: 'Set', type: 'n8n-nodes-base.set', position: [300, 100], parameters: {} },
      ],
      connections: { 'Switch': { main: [[{ node: 'Set', type: 'main', index: 999 }]] } },
    };

    expect(await validator.validateWorkflow(oversizedIndexWorkflow as any)).toBeDefined();
  });
});
describe('Malformed connection structures', () => {
  it('should handle null connection objects', async () => {
    // First output slot is null instead of an array of connection targets,
    // and the second slot points at a node that does not exist.
    const nullSlotWorkflow = {
      name: 'Null Connections',
      nodes: [{ id: '1', name: 'Split In Batches', type: 'n8n-nodes-base.splitInBatches', position: [100, 100], parameters: {} }],
      connections: { 'Split In Batches': { main: [null, [{ node: 'NonExistent', type: 'main', index: 0 }]] as any } },
    };

    expect(await validator.validateWorkflow(nullSlotWorkflow as any)).toBeDefined();
  });

  it('should handle missing connection properties', async () => {
    // Three connection entries, each missing one or more required fields.
    const partialConnections = [{ node: 'Set' } as any, { type: 'main', index: 0 } as any, {} as any];
    const workflow = {
      name: 'Malformed Connections',
      nodes: [
        { id: '1', name: 'Split In Batches', type: 'n8n-nodes-base.splitInBatches', position: [100, 100], parameters: {} },
        { id: '2', name: 'Set', type: 'n8n-nodes-base.set', position: [300, 100], parameters: {} },
      ],
      connections: {
        'Split In Batches': { main: [partialConnections] },
      },
    };

    const result = await validator.validateWorkflow(workflow as any);

    // Validator must survive the malformed entries and report at least one error.
    expect(result).toBeDefined();
    expect(result.errors.length).toBeGreaterThan(0);
  });
});
describe('Complex output structures', () => {
  it('should handle nodes with many outputs', async () => {
    // Node metadata advertising 20 distinct named outputs.
    const twentyOutputs = Array.from({ length: 20 }, (_, i) => ({
      displayName: `Output ${i + 1}`, name: `output${i + 1}`,
    }));
    mockNodeRepository.getNode.mockReturnValue({
      nodeType: 'nodes-base.complexSwitch',
      outputs: twentyOutputs,
      outputNames: twentyOutputs.map(o => o.name),
      properties: [],
    });

    // Every one of the 20 output slots routes to the same target node.
    const fanOutWorkflow = {
      name: 'Many Outputs',
      nodes: [
        { id: '1', name: 'Complex Switch', type: 'n8n-nodes-base.complexSwitch', position: [100, 100], parameters: {} },
        { id: '2', name: 'Set', type: 'n8n-nodes-base.set', position: [300, 100], parameters: {} },
      ],
      connections: { 'Complex Switch': { main: Array.from({ length: 20 }, () => [{ node: 'Set', type: 'main', index: 0 }]) } },
    };

    expect(await validator.validateWorkflow(fanOutWorkflow as any)).toBeDefined();
  });

  it('should handle mixed output types (main, error, ai_tool)', async () => {
    mockNodeRepository.getNode.mockReturnValue({
      nodeType: 'nodes-base.complexNode',
      outputs: [{ displayName: 'Main', type: 'main' }, { displayName: 'Error', type: 'error' }],
      properties: [],
    });

    // One connection per connection kind: main, error, and ai_tool.
    const mixedWorkflow = {
      name: 'Mixed Output Types',
      nodes: [
        { id: '1', name: 'Complex Node', type: 'n8n-nodes-base.complexNode', position: [100, 100], parameters: {} },
        { id: '2', name: 'Main Handler', type: 'n8n-nodes-base.set', position: [300, 50], parameters: {} },
        { id: '3', name: 'Error Handler', type: 'n8n-nodes-base.set', position: [300, 150], parameters: {} },
        { id: '4', name: 'Tool', type: 'n8n-nodes-base.httpRequest', position: [500, 100], parameters: {} },
      ],
      connections: {
        'Complex Node': {
          main: [[{ node: 'Main Handler', type: 'main', index: 0 }]],
          error: [[{ node: 'Error Handler', type: 'main', index: 0 }]],
          ai_tool: [[{ node: 'Tool', type: 'main', index: 0 }]],
        },
      },
    };

    const result = await validator.validateWorkflow(mixedWorkflow as any);

    expect(result).toBeDefined();
    // All three connection kinds should be counted as valid connections.
    expect(result.statistics.validConnections).toBe(3);
  });
});
// Edge cases specific to the SplitInBatches (loop) node. The assertions below
// depend on exact warning/error message substrings emitted by the validator.
describe('SplitInBatches specific edge cases', () => {
// An isolated SplitInBatches node (no connections) should not trigger any
// loop-specific diagnostics — there is nothing to validate.
it('should handle SplitInBatches with no connections', async () => {
const workflow = {
name: 'Isolated SplitInBatches',
nodes: [{ id: '1', name: 'Split In Batches', type: 'n8n-nodes-base.splitInBatches', position: [100, 100], parameters: {} }],
connections: {},
};
const result = await validator.validateWorkflow(workflow as any);
// No SplitInBatches / loop / done-output warnings expected.
const splitWarnings = result.warnings.filter(w => w.message?.includes('SplitInBatches') || w.message?.includes('loop') || w.message?.includes('done'));
expect(splitWarnings).toHaveLength(0);
});
// Only output slot 0 is connected (per the test name, the "done" branch);
// slot 1 is an empty array. No "connect back" warning should fire.
it('should handle SplitInBatches with only done output connected', async () => {
const workflow = {
name: 'Single Output SplitInBatches',
nodes: [
{ id: '1', name: 'Split In Batches', type: 'n8n-nodes-base.splitInBatches', position: [100, 100], parameters: {} },
{ id: '2', name: 'Final Action', type: 'n8n-nodes-base.emailSend', position: [300, 100], parameters: {} },
],
connections: { 'Split In Batches': { main: [[{ node: 'Final Action', type: 'main', index: 0 }], []] } },
};
const result = await validator.validateWorkflow(workflow as any);
const loopWarnings = result.warnings.filter(w => w.message?.includes('loop') && w.message?.includes('connect back'));
expect(loopWarnings).toHaveLength(0);
});
// Both output slots target the same node, which itself loops back to the
// SplitInBatches node — a valid loop shape, so no warning expected.
it('should handle SplitInBatches with both outputs to same node', async () => {
const workflow = {
name: 'Same Target SplitInBatches',
nodes: [
{ id: '1', name: 'Split In Batches', type: 'n8n-nodes-base.splitInBatches', position: [100, 100], parameters: {} },
{ id: '2', name: 'Multi Purpose', type: 'n8n-nodes-base.set', position: [300, 100], parameters: {} },
],
connections: {
'Split In Batches': { main: [[{ node: 'Multi Purpose', type: 'main', index: 0 }], [{ node: 'Multi Purpose', type: 'main', index: 0 }]] },
'Multi Purpose': { main: [[{ node: 'Split In Batches', type: 'main', index: 0 }]] },
},
};
const result = await validator.validateWorkflow(workflow as any);
const loopWarnings = result.warnings.filter(w => w.message?.includes('loop') && w.message?.includes('connect back'));
expect(loopWarnings).toHaveLength(0);
});
// A processing (function) node hangs off output slot 0 and loops back —
// the validator should flag this as reversed outputs, exactly once.
it('should detect reversed outputs with processing node on done output', async () => {
const workflow = {
name: 'Reversed SplitInBatches with Function Node',
nodes: [
{ id: '1', name: 'Split In Batches', type: 'n8n-nodes-base.splitInBatches', position: [100, 100], parameters: {} },
{ id: '2', name: 'Process Function', type: 'n8n-nodes-base.function', position: [300, 100], parameters: {} },
],
connections: {
'Split In Batches': { main: [[{ node: 'Process Function', type: 'main', index: 0 }], []] },
'Process Function': { main: [[{ node: 'Split In Batches', type: 'main', index: 0 }]] },
},
};
const result = await validator.validateWorkflow(workflow as any);
const reversedErrors = result.errors.filter(e => e.message?.includes('SplitInBatches outputs appear reversed'));
expect(reversedErrors).toHaveLength(1);
});
// The loop-branch target only references itself, never the SplitInBatches
// node: expect one "doesn't connect back" warning plus one self-referencing
// warning for the self-loop on 'SelfRef'.
it('should handle self-referencing nodes in loop back detection', async () => {
const workflow = {
name: 'Self Reference in Loop Back',
nodes: [
{ id: '1', name: 'Split In Batches', type: 'n8n-nodes-base.splitInBatches', position: [100, 100], parameters: {} },
{ id: '2', name: 'SelfRef', type: 'n8n-nodes-base.set', position: [300, 100], parameters: {} },
],
connections: {
'Split In Batches': { main: [[], [{ node: 'SelfRef', type: 'main', index: 0 }]] },
'SelfRef': { main: [[{ node: 'SelfRef', type: 'main', index: 0 }]] },
},
};
const result = await validator.validateWorkflow(workflow as any);
expect(result.warnings.filter(w => w.message?.includes("doesn't connect back"))).toHaveLength(1);
expect(result.warnings.filter(w => w.message?.includes('self-referencing'))).toHaveLength(1);
});
// Scale smoke test: 100 SplitInBatches nodes chained 0 -> 1 -> … -> 99.
// Verifies the validator completes and counts every node.
it('should handle many SplitInBatches nodes', async () => {
const nodes = Array.from({ length: 100 }, (_, i) => ({
id: `split${i}`, name: `Split ${i}`, type: 'n8n-nodes-base.splitInBatches',
position: [100 + (i % 10) * 100, 100 + Math.floor(i / 10) * 100], parameters: {},
}));
const connections: any = {};
// Chain each node's output slot 0 to the next node; slot 1 left empty.
for (let i = 0; i < 99; i++) {
connections[`Split ${i}`] = { main: [[{ node: `Split ${i + 1}`, type: 'main', index: 0 }], []] };
}
const result = await validator.validateWorkflow({ name: 'Many SplitInBatches', nodes, connections } as any);
expect(result).toBeDefined();
expect(result.statistics.totalNodes).toBe(100);
});
});
});