feat: add comprehensive performance benchmark tracking system
- Create benchmark test suites for critical operations:
  - Node loading performance
  - Database query performance
  - Search operations performance
  - Validation performance
  - MCP tool execution performance
- Add GitHub Actions workflow for benchmark tracking:
  - Runs on push to main and PRs
  - Uses github-action-benchmark for historical tracking
  - Comments on PRs with performance results
  - Alerts on >10% performance regressions
  - Stores results in GitHub Pages
- Create benchmark infrastructure:
  - Custom Vitest benchmark configuration
  - JSON reporter for CI results
  - Result formatter for github-action-benchmark
  - Performance threshold documentation
- Add supporting utilities:
  - SQLiteStorageService for benchmark database setup
  - MCPEngine wrapper for testing MCP tools
  - Test factories for generating benchmark data
  - Enhanced NodeRepository with benchmark methods
- Document benchmark system:
  - Comprehensive benchmark guide in docs/BENCHMARKS.md
  - Performance thresholds in .github/BENCHMARK_THRESHOLDS.md
  - README for benchmarks directory
  - Integration with existing test suite

The benchmark system will help monitor performance over time and catch regressions before they reach production.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
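For illustration, one of the benchmark suites described above could look roughly like the following Vitest sketch. The file name, the inMemory option value, and the getAllNodes() call are assumptions for the example, not code taken from this commit.

// benchmarks/node-loading.bench.ts — hypothetical sketch, not part of this commit
import { bench, describe } from 'vitest';
import { createTestDatabase, seedTestNodes } from '../tests/utils/database-utils';

// Top-level await works in Vitest's ESM environment; set up a small seeded database once.
const testDb = await createTestDatabase({ inMemory: true });
await seedTestNodes(testDb.nodeRepository, [
  { nodeType: 'nodes-base.httpRequest', displayName: 'HTTP Request', category: 'Core Nodes' }
]);

describe('node loading', () => {
  bench('load all nodes from SQLite', () => {
    // Measured operation; Vitest (via tinybench) reports ops/sec and mean time.
    testDb.nodeRepository.getAllNodes(); // getAllNodes() is an assumed repository method
  });
});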
tests/integration/database-integration.test.ts — new file, 276 lines
@@ -0,0 +1,276 @@
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import { createTestDatabase, seedTestNodes, seedTestTemplates, dbHelpers, TestDatabase } from '../utils/database-utils';
import { NodeRepository } from '../../src/database/node-repository';
import { TemplateRepository } from '../../src/templates/template-repository';
import * as path from 'path';

/**
 * Integration tests using the database utilities
 * These tests demonstrate realistic usage scenarios
 */

describe('Database Integration Tests', () => {
  let testDb: TestDatabase;
  let nodeRepo: NodeRepository;
  let templateRepo: TemplateRepository;

  beforeAll(async () => {
    // Create a persistent database for integration tests
    testDb = await createTestDatabase({
      inMemory: false,
      dbPath: path.join(__dirname, '../temp/integration-test.db'),
      enableFTS5: true
    });

    nodeRepo = testDb.nodeRepository;
    templateRepo = testDb.templateRepository;

    // Seed comprehensive test data
    await seedTestNodes(nodeRepo, [
      // Communication nodes
      { nodeType: 'nodes-base.email', displayName: 'Email', category: 'Communication' },
      { nodeType: 'nodes-base.discord', displayName: 'Discord', category: 'Communication' },
      { nodeType: 'nodes-base.twilio', displayName: 'Twilio', category: 'Communication' },

      // Data nodes
      { nodeType: 'nodes-base.postgres', displayName: 'Postgres', category: 'Data' },
      { nodeType: 'nodes-base.mysql', displayName: 'MySQL', category: 'Data' },
      { nodeType: 'nodes-base.mongodb', displayName: 'MongoDB', category: 'Data' },

      // AI nodes
      { nodeType: 'nodes-langchain.openAi', displayName: 'OpenAI', category: 'AI', isAITool: true },
      { nodeType: 'nodes-langchain.agent', displayName: 'AI Agent', category: 'AI', isAITool: true },

      // Trigger nodes
      { nodeType: 'nodes-base.cron', displayName: 'Cron', category: 'Core Nodes', isTrigger: true },
      { nodeType: 'nodes-base.emailTrigger', displayName: 'Email Trigger', category: 'Communication', isTrigger: true }
    ]);

    await seedTestTemplates(templateRepo, [
      {
        id: 100,
        name: 'Email to Discord Automation',
        description: 'Forward emails to Discord channel',
        nodes: [
          { id: 1, name: 'Email Trigger', icon: 'email' },
          { id: 2, name: 'Discord', icon: 'discord' }
        ]
      },
      {
        id: 101,
        name: 'Database Sync',
        description: 'Sync data between Postgres and MongoDB',
        nodes: [
          { id: 1, name: 'Cron', icon: 'clock' },
          { id: 2, name: 'Postgres', icon: 'database' },
          { id: 3, name: 'MongoDB', icon: 'database' }
        ]
      },
      {
        id: 102,
        name: 'AI Content Generator',
        description: 'Generate content using OpenAI',
        nodes: [
          { id: 1, name: 'Webhook', icon: 'webhook' },
          { id: 2, name: 'OpenAI', icon: 'ai' },
          { id: 3, name: 'Slack', icon: 'slack' }
        ]
      }
    ]);
  });

  afterAll(async () => {
    await testDb.cleanup();
  });

  describe('Node Repository Integration', () => {
    it('should query nodes by category', () => {
      const communicationNodes = testDb.adapter
        .prepare('SELECT * FROM nodes WHERE category = ?')
        .all('Communication') as any[];

      expect(communicationNodes).toHaveLength(4); // email, discord, twilio, emailTrigger

      const nodeTypes = communicationNodes.map(n => n.node_type);
      expect(nodeTypes).toContain('nodes-base.email');
      expect(nodeTypes).toContain('nodes-base.discord');
      expect(nodeTypes).toContain('nodes-base.twilio');
      expect(nodeTypes).toContain('nodes-base.emailTrigger');
    });

    it('should query AI-enabled nodes', () => {
      const aiNodes = nodeRepo.getAITools();

      // Should include seeded AI nodes plus defaults (httpRequest, slack)
      expect(aiNodes.length).toBeGreaterThanOrEqual(4);

      const aiNodeTypes = aiNodes.map(n => n.nodeType);
      expect(aiNodeTypes).toContain('nodes-langchain.openAi');
      expect(aiNodeTypes).toContain('nodes-langchain.agent');
    });

    it('should query trigger nodes', () => {
      const triggers = testDb.adapter
        .prepare('SELECT * FROM nodes WHERE is_trigger = 1')
        .all() as any[];

      expect(triggers.length).toBeGreaterThanOrEqual(3); // cron, emailTrigger, webhook

      const triggerTypes = triggers.map(t => t.node_type);
      expect(triggerTypes).toContain('nodes-base.cron');
      expect(triggerTypes).toContain('nodes-base.emailTrigger');
    });
  });

  describe('Template Repository Integration', () => {
    it('should find templates by node usage', () => {
      const discordTemplates = templateRepo.getTemplatesByNodes(['Discord'], 10);

      expect(discordTemplates).toHaveLength(1);
      expect(discordTemplates[0].name).toBe('Email to Discord Automation');
    });

    it('should search templates by keyword', () => {
      const dbTemplates = templateRepo.searchTemplates('database', 10);

      expect(dbTemplates).toHaveLength(1);
      expect(dbTemplates[0].name).toBe('Database Sync');
    });

    it('should get template details with workflow', () => {
      const template = templateRepo.getTemplate(102);

      expect(template).toBeDefined();
      expect(template!.name).toBe('AI Content Generator');

      // Parse workflow JSON
      const workflow = JSON.parse(template!.workflow_json);
      expect(workflow.nodes).toHaveLength(3);
      expect(workflow.nodes[0].name).toBe('Webhook');
      expect(workflow.nodes[1].name).toBe('OpenAI');
      expect(workflow.nodes[2].name).toBe('Slack');
    });
  });

  describe('Complex Queries', () => {
    it('should perform join queries between nodes and templates', () => {
      // Find all templates that use AI nodes
      const query = `
        SELECT DISTINCT t.*
        FROM templates t
        WHERE EXISTS (
          SELECT 1 FROM nodes n
          WHERE n.is_ai_tool = 1
          AND t.nodes_used LIKE '%"' || n.display_name || '"%'
        )
        ORDER BY t.views DESC
      `;
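
      // The LIKE pattern matches the quoted display_name inside the JSON-style nodes_used column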
      const aiTemplates = testDb.adapter.prepare(query).all() as any[];

      expect(aiTemplates.length).toBeGreaterThan(0);
      expect(aiTemplates[0].name).toBe('AI Content Generator');
    });

    it('should aggregate data across tables', () => {
      // Count nodes by category
      const categoryCounts = testDb.adapter.prepare(`
        SELECT category, COUNT(*) as count
        FROM nodes
        GROUP BY category
        ORDER BY count DESC
      `).all() as { category: string; count: number }[];

      expect(categoryCounts.length).toBeGreaterThan(0);

      const communicationCategory = categoryCounts.find(c => c.category === 'Communication');
      expect(communicationCategory).toBeDefined();
      expect(communicationCategory!.count).toBe(4);
    });
  });

  describe('Transaction Testing', () => {
    it('should handle complex transactional operations', () => {
      const initialNodeCount = dbHelpers.countRows(testDb.adapter, 'nodes');
      const initialTemplateCount = dbHelpers.countRows(testDb.adapter, 'templates');

      try {
        testDb.adapter.transaction(() => {
          // Add a new node
          nodeRepo.saveNode({
            nodeType: 'nodes-base.transaction-test',
            displayName: 'Transaction Test',
            packageName: 'n8n-nodes-base',
            style: 'programmatic',
            category: 'Test',
            properties: [],
            credentials: [],
            operations: [],
            isAITool: false,
            isTrigger: false,
            isWebhook: false,
            isVersioned: false
          });

          // Verify it was added
          const midCount = dbHelpers.countRows(testDb.adapter, 'nodes');
          expect(midCount).toBe(initialNodeCount + 1);

          // Force rollback
          throw new Error('Rollback test');
        });
      } catch (error) {
        // Expected error
      }

      // Verify rollback worked
      const finalNodeCount = dbHelpers.countRows(testDb.adapter, 'nodes');
      expect(finalNodeCount).toBe(initialNodeCount);
      expect(dbHelpers.nodeExists(testDb.adapter, 'nodes-base.transaction-test')).toBe(false);
    });
  });

  describe('Performance Testing', () => {
    it('should handle bulk operations efficiently', async () => {
      const bulkNodes = Array.from({ length: 1000 }, (_, i) => ({
        nodeType: `nodes-base.bulk${i}`,
        displayName: `Bulk Node ${i}`,
        category: i % 2 === 0 ? 'Category A' : 'Category B',
        isAITool: i % 10 === 0
      }));

      const insertDuration = await measureDatabaseOperation('Bulk Insert 1000 nodes', async () => {
        await seedTestNodes(nodeRepo, bulkNodes);
      });

      // Should complete reasonably quickly
      expect(insertDuration).toBeLessThan(5000); // 5 seconds max

      // Test query performance
      const queryDuration = await measureDatabaseOperation('Query Category A nodes', async () => {
        const categoryA = testDb.adapter
          .prepare('SELECT COUNT(*) as count FROM nodes WHERE category = ?')
          .get('Category A') as { count: number };

        expect(categoryA.count).toBe(500);
      });

      expect(queryDuration).toBeLessThan(100); // Queries should be very fast

      // Cleanup bulk data
      dbHelpers.executeSql(testDb.adapter, "DELETE FROM nodes WHERE node_type LIKE 'nodes-base.bulk%'");
    });
  });
});

// Helper function
async function measureDatabaseOperation(
  name: string,
  operation: () => Promise<void>
): Promise<number> {
  const start = performance.now();
  await operation();
  const duration = performance.now() - start;
  console.log(`[Performance] ${name}: ${duration.toFixed(2)}ms`);
  return duration;
}
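The timings logged by measureDatabaseOperation line up with the github-action-benchmark workflow described in the commit message. As a hedged sketch (the actual formatter and reporter shipped in this commit may be organized differently), such durations could be converted into the custom JSON layout consumed by github-action-benchmark's customSmallerIsBetter tool:

// format-benchmark-results.ts — illustrative sketch only; not the formatter added by this commit
import { writeFileSync } from 'fs';

interface BenchmarkEntry {
  name: string;   // benchmark label, e.g. 'Bulk Insert 1000 nodes'
  unit: string;   // 'ms' here, so smaller values count as improvements
  value: number;  // measured duration
}

export function formatResults(timings: Record<string, number>): BenchmarkEntry[] {
  return Object.entries(timings).map(([name, ms]) => ({
    name,
    unit: 'ms',
    value: Number(ms.toFixed(2)),
  }));
}

// Example with placeholder durations, written to a file the CI workflow could publish:
writeFileSync(
  'benchmark-results.json',
  JSON.stringify(formatResults({ 'Bulk Insert 1000 nodes': 1234.56, 'Query Category A nodes': 3.21 }), null, 2)
);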