feat: add comprehensive performance benchmark tracking system
- Create benchmark test suites for critical operations:
  - Node loading performance
  - Database query performance
  - Search operations performance
  - Validation performance
  - MCP tool execution performance

- Add GitHub Actions workflow for benchmark tracking:
  - Runs on push to main and PRs
  - Uses github-action-benchmark for historical tracking
  - Comments on PRs with performance results
  - Alerts on >10% performance regressions
  - Stores results in GitHub Pages

- Create benchmark infrastructure:
  - Custom Vitest benchmark configuration
  - JSON reporter for CI results
  - Result formatter for github-action-benchmark
  - Performance threshold documentation

- Add supporting utilities:
  - SQLiteStorageService for benchmark database setup
  - MCPEngine wrapper for testing MCP tools
  - Test factories for generating benchmark data
  - Enhanced NodeRepository with benchmark methods

- Document benchmark system:
  - Comprehensive benchmark guide in docs/BENCHMARKS.md
  - Performance thresholds in .github/BENCHMARK_THRESHOLDS.md
  - README for benchmarks directory
  - Integration with existing test suite

The benchmark system will help monitor performance over time and catch regressions before they reach production.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
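To make the first bullet group concrete: a suite such as the node-loading benchmark could be written with Vitest's bench API roughly as follows. This is a minimal sketch only — the import paths, the constructor signatures, and the getAllNodes method are assumptions, since the commit's actual suite files are not shown on this page.

import { bench, describe } from 'vitest';
// Hypothetical imports — the commit names these utilities, but their real
// paths and APIs are not visible here.
import { SQLiteStorageService } from '../src/services/sqlite-storage-service';
import { NodeRepository } from '../src/database/node-repository';

describe('node loading performance', () => {
  // Assumed setup: an in-memory database prepared by the storage service.
  const storage = new SQLiteStorageService(':memory:');
  const repository = new NodeRepository(storage);

  bench(
    'load all nodes',
    () => {
      repository.getAllNodes(); // assumed method name
    },
    { iterations: 100, warmupIterations: 10 }
  );
});

A suite like this would be run with `vitest bench`, presumably pointed at the custom benchmark configuration the commit mentions.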
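The infrastructure bullets pair a JSON reporter with a result formatter for github-action-benchmark. That action's customSmallerIsBetter tool consumes a flat JSON array of { name, unit, value } entries, so the formatter can reduce to a small mapping step. The input shape below is an assumed simplification of the Vitest bench output, not the reporter's exact schema:

// Hypothetical shape of a collected benchmark result; the real Vitest
// JSON reporter output is richer and nested per file and suite.
interface BenchResult {
  name: string;
  meanMs: number; // mean time per iteration in milliseconds (assumed field)
}

// Entry format accepted by github-action-benchmark's customSmallerIsBetter tool.
interface ActionBenchmarkEntry {
  name: string;
  unit: string;
  value: number;
}

export function formatForActionBenchmark(results: BenchResult[]): ActionBenchmarkEntry[] {
  return results.map((result) => ({
    name: result.name,
    unit: 'ms',
    value: result.meanMs, // lower is better, matching customSmallerIsBetter
  }));
}

The diff below shows the commit's changes to what appears to be the shared Vitest setup file, which wires a new ./test-env module into timeouts, console handling, and cleanup.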
@@ -1,4 +1,11 @@
 import { beforeEach, afterEach, vi } from 'vitest';
+import { loadTestEnvironment, getTestConfig, getTestTimeout } from './test-env';
 
+// Load test environment configuration
+loadTestEnvironment();
+
+// Get test configuration
+const testConfig = getTestConfig();
+
 // Reset mocks between tests
 beforeEach(() => {
@@ -8,19 +15,40 @@ beforeEach(() => {
 // Clean up after each test
 afterEach(() => {
   vi.restoreAllMocks();
+
+  // Perform cleanup if enabled
+  if (testConfig.cleanup.enabled) {
+    // Add cleanup logic here if needed
+  }
 });
 
-// Global test timeout
-vi.setConfig({ testTimeout: 10000 });
+// Global test timeout from configuration
+vi.setConfig({ testTimeout: getTestTimeout('global') });
 
-// Silence console during tests unless DEBUG=true
-if (process.env.DEBUG !== 'true') {
+// Configure console output based on test configuration
+if (!testConfig.logging.debug) {
   global.console = {
     ...console,
     log: vi.fn(),
     debug: vi.fn(),
     info: vi.fn(),
-    warn: vi.fn(),
-    error: vi.fn(),
+    warn: testConfig.logging.level === 'error' ? vi.fn() : console.warn,
+    error: console.error, // Always show errors
   };
-}
+}
+
+// Set up performance monitoring if enabled
+if (testConfig.performance) {
+  global.performance = global.performance || {
+    now: () => Date.now(),
+    mark: vi.fn(),
+    measure: vi.fn(),
+    getEntriesByName: vi.fn(() => []),
+    getEntriesByType: vi.fn(() => []),
+    clearMarks: vi.fn(),
+    clearMeasures: vi.fn(),
+  } as any;
+}
+
+// Export test configuration for use in tests
+export { testConfig, getTestTimeout, getTestConfig };
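The setup file above imports loadTestEnvironment, getTestConfig, and getTestTimeout from a ./test-env module that this page does not show. Reconstructed purely from how the setup file consumes the config (cleanup.enabled, logging.debug, logging.level, performance), that module could look roughly like this; every environment variable name and timeout value below is an assumption:

// test-env.ts — one possible shape for the module the setup file imports.
export interface TestConfig {
  cleanup: { enabled: boolean };
  logging: { debug: boolean; level: 'error' | 'warn' | 'info' | 'debug' };
  performance: boolean;
}

// Assumed timeout table; the real values would live in project configuration.
const TIMEOUTS: Record<string, number> = {
  global: 10_000,
  unit: 5_000,
  integration: 30_000,
};

export function loadTestEnvironment(): void {
  // Sketch: the real implementation presumably loads .env files or similar.
  process.env.NODE_ENV = process.env.NODE_ENV ?? 'test';
}

export function getTestConfig(): TestConfig {
  return {
    cleanup: { enabled: process.env.TEST_CLEANUP !== 'false' },
    logging: {
      debug: process.env.DEBUG === 'true',
      level: (process.env.TEST_LOG_LEVEL as TestConfig['logging']['level']) ?? 'error',
    },
    performance: process.env.TEST_PERFORMANCE === 'true',
  };
}

export function getTestTimeout(kind: string): number {
  return TIMEOUTS[kind] ?? TIMEOUTS.global;
}

With a table like this, getTestTimeout('global') returning 10_000 would preserve the behavior of the replaced hard-coded vi.setConfig({ testTimeout: 10000 }) line while letting other suites opt into different budgets.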