- Create benchmark test suites for critical operations (a sketch follows this message):
  - Node loading performance
  - Database query performance
  - Search operations performance
  - Validation performance
  - MCP tool execution performance
- Add GitHub Actions workflow for benchmark tracking:
  - Runs on push to main and PRs
  - Uses github-action-benchmark for historical tracking
  - Comments on PRs with performance results
  - Alerts on >10% performance regressions
  - Stores results in GitHub Pages
- Create benchmark infrastructure:
  - Custom Vitest benchmark configuration
  - JSON reporter for CI results
  - Result formatter for github-action-benchmark (sketched below)
  - Performance threshold documentation
- Add supporting utilities:
  - SQLiteStorageService for benchmark database setup
  - MCPEngine wrapper for testing MCP tools
  - Test factories for generating benchmark data
  - Enhanced NodeRepository with benchmark methods
- Document benchmark system:
  - Comprehensive benchmark guide in docs/BENCHMARKS.md
  - Performance thresholds in .github/BENCHMARK_THRESHOLDS.md
  - README for benchmarks directory
  - Integration with existing test suite

The benchmark system will help monitor performance over time and catch regressions before they reach production.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
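To make the first bullet concrete, here is a minimal sketch of what one such Vitest benchmark suite could look like. Only `NodeRepository` is named in the message above; the import path, the constructor argument, and the `getAllNodes`/`getNodeByType` methods are assumptions made for illustration.

```typescript
// Hypothetical benchmark suite for the "node loading" case. Only
// NodeRepository is named in the commit message; the import path,
// constructor argument, and method names are illustrative assumptions.
import { bench, describe } from 'vitest';
import { NodeRepository } from '../src/database/node-repository';

describe('node loading', () => {
  const repo = new NodeRepository(':memory:');

  bench('load all nodes', async () => {
    await repo.getAllNodes();
  }, { time: 1000 }); // sample for ~1s so the mean stabilizes

  bench('load one node by type', async () => {
    await repo.getNodeByType('nodes-base.httpRequest');
  }, { time: 1000 });
});
```

The result formatter mentioned under "benchmark infrastructure" can be similarly small: github-action-benchmark's `customSmallerIsBetter` tool accepts a JSON array of `{ name, unit, value }` entries, so the formatter only has to map raw results into that shape. The `BenchResult` input type below is a simplified assumption, not Vitest's actual reporter schema.

```typescript
// Hypothetical formatter producing the JSON array that
// github-action-benchmark's `customSmallerIsBetter` tool expects:
// [{ name, unit, value }, ...]. BenchResult is a simplified assumption.
interface BenchResult {
  name: string;
  meanMs: number; // mean time per iteration, in milliseconds
}

export function toActionBenchmarkJson(results: BenchResult[]): string {
  return JSON.stringify(
    results.map((r) => ({ name: r.name, unit: 'ms', value: r.meanMs })),
    null,
    2,
  );
}
```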
import { beforeEach, afterEach, vi } from 'vitest';
import { loadTestEnvironment, getTestConfig, getTestTimeout } from './test-env';

// Load test environment configuration
loadTestEnvironment();

// Get test configuration
const testConfig = getTestConfig();

// Reset mocks between tests
beforeEach(() => {
  vi.clearAllMocks();
});

// Clean up after each test
afterEach(() => {
  vi.restoreAllMocks();

  // Perform cleanup if enabled
  if (testConfig.cleanup.enabled) {
    // Add cleanup logic here if needed
  }
});

// Global test timeout from configuration
vi.setConfig({ testTimeout: getTestTimeout('global') });

// Configure console output based on test configuration
if (!testConfig.logging.debug) {
  global.console = {
    ...console,
    log: vi.fn(),
    debug: vi.fn(),
    info: vi.fn(),
    warn: testConfig.logging.level === 'error' ? vi.fn() : console.warn,
    error: console.error, // Always show errors
  };
}

// Set up performance monitoring if enabled
if (testConfig.performance) {
  global.performance = global.performance || {
    now: () => Date.now(),
    mark: vi.fn(),
    measure: vi.fn(),
    getEntriesByName: vi.fn(() => []),
    getEntriesByType: vi.fn(() => []),
    clearMarks: vi.fn(),
    clearMeasures: vi.fn(),
  } as any;
}

// Export test configuration for use in tests
export { testConfig, getTestTimeout, getTestConfig };
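For context, a test consuming this setup's exports might look like the following sketch; the `./setup` import path and the suite contents are hypothetical, while `testConfig.logging.level` and the `'global'` timeout key come from the setup file above.

```typescript
// Hypothetical consumer of the setup exports; import path is an assumption.
import { describe, it, expect } from 'vitest';
import { testConfig, getTestTimeout } from './setup';

describe('database queries', () => {
  it('completes within the configured global timeout', async () => {
    // ...exercise the code under test here...
    expect(testConfig.logging.level).toBeDefined();
  }, getTestTimeout('global')); // per-test timeout from configuration
});
```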