- Add test result artifacts storage with multiple formats (JUnit, JSON, HTML)
- Configure GitHub Actions to upload and preserve test outputs
- Add PR comment integration with test summaries
- Create benchmark comparison workflow for PR performance tracking
- Add detailed test report generation scripts
- Configure artifact retention policies (30 days for tests, 90 for combined)
- Set up test metadata collection for better debugging

This completes all remaining test infrastructure tasks and provides comprehensive visibility into test results across the CI/CD pipeline.
34 lines · 841 B · TypeScript
import { defineConfig } from 'vitest/config';
|
|
import path from 'path';
|
|
|
|
// Vitest configuration dedicated to running benchmark files
// (tests/benchmarks/**/*.bench.ts) in a Node environment.
export default defineConfig({
  test: {
    // Make vitest's test APIs available without per-file imports.
    globals: true,
    environment: 'node',
    // Only benchmark files are picked up by this config.
    include: ['tests/benchmarks/**/*.bench.ts'],
    benchmark: {
      // Benchmark specific options
      include: ['tests/benchmarks/**/*.bench.ts'],
      // In CI, add the custom JSON reporter (empty options object) so results
      // can be uploaded as an artifact; locally only the default reporter runs.
      reporters: process.env.CI
        ? ['default', ['./scripts/vitest-benchmark-json-reporter.js', {}]]
        : ['default'],
      outputFile: './benchmark-results.json',
    },
    // No setup files are needed for benchmarks.
    setupFiles: [],
    // Run everything in one forked child process — presumably to keep
    // benchmark timings stable by avoiding parallel workers (TODO confirm).
    pool: 'forks',
    poolOptions: {
      forks: {
        singleFork: true,
      },
    },
    // Increase timeout for benchmarks
    testTimeout: 120000,
    hookTimeout: 120000,
  },
  resolve: {
    // Path aliases mirroring the project layout: '@' -> src, '@tests' -> tests.
    alias: {
      '@': path.resolve(__dirname, './src'),
      '@tests': path.resolve(__dirname, './tests'),
    },
  },
});