- Create benchmark test suites for critical operations (a minimal example is sketched after this list):
  - Node loading performance
  - Database query performance
  - Search operations performance
  - Validation performance
  - MCP tool execution performance
- Add GitHub Actions workflow for benchmark tracking:
  - Runs on push to main and PRs
  - Uses github-action-benchmark for historical tracking
  - Comments on PRs with performance results
  - Alerts on >10% performance regressions
  - Stores results in GitHub Pages
- Create benchmark infrastructure:
  - Custom Vitest benchmark configuration
  - JSON reporter for CI results
  - Result formatter for github-action-benchmark
  - Performance threshold documentation
- Add supporting utilities:
  - SQLiteStorageService for benchmark database setup
  - MCPEngine wrapper for testing MCP tools
  - Test factories for generating benchmark data
  - Enhanced NodeRepository with benchmark methods
- Document benchmark system:
  - Comprehensive benchmark guide in docs/BENCHMARKS.md
  - Performance thresholds in .github/BENCHMARK_THRESHOLDS.md
  - README for benchmarks directory
  - Integration with existing test suite

The benchmark system will help monitor performance over time and catch regressions before they reach production.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
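A minimal sketch of what one of these Vitest benchmark suites could look like (the file path and the `loadAllNodes` helper are placeholders, not the repository's actual API):

```typescript
// benchmarks/node-loading.bench.ts (illustrative)
import { bench, describe } from 'vitest';
// Hypothetical loader helper; the real project entry point may differ.
import { loadAllNodes } from '../src/loaders/node-loader';

describe('node loading performance', () => {
  bench(
    'load all bundled nodes',
    async () => {
      await loadAllNodes();
    },
    { iterations: 10, warmupIterations: 2 }
  );
});
```

Running `vitest bench` would execute suites like this; the JSON reporter and result formatter mentioned above would then translate the raw results into the format github-action-benchmark consumes.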
# Test Environment Configuration for n8n-mcp
# This file contains test-specific environment variables
# DO NOT commit sensitive values - use .env.test.local for secrets

# === Test Mode Configuration ===
NODE_ENV=test
MCP_MODE=test
TEST_ENVIRONMENT=true

# === Database Configuration ===
# Use in-memory database for tests by default
NODE_DB_PATH=:memory:
# Uncomment to use a persistent test database
# NODE_DB_PATH=./tests/fixtures/test-nodes.db
REBUILD_ON_START=false
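A sketch of how a test helper might honour `NODE_DB_PATH`, assuming the project uses better-sqlite3 (the helper name and path are illustrative):

```typescript
// tests/utils/create-test-db.ts (illustrative)
import Database from 'better-sqlite3';

export function createTestDatabase() {
  // ':memory:' gives each run a fresh, isolated database;
  // point NODE_DB_PATH at a file to inspect state after a failing test.
  const dbPath = process.env.NODE_DB_PATH ?? ':memory:';
  return new Database(dbPath);
}
```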

# === API Configuration for Mocking ===
# Mock API endpoints
N8N_API_URL=http://localhost:3001/mock-api
N8N_API_KEY=test-api-key-12345
N8N_WEBHOOK_BASE_URL=http://localhost:3001/webhook
N8N_WEBHOOK_TEST_URL=http://localhost:3001/webhook-test

# === Test Server Configuration ===
PORT=3001
HOST=127.0.0.1
CORS_ORIGIN=http://localhost:3000,http://localhost:5678

# === Authentication ===
AUTH_TOKEN=test-auth-token
MCP_AUTH_TOKEN=test-mcp-auth-token

# === Logging Configuration ===
# Set to 'debug' for verbose test output
LOG_LEVEL=error
# Enable debug logging for specific tests
DEBUG=false
# Log test execution details
TEST_LOG_VERBOSE=false

# === Test Execution Configuration ===
# Test timeouts (in milliseconds)
TEST_TIMEOUT_UNIT=5000
TEST_TIMEOUT_INTEGRATION=15000
TEST_TIMEOUT_E2E=30000
TEST_TIMEOUT_GLOBAL=60000

# Test retry configuration
TEST_RETRY_ATTEMPTS=2
TEST_RETRY_DELAY=1000

# Parallel execution
TEST_PARALLEL=true
TEST_MAX_WORKERS=4
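These values only take effect if the Vitest configuration reads them. A possible wiring, using real Vitest options but an assumed mapping from the variables above:

```typescript
// vitest.config.ts (excerpt, illustrative)
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    // Fall back to the documented defaults when the variables are unset.
    testTimeout: Number(process.env.TEST_TIMEOUT_UNIT ?? 5000),
    hookTimeout: Number(process.env.TEST_TIMEOUT_GLOBAL ?? 60000), // assumed mapping
    retry: Number(process.env.TEST_RETRY_ATTEMPTS ?? 2),
    maxWorkers: Number(process.env.TEST_MAX_WORKERS ?? 4),
  },
});
```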

# === Feature Flags ===
# Enable/disable specific test features
FEATURE_TEST_COVERAGE=true
FEATURE_TEST_SCREENSHOTS=false
FEATURE_TEST_VIDEOS=false
FEATURE_TEST_TRACE=false
FEATURE_MOCK_EXTERNAL_APIS=true
FEATURE_USE_TEST_CONTAINERS=false

# === Mock Service Configuration ===
# MSW (Mock Service Worker) configuration
MSW_ENABLED=true
MSW_API_DELAY=0
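With `MSW_ENABLED=true`, the mock API endpoints above would typically be served by an MSW node server started in global test setup. A sketch using the MSW v2 API (the `/workflows` handler is illustrative, not the project's actual handler set):

```typescript
// tests/setup/msw-server.ts (illustrative)
import { setupServer } from 'msw/node';
import { http, HttpResponse, delay } from 'msw';

const apiUrl = process.env.N8N_API_URL ?? 'http://localhost:3001/mock-api';
const apiDelay = Number(process.env.MSW_API_DELAY ?? 0);

export const server = setupServer(
  http.get(`${apiUrl}/workflows`, async () => {
    await delay(apiDelay); // honours MSW_API_DELAY
    return HttpResponse.json({ data: [] });
  }),
);

if (process.env.MSW_ENABLED === 'true') {
  server.listen({ onUnhandledRequest: 'warn' });
}
```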

# Test data paths
TEST_FIXTURES_PATH=./tests/fixtures
TEST_DATA_PATH=./tests/data
TEST_SNAPSHOTS_PATH=./tests/__snapshots__

# === Performance Testing ===
# Performance thresholds (in milliseconds)
PERF_THRESHOLD_API_RESPONSE=100
PERF_THRESHOLD_DB_QUERY=50
PERF_THRESHOLD_NODE_PARSE=200
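These thresholds can also be enforced directly in tests rather than only tracked by the benchmark workflow. A sketch of such a guard test, where the query is a stand-in and `createTestDatabase` is the hypothetical helper sketched earlier:

```typescript
// tests/perf/db-query.perf.test.ts (illustrative)
import { describe, expect, it } from 'vitest';
import { createTestDatabase } from '../utils/create-test-db';

describe('database query performance', () => {
  it('stays under PERF_THRESHOLD_DB_QUERY', () => {
    const threshold = Number(process.env.PERF_THRESHOLD_DB_QUERY ?? 50);
    const db = createTestDatabase();

    const start = performance.now();
    db.prepare('SELECT 1').get(); // stand-in for a real repository query
    const elapsed = performance.now() - start;

    expect(elapsed).toBeLessThan(threshold);
  });
});
```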

# === External Service Mocks ===
# Redis mock (if needed)
REDIS_MOCK_ENABLED=true
REDIS_MOCK_PORT=6380

# Elasticsearch mock (if needed)
ELASTICSEARCH_MOCK_ENABLED=false
ELASTICSEARCH_MOCK_PORT=9201

# === Rate Limiting ===
# Disable rate limiting in tests
RATE_LIMIT_MAX=0
RATE_LIMIT_WINDOW=0

# === Cache Configuration ===
# Disable caching in tests for predictable results
CACHE_TTL=0
CACHE_ENABLED=false

# === Error Handling ===
# Show full error stack traces in tests
ERROR_SHOW_STACK=true
ERROR_SHOW_DETAILS=true

# === Cleanup Configuration ===
# Automatically clean up test data after each test
TEST_CLEANUP_ENABLED=true
TEST_CLEANUP_ON_FAILURE=false

# === Database Seeding ===
# Seed test database with sample data
TEST_SEED_DATABASE=true
TEST_SEED_TEMPLATES=true

# === Network Configuration ===
# Network timeouts for external requests
NETWORK_TIMEOUT=5000
NETWORK_RETRY_COUNT=0

# === Memory Limits ===
# Set memory limits for tests (in MB)
TEST_MEMORY_LIMIT=512

# === Code Coverage ===
# Coverage output directory
COVERAGE_DIR=./coverage
COVERAGE_REPORTER=lcov,html,text-summary
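Finally, a sketch of how this file and the uncommitted `.env.test.local` might be loaded in a global setup file, assuming dotenv is used (the local file overrides the committed defaults):

```typescript
// tests/setup/env.ts (illustrative)
import dotenv from 'dotenv';

// Committed defaults first...
dotenv.config({ path: '.env.test' });
// ...then uncommitted secrets, which take precedence.
dotenv.config({ path: '.env.test.local', override: true });
```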