feat: add comprehensive performance benchmark tracking system
- Create benchmark test suites for critical operations:
  - Node loading performance
  - Database query performance
  - Search operations performance
  - Validation performance
  - MCP tool execution performance
- Add GitHub Actions workflow for benchmark tracking:
  - Runs on push to main and PRs
  - Uses github-action-benchmark for historical tracking
  - Comments on PRs with performance results
  - Alerts on >10% performance regressions
  - Stores results in GitHub Pages
- Create benchmark infrastructure:
  - Custom Vitest benchmark configuration
  - JSON reporter for CI results
  - Result formatter for github-action-benchmark
  - Performance threshold documentation
- Add supporting utilities:
  - SQLiteStorageService for benchmark database setup
  - MCPEngine wrapper for testing MCP tools
  - Test factories for generating benchmark data
  - Enhanced NodeRepository with benchmark methods
- Document benchmark system:
  - Comprehensive benchmark guide in docs/BENCHMARKS.md
  - Performance thresholds in .github/BENCHMARK_THRESHOLDS.md
  - README for benchmarks directory
  - Integration with existing test suite

The benchmark system will help monitor performance over time and catch regressions before they reach production.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
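As a quick illustration of how the new pieces fit together, here is a minimal sketch of a Vitest benchmark suite built on the SQLiteStorageService, NodeRepository, and MCPEngine added in this commit. The file name, relative import paths, and the example node type are assumptions for illustration, not the suite actually committed:

```typescript
// benchmarks/mcp-tools.bench.ts (hypothetical file name and paths)
import { bench, describe } from 'vitest';
import { SQLiteStorageService } from '../src/services/sqlite-storage-service';
import { NodeRepository } from '../src/database/node-repository';
import { MCPEngine } from '../src/mcp-tools-engine';

// An in-memory database keeps the sketch self-contained; the real suites
// would seed it with representative node data via the test factories.
const storage = new SQLiteStorageService(':memory:');
const repository = new NodeRepository(storage);
const engine = new MCPEngine(repository);

describe('MCP tool execution', () => {
  bench('search_nodes (OR mode)', async () => {
    await engine.searchNodes({ query: 'http request', mode: 'OR', limit: 20 });
  });

  bench('list_nodes', async () => {
    await engine.listNodes({ limit: 100 });
  });

  bench('get_node_essentials', async () => {
    await engine.getNodeEssentials({ nodeType: 'nodes-base.httpRequest' });
  });
});
```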
src/database/node-repository.ts
@@ -1,8 +1,17 @@
 import { DatabaseAdapter } from './database-adapter';
 import { ParsedNode } from '../parsers/node-parser';
+import { SQLiteStorageService } from '../services/sqlite-storage-service';
 
 export class NodeRepository {
-  constructor(private db: DatabaseAdapter) {}
+  private db: DatabaseAdapter;
+
+  constructor(dbOrService: DatabaseAdapter | SQLiteStorageService) {
+    if ('db' in dbOrService) {
+      this.db = dbOrService.db;
+    } else {
+      this.db = dbOrService;
+    }
+  }
 
   /**
    * Save node with proper JSON serialization
@@ -91,4 +100,145 @@ export class NodeRepository {
       return defaultValue;
     }
   }
+
+  // Additional methods for benchmarks
+  upsertNode(node: ParsedNode): void {
+    this.saveNode(node);
+  }
+
+  getNodeByType(nodeType: string): any {
+    return this.getNode(nodeType);
+  }
+
+  getNodesByCategory(category: string): any[] {
+    const rows = this.db.prepare(`
+      SELECT * FROM nodes WHERE category = ?
+      ORDER BY display_name
+    `).all(category) as any[];
+
+    return rows.map(row => this.parseNodeRow(row));
+  }
+
+  searchNodes(query: string, mode: 'OR' | 'AND' | 'FUZZY' = 'OR', limit: number = 20): any[] {
+    let sql = '';
+    const params: any[] = [];
+
+    if (mode === 'FUZZY') {
+      // Simple fuzzy search
+      sql = `
+        SELECT * FROM nodes
+        WHERE node_type LIKE ? OR display_name LIKE ? OR description LIKE ?
+        ORDER BY display_name
+        LIMIT ?
+      `;
+      const fuzzyQuery = `%${query}%`;
+      params.push(fuzzyQuery, fuzzyQuery, fuzzyQuery, limit);
+    } else {
+      // OR/AND mode
+      const words = query.split(/\s+/).filter(w => w.length > 0);
+      const conditions = words.map(() =>
+        '(node_type LIKE ? OR display_name LIKE ? OR description LIKE ?)'
+      );
+      const operator = mode === 'AND' ? ' AND ' : ' OR ';
+
+      sql = `
+        SELECT * FROM nodes
+        WHERE ${conditions.join(operator)}
+        ORDER BY display_name
+        LIMIT ?
+      `;
+
+      for (const word of words) {
+        const searchTerm = `%${word}%`;
+        params.push(searchTerm, searchTerm, searchTerm);
+      }
+      params.push(limit);
+    }
+
+    const rows = this.db.prepare(sql).all(...params) as any[];
+    return rows.map(row => this.parseNodeRow(row));
+  }
+
+  getAllNodes(limit?: number): any[] {
+    let sql = 'SELECT * FROM nodes ORDER BY display_name';
+    if (limit) {
+      sql += ` LIMIT ${limit}`;
+    }
+
+    const rows = this.db.prepare(sql).all() as any[];
+    return rows.map(row => this.parseNodeRow(row));
+  }
+
+  getNodeCount(): number {
+    const result = this.db.prepare('SELECT COUNT(*) as count FROM nodes').get() as any;
+    return result.count;
+  }
+
+  getAIToolNodes(): any[] {
+    return this.getAITools();
+  }
+
+  getNodesByPackage(packageName: string): any[] {
+    const rows = this.db.prepare(`
+      SELECT * FROM nodes WHERE package_name = ?
+      ORDER BY display_name
+    `).all(packageName) as any[];
+
+    return rows.map(row => this.parseNodeRow(row));
+  }
+
+  searchNodeProperties(nodeType: string, query: string, maxResults: number = 20): any[] {
+    const node = this.getNode(nodeType);
+    if (!node || !node.properties) return [];
+
+    const results: any[] = [];
+    const searchLower = query.toLowerCase();
+
+    function searchProperties(properties: any[], path: string[] = []) {
+      for (const prop of properties) {
+        if (results.length >= maxResults) break;
+
+        const currentPath = [...path, prop.name || prop.displayName];
+        const pathString = currentPath.join('.');
+
+        if (prop.name?.toLowerCase().includes(searchLower) ||
+            prop.displayName?.toLowerCase().includes(searchLower) ||
+            prop.description?.toLowerCase().includes(searchLower)) {
+          results.push({
+            path: pathString,
+            property: prop,
+            description: prop.description
+          });
+        }
+
+        // Search nested properties
+        if (prop.options) {
+          searchProperties(prop.options, currentPath);
+        }
+      }
+    }
+
+    searchProperties(node.properties);
+    return results;
+  }
+
+  private parseNodeRow(row: any): any {
+    return {
+      nodeType: row.node_type,
+      displayName: row.display_name,
+      description: row.description,
+      category: row.category,
+      developmentStyle: row.development_style,
+      package: row.package_name,
+      isAITool: Number(row.is_ai_tool) === 1,
+      isTrigger: Number(row.is_trigger) === 1,
+      isWebhook: Number(row.is_webhook) === 1,
+      isVersioned: Number(row.is_versioned) === 1,
+      version: row.version,
+      properties: this.safeJsonParse(row.properties_schema, []),
+      operations: this.safeJsonParse(row.operations, []),
+      credentials: this.safeJsonParse(row.credentials_required, []),
+      hasDocumentation: !!row.documentation
+    };
+  }
 }
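A brief usage note on the widened constructor above: benchmark code can now hand the repository either the SQLiteStorageService wrapper or a raw DatabaseAdapter, and the `'db' in dbOrService` check unwraps the former. A small sketch, with import paths assumed:

```typescript
import { SQLiteStorageService } from '../src/services/sqlite-storage-service';
import { NodeRepository } from '../src/database/node-repository';

const storage = new SQLiteStorageService(':memory:');

// Benchmark-style construction: the service is unwrapped via its `db` getter.
const repoFromService = new NodeRepository(storage);

// Existing call sites keep working: a plain adapter falls through the `else` branch.
const repoFromAdapter = new NodeRepository(storage.db);
```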
@@ -23,7 +23,7 @@ export interface EngineHealth {
 
 export interface EngineOptions {
   sessionTimeout?: number;
-  logLevel?: string;
+  logLevel?: 'error' | 'warn' | 'info' | 'debug';
 }
 
 export class N8NMCPEngine {
src/mcp-tools-engine.ts (new file, 113 lines)
@@ -0,0 +1,113 @@
+/**
+ * MCPEngine - A simplified interface for benchmarking MCP tool execution
+ * This directly implements the MCP tool functionality without server dependencies
+ */
+import { NodeRepository } from './database/node-repository';
+import { PropertyFilter } from './services/property-filter';
+import { TaskTemplates } from './services/task-templates';
+import { ConfigValidator } from './services/config-validator';
+import { EnhancedConfigValidator } from './services/enhanced-config-validator';
+import { WorkflowValidator, WorkflowValidationResult } from './services/workflow-validator';
+
+export class MCPEngine {
+  private workflowValidator: WorkflowValidator;
+
+  constructor(private repository: NodeRepository) {
+    this.workflowValidator = new WorkflowValidator(repository, EnhancedConfigValidator);
+  }
+
+  async listNodes(args: any = {}) {
+    return this.repository.getAllNodes(args.limit);
+  }
+
+  async searchNodes(args: any) {
+    return this.repository.searchNodes(args.query, args.mode || 'OR', args.limit || 20);
+  }
+
+  async getNodeInfo(args: any) {
+    return this.repository.getNodeByType(args.nodeType);
+  }
+
+  async getNodeEssentials(args: any) {
+    const node = await this.repository.getNodeByType(args.nodeType);
+    if (!node) return null;
+
+    // Filter to essentials using static method
+    const essentials = PropertyFilter.getEssentials(node.properties || [], args.nodeType);
+    return {
+      nodeType: node.nodeType,
+      displayName: node.displayName,
+      description: node.description,
+      category: node.category,
+      required: essentials.required,
+      common: essentials.common
+    };
+  }
+
+  async getNodeDocumentation(args: any) {
+    const node = await this.repository.getNodeByType(args.nodeType);
+    return node?.documentation || null;
+  }
+
+  async validateNodeOperation(args: any) {
+    // Get node properties and validate
+    const node = await this.repository.getNodeByType(args.nodeType);
+    if (!node) {
+      return {
+        valid: false,
+        errors: [{ type: 'invalid_configuration', property: '', message: 'Node type not found' }],
+        warnings: [],
+        suggestions: [],
+        visibleProperties: [],
+        hiddenProperties: []
+      };
+    }
+
+    return ConfigValidator.validate(args.nodeType, args.config, node.properties || []);
+  }
+
+  async validateNodeMinimal(args: any) {
+    // Get node and check minimal requirements
+    const node = await this.repository.getNodeByType(args.nodeType);
+    if (!node) {
+      return { missingFields: [], error: 'Node type not found' };
+    }
+
+    const missingFields: string[] = [];
+    const requiredFields = PropertyFilter.getEssentials(node.properties || [], args.nodeType).required;
+
+    for (const field of requiredFields) {
+      if (!args.config[field.name]) {
+        missingFields.push(field.name);
+      }
+    }
+
+    return { missingFields };
+  }
+
+  async searchNodeProperties(args: any) {
+    return this.repository.searchNodeProperties(args.nodeType, args.query, args.maxResults || 20);
+  }
+
+  async getNodeForTask(args: any) {
+    return TaskTemplates.getTaskTemplate(args.task);
+  }
+
+  async listAITools(args: any) {
+    return this.repository.getAIToolNodes();
+  }
+
+  async getDatabaseStatistics(args: any) {
+    const count = await this.repository.getNodeCount();
+    const aiTools = await this.repository.getAIToolNodes();
+    return {
+      totalNodes: count,
+      aiToolsCount: aiTools.length,
+      categories: ['trigger', 'transform', 'output', 'input']
+    };
+  }
+
+  async validateWorkflow(args: any): Promise<WorkflowValidationResult> {
+    return this.workflowValidator.validateWorkflow(args.workflow, args.options);
+  }
+}
src/services/sqlite-storage-service.ts (new file, 86 lines)
@@ -0,0 +1,86 @@
+/**
+ * SQLiteStorageService - A simple wrapper around DatabaseAdapter for benchmarks
+ */
+import { DatabaseAdapter, createDatabaseAdapter } from '../database/database-adapter';
+
+export class SQLiteStorageService {
+  private adapter: DatabaseAdapter | null = null;
+  private dbPath: string;
+
+  constructor(dbPath: string = ':memory:') {
+    this.dbPath = dbPath;
+    this.initSync();
+  }
+
+  private initSync() {
+    // For benchmarks, we'll use synchronous initialization
+    // In real usage, this should be async
+    const Database = require('better-sqlite3');
+    const db = new Database(this.dbPath);
+
+    // Create a simple adapter
+    this.adapter = {
+      prepare: (sql: string) => db.prepare(sql),
+      exec: (sql: string) => db.exec(sql),
+      close: () => db.close(),
+      pragma: (key: string, value?: any) => db.pragma(`${key}${value !== undefined ? ` = ${value}` : ''}`),
+      inTransaction: db.inTransaction,
+      transaction: (fn: () => any) => db.transaction(fn)(),
+      checkFTS5Support: () => {
+        try {
+          db.exec("CREATE VIRTUAL TABLE test_fts USING fts5(content)");
+          db.exec("DROP TABLE test_fts");
+          return true;
+        } catch {
+          return false;
+        }
+      }
+    };
+
+    // Initialize schema
+    this.initializeSchema();
+  }
+
+  private initializeSchema() {
+    const schema = `
+      CREATE TABLE IF NOT EXISTS nodes (
+        node_type TEXT PRIMARY KEY,
+        package_name TEXT NOT NULL,
+        display_name TEXT NOT NULL,
+        description TEXT,
+        category TEXT,
+        development_style TEXT CHECK(development_style IN ('declarative', 'programmatic')),
+        is_ai_tool INTEGER DEFAULT 0,
+        is_trigger INTEGER DEFAULT 0,
+        is_webhook INTEGER DEFAULT 0,
+        is_versioned INTEGER DEFAULT 0,
+        version TEXT,
+        documentation TEXT,
+        properties_schema TEXT,
+        operations TEXT,
+        credentials_required TEXT,
+        updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
+      );
+
+      CREATE INDEX IF NOT EXISTS idx_package ON nodes(package_name);
+      CREATE INDEX IF NOT EXISTS idx_ai_tool ON nodes(is_ai_tool);
+      CREATE INDEX IF NOT EXISTS idx_category ON nodes(category);
+    `;
+
+    this.adapter!.exec(schema);
+  }
+
+  get db(): DatabaseAdapter {
+    if (!this.adapter) {
+      throw new Error('Database not initialized');
+    }
+    return this.adapter;
+  }
+
+  close() {
+    if (this.adapter) {
+      this.adapter.close();
+      this.adapter = null;
+    }
+  }
+}
src/services/workflow-validator.ts
@@ -56,7 +56,7 @@ interface ValidationIssue {
   details?: any;
 }
 
-interface WorkflowValidationResult {
+export interface WorkflowValidationResult {
   valid: boolean;
   errors: ValidationIssue[];
   warnings: ValidationIssue[];
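The "result formatter for github-action-benchmark" mentioned in the commit message presumably maps benchmark timings into the JSON array consumed by the action's customSmallerIsBetter tool. A hedged sketch of that mapping follows; the helper name and the input shape are simplifications rather than Vitest's actual benchmark JSON:

```typescript
// scripts/format-benchmark-results.ts (hypothetical helper, not the one in this commit)
interface BenchResult {
  name: string;    // benchmark name as reported by the suite
  meanMs: number;  // mean time per iteration, in milliseconds
}

interface GithubBenchmarkEntry {
  name: string;
  unit: string;
  value: number;
}

// github-action-benchmark's customSmallerIsBetter tool reads a JSON array of
// { name, unit, value } entries, so lower timings chart as improvements.
export function formatForGithubAction(results: BenchResult[]): GithubBenchmarkEntry[] {
  return results.map((r) => ({
    name: r.name,
    unit: 'ms',
    value: r.meanMs,
  }));
}
```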