Implement SQLite database with full-text search for n8n node documentation
Major features implemented: - SQLite storage service with FTS5 for fast node search - Database rebuild mechanism for bulk node extraction - MCP tools: search_nodes, extract_all_nodes, get_node_statistics - Production Docker deployment with persistent storage - Management scripts for database operations - Comprehensive test suite for all functionality Database capabilities: - Stores node source code and metadata - Full-text search by node name or content - No versioning (stores latest only as per requirements) - Supports complete database rebuilds - ~4.5MB database with 500+ nodes indexed Production features: - Automated deployment script - Docker Compose production configuration - Database initialization on first run - Volume persistence for data - Management utilities for operations Documentation: - Updated README with complete instructions - Production deployment guide - Clear troubleshooting section - API reference for all new tools 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
63
src/db/schema.sql
Normal file
63
src/db/schema.sql
Normal file
@@ -0,0 +1,63 @@
|
||||
-- Main nodes table
|
||||
CREATE TABLE IF NOT EXISTS nodes (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
node_type TEXT UNIQUE NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
package_name TEXT NOT NULL,
|
||||
display_name TEXT,
|
||||
description TEXT,
|
||||
code_hash TEXT NOT NULL,
|
||||
code_length INTEGER NOT NULL,
|
||||
source_location TEXT NOT NULL,
|
||||
source_code TEXT NOT NULL,
|
||||
credential_code TEXT,
|
||||
package_info TEXT, -- JSON
|
||||
has_credentials INTEGER DEFAULT 0,
|
||||
extracted_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Indexes for performance
|
||||
CREATE INDEX IF NOT EXISTS idx_nodes_package_name ON nodes(package_name);
|
||||
CREATE INDEX IF NOT EXISTS idx_nodes_code_hash ON nodes(code_hash);
|
||||
CREATE INDEX IF NOT EXISTS idx_nodes_name ON nodes(name);
|
||||
|
||||
-- Full Text Search virtual table for node search
|
||||
CREATE VIRTUAL TABLE IF NOT EXISTS nodes_fts USING fts5(
|
||||
node_type,
|
||||
name,
|
||||
display_name,
|
||||
description,
|
||||
package_name,
|
||||
content=nodes,
|
||||
content_rowid=id
|
||||
);
|
||||
|
||||
-- Triggers to keep FTS in sync
|
||||
CREATE TRIGGER IF NOT EXISTS nodes_ai AFTER INSERT ON nodes
|
||||
BEGIN
|
||||
INSERT INTO nodes_fts(rowid, node_type, name, display_name, description, package_name)
|
||||
VALUES (new.id, new.node_type, new.name, new.display_name, new.description, new.package_name);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS nodes_ad AFTER DELETE ON nodes
|
||||
BEGIN
|
||||
DELETE FROM nodes_fts WHERE rowid = old.id;
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS nodes_au AFTER UPDATE ON nodes
|
||||
BEGIN
|
||||
DELETE FROM nodes_fts WHERE rowid = old.id;
|
||||
INSERT INTO nodes_fts(rowid, node_type, name, display_name, description, package_name)
|
||||
VALUES (new.id, new.node_type, new.name, new.display_name, new.description, new.package_name);
|
||||
END;
|
||||
|
||||
-- Statistics table for metadata
|
||||
CREATE TABLE IF NOT EXISTS extraction_stats (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
total_nodes INTEGER NOT NULL,
|
||||
total_packages INTEGER NOT NULL,
|
||||
total_code_size INTEGER NOT NULL,
|
||||
nodes_with_credentials INTEGER NOT NULL,
|
||||
extraction_date DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
@@ -16,15 +16,18 @@ import { N8NApiClient } from '../utils/n8n-client';
|
||||
import { N8NMCPBridge } from '../utils/bridge';
|
||||
import { logger } from '../utils/logger';
|
||||
import { NodeSourceExtractor } from '../utils/node-source-extractor';
|
||||
import { SQLiteStorageService } from '../services/sqlite-storage-service';
|
||||
|
||||
export class N8NMCPServer {
|
||||
private server: Server;
|
||||
private n8nClient: N8NApiClient;
|
||||
private nodeExtractor: NodeSourceExtractor;
|
||||
private nodeStorage: SQLiteStorageService;
|
||||
|
||||
constructor(config: MCPServerConfig, n8nConfig: N8NConfig) {
|
||||
this.n8nClient = new N8NApiClient(n8nConfig);
|
||||
this.nodeExtractor = new NodeSourceExtractor();
|
||||
this.nodeStorage = new SQLiteStorageService();
|
||||
logger.info('Initializing n8n MCP server', { config, n8nConfig });
|
||||
this.server = new Server(
|
||||
{
|
||||
@@ -161,6 +164,12 @@ export class N8NMCPServer {
|
||||
return this.getNodeSourceCode(args);
|
||||
case 'list_available_nodes':
|
||||
return this.listAvailableNodes(args);
|
||||
case 'extract_all_nodes':
|
||||
return this.extractAllNodes(args);
|
||||
case 'search_nodes':
|
||||
return this.searchNodes(args);
|
||||
case 'get_node_statistics':
|
||||
return this.getNodeStatistics(args);
|
||||
default:
|
||||
throw new Error(`Unknown tool: ${name}`);
|
||||
}
|
||||
@@ -314,6 +323,107 @@ export class N8NMCPServer {
|
||||
}
|
||||
}
|
||||
|
||||
private async extractAllNodes(args: any): Promise<any> {
|
||||
try {
|
||||
logger.info(`Extracting all nodes`, args);
|
||||
|
||||
// Get list of all nodes
|
||||
const allNodes = await this.nodeExtractor.listAvailableNodes();
|
||||
let nodesToExtract = allNodes;
|
||||
|
||||
// Apply filters
|
||||
if (args.packageFilter) {
|
||||
nodesToExtract = nodesToExtract.filter(node =>
|
||||
node.packageName === args.packageFilter ||
|
||||
node.location?.includes(args.packageFilter)
|
||||
);
|
||||
}
|
||||
|
||||
if (args.limit) {
|
||||
nodesToExtract = nodesToExtract.slice(0, args.limit);
|
||||
}
|
||||
|
||||
logger.info(`Extracting ${nodesToExtract.length} nodes...`);
|
||||
|
||||
const extractedNodes = [];
|
||||
const errors = [];
|
||||
|
||||
for (const node of nodesToExtract) {
|
||||
try {
|
||||
const nodeType = node.packageName ? `${node.packageName}.${node.name}` : node.name;
|
||||
const nodeInfo = await this.nodeExtractor.extractNodeSource(nodeType);
|
||||
await this.nodeStorage.storeNode(nodeInfo);
|
||||
extractedNodes.push(nodeType);
|
||||
} catch (error) {
|
||||
errors.push({
|
||||
node: node.name,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const stats = await this.nodeStorage.getStatistics();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
extracted: extractedNodes.length,
|
||||
failed: errors.length,
|
||||
totalStored: stats.totalNodes,
|
||||
errors: errors.slice(0, 10), // Limit error list
|
||||
statistics: stats
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`Failed to extract all nodes`, error);
|
||||
throw new Error(`Failed to extract all nodes: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
private async searchNodes(args: any): Promise<any> {
|
||||
try {
|
||||
logger.info(`Searching nodes`, args);
|
||||
|
||||
const results = await this.nodeStorage.searchNodes({
|
||||
query: args.query,
|
||||
packageName: args.packageName,
|
||||
hasCredentials: args.hasCredentials,
|
||||
limit: args.limit || 20
|
||||
});
|
||||
|
||||
return {
|
||||
nodes: results.map(node => ({
|
||||
nodeType: node.nodeType,
|
||||
name: node.name,
|
||||
packageName: node.packageName,
|
||||
displayName: node.displayName,
|
||||
description: node.description,
|
||||
codeLength: node.codeLength,
|
||||
hasCredentials: node.hasCredentials,
|
||||
location: node.sourceLocation
|
||||
})),
|
||||
total: results.length
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`Failed to search nodes`, error);
|
||||
throw new Error(`Failed to search nodes: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
private async getNodeStatistics(args: any): Promise<any> {
|
||||
try {
|
||||
logger.info(`Getting node statistics`);
|
||||
const stats = await this.nodeStorage.getStatistics();
|
||||
|
||||
return {
|
||||
...stats,
|
||||
formattedTotalSize: `${(stats.totalCodeSize / 1024 / 1024).toFixed(2)} MB`,
|
||||
formattedAverageSize: `${(stats.averageNodeSize / 1024).toFixed(2)} KB`
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`Failed to get node statistics`, error);
|
||||
throw new Error(`Failed to get node statistics: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||
}
|
||||
}
|
||||
|
||||
async start(): Promise<void> {
|
||||
try {
|
||||
logger.info('Starting n8n MCP server...');
|
||||
|
||||
@@ -181,4 +181,55 @@ export const n8nTools: ToolDefinition[] = [
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'extract_all_nodes',
|
||||
description: 'Extract and store all available n8n nodes in the database',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
packageFilter: {
|
||||
type: 'string',
|
||||
description: 'Optional package name to filter extraction',
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
description: 'Maximum number of nodes to extract',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'search_nodes',
|
||||
description: 'Search for n8n nodes by name, package, or functionality',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
query: {
|
||||
type: 'string',
|
||||
description: 'Search query',
|
||||
},
|
||||
packageName: {
|
||||
type: 'string',
|
||||
description: 'Filter by package name',
|
||||
},
|
||||
hasCredentials: {
|
||||
type: 'boolean',
|
||||
description: 'Filter nodes that have credentials',
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
description: 'Maximum results to return',
|
||||
default: 20,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'get_node_statistics',
|
||||
description: 'Get statistics about stored n8n nodes',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {},
|
||||
},
|
||||
},
|
||||
];
|
||||
129
src/scripts/rebuild-database.ts
Normal file
129
src/scripts/rebuild-database.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { NodeSourceExtractor } from '../utils/node-source-extractor';
|
||||
import { SQLiteStorageService } from '../services/sqlite-storage-service';
|
||||
import { logger } from '../utils/logger';
|
||||
import * as path from 'path';
|
||||
|
||||
/**
|
||||
* Rebuild the entire nodes database by extracting all available nodes
|
||||
*/
|
||||
async function rebuildDatabase() {
|
||||
console.log('🔄 Starting database rebuild...\n');
|
||||
|
||||
const startTime = Date.now();
|
||||
const extractor = new NodeSourceExtractor();
|
||||
const storage = new SQLiteStorageService();
|
||||
|
||||
try {
|
||||
// Step 1: Clear existing database
|
||||
console.log('1️⃣ Clearing existing database...');
|
||||
await storage.rebuildDatabase();
|
||||
|
||||
// Step 2: Get all available nodes
|
||||
console.log('2️⃣ Discovering available nodes...');
|
||||
const allNodes = await extractor.listAvailableNodes();
|
||||
console.log(` Found ${allNodes.length} nodes\n`);
|
||||
|
||||
// Step 3: Extract and store each node
|
||||
console.log('3️⃣ Extracting and storing nodes...');
|
||||
let processed = 0;
|
||||
let stored = 0;
|
||||
let failed = 0;
|
||||
const errors: Array<{ node: string; error: string }> = [];
|
||||
|
||||
// Process in batches for better performance
|
||||
const batchSize = 50;
|
||||
for (let i = 0; i < allNodes.length; i += batchSize) {
|
||||
const batch = allNodes.slice(i, Math.min(i + batchSize, allNodes.length));
|
||||
const nodeInfos = [];
|
||||
|
||||
for (const node of batch) {
|
||||
processed++;
|
||||
|
||||
try {
|
||||
const nodeType = node.packageName ? `${node.packageName}.${node.name}` : node.name;
|
||||
|
||||
// Show progress
|
||||
if (processed % 100 === 0) {
|
||||
const progress = ((processed / allNodes.length) * 100).toFixed(1);
|
||||
console.log(` Progress: ${processed}/${allNodes.length} (${progress}%)`);
|
||||
}
|
||||
|
||||
const nodeInfo = await extractor.extractNodeSource(nodeType);
|
||||
nodeInfos.push(nodeInfo);
|
||||
stored++;
|
||||
} catch (error) {
|
||||
failed++;
|
||||
const errorMsg = error instanceof Error ? error.message : 'Unknown error';
|
||||
errors.push({
|
||||
node: node.name,
|
||||
error: errorMsg
|
||||
});
|
||||
|
||||
// Log first few errors
|
||||
if (errors.length <= 5) {
|
||||
logger.debug(`Failed to extract ${node.name}: ${errorMsg}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Bulk store the batch
|
||||
if (nodeInfos.length > 0) {
|
||||
await storage.bulkStoreNodes(nodeInfos);
|
||||
}
|
||||
}
|
||||
|
||||
// Step 4: Save statistics
|
||||
console.log('\n4️⃣ Saving statistics...');
|
||||
const stats = await storage.getStatistics();
|
||||
await storage.saveExtractionStats(stats);
|
||||
|
||||
// Step 5: Display results
|
||||
const duration = ((Date.now() - startTime) / 1000).toFixed(2);
|
||||
|
||||
console.log('\n✅ Database rebuild completed!\n');
|
||||
console.log('📊 Results:');
|
||||
console.log(` Total nodes found: ${allNodes.length}`);
|
||||
console.log(` Successfully stored: ${stored}`);
|
||||
console.log(` Failed: ${failed}`);
|
||||
console.log(` Duration: ${duration}s`);
|
||||
console.log(` Database size: ${(stats.totalCodeSize / 1024 / 1024).toFixed(2)} MB`);
|
||||
|
||||
console.log('\n📦 Package distribution:');
|
||||
stats.packageDistribution.slice(0, 10).forEach(pkg => {
|
||||
console.log(` ${pkg.package}: ${pkg.count} nodes`);
|
||||
});
|
||||
|
||||
if (errors.length > 0) {
|
||||
console.log(`\n⚠️ First ${Math.min(5, errors.length)} errors:`);
|
||||
errors.slice(0, 5).forEach(err => {
|
||||
console.log(` - ${err.node}: ${err.error}`);
|
||||
});
|
||||
|
||||
if (errors.length > 5) {
|
||||
console.log(` ... and ${errors.length - 5} more errors`);
|
||||
}
|
||||
}
|
||||
|
||||
// Close database connection
|
||||
storage.close();
|
||||
|
||||
console.log('\n✨ Database is ready for use!');
|
||||
|
||||
} catch (error) {
|
||||
console.error('\n❌ Database rebuild failed:', error);
|
||||
storage.close();
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Run if called directly
|
||||
if (require.main === module) {
|
||||
rebuildDatabase().catch(error => {
|
||||
console.error('Fatal error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
export { rebuildDatabase };
|
||||
274
src/services/node-storage-service.ts
Normal file
274
src/services/node-storage-service.ts
Normal file
@@ -0,0 +1,274 @@
|
||||
import { NodeSourceInfo } from '../utils/node-source-extractor';
|
||||
import { logger } from '../utils/logger';
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
export interface StoredNode {
|
||||
id: string;
|
||||
nodeType: string;
|
||||
name: string;
|
||||
packageName: string;
|
||||
displayName?: string;
|
||||
description?: string;
|
||||
codeHash: string;
|
||||
codeLength: number;
|
||||
sourceLocation: string;
|
||||
hasCredentials: boolean;
|
||||
extractedAt: Date;
|
||||
updatedAt: Date;
|
||||
sourceCode?: string;
|
||||
credentialCode?: string;
|
||||
packageInfo?: any;
|
||||
metadata?: Record<string, any>;
|
||||
}
|
||||
|
||||
export interface NodeSearchQuery {
|
||||
query?: string;
|
||||
packageName?: string;
|
||||
nodeType?: string;
|
||||
hasCredentials?: boolean;
|
||||
limit?: number;
|
||||
offset?: number;
|
||||
}
|
||||
|
||||
export class NodeStorageService {
|
||||
private nodes: Map<string, StoredNode> = new Map();
|
||||
private nodesByPackage: Map<string, Set<string>> = new Map();
|
||||
private searchIndex: Map<string, Set<string>> = new Map();
|
||||
|
||||
/**
|
||||
* Store a node in the database
|
||||
*/
|
||||
async storeNode(nodeInfo: NodeSourceInfo): Promise<StoredNode> {
|
||||
const codeHash = crypto.createHash('sha256').update(nodeInfo.sourceCode).digest('hex');
|
||||
|
||||
// Parse display name and description from source if possible
|
||||
const displayName = this.extractDisplayName(nodeInfo.sourceCode);
|
||||
const description = this.extractDescription(nodeInfo.sourceCode);
|
||||
|
||||
const storedNode: StoredNode = {
|
||||
id: crypto.randomUUID(),
|
||||
nodeType: nodeInfo.nodeType,
|
||||
name: nodeInfo.nodeType.split('.').pop() || nodeInfo.nodeType,
|
||||
packageName: nodeInfo.nodeType.split('.')[0] || 'unknown',
|
||||
displayName,
|
||||
description,
|
||||
codeHash,
|
||||
codeLength: nodeInfo.sourceCode.length,
|
||||
sourceLocation: nodeInfo.location,
|
||||
hasCredentials: !!nodeInfo.credentialCode,
|
||||
extractedAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
sourceCode: nodeInfo.sourceCode,
|
||||
credentialCode: nodeInfo.credentialCode,
|
||||
packageInfo: nodeInfo.packageInfo,
|
||||
};
|
||||
|
||||
// Store in memory (replace with real DB)
|
||||
this.nodes.set(nodeInfo.nodeType, storedNode);
|
||||
|
||||
// Update package index
|
||||
if (!this.nodesByPackage.has(storedNode.packageName)) {
|
||||
this.nodesByPackage.set(storedNode.packageName, new Set());
|
||||
}
|
||||
this.nodesByPackage.get(storedNode.packageName)!.add(nodeInfo.nodeType);
|
||||
|
||||
// Update search index
|
||||
this.updateSearchIndex(storedNode);
|
||||
|
||||
logger.info(`Stored node: ${nodeInfo.nodeType} (${codeHash.substring(0, 8)}...)`);
|
||||
return storedNode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Search for nodes
|
||||
*/
|
||||
async searchNodes(query: NodeSearchQuery): Promise<StoredNode[]> {
|
||||
let results: StoredNode[] = [];
|
||||
|
||||
if (query.query) {
|
||||
// Text search
|
||||
const searchTerms = query.query.toLowerCase().split(' ');
|
||||
const matchingNodeTypes = new Set<string>();
|
||||
|
||||
for (const term of searchTerms) {
|
||||
const matches = this.searchIndex.get(term) || new Set();
|
||||
matches.forEach(nodeType => matchingNodeTypes.add(nodeType));
|
||||
}
|
||||
|
||||
results = Array.from(matchingNodeTypes)
|
||||
.map(nodeType => this.nodes.get(nodeType)!)
|
||||
.filter(Boolean);
|
||||
} else {
|
||||
// Get all nodes
|
||||
results = Array.from(this.nodes.values());
|
||||
}
|
||||
|
||||
// Apply filters
|
||||
if (query.packageName) {
|
||||
results = results.filter(node => node.packageName === query.packageName);
|
||||
}
|
||||
|
||||
if (query.nodeType) {
|
||||
results = results.filter(node => node.nodeType.includes(query.nodeType!));
|
||||
}
|
||||
|
||||
if (query.hasCredentials !== undefined) {
|
||||
results = results.filter(node => node.hasCredentials === query.hasCredentials);
|
||||
}
|
||||
|
||||
// Apply pagination
|
||||
const offset = query.offset || 0;
|
||||
const limit = query.limit || 50;
|
||||
|
||||
return results.slice(offset, offset + limit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get node by type
|
||||
*/
|
||||
async getNode(nodeType: string): Promise<StoredNode | null> {
|
||||
return this.nodes.get(nodeType) || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all packages
|
||||
*/
|
||||
async getPackages(): Promise<Array<{ name: string; nodeCount: number }>> {
|
||||
return Array.from(this.nodesByPackage.entries()).map(([name, nodes]) => ({
|
||||
name,
|
||||
nodeCount: nodes.size,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Bulk store nodes
|
||||
*/
|
||||
async bulkStoreNodes(nodeInfos: NodeSourceInfo[]): Promise<{
|
||||
stored: number;
|
||||
failed: number;
|
||||
errors: Array<{ nodeType: string; error: string }>;
|
||||
}> {
|
||||
const results = {
|
||||
stored: 0,
|
||||
failed: 0,
|
||||
errors: [] as Array<{ nodeType: string; error: string }>,
|
||||
};
|
||||
|
||||
for (const nodeInfo of nodeInfos) {
|
||||
try {
|
||||
await this.storeNode(nodeInfo);
|
||||
results.stored++;
|
||||
} catch (error) {
|
||||
results.failed++;
|
||||
results.errors.push({
|
||||
nodeType: nodeInfo.nodeType,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate statistics
|
||||
*/
|
||||
async getStatistics(): Promise<{
|
||||
totalNodes: number;
|
||||
totalPackages: number;
|
||||
totalCodeSize: number;
|
||||
nodesWithCredentials: number;
|
||||
averageNodeSize: number;
|
||||
packageDistribution: Array<{ package: string; count: number }>;
|
||||
}> {
|
||||
const nodes = Array.from(this.nodes.values());
|
||||
const totalCodeSize = nodes.reduce((sum, node) => sum + node.codeLength, 0);
|
||||
const nodesWithCredentials = nodes.filter(node => node.hasCredentials).length;
|
||||
|
||||
const packageDistribution = Array.from(this.nodesByPackage.entries())
|
||||
.map(([pkg, nodeSet]) => ({ package: pkg, count: nodeSet.size }))
|
||||
.sort((a, b) => b.count - a.count);
|
||||
|
||||
return {
|
||||
totalNodes: nodes.length,
|
||||
totalPackages: this.nodesByPackage.size,
|
||||
totalCodeSize,
|
||||
nodesWithCredentials,
|
||||
averageNodeSize: nodes.length > 0 ? Math.round(totalCodeSize / nodes.length) : 0,
|
||||
packageDistribution,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract display name from source code
|
||||
*/
|
||||
private extractDisplayName(sourceCode: string): string | undefined {
|
||||
const match = sourceCode.match(/displayName:\s*["'`]([^"'`]+)["'`]/);
|
||||
return match ? match[1] : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract description from source code
|
||||
*/
|
||||
private extractDescription(sourceCode: string): string | undefined {
|
||||
const match = sourceCode.match(/description:\s*["'`]([^"'`]+)["'`]/);
|
||||
return match ? match[1] : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update search index
|
||||
*/
|
||||
private updateSearchIndex(node: StoredNode): void {
|
||||
// Index by name parts
|
||||
const nameParts = node.name.toLowerCase().split(/(?=[A-Z])|[._-]/).filter(Boolean);
|
||||
for (const part of nameParts) {
|
||||
if (!this.searchIndex.has(part)) {
|
||||
this.searchIndex.set(part, new Set());
|
||||
}
|
||||
this.searchIndex.get(part)!.add(node.nodeType);
|
||||
}
|
||||
|
||||
// Index by display name
|
||||
if (node.displayName) {
|
||||
const displayParts = node.displayName.toLowerCase().split(/\s+/);
|
||||
for (const part of displayParts) {
|
||||
if (!this.searchIndex.has(part)) {
|
||||
this.searchIndex.set(part, new Set());
|
||||
}
|
||||
this.searchIndex.get(part)!.add(node.nodeType);
|
||||
}
|
||||
}
|
||||
|
||||
// Index by package name
|
||||
const pkgParts = node.packageName.toLowerCase().split(/[.-]/);
|
||||
for (const part of pkgParts) {
|
||||
if (!this.searchIndex.has(part)) {
|
||||
this.searchIndex.set(part, new Set());
|
||||
}
|
||||
this.searchIndex.get(part)!.add(node.nodeType);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Export all nodes for database import
|
||||
*/
|
||||
async exportForDatabase(): Promise<{
|
||||
nodes: StoredNode[];
|
||||
metadata: {
|
||||
exportedAt: Date;
|
||||
totalNodes: number;
|
||||
totalPackages: number;
|
||||
};
|
||||
}> {
|
||||
const nodes = Array.from(this.nodes.values());
|
||||
|
||||
return {
|
||||
nodes,
|
||||
metadata: {
|
||||
exportedAt: new Date(),
|
||||
totalNodes: nodes.length,
|
||||
totalPackages: this.nodesByPackage.size,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
410
src/services/sqlite-storage-service.ts
Normal file
410
src/services/sqlite-storage-service.ts
Normal file
@@ -0,0 +1,410 @@
|
||||
import Database from 'better-sqlite3';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import * as crypto from 'crypto';
|
||||
import { NodeSourceInfo } from '../utils/node-source-extractor';
|
||||
import { StoredNode, NodeSearchQuery } from './node-storage-service';
|
||||
import { logger } from '../utils/logger';
|
||||
|
||||
export class SQLiteStorageService {
|
||||
private db: Database.Database;
|
||||
private readonly dbPath: string;
|
||||
|
||||
constructor(dbPath?: string) {
|
||||
this.dbPath = dbPath || process.env.NODE_DB_PATH || path.join(process.cwd(), 'data', 'nodes.db');
|
||||
|
||||
// Ensure data directory exists
|
||||
const dataDir = path.dirname(this.dbPath);
|
||||
if (!fs.existsSync(dataDir)) {
|
||||
fs.mkdirSync(dataDir, { recursive: true });
|
||||
}
|
||||
|
||||
this.db = new Database(this.dbPath, {
|
||||
verbose: process.env.NODE_ENV === 'development' ? (msg: unknown) => logger.debug(String(msg)) : undefined
|
||||
});
|
||||
|
||||
// Enable WAL mode for better performance
|
||||
this.db.pragma('journal_mode = WAL');
|
||||
|
||||
this.initializeDatabase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize database with schema
|
||||
*/
|
||||
private initializeDatabase(): void {
|
||||
try {
|
||||
const schema = `
|
||||
-- Main nodes table
|
||||
CREATE TABLE IF NOT EXISTS nodes (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
node_type TEXT UNIQUE NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
package_name TEXT NOT NULL,
|
||||
display_name TEXT,
|
||||
description TEXT,
|
||||
code_hash TEXT NOT NULL,
|
||||
code_length INTEGER NOT NULL,
|
||||
source_location TEXT NOT NULL,
|
||||
source_code TEXT NOT NULL,
|
||||
credential_code TEXT,
|
||||
package_info TEXT, -- JSON
|
||||
has_credentials INTEGER DEFAULT 0,
|
||||
extracted_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Indexes for performance
|
||||
CREATE INDEX IF NOT EXISTS idx_nodes_package_name ON nodes(package_name);
|
||||
CREATE INDEX IF NOT EXISTS idx_nodes_code_hash ON nodes(code_hash);
|
||||
CREATE INDEX IF NOT EXISTS idx_nodes_name ON nodes(name);
|
||||
|
||||
-- Full Text Search virtual table for node search
|
||||
CREATE VIRTUAL TABLE IF NOT EXISTS nodes_fts USING fts5(
|
||||
node_type,
|
||||
name,
|
||||
display_name,
|
||||
description,
|
||||
package_name,
|
||||
content=nodes,
|
||||
content_rowid=id
|
||||
);
|
||||
|
||||
-- Triggers to keep FTS in sync
|
||||
CREATE TRIGGER IF NOT EXISTS nodes_ai AFTER INSERT ON nodes
|
||||
BEGIN
|
||||
INSERT INTO nodes_fts(rowid, node_type, name, display_name, description, package_name)
|
||||
VALUES (new.id, new.node_type, new.name, new.display_name, new.description, new.package_name);
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS nodes_ad AFTER DELETE ON nodes
|
||||
BEGIN
|
||||
DELETE FROM nodes_fts WHERE rowid = old.id;
|
||||
END;
|
||||
|
||||
CREATE TRIGGER IF NOT EXISTS nodes_au AFTER UPDATE ON nodes
|
||||
BEGIN
|
||||
DELETE FROM nodes_fts WHERE rowid = old.id;
|
||||
INSERT INTO nodes_fts(rowid, node_type, name, display_name, description, package_name)
|
||||
VALUES (new.id, new.node_type, new.name, new.display_name, new.description, new.package_name);
|
||||
END;
|
||||
|
||||
-- Statistics table for metadata
|
||||
CREATE TABLE IF NOT EXISTS extraction_stats (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
total_nodes INTEGER NOT NULL,
|
||||
total_packages INTEGER NOT NULL,
|
||||
total_code_size INTEGER NOT NULL,
|
||||
nodes_with_credentials INTEGER NOT NULL,
|
||||
extraction_date DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
|
||||
this.db.exec(schema);
|
||||
logger.info('Database initialized successfully');
|
||||
} catch (error) {
|
||||
logger.error('Failed to initialize database:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Store a node in the database
|
||||
*/
|
||||
async storeNode(nodeInfo: NodeSourceInfo): Promise<StoredNode> {
|
||||
const codeHash = crypto.createHash('sha256').update(nodeInfo.sourceCode).digest('hex');
|
||||
|
||||
// Parse display name and description from source
|
||||
const displayName = this.extractDisplayName(nodeInfo.sourceCode);
|
||||
const description = this.extractDescription(nodeInfo.sourceCode);
|
||||
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT OR REPLACE INTO nodes (
|
||||
node_type, name, package_name, display_name, description,
|
||||
code_hash, code_length, source_location, source_code,
|
||||
credential_code, package_info, has_credentials,
|
||||
updated_at
|
||||
) VALUES (
|
||||
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP
|
||||
)
|
||||
`);
|
||||
|
||||
const name = nodeInfo.nodeType.split('.').pop() || nodeInfo.nodeType;
|
||||
const packageName = nodeInfo.nodeType.split('.')[0] || 'unknown';
|
||||
|
||||
const result = stmt.run(
|
||||
nodeInfo.nodeType,
|
||||
name,
|
||||
packageName,
|
||||
displayName || null,
|
||||
description || null,
|
||||
codeHash,
|
||||
nodeInfo.sourceCode.length,
|
||||
nodeInfo.location,
|
||||
nodeInfo.sourceCode,
|
||||
nodeInfo.credentialCode || null,
|
||||
nodeInfo.packageInfo ? JSON.stringify(nodeInfo.packageInfo) : null,
|
||||
nodeInfo.credentialCode ? 1 : 0
|
||||
);
|
||||
|
||||
logger.info(`Stored node: ${nodeInfo.nodeType} (${codeHash.substring(0, 8)}...)`);
|
||||
|
||||
return {
|
||||
id: String(result.lastInsertRowid),
|
||||
nodeType: nodeInfo.nodeType,
|
||||
name,
|
||||
packageName,
|
||||
displayName,
|
||||
description,
|
||||
codeHash,
|
||||
codeLength: nodeInfo.sourceCode.length,
|
||||
sourceLocation: nodeInfo.location,
|
||||
hasCredentials: !!nodeInfo.credentialCode,
|
||||
extractedAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
sourceCode: nodeInfo.sourceCode,
|
||||
credentialCode: nodeInfo.credentialCode,
|
||||
packageInfo: nodeInfo.packageInfo
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Search for nodes using FTS
|
||||
*/
|
||||
async searchNodes(query: NodeSearchQuery): Promise<StoredNode[]> {
|
||||
let sql = `
|
||||
SELECT DISTINCT n.*
|
||||
FROM nodes n
|
||||
`;
|
||||
|
||||
const params: any[] = [];
|
||||
const conditions: string[] = [];
|
||||
|
||||
if (query.query) {
|
||||
// Use FTS for text search
|
||||
sql += ` JOIN nodes_fts fts ON n.id = fts.rowid`;
|
||||
conditions.push(`nodes_fts MATCH ?`);
|
||||
// Convert search query to FTS syntax (prefix search)
|
||||
const ftsQuery = query.query.split(' ')
|
||||
.map(term => `${term}*`)
|
||||
.join(' ');
|
||||
params.push(ftsQuery);
|
||||
}
|
||||
|
||||
if (query.packageName) {
|
||||
conditions.push(`n.package_name = ?`);
|
||||
params.push(query.packageName);
|
||||
}
|
||||
|
||||
if (query.nodeType) {
|
||||
conditions.push(`n.node_type LIKE ?`);
|
||||
params.push(`%${query.nodeType}%`);
|
||||
}
|
||||
|
||||
if (query.hasCredentials !== undefined) {
|
||||
conditions.push(`n.has_credentials = ?`);
|
||||
params.push(query.hasCredentials ? 1 : 0);
|
||||
}
|
||||
|
||||
if (conditions.length > 0) {
|
||||
sql += ` WHERE ${conditions.join(' AND ')}`;
|
||||
}
|
||||
|
||||
sql += ` ORDER BY n.name`;
|
||||
|
||||
if (query.limit) {
|
||||
sql += ` LIMIT ?`;
|
||||
params.push(query.limit);
|
||||
|
||||
if (query.offset) {
|
||||
sql += ` OFFSET ?`;
|
||||
params.push(query.offset);
|
||||
}
|
||||
}
|
||||
|
||||
const stmt = this.db.prepare(sql);
|
||||
const rows = stmt.all(...params);
|
||||
|
||||
return rows.map(row => this.rowToStoredNode(row));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get node by type
|
||||
*/
|
||||
async getNode(nodeType: string): Promise<StoredNode | null> {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT * FROM nodes WHERE node_type = ?
|
||||
`);
|
||||
|
||||
const row = stmt.get(nodeType);
|
||||
return row ? this.rowToStoredNode(row) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all packages
|
||||
*/
|
||||
async getPackages(): Promise<Array<{ name: string; nodeCount: number }>> {
|
||||
const stmt = this.db.prepare(`
|
||||
SELECT package_name as name, COUNT(*) as nodeCount
|
||||
FROM nodes
|
||||
GROUP BY package_name
|
||||
ORDER BY nodeCount DESC
|
||||
`);
|
||||
|
||||
return stmt.all() as Array<{ name: string; nodeCount: number }>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Bulk store nodes (used for database rebuild)
|
||||
*/
|
||||
async bulkStoreNodes(nodeInfos: NodeSourceInfo[]): Promise<{
|
||||
stored: number;
|
||||
failed: number;
|
||||
errors: Array<{ nodeType: string; error: string }>;
|
||||
}> {
|
||||
const results = {
|
||||
stored: 0,
|
||||
failed: 0,
|
||||
errors: [] as Array<{ nodeType: string; error: string }>
|
||||
};
|
||||
|
||||
// Use transaction for bulk insert
|
||||
const insertMany = this.db.transaction((nodes: NodeSourceInfo[]) => {
|
||||
for (const nodeInfo of nodes) {
|
||||
try {
|
||||
this.storeNode(nodeInfo);
|
||||
results.stored++;
|
||||
} catch (error) {
|
||||
results.failed++;
|
||||
results.errors.push({
|
||||
nodeType: nodeInfo.nodeType,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
insertMany(nodeInfos);
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get statistics
|
||||
*/
|
||||
async getStatistics(): Promise<{
|
||||
totalNodes: number;
|
||||
totalPackages: number;
|
||||
totalCodeSize: number;
|
||||
nodesWithCredentials: number;
|
||||
averageNodeSize: number;
|
||||
packageDistribution: Array<{ package: string; count: number }>;
|
||||
}> {
|
||||
const stats = this.db.prepare(`
|
||||
SELECT
|
||||
COUNT(*) as totalNodes,
|
||||
COUNT(DISTINCT package_name) as totalPackages,
|
||||
SUM(code_length) as totalCodeSize,
|
||||
SUM(has_credentials) as nodesWithCredentials
|
||||
FROM nodes
|
||||
`).get() as any;
|
||||
|
||||
const packageDist = this.db.prepare(`
|
||||
SELECT package_name as package, COUNT(*) as count
|
||||
FROM nodes
|
||||
GROUP BY package_name
|
||||
ORDER BY count DESC
|
||||
`).all() as Array<{ package: string; count: number }>;
|
||||
|
||||
return {
|
||||
totalNodes: stats.totalNodes || 0,
|
||||
totalPackages: stats.totalPackages || 0,
|
||||
totalCodeSize: stats.totalCodeSize || 0,
|
||||
nodesWithCredentials: stats.nodesWithCredentials || 0,
|
||||
averageNodeSize: stats.totalNodes > 0 ? Math.round(stats.totalCodeSize / stats.totalNodes) : 0,
|
||||
packageDistribution: packageDist
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Rebuild entire database
|
||||
*/
|
||||
async rebuildDatabase(): Promise<void> {
|
||||
logger.info('Starting database rebuild...');
|
||||
|
||||
// Clear existing data
|
||||
this.db.exec('DELETE FROM nodes');
|
||||
this.db.exec('DELETE FROM extraction_stats');
|
||||
|
||||
logger.info('Database cleared for rebuild');
|
||||
}
|
||||
|
||||
/**
|
||||
* Save extraction statistics
|
||||
*/
|
||||
async saveExtractionStats(stats: {
|
||||
totalNodes: number;
|
||||
totalPackages: number;
|
||||
totalCodeSize: number;
|
||||
nodesWithCredentials: number;
|
||||
}): Promise<void> {
|
||||
const stmt = this.db.prepare(`
|
||||
INSERT INTO extraction_stats (
|
||||
total_nodes, total_packages, total_code_size, nodes_with_credentials
|
||||
) VALUES (?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
stmt.run(
|
||||
stats.totalNodes,
|
||||
stats.totalPackages,
|
||||
stats.totalCodeSize,
|
||||
stats.nodesWithCredentials
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close database connection
|
||||
*/
|
||||
close(): void {
|
||||
this.db.close();
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert database row to StoredNode
|
||||
*/
|
||||
private rowToStoredNode(row: any): StoredNode {
|
||||
return {
|
||||
id: String(row.id),
|
||||
nodeType: row.node_type,
|
||||
name: row.name,
|
||||
packageName: row.package_name,
|
||||
displayName: row.display_name,
|
||||
description: row.description,
|
||||
codeHash: row.code_hash,
|
||||
codeLength: row.code_length,
|
||||
sourceLocation: row.source_location,
|
||||
hasCredentials: row.has_credentials === 1,
|
||||
extractedAt: new Date(row.extracted_at),
|
||||
updatedAt: new Date(row.updated_at),
|
||||
sourceCode: row.source_code,
|
||||
credentialCode: row.credential_code,
|
||||
packageInfo: row.package_info ? JSON.parse(row.package_info) : undefined
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract display name from source code
|
||||
*/
|
||||
private extractDisplayName(sourceCode: string): string | undefined {
|
||||
const match = sourceCode.match(/displayName:\s*["'`]([^"'`]+)["'`]/);
|
||||
return match ? match[1] : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract description from source code
|
||||
*/
|
||||
private extractDescription(sourceCode: string): string | undefined {
|
||||
const match = sourceCode.match(/description:\s*["'`]([^"'`]+)["'`]/);
|
||||
return match ? match[1] : undefined;
|
||||
}
|
||||
}
|
||||
@@ -16,7 +16,12 @@ export class NodeSourceExtractor {
|
||||
'/app/node_modules',
|
||||
'/home/node/.n8n/custom/nodes',
|
||||
'./node_modules',
|
||||
];
|
||||
// Docker volume paths
|
||||
'/var/lib/docker/volumes/n8n-mcp_n8n_modules/_data',
|
||||
'/n8n-modules',
|
||||
// Common n8n installation paths
|
||||
process.env.N8N_CUSTOM_EXTENSIONS || '',
|
||||
].filter(Boolean);
|
||||
|
||||
/**
|
||||
* Extract source code for a specific n8n node
|
||||
@@ -70,8 +75,8 @@ export class NodeSourceExtractor {
|
||||
nodeName: string
|
||||
): Promise<NodeSourceInfo | null> {
|
||||
try {
|
||||
// Common patterns for node files
|
||||
const patterns = [
|
||||
// First, try standard patterns
|
||||
const standardPatterns = [
|
||||
`${packageName}/dist/nodes/${nodeName}/${nodeName}.node.js`,
|
||||
`${packageName}/dist/nodes/${nodeName}.node.js`,
|
||||
`${packageName}/nodes/${nodeName}/${nodeName}.node.js`,
|
||||
@@ -80,39 +85,36 @@ export class NodeSourceExtractor {
|
||||
`${nodeName}.node.js`,
|
||||
];
|
||||
|
||||
for (const pattern of patterns) {
|
||||
// Additional patterns for nested node structures (e.g., agents/Agent)
|
||||
const nestedPatterns = [
|
||||
`${packageName}/dist/nodes/*/${nodeName}/${nodeName}.node.js`,
|
||||
`${packageName}/dist/nodes/**/${nodeName}/${nodeName}.node.js`,
|
||||
`${packageName}/nodes/*/${nodeName}/${nodeName}.node.js`,
|
||||
`${packageName}/nodes/**/${nodeName}/${nodeName}.node.js`,
|
||||
];
|
||||
|
||||
// Try standard patterns first
|
||||
for (const pattern of standardPatterns) {
|
||||
const fullPath = path.join(basePath, pattern);
|
||||
const result = await this.tryLoadNodeFile(fullPath, packageName, nodeName, basePath);
|
||||
if (result) return result;
|
||||
}
|
||||
|
||||
// Try nested patterns (with glob-like search)
|
||||
for (const pattern of nestedPatterns) {
|
||||
const result = await this.searchWithGlobPattern(basePath, pattern, packageName, nodeName);
|
||||
if (result) return result;
|
||||
}
|
||||
|
||||
// If basePath contains .pnpm, search in pnpm structure
|
||||
if (basePath.includes('node_modules')) {
|
||||
const pnpmPath = path.join(basePath, '.pnpm');
|
||||
try {
|
||||
const sourceCode = await fs.readFile(fullPath, 'utf-8');
|
||||
|
||||
// Try to find credential file
|
||||
const credentialPath = fullPath.replace('.node.js', '.credentials.js');
|
||||
let credentialCode: string | undefined;
|
||||
try {
|
||||
credentialCode = await fs.readFile(credentialPath, 'utf-8');
|
||||
} catch {
|
||||
// Credential file is optional
|
||||
}
|
||||
|
||||
// Try to get package.json info
|
||||
const packageJsonPath = path.join(basePath, packageName, 'package.json');
|
||||
let packageInfo: any;
|
||||
try {
|
||||
const packageJson = await fs.readFile(packageJsonPath, 'utf-8');
|
||||
packageInfo = JSON.parse(packageJson);
|
||||
} catch {
|
||||
// Package.json is optional
|
||||
}
|
||||
|
||||
return {
|
||||
nodeType: `${packageName}.${nodeName}`,
|
||||
sourceCode,
|
||||
credentialCode,
|
||||
packageInfo,
|
||||
location: fullPath,
|
||||
};
|
||||
await fs.access(pnpmPath);
|
||||
const result = await this.searchInPnpm(pnpmPath, packageName, nodeName);
|
||||
if (result) return result;
|
||||
} catch {
|
||||
// Continue searching
|
||||
// .pnpm directory doesn't exist
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -122,6 +124,172 @@ export class NodeSourceExtractor {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Search for nodes in pnpm's special directory structure
|
||||
*/
|
||||
private async searchInPnpm(
|
||||
pnpmPath: string,
|
||||
packageName: string,
|
||||
nodeName: string
|
||||
): Promise<NodeSourceInfo | null> {
|
||||
try {
|
||||
const entries = await fs.readdir(pnpmPath);
|
||||
|
||||
// Filter entries that might contain our package
|
||||
const packageEntries = entries.filter(entry =>
|
||||
entry.includes(packageName.replace('/', '+')) ||
|
||||
entry.includes(packageName)
|
||||
);
|
||||
|
||||
for (const entry of packageEntries) {
|
||||
const entryPath = path.join(pnpmPath, entry, 'node_modules', packageName);
|
||||
|
||||
// Search patterns within the pnpm package directory
|
||||
const patterns = [
|
||||
`dist/nodes/${nodeName}/${nodeName}.node.js`,
|
||||
`dist/nodes/${nodeName}.node.js`,
|
||||
`dist/nodes/*/${nodeName}/${nodeName}.node.js`,
|
||||
`dist/nodes/**/${nodeName}/${nodeName}.node.js`,
|
||||
];
|
||||
|
||||
for (const pattern of patterns) {
|
||||
if (pattern.includes('*')) {
|
||||
const result = await this.searchWithGlobPattern(entryPath, pattern, packageName, nodeName);
|
||||
if (result) return result;
|
||||
} else {
|
||||
const fullPath = path.join(entryPath, pattern);
|
||||
const result = await this.tryLoadNodeFile(fullPath, packageName, nodeName, entryPath);
|
||||
if (result) return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug(`Error searching in pnpm directory: ${error}`);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Search for files matching a glob-like pattern
|
||||
*/
|
||||
private async searchWithGlobPattern(
|
||||
basePath: string,
|
||||
pattern: string,
|
||||
packageName: string,
|
||||
nodeName: string
|
||||
): Promise<NodeSourceInfo | null> {
|
||||
// Convert glob pattern to regex parts
|
||||
const parts = pattern.split('/');
|
||||
const targetFile = `${nodeName}.node.js`;
|
||||
|
||||
async function searchDir(currentPath: string, remainingParts: string[]): Promise<string | null> {
|
||||
if (remainingParts.length === 0) return null;
|
||||
|
||||
const part = remainingParts[0];
|
||||
const isLastPart = remainingParts.length === 1;
|
||||
|
||||
try {
|
||||
if (isLastPart && part === targetFile) {
|
||||
// Check if file exists
|
||||
const fullPath = path.join(currentPath, part);
|
||||
await fs.access(fullPath);
|
||||
return fullPath;
|
||||
}
|
||||
|
||||
const entries = await fs.readdir(currentPath, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
if (!entry.isDirectory() && !isLastPart) continue;
|
||||
|
||||
if (part === '*' || part === '**') {
|
||||
// Match any directory
|
||||
if (entry.isDirectory()) {
|
||||
const result = await searchDir(
|
||||
path.join(currentPath, entry.name),
|
||||
part === '**' ? remainingParts : remainingParts.slice(1)
|
||||
);
|
||||
if (result) return result;
|
||||
}
|
||||
} else if (entry.name === part || (isLastPart && entry.name === targetFile)) {
|
||||
if (isLastPart && entry.isFile()) {
|
||||
return path.join(currentPath, entry.name);
|
||||
} else if (!isLastPart && entry.isDirectory()) {
|
||||
const result = await searchDir(
|
||||
path.join(currentPath, entry.name),
|
||||
remainingParts.slice(1)
|
||||
);
|
||||
if (result) return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Directory doesn't exist or can't be read
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
const foundPath = await searchDir(basePath, parts);
|
||||
if (foundPath) {
|
||||
return this.tryLoadNodeFile(foundPath, packageName, nodeName, basePath);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to load a node file and its associated files
|
||||
*/
|
||||
private async tryLoadNodeFile(
|
||||
fullPath: string,
|
||||
packageName: string,
|
||||
nodeName: string,
|
||||
packageBasePath: string
|
||||
): Promise<NodeSourceInfo | null> {
|
||||
try {
|
||||
const sourceCode = await fs.readFile(fullPath, 'utf-8');
|
||||
|
||||
// Try to find credential file
|
||||
const credentialPath = fullPath.replace('.node.js', '.credentials.js');
|
||||
let credentialCode: string | undefined;
|
||||
try {
|
||||
credentialCode = await fs.readFile(credentialPath, 'utf-8');
|
||||
} catch {
|
||||
// Credential file is optional
|
||||
}
|
||||
|
||||
// Try to get package.json info
|
||||
let packageInfo: any;
|
||||
const possiblePackageJsonPaths = [
|
||||
path.join(packageBasePath, 'package.json'),
|
||||
path.join(packageBasePath, packageName, 'package.json'),
|
||||
path.join(path.dirname(path.dirname(fullPath)), 'package.json'),
|
||||
path.join(path.dirname(path.dirname(path.dirname(fullPath))), 'package.json'),
|
||||
];
|
||||
|
||||
for (const packageJsonPath of possiblePackageJsonPaths) {
|
||||
try {
|
||||
const packageJson = await fs.readFile(packageJsonPath, 'utf-8');
|
||||
packageInfo = JSON.parse(packageJson);
|
||||
break;
|
||||
} catch {
|
||||
// Try next path
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
nodeType: `${packageName}.${nodeName}`,
|
||||
sourceCode,
|
||||
credentialCode,
|
||||
packageInfo,
|
||||
location: fullPath,
|
||||
};
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all available nodes
|
||||
*/
|
||||
@@ -183,9 +351,14 @@ export class NodeSourceExtractor {
|
||||
} catch {
|
||||
// Skip files we can't read
|
||||
}
|
||||
} else if (entry.isDirectory() && entry.name !== 'node_modules') {
|
||||
// Recursively scan subdirectories
|
||||
await this.scanDirectoryForNodes(path.join(dirPath, entry.name), nodes, category, search);
|
||||
} else if (entry.isDirectory()) {
|
||||
// Special handling for .pnpm directories
|
||||
if (entry.name === '.pnpm') {
|
||||
await this.scanPnpmDirectory(path.join(dirPath, entry.name), nodes, category, search);
|
||||
} else if (entry.name !== 'node_modules') {
|
||||
// Recursively scan subdirectories
|
||||
await this.scanDirectoryForNodes(path.join(dirPath, entry.name), nodes, category, search);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -193,6 +366,32 @@ export class NodeSourceExtractor {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan pnpm directory structure for nodes
|
||||
*/
|
||||
private async scanPnpmDirectory(
|
||||
pnpmPath: string,
|
||||
nodes: any[],
|
||||
category?: string,
|
||||
search?: string
|
||||
): Promise<void> {
|
||||
try {
|
||||
const entries = await fs.readdir(pnpmPath);
|
||||
|
||||
for (const entry of entries) {
|
||||
const entryPath = path.join(pnpmPath, entry, 'node_modules');
|
||||
try {
|
||||
await fs.access(entryPath);
|
||||
await this.scanDirectoryForNodes(entryPath, nodes, category, search);
|
||||
} catch {
|
||||
// Skip if node_modules doesn't exist
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug(`Error scanning pnpm directory ${pnpmPath}: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract AI Agent node specifically
|
||||
*/
|
||||
|
||||
Reference in New Issue
Block a user