Implement SQLite database with full-text search for n8n node documentation

Major features implemented:
- SQLite storage service with FTS5 for fast node search
- Database rebuild mechanism for bulk node extraction
- MCP tools: search_nodes, extract_all_nodes, get_node_statistics (see the client sketch after this list)
- Production Docker deployment with persistent storage
- Management scripts for database operations
- Comprehensive test suite for all functionality
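
The tools above are exposed over MCP. Below is a minimal sketch of how a client might invoke search_nodes over stdio, assuming the @modelcontextprotocol/sdk client; the server entry point path (dist/index.js) and the { query } argument shape are assumptions, not the repository's documented API.

```typescript
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';

async function main() {
  // Spawn the MCP server over stdio (entry point path is assumed)
  const transport = new StdioClientTransport({
    command: 'node',
    args: ['dist/index.js'],
  });

  const client = new Client({ name: 'example-client', version: '0.1.0' }, { capabilities: {} });
  await client.connect(transport);

  // Full-text search across the indexed n8n nodes ({ query } shape is an assumption)
  const result = await client.callTool({
    name: 'search_nodes',
    arguments: { query: 'slack' },
  });
  console.log(JSON.stringify(result, null, 2));

  await client.close();
}

main().catch(console.error);
```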

Database capabilities:
- Stores node source code and metadata
- Full-text search by node name or content (see the FTS5 sketch after this list)
- No versioning (stores latest only as per requirements)
- Supports complete database rebuilds
- ~4.5MB database with 500+ nodes indexed
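
A minimal sketch of the FTS5-backed storage and search described above, assuming better-sqlite3; the table and column names are illustrative rather than the actual schema.

```typescript
import Database from 'better-sqlite3';

// Open (or create) the node database
const db = new Database('nodes.db');

// FTS5 virtual table indexing node identity and source code
db.exec(`
  CREATE VIRTUAL TABLE IF NOT EXISTS nodes_fts
  USING fts5(node_type, package_name, source_code);
`);

// Store one extracted node (latest only; no versioning)
const insert = db.prepare(
  'INSERT INTO nodes_fts (node_type, package_name, source_code) VALUES (?, ?, ?)'
);
insert.run('n8n-nodes-base.Slack', 'n8n-nodes-base', '/* node source code */');

// Full-text search by node name or content, best matches first
const search = db.prepare(
  'SELECT node_type, package_name FROM nodes_fts WHERE nodes_fts MATCH ? ORDER BY rank'
);
console.log(search.all('slack'));
```

A complete rebuild can then be as simple as dropping and recreating this table before re-inserting every extracted node.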

Production features:
- Automated deployment script
- Docker Compose production configuration
- Database initialization on first run
- Volume persistence for data
- Management utilities for operations

Documentation:
- Updated README with complete instructions
- Production deployment guide
- Clear troubleshooting section
- API reference for all new tools

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
czlonkowski committed 2025-06-07 21:12:17 +00:00
parent 0cff8fbe6a
commit 078b67ff35
42 changed files with 33875 additions and 196 deletions


@@ -16,7 +16,12 @@ export class NodeSourceExtractor {
    '/app/node_modules',
    '/home/node/.n8n/custom/nodes',
    './node_modules',
    // Docker volume paths
    '/var/lib/docker/volumes/n8n-mcp_n8n_modules/_data',
    '/n8n-modules',
    // Common n8n installation paths
    process.env.N8N_CUSTOM_EXTENSIONS || '',
  ].filter(Boolean);
/**
* Extract source code for a specific n8n node
@@ -70,8 +75,8 @@ export class NodeSourceExtractor {
    nodeName: string
  ): Promise<NodeSourceInfo | null> {
    try {
      // First, try standard patterns
      const standardPatterns = [
        `${packageName}/dist/nodes/${nodeName}/${nodeName}.node.js`,
        `${packageName}/dist/nodes/${nodeName}.node.js`,
        `${packageName}/nodes/${nodeName}/${nodeName}.node.js`,
@@ -80,39 +85,36 @@ export class NodeSourceExtractor {
        `${nodeName}.node.js`,
      ];

      // Additional patterns for nested node structures (e.g., agents/Agent)
      const nestedPatterns = [
        `${packageName}/dist/nodes/*/${nodeName}/${nodeName}.node.js`,
        `${packageName}/dist/nodes/**/${nodeName}/${nodeName}.node.js`,
        `${packageName}/nodes/*/${nodeName}/${nodeName}.node.js`,
        `${packageName}/nodes/**/${nodeName}/${nodeName}.node.js`,
      ];

      // Try standard patterns first
      for (const pattern of standardPatterns) {
        const fullPath = path.join(basePath, pattern);
        const result = await this.tryLoadNodeFile(fullPath, packageName, nodeName, basePath);
        if (result) return result;
      }

      // Try nested patterns (with glob-like search)
      for (const pattern of nestedPatterns) {
        const result = await this.searchWithGlobPattern(basePath, pattern, packageName, nodeName);
        if (result) return result;
      }

      // If basePath contains .pnpm, search in pnpm structure
      if (basePath.includes('node_modules')) {
        const pnpmPath = path.join(basePath, '.pnpm');
        try {
          await fs.access(pnpmPath);
          const result = await this.searchInPnpm(pnpmPath, packageName, nodeName);
          if (result) return result;
        } catch {
          // .pnpm directory doesn't exist
        }
      }
    } catch (error) {
@@ -122,6 +124,172 @@ export class NodeSourceExtractor {
    return null;
  }

  /**
   * Search for nodes in pnpm's special directory structure
   */
  private async searchInPnpm(
    pnpmPath: string,
    packageName: string,
    nodeName: string
  ): Promise<NodeSourceInfo | null> {
    try {
      const entries = await fs.readdir(pnpmPath);

      // Filter entries that might contain our package
      const packageEntries = entries.filter(entry =>
        entry.includes(packageName.replace('/', '+')) ||
        entry.includes(packageName)
      );

      for (const entry of packageEntries) {
        const entryPath = path.join(pnpmPath, entry, 'node_modules', packageName);

        // Search patterns within the pnpm package directory
        const patterns = [
          `dist/nodes/${nodeName}/${nodeName}.node.js`,
          `dist/nodes/${nodeName}.node.js`,
          `dist/nodes/*/${nodeName}/${nodeName}.node.js`,
          `dist/nodes/**/${nodeName}/${nodeName}.node.js`,
        ];

        for (const pattern of patterns) {
          if (pattern.includes('*')) {
            const result = await this.searchWithGlobPattern(entryPath, pattern, packageName, nodeName);
            if (result) return result;
          } else {
            const fullPath = path.join(entryPath, pattern);
            const result = await this.tryLoadNodeFile(fullPath, packageName, nodeName, entryPath);
            if (result) return result;
          }
        }
      }
    } catch (error) {
      logger.debug(`Error searching in pnpm directory: ${error}`);
    }

    return null;
  }
  /**
   * Search for files matching a glob-like pattern
   */
  private async searchWithGlobPattern(
    basePath: string,
    pattern: string,
    packageName: string,
    nodeName: string
  ): Promise<NodeSourceInfo | null> {
    // Convert glob pattern to regex parts
    const parts = pattern.split('/');
    const targetFile = `${nodeName}.node.js`;

    async function searchDir(currentPath: string, remainingParts: string[]): Promise<string | null> {
      if (remainingParts.length === 0) return null;

      const part = remainingParts[0];
      const isLastPart = remainingParts.length === 1;

      try {
        if (isLastPart && part === targetFile) {
          // Check if file exists
          const fullPath = path.join(currentPath, part);
          await fs.access(fullPath);
          return fullPath;
        }

        const entries = await fs.readdir(currentPath, { withFileTypes: true });

        for (const entry of entries) {
          if (!entry.isDirectory() && !isLastPart) continue;

          if (part === '*' || part === '**') {
            // Match any directory
            if (entry.isDirectory()) {
              const result = await searchDir(
                path.join(currentPath, entry.name),
                part === '**' ? remainingParts : remainingParts.slice(1)
              );
              if (result) return result;
            }
          } else if (entry.name === part || (isLastPart && entry.name === targetFile)) {
            if (isLastPart && entry.isFile()) {
              return path.join(currentPath, entry.name);
            } else if (!isLastPart && entry.isDirectory()) {
              const result = await searchDir(
                path.join(currentPath, entry.name),
                remainingParts.slice(1)
              );
              if (result) return result;
            }
          }
        }
      } catch {
        // Directory doesn't exist or can't be read
      }

      return null;
    }

    const foundPath = await searchDir(basePath, parts);
    if (foundPath) {
      return this.tryLoadNodeFile(foundPath, packageName, nodeName, basePath);
    }

    return null;
  }
  /**
   * Try to load a node file and its associated files
   */
  private async tryLoadNodeFile(
    fullPath: string,
    packageName: string,
    nodeName: string,
    packageBasePath: string
  ): Promise<NodeSourceInfo | null> {
    try {
      const sourceCode = await fs.readFile(fullPath, 'utf-8');

      // Try to find credential file
      const credentialPath = fullPath.replace('.node.js', '.credentials.js');
      let credentialCode: string | undefined;
      try {
        credentialCode = await fs.readFile(credentialPath, 'utf-8');
      } catch {
        // Credential file is optional
      }

      // Try to get package.json info
      let packageInfo: any;
      const possiblePackageJsonPaths = [
        path.join(packageBasePath, 'package.json'),
        path.join(packageBasePath, packageName, 'package.json'),
        path.join(path.dirname(path.dirname(fullPath)), 'package.json'),
        path.join(path.dirname(path.dirname(path.dirname(fullPath))), 'package.json'),
      ];

      for (const packageJsonPath of possiblePackageJsonPaths) {
        try {
          const packageJson = await fs.readFile(packageJsonPath, 'utf-8');
          packageInfo = JSON.parse(packageJson);
          break;
        } catch {
          // Try next path
        }
      }

      return {
        nodeType: `${packageName}.${nodeName}`,
        sourceCode,
        credentialCode,
        packageInfo,
        location: fullPath,
      };
    } catch {
      return null;
    }
  }

  /**
   * List all available nodes
   */
@@ -183,9 +351,14 @@ export class NodeSourceExtractor {
          } catch {
            // Skip files we can't read
          }
        } else if (entry.isDirectory()) {
          // Special handling for .pnpm directories
          if (entry.name === '.pnpm') {
            await this.scanPnpmDirectory(path.join(dirPath, entry.name), nodes, category, search);
          } else if (entry.name !== 'node_modules') {
            // Recursively scan subdirectories
            await this.scanDirectoryForNodes(path.join(dirPath, entry.name), nodes, category, search);
          }
        }
      }
    } catch (error) {
@@ -193,6 +366,32 @@ export class NodeSourceExtractor {
    }
  }

  /**
   * Scan pnpm directory structure for nodes
   */
  private async scanPnpmDirectory(
    pnpmPath: string,
    nodes: any[],
    category?: string,
    search?: string
  ): Promise<void> {
    try {
      const entries = await fs.readdir(pnpmPath);

      for (const entry of entries) {
        const entryPath = path.join(pnpmPath, entry, 'node_modules');
        try {
          await fs.access(entryPath);
          await this.scanDirectoryForNodes(entryPath, nodes, category, search);
        } catch {
          // Skip if node_modules doesn't exist
        }
      }
    } catch (error) {
      logger.debug(`Error scanning pnpm directory ${pnpmPath}: ${error}`);
    }
  }

  /**
   * Extract AI Agent node specifically
   */