feat: add node extraction scripts and Docker setup for n8n modules
scripts/extract-from-docker.js  (new file, 220 lines)
@@ -0,0 +1,220 @@
#!/usr/bin/env node
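
// Invoked by scripts/extract-nodes-simple.sh with N8N_MODULES_PATH pointing at a copied
// node_modules tree, and (presumably via the node-extractor service) by
// scripts/extract-nodes-docker.sh with the n8n volumes mounted at /n8n-modules and /n8n-custom.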

const dotenv = require('dotenv');
const { NodeDocumentationService } = require('../dist/services/node-documentation-service');
const { NodeSourceExtractor } = require('../dist/utils/node-source-extractor');
const { logger } = require('../dist/utils/logger');
const fs = require('fs').promises;
const path = require('path');

// Load environment variables
dotenv.config();

async function extractNodesFromDocker() {
  logger.info('🐳 Starting Docker-based node extraction...');

  // Add Docker volume paths to environment for NodeSourceExtractor
  const dockerVolumePaths = [
    process.env.N8N_MODULES_PATH || '/n8n-modules',
    process.env.N8N_CUSTOM_PATH || '/n8n-custom',
  ];

  logger.info(`Docker volume paths: ${dockerVolumePaths.join(', ')}`);

  // Check if volumes are mounted
  for (const volumePath of dockerVolumePaths) {
    try {
      await fs.access(volumePath);
      logger.info(`✅ Volume mounted: ${volumePath}`);

      // List what's in the volume
      const entries = await fs.readdir(volumePath);
      logger.info(`Contents of ${volumePath}: ${entries.slice(0, 10).join(', ')}${entries.length > 10 ? '...' : ''}`);
    } catch (error) {
      logger.warn(`❌ Volume not accessible: ${volumePath}`);
    }
  }

  // Initialize services
  const docService = new NodeDocumentationService();
  const extractor = new NodeSourceExtractor();

  // Extend the extractor's search paths with Docker volumes
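  // unshift() puts the mounted volumes ahead of the extractor's default search locations,
  // so node sources from the Docker image should be found before any locally installed n8n.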
  extractor.n8nBasePaths.unshift(...dockerVolumePaths);

  // Clear existing nodes to ensure we only have latest versions
  logger.info('🧹 Clearing existing nodes...');
  const db = docService.db;
  db.prepare('DELETE FROM nodes').run();

  logger.info('🔍 Searching for n8n nodes in Docker volumes...');

  // Known n8n packages to extract
  const n8nPackages = [
    'n8n-nodes-base',
    '@n8n/n8n-nodes-langchain',
    'n8n-nodes-extras',
  ];

  let totalExtracted = 0;
  let ifNodeVersion = null;

  for (const packageName of n8nPackages) {
    logger.info(`\n📦 Processing package: ${packageName}`);

    try {
      // Find package in Docker volumes
      let packagePath = null;

      for (const volumePath of dockerVolumePaths) {
        const possiblePaths = [
          path.join(volumePath, packageName),
          path.join(volumePath, '.pnpm', `${packageName}@*`, 'node_modules', packageName),
        ];
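
        // Note: pnpm stores packages under .pnpm/<name>@<version>/node_modules/<name>,
        // with "/" in scoped names encoded as "+". The wildcard entry above is resolved
        // by scanning that directory below rather than with a glob library.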
        for (const testPath of possiblePaths) {
          try {
            // Use glob pattern to find pnpm packages
            if (testPath.includes('*')) {
              const baseDir = path.dirname(testPath.split('*')[0]);
              const entries = await fs.readdir(baseDir);

              for (const entry of entries) {
                if (entry.includes(packageName.replace('/', '+'))) {
                  const fullPath = path.join(baseDir, entry, 'node_modules', packageName);
                  try {
                    await fs.access(fullPath);
                    packagePath = fullPath;
                    break;
                  } catch {}
                }
              }
            } else {
              await fs.access(testPath);
              packagePath = testPath;
              break;
            }
          } catch {}
        }

        if (packagePath) break;
      }

      if (!packagePath) {
        logger.warn(`Package ${packageName} not found in Docker volumes`);
        continue;
      }

      logger.info(`Found package at: ${packagePath}`);

      // Check package version
      try {
        const packageJsonPath = path.join(packagePath, 'package.json');
        const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf-8'));
        logger.info(`Package version: ${packageJson.version}`);
      } catch {}

      // Find nodes directory
      const nodesPath = path.join(packagePath, 'dist', 'nodes');

      try {
        await fs.access(nodesPath);
        logger.info(`Scanning nodes directory: ${nodesPath}`);

        // Extract all nodes from this package
        const nodeEntries = await scanForNodes(nodesPath);
        logger.info(`Found ${nodeEntries.length} nodes in ${packageName}`);

        for (const nodeEntry of nodeEntries) {
          try {
            const nodeName = nodeEntry.name.replace('.node.js', '');
            const nodeType = `${packageName}.${nodeName}`;

            logger.info(`Extracting: ${nodeType}`);

            // Extract source info
            const sourceInfo = await extractor.extractNodeSource(nodeType);

            // Check if this is the If node
            if (nodeName === 'If') {
              // Look for version in the source code
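              // (Only the integer part is captured, so e.g. "version: 2.2" is recorded as "2".)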
              const versionMatch = sourceInfo.sourceCode.match(/version:\s*(\d+)/);
              if (versionMatch) {
                ifNodeVersion = versionMatch[1];
                logger.info(`📍 Found If node version: ${ifNodeVersion}`);
              }
            }

            // Store in database
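            // (version falls back to '1' until an If-node version has been detected;
            // isTrigger/isWebhook are best-effort heuristics based on source text and node name.)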
            await docService.storeNode({
              nodeType: nodeType,
              name: nodeName,
              displayName: nodeName,
              description: `${nodeName} node from ${packageName}`,
              sourceCode: sourceInfo.sourceCode,
              credentialCode: sourceInfo.credentialCode,
              packageName: packageName,
              version: ifNodeVersion || '1',
              hasCredentials: !!sourceInfo.credentialCode,
              isTrigger: sourceInfo.sourceCode.includes('trigger: true') || nodeName.toLowerCase().includes('trigger'),
              isWebhook: sourceInfo.sourceCode.includes('webhook: true') || nodeName.toLowerCase().includes('webhook'),
            });

            totalExtracted++;
          } catch (error) {
            logger.error(`Failed to extract ${nodeEntry.name}: ${error}`);
          }
        }
      } catch (error) {
        logger.error(`Failed to scan nodes directory: ${error}`);
      }
    } catch (error) {
      logger.error(`Failed to process package ${packageName}: ${error}`);
    }
  }

  logger.info(`\n✅ Extraction complete!`);
  logger.info(`📊 Total nodes extracted: ${totalExtracted}`);

  if (ifNodeVersion) {
    logger.info(`📍 If node version: ${ifNodeVersion}`);
    if (ifNodeVersion === '2' || ifNodeVersion === '2.2') {
      logger.info('✅ Successfully extracted latest If node (v2+)!');
    } else {
      logger.warn(`⚠️ If node version is ${ifNodeVersion}, expected v2 or higher`);
    }
  }

  // Close database
  docService.close();
}

async function scanForNodes(dirPath) {
  const nodes = [];

  async function scan(currentPath) {
    try {
      const entries = await fs.readdir(currentPath, { withFileTypes: true });

      for (const entry of entries) {
        const fullPath = path.join(currentPath, entry.name);

        if (entry.isFile() && entry.name.endsWith('.node.js')) {
          nodes.push({ name: entry.name, path: fullPath });
        } else if (entry.isDirectory() && entry.name !== 'node_modules') {
          await scan(fullPath);
        }
      }
    } catch (error) {
      logger.debug(`Failed to scan directory ${currentPath}: ${error}`);
    }
  }

  await scan(dirPath);
  return nodes;
}

// Run extraction
extractNodesFromDocker().catch(error => {
  logger.error('Extraction failed:', error);
  process.exit(1);
});
scripts/extract-nodes-docker.sh  (new executable file, 116 lines)
@@ -0,0 +1,116 @@
#!/bin/bash
set -e

echo "🐳 n8n Node Extraction via Docker"
echo "================================="

# Colors for output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color

# Function to print colored output
print_status() {
    echo -e "${GREEN}[$(date +'%H:%M:%S')]${NC} $1"
}

print_warning() {
    echo -e "${YELLOW}[$(date +'%H:%M:%S')]${NC} ⚠️ $1"
}

print_error() {
    echo -e "${RED}[$(date +'%H:%M:%S')]${NC} ❌ $1"
}

# Check if Docker is running
if ! docker info > /dev/null 2>&1; then
    print_error "Docker is not running. Please start Docker and try again."
    exit 1
fi

print_status "Docker is running ✅"
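
# docker-compose.extract.yml is not part of this commit; it is expected to define an
# n8n-latest service (n8nio/n8n with a healthcheck) and a node-extractor service that
# runs scripts/extract-from-docker.js with the image's node_modules mounted at
# /n8n-modules (and custom nodes at /n8n-custom), matching that script's defaults.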

# Clean up any existing containers
print_status "Cleaning up existing containers..."
docker-compose -f docker-compose.extract.yml down -v 2>/dev/null || true

# Build the project first
print_status "Building the project..."
npm run build

# Start the extraction process
print_status "Starting n8n container to extract latest nodes..."
docker-compose -f docker-compose.extract.yml up -d n8n-latest

# Wait for n8n container to be healthy
print_status "Waiting for n8n container to initialize..."
ATTEMPTS=0
MAX_ATTEMPTS=60
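
# Poll every 2 seconds, for up to 2 minutes (60 attempts), until the healthcheck reports healthy.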
while [ $ATTEMPTS -lt $MAX_ATTEMPTS ]; do
    if docker-compose -f docker-compose.extract.yml ps | grep -q "healthy"; then
        print_status "n8n container is ready ✅"
        break
    fi

    ATTEMPTS=$((ATTEMPTS + 1))
    echo -n "."
    sleep 2
done

if [ $ATTEMPTS -eq $MAX_ATTEMPTS ]; then
    print_error "n8n container failed to become healthy"
    docker-compose -f docker-compose.extract.yml logs n8n-latest
    docker-compose -f docker-compose.extract.yml down -v
    exit 1
fi

# Run the extraction
print_status "Running node extraction..."
docker-compose -f docker-compose.extract.yml run --rm node-extractor

# Check the results
print_status "Checking extraction results..."
if [ -f "./data/nodes-fresh.db" ]; then
    NODE_COUNT=$(sqlite3 ./data/nodes-fresh.db "SELECT COUNT(*) FROM nodes;" 2>/dev/null || echo "0")
    IF_VERSION=$(sqlite3 ./data/nodes-fresh.db "SELECT version FROM nodes WHERE node_type='n8n-nodes-base.If' LIMIT 1;" 2>/dev/null || echo "not found")

    print_status "Extracted $NODE_COUNT nodes"
    print_status "If node version: $IF_VERSION"

    # Check if we got the If node source code and look for version
    IF_SOURCE=$(sqlite3 ./data/nodes-fresh.db "SELECT source_code FROM nodes WHERE node_type='n8n-nodes-base.If' LIMIT 1;" 2>/dev/null || echo "")
if [[ $IF_SOURCE =~ version:[[:space:]]*([0-9]+) ]]; then
|
||||
IF_CODE_VERSION="${BASH_REMATCH[1]}"
|
||||
print_status "If node version from source code: v$IF_CODE_VERSION"
|
||||
|
||||
if [ "$IF_CODE_VERSION" -ge "2" ]; then
|
||||
print_status "✅ Successfully extracted latest If node (v$IF_CODE_VERSION)!"
|
||||
else
|
||||
print_warning "If node is still v$IF_CODE_VERSION, expected v2 or higher"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
print_error "Database file not found after extraction"
|
||||
fi
|
||||
|
||||
# Clean up
|
||||
print_status "Cleaning up Docker containers..."
|
||||
docker-compose -f docker-compose.extract.yml down -v
|
||||
|
||||
print_status "✨ Extraction complete!"
|
||||
|
||||
# Offer to restart the MCP server
|
||||
echo ""
|
||||
read -p "Would you like to restart the MCP server with the new nodes? (y/n) " -n 1 -r
|
||||
echo ""
|
||||
if [[ $REPLY =~ ^[Yy]$ ]]; then
|
||||
print_status "Restarting MCP server..."
|
||||
# Kill any existing server process
|
||||
pkill -f "node.*dist/index.js" || true
|
||||
|
||||
# Start the server
|
||||
npm start &
|
||||
print_status "MCP server restarted with fresh node database"
|
||||
fi
|
||||
scripts/extract-nodes-simple.sh  (new executable file, 108 lines)
@@ -0,0 +1,108 @@
#!/bin/bash
set -e

echo "🐳 Simple n8n Node Extraction via Docker"
echo "======================================="

# Colors for output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color

# Function to print colored output
print_status() {
    echo -e "${GREEN}[$(date +'%H:%M:%S')]${NC} $1"
}

print_warning() {
    echo -e "${YELLOW}[$(date +'%H:%M:%S')]${NC} ⚠️ $1"
}

print_error() {
    echo -e "${RED}[$(date +'%H:%M:%S')]${NC} ❌ $1"
}

# Check if Docker is running
if ! docker info > /dev/null 2>&1; then
    print_error "Docker is not running. Please start Docker and try again."
    exit 1
fi

print_status "Docker is running ✅"

# Build the project first
print_status "Building the project..."
npm run build

# Create a temporary directory for extraction
TEMP_DIR=$(mktemp -d)
print_status "Created temporary directory: $TEMP_DIR"

# Run Docker container to copy node files
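# Keep a throwaway n8n container alive for up to 5 minutes (sleep 300) so files can be copied out of it.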
print_status "Running n8n container to extract nodes..."
|
||||
docker run --rm -d --name n8n-temp n8nio/n8n:latest sleep 300
|
||||
|
||||
# Wait a bit for container to start
|
||||
sleep 5
|
||||
|
||||
# Copy n8n modules from container
|
||||
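# The official n8nio/n8n image installs n8n globally, so its bundled node packages
# live under /usr/local/lib/node_modules/n8n/node_modules (the path copied below).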
print_status "Copying n8n modules from container..."
|
||||
docker cp n8n-temp:/usr/local/lib/node_modules/n8n/node_modules "$TEMP_DIR/node_modules" || {
|
||||
print_error "Failed to copy node_modules"
|
||||
docker stop n8n-temp
|
||||
rm -rf "$TEMP_DIR"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Stop the container
|
||||
docker stop n8n-temp
|
||||
|
||||
# Run our extraction script locally
|
||||
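# N8N_MODULES_PATH points extract-from-docker.js at the copied tree instead of its /n8n-modules volume default.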
print_status "Running extraction script..."
|
||||
NODE_ENV=development \
|
||||
NODE_DB_PATH=./data/nodes-fresh.db \
|
||||
N8N_MODULES_PATH="$TEMP_DIR/node_modules" \
|
||||
node scripts/extract-from-docker.js
|
||||
|
||||
# Clean up
|
||||
print_status "Cleaning up temporary files..."
|
||||
rm -rf "$TEMP_DIR"
|
||||
|
||||
# Check the results
|
||||
print_status "Checking extraction results..."
|
||||
if [ -f "./data/nodes-fresh.db" ]; then
|
||||
NODE_COUNT=$(sqlite3 ./data/nodes-fresh.db "SELECT COUNT(*) FROM nodes;" 2>/dev/null || echo "0")
|
||||
print_status "Extracted $NODE_COUNT nodes"
|
||||
|
||||
# Check if we got the If node source code and look for version
|
||||
IF_SOURCE=$(sqlite3 ./data/nodes-fresh.db "SELECT source_code FROM nodes WHERE node_type='n8n-nodes-base.If' LIMIT 1;" 2>/dev/null || echo "")
|
||||
if [[ $IF_SOURCE =~ version:[[:space:]]*([0-9]+) ]]; then
|
||||
IF_CODE_VERSION="${BASH_REMATCH[1]}"
|
||||
print_status "If node version from source code: v$IF_CODE_VERSION"
|
||||
|
||||
if [ "$IF_CODE_VERSION" -ge "2" ]; then
|
||||
print_status "✅ Successfully extracted latest If node (v$IF_CODE_VERSION)!"
|
||||
else
|
||||
print_warning "If node is still v$IF_CODE_VERSION, expected v2 or higher"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
print_error "Database file not found after extraction"
|
||||
fi
|
||||
|
||||
print_status "✨ Extraction complete!"
|
||||
|
||||
# Offer to restart the MCP server
|
||||
echo ""
|
||||
read -p "Would you like to restart the MCP server with the new nodes? (y/n) " -n 1 -r
|
||||
echo ""
|
||||
if [[ $REPLY =~ ^[Yy]$ ]]; then
|
||||
print_status "Restarting MCP server..."
|
||||
# Kill any existing server process
|
||||
pkill -f "node.*dist/index.js" || true
|
||||
|
||||
# Start the server
|
||||
npm start &
|
||||
print_status "MCP server restarted with fresh node database"
|
||||
fi
|
||||