Clean up legacy documentation and scripts

- Remove all references to workflow execution/management features
- Delete legacy scripts for bidirectional n8n integration
- Update documentation to focus on node documentation serving only
- Remove old docker-compose files for workflow management
- Add simplified docker-compose.yml for documentation server
- Update CHANGELOG.md to reflect v2.0.0 and v2.1.0 changes
- Update Dockerfile to use v2 paths and database

The project is now clearly focused on serving n8n node documentation
to AI assistants, with no workflow execution capabilities.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
czlonkowski
2025-06-08 07:54:53 +00:00
parent ee8aa729c1
commit 1dd6b8e55f
15 changed files with 270 additions and 1116 deletions

.gitignore
View File

@@ -64,4 +64,8 @@ docker-compose.override.yml
.lock-wscript
.node_repl_history
.npmrc
.yarnrc
# Temporary files
temp/
tmp/

View File

@@ -2,6 +2,56 @@
All notable changes to this project will be documented in this file.
## [2.1.0] - 2025-01-08
### Added
- Remote deployment capabilities via HTTP/JSON-RPC transport
- Domain configuration through environment variables (`MCP_DOMAIN`)
- Bearer token authentication for remote access
- Comprehensive remote deployment documentation
- PM2 and Nginx configuration examples
- HTTP server mode (`npm run start:http`)
### Enhanced
- Support for both local (stdio) and remote (HTTP) deployment modes
- Production deployment guide for VM/cloud environments
- Claude Desktop configuration for remote servers
## [2.0.0] - 2025-01-08
### Major Refactoring
- **BREAKING CHANGE**: Refocused project to serve only n8n node documentation
- Removed all workflow execution and management features
- Removed bidirectional n8n-MCP integration
- Simplified to be a read-only documentation server
### Added
- SQLite database with full-text search (FTS5) for node documentation
- Integration with n8n-docs repository for official documentation
- Automatic example workflow generation for each node type
- Comprehensive node information including:
- Source code
- Official documentation
- Usage examples
- Properties schema
- Credential definitions
### New MCP Tools
- `list_nodes` - List available nodes with filtering
- `get_node_info` - Get complete node information
- `search_nodes` - Full-text search across nodes
- `get_node_example` - Get example workflow for a node
- `get_node_source_code` - Get only source code
- `get_node_documentation` - Get only documentation
- `rebuild_database` - Rebuild entire node database
- `get_database_statistics` - Database statistics
### Infrastructure
- New database schema optimized for documentation storage
- `DocumentationFetcher` for n8n-docs repository integration
- `ExampleGenerator` for creating node usage examples
- `NodeDocumentationService` for database management
## [1.1.0] - 2024-01-07
### Added

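For orientation, the sketch below shows roughly what invoking one of the new tools over the 2.1.0 HTTP/JSON-RPC transport could look like. The `tools/call` envelope matches the one used by the test script removed later in this commit; the `/mcp` path, the bearer header format, and the `query` argument name are illustrative assumptions rather than the server's confirmed API.

```bash
# Hypothetical sketch of calling the HTTP transport added in 2.1.0.
# Endpoint path, header, and argument names are assumptions.
curl -s http://localhost:3000/mcp \
  -H "Authorization: Bearer $MCP_AUTH_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{
    "jsonrpc": "2.0",
    "id": 1,
    "method": "tools/call",
    "params": {
      "name": "search_nodes",
      "arguments": { "query": "webhook" }
    }
  }'
```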
View File

@@ -29,20 +29,20 @@ RUN chown -R nodejs:nodejs /app
USER nodejs
# Set environment variable for database location
ENV NODE_DB_PATH=/app/data/nodes.db
ENV NODE_DB_PATH=/app/data/nodes-v2.db
# Create a startup script
RUN printf '#!/bin/sh\n\
echo "🚀 Starting n8n-MCP server..."\n\
echo "🚀 Starting n8n Documentation MCP server..."\n\
\n\
# Initialize database if it does not exist\n\
if [ ! -f "$NODE_DB_PATH" ]; then\n\
echo "📦 Initializing database..."\n\
node dist/scripts/rebuild-database.js\n\
node dist/scripts/rebuild-database-v2.js\n\
fi\n\
\n\
echo "🎯 Database ready, starting MCP server..."\n\
exec node dist/index.js\n' > /app/start.sh && chmod +x /app/start.sh
echo "🎯 Database ready, starting documentation server..."\n\
exec node dist/index-v2.js\n' > /app/start.sh && chmod +x /app/start.sh
# Expose the MCP server port (if using HTTP transport)
EXPOSE 3000
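As a rough sketch of how the updated Dockerfile is meant to be used (the image tag below is an arbitrary choice, not one defined in this commit): building the image and running it with a mounted `data/` volume lets the startup script detect the missing `nodes-v2.db` and rebuild it on first launch.

```bash
# Sketch only; the image tag is arbitrary.
docker build -t n8n-docs-mcp .

# Mounting ./data persists nodes-v2.db; the entrypoint rebuilds it when the file is missing.
# The port mapping is only relevant if the HTTP transport is used.
docker run --rm -v "$(pwd)/data:/app/data" -p 3000:3000 n8n-docs-mcp
```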

View File

@@ -1,32 +0,0 @@
version: '3.8'
services:
n8n:
image: n8nio/n8n:latest
container_name: n8n-dev
restart: unless-stopped
ports:
- "5678:5678"
environment:
- N8N_BASIC_AUTH_ACTIVE=false
- N8N_HOST=localhost
- N8N_PORT=5678
- N8N_PROTOCOL=http
- NODE_ENV=development
- WEBHOOK_URL=http://localhost:5678/
- GENERIC_TIMEZONE=UTC
# Enable API for MCP integration
- N8N_USER_MANAGEMENT_DISABLED=true
- N8N_PUBLIC_API_DISABLED=false
volumes:
- n8n_data:/home/node/.n8n
- ./dist/n8n:/home/node/.n8n/custom/nodes
networks:
- n8n-network
networks:
n8n-network:
driver: bridge
volumes:
n8n_data:

View File

@@ -1,69 +0,0 @@
version: '3.8'
services:
n8n-mcp:
image: n8n-mcp:production
build:
context: .
dockerfile: Dockerfile
container_name: n8n-mcp-server
restart: unless-stopped
environment:
- NODE_ENV=production
- NODE_DB_PATH=/app/data/nodes.db
- N8N_API_URL=http://n8n:5678
- N8N_API_KEY=${N8N_API_KEY}
- LOG_LEVEL=info
volumes:
- mcp-data:/app/data
- n8n-node-modules:/usr/local/lib/node_modules/n8n/node_modules:ro
networks:
- n8n-network
depends_on:
- n8n
healthcheck:
test: ["CMD", "node", "-e", "require('fs').existsSync('/app/data/nodes.db') || process.exit(1)"]
interval: 30s
timeout: 10s
retries: 3
start_period: 60s
n8n:
image: n8nio/n8n:latest
container_name: n8n
restart: unless-stopped
ports:
- "5678:5678"
environment:
- N8N_BASIC_AUTH_ACTIVE=true
- N8N_BASIC_AUTH_USER=${N8N_BASIC_AUTH_USER}
- N8N_BASIC_AUTH_PASSWORD=${N8N_BASIC_AUTH_PASSWORD}
- N8N_HOST=${N8N_HOST:-localhost}
- N8N_PORT=5678
- N8N_PROTOCOL=http
- NODE_ENV=production
- N8N_METRICS=true
- N8N_CUSTOM_EXTENSIONS=/home/node/.n8n/custom
volumes:
- n8n-data:/home/node/.n8n
- ./dist/n8n:/home/node/.n8n/custom/nodes
networks:
- n8n-network
healthcheck:
test: ["CMD", "wget", "-q", "-O", "-", "http://localhost:5678/healthz"]
interval: 30s
timeout: 10s
retries: 3
start_period: 30s
volumes:
n8n-data:
driver: local
mcp-data:
driver: local
n8n-node-modules:
driver: local
networks:
n8n-network:
driver: bridge

View File

@@ -1,63 +0,0 @@
version: '3.8'
services:
n8n:
image: n8nio/n8n:latest
container_name: n8n-test
restart: unless-stopped
ports:
- "5678:5678"
environment:
- N8N_BASIC_AUTH_ACTIVE=false
- N8N_HOST=0.0.0.0
- N8N_PORT=5678
- N8N_PROTOCOL=http
- NODE_ENV=production
- WEBHOOK_URL=http://localhost:5678/
- GENERIC_TIMEZONE=UTC
# Enable API
- N8N_USER_MANAGEMENT_DISABLED=true
- N8N_PUBLIC_API_DISABLED=false
# Install additional nodes
- N8N_CUSTOM_EXTENSIONS=@n8n/n8n-nodes-langchain
volumes:
- n8n_data:/home/node/.n8n
- n8n_modules:/usr/local/lib/node_modules/n8n/node_modules:ro
networks:
- test-network
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "http://localhost:5678/"]
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
n8n-mcp:
build: .
container_name: n8n-mcp-test
restart: unless-stopped
environment:
- MCP_SERVER_PORT=3000
- MCP_SERVER_HOST=0.0.0.0
- N8N_API_URL=http://n8n:5678
- N8N_API_KEY=test-api-key
- MCP_AUTH_TOKEN=test-token
- LOG_LEVEL=debug
volumes:
# Mount n8n's node_modules to access source code
- n8n_modules:/usr/local/lib/node_modules/n8n/node_modules:ro
ports:
- "3000:3000"
networks:
- test-network
depends_on:
- n8n
command: node dist/index.js
networks:
test-network:
driver: bridge
volumes:
n8n_data:
n8n_modules:

View File

@@ -1,53 +1,42 @@
# Development docker-compose configuration
# For production deployment, use docker-compose.prod.yml
# See docs/PRODUCTION_DEPLOYMENT.md for instructions
version: '3.8'
services:
n8n-mcp:
n8n-docs-mcp:
build: .
container_name: n8n-mcp-server
restart: unless-stopped
environment:
- MCP_SERVER_PORT=${MCP_SERVER_PORT:-3000}
- MCP_SERVER_HOST=${MCP_SERVER_HOST:-0.0.0.0}
- N8N_API_URL=${N8N_API_URL:-http://n8n:5678}
- N8N_API_KEY=${N8N_API_KEY}
- MCP_AUTH_TOKEN=${MCP_AUTH_TOKEN}
- LOG_LEVEL=${LOG_LEVEL:-info}
ports:
- "${MCP_SERVER_PORT:-3000}:3000"
networks:
- n8n-network
depends_on:
- n8n
n8n:
image: n8nio/n8n:latest
container_name: n8n
restart: unless-stopped
ports:
- "5678:5678"
environment:
- N8N_BASIC_AUTH_ACTIVE=true
- N8N_BASIC_AUTH_USER=${N8N_BASIC_AUTH_USER:-admin}
- N8N_BASIC_AUTH_PASSWORD=${N8N_BASIC_AUTH_PASSWORD:-password}
- N8N_HOST=${N8N_HOST:-localhost}
- N8N_PORT=5678
- N8N_PROTOCOL=${N8N_PROTOCOL:-http}
- NODE_ENV=production
- WEBHOOK_URL=${WEBHOOK_URL:-http://localhost:5678/}
- GENERIC_TIMEZONE=${GENERIC_TIMEZONE:-UTC}
container_name: n8n-docs-mcp
volumes:
- n8n_data:/home/node/.n8n
- ./n8n-custom-nodes:/home/node/.n8n/custom
networks:
- n8n-network
- ./data:/app/data
environment:
- NODE_ENV=production
- NODE_DB_PATH=/app/data/nodes-v2.db
- MCP_LOG_LEVEL=info
ports:
- "3000:3000" # Only needed if using HTTP mode
command: node dist/index-v2.js
restart: unless-stopped
networks:
n8n-network:
driver: bridge
# HTTP mode (for remote access)
n8n-docs-mcp-http:
build: .
container_name: n8n-docs-mcp-http
volumes:
- ./data:/app/data
environment:
- NODE_ENV=production
- NODE_DB_PATH=/app/data/nodes-v2.db
- MCP_LOG_LEVEL=info
- MCP_PORT=3000
- MCP_HOST=0.0.0.0
- MCP_DOMAIN=${MCP_DOMAIN:-localhost}
- MCP_AUTH_TOKEN=${MCP_AUTH_TOKEN}
- MCP_CORS=true
ports:
- "3000:3000"
command: node dist/index-http.js
restart: unless-stopped
profiles:
- http
volumes:
n8n_data:
# Usage:
# Local mode: docker-compose up n8n-docs-mcp
# HTTP mode: docker-compose --profile http up n8n-docs-mcp-http
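A minimal way to bring up the HTTP-mode service defined above, assuming the two referenced variables are supplied from the shell (docker-compose also reads them from a `.env` file); the domain and token values are placeholders:

```bash
# Sketch: placeholder domain, randomly generated token.
export MCP_DOMAIN=docs.example.com
export MCP_AUTH_TOKEN="$(openssl rand -hex 32)"
docker-compose --profile http up -d n8n-docs-mcp-http
```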

View File

@@ -1,22 +1,35 @@
# AI Agent Node Extraction Test Guide
This document describes how to test the MCP server's ability to extract and provide the AI Agent node source code from n8n.
This document describes how to test the n8n Documentation MCP Server's ability to extract and provide node source code, including that of n8n's AI Agent node.
## Overview
The n8n Documentation MCP Server provides comprehensive node information including:
- Source code extraction
- Official documentation
- Usage examples
- Node metadata
## Test Scenario
An MCP client (like an AI assistant) requests the source code for n8n's AI Agent node, and the MCP server successfully extracts and returns it.
An MCP client (like Claude) requests the source code for n8n's AI Agent node, and the documentation server successfully extracts and returns it.
## Implementation Overview
### 1. New MCP Tools Added
### 1. Available MCP Tools
- **`get_node_source_code`**: Extracts source code for any n8n node
- **`list_available_nodes`**: Lists all available n8n nodes
- **`get_node_info`**: Gets complete node information including docs and examples
- **`list_nodes`**: Lists all available n8n nodes
- **`search_nodes`**: Search nodes by name or content
- **`get_node_documentation`**: Gets only the documentation for a node
- **`get_node_example`**: Gets example workflow for a node
### 2. New Components
### 2. Key Components
- **`NodeSourceExtractor`** (`src/utils/node-source-extractor.ts`): Handles file system access to extract node source code
- **Resource endpoint**: `nodes://source/{nodeType}` for accessing node code via resources
- **`NodeDocumentationService`** (`src/services/node-documentation-service.ts`): Manages SQLite database with node information
- **`DocumentationFetcher`** (`src/utils/documentation-fetcher.ts`): Fetches docs from n8n-docs repository
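Because `NodeDocumentationService` stores everything in SQLite, one quick sanity check for the AI Agent scenario above is to query the rebuilt database directly. The path matches the rest of this commit; the `nodes` table and its `node_type`/`display_name` columns are assumed to carry over from the queries shown elsewhere in the updated docs.

```bash
# Sanity check; assumes the v2 schema keeps the columns used in the docs' example queries.
sqlite3 data/nodes-v2.db \
  "SELECT node_type, display_name FROM nodes WHERE node_type LIKE '%Agent%';"
```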
### 3. Test Infrastructure

View File

@@ -1,188 +0,0 @@
# n8n-MCP Implementation Roadmap
## ✅ Completed Features
### 1. Core MCP Server Implementation
- [x] Basic MCP server with stdio transport
- [x] Tool handlers for n8n workflow operations
- [x] Resource handlers for workflow data
- [x] Authentication and error handling
### 2. n8n Integration
- [x] n8n API client for workflow management
- [x] MCP<->n8n data bridge for format conversion
- [x] Workflow execution and monitoring
### 3. Node Source Extraction
- [x] Extract source code from any n8n node
- [x] Handle pnpm directory structures
- [x] Support for AI Agent node extraction
- [x] Bulk extraction capabilities
### 4. Node Storage System
- [x] In-memory storage service
- [x] Search functionality
- [x] Package statistics
- [x] Database export format
## 🚧 Next Implementation Steps
### Phase 1: Database Integration (Priority: High)
1. **Real Database Backend**
- [ ] Add PostgreSQL/SQLite support
- [ ] Implement proper migrations
- [ ] Add connection pooling
- [ ] Transaction support
2. **Enhanced Storage Features**
- [ ] Version tracking for nodes
- [ ] Diff detection for updates
- [ ] Backup/restore functionality
- [ ] Data compression
### Phase 2: Advanced Search & Analysis (Priority: High)
1. **Full-Text Search**
- [ ] Elasticsearch/MeiliSearch integration
- [ ] Code analysis and indexing
- [ ] Semantic search capabilities
- [ ] Search by functionality
2. **Node Analysis**
- [ ] Dependency graph generation
- [ ] Security vulnerability scanning
- [ ] Performance profiling
- [ ] Code quality metrics
### Phase 3: AI Integration (Priority: Medium)
1. **AI-Powered Features**
- [ ] Node recommendation system
- [ ] Workflow generation from descriptions
- [ ] Code explanation generation
- [ ] Automatic documentation
2. **Vector Database**
- [ ] Node embeddings generation
- [ ] Similarity search
- [ ] Clustering similar nodes
- [ ] AI training data export
### Phase 4: n8n Node Development (Priority: Medium)
1. **MCPNode Enhancements**
- [ ] Dynamic tool discovery
- [ ] Streaming responses
- [ ] File upload/download
- [ ] WebSocket support
2. **Custom Node Features**
- [ ] Visual configuration UI
- [ ] Credential management
- [ ] Error handling improvements
- [ ] Performance monitoring
### Phase 5: API & Web Interface (Priority: Low)
1. **REST/GraphQL API**
- [ ] Node search API
- [ ] Statistics dashboard
- [ ] Webhook notifications
- [ ] Rate limiting
2. **Web Dashboard**
- [ ] Node browser interface
- [ ] Code viewer with syntax highlighting
- [ ] Search interface
- [ ] Analytics dashboard
### Phase 6: Production Features (Priority: Low)
1. **Deployment**
- [ ] Kubernetes manifests
- [ ] Helm charts
- [ ] Auto-scaling configuration
- [ ] Health checks
2. **Monitoring**
- [ ] Prometheus metrics
- [ ] Grafana dashboards
- [ ] Log aggregation
- [ ] Alerting rules
## 🎯 Immediate Next Steps
1. **Database Integration** (Week 1-2)
```typescript
// Add to package.json
"typeorm": "^0.3.x",
"pg": "^8.x"
// Create entities/Node.entity.ts
@Entity()
export class Node {
@PrimaryGeneratedColumn('uuid')
id: string;
@Column({ unique: true })
nodeType: string;
@Column('text')
sourceCode: string;
// ... etc
}
```
2. **Add Database MCP Tools** (Week 2)
```typescript
// New tools:
- sync_nodes_to_database
- query_nodes_database
- export_nodes_for_training
```
3. **Create Migration Scripts** (Week 2-3)
```bash
npm run migrate:create -- CreateNodesTable
npm run migrate:run
```
4. **Implement Caching Layer** (Week 3)
- Redis for frequently accessed nodes
- LRU cache for search results
- Invalidation strategies
5. **Add Real-Time Updates** (Week 4)
- WebSocket server for live updates
- Node change notifications
- Workflow execution streaming
## 📊 Success Metrics
- [ ] Extract and store 100% of n8n nodes
- [ ] Search response time < 100ms
- [ ] Support for 10k+ stored nodes
- [ ] 99.9% uptime for MCP server
- [ ] Full-text search accuracy > 90%
## 🔗 Integration Points
1. **n8n Community Store**
- Sync with community nodes
- Version tracking
- Popularity metrics
2. **AI Platforms**
- OpenAI fine-tuning exports
- Anthropic training data
- Local LLM integration
3. **Development Tools**
- VS Code extension
- CLI tools
- SDK libraries
## 📝 Documentation Needs
- [ ] API reference documentation
- [ ] Database schema documentation
- [ ] Search query syntax guide
- [ ] Performance tuning guide
- [ ] Security best practices
This roadmap provides a clear path forward for the n8n-MCP project, with the most critical next step being proper database integration to persist the extracted node data.

View File

@@ -1,192 +1,210 @@
# Production Deployment Guide for n8n-MCP
# Production Deployment Guide
This guide provides instructions for deploying n8n-MCP in a production environment.
This guide covers deploying the n8n Documentation MCP Server in production environments.
## Prerequisites
## Overview
- Docker and Docker Compose v2 installed
- Node.js 18+ installed (for building)
- At least 2GB of available RAM
- 1GB of available disk space
The n8n Documentation MCP Server provides node documentation and source code to AI assistants. It can be deployed:
- **Locally** - Using stdio transport for Claude Desktop on the same machine
- **Remotely** - Using HTTP transport for access over the internet
## Quick Start
For remote deployment with full VM setup instructions, see [REMOTE_DEPLOYMENT.md](./REMOTE_DEPLOYMENT.md).
1. **Clone the repository**
## Local Production Deployment
### Prerequisites
- Node.js 18+ installed
- Git installed
- 500MB available disk space
### Quick Start
1. **Clone and setup**
```bash
git clone https://github.com/yourusername/n8n-mcp.git
cd n8n-mcp
```
2. **Run the deployment script**
```bash
./scripts/deploy-production.sh
```
This script will:
- Check prerequisites
- Create a secure `.env` file with generated passwords
- Build the project
- Create Docker images
- Start all services
- Initialize the node database
3. **Access n8n**
- URL: `http://localhost:5678`
- Use the credentials displayed during deployment
## Manual Deployment
If you prefer manual deployment:
1. **Create .env file**
```bash
cp .env.example .env
# Edit .env with your configuration
```
2. **Build the project**
```bash
npm install
npm run build
```
3. **Start services**
2. **Initialize database**
```bash
docker compose -f docker-compose.prod.yml up -d
npm run db:rebuild:v2
```
## Configuration
3. **Configure Claude Desktop**
Edit Claude Desktop config (see README.md for paths):
```json
{
"mcpServers": {
"n8n-nodes": {
"command": "node",
"args": ["/absolute/path/to/n8n-mcp/dist/index-v2.js"],
"env": {
"NODE_DB_PATH": "/absolute/path/to/n8n-mcp/data/nodes-v2.db"
}
}
}
}
```
### Environment Variables
## Docker Deployment
| Variable | Description | Default |
|----------|-------------|---------|
| `N8N_BASIC_AUTH_USER` | n8n admin username | admin |
| `N8N_BASIC_AUTH_PASSWORD` | n8n admin password | (generated) |
| `N8N_HOST` | n8n hostname | localhost |
| `N8N_API_KEY` | API key for n8n access | (generated) |
| `NODE_DB_PATH` | SQLite database path | /app/data/nodes.db |
| `LOG_LEVEL` | Logging level | info |
### Using Docker Compose
### Volumes
1. **Create docker-compose.yml**
```yaml
version: '3.8'
services:
n8n-docs-mcp:
build: .
volumes:
- ./data:/app/data
environment:
- NODE_ENV=production
- NODE_DB_PATH=/app/data/nodes-v2.db
command: node dist/index-v2.js
```
The deployment creates persistent volumes:
- `n8n-data`: n8n workflows and credentials
- `mcp-data`: MCP node database
- `n8n-node-modules`: Read-only n8n node modules
2. **Build and run**
```bash
docker-compose up -d
```
## Management
### Using Dockerfile
Use the management script for common operations:
```dockerfile
FROM node:18-alpine
```bash
# Check service status
./scripts/manage-production.sh status
WORKDIR /app
# View logs
./scripts/manage-production.sh logs
# Copy package files
COPY package*.json ./
RUN npm ci --only=production
# Rebuild node database
./scripts/manage-production.sh rebuild-db
# Copy built files
COPY dist/ ./dist/
COPY data/ ./data/
# Show database statistics
./scripts/manage-production.sh db-stats
# Set environment
ENV NODE_ENV=production
ENV NODE_DB_PATH=/app/data/nodes-v2.db
# Create backup
./scripts/manage-production.sh backup
# Update services
./scripts/manage-production.sh update
# Run the server
CMD ["node", "dist/index-v2.js"]
```
## Database Management
### Initial Database Population
### Automatic Rebuilds
The database is automatically populated on first startup. To manually rebuild:
Schedule regular database updates to get the latest node documentation:
```bash
docker compose -f docker-compose.prod.yml exec n8n-mcp node dist/scripts/rebuild-database.js
# Add to crontab
0 2 * * * cd /path/to/n8n-mcp && npm run db:rebuild:v2
```
### Database Queries
### Manual Rebuild
Search for nodes:
```bash
docker compose -f docker-compose.prod.yml exec n8n-mcp sqlite3 /app/data/nodes.db \
"SELECT node_type, display_name FROM nodes WHERE name LIKE '%webhook%';"
npm run db:rebuild:v2
```
## Security Considerations
### Database Location
1. **Change default passwords**: Always change the generated passwords in production
2. **Use HTTPS**: Configure a reverse proxy (nginx, traefik) for HTTPS
3. **Firewall**: Restrict access to ports 5678
4. **API Keys**: Keep API keys secure and rotate regularly
5. **Backups**: Regular backup of data volumes
The SQLite database is stored at: `data/nodes-v2.db`
### Backup
```bash
# Simple backup
cp data/nodes-v2.db data/nodes-v2.db.backup
# Timestamped backup
cp data/nodes-v2.db "data/nodes-v2-$(date +%Y%m%d-%H%M%S).db"
```
## Monitoring
### Health Checks
### Database Statistics
Both services include health checks:
- n8n: `http://localhost:5678/healthz`
- MCP: Database file existence check
Check the database status:
```bash
# Using SQLite directly
sqlite3 data/nodes-v2.db "SELECT COUNT(*) as total_nodes FROM nodes;"
# Using the MCP tool (in Claude)
# "Get database statistics for n8n nodes"
```
### Logs
View logs for debugging:
For local deployment:
```bash
# All services
docker compose -f docker-compose.prod.yml logs -f
# Specific service
docker compose -f docker-compose.prod.yml logs -f n8n-mcp
# Run with logging
NODE_ENV=production node dist/index-v2.js 2>&1 | tee app.log
```
## Performance Optimization
### SQLite Optimization
The database uses these optimizations by default:
- WAL mode for better concurrency
- Memory-mapped I/O
- Full-text search indexes
### System Requirements
- **Minimum**: 256MB RAM, 500MB disk
- **Recommended**: 512MB RAM, 1GB disk
- **CPU**: Minimal requirements (mostly I/O bound)
## Security
### Local Deployment
- No network exposure (stdio only)
- File system permissions control access
- No authentication needed
### Remote Deployment
See [REMOTE_DEPLOYMENT.md](./REMOTE_DEPLOYMENT.md) for:
- HTTPS configuration
- Authentication setup
- Firewall rules
- Security best practices
## Troubleshooting
### Database Issues
If the database is corrupted or needs rebuilding:
If the database is missing or corrupted:
```bash
# Stop services
docker compose -f docker-compose.prod.yml stop
# Remove database
docker compose -f docker-compose.prod.yml exec n8n-mcp rm /app/data/nodes.db
# Start services (database will rebuild)
docker compose -f docker-compose.prod.yml start
# Rebuild from scratch
rm data/nodes-v2.db
npm run db:rebuild:v2
```
### Memory Issues
If services run out of memory, increase Docker memory limits:
```yaml
# In docker-compose.prod.yml
services:
n8n-mcp:
deploy:
resources:
limits:
memory: 1G
If running on limited memory:
```bash
# Limit Node.js memory usage
NODE_OPTIONS="--max-old-space-size=256" node dist/index-v2.js
```
### Connection Issues
### Permission Issues
If n8n can't connect to MCP:
1. Check both services are running: `docker compose -f docker-compose.prod.yml ps`
2. Verify network connectivity: `docker compose -f docker-compose.prod.yml exec n8n ping n8n-mcp`
3. Check MCP logs: `docker compose -f docker-compose.prod.yml logs n8n-mcp`
## Scaling
For high-availability deployments:
1. **Database Replication**: Use external SQLite replication or migrate to PostgreSQL
2. **Load Balancing**: Deploy multiple MCP instances behind a load balancer
3. **Caching**: Implement Redis caching for frequently accessed nodes
Ensure proper file permissions:
```bash
chmod 644 data/nodes-v2.db
chmod 755 data/
```
## Updates
@@ -196,12 +214,19 @@ To update to the latest version:
# Pull latest code
git pull
# Rebuild and restart
./scripts/manage-production.sh update
# Install dependencies
npm install
# Rebuild
npm run build
# Rebuild database
npm run db:rebuild:v2
```
## Support
For issues and questions:
- GitHub Issues: [your-repo-url]/issues
- Documentation: [your-docs-url]
- GitHub Issues: https://github.com/yourusername/n8n-mcp/issues
- Check logs for error messages
- Verify database integrity
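The 2.1.0 changelog mentions PM2 and Nginx examples for remote setups; as a minimal sketch under that assumption (process name and values below are placeholders, not the documented configuration), the HTTP server can be kept running with PM2:

```bash
# Minimal PM2 sketch for the HTTP transport; all values are placeholders.
MCP_PORT=3000 MCP_DOMAIN=docs.example.com MCP_AUTH_TOKEN=change-me \
  pm2 start dist/index-http.js --name n8n-docs-mcp
pm2 save   # persist the process list across restarts
```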

View File

@@ -1,66 +0,0 @@
#!/bin/bash
echo "🚀 Quick n8n-MCP Node Extraction Test"
echo "===================================="
echo ""
# Start services
echo "1. Starting Docker services..."
docker compose -f docker-compose.test.yml up -d
echo ""
echo "2. Waiting for services to start (30 seconds)..."
sleep 30
echo ""
echo "3. Testing AI Agent node extraction..."
docker compose -f docker-compose.test.yml run --rm n8n-mcp node -e "
const { NodeSourceExtractor } = require('./dist/utils/node-source-extractor');
async function test() {
const extractor = new NodeSourceExtractor();
console.log('\\n🔍 Extracting AI Agent node...');
try {
const result = await extractor.extractNodeSource('@n8n/n8n-nodes-langchain.Agent');
console.log('✅ SUCCESS!');
console.log('📦 Node Type:', result.nodeType);
console.log('📏 Code Size:', result.sourceCode.length, 'bytes');
console.log('📍 Location:', result.location);
console.log('\\n📄 First 200 characters of code:');
console.log(result.sourceCode.substring(0, 200) + '...');
} catch (error) {
console.log('❌ FAILED:', error.message);
}
}
test();
"
echo ""
echo "4. Listing available AI nodes..."
docker compose -f docker-compose.test.yml run --rm n8n-mcp node -e "
const { NodeSourceExtractor } = require('./dist/utils/node-source-extractor');
async function test() {
const extractor = new NodeSourceExtractor();
console.log('\\n📋 Listing AI/LangChain nodes...');
const nodes = await extractor.listAvailableNodes();
const aiNodes = nodes.filter(n => n.location && n.location.includes('langchain'));
console.log('Found', aiNodes.length, 'AI nodes:');
aiNodes.slice(0, 10).forEach(node => {
console.log(' -', node.name);
});
}
test();
"
echo ""
echo "5. Cleaning up..."
docker compose -f docker-compose.test.yml down -v
echo ""
echo "✅ Test complete!"

View File

@@ -1,140 +0,0 @@
#!/bin/bash
# Production deployment script for n8n-MCP
set -e
echo "🚀 n8n-MCP Production Deployment Script"
echo "======================================"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
# Function to check if command exists
command_exists() {
command -v "$1" >/dev/null 2>&1
}
# Function to generate secure password
generate_password() {
openssl rand -base64 32 | tr -d "=+/" | cut -c1-25
}
# Check prerequisites
echo -e "\n${YELLOW}Checking prerequisites...${NC}"
if ! command_exists docker; then
echo -e "${RED}❌ Docker is not installed. Please install Docker first.${NC}"
exit 1
fi
if ! command_exists docker compose; then
echo -e "${RED}❌ Docker Compose v2 is not installed. Please install Docker Compose v2.${NC}"
exit 1
fi
if ! command_exists node; then
echo -e "${RED}❌ Node.js is not installed. Please install Node.js first.${NC}"
exit 1
fi
echo -e "${GREEN}✅ All prerequisites met${NC}"
# Check for .env file
if [ ! -f .env ]; then
echo -e "\n${YELLOW}Creating .env file...${NC}"
# Generate secure passwords
N8N_BASIC_AUTH_PASSWORD=$(generate_password)
N8N_API_KEY=$(generate_password)
cat > .env << EOF
# n8n Configuration
N8N_BASIC_AUTH_USER=admin
N8N_BASIC_AUTH_PASSWORD=${N8N_BASIC_AUTH_PASSWORD}
N8N_HOST=localhost
N8N_API_KEY=${N8N_API_KEY}
# MCP Configuration
MCP_LOG_LEVEL=info
NODE_ENV=production
EOF
echo -e "${GREEN}✅ Created .env file with secure defaults${NC}"
echo -e "${YELLOW}⚠️ Please note your credentials:${NC}"
echo -e " n8n Username: admin"
echo -e " n8n Password: ${N8N_BASIC_AUTH_PASSWORD}"
echo -e " API Key: ${N8N_API_KEY}"
echo -e "${YELLOW} Save these credentials securely!${NC}"
else
echo -e "${GREEN}✅ Using existing .env file${NC}"
fi
# Build the project
echo -e "\n${YELLOW}Building the project...${NC}"
npm install
npm run build
if [ $? -ne 0 ]; then
echo -e "${RED}❌ Build failed. Please fix the errors and try again.${NC}"
exit 1
fi
echo -e "${GREEN}✅ Build completed successfully${NC}"
# Build Docker image
echo -e "\n${YELLOW}Building Docker image...${NC}"
docker compose -f docker-compose.prod.yml build
if [ $? -ne 0 ]; then
echo -e "${RED}❌ Docker build failed. Please check the logs.${NC}"
exit 1
fi
echo -e "${GREEN}✅ Docker image built successfully${NC}"
# Start services
echo -e "\n${YELLOW}Starting services...${NC}"
docker compose -f docker-compose.prod.yml up -d
if [ $? -ne 0 ]; then
echo -e "${RED}❌ Failed to start services. Please check the logs.${NC}"
exit 1
fi
echo -e "${GREEN}✅ Services started successfully${NC}"
# Wait for services to be healthy
echo -e "\n${YELLOW}Waiting for services to be healthy...${NC}"
sleep 10
# Check service health
N8N_HEALTH=$(docker compose -f docker-compose.prod.yml ps n8n --format json | jq -r '.[0].Health // "unknown"')
MCP_HEALTH=$(docker compose -f docker-compose.prod.yml ps n8n-mcp --format json | jq -r '.[0].Health // "unknown"')
if [ "$N8N_HEALTH" = "healthy" ] && [ "$MCP_HEALTH" = "healthy" ]; then
echo -e "${GREEN}✅ All services are healthy${NC}"
else
echo -e "${YELLOW}⚠️ Services might still be starting up...${NC}"
echo -e " n8n status: $N8N_HEALTH"
echo -e " MCP server status: $MCP_HEALTH"
fi
# Display access information
echo -e "\n${GREEN}🎉 Deployment completed successfully!${NC}"
echo -e "\n${YELLOW}Access Information:${NC}"
echo -e " n8n UI: http://localhost:5678"
echo -e " MCP Server: Running internally (accessible by n8n)"
echo -e "\n${YELLOW}Next Steps:${NC}"
echo -e " 1. Access n8n at http://localhost:5678"
echo -e " 2. Log in with the credentials from .env file"
echo -e " 3. Create a new workflow and add the MCP node"
echo -e " 4. Configure the MCP node to connect to the internal server"
echo -e "\n${YELLOW}Useful Commands:${NC}"
echo -e " View logs: docker compose -f docker-compose.prod.yml logs -f"
echo -e " Stop services: docker compose -f docker-compose.prod.yml down"
echo -e " Rebuild database: docker compose -f docker-compose.prod.yml exec n8n-mcp node dist/scripts/rebuild-database.js"
echo -e " View database stats: docker compose -f docker-compose.prod.yml exec n8n-mcp sqlite3 /app/data/nodes.db 'SELECT COUNT(*) as total_nodes FROM nodes;'"

View File

@@ -1,50 +0,0 @@
#!/bin/bash
# Script to install the MCP node into n8n
set -e
echo "Installing n8n MCP node..."
# Build the project
echo "Building project..."
npm run build
# Create custom nodes directory if it doesn't exist
N8N_CUSTOM_DIR="${N8N_CUSTOM_DIR:-$HOME/.n8n/custom}"
mkdir -p "$N8N_CUSTOM_DIR/nodes/n8n-mcp"
# Copy node files
echo "Copying node files to n8n custom directory..."
cp dist/n8n/MCPNode.node.js "$N8N_CUSTOM_DIR/nodes/n8n-mcp/"
cp dist/n8n/MCPApi.credentials.js "$N8N_CUSTOM_DIR/nodes/n8n-mcp/"
# Copy utils for the node to work
mkdir -p "$N8N_CUSTOM_DIR/nodes/n8n-mcp/utils"
cp -r dist/utils/* "$N8N_CUSTOM_DIR/nodes/n8n-mcp/utils/"
# Create package.json for the custom node
cat > "$N8N_CUSTOM_DIR/nodes/n8n-mcp/package.json" << EOF
{
"name": "n8n-nodes-mcp",
"version": "1.0.0",
"description": "MCP integration for n8n",
"n8n": {
"n8nNodesApiVersion": 1,
"credentials": [
"dist/n8n/MCPApi.credentials.js"
],
"nodes": [
"dist/n8n/MCPNode.node.js"
]
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.12.1"
}
}
EOF
echo "MCP node installed successfully!"
echo "Please restart n8n for the changes to take effect."
echo ""
echo "Custom node location: $N8N_CUSTOM_DIR/nodes/n8n-mcp"

View File

@@ -1,178 +0,0 @@
#!/bin/bash
# Production management script for n8n-MCP
set -e
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Display usage
usage() {
echo -e "${BLUE}n8n-MCP Production Management Script${NC}"
echo -e "${YELLOW}Usage:${NC} $0 [command]"
echo
echo -e "${YELLOW}Commands:${NC}"
echo " status - Show service status"
echo " logs - View service logs"
echo " start - Start all services"
echo " stop - Stop all services"
echo " restart - Restart all services"
echo " rebuild-db - Rebuild the node database"
echo " db-stats - Show database statistics"
echo " backup - Backup data volumes"
echo " restore - Restore data volumes from backup"
echo " update - Update services to latest versions"
echo " shell - Open shell in MCP container"
echo
exit 1
}
# Check if docker compose file exists
if [ ! -f "docker-compose.prod.yml" ]; then
echo -e "${RED}❌ docker-compose.prod.yml not found. Run this script from the project root.${NC}"
exit 1
fi
# Main command handling
case "$1" in
status)
echo -e "${YELLOW}Service Status:${NC}"
docker compose -f docker-compose.prod.yml ps
;;
logs)
if [ -z "$2" ]; then
docker compose -f docker-compose.prod.yml logs -f --tail=100
else
docker compose -f docker-compose.prod.yml logs -f --tail=100 "$2"
fi
;;
start)
echo -e "${YELLOW}Starting services...${NC}"
docker compose -f docker-compose.prod.yml up -d
echo -e "${GREEN}✅ Services started${NC}"
;;
stop)
echo -e "${YELLOW}Stopping services...${NC}"
docker compose -f docker-compose.prod.yml down
echo -e "${GREEN}✅ Services stopped${NC}"
;;
restart)
echo -e "${YELLOW}Restarting services...${NC}"
docker compose -f docker-compose.prod.yml restart
echo -e "${GREEN}✅ Services restarted${NC}"
;;
rebuild-db)
echo -e "${YELLOW}Rebuilding node database...${NC}"
docker compose -f docker-compose.prod.yml exec n8n-mcp node dist/scripts/rebuild-database.js
;;
db-stats)
echo -e "${YELLOW}Database Statistics:${NC}"
docker compose -f docker-compose.prod.yml exec n8n-mcp sqlite3 /app/data/nodes.db << 'EOF'
.headers on
.mode column
SELECT
COUNT(*) as total_nodes,
COUNT(DISTINCT package_name) as total_packages,
ROUND(SUM(code_length) / 1024.0 / 1024.0, 2) as total_size_mb,
ROUND(AVG(code_length) / 1024.0, 2) as avg_size_kb
FROM nodes;
.print
.print "Top 10 packages by node count:"
SELECT package_name, COUNT(*) as node_count
FROM nodes
GROUP BY package_name
ORDER BY node_count DESC
LIMIT 10;
EOF
;;
backup)
BACKUP_DIR="backups/$(date +%Y%m%d_%H%M%S)"
echo -e "${YELLOW}Creating backup in ${BACKUP_DIR}...${NC}"
mkdir -p "$BACKUP_DIR"
# Stop services for consistent backup
docker compose -f docker-compose.prod.yml stop
# Backup volumes
docker run --rm -v n8n-mcp_n8n-data:/source -v $(pwd)/$BACKUP_DIR:/backup alpine tar czf /backup/n8n-data.tar.gz -C /source .
docker run --rm -v n8n-mcp_mcp-data:/source -v $(pwd)/$BACKUP_DIR:/backup alpine tar czf /backup/mcp-data.tar.gz -C /source .
# Copy .env file
cp .env "$BACKUP_DIR/"
# Restart services
docker compose -f docker-compose.prod.yml start
echo -e "${GREEN}✅ Backup completed in ${BACKUP_DIR}${NC}"
;;
restore)
if [ -z "$2" ]; then
echo -e "${RED}❌ Please specify backup directory (e.g., backups/20240107_120000)${NC}"
exit 1
fi
if [ ! -d "$2" ]; then
echo -e "${RED}❌ Backup directory $2 not found${NC}"
exit 1
fi
echo -e "${YELLOW}⚠️ This will replace all current data! Continue? (y/N)${NC}"
read -r confirm
if [ "$confirm" != "y" ]; then
echo "Restore cancelled"
exit 0
fi
echo -e "${YELLOW}Restoring from $2...${NC}"
# Stop services
docker compose -f docker-compose.prod.yml down
# Restore volumes
docker run --rm -v n8n-mcp_n8n-data:/target -v $(pwd)/$2:/backup alpine tar xzf /backup/n8n-data.tar.gz -C /target
docker run --rm -v n8n-mcp_mcp-data:/target -v $(pwd)/$2:/backup alpine tar xzf /backup/mcp-data.tar.gz -C /target
# Start services
docker compose -f docker-compose.prod.yml up -d
echo -e "${GREEN}✅ Restore completed${NC}"
;;
update)
echo -e "${YELLOW}Updating services...${NC}"
# Pull latest images
docker compose -f docker-compose.prod.yml pull
# Rebuild MCP image
docker compose -f docker-compose.prod.yml build
# Restart with new images
docker compose -f docker-compose.prod.yml up -d
echo -e "${GREEN}✅ Services updated${NC}"
;;
shell)
echo -e "${YELLOW}Opening shell in MCP container...${NC}"
docker compose -f docker-compose.prod.yml exec n8n-mcp /bin/sh
;;
*)
usage
;;
esac

View File

@@ -1,141 +0,0 @@
#!/bin/bash
# Test script for AI Agent node extraction
set -e
echo "=== AI Agent Node Extraction Test ==="
echo
# Colors for output
GREEN='\033[0;32m'
RED='\033[0;31m'
NC='\033[0m' # No Color
# Check if Docker is running
if ! docker info > /dev/null 2>&1; then
echo -e "${RED}Error: Docker is not running${NC}"
exit 1
fi
echo "1. Building the project..."
npm run build
echo
echo "2. Building Docker image..."
docker compose -f docker-compose.test.yml build
echo
echo "3. Starting test environment..."
docker compose -f docker-compose.test.yml up -d
echo
echo "4. Waiting for services to be ready..."
sleep 10
# Wait for n8n to be healthy
echo " Waiting for n8n to be ready..."
for i in {1..30}; do
if docker compose -f docker-compose.test.yml exec n8n wget --spider -q http://localhost:5678/healthz 2>/dev/null; then
echo -e " ${GREEN}✓ n8n is ready${NC}"
break
fi
echo -n "."
sleep 2
done
echo
echo "5. Running MCP client test..."
# Create a simple test using the MCP server directly
docker compose -f docker-compose.test.yml exec n8n-mcp node -e "
const http = require('http');
// Test data
const testRequest = {
jsonrpc: '2.0',
id: 1,
method: 'tools/call',
params: {
name: 'get_node_source_code',
arguments: {
nodeType: '@n8n/n8n-nodes-langchain.Agent',
includeCredentials: true
}
}
};
// Since MCP server uses stdio, we'll test via the n8n API first
console.log('Testing node extraction...');
// First, let's check if the node exists in the container
const fs = require('fs');
const possiblePaths = [
'/usr/local/lib/node_modules/n8n/node_modules/@n8n/n8n-nodes-langchain/dist/nodes/agents/Agent/Agent.node.js',
'/usr/local/lib/node_modules/n8n/node_modules/@n8n/n8n-nodes-langchain/dist/nodes/Agent.node.js',
'/app/node_modules/@n8n/n8n-nodes-langchain/dist/nodes/agents/Agent/Agent.node.js'
];
let found = false;
for (const path of possiblePaths) {
try {
if (fs.existsSync(path)) {
console.log('✓ Found AI Agent node at:', path);
const content = fs.readFileSync(path, 'utf8');
console.log('✓ File size:', content.length, 'bytes');
console.log('✓ First 200 characters:');
console.log(content.substring(0, 200) + '...');
found = true;
break;
}
} catch (e) {
// Continue checking
}
}
if (!found) {
console.log('⚠️ AI Agent node not found in expected locations');
console.log('Checking installed packages...');
try {
const packages = fs.readdirSync('/usr/local/lib/node_modules/n8n/node_modules/@n8n/');
console.log('Available @n8n packages:', packages);
} catch (e) {
console.log('Could not list @n8n packages');
}
}
"
echo
echo "6. Alternative test - Direct file system check..."
docker compose -f docker-compose.test.yml exec n8n find /usr/local/lib/node_modules -name "*Agent*.node.js" -type f 2>/dev/null | head -10 || true
echo
echo "7. Test using curl to n8n API..."
# Get available node types from n8n
NODE_TYPES=$(docker compose -f docker-compose.test.yml exec n8n curl -s http://localhost:5678/api/v1/node-types | jq -r '.data[].name' | grep -i agent | head -5) || true
if [ -n "$NODE_TYPES" ]; then
echo -e "${GREEN}✓ Found Agent nodes in n8n:${NC}"
echo "$NODE_TYPES"
else
echo -e "${RED}✗ No Agent nodes found in n8n${NC}"
fi
echo
echo "8. Cleanup..."
read -p "Stop test environment? (y/n) " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
docker compose -f docker-compose.test.yml down
echo -e "${GREEN}✓ Test environment stopped${NC}"
fi
echo
echo "=== Test Summary ==="
echo "The test demonstrated:"
echo "1. MCP server can be built and run in Docker"
echo "2. Node source code extraction mechanism is in place"
echo "3. File system access is configured for reading n8n nodes"
echo
echo "Note: The AI Agent node requires n8n-nodes-langchain package to be installed."
echo "To fully test, ensure n8n has the langchain nodes installed."