chore: clean up development artifacts and update .gitignore

- Remove AI agent coordination files and progress tracking
- Remove temporary test results and generated artifacts
- Remove diagnostic test scripts from src/scripts/
- Remove development planning documents
- Update .gitignore to exclude test artifacts (illustrative sketch below)
- Clean up 53 temporary files total
Author: czlonkowski
Date: 2025-07-30 09:22:53 +02:00
parent f4c776f43b
commit 07cda6e3ab
54 changed files with 8 additions and 13666 deletions
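The eight lines added to .gitignore are not shown in this view. Purely as an illustration of the kind of patterns such a cleanup implies (hypothetical entries, not the actual additions):

# test artifacts (illustrative only — the real additions are not shown here)
test-results/
*.test-output.json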

View File

@@ -1,106 +0,0 @@
#!/usr/bin/env node
import axios from 'axios';
import { config } from 'dotenv';
// Load environment variables
config();
async function debugN8nAuth() {
const apiUrl = process.env.N8N_API_URL;
const apiKey = process.env.N8N_API_KEY;
if (!apiUrl || !apiKey) {
console.error('Error: N8N_API_URL and N8N_API_KEY environment variables are required');
console.error('Please set them in your .env file or environment');
process.exit(1);
}
console.log('Testing n8n API Authentication...');
console.log('API URL:', apiUrl);
console.log('API Key:', apiKey.substring(0, 20) + '...');
// Test 1: Direct health check
console.log('\n=== Test 1: Direct Health Check (no auth) ===');
try {
const healthResponse = await axios.get(`${apiUrl}/api/v1/health`);
console.log('Health Response:', healthResponse.data);
} catch (error: any) {
console.log('Health Check Error:', error.response?.status, error.response?.data || error.message);
}
// Test 2: Workflows with API key
console.log('\n=== Test 2: List Workflows (with auth) ===');
try {
const workflowsResponse = await axios.get(`${apiUrl}/api/v1/workflows`, {
headers: {
'X-N8N-API-KEY': apiKey,
'Content-Type': 'application/json'
},
params: { limit: 1 }
});
console.log('Workflows Response:', workflowsResponse.data);
} catch (error: any) {
console.log('Workflows Error:', error.response?.status, error.response?.data || error.message);
if (error.response?.headers) {
console.log('Response Headers:', error.response.headers);
}
}
// Test 3: Try different auth header formats
console.log('\n=== Test 3: Alternative Auth Headers ===');
// Try Bearer token
try {
const bearerResponse = await axios.get(`${apiUrl}/api/v1/workflows`, {
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
},
params: { limit: 1 }
});
console.log('Bearer Auth Success:', bearerResponse.data);
} catch (error: any) {
console.log('Bearer Auth Error:', error.response?.status);
}
// Try lowercase header
try {
const lowercaseResponse = await axios.get(`${apiUrl}/api/v1/workflows`, {
headers: {
'x-n8n-api-key': apiKey,
'Content-Type': 'application/json'
},
params: { limit: 1 }
});
console.log('Lowercase Header Success:', lowercaseResponse.data);
} catch (error: any) {
console.log('Lowercase Header Error:', error.response?.status);
}
// Test 4: Check API endpoint structure
console.log('\n=== Test 4: API Endpoint Structure ===');
const endpoints = [
'/api/v1/workflows',
'/workflows',
'/api/workflows',
'/api/v1/workflow'
];
for (const endpoint of endpoints) {
try {
const response = await axios.get(`${apiUrl}${endpoint}`, {
headers: {
'X-N8N-API-KEY': apiKey,
},
params: { limit: 1 },
timeout: 5000
});
console.log(`${endpoint} - Success`);
} catch (error: any) {
console.log(`${endpoint} - ${error.response?.status || 'Failed'}`);
}
}
}
debugN8nAuth().catch(console.error);

View File

@@ -1,65 +0,0 @@
#!/usr/bin/env node
import { N8nNodeLoader } from '../loaders/node-loader';
import { NodeParser } from '../parsers/node-parser';
async function debugNode() {
const loader = new N8nNodeLoader();
const parser = new NodeParser();
console.log('Loading nodes...');
const nodes = await loader.loadAllNodes();
// Find HTTP Request node
const httpNode = nodes.find(n => n.nodeName === 'HttpRequest');
if (httpNode) {
console.log('\n=== HTTP Request Node Debug ===');
console.log('NodeName:', httpNode.nodeName);
console.log('Package:', httpNode.packageName);
console.log('NodeClass type:', typeof httpNode.NodeClass);
console.log('NodeClass constructor name:', httpNode.NodeClass?.constructor?.name);
try {
const parsed = parser.parse(httpNode.NodeClass, httpNode.packageName);
console.log('\nParsed successfully:');
console.log('- Node Type:', parsed.nodeType);
console.log('- Display Name:', parsed.displayName);
console.log('- Style:', parsed.style);
console.log('- Properties count:', parsed.properties.length);
console.log('- Operations count:', parsed.operations.length);
console.log('- Is AI Tool:', parsed.isAITool);
console.log('- Is Versioned:', parsed.isVersioned);
if (parsed.properties.length > 0) {
console.log('\nFirst property:', parsed.properties[0]);
}
} catch (error) {
console.error('\nError parsing node:', (error as Error).message);
console.error('Stack:', (error as Error).stack);
}
} else {
console.log('HTTP Request node not found');
}
// Find Code node
const codeNode = nodes.find(n => n.nodeName === 'Code');
if (codeNode) {
console.log('\n\n=== Code Node Debug ===');
console.log('NodeName:', codeNode.nodeName);
console.log('Package:', codeNode.packageName);
console.log('NodeClass type:', typeof codeNode.NodeClass);
try {
const parsed = parser.parse(codeNode.NodeClass, codeNode.packageName);
console.log('\nParsed successfully:');
console.log('- Node Type:', parsed.nodeType);
console.log('- Properties count:', parsed.properties.length);
console.log('- Is Versioned:', parsed.isVersioned);
} catch (error) {
console.error('\nError parsing node:', (error as Error).message);
}
}
}
debugNode().catch(console.error);

View File

@@ -1,212 +0,0 @@
#!/usr/bin/env node
/**
* Test AI workflow validation enhancements
*/
import { createDatabaseAdapter } from '../database/database-adapter';
import { NodeRepository } from '../database/node-repository';
import { WorkflowValidator } from '../services/workflow-validator';
import { Logger } from '../utils/logger';
import { EnhancedConfigValidator } from '../services/enhanced-config-validator';
const logger = new Logger({ prefix: '[TestAIWorkflow]' });
// Test workflow with AI Agent and tools
const aiWorkflow = {
name: 'AI Agent with Tools',
nodes: [
{
id: '1',
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
position: [100, 100],
parameters: {
path: 'ai-webhook',
httpMethod: 'POST'
}
},
{
id: '2',
name: 'AI Agent',
type: '@n8n/n8n-nodes-langchain.agent',
position: [300, 100],
parameters: {
text: '={{ $json.query }}',
systemMessage: 'You are a helpful assistant with access to tools'
}
},
{
id: '3',
name: 'Google Sheets Tool',
type: 'n8n-nodes-base.googleSheets',
position: [300, 250],
parameters: {
operation: 'append',
sheetId: '={{ $fromAI("sheetId", "Sheet ID") }}',
range: 'A:Z'
}
},
{
id: '4',
name: 'Slack Tool',
type: 'n8n-nodes-base.slack',
position: [300, 350],
parameters: {
resource: 'message',
operation: 'post',
channel: '={{ $fromAI("channel", "Channel name") }}',
text: '={{ $fromAI("message", "Message text") }}'
}
},
{
id: '5',
name: 'Response',
type: 'n8n-nodes-base.respondToWebhook',
position: [500, 100],
parameters: {
responseCode: 200
}
}
],
connections: {
'Webhook': {
main: [[{ node: 'AI Agent', type: 'main', index: 0 }]]
},
'AI Agent': {
main: [[{ node: 'Response', type: 'main', index: 0 }]],
ai_tool: [
[
{ node: 'Google Sheets Tool', type: 'ai_tool', index: 0 },
{ node: 'Slack Tool', type: 'ai_tool', index: 0 }
]
]
}
}
};
// Test workflow without tools (should trigger warning)
const aiWorkflowNoTools = {
name: 'AI Agent without Tools',
nodes: [
{
id: '1',
name: 'Manual',
type: 'n8n-nodes-base.manualTrigger',
position: [100, 100],
parameters: {}
},
{
id: '2',
name: 'AI Agent',
type: '@n8n/n8n-nodes-langchain.agent',
position: [300, 100],
parameters: {
text: 'Hello AI'
}
}
],
connections: {
'Manual': {
main: [[{ node: 'AI Agent', type: 'main', index: 0 }]]
}
}
};
// Test workflow with googleSheetsTool (unknown node type)
const unknownToolWorkflow = {
name: 'Unknown Tool Test',
nodes: [
{
id: '1',
name: 'Agent',
type: 'nodes-langchain.agent',
position: [100, 100],
parameters: {}
},
{
id: '2',
name: 'Sheets Tool',
type: 'googleSheetsTool',
position: [300, 100],
parameters: {}
}
],
connections: {
'Agent': {
ai_tool: [[{ node: 'Sheets Tool', type: 'ai_tool', index: 0 }]]
}
}
};
async function testWorkflow(name: string, workflow: any) {
console.log(`\n🧪 Testing: ${name}`);
console.log('='.repeat(50));
const db = await createDatabaseAdapter('./data/nodes.db');
const repository = new NodeRepository(db);
const validator = new WorkflowValidator(repository, EnhancedConfigValidator);
try {
const result = await validator.validateWorkflow(workflow);
console.log(`\n📊 Validation Results:`);
console.log(`Valid: ${result.valid ? '✅' : '❌'}`);
if (result.errors.length > 0) {
console.log('\n❌ Errors:');
result.errors.forEach((err: any) => {
if (typeof err === 'string') {
console.log(` - ${err}`);
} else if (err.message) {
const nodeInfo = err.nodeName ? ` [${err.nodeName}]` : '';
console.log(` - ${err.message}${nodeInfo}`);
} else {
console.log(` - ${JSON.stringify(err, null, 2)}`);
}
});
}
if (result.warnings.length > 0) {
console.log('\n⚠ Warnings:');
result.warnings.forEach((warn: any) => {
const msg = warn.message || warn;
const nodeInfo = warn.nodeName ? ` [${warn.nodeName}]` : '';
console.log(` - ${msg}${nodeInfo}`);
});
}
if (result.suggestions.length > 0) {
console.log('\n💡 Suggestions:');
result.suggestions.forEach((sug: any) => console.log(` - ${sug}`));
}
console.log('\n📈 Statistics:');
console.log(` - Total nodes: ${result.statistics.totalNodes}`);
console.log(` - Valid connections: ${result.statistics.validConnections}`);
console.log(` - Invalid connections: ${result.statistics.invalidConnections}`);
console.log(` - Expressions validated: ${result.statistics.expressionsValidated}`);
} catch (error) {
console.error('Validation error:', error);
} finally {
db.close();
}
}
async function main() {
console.log('🤖 Testing AI Workflow Validation Enhancements');
// Test 1: Complete AI workflow with tools
await testWorkflow('AI Agent with Multiple Tools', aiWorkflow);
// Test 2: AI Agent without tools (should warn)
await testWorkflow('AI Agent without Tools', aiWorkflowNoTools);
// Test 3: Unknown tool type (like googleSheetsTool)
await testWorkflow('Unknown Tool Type', unknownToolWorkflow);
console.log('\n✅ All tests completed!');
}
if (require.main === module) {
main().catch(console.error);
}

View File

@@ -1,172 +0,0 @@
#!/usr/bin/env ts-node
/**
* Test Enhanced Validation
*
* Demonstrates the improvements in the enhanced validation system:
* - Operation-aware validation reduces false positives
* - Node-specific validators provide better error messages
* - Examples are included in validation responses
*/
import { ConfigValidator } from '../services/config-validator';
import { EnhancedConfigValidator } from '../services/enhanced-config-validator';
import { createDatabaseAdapter } from '../database/database-adapter';
import { NodeRepository } from '../database/node-repository';
import { logger } from '../utils/logger';
async function testValidation() {
const db = await createDatabaseAdapter('./data/nodes.db');
const repository = new NodeRepository(db);
console.log('🧪 Testing Enhanced Validation System\n');
console.log('=' .repeat(60));
// Test Case 1: Slack Send Message - Compare old vs new validation
console.log('\n📧 Test Case 1: Slack Send Message');
console.log('-'.repeat(40));
const slackConfig = {
resource: 'message',
operation: 'send',
channel: '#general',
text: 'Hello from n8n!'
};
const slackNode = repository.getNode('nodes-base.slack');
if (slackNode && slackNode.properties) {
// Old validation (full mode)
console.log('\n❌ OLD Validation (validate_node_config):');
const oldResult = ConfigValidator.validate('nodes-base.slack', slackConfig, slackNode.properties);
console.log(` Errors: ${oldResult.errors.length}`);
console.log(` Warnings: ${oldResult.warnings.length}`);
console.log(` Visible Properties: ${oldResult.visibleProperties.length}`);
if (oldResult.errors.length > 0) {
console.log('\n Sample errors:');
oldResult.errors.slice(0, 3).forEach(err => {
console.log(` - ${err.message}`);
});
}
// New validation (operation mode)
console.log('\n✅ NEW Validation (validate_node_operation):');
const newResult = EnhancedConfigValidator.validateWithMode(
'nodes-base.slack',
slackConfig,
slackNode.properties,
'operation'
);
console.log(` Errors: ${newResult.errors.length}`);
console.log(` Warnings: ${newResult.warnings.length}`);
console.log(` Mode: ${newResult.mode}`);
console.log(` Operation: ${newResult.operation?.resource}/${newResult.operation?.operation}`);
if (newResult.examples && newResult.examples.length > 0) {
console.log('\n 📚 Examples provided:');
newResult.examples.forEach(ex => {
console.log(` - ${ex.description}`);
});
}
if (newResult.nextSteps && newResult.nextSteps.length > 0) {
console.log('\n 🎯 Next steps:');
newResult.nextSteps.forEach(step => {
console.log(` - ${step}`);
});
}
}
// Test Case 2: Google Sheets Append - With validation errors
console.log('\n\n📊 Test Case 2: Google Sheets Append (with errors)');
console.log('-'.repeat(40));
const sheetsConfigBad = {
operation: 'append',
// Missing required fields
};
const sheetsNode = repository.getNode('nodes-base.googleSheets');
if (sheetsNode && sheetsNode.properties) {
const result = EnhancedConfigValidator.validateWithMode(
'nodes-base.googleSheets',
sheetsConfigBad,
sheetsNode.properties,
'operation'
);
console.log(`\n Validation result:`);
console.log(` Valid: ${result.valid}`);
console.log(` Errors: ${result.errors.length}`);
if (result.errors.length > 0) {
console.log('\n Errors found:');
result.errors.forEach(err => {
console.log(` - ${err.message}`);
if (err.fix) console.log(` Fix: ${err.fix}`);
});
}
if (result.examples && result.examples.length > 0) {
console.log('\n 📚 Working examples provided:');
result.examples.forEach(ex => {
console.log(` - ${ex.description}:`);
console.log(` ${JSON.stringify(ex.config, null, 2).split('\n').join('\n ')}`);
});
}
}
// Test Case 3: Complex Slack Update Message
console.log('\n\n💬 Test Case 3: Slack Update Message');
console.log('-'.repeat(40));
const slackUpdateConfig = {
resource: 'message',
operation: 'update',
channel: '#general',
// Missing required 'ts' field
text: 'Updated message'
};
if (slackNode && slackNode.properties) {
const result = EnhancedConfigValidator.validateWithMode(
'nodes-base.slack',
slackUpdateConfig,
slackNode.properties,
'operation'
);
console.log(`\n Validation result:`);
console.log(` Valid: ${result.valid}`);
console.log(` Errors: ${result.errors.length}`);
result.errors.forEach(err => {
console.log(` - Property: ${err.property}`);
console.log(` Message: ${err.message}`);
console.log(` Fix: ${err.fix}`);
});
}
// Test Case 4: Comparison Summary
console.log('\n\n📈 Summary: Old vs New Validation');
console.log('=' .repeat(60));
console.log('\nOLD validate_node_config:');
console.log(' ❌ Validates ALL properties regardless of operation');
console.log(' ❌ Many false positives for complex nodes');
console.log(' ❌ Generic error messages');
console.log(' ❌ No examples or next steps');
console.log('\nNEW validate_node_operation:');
console.log(' ✅ Only validates properties for selected operation');
console.log(' ✅ 80%+ reduction in false positives');
console.log(' ✅ Operation-specific error messages');
console.log(' ✅ Includes working examples when errors found');
console.log(' ✅ Provides actionable next steps');
console.log(' ✅ Auto-fix suggestions for common issues');
console.log('\n✨ The enhanced validation makes AI agents much more effective!');
db.close();
}
// Run the test
testValidation().catch(console.error);

View File

@@ -1,165 +0,0 @@
#!/usr/bin/env node
/**
* Test for Issue #45 Fix: Partial Update Tool Validation/Execution Discrepancy
*
* This test verifies that the cleanWorkflowForUpdate function no longer adds
* default settings to workflows during updates, which was causing the n8n API
* to reject requests with "settings must NOT have additional properties".
*/
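// Background, as described above: before this fix the update path added default settings
// (presumably the same defaults cleanWorkflowForCreate still applies — see Test 4 below:
// executionOrder 'v1', saveDataErrorExecution/saveDataSuccessExecution 'all',
// saveManualExecutions and saveExecutionProgress true), and the n8n update endpoint
// rejected such payloads because it does not accept additional settings properties.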
import { config } from 'dotenv';
import { logger } from '../utils/logger';
import { cleanWorkflowForUpdate, cleanWorkflowForCreate } from '../services/n8n-validation';
import { Workflow } from '../types/n8n-api';
// Load environment variables
config();
function testCleanWorkflowFunctions() {
logger.info('Testing Issue #45 Fix: cleanWorkflowForUpdate should not add default settings\n');
// Test 1: cleanWorkflowForUpdate with workflow without settings
logger.info('=== Test 1: cleanWorkflowForUpdate without settings ===');
const workflowWithoutSettings: Workflow = {
id: 'test-123',
name: 'Test Workflow',
nodes: [],
connections: {},
active: false,
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-01T00:00:00.000Z',
versionId: 'version-123'
};
const cleanedUpdate = cleanWorkflowForUpdate(workflowWithoutSettings);
if ('settings' in cleanedUpdate) {
logger.error('❌ FAIL: cleanWorkflowForUpdate added settings when it should not have');
logger.error(' Found settings:', JSON.stringify(cleanedUpdate.settings));
} else {
logger.info('✅ PASS: cleanWorkflowForUpdate did not add settings');
}
// Test 2: cleanWorkflowForUpdate with existing settings
logger.info('\n=== Test 2: cleanWorkflowForUpdate with existing settings ===');
const workflowWithSettings: Workflow = {
...workflowWithoutSettings,
settings: {
executionOrder: 'v1',
saveDataErrorExecution: 'none',
saveDataSuccessExecution: 'none',
saveManualExecutions: false,
saveExecutionProgress: false
}
};
const cleanedUpdate2 = cleanWorkflowForUpdate(workflowWithSettings);
if ('settings' in cleanedUpdate2) {
const settingsMatch = JSON.stringify(cleanedUpdate2.settings) === JSON.stringify(workflowWithSettings.settings);
if (settingsMatch) {
logger.info('✅ PASS: cleanWorkflowForUpdate preserved existing settings without modification');
} else {
logger.error('❌ FAIL: cleanWorkflowForUpdate modified existing settings');
logger.error(' Original:', JSON.stringify(workflowWithSettings.settings));
logger.error(' Cleaned:', JSON.stringify(cleanedUpdate2.settings));
}
} else {
logger.error('❌ FAIL: cleanWorkflowForUpdate removed existing settings');
}
// Test 3: cleanWorkflowForUpdate with partial settings
logger.info('\n=== Test 3: cleanWorkflowForUpdate with partial settings ===');
const workflowWithPartialSettings: Workflow = {
...workflowWithoutSettings,
settings: {
executionOrder: 'v1'
// Missing other default properties
}
};
const cleanedUpdate3 = cleanWorkflowForUpdate(workflowWithPartialSettings);
if ('settings' in cleanedUpdate3) {
const settingsKeys = cleanedUpdate3.settings ? Object.keys(cleanedUpdate3.settings) : [];
const hasOnlyExecutionOrder = settingsKeys.length === 1 &&
cleanedUpdate3.settings?.executionOrder === 'v1';
if (hasOnlyExecutionOrder) {
logger.info('✅ PASS: cleanWorkflowForUpdate preserved partial settings without adding defaults');
} else {
logger.error('❌ FAIL: cleanWorkflowForUpdate added default properties to partial settings');
logger.error(' Original keys:', Object.keys(workflowWithPartialSettings.settings || {}));
logger.error(' Cleaned keys:', settingsKeys);
}
} else {
logger.error('❌ FAIL: cleanWorkflowForUpdate removed partial settings');
}
// Test 4: Verify cleanWorkflowForCreate still adds defaults
logger.info('\n=== Test 4: cleanWorkflowForCreate should add default settings ===');
const newWorkflow = {
name: 'New Workflow',
nodes: [],
connections: {}
};
const cleanedCreate = cleanWorkflowForCreate(newWorkflow);
if ('settings' in cleanedCreate && cleanedCreate.settings) {
const hasDefaults =
cleanedCreate.settings.executionOrder === 'v1' &&
cleanedCreate.settings.saveDataErrorExecution === 'all' &&
cleanedCreate.settings.saveDataSuccessExecution === 'all' &&
cleanedCreate.settings.saveManualExecutions === true &&
cleanedCreate.settings.saveExecutionProgress === true;
if (hasDefaults) {
logger.info('✅ PASS: cleanWorkflowForCreate correctly adds default settings');
} else {
logger.error('❌ FAIL: cleanWorkflowForCreate added settings but not with correct defaults');
logger.error(' Settings:', JSON.stringify(cleanedCreate.settings));
}
} else {
logger.error('❌ FAIL: cleanWorkflowForCreate did not add default settings');
}
// Test 5: Verify read-only fields are removed
logger.info('\n=== Test 5: cleanWorkflowForUpdate removes read-only fields ===');
const workflowWithReadOnly: any = {
...workflowWithoutSettings,
staticData: { some: 'data' },
pinData: { node1: 'data' },
tags: ['tag1', 'tag2'],
isArchived: true,
usedCredentials: ['cred1'],
sharedWithProjects: ['proj1'],
triggerCount: 5,
shared: true,
active: true
};
const cleanedReadOnly = cleanWorkflowForUpdate(workflowWithReadOnly);
const removedFields = [
'id', 'createdAt', 'updatedAt', 'versionId', 'meta',
'staticData', 'pinData', 'tags', 'isArchived',
'usedCredentials', 'sharedWithProjects', 'triggerCount',
'shared', 'active'
];
const hasRemovedFields = removedFields.some(field => field in cleanedReadOnly);
if (!hasRemovedFields) {
logger.info('✅ PASS: cleanWorkflowForUpdate correctly removed all read-only fields');
} else {
const foundFields = removedFields.filter(field => field in cleanedReadOnly);
logger.error('❌ FAIL: cleanWorkflowForUpdate did not remove these fields:', foundFields);
}
logger.info('\n=== Test Summary ===');
logger.info('All tests completed. The fix ensures that cleanWorkflowForUpdate only removes fields');
logger.info('without adding default settings, preventing the n8n API validation error.');
}
// Run the tests
testCleanWorkflowFunctions();

View File

@@ -1,162 +0,0 @@
#!/usr/bin/env node
/**
* Integration test for n8n_update_partial_workflow MCP tool
* Tests that the tool can be called successfully via MCP protocol
*/
import { config } from 'dotenv';
import { logger } from '../utils/logger';
import { isN8nApiConfigured } from '../config/n8n-api';
import { handleUpdatePartialWorkflow } from '../mcp/handlers-workflow-diff';
// Load environment variables
config();
async function testMcpUpdatePartialWorkflow() {
logger.info('Testing n8n_update_partial_workflow MCP tool...');
// Check if API is configured
if (!isN8nApiConfigured()) {
logger.warn('n8n API not configured. Set N8N_API_URL and N8N_API_KEY to test.');
logger.info('Example:');
logger.info(' N8N_API_URL=https://your-n8n.com N8N_API_KEY=your-key npm run test:mcp:update-partial');
return;
}
// Test 1: Validate only - should work without actual workflow
logger.info('\n=== Test 1: Validate Only (no actual workflow needed) ===');
const validateOnlyRequest = {
id: 'test-workflow-123',
operations: [
{
type: 'addNode',
description: 'Add HTTP Request node',
node: {
name: 'HTTP Request',
type: 'n8n-nodes-base.httpRequest',
position: [400, 300],
parameters: {
url: 'https://api.example.com/data',
method: 'GET'
}
}
},
{
type: 'addConnection',
source: 'Start',
target: 'HTTP Request'
}
],
validateOnly: true
};
try {
const result = await handleUpdatePartialWorkflow(validateOnlyRequest);
logger.info('Validation result:', JSON.stringify(result, null, 2));
} catch (error) {
logger.error('Validation test failed:', error);
}
// Test 2: Test with missing required fields
logger.info('\n=== Test 2: Missing Required Fields ===');
const invalidRequest = {
operations: [{
type: 'addNode'
// Missing node property
}]
// Missing id
};
try {
const result = await handleUpdatePartialWorkflow(invalidRequest);
logger.info('Should fail with validation error:', JSON.stringify(result, null, 2));
} catch (error) {
logger.info('Expected validation error:', error instanceof Error ? error.message : String(error));
}
// Test 3: Test with complex operations array
logger.info('\n=== Test 3: Complex Operations Array ===');
const complexRequest = {
id: 'workflow-456',
operations: [
{
type: 'updateNode',
nodeName: 'Webhook',
changes: {
'parameters.path': 'new-webhook-path',
'parameters.method': 'POST'
}
},
{
type: 'addNode',
node: {
name: 'Set',
type: 'n8n-nodes-base.set',
typeVersion: 3,
position: [600, 300],
parameters: {
mode: 'manual',
fields: {
values: [
{ name: 'status', value: 'processed' }
]
}
}
}
},
{
type: 'addConnection',
source: 'Webhook',
target: 'Set'
},
{
type: 'updateName',
name: 'Updated Workflow Name'
},
{
type: 'addTag',
tag: 'production'
}
],
validateOnly: true
};
try {
const result = await handleUpdatePartialWorkflow(complexRequest);
logger.info('Complex operations result:', JSON.stringify(result, null, 2));
} catch (error) {
logger.error('Complex operations test failed:', error);
}
// Test 4: Test operation type validation
logger.info('\n=== Test 4: Invalid Operation Type ===');
const invalidTypeRequest = {
id: 'workflow-789',
operations: [{
type: 'invalidOperation',
something: 'else'
}],
validateOnly: true
};
try {
const result = await handleUpdatePartialWorkflow(invalidTypeRequest);
logger.info('Invalid type result:', JSON.stringify(result, null, 2));
} catch (error) {
logger.info('Expected error for invalid type:', error instanceof Error ? error.message : String(error));
}
logger.info('\n✅ MCP tool integration tests completed!');
logger.info('\nNOTE: These tests verify the MCP tool can be called without errors.');
logger.info('To test with real workflows, ensure N8N_API_URL and N8N_API_KEY are set.');
}
// Run tests
testMcpUpdatePartialWorkflow().catch(error => {
logger.error('Unhandled error:', error);
process.exit(1);
});

View File

@@ -1,54 +0,0 @@
#!/usr/bin/env node
/**
* Test MCP tools directly
*/
import { createDatabaseAdapter } from '../database/database-adapter';
import { NodeRepository } from '../database/node-repository';
import { N8NDocumentationMCPServer } from '../mcp/server';
import { Logger } from '../utils/logger';
const logger = new Logger({ prefix: '[TestMCPTools]' });
async function testTool(server: any, toolName: string, args: any) {
try {
console.log(`\n🔧 Testing: ${toolName}`);
console.log('Args:', JSON.stringify(args, null, 2));
console.log('-'.repeat(60));
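// Invoke the handler directly on the server instance (this bypasses the MCP transport
// layer entirely); assumes the named methods exist on N8NDocumentationMCPServer.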
const result = await server[toolName].call(server, args);
console.log('Result:', JSON.stringify(result, null, 2));
} catch (error) {
console.error(`❌ Error: ${error}`);
}
}
async function main() {
console.log('🤖 Testing MCP Tools\n');
// Create server instance and wait for initialization
const server = new N8NDocumentationMCPServer();
// Give it time to initialize
await new Promise(resolve => setTimeout(resolve, 100));
// Test get_node_as_tool_info
console.log('\n=== Testing get_node_as_tool_info ===');
await testTool(server, 'getNodeAsToolInfo', 'nodes-base.slack');
await testTool(server, 'getNodeAsToolInfo', 'nodes-base.googleSheets');
// Test enhanced get_node_info with aiToolCapabilities
console.log('\n\n=== Testing get_node_info (with aiToolCapabilities) ===');
await testTool(server, 'getNodeInfo', 'nodes-base.httpRequest');
// Test list_ai_tools with enhanced response
console.log('\n\n=== Testing list_ai_tools (enhanced) ===');
await testTool(server, 'listAITools', {});
console.log('\n✅ All tests completed!');
process.exit(0);
}
if (require.main === module) {
main().catch(console.error);
}

View File

@@ -1,148 +0,0 @@
#!/usr/bin/env node
import { config } from 'dotenv';
import { logger } from '../utils/logger';
import { isN8nApiConfigured, getN8nApiConfig } from '../config/n8n-api';
import { getN8nApiClient } from '../mcp/handlers-n8n-manager';
import { N8nApiClient } from '../services/n8n-api-client';
import { Workflow, ExecutionStatus } from '../types/n8n-api';
// Load environment variables
config();
async function testN8nManagerIntegration() {
logger.info('Testing n8n Manager Integration...');
// Check if API is configured
if (!isN8nApiConfigured()) {
logger.warn('n8n API not configured. Set N8N_API_URL and N8N_API_KEY to test.');
logger.info('Example:');
logger.info(' N8N_API_URL=https://your-n8n.com N8N_API_KEY=your-key npm run test:n8n-manager');
return;
}
const apiConfig = getN8nApiConfig();
logger.info('n8n API Configuration:', {
url: apiConfig!.baseUrl,
timeout: apiConfig!.timeout,
maxRetries: apiConfig!.maxRetries
});
const client = getN8nApiClient();
if (!client) {
logger.error('Failed to create n8n API client');
return;
}
try {
// Test 1: Health Check
logger.info('\n=== Test 1: Health Check ===');
const health = await client.healthCheck();
logger.info('Health check passed:', health);
// Test 2: List Workflows
logger.info('\n=== Test 2: List Workflows ===');
const workflows = await client.listWorkflows({ limit: 5 });
logger.info(`Found ${workflows.data.length} workflows`);
workflows.data.forEach(wf => {
logger.info(`- ${wf.name} (ID: ${wf.id}, Active: ${wf.active})`);
});
// Test 3: Create a Test Workflow
logger.info('\n=== Test 3: Create Test Workflow ===');
const testWorkflow: Partial<Workflow> = {
name: `Test Workflow - MCP Integration ${Date.now()}`,
nodes: [
{
id: '1',
name: 'Start',
type: 'n8n-nodes-base.start',
typeVersion: 1,
position: [250, 300],
parameters: {}
},
{
id: '2',
name: 'Set',
type: 'n8n-nodes-base.set',
typeVersion: 1,
position: [450, 300],
parameters: {
values: {
string: [
{
name: 'message',
value: 'Hello from MCP!'
}
]
}
}
}
],
connections: {
'1': {
main: [[{ node: '2', type: 'main', index: 0 }]]
}
},
settings: {
executionOrder: 'v1',
saveDataErrorExecution: 'all',
saveDataSuccessExecution: 'all',
saveManualExecutions: true,
saveExecutionProgress: true
}
};
const createdWorkflow = await client.createWorkflow(testWorkflow);
logger.info('Created workflow:', {
id: createdWorkflow.id,
name: createdWorkflow.name,
active: createdWorkflow.active
});
// Test 4: Get Workflow Details
logger.info('\n=== Test 4: Get Workflow Details ===');
const workflowDetails = await client.getWorkflow(createdWorkflow.id!);
logger.info('Retrieved workflow:', {
id: workflowDetails.id,
name: workflowDetails.name,
nodeCount: workflowDetails.nodes.length
});
// Test 5: Update Workflow
logger.info('\n=== Test 5: Update Workflow ===');
// n8n API requires full workflow structure for updates
const updatedWorkflow = await client.updateWorkflow(createdWorkflow.id!, {
name: `${createdWorkflow.name} - Updated`,
nodes: workflowDetails.nodes,
connections: workflowDetails.connections,
settings: workflowDetails.settings
});
logger.info('Updated workflow name:', updatedWorkflow.name);
// Test 6: List Executions
logger.info('\n=== Test 6: List Recent Executions ===');
const executions = await client.listExecutions({ limit: 5 });
logger.info(`Found ${executions.data.length} recent executions`);
executions.data.forEach(exec => {
logger.info(`- Workflow: ${exec.workflowName || exec.workflowId}, Status: ${exec.status}, Started: ${exec.startedAt}`);
});
// Test 7: Cleanup - Delete Test Workflow
logger.info('\n=== Test 7: Cleanup ===');
await client.deleteWorkflow(createdWorkflow.id!);
logger.info('Deleted test workflow');
logger.info('\n✅ All tests passed successfully!');
} catch (error) {
logger.error('Test failed:', error);
process.exit(1);
}
}
// Run tests
testN8nManagerIntegration().catch(error => {
logger.error('Unhandled error:', error);
process.exit(1);
});

View File

@@ -1,113 +0,0 @@
#!/usr/bin/env ts-node
/**
* Test script for the n8n_validate_workflow tool
*
* This script tests the new tool that fetches a workflow from n8n
* and validates it using the existing validation logic.
*/
import { config } from 'dotenv';
import { handleValidateWorkflow } from '../mcp/handlers-n8n-manager';
import { NodeRepository } from '../database/node-repository';
import { createDatabaseAdapter } from '../database/database-adapter';
import { Logger } from '../utils/logger';
import * as path from 'path';
// Load environment variables
config();
const logger = new Logger({ prefix: '[TestN8nValidateWorkflow]' });
async function testN8nValidateWorkflow() {
try {
// Check if n8n API is configured
if (!process.env.N8N_API_URL || !process.env.N8N_API_KEY) {
logger.error('N8N_API_URL and N8N_API_KEY must be set in environment variables');
process.exit(1);
}
logger.info('n8n API Configuration:', {
url: process.env.N8N_API_URL,
hasApiKey: !!process.env.N8N_API_KEY
});
// Initialize database
const dbPath = path.join(process.cwd(), 'data', 'nodes.db');
const db = await createDatabaseAdapter(dbPath);
const repository = new NodeRepository(db);
// Test cases
const testCases = [
{
name: 'Validate existing workflow with all options',
args: {
id: '1', // Replace with an actual workflow ID from your n8n instance
options: {
validateNodes: true,
validateConnections: true,
validateExpressions: true,
profile: 'runtime'
}
}
},
{
name: 'Validate with minimal profile',
args: {
id: '1', // Replace with an actual workflow ID
options: {
profile: 'minimal'
}
}
},
{
name: 'Validate connections only',
args: {
id: '1', // Replace with an actual workflow ID
options: {
validateNodes: false,
validateConnections: true,
validateExpressions: false
}
}
}
];
// Run test cases
for (const testCase of testCases) {
logger.info(`\nRunning test: ${testCase.name}`);
logger.info('Input:', JSON.stringify(testCase.args, null, 2));
try {
const result = await handleValidateWorkflow(testCase.args, repository);
if (result.success) {
logger.info('✅ Validation completed successfully');
logger.info('Result:', JSON.stringify(result.data, null, 2));
} else {
logger.error('❌ Validation failed');
logger.error('Error:', result.error);
if (result.details) {
logger.error('Details:', JSON.stringify(result.details, null, 2));
}
}
} catch (error) {
logger.error('❌ Test case failed with exception:', error);
}
logger.info('-'.repeat(80));
}
logger.info('\n✅ All tests completed');
} catch (error) {
logger.error('Test script failed:', error);
process.exit(1);
}
}
// Run the test
testN8nValidateWorkflow().catch(error => {
logger.error('Unhandled error:', error);
process.exit(1);
});

View File

@@ -1,200 +0,0 @@
#!/usr/bin/env node
/**
* Test script demonstrating all node-level properties in n8n workflows
* Shows correct placement and usage of properties that must be at node level
*/
import { createDatabaseAdapter } from '../database/database-adapter.js';
import { NodeRepository } from '../database/node-repository.js';
import { WorkflowValidator } from '../services/workflow-validator.js';
import { WorkflowDiffEngine } from '../services/workflow-diff-engine.js';
import { join } from 'path';
async function main() {
console.log('🔍 Testing Node-Level Properties Configuration\n');
// Initialize database
const dbPath = join(process.cwd(), 'nodes.db');
const dbAdapter = await createDatabaseAdapter(dbPath);
const nodeRepository = new NodeRepository(dbAdapter);
const EnhancedConfigValidator = (await import('../services/enhanced-config-validator.js')).EnhancedConfigValidator;
const validator = new WorkflowValidator(nodeRepository, EnhancedConfigValidator);
const diffEngine = new WorkflowDiffEngine();
// Example 1: Complete node with all properties
console.log('1️⃣ Complete Node Configuration Example:');
const completeNode = {
id: 'node_1',
name: 'Database Query',
type: 'n8n-nodes-base.postgres',
typeVersion: 2.6,
position: [450, 300] as [number, number],
// Operation parameters (inside parameters)
parameters: {
operation: 'executeQuery',
query: 'SELECT * FROM users WHERE active = true'
},
// Node-level properties (NOT inside parameters!)
credentials: {
postgres: {
id: 'cred_123',
name: 'Production Database'
}
},
disabled: false,
notes: 'This node queries active users from the production database',
notesInFlow: true,
executeOnce: true,
// Error handling (also at node level!)
onError: 'continueErrorOutput' as const,
retryOnFail: true,
maxTries: 3,
waitBetweenTries: 2000,
alwaysOutputData: true
};
console.log(JSON.stringify(completeNode, null, 2));
console.log('\n✅ All properties are at the correct level!\n');
// Example 2: Workflow with properly configured nodes
console.log('2️⃣ Complete Workflow Example:');
const workflow = {
name: 'Production Data Processing',
nodes: [
{
id: 'trigger_1',
name: 'Every Hour',
type: 'n8n-nodes-base.scheduleTrigger',
typeVersion: 1.2,
position: [250, 300] as [number, number],
parameters: {
rule: { interval: [{ field: 'hours', hoursInterval: 1 }] }
},
notes: 'Runs every hour to check for new data',
notesInFlow: true
},
completeNode,
{
id: 'error_handler',
name: 'Error Notification',
type: 'n8n-nodes-base.slack',
typeVersion: 2.3,
position: [650, 450] as [number, number],
parameters: {
resource: 'message',
operation: 'post',
channel: '#alerts',
text: 'Database query failed!'
},
credentials: {
slackApi: {
id: 'cred_456',
name: 'Alert Slack'
}
},
executeOnce: true,
onError: 'continueRegularOutput' as const
}
],
connections: {
'Every Hour': {
main: [[{ node: 'Database Query', type: 'main', index: 0 }]]
},
'Database Query': {
main: [[{ node: 'Process Data', type: 'main', index: 0 }]],
error: [[{ node: 'Error Notification', type: 'main', index: 0 }]]
}
}
};
// Validate the workflow
console.log('\n3️⃣ Validating Workflow:');
const result = await validator.validateWorkflow(workflow as any, { profile: 'strict' });
console.log(`Valid: ${result.valid}`);
console.log(`Errors: ${result.errors.length}`);
console.log(`Warnings: ${result.warnings.length}`);
if (result.errors.length > 0) {
console.log('\nErrors:');
result.errors.forEach((err: any) => console.log(`- ${err.message}`));
}
// Example 3: Using workflow diff to update node-level properties
console.log('\n4️⃣ Updating Node-Level Properties with Diff Engine:');
const operations = [
{
type: 'updateNode' as const,
nodeName: 'Database Query',
changes: {
// Update operation parameters
'parameters.query': 'SELECT * FROM users WHERE active = true AND created_at > NOW() - INTERVAL \'7 days\'',
// Update node-level properties (no 'parameters.' prefix!)
'onError': 'stopWorkflow',
'executeOnce': false,
'notes': 'Updated to only query users from last 7 days',
'maxTries': 5,
'disabled': false
}
}
];
console.log('Operations:');
console.log(JSON.stringify(operations, null, 2));
// Example 4: Common mistakes to avoid
console.log('\n5️⃣ ❌ COMMON MISTAKES TO AVOID:');
const wrongNode = {
id: 'wrong_1',
name: 'Wrong Configuration',
type: 'n8n-nodes-base.httpRequest',
typeVersion: 4.2,
position: [250, 300] as [number, number],
parameters: {
method: 'POST',
url: 'https://api.example.com',
// ❌ WRONG - These should NOT be inside parameters!
onError: 'continueErrorOutput',
retryOnFail: true,
executeOnce: true,
notes: 'This is wrong!',
credentials: { httpAuth: { id: '123' } }
}
};
console.log('❌ Wrong (properties inside parameters):');
console.log(JSON.stringify(wrongNode.parameters, null, 2));
// Validate wrong configuration
const wrongWorkflow = {
name: 'Wrong Example',
nodes: [wrongNode],
connections: {}
};
const wrongResult = await validator.validateWorkflow(wrongWorkflow as any);
console.log('\nValidation of wrong configuration:');
wrongResult.errors.forEach((err: any) => console.log(`❌ ERROR: ${err.message}`));
console.log('\n✅ Summary of Node-Level Properties:');
console.log('- credentials: Link to credential sets');
console.log('- disabled: Disable node execution');
console.log('- notes: Internal documentation');
console.log('- notesInFlow: Show notes on canvas');
console.log('- executeOnce: Execute only once per run');
console.log('- onError: Error handling strategy');
console.log('- retryOnFail: Enable automatic retries');
console.log('- maxTries: Number of retry attempts');
console.log('- waitBetweenTries: Delay between retries');
console.log('- alwaysOutputData: Output data on error');
console.log('- continueOnFail: (deprecated - use onError)');
console.log('\n🎯 Remember: All these properties go at the NODE level, not inside parameters!');
}
main().catch(console.error);

View File

@@ -1,108 +0,0 @@
#!/usr/bin/env node
/**
* Copyright (c) 2024 AiAdvisors Romuald Czlonkowski
* Licensed under the Sustainable Use License v1.0
*/
import { createDatabaseAdapter } from '../database/database-adapter';
import { NodeRepository } from '../database/node-repository';
const TEST_CASES = [
{
nodeType: 'nodes-base.httpRequest',
checks: {
hasProperties: true,
minProperties: 5,
hasDocumentation: true,
isVersioned: true
}
},
{
nodeType: 'nodes-base.slack',
checks: {
hasOperations: true,
minOperations: 10,
style: 'declarative'
}
},
{
nodeType: 'nodes-base.code',
checks: {
hasProperties: true,
properties: ['mode', 'language', 'jsCode']
}
}
];
async function runTests() {
const db = await createDatabaseAdapter('./data/nodes.db');
const repository = new NodeRepository(db);
console.log('🧪 Running node tests...\n');
let passed = 0;
let failed = 0;
for (const testCase of TEST_CASES) {
console.log(`Testing ${testCase.nodeType}...`);
try {
const node = repository.getNode(testCase.nodeType);
if (!node) {
throw new Error('Node not found');
}
// Run checks
for (const [check, expected] of Object.entries(testCase.checks)) {
switch (check) {
case 'hasProperties':
if (expected && node.properties.length === 0) {
throw new Error('No properties found');
}
break;
case 'minProperties':
if (node.properties.length < expected) {
throw new Error(`Expected at least ${expected} properties, got ${node.properties.length}`);
}
break;
case 'hasOperations':
if (expected && node.operations.length === 0) {
throw new Error('No operations found');
}
break;
case 'minOperations':
if (node.operations.length < expected) {
throw new Error(`Expected at least ${expected} operations, got ${node.operations.length}`);
}
break;
case 'properties':
const propNames = node.properties.map((p: any) => p.name);
for (const prop of expected as string[]) {
if (!propNames.includes(prop)) {
throw new Error(`Missing property: ${prop}`);
}
}
break;
}
}
console.log(`${testCase.nodeType} passed all checks\n`);
passed++;
} catch (error) {
console.error(`${testCase.nodeType} failed: ${(error as Error).message}\n`);
failed++;
}
}
console.log(`\n📊 Test Results: ${passed} passed, ${failed} failed`);
db.close();
}
if (require.main === module) {
runTests().catch(console.error);
}

View File

@@ -1,137 +0,0 @@
#!/usr/bin/env node
/**
* Test validation of a single workflow
*/
import { existsSync, readFileSync } from 'fs';
import path from 'path';
import { NodeRepository } from '../database/node-repository';
import { createDatabaseAdapter } from '../database/database-adapter';
import { WorkflowValidator } from '../services/workflow-validator';
import { EnhancedConfigValidator } from '../services/enhanced-config-validator';
import { Logger } from '../utils/logger';
const logger = new Logger({ prefix: '[test-single-workflow]' });
async function testSingleWorkflow() {
// Read the workflow file
const workflowPath = process.argv[2];
if (!workflowPath) {
logger.error('Please provide a workflow file path');
process.exit(1);
}
if (!existsSync(workflowPath)) {
logger.error(`Workflow file not found: ${workflowPath}`);
process.exit(1);
}
logger.info(`Testing workflow: ${workflowPath}\n`);
// Initialize database
const dbPath = path.join(process.cwd(), 'data', 'nodes.db');
if (!existsSync(dbPath)) {
logger.error('Database not found. Run npm run rebuild first.');
process.exit(1);
}
const db = await createDatabaseAdapter(dbPath);
const repository = new NodeRepository(db);
const validator = new WorkflowValidator(
repository,
EnhancedConfigValidator
);
try {
// Read and parse workflow
const workflowJson = JSON.parse(readFileSync(workflowPath, 'utf8'));
logger.info(`Workflow: ${workflowJson.name || 'Unnamed'}`);
logger.info(`Nodes: ${workflowJson.nodes?.length || 0}`);
logger.info(`Connections: ${Object.keys(workflowJson.connections || {}).length}`);
// List all node types in the workflow
logger.info('\nNode types in workflow:');
workflowJson.nodes?.forEach((node: any) => {
logger.info(` - ${node.name}: ${node.type}`);
});
// Check what these node types are in our database
logger.info('\nChecking node types in database:');
for (const node of workflowJson.nodes || []) {
const dbNode = repository.getNode(node.type);
if (dbNode) {
logger.info(`${node.type} found in database`);
} else {
// Try normalization patterns
let shortType = node.type;
if (node.type.startsWith('n8n-nodes-base.')) {
shortType = node.type.replace('n8n-nodes-base.', 'nodes-base.');
} else if (node.type.startsWith('@n8n/n8n-nodes-langchain.')) {
shortType = node.type.replace('@n8n/n8n-nodes-langchain.', 'nodes-langchain.');
}
const dbNodeShort = repository.getNode(shortType);
if (dbNodeShort) {
logger.info(`${shortType} found in database (normalized)`);
} else {
logger.error(`${node.type} NOT found in database`);
}
}
}
logger.info('\n' + '='.repeat(80));
logger.info('VALIDATION RESULTS');
logger.info('='.repeat(80) + '\n');
// Validate the workflow
const result = await validator.validateWorkflow(workflowJson);
console.log(`Valid: ${result.valid ? '✅ YES' : '❌ NO'}`);
if (result.errors.length > 0) {
console.log('\nErrors:');
result.errors.forEach((error: any) => {
console.log(` - ${error.nodeName || 'workflow'}: ${error.message}`);
});
}
if (result.warnings.length > 0) {
console.log('\nWarnings:');
result.warnings.forEach((warning: any) => {
const msg = typeof warning.message === 'string'
? warning.message
: JSON.stringify(warning.message);
console.log(` - ${warning.nodeName || 'workflow'}: ${msg}`);
});
}
if (result.suggestions?.length > 0) {
console.log('\nSuggestions:');
result.suggestions.forEach((suggestion: string) => {
console.log(` - ${suggestion}`);
});
}
console.log('\nStatistics:');
console.log(` - Total nodes: ${result.statistics.totalNodes}`);
console.log(` - Enabled nodes: ${result.statistics.enabledNodes}`);
console.log(` - Trigger nodes: ${result.statistics.triggerNodes}`);
console.log(` - Valid connections: ${result.statistics.validConnections}`);
console.log(` - Invalid connections: ${result.statistics.invalidConnections}`);
console.log(` - Expressions validated: ${result.statistics.expressionsValidated}`);
} catch (error) {
logger.error('Failed to validate workflow:', error);
process.exit(1);
} finally {
db.close();
}
}
// Run test
testSingleWorkflow().catch(error => {
logger.error('Test failed:', error);
process.exit(1);
});

View File

@@ -1,173 +0,0 @@
#!/usr/bin/env node
/**
* Test workflow validation on actual n8n templates from the database
*/
import { existsSync } from 'fs';
import path from 'path';
import { NodeRepository } from '../database/node-repository';
import { createDatabaseAdapter } from '../database/database-adapter';
import { WorkflowValidator } from '../services/workflow-validator';
import { EnhancedConfigValidator } from '../services/enhanced-config-validator';
import { TemplateRepository } from '../templates/template-repository';
import { Logger } from '../utils/logger';
const logger = new Logger({ prefix: '[test-template-validation]' });
async function testTemplateValidation() {
logger.info('Starting template validation tests...\n');
// Initialize database
const dbPath = path.join(process.cwd(), 'data', 'nodes.db');
if (!existsSync(dbPath)) {
logger.error('Database not found. Run npm run rebuild first.');
process.exit(1);
}
const db = await createDatabaseAdapter(dbPath);
const repository = new NodeRepository(db);
const templateRepository = new TemplateRepository(db);
const validator = new WorkflowValidator(
repository,
EnhancedConfigValidator
);
try {
// Get some templates to test
const templates = await templateRepository.getAllTemplates(20);
if (templates.length === 0) {
logger.warn('No templates found in database. Run npm run fetch:templates first.');
process.exit(0);
}
logger.info(`Found ${templates.length} templates to validate\n`);
const results = {
total: templates.length,
valid: 0,
invalid: 0,
withErrors: 0,
withWarnings: 0,
errorTypes: new Map<string, number>(),
warningTypes: new Map<string, number>()
};
// Validate each template
for (const template of templates) {
logger.info(`\n${'='.repeat(80)}`);
logger.info(`Validating: ${template.name} (ID: ${template.id})`);
logger.info(`Author: ${template.author_name} (@${template.author_username})`);
logger.info(`Views: ${template.views}`);
logger.info(`${'='.repeat(80)}\n`);
try {
const workflow = JSON.parse(template.workflow_json);
// Log workflow summary
logger.info(`Workflow summary:`);
logger.info(`- Nodes: ${workflow.nodes?.length || 0}`);
logger.info(`- Connections: ${Object.keys(workflow.connections || {}).length}`);
// Validate the workflow
const validationResult = await validator.validateWorkflow(workflow);
// Update statistics
if (validationResult.valid) {
results.valid++;
console.log('✅ VALID');
} else {
results.invalid++;
console.log('❌ INVALID');
}
if (validationResult.errors.length > 0) {
results.withErrors++;
console.log('\nErrors:');
validationResult.errors.forEach((error: any) => {
const errorMsg = typeof error.message === 'string' ? error.message : JSON.stringify(error.message);
const errorKey = errorMsg.substring(0, 50);
results.errorTypes.set(errorKey, (results.errorTypes.get(errorKey) || 0) + 1);
console.log(` - ${error.nodeName || 'workflow'}: ${errorMsg}`);
});
}
if (validationResult.warnings.length > 0) {
results.withWarnings++;
console.log('\nWarnings:');
validationResult.warnings.forEach((warning: any) => {
const warningKey = typeof warning.message === 'string'
? warning.message.substring(0, 50)
: JSON.stringify(warning.message).substring(0, 50);
results.warningTypes.set(warningKey, (results.warningTypes.get(warningKey) || 0) + 1);
console.log(` - ${warning.nodeName || 'workflow'}: ${
typeof warning.message === 'string' ? warning.message : JSON.stringify(warning.message)
}`);
});
}
if (validationResult.suggestions?.length > 0) {
console.log('\nSuggestions:');
validationResult.suggestions.forEach((suggestion: string) => {
console.log(` - ${suggestion}`);
});
}
console.log('\nStatistics:');
console.log(` - Total nodes: ${validationResult.statistics.totalNodes}`);
console.log(` - Enabled nodes: ${validationResult.statistics.enabledNodes}`);
console.log(` - Trigger nodes: ${validationResult.statistics.triggerNodes}`);
console.log(` - Valid connections: ${validationResult.statistics.validConnections}`);
console.log(` - Invalid connections: ${validationResult.statistics.invalidConnections}`);
console.log(` - Expressions validated: ${validationResult.statistics.expressionsValidated}`);
} catch (error) {
logger.error(`Failed to validate template ${template.id}:`, error);
results.invalid++;
}
}
// Print summary
console.log('\n' + '='.repeat(80));
console.log('VALIDATION SUMMARY');
console.log('='.repeat(80));
console.log(`Total templates tested: ${results.total}`);
console.log(`Valid workflows: ${results.valid} (${((results.valid / results.total) * 100).toFixed(1)}%)`);
console.log(`Invalid workflows: ${results.invalid} (${((results.invalid / results.total) * 100).toFixed(1)}%)`);
console.log(`Workflows with errors: ${results.withErrors}`);
console.log(`Workflows with warnings: ${results.withWarnings}`);
if (results.errorTypes.size > 0) {
console.log('\nMost common errors:');
const sortedErrors = Array.from(results.errorTypes.entries())
.sort((a, b) => b[1] - a[1])
.slice(0, 5);
sortedErrors.forEach(([error, count]) => {
console.log(` - "${error}..." (${count} times)`);
});
}
if (results.warningTypes.size > 0) {
console.log('\nMost common warnings:');
const sortedWarnings = Array.from(results.warningTypes.entries())
.sort((a, b) => b[1] - a[1])
.slice(0, 5);
sortedWarnings.forEach(([warning, count]) => {
console.log(` - "${warning}..." (${count} times)`);
});
}
} catch (error) {
logger.error('Failed to run template validation:', error);
process.exit(1);
} finally {
db.close();
}
}
// Run tests
testTemplateValidation().catch(error => {
logger.error('Test failed:', error);
process.exit(1);
});

View File

@@ -1,88 +0,0 @@
#!/usr/bin/env node
import { createDatabaseAdapter } from '../database/database-adapter';
import { TemplateService } from '../templates/template-service';
import * as fs from 'fs';
import * as path from 'path';
async function testTemplates() {
console.log('🧪 Testing template functionality...\n');
// Initialize database
const db = await createDatabaseAdapter('./data/nodes.db');
// Apply schema if needed
const schema = fs.readFileSync(path.join(__dirname, '../../src/database/schema.sql'), 'utf8');
db.exec(schema);
// Create service
const service = new TemplateService(db);
try {
// Get statistics
const stats = await service.getTemplateStats();
console.log('📊 Template Database Stats:');
console.log(` Total templates: ${stats.totalTemplates}`);
if (stats.totalTemplates === 0) {
console.log('\n⚠ No templates found in database!');
console.log(' Run "npm run fetch:templates" to populate the database.\n');
return;
}
console.log(` Average views: ${stats.averageViews}`);
console.log('\n🔝 Most used nodes in templates:');
stats.topUsedNodes.forEach((node: any, i: number) => {
console.log(` ${i + 1}. ${node.node} (${node.count} templates)`);
});
// Test search
console.log('\n🔍 Testing search for "webhook":');
const searchResults = await service.searchTemplates('webhook', 3);
searchResults.forEach((t: any) => {
console.log(` - ${t.name} (${t.views} views)`);
});
// Test node-based search
console.log('\n🔍 Testing templates with HTTP Request node:');
const httpTemplates = await service.listNodeTemplates(['n8n-nodes-base.httpRequest'], 3);
httpTemplates.forEach((t: any) => {
console.log(` - ${t.name} (${t.nodes.length} nodes)`);
});
// Test task-based search
console.log('\n🔍 Testing AI automation templates:');
const aiTemplates = await service.getTemplatesForTask('ai_automation');
aiTemplates.forEach((t: any) => {
console.log(` - ${t.name} by @${t.author.username}`);
});
// Get a specific template
if (searchResults.length > 0) {
const templateId = searchResults[0].id;
console.log(`\n📄 Getting template ${templateId} details...`);
const template = await service.getTemplate(templateId);
if (template) {
console.log(` Name: ${template.name}`);
console.log(` Nodes: ${template.nodes.join(', ')}`);
console.log(` Workflow has ${template.workflow.nodes.length} nodes`);
}
}
console.log('\n✅ All template tests passed!');
} catch (error) {
console.error('❌ Error during testing:', error);
}
// Close database
if ('close' in db && typeof db.close === 'function') {
db.close();
}
}
// Run if called directly
if (require.main === module) {
testTemplates().catch(console.error);
}
export { testTemplates };

View File

@@ -1,55 +0,0 @@
import { N8NDocumentationMCPServer } from '../mcp/server';
async function testToolsDocumentation() {
const server = new N8NDocumentationMCPServer();
console.log('=== Testing tools_documentation tool ===\n');
// Test 1: No parameters (quick reference)
console.log('1. Testing without parameters (quick reference):');
console.log('----------------------------------------');
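// executeTool is treated here as a direct entry point into the server's tool dispatch,
// taking a tool name plus an args object — no MCP transport is involved in this script.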
const quickRef = await server.executeTool('tools_documentation', {});
console.log(quickRef);
console.log('\n');
// Test 2: Overview with essentials depth
console.log('2. Testing overview with essentials:');
console.log('----------------------------------------');
const overviewEssentials = await server.executeTool('tools_documentation', { topic: 'overview' });
console.log(overviewEssentials);
console.log('\n');
// Test 3: Overview with full depth
console.log('3. Testing overview with full depth:');
console.log('----------------------------------------');
const overviewFull = await server.executeTool('tools_documentation', { topic: 'overview', depth: 'full' });
console.log(overviewFull.substring(0, 500) + '...\n');
// Test 4: Specific tool with essentials
console.log('4. Testing search_nodes with essentials:');
console.log('----------------------------------------');
const searchNodesEssentials = await server.executeTool('tools_documentation', { topic: 'search_nodes' });
console.log(searchNodesEssentials);
console.log('\n');
// Test 5: Specific tool with full documentation
console.log('5. Testing search_nodes with full depth:');
console.log('----------------------------------------');
const searchNodesFull = await server.executeTool('tools_documentation', { topic: 'search_nodes', depth: 'full' });
console.log(searchNodesFull.substring(0, 800) + '...\n');
// Test 6: Non-existent tool
console.log('6. Testing non-existent tool:');
console.log('----------------------------------------');
const nonExistent = await server.executeTool('tools_documentation', { topic: 'fake_tool' });
console.log(nonExistent);
console.log('\n');
// Test 7: Another tool example
console.log('7. Testing n8n_update_partial_workflow with essentials:');
console.log('----------------------------------------');
const updatePartial = await server.executeTool('tools_documentation', { topic: 'n8n_update_partial_workflow' });
console.log(updatePartial);
}
testToolsDocumentation().catch(console.error);

View File

@@ -1,276 +0,0 @@
/**
* Test script for transactional workflow diff operations
* Tests the two-pass processing approach
*/
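// "Two-pass" means the diff engine applies addNode operations in a first pass and
// connection/other operations in a second pass, so a request may list addConnection
// operations before the nodes they reference (exercised in Test 1 below).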
import { WorkflowDiffEngine } from '../services/workflow-diff-engine';
import { Workflow, WorkflowNode } from '../types/n8n-api';
import { WorkflowDiffRequest } from '../types/workflow-diff';
import { Logger } from '../utils/logger';
const logger = new Logger({ prefix: '[TestTransactionalDiff]' });
// Create a test workflow
const testWorkflow: Workflow = {
id: 'test-workflow-123',
name: 'Test Workflow',
active: false,
nodes: [
{
id: '1',
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
typeVersion: 2,
position: [200, 300],
parameters: {
path: '/test',
method: 'GET'
}
}
],
connections: {},
settings: {
executionOrder: 'v1'
},
tags: []
};
async function testAddNodesAndConnect() {
logger.info('Test 1: Add two nodes and connect them in one operation');
const engine = new WorkflowDiffEngine();
const request: WorkflowDiffRequest = {
id: testWorkflow.id!,
operations: [
// Add connections first (would fail in old implementation)
{
type: 'addConnection',
source: 'Webhook',
target: 'Process Data'
},
{
type: 'addConnection',
source: 'Process Data',
target: 'Send Email'
},
// Then add the nodes (two-pass will process these first)
{
type: 'addNode',
node: {
id: '2',
name: 'Process Data',
type: 'n8n-nodes-base.set',
typeVersion: 3,
position: [400, 300],
parameters: {
mode: 'manual',
fields: []
}
}
},
{
type: 'addNode',
node: {
id: '3',
name: 'Send Email',
type: 'n8n-nodes-base.emailSend',
typeVersion: 2.1,
position: [600, 300],
parameters: {
to: 'test@example.com',
subject: 'Test'
}
}
}
]
};
const result = await engine.applyDiff(testWorkflow, request);
if (result.success) {
logger.info('✅ Test passed! Operations applied successfully');
logger.info(`Message: ${result.message}`);
// Verify nodes were added
const workflow = result.workflow!;
const hasProcessData = workflow.nodes.some((n: WorkflowNode) => n.name === 'Process Data');
const hasSendEmail = workflow.nodes.some((n: WorkflowNode) => n.name === 'Send Email');
if (hasProcessData && hasSendEmail) {
logger.info('✅ Both nodes were added');
} else {
logger.error('❌ Nodes were not added correctly');
}
// Verify connections were made
const webhookConnections = workflow.connections['Webhook'];
const processConnections = workflow.connections['Process Data'];
if (webhookConnections && processConnections) {
logger.info('✅ Connections were established');
} else {
logger.error('❌ Connections were not established correctly');
}
} else {
logger.error('❌ Test failed!');
logger.error('Errors:', result.errors);
}
}
async function testOperationLimit() {
logger.info('\nTest 2: Operation limit (max 5)');
const engine = new WorkflowDiffEngine();
const request: WorkflowDiffRequest = {
id: testWorkflow.id!,
operations: [
{ type: 'addNode', node: { id: '101', name: 'Node1', type: 'n8n-nodes-base.set', typeVersion: 1, position: [400, 100], parameters: {} } },
{ type: 'addNode', node: { id: '102', name: 'Node2', type: 'n8n-nodes-base.set', typeVersion: 1, position: [400, 200], parameters: {} } },
{ type: 'addNode', node: { id: '103', name: 'Node3', type: 'n8n-nodes-base.set', typeVersion: 1, position: [400, 300], parameters: {} } },
{ type: 'addNode', node: { id: '104', name: 'Node4', type: 'n8n-nodes-base.set', typeVersion: 1, position: [400, 400], parameters: {} } },
{ type: 'addNode', node: { id: '105', name: 'Node5', type: 'n8n-nodes-base.set', typeVersion: 1, position: [400, 500], parameters: {} } },
{ type: 'addNode', node: { id: '106', name: 'Node6', type: 'n8n-nodes-base.set', typeVersion: 1, position: [400, 600], parameters: {} } }
]
};
const result = await engine.applyDiff(testWorkflow, request);
if (!result.success && result.errors?.[0]?.message?.includes('Too many operations')) {
logger.info('✅ Operation limit enforced correctly');
} else {
logger.error('❌ Operation limit not enforced');
}
}
async function testValidateOnly() {
logger.info('\nTest 3: Validate only mode');
const engine = new WorkflowDiffEngine();
const request: WorkflowDiffRequest = {
id: testWorkflow.id!,
operations: [
// Test with connection first - two-pass should handle this
{
type: 'addConnection',
source: 'Webhook',
target: 'HTTP Request'
},
{
type: 'addNode',
node: {
id: '4',
name: 'HTTP Request',
type: 'n8n-nodes-base.httpRequest',
typeVersion: 4.2,
position: [400, 300],
parameters: {
method: 'GET',
url: 'https://api.example.com'
}
}
},
{
type: 'updateSettings',
settings: {
saveDataErrorExecution: 'all'
}
}
],
validateOnly: true
};
const result = await engine.applyDiff(testWorkflow, request);
if (result.success) {
logger.info('✅ Validate-only mode works correctly');
logger.info(`Validation message: ${result.message}`);
// Verify original workflow wasn't modified
if (testWorkflow.nodes.length === 1) {
logger.info('✅ Original workflow unchanged');
} else {
logger.error('❌ Original workflow was modified in validate-only mode');
}
} else {
logger.error('❌ Validate-only mode failed');
logger.error('Errors:', result.errors);
}
}
async function testMixedOperations() {
logger.info('\nTest 4: Mixed operations (update existing, add new, connect)');
const engine = new WorkflowDiffEngine();
const request: WorkflowDiffRequest = {
id: testWorkflow.id!,
operations: [
// Update existing node
{
type: 'updateNode',
nodeName: 'Webhook',
changes: {
'parameters.path': '/updated-path'
}
},
// Add new node
{
type: 'addNode',
node: {
id: '5',
name: 'Logger',
type: 'n8n-nodes-base.n8n',
typeVersion: 1,
position: [400, 300],
parameters: {
operation: 'log',
level: 'info'
}
}
},
// Connect them
{
type: 'addConnection',
source: 'Webhook',
target: 'Logger'
},
// Update workflow settings
{
type: 'updateSettings',
settings: {
saveDataErrorExecution: 'all'
}
}
]
};
const result = await engine.applyDiff(testWorkflow, request);
if (result.success) {
logger.info('✅ Mixed operations applied successfully');
logger.info(`Message: ${result.message}`);
} else {
logger.error('❌ Mixed operations failed');
logger.error('Errors:', result.errors);
}
}
// Run all tests
async function runTests() {
logger.info('Starting transactional diff tests...\n');
try {
await testAddNodesAndConnect();
await testOperationLimit();
await testValidateOnly();
await testMixedOperations();
logger.info('\n✅ All tests completed!');
} catch (error) {
logger.error('Test suite failed:', error);
}
}
// Run tests if this file is executed directly
if (require.main === module) {
runTests().catch(console.error);
}
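// Editor's sketch (not part of the original script): a minimal illustration of the two-pass
// ordering these tests rely on. It assumes only that node operations must be applied before
// connection/metadata operations; the helper names here are hypothetical and do not describe
// the real WorkflowDiffEngine API.
type SketchOperation = { type: string; [key: string]: unknown };
function orderOperationsTwoPass(operations: SketchOperation[]): SketchOperation[] {
  const nodeOps = operations.filter(op =>
    op.type === 'addNode' || op.type === 'removeNode' || op.type === 'updateNode'
  );
  const otherOps = operations.filter(op => !nodeOps.includes(op));
  // Pass 1 creates/updates nodes; pass 2 wires connections and settings, so an addConnection
  // that references a node added later in the same request still succeeds.
  return [...nodeOps, ...otherOps];
}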

View File

@@ -1,114 +0,0 @@
#!/usr/bin/env node
/**
* Debug test for n8n_update_partial_workflow
* Tests the actual update path to identify the issue
*/
import { config } from 'dotenv';
import { logger } from '../utils/logger';
import { isN8nApiConfigured } from '../config/n8n-api';
import { handleUpdatePartialWorkflow } from '../mcp/handlers-workflow-diff';
import { getN8nApiClient } from '../mcp/handlers-n8n-manager';
// Load environment variables
config();
async function testUpdatePartialDebug() {
logger.info('Debug test for n8n_update_partial_workflow...');
// Check if API is configured
if (!isN8nApiConfigured()) {
logger.warn('n8n API not configured. This test requires a real n8n instance.');
logger.info('Set N8N_API_URL and N8N_API_KEY to test.');
return;
}
const client = getN8nApiClient();
if (!client) {
logger.error('Failed to create n8n API client');
return;
}
try {
// First, create a test workflow
logger.info('\n=== Creating test workflow ===');
const testWorkflow = {
name: `Test Partial Update ${Date.now()}`,
nodes: [
{
id: '1',
name: 'Start',
type: 'n8n-nodes-base.start',
typeVersion: 1,
position: [250, 300] as [number, number],
parameters: {}
},
{
id: '2',
name: 'Set',
type: 'n8n-nodes-base.set',
typeVersion: 3,
position: [450, 300] as [number, number],
parameters: {
mode: 'manual',
fields: {
values: [
{ name: 'message', value: 'Initial value' }
]
}
}
}
],
connections: {
'Start': {
main: [[{ node: 'Set', type: 'main', index: 0 }]]
}
},
settings: {
executionOrder: 'v1' as 'v1'
}
};
const createdWorkflow = await client.createWorkflow(testWorkflow);
logger.info('Created workflow:', {
id: createdWorkflow.id,
name: createdWorkflow.name
});
// Now test partial update WITHOUT validateOnly
logger.info('\n=== Testing partial update (NO validateOnly) ===');
const updateRequest = {
id: createdWorkflow.id!,
operations: [
{
type: 'updateName',
name: 'Updated via Partial Update'
}
]
// Note: NO validateOnly flag
};
logger.info('Update request:', JSON.stringify(updateRequest, null, 2));
const result = await handleUpdatePartialWorkflow(updateRequest);
logger.info('Update result:', JSON.stringify(result, null, 2));
// Cleanup - delete test workflow
if (createdWorkflow.id) {
logger.info('\n=== Cleanup ===');
await client.deleteWorkflow(createdWorkflow.id);
logger.info('Deleted test workflow');
}
} catch (error) {
logger.error('Test failed:', error);
}
}
// Run test
testUpdatePartialDebug().catch(error => {
logger.error('Unhandled error:', error);
process.exit(1);
});
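// Editor's sketch (not part of the original script): the cleanup above only runs when the
// update succeeds. One possible hardening is to delete the test workflow in a finally block
// so it is removed even if the partial update throws. Parameter types are deliberately loose;
// this reuses only the client and logger already imported above.
async function withTemporaryWorkflow(
  client: NonNullable<ReturnType<typeof getN8nApiClient>>,
  workflow: any,
  run: (workflowId: string) => Promise<void>
): Promise<void> {
  const created = await client.createWorkflow(workflow);
  try {
    if (created.id) {
      await run(created.id);
    }
  } finally {
    // Runs whether run() resolved or rejected.
    if (created.id) {
      await client.deleteWorkflow(created.id);
      logger.info('Deleted test workflow', { id: created.id });
    }
  }
}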

View File

@@ -1,90 +0,0 @@
import { NodeParser } from '../parsers/node-parser';
// Test script to verify version extraction from different node types
async function testVersionExtraction() {
console.log('Testing version extraction from different node types...\n');
const parser = new NodeParser();
// Test cases
const testCases = [
{
name: 'Gmail Trigger (version array)',
nodeType: 'nodes-base.gmailTrigger',
expectedVersion: '1.2',
expectedVersioned: true
},
{
name: 'HTTP Request (VersionedNodeType)',
nodeType: 'nodes-base.httpRequest',
expectedVersion: '4.2',
expectedVersioned: true
},
{
name: 'Code (version array)',
nodeType: 'nodes-base.code',
expectedVersion: '2',
expectedVersioned: true
}
];
// Load nodes from packages
const basePackagePath = process.cwd() + '/node_modules/n8n/node_modules/n8n-nodes-base';
for (const testCase of testCases) {
console.log(`\nTesting: ${testCase.name}`);
console.log(`Node Type: ${testCase.nodeType}`);
try {
// Find the node file
const nodeName = testCase.nodeType.split('.')[1];
// Try different paths
const possiblePaths = [
`${basePackagePath}/dist/nodes/${nodeName}.node.js`,
`${basePackagePath}/dist/nodes/Google/Gmail/GmailTrigger.node.js`,
`${basePackagePath}/dist/nodes/HttpRequest/HttpRequest.node.js`,
`${basePackagePath}/dist/nodes/Code/Code.node.js`
];
let nodeClass = null;
for (const path of possiblePaths) {
try {
const module = require(path);
nodeClass = module[Object.keys(module)[0]];
if (nodeClass) break;
} catch (e) {
// Try next path
}
}
if (!nodeClass) {
console.log('❌ Could not load node');
continue;
}
// Parse the node
const parsed = parser.parse(nodeClass, 'n8n-nodes-base');
console.log(`Loaded node: ${parsed.displayName} (${parsed.nodeType})`);
console.log(`Extracted version: ${parsed.version}`);
console.log(`Is versioned: ${parsed.isVersioned}`);
console.log(`Expected version: ${testCase.expectedVersion}`);
console.log(`Expected versioned: ${testCase.expectedVersioned}`);
if (parsed.version === testCase.expectedVersion &&
parsed.isVersioned === testCase.expectedVersioned) {
console.log('✅ PASS');
} else {
console.log('❌ FAIL');
}
} catch (error) {
console.log(`❌ Error: ${error instanceof Error ? error.message : String(error)}`);
}
}
}
// Run the test
testVersionExtraction().catch(console.error);
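// Editor's sketch (not part of the original script): the assertions above imply a
// normalisation of declared versions — report the highest version and whether the node is
// versioned. This assumes a simplified description shape (version?: number | number[])
// rather than the full n8n typings, and is not the parser's actual implementation.
interface SketchNodeDescription {
  version?: number | number[];
}
function sketchExtractVersion(description: SketchNodeDescription): { version: string; isVersioned: boolean } {
  const declared = description.version;
  if (Array.isArray(declared) && declared.length > 0) {
    // e.g. [1, 1.1, 1.2] -> version '1.2', isVersioned true
    return { version: String(Math.max(...declared)), isVersioned: declared.length > 1 };
  }
  if (typeof declared === 'number') {
    return { version: String(declared), isVersioned: false };
  }
  return { version: '1', isVersioned: false };
}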

View File

@@ -1,374 +0,0 @@
#!/usr/bin/env node
/**
* Test script for workflow diff engine
* Tests various diff operations and edge cases
*/
import { WorkflowDiffEngine } from '../services/workflow-diff-engine';
import { WorkflowDiffRequest } from '../types/workflow-diff';
import { Workflow } from '../types/n8n-api';
import { Logger } from '../utils/logger';
const logger = new Logger({ prefix: '[test-workflow-diff]' });
// Sample workflow for testing
const sampleWorkflow: Workflow = {
id: 'test-workflow-123',
name: 'Test Workflow',
nodes: [
{
id: 'webhook_1',
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
typeVersion: 1.1,
position: [200, 200],
parameters: {
path: 'test-webhook',
method: 'GET'
}
},
{
id: 'set_1',
name: 'Set',
type: 'n8n-nodes-base.set',
typeVersion: 3,
position: [400, 200],
parameters: {
mode: 'manual',
fields: {
values: [
{ name: 'message', value: 'Hello World' }
]
}
}
}
],
connections: {
'Webhook': {
main: [[{ node: 'Set', type: 'main', index: 0 }]]
}
},
settings: {
executionOrder: 'v1',
saveDataSuccessExecution: 'all'
},
tags: ['test', 'demo']
};
async function testAddNode() {
console.log('\n=== Testing Add Node Operation ===');
const engine = new WorkflowDiffEngine();
const request: WorkflowDiffRequest = {
id: 'test-workflow-123',
operations: [
{
type: 'addNode',
description: 'Add HTTP Request node',
node: {
name: 'HTTP Request',
type: 'n8n-nodes-base.httpRequest',
position: [600, 200],
parameters: {
url: 'https://api.example.com/data',
method: 'GET'
}
}
}
]
};
const result = await engine.applyDiff(sampleWorkflow, request);
if (result.success) {
console.log('✅ Add node successful');
console.log(` - Nodes count: ${result.workflow!.nodes.length}`);
console.log(` - New node: ${result.workflow!.nodes[2].name}`);
} else {
console.error('❌ Add node failed:', result.errors);
}
}
async function testRemoveNode() {
console.log('\n=== Testing Remove Node Operation ===');
const engine = new WorkflowDiffEngine();
const request: WorkflowDiffRequest = {
id: 'test-workflow-123',
operations: [
{
type: 'removeNode',
description: 'Remove Set node',
nodeName: 'Set'
}
]
};
const result = await engine.applyDiff(sampleWorkflow, request);
if (result.success) {
console.log('✅ Remove node successful');
console.log(` - Nodes count: ${result.workflow!.nodes.length}`);
console.log(` - Connections cleaned: ${Object.keys(result.workflow!.connections).length}`);
} else {
console.error('❌ Remove node failed:', result.errors);
}
}
async function testUpdateNode() {
console.log('\n=== Testing Update Node Operation ===');
const engine = new WorkflowDiffEngine();
const request: WorkflowDiffRequest = {
id: 'test-workflow-123',
operations: [
{
type: 'updateNode',
description: 'Update webhook path',
nodeName: 'Webhook',
changes: {
'parameters.path': 'new-webhook-path',
'parameters.method': 'POST'
}
}
]
};
const result = await engine.applyDiff(sampleWorkflow, request);
if (result.success) {
console.log('✅ Update node successful');
const updatedNode = result.workflow!.nodes.find((n: any) => n.name === 'Webhook');
console.log(` - New path: ${updatedNode!.parameters.path}`);
console.log(` - New method: ${updatedNode!.parameters.method}`);
} else {
console.error('❌ Update node failed:', result.errors);
}
}
async function testAddConnection() {
console.log('\n=== Testing Add Connection Operation ===');
// First add a node to connect to
const workflowWithExtraNode = JSON.parse(JSON.stringify(sampleWorkflow));
workflowWithExtraNode.nodes.push({
id: 'email_1',
name: 'Send Email',
type: 'n8n-nodes-base.emailSend',
typeVersion: 2,
position: [600, 200],
parameters: {}
});
const engine = new WorkflowDiffEngine();
const request: WorkflowDiffRequest = {
id: 'test-workflow-123',
operations: [
{
type: 'addConnection',
description: 'Connect Set to Send Email',
source: 'Set',
target: 'Send Email'
}
]
};
const result = await engine.applyDiff(workflowWithExtraNode, request);
if (result.success) {
console.log('✅ Add connection successful');
const setConnections = result.workflow!.connections['Set'];
console.log(` - Connection added: ${JSON.stringify(setConnections)}`);
} else {
console.error('❌ Add connection failed:', result.errors);
}
}
async function testMultipleOperations() {
console.log('\n=== Testing Multiple Operations ===');
const engine = new WorkflowDiffEngine();
const request: WorkflowDiffRequest = {
id: 'test-workflow-123',
operations: [
{
type: 'updateName',
name: 'Updated Test Workflow'
},
{
type: 'addNode',
node: {
name: 'If',
type: 'n8n-nodes-base.if',
position: [400, 400],
parameters: {}
}
},
{
type: 'disableNode',
nodeName: 'Set'
},
{
type: 'addTag',
tag: 'updated'
}
]
};
const result = await engine.applyDiff(sampleWorkflow, request);
if (result.success) {
console.log('✅ Multiple operations successful');
console.log(` - New name: ${result.workflow!.name}`);
console.log(` - Operations applied: ${result.operationsApplied}`);
console.log(` - Node count: ${result.workflow!.nodes.length}`);
console.log(` - Tags: ${result.workflow!.tags?.join(', ')}`);
} else {
console.error('❌ Multiple operations failed:', result.errors);
}
}
async function testValidationOnly() {
console.log('\n=== Testing Validation Only ===');
const engine = new WorkflowDiffEngine();
const request: WorkflowDiffRequest = {
id: 'test-workflow-123',
operations: [
{
type: 'addNode',
node: {
name: 'Webhook', // Duplicate name - should fail validation
type: 'n8n-nodes-base.webhook',
position: [600, 400]
}
}
],
validateOnly: true
};
const result = await engine.applyDiff(sampleWorkflow, request);
console.log(` - Validation result: ${result.success ? '✅ Valid' : '❌ Invalid'}`);
if (!result.success) {
console.log(` - Error: ${result.errors![0].message}`);
} else {
console.log(` - Message: ${result.message}`);
}
}
async function testInvalidOperations() {
console.log('\n=== Testing Invalid Operations ===');
const engine = new WorkflowDiffEngine();
// Test 1: Invalid node type
console.log('\n1. Testing invalid node type:');
let result = await engine.applyDiff(sampleWorkflow, {
id: 'test-workflow-123',
operations: [{
type: 'addNode',
node: {
name: 'Bad Node',
type: 'webhook', // Missing package prefix
position: [600, 400]
}
}]
});
console.log(` - Result: ${result.success ? '✅' : '❌'} ${result.errors?.[0]?.message || 'Success'}`);
// Test 2: Remove non-existent node
console.log('\n2. Testing remove non-existent node:');
result = await engine.applyDiff(sampleWorkflow, {
id: 'test-workflow-123',
operations: [{
type: 'removeNode',
nodeName: 'Non Existent Node'
}]
});
console.log(` - Result: ${result.success ? '✅' : '❌'} ${result.errors?.[0]?.message || 'Success'}`);
// Test 3: Invalid connection
console.log('\n3. Testing invalid connection:');
result = await engine.applyDiff(sampleWorkflow, {
id: 'test-workflow-123',
operations: [{
type: 'addConnection',
source: 'Webhook',
target: 'Non Existent Node'
}]
});
console.log(` - Result: ${result.success ? '✅' : '❌'} ${result.errors?.[0]?.message || 'Success'}`);
}
async function testNodeReferenceByIdAndName() {
console.log('\n=== Testing Node Reference by ID and Name ===');
const engine = new WorkflowDiffEngine();
// Test update by ID
console.log('\n1. Update node by ID:');
let result = await engine.applyDiff(sampleWorkflow, {
id: 'test-workflow-123',
operations: [{
type: 'updateNode',
nodeId: 'webhook_1',
changes: {
'parameters.path': 'updated-by-id'
}
}]
});
if (result.success) {
const node = result.workflow!.nodes.find((n: any) => n.id === 'webhook_1');
console.log(` - ✅ Success: path = ${node!.parameters.path}`);
} else {
console.log(` - ❌ Failed: ${result.errors![0].message}`);
}
// Test update by name
console.log('\n2. Update node by name:');
result = await engine.applyDiff(sampleWorkflow, {
id: 'test-workflow-123',
operations: [{
type: 'updateNode',
nodeName: 'Webhook',
changes: {
'parameters.path': 'updated-by-name'
}
}]
});
if (result.success) {
const node = result.workflow!.nodes.find((n: any) => n.name === 'Webhook');
console.log(` - ✅ Success: path = ${node!.parameters.path}`);
} else {
console.log(` - ❌ Failed: ${result.errors![0].message}`);
}
}
// Run all tests
async function runTests() {
try {
console.log('🧪 Running Workflow Diff Engine Tests...\n');
await testAddNode();
await testRemoveNode();
await testUpdateNode();
await testAddConnection();
await testMultipleOperations();
await testValidationOnly();
await testInvalidOperations();
await testNodeReferenceByIdAndName();
console.log('\n✅ All tests completed!');
} catch (error) {
console.error('\n❌ Test failed with error:', error);
process.exit(1);
}
}
// Run tests if this is the main module
if (require.main === module) {
runTests();
}
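// Editor's sketch (not part of the original script): these tests share one sampleWorkflow
// object and rely on applyDiff not mutating its input (testAddConnection clones manually via
// JSON round-tripping). A small helper could make that intent explicit for every test.
function cloneWorkflow(workflow: Workflow): Workflow {
  // structuredClone exists in Node 17+; fall back to JSON round-tripping otherwise.
  const clone = (globalThis as any).structuredClone;
  return typeof clone === 'function' ? clone(workflow) : JSON.parse(JSON.stringify(workflow));
}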

View File

@@ -1,272 +0,0 @@
#!/usr/bin/env node
/**
* Test script for workflow validation features
* Tests the new workflow validation tools with various scenarios
*/
import { existsSync } from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { dirname } from 'path';
import { NodeRepository } from '../database/node-repository';
import { createDatabaseAdapter } from '../database/database-adapter';
import { WorkflowValidator } from '../services/workflow-validator';
import { EnhancedConfigValidator } from '../services/enhanced-config-validator';
import { Logger } from '../utils/logger';
const logger = new Logger({ prefix: '[test-workflow-validation]' });
// Test workflows
const VALID_WORKFLOW = {
name: 'Test Valid Workflow',
nodes: [
{
id: '1',
name: 'Schedule Trigger',
type: 'nodes-base.scheduleTrigger',
position: [250, 300] as [number, number],
parameters: {
rule: {
interval: [{ field: 'hours', hoursInterval: 1 }]
}
}
},
{
id: '2',
name: 'HTTP Request',
type: 'nodes-base.httpRequest',
position: [450, 300] as [number, number],
parameters: {
url: 'https://api.example.com/data',
method: 'GET'
}
},
{
id: '3',
name: 'Set',
type: 'nodes-base.set',
position: [650, 300] as [number, number],
parameters: {
values: {
string: [
{
name: 'status',
value: '={{ $json.status }}'
}
]
}
}
}
],
connections: {
'Schedule Trigger': {
main: [[{ node: 'HTTP Request', type: 'main', index: 0 }]]
},
'HTTP Request': {
main: [[{ node: 'Set', type: 'main', index: 0 }]]
}
}
};
const WORKFLOW_WITH_CYCLE = {
name: 'Workflow with Cycle',
nodes: [
{
id: '1',
name: 'Start',
type: 'nodes-base.start',
position: [250, 300] as [number, number],
parameters: {}
},
{
id: '2',
name: 'Node A',
type: 'nodes-base.set',
position: [450, 300] as [number, number],
parameters: { values: { string: [] } }
},
{
id: '3',
name: 'Node B',
type: 'nodes-base.set',
position: [650, 300] as [number, number],
parameters: { values: { string: [] } }
}
],
connections: {
'Start': {
main: [[{ node: 'Node A', type: 'main', index: 0 }]]
},
'Node A': {
main: [[{ node: 'Node B', type: 'main', index: 0 }]]
},
'Node B': {
main: [[{ node: 'Node A', type: 'main', index: 0 }]] // Creates cycle
}
}
};
const WORKFLOW_WITH_INVALID_EXPRESSION = {
name: 'Workflow with Invalid Expression',
nodes: [
{
id: '1',
name: 'Webhook',
type: 'nodes-base.webhook',
position: [250, 300] as [number, number],
parameters: {
path: 'test-webhook'
}
},
{
id: '2',
name: 'Set Data',
type: 'nodes-base.set',
position: [450, 300] as [number, number],
parameters: {
values: {
string: [
{
name: 'invalidExpression',
value: '={{ json.field }}' // Missing $ prefix
},
{
name: 'nestedExpression',
value: '={{ {{ $json.field }} }}' // Nested expressions not allowed
},
{
name: 'nodeReference',
value: '={{ $node["Non Existent Node"].json.data }}'
}
]
}
}
}
],
connections: {
'Webhook': {
main: [[{ node: 'Set Data', type: 'main', index: 0 }]]
}
}
};
const WORKFLOW_WITH_ORPHANED_NODE = {
name: 'Workflow with Orphaned Node',
nodes: [
{
id: '1',
name: 'Schedule Trigger',
type: 'nodes-base.scheduleTrigger',
position: [250, 300] as [number, number],
parameters: {
rule: { interval: [{ field: 'hours', hoursInterval: 1 }] }
}
},
{
id: '2',
name: 'HTTP Request',
type: 'nodes-base.httpRequest',
position: [450, 300] as [number, number],
parameters: {
url: 'https://api.example.com',
method: 'GET'
}
},
{
id: '3',
name: 'Orphaned Node',
type: 'nodes-base.set',
position: [450, 500] as [number, number],
parameters: {
values: { string: [] }
}
}
],
connections: {
'Schedule Trigger': {
main: [[{ node: 'HTTP Request', type: 'main', index: 0 }]]
}
// Orphaned Node has no connections
}
};
async function testWorkflowValidation() {
logger.info('Starting workflow validation tests...\n');
// Initialize database
const dbPath = path.join(process.cwd(), 'data', 'nodes.db');
if (!existsSync(dbPath)) {
logger.error('Database not found. Run npm run rebuild first.');
process.exit(1);
}
const db = await createDatabaseAdapter(dbPath);
const repository = new NodeRepository(db);
const validator = new WorkflowValidator(
repository,
EnhancedConfigValidator
);
// Test 1: Valid workflow
logger.info('Test 1: Validating a valid workflow');
const validResult = await validator.validateWorkflow(VALID_WORKFLOW);
console.log('Valid workflow result:', JSON.stringify(validResult, null, 2));
console.log('---\n');
// Test 2: Workflow with cycle
logger.info('Test 2: Validating workflow with cycle');
const cycleResult = await validator.validateWorkflow(WORKFLOW_WITH_CYCLE);
console.log('Cycle workflow result:', JSON.stringify(cycleResult, null, 2));
console.log('---\n');
// Test 3: Workflow with invalid expressions
logger.info('Test 3: Validating workflow with invalid expressions');
const expressionResult = await validator.validateWorkflow(WORKFLOW_WITH_INVALID_EXPRESSION);
console.log('Invalid expression result:', JSON.stringify(expressionResult, null, 2));
console.log('---\n');
// Test 4: Workflow with orphaned node
logger.info('Test 4: Validating workflow with orphaned node');
const orphanedResult = await validator.validateWorkflow(WORKFLOW_WITH_ORPHANED_NODE);
console.log('Orphaned node result:', JSON.stringify(orphanedResult, null, 2));
console.log('---\n');
// Test 5: Connection-only validation
logger.info('Test 5: Testing connection-only validation');
const connectionOnlyResult = await validator.validateWorkflow(WORKFLOW_WITH_CYCLE, {
validateNodes: false,
validateConnections: true,
validateExpressions: false
});
console.log('Connection-only result:', JSON.stringify(connectionOnlyResult, null, 2));
console.log('---\n');
// Test 6: Expression-only validation
logger.info('Test 6: Testing expression-only validation');
const expressionOnlyResult = await validator.validateWorkflow(WORKFLOW_WITH_INVALID_EXPRESSION, {
validateNodes: false,
validateConnections: false,
validateExpressions: true
});
console.log('Expression-only result:', JSON.stringify(expressionOnlyResult, null, 2));
console.log('---\n');
// Test summary
logger.info('Test Summary:');
console.log('✓ Valid workflow:', validResult.valid ? 'PASSED' : 'FAILED');
console.log('✓ Cycle detection:', !cycleResult.valid ? 'PASSED' : 'FAILED');
console.log('✓ Expression validation:', !expressionResult.valid ? 'PASSED' : 'FAILED');
console.log('✓ Orphaned node detection:', orphanedResult.warnings.length > 0 ? 'PASSED' : 'FAILED');
console.log('✓ Connection-only validation:', connectionOnlyResult.errors.length > 0 ? 'PASSED' : 'FAILED');
console.log('✓ Expression-only validation:', expressionOnlyResult.errors.length > 0 ? 'PASSED' : 'FAILED');
// Close database
db.close();
}
// Run tests
testWorkflowValidation().catch(error => {
logger.error('Test failed:', error);
process.exit(1);
});
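// Editor's sketch (not part of the original script): a minimal illustration of the kind of
// cycle check WORKFLOW_WITH_CYCLE is designed to trigger — a DFS over the connections map.
// The real WorkflowValidator's implementation is not shown in this script.
function sketchHasCycle(connections: Record<string, { main?: Array<Array<{ node: string }>> }>): boolean {
  const visiting = new Set<string>();
  const finished = new Set<string>();
  const visit = (name: string): boolean => {
    if (finished.has(name)) return false;
    if (visiting.has(name)) return true; // back-edge found => cycle
    visiting.add(name);
    const targets = (connections[name]?.main ?? []).flat().map(c => c.node);
    const cyclic = targets.some(target => visit(target));
    visiting.delete(name);
    finished.add(name);
    return cyclic;
  };
  return Object.keys(connections).some(name => visit(name));
}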

View File

@@ -1,281 +0,0 @@
import { describe, it, expect, vi, beforeEach, afterEach, beforeAll } from 'vitest';
import { SingleSessionHTTPServer } from '../http-server-single-session';
import express from 'express';
import { ConsoleManager } from '../utils/console-manager';
// Mock express Request and Response
const createMockRequest = (body: any = {}): express.Request => {
// Create a mock readable stream for the request body
const { Readable } = require('stream');
const bodyString = JSON.stringify(body);
const stream = new Readable({
read() {}
});
// Push the body data and signal end
setTimeout(() => {
stream.push(bodyString);
stream.push(null);
}, 0);
const req: any = Object.assign(stream, {
body,
headers: {
authorization: `Bearer ${process.env.AUTH_TOKEN || 'test-token'}`,
'content-type': 'application/json',
'content-length': bodyString.length.toString()
},
method: 'POST',
path: '/mcp',
ip: '127.0.0.1',
get: (header: string) => {
if (header === 'user-agent') return 'test-agent';
if (header === 'content-length') return bodyString.length.toString();
if (header === 'content-type') return 'application/json';
return req.headers[header.toLowerCase()];
}
});
return req;
};
const createMockResponse = (): express.Response => {
const { Writable } = require('stream');
const chunks: Buffer[] = [];
const stream = new Writable({
write(chunk: any, encoding: string, callback: Function) {
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
callback();
}
});
const res: any = Object.assign(stream, {
statusCode: 200,
headers: {} as any,
body: null as any,
headersSent: false,
chunks,
status: function(code: number) {
this.statusCode = code;
return this;
},
json: function(data: any) {
this.body = data;
this.headersSent = true;
const jsonStr = JSON.stringify(data);
stream.write(jsonStr);
stream.end();
return this;
},
setHeader: function(name: string, value: string) {
this.headers[name] = value;
return this;
},
writeHead: function(statusCode: number, headers?: any) {
this.statusCode = statusCode;
if (headers) {
Object.assign(this.headers, headers);
}
this.headersSent = true;
return this;
},
end: function(data?: any) {
if (data) {
stream.write(data);
}
// Parse the accumulated chunks as the body
if (chunks.length > 0) {
const fullBody = Buffer.concat(chunks).toString();
try {
this.body = JSON.parse(fullBody);
} catch {
this.body = fullBody;
}
}
stream.end();
return this;
}
});
return res;
};
describe('SingleSessionHTTPServer', () => {
let server: SingleSessionHTTPServer;
beforeAll(() => {
process.env.AUTH_TOKEN = 'test-token';
process.env.MCP_MODE = 'http';
});
beforeEach(() => {
server = new SingleSessionHTTPServer();
});
afterEach(async () => {
await server.shutdown();
});
describe('Console Management', () => {
it('should silence console during request handling', async () => {
// Set MCP_MODE to http to enable console silencing
const originalMode = process.env.MCP_MODE;
process.env.MCP_MODE = 'http';
// Save the original console.log
const originalLog = console.log;
// Track if console methods were called
let logCalled = false;
const trackingLog = (...args: any[]) => {
logCalled = true;
originalLog(...args); // Call original for debugging
};
// Replace console.log BEFORE creating ConsoleManager
console.log = trackingLog;
// Now create console manager which will capture our tracking function
const consoleManager = new ConsoleManager();
// Test console is silenced during operation
await consoleManager.wrapOperation(async () => {
// Reset the flag
logCalled = false;
// This should not actually call our tracking function
console.log('This should not appear');
expect(logCalled).toBe(false);
});
// After operation, console should be restored to our tracking function
logCalled = false;
console.log('This should appear');
expect(logCalled).toBe(true);
// Restore everything
console.log = originalLog;
process.env.MCP_MODE = originalMode;
});
it('should handle errors and still restore console', async () => {
const consoleManager = new ConsoleManager();
const originalError = console.error;
try {
await consoleManager.wrapOperation(() => {
throw new Error('Test error');
});
} catch (error) {
// Expected error
}
// Verify console was restored
expect(console.error).toBe(originalError);
});
});
describe('Session Management', () => {
it('should create a single session on first request', async () => {
const sessionInfoBefore = server.getSessionInfo();
expect(sessionInfoBefore.active).toBe(false);
// Since handleRequest would hang with our mocks,
// we'll test the session info functionality directly
// The actual request handling is an integration test concern
// Test that we can get session info when no session exists
expect(sessionInfoBefore).toEqual({ active: false });
});
it('should reuse the same session for multiple requests', async () => {
// This is tested implicitly by the SingleSessionHTTPServer design
// which always returns 'single-session' as the sessionId
const sessionInfo = server.getSessionInfo();
// If there was a session, it would always have the same ID
if (sessionInfo.active) {
expect(sessionInfo.sessionId).toBe('single-session');
}
});
it('should handle authentication correctly', async () => {
// Authentication is handled by the Express middleware in the actual server
// The handleRequest method assumes auth has already been validated
// This is more of an integration test concern
// Test that the server was initialized with auth token
expect(server).toBeDefined();
// The constructor would have thrown if auth token was invalid
});
it('should handle invalid auth token', async () => {
// This test would need to test the Express route handler, not handleRequest
// handleRequest assumes authentication has already been performed
// This is covered by integration tests
expect(server).toBeDefined();
});
});
describe('Session Expiry', () => {
it('should detect expired sessions', () => {
// This would require mocking timers or exposing internal state
// For now, we'll test the concept
const sessionInfo = server.getSessionInfo();
expect(sessionInfo.active).toBe(false);
});
});
describe('Error Handling', () => {
it('should handle server errors gracefully', async () => {
// Error handling is tested by the handleRequest method's try-catch block
// Since we can't easily test handleRequest with mocks (it uses streams),
// we'll verify the server's error handling setup
// Test that shutdown method exists and can be called
expect(server.shutdown).toBeDefined();
expect(typeof server.shutdown).toBe('function');
// The actual error handling is covered by integration tests
});
});
});
describe('ConsoleManager', () => {
it('should only silence in HTTP mode', () => {
const originalMode = process.env.MCP_MODE;
process.env.MCP_MODE = 'stdio';
const consoleManager = new ConsoleManager();
const originalLog = console.log;
consoleManager.silence();
expect(console.log).toBe(originalLog); // Should not change
process.env.MCP_MODE = originalMode;
});
it('should track silenced state', () => {
process.env.MCP_MODE = 'http';
const consoleManager = new ConsoleManager();
expect(consoleManager.isActive).toBe(false);
consoleManager.silence();
expect(consoleManager.isActive).toBe(true);
consoleManager.restore();
expect(consoleManager.isActive).toBe(false);
});
it('should handle nested calls correctly', () => {
process.env.MCP_MODE = 'http';
const consoleManager = new ConsoleManager();
const originalLog = console.log;
consoleManager.silence();
consoleManager.silence(); // Second call should be no-op
expect(consoleManager.isActive).toBe(true);
consoleManager.restore();
expect(console.log).toBe(originalLog);
});
});
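// Editor's sketch (not part of the original test file): a stripped-down illustration of the
// silence/restore contract exercised above — console.log is swapped for a no-op only in HTTP
// mode and the original is restored afterwards. This is not the project's actual
// ConsoleManager implementation, just the behaviour the assertions describe.
class SketchConsoleManager {
  private original: typeof console.log | null = null;
  get isActive(): boolean {
    return this.original !== null;
  }
  silence(): void {
    if (process.env.MCP_MODE !== 'http' || this.original) return;
    this.original = console.log;
    console.log = () => {};
  }
  restore(): void {
    if (!this.original) return;
    console.log = this.original;
    this.original = null;
  }
  async wrapOperation<T>(operation: () => Promise<T> | T): Promise<T> {
    this.silence();
    try {
      return await operation();
    } finally {
      // Console is restored even when the wrapped operation throws.
      this.restore();
    }
  }
}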