test: add Phase 4 database integration tests (partial)
- Add comprehensive test utilities for database testing
- Implement connection management tests for in-memory and file databases
- Add transaction tests including nested transactions and savepoints
- Test database lifecycle, error handling, and performance
- Include tests for WAL mode, connection pooling, and constraints

Part of Phase 4: Integration Testing
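The transaction, savepoint, and WAL-mode coverage described above comes down to asserting SQLite behaviour through the driver. A minimal sketch of that kind of test, assuming better-sqlite3 and Vitest; the table name and test structure here are illustrative, not the actual Phase 4 utilities:

```typescript
// Minimal sketch, not the actual Phase 4 test utilities.
import { describe, it, expect } from 'vitest';
import Database from 'better-sqlite3';

describe('nested transactions via savepoints (sketch)', () => {
  it('rolls back an inner savepoint without losing the outer transaction', () => {
    const db = new Database(':memory:');
    // WAL only applies to file-backed databases; shown here for completeness.
    db.pragma('journal_mode = WAL');
    db.exec('CREATE TABLE nodes (id INTEGER PRIMARY KEY, name TEXT)');

    db.exec('BEGIN');
    db.prepare('INSERT INTO nodes (name) VALUES (?)').run('outer');

    // SQLite has no true nested transactions; savepoints emulate them.
    db.exec('SAVEPOINT sp1');
    db.prepare('INSERT INTO nodes (name) VALUES (?)').run('inner');
    db.exec('ROLLBACK TO sp1');

    db.exec('COMMIT');

    expect(db.prepare('SELECT name FROM nodes').all()).toEqual([{ name: 'outer' }]);
    db.close();
  });
});
```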
tests/integration/mcp-protocol/README.md (new file, 53 lines)
@@ -0,0 +1,53 @@
# MCP Protocol Integration Tests

This directory contains comprehensive integration tests for the Model Context Protocol (MCP) implementation in n8n-mcp.

## Test Structure

### Core Tests
- **basic-connection.test.ts** - Tests basic MCP server functionality and tool execution
- **protocol-compliance.test.ts** - Tests JSON-RPC 2.0 compliance and protocol specifications
- **tool-invocation.test.ts** - Tests all MCP tool categories and their invocation
- **session-management.test.ts** - Tests session lifecycle, multiple sessions, and recovery
- **error-handling.test.ts** - Tests error handling, edge cases, and invalid inputs
- **performance.test.ts** - Performance benchmarks and stress tests

### Helper Files
- **test-helpers.ts** - TestableN8NMCPServer wrapper for testing with custom transports (sketched below)

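The tests in this directory rely on a small surface of that wrapper: `initialize()`, `connectToTransport()`, and `close()`. A rough sketch of the shape those calls imply (the interface name is illustrative and the actual `test-helpers.ts` implementation may differ):

```typescript
// Sketch of the surface the tests rely on; the real TestableN8NMCPServer may differ.
import type { Transport } from '@modelcontextprotocol/sdk/shared/transport.js';

export interface TestableServerShape {
  /** Build the underlying documentation server (loads the node database). */
  initialize(): Promise<void>;
  /** Attach the MCP server to a caller-supplied transport, e.g. one half of InMemoryTransport.createLinkedPair(). */
  connectToTransport(transport: Transport): Promise<void>;
  /** Tear down transports and release resources. */
  close(): Promise<void>;
}
```
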
## Running Tests

```bash
# Run all MCP protocol tests
npm test -- tests/integration/mcp-protocol/

# Run specific test file
npm test -- tests/integration/mcp-protocol/basic-connection.test.ts

# Run with coverage
npm test -- tests/integration/mcp-protocol/ --coverage
```

## Test Coverage

These tests ensure:
- ✅ JSON-RPC 2.0 protocol compliance
- ✅ Proper request/response handling
- ✅ All tool categories are tested
- ✅ Error handling and edge cases
- ✅ Session management and lifecycle
- ✅ Performance and scalability

## Known Issues

1. The InMemoryTransport from the MCP SDK has some limitations with connection lifecycle
2. Tests use the actual database, so they require `data/nodes.db` to exist (see the guard sketch below)
3. Some tests are currently skipped due to transport issues (being worked on)

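Until the database is mocked, a guard along these lines can skip the suite cleanly when `data/nodes.db` has not been built yet. This is a sketch using Vitest's `describe.skipIf`, not something these tests currently do:

```typescript
import { existsSync } from 'node:fs';
import { describe, it } from 'vitest';

// Skip the whole suite instead of failing when the node database is missing.
const hasNodeDb = existsSync('data/nodes.db');

describe.skipIf(!hasNodeDb)('MCP protocol integration', () => {
  it('runs only when data/nodes.db exists', () => {
    // ...actual tests go here...
  });
});
```
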
## Future Improvements

1. Mock the database for true unit testing
2. Add WebSocket transport tests
3. Add authentication/authorization tests
4. Add rate limiting tests
5. Add more performance benchmarks
tests/integration/mcp-protocol/basic-connection.test.ts (new file, 51 lines)
@@ -0,0 +1,51 @@
import { describe, it, expect } from 'vitest';
import { N8NDocumentationMCPServer } from '../../../src/mcp/server';

describe('Basic MCP Connection', () => {
  it('should initialize MCP server', async () => {
    const server = new N8NDocumentationMCPServer();

    // Test executeTool directly - it returns raw data
    const result = await server.executeTool('get_database_statistics', {});
    expect(result).toBeDefined();
    expect(typeof result).toBe('object');
    expect(result.totalNodes).toBeDefined();
    expect(result.statistics).toBeDefined();

    await server.shutdown();
  });

  it('should execute list_nodes tool', async () => {
    const server = new N8NDocumentationMCPServer();

    const result = await server.executeTool('list_nodes', { limit: 5 });
    expect(result).toBeDefined();
    expect(typeof result).toBe('object');
    expect(result.nodes).toBeDefined();
    expect(Array.isArray(result.nodes)).toBe(true);
    expect(result.nodes).toHaveLength(5);
    expect(result.nodes[0]).toHaveProperty('nodeType');
    expect(result.nodes[0]).toHaveProperty('displayName');

    await server.shutdown();
  });

  it('should search nodes', async () => {
    const server = new N8NDocumentationMCPServer();

    const result = await server.executeTool('search_nodes', { query: 'webhook' });
    expect(result).toBeDefined();
    expect(typeof result).toBe('object');
    expect(result.results).toBeDefined();
    expect(Array.isArray(result.results)).toBe(true);
    expect(result.results.length).toBeGreaterThan(0);
    expect(result.totalCount).toBeGreaterThan(0);

    // Should find webhook node
    const webhookNode = result.results.find((n: any) => n.nodeType === 'nodes-base.webhook');
    expect(webhookNode).toBeDefined();
    expect(webhookNode.displayName).toContain('Webhook');

    await server.shutdown();
  });
});
tests/integration/mcp-protocol/error-handling.test.ts (new file, 512 lines)
@@ -0,0 +1,512 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory.js';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { TestableN8NMCPServer } from './test-helpers';

describe('MCP Error Handling', () => {
  let mcpServer: TestableN8NMCPServer;
  let client: Client;

  beforeEach(async () => {
    mcpServer = new TestableN8NMCPServer();
    await mcpServer.initialize();

    const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
    await mcpServer.connectToTransport(serverTransport);

    client = new Client({
      name: 'test-client',
      version: '1.0.0'
    }, {
      capabilities: {}
    });

    await client.connect(clientTransport);
  });

  afterEach(async () => {
    await client.close();
    await mcpServer.close();
  });

  describe('JSON-RPC Error Codes', () => {
    it('should handle invalid request (parse error)', async () => {
      // The MCP SDK handles parsing, so we test with invalid method instead
      try {
        await client.request({
          method: '', // Empty method
          params: {}
        });
        expect.fail('Should have thrown an error');
      } catch (error: any) {
        expect(error).toBeDefined();
      }
    });

    it('should handle method not found', async () => {
      try {
        await client.request({
          method: 'nonexistent/method',
          params: {}
        });
        expect.fail('Should have thrown an error');
      } catch (error: any) {
        expect(error).toBeDefined();
        expect(error.message).toContain('not found');
      }
    });

    it('should handle invalid params', async () => {
      try {
        // Missing required parameter
        await client.callTool('get_node_info', {});
        expect.fail('Should have thrown an error');
      } catch (error: any) {
        expect(error).toBeDefined();
        expect(error.message).toMatch(/missing|required|nodeType/i);
      }
    });

    it('should handle internal errors gracefully', async () => {
      try {
        // Invalid node type format should cause internal processing error
        await client.callTool('get_node_info', {
          nodeType: 'completely-invalid-format-$$$$'
        });
        expect.fail('Should have thrown an error');
      } catch (error: any) {
        expect(error).toBeDefined();
        expect(error.message).toContain('not found');
      }
    });
  });

  describe('Tool-Specific Errors', () => {
    describe('Node Discovery Errors', () => {
      it('should handle invalid category filter', async () => {
        const response = await client.callTool('list_nodes', {
          category: 'invalid_category'
        });

        // Should return empty array, not error
        const nodes = JSON.parse(response[0].text);
        expect(Array.isArray(nodes)).toBe(true);
        expect(nodes).toHaveLength(0);
      });

      it('should handle invalid search mode', async () => {
        try {
          await client.callTool('search_nodes', {
            query: 'test',
            mode: 'INVALID_MODE' as any
          });
          expect.fail('Should have thrown an error');
        } catch (error: any) {
          expect(error).toBeDefined();
        }
      });

      it('should handle empty search query', async () => {
        try {
          await client.callTool('search_nodes', {
            query: ''
          });
          expect.fail('Should have thrown an error');
        } catch (error: any) {
          expect(error).toBeDefined();
          expect(error.message).toContain('query');
        }
      });

      it('should handle non-existent node types', async () => {
        try {
          await client.callTool('get_node_info', {
            nodeType: 'nodes-base.thisDoesNotExist'
          });
          expect.fail('Should have thrown an error');
        } catch (error: any) {
          expect(error).toBeDefined();
          expect(error.message).toContain('not found');
        }
      });
    });

    describe('Validation Errors', () => {
      it('should handle invalid validation profile', async () => {
        try {
          await client.callTool('validate_node_operation', {
            nodeType: 'nodes-base.httpRequest',
            config: { method: 'GET', url: 'https://api.example.com' },
            profile: 'invalid_profile' as any
          });
          expect.fail('Should have thrown an error');
        } catch (error: any) {
          expect(error).toBeDefined();
        }
      });

      it('should handle malformed workflow structure', async () => {
        try {
          await client.callTool('validate_workflow', {
            workflow: {
              // Missing required 'nodes' array
              connections: {}
            }
          });
          expect.fail('Should have thrown an error');
        } catch (error: any) {
          expect(error).toBeDefined();
          expect(error.message).toContain('nodes');
        }
      });

      it('should handle circular workflow references', async () => {
        const workflow = {
          nodes: [
            {
              id: '1',
              name: 'Node1',
              type: 'nodes-base.noOp',
              typeVersion: 1,
              position: [0, 0],
              parameters: {}
            },
            {
              id: '2',
              name: 'Node2',
              type: 'nodes-base.noOp',
              typeVersion: 1,
              position: [250, 0],
              parameters: {}
            }
          ],
          connections: {
            'Node1': {
              'main': [[{ node: 'Node2', type: 'main', index: 0 }]]
            },
            'Node2': {
              'main': [[{ node: 'Node1', type: 'main', index: 0 }]]
            }
          }
        };

        const response = await client.callTool('validate_workflow', {
          workflow
        });

        const validation = JSON.parse(response[0].text);
        expect(validation.warnings).toBeDefined();
      });
    });

    describe('Documentation Errors', () => {
      it('should handle non-existent documentation topics', async () => {
        const response = await client.callTool('tools_documentation', {
          topic: 'completely_fake_tool'
        });

        expect(response[0].text).toContain('not found');
      });

      it('should handle invalid depth parameter', async () => {
        try {
          await client.callTool('tools_documentation', {
            depth: 'invalid_depth' as any
          });
          expect.fail('Should have thrown an error');
        } catch (error: any) {
          expect(error).toBeDefined();
        }
      });
    });
  });

  describe('Large Payload Handling', () => {
    it('should handle large node info requests', async () => {
      // HTTP Request node has extensive properties
      const response = await client.callTool('get_node_info', {
        nodeType: 'nodes-base.httpRequest'
      });

      expect(response[0].text.length).toBeGreaterThan(10000);

      // Should be valid JSON
      const nodeInfo = JSON.parse(response[0].text);
      expect(nodeInfo).toHaveProperty('properties');
    });

    it('should handle large workflow validation', async () => {
      // Create a large workflow
      const nodes = [];
      const connections: any = {};

      for (let i = 0; i < 50; i++) {
        const nodeName = `Node${i}`;
        nodes.push({
          id: String(i),
          name: nodeName,
          type: 'nodes-base.noOp',
          typeVersion: 1,
          position: [i * 100, 0],
          parameters: {}
        });

        if (i > 0) {
          const prevNode = `Node${i - 1}`;
          connections[prevNode] = {
            'main': [[{ node: nodeName, type: 'main', index: 0 }]]
          };
        }
      }

      const response = await client.callTool('validate_workflow', {
        workflow: { nodes, connections }
      });

      const validation = JSON.parse(response[0].text);
      expect(validation).toHaveProperty('valid');
    });

    it('should handle many concurrent requests', async () => {
      const requestCount = 50;
      const promises = [];

      for (let i = 0; i < requestCount; i++) {
        promises.push(
          client.callTool('list_nodes', {
            limit: 1,
            category: i % 2 === 0 ? 'trigger' : 'transform'
          })
        );
      }

      const responses = await Promise.all(promises);
      expect(responses).toHaveLength(requestCount);
    });
  });

  describe('Invalid JSON Handling', () => {
    it('should handle invalid JSON in tool parameters', async () => {
      try {
        // Config should be an object, not a string
        await client.callTool('validate_node_operation', {
          nodeType: 'nodes-base.httpRequest',
          config: 'invalid json string' as any
        });
        expect.fail('Should have thrown an error');
      } catch (error: any) {
        expect(error).toBeDefined();
      }
    });

    it('should handle malformed workflow JSON', async () => {
      try {
        await client.callTool('validate_workflow', {
          workflow: 'not a valid workflow object' as any
        });
        expect.fail('Should have thrown an error');
      } catch (error: any) {
        expect(error).toBeDefined();
      }
    });
  });

  describe('Timeout Scenarios', () => {
    it('should handle rapid sequential requests', async () => {
      const start = Date.now();

      for (let i = 0; i < 20; i++) {
        await client.callTool('get_database_statistics', {});
      }

      const duration = Date.now() - start;

      // Should complete reasonably quickly (under 5 seconds)
      expect(duration).toBeLessThan(5000);
    });

    it('should handle long-running operations', async () => {
      // Search with complex query that requires more processing
      const response = await client.callTool('search_nodes', {
        query: 'a b c d e f g h i j k l m n o p q r s t u v w x y z',
        mode: 'AND'
      });

      expect(response).toBeDefined();
    });
  });

  describe('Memory Pressure', () => {
    it('should handle multiple large responses', async () => {
      const promises = [];

      // Request multiple large node infos
      const largeNodes = [
        'nodes-base.httpRequest',
        'nodes-base.postgres',
        'nodes-base.googleSheets',
        'nodes-base.slack',
        'nodes-base.gmail'
      ];

      for (const nodeType of largeNodes) {
        promises.push(
          client.callTool('get_node_info', { nodeType })
            .catch(() => null) // Some might not exist
        );
      }

      const responses = await Promise.all(promises);
      const validResponses = responses.filter(r => r !== null);

      expect(validResponses.length).toBeGreaterThan(0);
    });

    it('should handle workflow with many nodes', async () => {
      const nodeCount = 100;
      const nodes = [];

      for (let i = 0; i < nodeCount; i++) {
        nodes.push({
          id: String(i),
          name: `Node${i}`,
          type: 'nodes-base.noOp',
          typeVersion: 1,
          position: [i * 50, Math.floor(i / 10) * 100],
          parameters: {
            // Add some data to increase memory usage
            data: `This is some test data for node ${i}`.repeat(10)
          }
        });
      }

      const response = await client.callTool('validate_workflow', {
        workflow: {
          nodes,
          connections: {}
        }
      });

      const validation = JSON.parse(response[0].text);
      expect(validation).toHaveProperty('valid');
    });
  });

  describe('Error Recovery', () => {
    it('should continue working after errors', async () => {
      // Cause an error
      try {
        await client.callTool('get_node_info', {
          nodeType: 'invalid'
        });
      } catch (error) {
        // Expected
      }

      // Should still work
      const response = await client.callTool('list_nodes', { limit: 1 });
      expect(response).toBeDefined();
    });

    it('should handle mixed success and failure', async () => {
      const promises = [
        client.callTool('list_nodes', { limit: 5 }),
        client.callTool('get_node_info', { nodeType: 'invalid' }).catch(e => ({ error: e })),
        client.callTool('get_database_statistics', {}),
        client.callTool('search_nodes', { query: '' }).catch(e => ({ error: e })),
        client.callTool('list_ai_tools', {})
      ];

      const results = await Promise.all(promises);

      // Some should succeed, some should fail
      const successes = results.filter(r => !('error' in r));
      const failures = results.filter(r => 'error' in r);

      expect(successes.length).toBeGreaterThan(0);
      expect(failures.length).toBeGreaterThan(0);
    });
  });

  describe('Edge Cases', () => {
    it('should handle empty responses gracefully', async () => {
      const response = await client.callTool('list_nodes', {
        category: 'nonexistent_category'
      });

      const nodes = JSON.parse(response[0].text);
      expect(Array.isArray(nodes)).toBe(true);
      expect(nodes).toHaveLength(0);
    });

    it('should handle special characters in parameters', async () => {
      const response = await client.callTool('search_nodes', {
        query: 'test!@#$%^&*()_+-=[]{}|;\':",./<>?'
      });

      // Should return results or empty array, not error
      const nodes = JSON.parse(response[0].text);
      expect(Array.isArray(nodes)).toBe(true);
    });

    it('should handle unicode in parameters', async () => {
      const response = await client.callTool('search_nodes', {
        query: 'test 测试 тест परीक्षण'
      });

      const nodes = JSON.parse(response[0].text);
      expect(Array.isArray(nodes)).toBe(true);
    });

    it('should handle null and undefined gracefully', async () => {
      // Most tools should handle missing optional params
      const response = await client.callTool('list_nodes', {
        limit: undefined as any,
        category: null as any
      });

      const nodes = JSON.parse(response[0].text);
      expect(Array.isArray(nodes)).toBe(true);
    });
  });

  describe('Error Message Quality', () => {
    it('should provide helpful error messages', async () => {
      try {
        await client.callTool('get_node_info', {
          nodeType: 'httpRequest' // Missing prefix
        });
        expect.fail('Should have thrown an error');
      } catch (error: any) {
        expect(error.message).toBeDefined();
        expect(error.message.length).toBeGreaterThan(10);
        // Should mention the issue
        expect(error.message.toLowerCase()).toMatch(/not found|invalid|missing/);
      }
    });

    it('should indicate missing required parameters', async () => {
      try {
        await client.callTool('search_nodes', {});
        expect.fail('Should have thrown an error');
      } catch (error: any) {
        expect(error.message).toContain('query');
      }
    });

    it('should provide context for validation errors', async () => {
      const response = await client.callTool('validate_node_operation', {
        nodeType: 'nodes-base.httpRequest',
        config: {
          // Missing required fields
          method: 'INVALID_METHOD'
        }
      });

      const validation = JSON.parse(response[0].text);
      expect(validation.valid).toBe(false);
      expect(validation.errors[0].message).toBeDefined();
      expect(validation.errors[0].field).toBeDefined();
    });
  });
});
tests/integration/mcp-protocol/performance.test.ts (new file, 502 lines)
@@ -0,0 +1,502 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory.js';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { TestableN8NMCPServer } from './test-helpers';

describe('MCP Performance Tests', () => {
  let mcpServer: TestableN8NMCPServer;
  let client: Client;

  beforeEach(async () => {
    mcpServer = new TestableN8NMCPServer();
    await mcpServer.initialize();

    const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
    await mcpServer.connectToTransport(serverTransport);

    client = new Client({
      name: 'test-client',
      version: '1.0.0'
    }, {
      capabilities: {}
    });

    await client.connect(clientTransport);
  });

  afterEach(async () => {
    await client.close();
    await mcpServer.close();
  });

  describe('Response Time Benchmarks', () => {
    it('should respond to simple queries quickly', async () => {
      const iterations = 100;
      const start = performance.now();

      for (let i = 0; i < iterations; i++) {
        await client.callTool('get_database_statistics', {});
      }

      const duration = performance.now() - start;
      const avgTime = duration / iterations;

      console.log(`Average response time for get_database_statistics: ${avgTime.toFixed(2)}ms`);

      // Should average under 10ms per request
      expect(avgTime).toBeLessThan(10);
    });

    it('should handle list operations efficiently', async () => {
      const iterations = 50;
      const start = performance.now();

      for (let i = 0; i < iterations; i++) {
        await client.callTool('list_nodes', { limit: 10 });
      }

      const duration = performance.now() - start;
      const avgTime = duration / iterations;

      console.log(`Average response time for list_nodes: ${avgTime.toFixed(2)}ms`);

      // Should average under 20ms per request
      expect(avgTime).toBeLessThan(20);
    });

    it('should perform searches efficiently', async () => {
      const searches = ['http', 'webhook', 'slack', 'database', 'api'];
      const iterations = 20;
      const start = performance.now();

      for (let i = 0; i < iterations; i++) {
        for (const query of searches) {
          await client.callTool('search_nodes', { query });
        }
      }

      const totalRequests = iterations * searches.length;
      const duration = performance.now() - start;
      const avgTime = duration / totalRequests;

      console.log(`Average response time for search_nodes: ${avgTime.toFixed(2)}ms`);

      // Should average under 30ms per search
      expect(avgTime).toBeLessThan(30);
    });

    it('should retrieve node info quickly', async () => {
      const nodeTypes = [
        'nodes-base.httpRequest',
        'nodes-base.webhook',
        'nodes-base.set',
        'nodes-base.if',
        'nodes-base.switch'
      ];

      const start = performance.now();

      for (const nodeType of nodeTypes) {
        await client.callTool('get_node_info', { nodeType });
      }

      const duration = performance.now() - start;
      const avgTime = duration / nodeTypes.length;

      console.log(`Average response time for get_node_info: ${avgTime.toFixed(2)}ms`);

      // Should average under 50ms per request (these are large responses)
      expect(avgTime).toBeLessThan(50);
    });
  });

  describe('Concurrent Request Performance', () => {
    it('should handle concurrent requests efficiently', async () => {
      const concurrentRequests = 50;
      const start = performance.now();

      const promises = [];
      for (let i = 0; i < concurrentRequests; i++) {
        promises.push(
          client.callTool('list_nodes', { limit: 5 })
        );
      }

      await Promise.all(promises);

      const duration = performance.now() - start;
      const avgTime = duration / concurrentRequests;

      console.log(`Average time for ${concurrentRequests} concurrent requests: ${avgTime.toFixed(2)}ms`);

      // Concurrent requests should be more efficient than sequential
      expect(avgTime).toBeLessThan(10);
    });

    it('should handle mixed concurrent operations', async () => {
      const operations = [
        { tool: 'list_nodes', params: { limit: 10 } },
        { tool: 'search_nodes', params: { query: 'http' } },
        { tool: 'get_database_statistics', params: {} },
        { tool: 'list_ai_tools', params: {} },
        { tool: 'list_tasks', params: {} }
      ];

      const rounds = 10;
      const start = performance.now();

      for (let round = 0; round < rounds; round++) {
        const promises = operations.map(op =>
          client.callTool(op.tool, op.params)
        );
        await Promise.all(promises);
      }

      const duration = performance.now() - start;
      const totalRequests = rounds * operations.length;
      const avgTime = duration / totalRequests;

      console.log(`Average time for mixed operations: ${avgTime.toFixed(2)}ms`);

      expect(avgTime).toBeLessThan(20);
    });
  });

  describe('Large Data Performance', () => {
    it('should handle large node lists efficiently', async () => {
      const start = performance.now();

      const response = await client.callTool('list_nodes', {
        limit: 200 // Get many nodes
      });

      const duration = performance.now() - start;

      console.log(`Time to list 200 nodes: ${duration.toFixed(2)}ms`);

      // Should complete within 100ms
      expect(duration).toBeLessThan(100);

      const nodes = JSON.parse(response[0].text);
      expect(nodes.length).toBeGreaterThan(100);
    });

    it('should handle large workflow validation efficiently', async () => {
      // Create a large workflow
      const nodeCount = 100;
      const nodes = [];
      const connections: any = {};

      for (let i = 0; i < nodeCount; i++) {
        nodes.push({
          id: String(i),
          name: `Node${i}`,
          type: i % 3 === 0 ? 'nodes-base.httpRequest' : 'nodes-base.set',
          typeVersion: 1,
          position: [i * 100, 0],
          parameters: i % 3 === 0 ?
            { method: 'GET', url: 'https://api.example.com' } :
            { values: { string: [{ name: 'test', value: 'value' }] } }
        });

        if (i > 0) {
          connections[`Node${i-1}`] = {
            'main': [[{ node: `Node${i}`, type: 'main', index: 0 }]]
          };
        }
      }

      const start = performance.now();

      const response = await client.callTool('validate_workflow', {
        workflow: { nodes, connections }
      });

      const duration = performance.now() - start;

      console.log(`Time to validate ${nodeCount} node workflow: ${duration.toFixed(2)}ms`);

      // Should complete within 500ms
      expect(duration).toBeLessThan(500);

      const validation = JSON.parse(response[0].text);
      expect(validation).toHaveProperty('valid');
    });
  });

  describe('Memory Efficiency', () => {
    it('should handle repeated operations without memory leaks', async () => {
      const iterations = 1000;
      const batchSize = 100;

      // Measure initial memory if available
      const initialMemory = process.memoryUsage();

      for (let i = 0; i < iterations; i += batchSize) {
        const promises = [];

        for (let j = 0; j < batchSize; j++) {
          promises.push(
            client.callTool('get_database_statistics', {})
          );
        }

        await Promise.all(promises);

        // Force garbage collection if available
        if (global.gc) {
          global.gc();
        }
      }

      const finalMemory = process.memoryUsage();
      const memoryIncrease = finalMemory.heapUsed - initialMemory.heapUsed;

      console.log(`Memory increase after ${iterations} operations: ${(memoryIncrease / 1024 / 1024).toFixed(2)}MB`);

      // Memory increase should be reasonable (less than 50MB)
      expect(memoryIncrease).toBeLessThan(50 * 1024 * 1024);
    });

    it('should release memory after large operations', async () => {
      const initialMemory = process.memoryUsage();

      // Perform large operations
      for (let i = 0; i < 10; i++) {
        await client.callTool('list_nodes', { limit: 200 });
        await client.callTool('get_node_info', {
          nodeType: 'nodes-base.httpRequest'
        });
      }

      // Force garbage collection if available
      if (global.gc) {
        global.gc();
        await new Promise(resolve => setTimeout(resolve, 100));
      }

      const finalMemory = process.memoryUsage();
      const memoryIncrease = finalMemory.heapUsed - initialMemory.heapUsed;

      console.log(`Memory increase after large operations: ${(memoryIncrease / 1024 / 1024).toFixed(2)}MB`);

      // Should not retain excessive memory
      expect(memoryIncrease).toBeLessThan(20 * 1024 * 1024);
    });
  });

  describe('Scalability Tests', () => {
    it('should maintain performance with increasing load', async () => {
      const loadLevels = [10, 50, 100, 200];
      const results: any[] = [];

      for (const load of loadLevels) {
        const start = performance.now();

        const promises = [];
        for (let i = 0; i < load; i++) {
          promises.push(
            client.callTool('list_nodes', { limit: 1 })
          );
        }

        await Promise.all(promises);

        const duration = performance.now() - start;
        const avgTime = duration / load;

        results.push({
          load,
          totalTime: duration,
          avgTime
        });

        console.log(`Load ${load}: Total ${duration.toFixed(2)}ms, Avg ${avgTime.toFixed(2)}ms`);
      }

      // Average time should not increase dramatically with load
      const firstAvg = results[0].avgTime;
      const lastAvg = results[results.length - 1].avgTime;

      // Last average should be less than 2x the first
      expect(lastAvg).toBeLessThan(firstAvg * 2);
    });

    it('should handle burst traffic', async () => {
      const burstSize = 100;
      const start = performance.now();

      // Simulate burst of requests
      const promises = [];
      for (let i = 0; i < burstSize; i++) {
        const operation = i % 4;
        switch (operation) {
          case 0:
            promises.push(client.callTool('list_nodes', { limit: 5 }));
            break;
          case 1:
            promises.push(client.callTool('search_nodes', { query: 'test' }));
            break;
          case 2:
            promises.push(client.callTool('get_database_statistics', {}));
            break;
          case 3:
            promises.push(client.callTool('list_ai_tools', {}));
            break;
        }
      }

      await Promise.all(promises);

      const duration = performance.now() - start;

      console.log(`Burst of ${burstSize} requests completed in ${duration.toFixed(2)}ms`);

      // Should handle burst within reasonable time
      expect(duration).toBeLessThan(1000);
    });
  });

  describe('Critical Path Optimization', () => {
    it('should optimize tool listing performance', async () => {
      // Warm up
      await client.callTool('list_nodes', { limit: 1 });

      const iterations = 100;
      const times: number[] = [];

      for (let i = 0; i < iterations; i++) {
        const start = performance.now();
        await client.callTool('list_nodes', { limit: 20 });
        times.push(performance.now() - start);
      }

      const avgTime = times.reduce((a, b) => a + b, 0) / times.length;
      const minTime = Math.min(...times);
      const maxTime = Math.max(...times);

      console.log(`list_nodes performance - Avg: ${avgTime.toFixed(2)}ms, Min: ${minTime.toFixed(2)}ms, Max: ${maxTime.toFixed(2)}ms`);

      // Average should be very fast
      expect(avgTime).toBeLessThan(10);

      // Max should not be too much higher than average (no outliers)
      expect(maxTime).toBeLessThan(avgTime * 3);
    });

    it('should optimize search performance', async () => {
      // Warm up
      await client.callTool('search_nodes', { query: 'test' });

      const queries = ['http', 'webhook', 'database', 'api', 'slack'];
      const times: number[] = [];

      for (const query of queries) {
        for (let i = 0; i < 20; i++) {
          const start = performance.now();
          await client.callTool('search_nodes', { query });
          times.push(performance.now() - start);
        }
      }

      const avgTime = times.reduce((a, b) => a + b, 0) / times.length;

      console.log(`search_nodes average performance: ${avgTime.toFixed(2)}ms`);

      // Search should be optimized
      expect(avgTime).toBeLessThan(15);
    });

    it('should cache effectively for repeated queries', async () => {
      const nodeType = 'nodes-base.httpRequest';

      // First call (cold)
      const coldStart = performance.now();
      await client.callTool('get_node_info', { nodeType });
      const coldTime = performance.now() - coldStart;

      // Subsequent calls (potentially cached)
      const warmTimes: number[] = [];
      for (let i = 0; i < 10; i++) {
        const start = performance.now();
        await client.callTool('get_node_info', { nodeType });
        warmTimes.push(performance.now() - start);
      }

      const avgWarmTime = warmTimes.reduce((a, b) => a + b, 0) / warmTimes.length;

      console.log(`Cold time: ${coldTime.toFixed(2)}ms, Avg warm time: ${avgWarmTime.toFixed(2)}ms`);

      // Warm calls should be faster or similar
      expect(avgWarmTime).toBeLessThanOrEqual(coldTime * 1.1);
    });
  });

  describe('Stress Tests', () => {
    it('should handle sustained high load', async () => {
      const duration = 5000; // 5 seconds
      const start = performance.now();
      let requestCount = 0;
      let errorCount = 0;

      while (performance.now() - start < duration) {
        try {
          await client.callTool('get_database_statistics', {});
          requestCount++;
        } catch (error) {
          errorCount++;
        }
      }

      const actualDuration = performance.now() - start;
      const requestsPerSecond = requestCount / (actualDuration / 1000);

      console.log(`Sustained load test - Requests: ${requestCount}, RPS: ${requestsPerSecond.toFixed(2)}, Errors: ${errorCount}`);

      // Should handle at least 100 requests per second
      expect(requestsPerSecond).toBeGreaterThan(100);

      // Error rate should be very low
      expect(errorCount).toBe(0);
    });

    it('should recover from performance degradation', async () => {
      // Create heavy load
      const heavyPromises = [];
      for (let i = 0; i < 200; i++) {
        heavyPromises.push(
          client.callTool('validate_workflow', {
            workflow: {
              nodes: Array(20).fill(null).map((_, idx) => ({
                id: String(idx),
                name: `Node${idx}`,
                type: 'nodes-base.set',
                typeVersion: 1,
                position: [idx * 100, 0],
                parameters: {}
              })),
              connections: {}
            }
          })
        );
      }

      await Promise.all(heavyPromises);

      // Measure performance after heavy load
      const recoveryTimes: number[] = [];
      for (let i = 0; i < 10; i++) {
        const start = performance.now();
        await client.callTool('get_database_statistics', {});
        recoveryTimes.push(performance.now() - start);
      }

      const avgRecoveryTime = recoveryTimes.reduce((a, b) => a + b, 0) / recoveryTimes.length;

      console.log(`Average response time after heavy load: ${avgRecoveryTime.toFixed(2)}ms`);

      // Should recover to normal performance
      expect(avgRecoveryTime).toBeLessThan(10);
    });
  });
});
tests/integration/mcp-protocol/protocol-compliance.test.ts (new file, 300 lines)
@@ -0,0 +1,300 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory.js';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { TestableN8NMCPServer } from './test-helpers';

describe('MCP Protocol Compliance', () => {
  let mcpServer: TestableN8NMCPServer;
  let transport: InMemoryTransport;
  let client: Client;

  beforeEach(async () => {
    mcpServer = new TestableN8NMCPServer();
    await mcpServer.initialize();

    const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
    transport = serverTransport;

    // Connect MCP server to transport
    await mcpServer.connectToTransport(transport);

    // Create client
    client = new Client({
      name: 'test-client',
      version: '1.0.0'
    }, {
      capabilities: {}
    });

    await client.connect(clientTransport);
  });

  afterEach(async () => {
    await client.close();
    await mcpServer.close();
  });

  describe('JSON-RPC 2.0 Compliance', () => {
    it('should return proper JSON-RPC 2.0 response format', async () => {
      const response = await client.request({
        method: 'tools/list',
        params: {}
      });

      // Response should have tools array
      expect(response).toHaveProperty('tools');
      expect(Array.isArray(response.tools)).toBe(true);
    });

    it('should handle request with id correctly', async () => {
      const response = await client.request({
        method: 'tools/list',
        params: {}
      });

      expect(response).toBeDefined();
      expect(typeof response).toBe('object');
    });

    it('should handle batch requests', async () => {
      // Send multiple requests concurrently
      const promises = [
        client.request({ method: 'tools/list', params: {} }),
        client.request({ method: 'tools/list', params: {} }),
        client.request({ method: 'tools/list', params: {} })
      ];

      const responses = await Promise.all(promises);

      expect(responses).toHaveLength(3);
      responses.forEach(response => {
        expect(response).toHaveProperty('tools');
      });
    });

    it('should preserve request order in responses', async () => {
      const requests = [];
      const expectedOrder = [];

      // Create requests with different tools to track order
      for (let i = 0; i < 5; i++) {
        expectedOrder.push(i);
        requests.push(
          client.callTool('get_database_statistics', {})
            .then(() => i)
        );
      }

      const results = await Promise.all(requests);
      expect(results).toEqual(expectedOrder);
    });
  });

  describe('Protocol Version Negotiation', () => {
    it('should negotiate protocol capabilities', async () => {
      const serverInfo = await client.getServerInfo();

      expect(serverInfo).toHaveProperty('name');
      expect(serverInfo).toHaveProperty('version');
      expect(serverInfo.name).toBe('n8n-documentation-mcp');
    });

    it('should expose supported capabilities', async () => {
      const serverInfo = await client.getServerInfo();

      expect(serverInfo).toHaveProperty('capabilities');
      const capabilities = serverInfo.capabilities || {};

      // Should support tools
      expect(capabilities).toHaveProperty('tools');
    });
  });

  describe('Message Format Validation', () => {
    it('should reject messages without method', async () => {
      // Test by sending raw message through transport
      const [serverTransport, clientTransport] = InMemoryTransport.createLinkedPair();
      const testClient = new Client({ name: 'test', version: '1.0.0' }, {});

      await mcpServer.connectToTransport(serverTransport);
      await testClient.connect(clientTransport);

      try {
        // This should fail as MCP SDK validates method
        await testClient.request({ method: '', params: {} });
        expect.fail('Should have thrown an error');
      } catch (error) {
        expect(error).toBeDefined();
      } finally {
        await testClient.close();
      }
    });

    it('should handle missing params gracefully', async () => {
      // Most tools should work without params
      const response = await client.callTool('list_nodes', {});
      expect(response).toBeDefined();
    });

    it('should validate params schema', async () => {
      try {
        // Invalid nodeType format (missing prefix)
        await client.callTool('get_node_info', {
          nodeType: 'httpRequest' // Should be 'nodes-base.httpRequest'
        });
        expect.fail('Should have thrown an error');
      } catch (error: any) {
        expect(error.message).toContain('not found');
      }
    });
  });

  describe('Content Types', () => {
    it('should handle text content in tool responses', async () => {
      const response = await client.callTool('get_database_statistics', {});

      expect(response).toHaveLength(1);
      expect(response[0]).toHaveProperty('type', 'text');
      expect(response[0]).toHaveProperty('text');
      expect(typeof response[0].text).toBe('string');
    });

    it('should handle large text responses', async () => {
      // Get a large node info response
      const response = await client.callTool('get_node_info', {
        nodeType: 'nodes-base.httpRequest'
      });

      expect(response).toHaveLength(1);
      expect(response[0].type).toBe('text');
      expect(response[0].text.length).toBeGreaterThan(1000);
    });

    it('should handle JSON content properly', async () => {
      const response = await client.callTool('list_nodes', {
        limit: 5
      });

      expect(response).toHaveLength(1);
      const content = JSON.parse(response[0].text);
      expect(Array.isArray(content)).toBe(true);
    });
  });

  describe('Request/Response Correlation', () => {
    it('should correlate concurrent requests correctly', async () => {
      const requests = [
        client.callTool('get_node_essentials', { nodeType: 'nodes-base.httpRequest' }),
        client.callTool('get_node_essentials', { nodeType: 'nodes-base.webhook' }),
        client.callTool('get_node_essentials', { nodeType: 'nodes-base.slack' })
      ];

      const responses = await Promise.all(requests);

      expect(responses[0][0].text).toContain('httpRequest');
      expect(responses[1][0].text).toContain('webhook');
      expect(responses[2][0].text).toContain('slack');
    });

    it('should handle interleaved requests', async () => {
      const results: string[] = [];

      // Start multiple requests with different delays
      const p1 = client.callTool('get_database_statistics', {})
        .then(() => { results.push('stats'); return 'stats'; });

      const p2 = client.callTool('list_nodes', { limit: 1 })
        .then(() => { results.push('nodes'); return 'nodes'; });

      const p3 = client.callTool('search_nodes', { query: 'http' })
        .then(() => { results.push('search'); return 'search'; });

      const resolved = await Promise.all([p1, p2, p3]);

      // All should complete
      expect(resolved).toHaveLength(3);
      expect(results).toHaveLength(3);
    });
  });

  describe('Protocol Extensions', () => {
    it('should handle tool-specific extensions', async () => {
      // Test tool with complex params
      const response = await client.callTool('validate_node_operation', {
        nodeType: 'nodes-base.httpRequest',
        config: {
          method: 'GET',
          url: 'https://api.example.com'
        },
        profile: 'runtime'
      });

      expect(response).toHaveLength(1);
      expect(response[0].type).toBe('text');
    });

    it('should support optional parameters', async () => {
      // Call with minimal params
      const response1 = await client.callTool('list_nodes', {});

      // Call with all params
      const response2 = await client.callTool('list_nodes', {
        limit: 10,
        category: 'trigger',
        package: 'n8n-nodes-base'
      });

      expect(response1).toBeDefined();
      expect(response2).toBeDefined();
    });
  });

  describe('Transport Layer', () => {
    it('should handle transport disconnection gracefully', async () => {
      const [serverTransport, clientTransport] = InMemoryTransport.createLinkedPair();
      const testClient = new Client({ name: 'test', version: '1.0.0' }, {});

      await mcpServer.connectToTransport(serverTransport);
      await testClient.connect(clientTransport);

      // Make a request
      const response = await testClient.callTool('get_database_statistics', {});
      expect(response).toBeDefined();

      // Close client
      await testClient.close();

      // Further requests should fail
      try {
        await testClient.callTool('get_database_statistics', {});
        expect.fail('Should have thrown an error');
      } catch (error) {
        expect(error).toBeDefined();
      }
    });

    it('should handle multiple sequential connections', async () => {
      // Close existing connection
      await client.close();
      await mcpServer.close();

      // Create new connections
      for (let i = 0; i < 3; i++) {
        const engine = new TestableN8NMCPServer();
        await engine.initialize();

        const [serverTransport, clientTransport] = InMemoryTransport.createLinkedPair();
        await engine.connect(serverTransport);

        const testClient = new Client({ name: 'test', version: '1.0.0' }, {});
        await testClient.connect(clientTransport);

        const response = await testClient.callTool('get_database_statistics', {});
        expect(response).toBeDefined();

        await testClient.close();
        await engine.close();
      }
    });
  });
});
tests/integration/mcp-protocol/session-management.test.ts (new file, 432 lines)
@@ -0,0 +1,432 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory.js';
|
||||
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
|
||||
import { TestableN8NMCPServer } from './test-helpers';
|
||||
|
||||
describe('MCP Session Management', () => {
|
||||
let mcpServer: TestableN8NMCPServer;
|
||||
|
||||
beforeEach(async () => {
|
||||
mcpServer = new TestableN8NMCPServer();
|
||||
await mcpServer.initialize();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await mcpServer.close();
|
||||
});
|
||||
|
||||
describe('Session Lifecycle', () => {
|
||||
it('should establish a new session', async () => {
|
||||
const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
|
||||
await mcpServer.connectToTransport(serverTransport);
|
||||
|
||||
const client = new Client({
|
||||
name: 'test-client',
|
||||
version: '1.0.0'
|
||||
}, {
|
||||
capabilities: {}
|
||||
});
|
||||
|
||||
await client.connect(clientTransport);
|
||||
|
||||
// Session should be established
|
||||
const serverInfo = await client.getServerInfo();
|
||||
expect(serverInfo).toHaveProperty('name', 'n8n-mcp');
|
||||
|
||||
await client.close();
|
||||
});
|
||||
|
||||
it('should handle session initialization with capabilities', async () => {
|
||||
const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
|
||||
await mcpServer.connectToTransport(serverTransport);
|
||||
|
||||
const client = new Client({
|
||||
name: 'test-client',
|
||||
version: '1.0.0'
|
||||
}, {
|
||||
capabilities: {
|
||||
// Client capabilities
|
||||
experimental: {}
|
||||
}
|
||||
});
|
||||
|
||||
await client.connect(clientTransport);
|
||||
|
||||
const serverInfo = await client.getServerInfo();
|
||||
expect(serverInfo.capabilities).toHaveProperty('tools');
|
||||
|
||||
await client.close();
|
||||
});
|
||||
|
||||
it('should handle clean session termination', async () => {
|
||||
const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
|
||||
await mcpServer.connectToTransport(serverTransport);
|
||||
|
||||
const client = new Client({
|
||||
name: 'test-client',
|
||||
version: '1.0.0'
|
||||
}, {});
|
||||
|
||||
await client.connect(clientTransport);
|
||||
|
||||
// Make some requests
|
||||
await client.callTool('get_database_statistics', {});
|
||||
await client.callTool('list_nodes', { limit: 5 });
|
||||
|
||||
// Clean termination
|
||||
await client.close();
|
||||
|
||||
// Client should be closed
|
||||
try {
|
||||
await client.callTool('get_database_statistics', {});
|
||||
expect.fail('Should not be able to make requests after close');
|
||||
} catch (error) {
|
||||
expect(error).toBeDefined();
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle abrupt disconnection', async () => {
|
||||
const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
|
||||
await mcpServer.connectToTransport(serverTransport);
|
||||
|
||||
const client = new Client({
|
||||
name: 'test-client',
|
||||
version: '1.0.0'
|
||||
}, {});
|
||||
|
||||
await client.connect(clientTransport);
|
||||
|
||||
// Make a request to ensure connection is active
|
||||
await client.callTool('get_database_statistics', {});
|
||||
|
||||
// Simulate abrupt disconnection by closing transport
|
||||
await clientTransport.close();
|
||||
|
||||
// Further operations should fail
|
||||
try {
|
||||
await client.callTool('list_nodes', {});
|
||||
expect.fail('Should not be able to make requests after transport close');
|
||||
} catch (error) {
|
||||
expect(error).toBeDefined();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Multiple Sessions', () => {
|
||||
it('should handle multiple concurrent sessions', async () => {
|
||||
const sessions = [];
|
||||
|
||||
// Create 5 concurrent sessions
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
|
||||
await mcpServer.connectToTransport(serverTransport);
|
||||
|
||||
const client = new Client({
|
||||
name: `test-client-${i}`,
|
||||
version: '1.0.0'
|
||||
}, {});
|
||||
|
||||
await client.connect(clientTransport);
|
||||
sessions.push(client);
|
||||
}
|
||||
|
||||
// All sessions should work independently
|
||||
const promises = sessions.map((client, index) =>
|
||||
client.callTool('get_database_statistics', {})
|
||||
.then(response => ({ client: index, response }))
|
||||
);
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
|
||||
expect(results).toHaveLength(5);
|
||||
results.forEach(result => {
|
||||
expect(result.response).toBeDefined();
|
||||
expect(result.response[0].type).toBe('text');
|
||||
});
|
||||
|
||||
// Clean up all sessions
|
||||
await Promise.all(sessions.map(client => client.close()));
|
||||
});
|
||||
|
||||
it('should isolate session state', async () => {
|
||||
// Create two sessions
|
||||
const { serverTransport: st1, clientTransport: ct1 } = InMemoryTransport.createLinkedPair();
|
||||
const { serverTransport: st2, clientTransport: ct2 } = InMemoryTransport.createLinkedPair();
|
||||
|
||||
await mcpEngine.connect(st1);
|
||||
await mcpEngine.connect(st2);
|
||||
|
||||
const client1 = new Client({ name: 'client1', version: '1.0.0' }, {});
|
||||
const client2 = new Client({ name: 'client2', version: '1.0.0' }, {});
|
||||
|
||||
await client1.connect(ct1);
|
||||
await client2.connect(ct2);
|
||||
|
||||
// Both should work independently
|
||||
const [response1, response2] = await Promise.all([
|
||||
client1.callTool('list_nodes', { limit: 3 }),
|
||||
client2.callTool('list_nodes', { limit: 5 })
|
||||
]);
|
||||
|
||||
const nodes1 = JSON.parse(response1[0].text);
|
||||
const nodes2 = JSON.parse(response2[0].text);
|
||||
|
||||
expect(nodes1).toHaveLength(3);
|
||||
expect(nodes2).toHaveLength(5);
|
||||
|
||||
await client1.close();
|
||||
await client2.close();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Session Recovery', () => {
|
||||
it('should not persist state between sessions', async () => {
|
||||
// First session
|
||||
const { serverTransport: st1, clientTransport: ct1 } = InMemoryTransport.createLinkedPair();
|
||||
await mcpEngine.connect(st1);
|
||||
|
||||
const client1 = new Client({ name: 'client1', version: '1.0.0' }, {});
|
||||
await client1.connect(ct1);
|
||||
|
||||
// Make some requests
|
||||
await client1.callTool('list_nodes', { limit: 10 });
|
||||
await client1.close();
|
||||
|
||||
// Second session - should be fresh
|
||||
const { serverTransport: st2, clientTransport: ct2 } = InMemoryTransport.createLinkedPair();
|
||||
await mcpEngine.connect(st2);
|
||||
|
||||
const client2 = new Client({ name: 'client2', version: '1.0.0' }, {});
|
||||
await client2.connect(ct2);
|
||||
|
||||
// Should work normally
|
||||
const response = await client2.callTool('get_database_statistics', {});
|
||||
expect(response).toBeDefined();
|
||||
|
||||
await client2.close();
|
||||
});
|
||||
|
||||
it('should handle rapid session cycling', async () => {
|
||||
for (let i = 0; i < 10; i++) {
|
||||
const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
|
||||
await mcpServer.connectToTransport(serverTransport);
|
||||
|
||||
const client = new Client({
|
||||
name: `rapid-client-${i}`,
|
||||
version: '1.0.0'
|
||||
}, {});
|
||||
|
||||
await client.connect(clientTransport);
|
||||
|
||||
// Quick operation
|
||||
const response = await client.callTool('get_database_statistics', {});
|
||||
expect(response).toBeDefined();
|
||||
|
||||
await client.close();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
  describe('Session Metadata', () => {
    it('should track client information', async () => {
      const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
      await mcpServer.connectToTransport(serverTransport);

      const client = new Client({
        name: 'test-client-with-metadata',
        version: '2.0.0'
      }, {
        capabilities: {
          experimental: {}
        }
      });

      await client.connect(clientTransport);

      // Server should be aware of client
      const serverInfo = await client.getServerInfo();
      expect(serverInfo).toBeDefined();

      await client.close();
    });

    it('should handle different client versions', async () => {
      const clients = [];

      for (const version of ['1.0.0', '1.1.0', '2.0.0']) {
        const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
        await mcpServer.connectToTransport(serverTransport);

        const client = new Client({
          name: 'version-test-client',
          version
        }, {});

        await client.connect(clientTransport);
        clients.push(client);
      }

      // All versions should work
      const responses = await Promise.all(
        clients.map(client => client.getServerInfo())
      );

      responses.forEach(info => {
        expect(info.name).toBe('n8n-mcp');
      });

      // Clean up
      await Promise.all(clients.map(client => client.close()));
    });
  });

  describe('Session Limits', () => {
    it('should handle many sequential sessions', async () => {
      const sessionCount = 50;

      for (let i = 0; i < sessionCount; i++) {
        const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
        await mcpServer.connectToTransport(serverTransport);

        const client = new Client({
          name: `sequential-client-${i}`,
          version: '1.0.0'
        }, {});

        await client.connect(clientTransport);

        // Light operation
        if (i % 10 === 0) {
          await client.callTool('get_database_statistics', {});
        }

        await client.close();
      }
    });

    it('should handle session with heavy usage', async () => {
      const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
      await mcpServer.connectToTransport(serverTransport);

      const client = new Client({
        name: 'heavy-usage-client',
        version: '1.0.0'
      }, {});

      await client.connect(clientTransport);

      // Make many requests
      const requestCount = 100;
      const promises = [];

      for (let i = 0; i < requestCount; i++) {
        const toolName = i % 2 === 0 ? 'list_nodes' : 'get_database_statistics';
        const params = toolName === 'list_nodes' ? { limit: 1 } : {};
        promises.push(client.callTool(toolName, params));
      }

      const responses = await Promise.all(promises);
      expect(responses).toHaveLength(requestCount);

      await client.close();
    });
  });

  describe('Session Error Recovery', () => {
    it('should handle errors without breaking session', async () => {
      const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
      await mcpServer.connectToTransport(serverTransport);

      const client = new Client({
        name: 'error-recovery-client',
        version: '1.0.0'
      }, {});

      await client.connect(clientTransport);

      // Make an error-inducing request
      try {
        await client.callTool('get_node_info', {
          nodeType: 'invalid-node-type'
        });
        expect.fail('Should have thrown an error');
      } catch (error) {
        expect(error).toBeDefined();
      }

      // Session should still be active
      const response = await client.callTool('get_database_statistics', {});
      expect(response).toBeDefined();

      await client.close();
    });

    it('should handle multiple errors in sequence', async () => {
      const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
      await mcpServer.connectToTransport(serverTransport);

      const client = new Client({
        name: 'multi-error-client',
        version: '1.0.0'
      }, {});

      await client.connect(clientTransport);

      // Multiple error-inducing requests
      const errorPromises = [
        client.callTool('get_node_info', { nodeType: 'invalid1' }).catch(e => e),
        client.callTool('get_node_info', { nodeType: 'invalid2' }).catch(e => e),
        client.callTool('get_node_for_task', { task: 'invalid_task' }).catch(e => e)
      ];

      const errors = await Promise.all(errorPromises);
      errors.forEach(error => {
        expect(error).toBeDefined();
      });

      // Session should still work
      const response = await client.callTool('list_nodes', { limit: 1 });
      expect(response).toBeDefined();

      await client.close();
    });
  });

  describe('Session Transport Events', () => {
    it('should handle transport reconnection', async () => {
      // Initial connection
      const { serverTransport: st1, clientTransport: ct1 } = InMemoryTransport.createLinkedPair();
      await mcpServer.connectToTransport(st1);

      const client = new Client({
        name: 'reconnect-client',
        version: '1.0.0'
      }, {});

      await client.connect(ct1);

      // Initial request
      const response1 = await client.callTool('get_database_statistics', {});
      expect(response1).toBeDefined();

      await client.close();

      // New connection with same client
      const { serverTransport: st2, clientTransport: ct2 } = InMemoryTransport.createLinkedPair();
      await mcpServer.connectToTransport(st2);

      const newClient = new Client({
        name: 'reconnect-client',
        version: '1.0.0'
      }, {});

      await newClient.connect(ct2);

      // Should work normally
      const response2 = await newClient.callTool('get_database_statistics', {});
      expect(response2).toBeDefined();

      await newClient.close();
    });
  });
});
101
tests/integration/mcp-protocol/test-helpers.ts
Normal file
@@ -0,0 +1,101 @@
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { Transport } from '@modelcontextprotocol/sdk/shared/transport.js';
import {
  CallToolRequestSchema,
  ListToolsRequestSchema,
  InitializeRequestSchema,
} from '@modelcontextprotocol/sdk/types.js';
import { N8NDocumentationMCPServer } from '../../../src/mcp/server';

export class TestableN8NMCPServer {
  private mcpServer: N8NDocumentationMCPServer;
  private server: Server;
  private transport?: Transport;

  constructor() {
    this.server = new Server({
      name: 'n8n-documentation-mcp',
      version: '1.0.0'
    }, {
      capabilities: {
        tools: {}
      }
    });

    this.mcpServer = new N8NDocumentationMCPServer();
    this.setupHandlers();
  }

  private setupHandlers() {
    // Initialize handler
    this.server.setRequestHandler(InitializeRequestSchema, async () => {
      return {
        protocolVersion: '2024-11-05',
        capabilities: {
          tools: {}
        },
        serverInfo: {
          name: 'n8n-documentation-mcp',
          version: '1.0.0'
        }
      };
    });

    // List tools handler
    this.server.setRequestHandler(ListToolsRequestSchema, async () => {
      const tools = await this.mcpServer.executeTool('tools/list', {});
      return tools;
    });

    // Call tool handler
    this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
      try {
        const result = await this.mcpServer.executeTool(request.params.name, request.params.arguments || {});

        // Convert result to content array if needed
        if (Array.isArray(result) && result.length > 0 && result[0].content) {
          return result;
        }

        return {
          content: [
            {
              type: 'text',
              text: typeof result === 'string' ? result : JSON.stringify(result, null, 2)
            }
          ]
        };
      } catch (error: any) {
        return {
          content: [
            {
              type: 'text',
              text: `Error: ${error.message}`
            }
          ],
          isError: true
        };
      }
    });
  }

  async initialize(): Promise<void> {
    // The MCP server initializes its database lazily.
    // We can trigger initialization by calling executeTool.
    try {
      await this.mcpServer.executeTool('get_database_statistics', {});
    } catch (error) {
      // Ignore errors, we just want to trigger initialization
    }
  }

  async connectToTransport(transport: Transport): Promise<void> {
    this.transport = transport;
    await this.server.connect(transport);
  }

  async close(): Promise<void> {
    // The server handles closing the transport
    await this.mcpServer.shutdown();
  }
}
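For reference, here is a minimal sketch (not part of the commit itself) of how the test files in this directory wire `TestableN8NMCPServer` to an MCP client over the SDK's `InMemoryTransport`; it mirrors their `beforeEach` blocks. The `createTestSession` helper name is illustrative only and does not exist in the repository.

```typescript
import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory.js';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { TestableN8NMCPServer } from './test-helpers';

// Hypothetical helper for illustration; the actual tests inline this wiring in beforeEach.
async function createTestSession() {
  // Server side: wrap the real n8n-mcp server and warm up its lazily initialized database.
  const mcpServer = new TestableN8NMCPServer();
  await mcpServer.initialize();

  // Linked in-memory transport pair: one end for the server, one for the client.
  const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
  await mcpServer.connectToTransport(serverTransport);

  // Client side: a plain MCP SDK client, configured the same way as in the tests below.
  const client = new Client({ name: 'example-client', version: '1.0.0' }, { capabilities: {} });
  await client.connect(clientTransport);

  return { mcpServer, client };
}
```

A test would then call tools through `client.callTool(...)` and tear the session down with `client.close()` followed by `mcpServer.close()`, as the `afterEach` hooks below do.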
544
tests/integration/mcp-protocol/tool-invocation.test.ts
Normal file
@@ -0,0 +1,544 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory.js';
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { TestableN8NMCPServer } from './test-helpers';

describe('MCP Tool Invocation', () => {
  let mcpServer: TestableN8NMCPServer;
  let client: Client;

  beforeEach(async () => {
    mcpServer = new TestableN8NMCPServer();
    await mcpServer.initialize();

    const { serverTransport, clientTransport } = InMemoryTransport.createLinkedPair();
    await mcpServer.connectToTransport(serverTransport);

    client = new Client({
      name: 'test-client',
      version: '1.0.0'
    }, {
      capabilities: {}
    });

    await client.connect(clientTransport);
  });

  afterEach(async () => {
    await client.close();
    await mcpServer.close();
  });

  describe('Node Discovery Tools', () => {
    describe('list_nodes', () => {
      it('should list nodes with default parameters', async () => {
        const response = await client.callTool('list_nodes', {});

        expect(response).toHaveLength(1);
        expect(response[0].type).toBe('text');

        const nodes = JSON.parse(response[0].text);
        expect(Array.isArray(nodes)).toBe(true);
        expect(nodes.length).toBeGreaterThan(0);

        // Check node structure
        const firstNode = nodes[0];
        expect(firstNode).toHaveProperty('name');
        expect(firstNode).toHaveProperty('displayName');
        expect(firstNode).toHaveProperty('type');
      });

      it('should filter nodes by category', async () => {
        const response = await client.callTool('list_nodes', {
          category: 'trigger'
        });

        const nodes = JSON.parse(response[0].text);
        expect(nodes.length).toBeGreaterThan(0);
        nodes.forEach((node: any) => {
          expect(node.category).toBe('trigger');
        });
      });

      it('should limit results', async () => {
        const response = await client.callTool('list_nodes', {
          limit: 5
        });

        const nodes = JSON.parse(response[0].text);
        expect(nodes).toHaveLength(5);
      });

      it('should filter by package', async () => {
        const response = await client.callTool('list_nodes', {
          package: 'n8n-nodes-base'
        });

        const nodes = JSON.parse(response[0].text);
        expect(nodes.length).toBeGreaterThan(0);
        nodes.forEach((node: any) => {
          expect(node.package).toBe('n8n-nodes-base');
        });
      });
    });

    describe('search_nodes', () => {
      it('should search nodes by keyword', async () => {
        const response = await client.callTool('search_nodes', {
          query: 'webhook'
        });

        const nodes = JSON.parse(response[0].text);
        expect(nodes.length).toBeGreaterThan(0);

        // Should find webhook node
        const webhookNode = nodes.find((n: any) => n.name === 'webhook');
        expect(webhookNode).toBeDefined();
      });

      it('should support different search modes', async () => {
        // OR mode
        const orResponse = await client.callTool('search_nodes', {
          query: 'http request',
          mode: 'OR'
        });
        const orNodes = JSON.parse(orResponse[0].text);
        expect(orNodes.length).toBeGreaterThan(0);

        // AND mode
        const andResponse = await client.callTool('search_nodes', {
          query: 'http request',
          mode: 'AND'
        });
        const andNodes = JSON.parse(andResponse[0].text);
        expect(andNodes.length).toBeLessThanOrEqual(orNodes.length);

        // FUZZY mode
        const fuzzyResponse = await client.callTool('search_nodes', {
          query: 'htpp requst', // Intentional typos
          mode: 'FUZZY'
        });
        const fuzzyNodes = JSON.parse(fuzzyResponse[0].text);
        expect(fuzzyNodes.length).toBeGreaterThan(0);
      });

      it('should respect result limit', async () => {
        const response = await client.callTool('search_nodes', {
          query: 'node',
          limit: 3
        });

        const nodes = JSON.parse(response[0].text);
        expect(nodes).toHaveLength(3);
      });
    });

    describe('get_node_info', () => {
      it('should get complete node information', async () => {
        const response = await client.callTool('get_node_info', {
          nodeType: 'nodes-base.httpRequest'
        });

        expect(response[0].type).toBe('text');
        const nodeInfo = JSON.parse(response[0].text);

        expect(nodeInfo).toHaveProperty('name', 'httpRequest');
        expect(nodeInfo).toHaveProperty('displayName');
        expect(nodeInfo).toHaveProperty('properties');
        expect(Array.isArray(nodeInfo.properties)).toBe(true);
      });

      it('should handle non-existent nodes', async () => {
        try {
          await client.callTool('get_node_info', {
            nodeType: 'nodes-base.nonExistent'
          });
          expect.fail('Should have thrown an error');
        } catch (error: any) {
          expect(error.message).toContain('not found');
        }
      });

      it('should handle invalid node type format', async () => {
        try {
          await client.callTool('get_node_info', {
            nodeType: 'invalidFormat'
          });
          expect.fail('Should have thrown an error');
        } catch (error: any) {
          expect(error.message).toContain('not found');
        }
      });
    });

    describe('get_node_essentials', () => {
      it('should return condensed node information', async () => {
        const response = await client.callTool('get_node_essentials', {
          nodeType: 'nodes-base.httpRequest'
        });

        const essentials = JSON.parse(response[0].text);

        expect(essentials).toHaveProperty('nodeType');
        expect(essentials).toHaveProperty('displayName');
        expect(essentials).toHaveProperty('essentialProperties');
        expect(essentials).toHaveProperty('examples');

        // Should be smaller than full info
        const fullResponse = await client.callTool('get_node_info', {
          nodeType: 'nodes-base.httpRequest'
        });

        expect(response[0].text.length).toBeLessThan(fullResponse[0].text.length);
      });
    });
  });

  describe('Validation Tools', () => {
    describe('validate_node_operation', () => {
      it('should validate valid node configuration', async () => {
        const response = await client.callTool('validate_node_operation', {
          nodeType: 'nodes-base.httpRequest',
          config: {
            method: 'GET',
            url: 'https://api.example.com/data'
          }
        });

        const validation = JSON.parse(response[0].text);
        expect(validation).toHaveProperty('valid');
        expect(validation).toHaveProperty('errors');
        expect(validation).toHaveProperty('warnings');
      });

      it('should detect missing required fields', async () => {
        const response = await client.callTool('validate_node_operation', {
          nodeType: 'nodes-base.httpRequest',
          config: {
            method: 'GET'
            // Missing required 'url' field
          }
        });

        const validation = JSON.parse(response[0].text);
        expect(validation.valid).toBe(false);
        expect(validation.errors.length).toBeGreaterThan(0);
        expect(validation.errors[0].message).toContain('url');
      });

      it('should support different validation profiles', async () => {
        const profiles = ['minimal', 'runtime', 'ai-friendly', 'strict'];

        for (const profile of profiles) {
          const response = await client.callTool('validate_node_operation', {
            nodeType: 'nodes-base.httpRequest',
            config: { method: 'GET', url: 'https://api.example.com' },
            profile
          });

          const validation = JSON.parse(response[0].text);
          expect(validation).toHaveProperty('profile', profile);
        }
      });
    });

    describe('validate_workflow', () => {
      it('should validate complete workflow', async () => {
        const workflow = {
          nodes: [
            {
              id: '1',
              name: 'Start',
              type: 'nodes-base.manualTrigger',
              typeVersion: 1,
              position: [0, 0],
              parameters: {}
            },
            {
              id: '2',
              name: 'HTTP Request',
              type: 'nodes-base.httpRequest',
              typeVersion: 3,
              position: [250, 0],
              parameters: {
                method: 'GET',
                url: 'https://api.example.com/data'
              }
            }
          ],
          connections: {
            'Start': {
              'main': [[{ node: 'HTTP Request', type: 'main', index: 0 }]]
            }
          }
        };

        const response = await client.callTool('validate_workflow', {
          workflow
        });

        const validation = JSON.parse(response[0].text);
        expect(validation).toHaveProperty('valid');
        expect(validation).toHaveProperty('errors');
        expect(validation).toHaveProperty('warnings');
      });

      it('should detect connection errors', async () => {
        const workflow = {
          nodes: [
            {
              id: '1',
              name: 'Start',
              type: 'nodes-base.manualTrigger',
              typeVersion: 1,
              position: [0, 0],
              parameters: {}
            }
          ],
          connections: {
            'Start': {
              'main': [[{ node: 'NonExistent', type: 'main', index: 0 }]]
            }
          }
        };

        const response = await client.callTool('validate_workflow', {
          workflow
        });

        const validation = JSON.parse(response[0].text);
        expect(validation.valid).toBe(false);
        expect(validation.errors.length).toBeGreaterThan(0);
      });

      it('should validate expressions', async () => {
        const workflow = {
          nodes: [
            {
              id: '1',
              name: 'Start',
              type: 'nodes-base.manualTrigger',
              typeVersion: 1,
              position: [0, 0],
              parameters: {}
            },
            {
              id: '2',
              name: 'Set',
              type: 'nodes-base.set',
              typeVersion: 1,
              position: [250, 0],
              parameters: {
                values: {
                  string: [
                    {
                      name: 'test',
                      value: '={{ $json.invalidExpression }}'
                    }
                  ]
                }
              }
            }
          ],
          connections: {
            'Start': {
              'main': [[{ node: 'Set', type: 'main', index: 0 }]]
            }
          }
        };

        const response = await client.callTool('validate_workflow', {
          workflow,
          options: {
            validateExpressions: true
          }
        });

        const validation = JSON.parse(response[0].text);
        expect(validation).toHaveProperty('expressionWarnings');
      });
    });
  });

  describe('Documentation Tools', () => {
    describe('tools_documentation', () => {
      it('should get quick start guide', async () => {
        const response = await client.callTool('tools_documentation', {});

        expect(response[0].type).toBe('text');
        expect(response[0].text).toContain('Quick Reference');
      });

      it('should get specific tool documentation', async () => {
        const response = await client.callTool('tools_documentation', {
          topic: 'search_nodes'
        });

        expect(response[0].text).toContain('search_nodes');
        expect(response[0].text).toContain('Search nodes by keywords');
      });

      it('should get comprehensive documentation', async () => {
        const response = await client.callTool('tools_documentation', {
          depth: 'full'
        });

        expect(response[0].text.length).toBeGreaterThan(5000);
        expect(response[0].text).toContain('Comprehensive');
      });

      it('should handle invalid topics gracefully', async () => {
        const response = await client.callTool('tools_documentation', {
          topic: 'nonexistent_tool'
        });

        expect(response[0].text).toContain('not found');
      });
    });
  });

  describe('AI Tools', () => {
    describe('list_ai_tools', () => {
      it('should list AI-capable nodes', async () => {
        const response = await client.callTool('list_ai_tools', {});

        const aiTools = JSON.parse(response[0].text);
        expect(Array.isArray(aiTools)).toBe(true);
        expect(aiTools.length).toBeGreaterThan(0);

        // All should be AI-capable
        aiTools.forEach((tool: any) => {
          expect(tool.isAITool).toBe(true);
        });
      });
    });

    describe('get_node_as_tool_info', () => {
      it('should provide AI tool usage information', async () => {
        const response = await client.callTool('get_node_as_tool_info', {
          nodeType: 'nodes-base.slack'
        });

        const info = JSON.parse(response[0].text);
        expect(info).toHaveProperty('nodeType');
        expect(info).toHaveProperty('canBeUsedAsTool');
        expect(info).toHaveProperty('requirements');
        expect(info).toHaveProperty('useCases');
      });
    });
  });

  describe('Task Templates', () => {
    describe('get_node_for_task', () => {
      it('should return pre-configured node for task', async () => {
        const response = await client.callTool('get_node_for_task', {
          task: 'post_json_request'
        });

        const config = JSON.parse(response[0].text);
        expect(config).toHaveProperty('nodeType');
        expect(config).toHaveProperty('displayName');
        expect(config).toHaveProperty('parameters');
        expect(config.parameters.method).toBe('POST');
      });

      it('should handle unknown tasks', async () => {
        try {
          await client.callTool('get_node_for_task', {
            task: 'unknown_task'
          });
          expect.fail('Should have thrown an error');
        } catch (error: any) {
          expect(error.message).toContain('Unknown task');
        }
      });
    });

    describe('list_tasks', () => {
      it('should list all available tasks', async () => {
        const response = await client.callTool('list_tasks', {});

        const tasks = JSON.parse(response[0].text);
        expect(Array.isArray(tasks)).toBe(true);
        expect(tasks.length).toBeGreaterThan(0);

        // Check task structure
        tasks.forEach((task: any) => {
          expect(task).toHaveProperty('task');
          expect(task).toHaveProperty('description');
          expect(task).toHaveProperty('category');
        });
      });

      it('should filter by category', async () => {
        const response = await client.callTool('list_tasks', {
          category: 'HTTP/API'
        });

        const tasks = JSON.parse(response[0].text);
        tasks.forEach((task: any) => {
          expect(task.category).toBe('HTTP/API');
        });
      });
    });
  });

  describe('Complex Tool Interactions', () => {
    it('should handle tool chaining', async () => {
      // Search for nodes
      const searchResponse = await client.callTool('search_nodes', {
        query: 'slack'
      });
      const nodes = JSON.parse(searchResponse[0].text);

      // Get info for first result
      const firstNode = nodes[0];
      const infoResponse = await client.callTool('get_node_info', {
        nodeType: `${firstNode.package}.${firstNode.name}`
      });

      expect(infoResponse[0].text).toContain(firstNode.name);
    });

    it('should handle parallel tool calls', async () => {
      const tools = [
        'list_nodes',
        'get_database_statistics',
        'list_ai_tools',
        'list_tasks'
      ];

      const promises = tools.map(tool =>
        client.callTool(tool, {})
      );

      const responses = await Promise.all(promises);

      expect(responses).toHaveLength(tools.length);
      responses.forEach(response => {
        expect(response).toHaveLength(1);
        expect(response[0].type).toBe('text');
      });
    });

    it('should maintain consistency across related tools', async () => {
      // Get node via different methods
      const nodeType = 'nodes-base.httpRequest';

      const [fullInfo, essentials, searchResult] = await Promise.all([
        client.callTool('get_node_info', { nodeType }),
        client.callTool('get_node_essentials', { nodeType }),
        client.callTool('search_nodes', { query: 'httpRequest' })
      ]);

      const full = JSON.parse(fullInfo[0].text);
      const essential = JSON.parse(essentials[0].text);
      const search = JSON.parse(searchResult[0].text);

      // Should all reference the same node
      expect(full.name).toBe('httpRequest');
      expect(essential.displayName).toBe(full.displayName);
      expect(search.find((n: any) => n.name === 'httpRequest')).toBeDefined();
    });
  });
});