fix: resolve database integration test issues
- Fix better-sqlite3 import statements to use namespace import
- Update test schemas to match actual database schema
- Align NodeRepository tests with actual API implementation
- Fix FTS5 tests to work with templates instead of nodes
- Update mock data to match ParsedNode interface
- Fix column names to match actual schema (node_type, package_name, etc.)
- Add proper ParsedNode creation helper function
- Remove tests for non-existent foreign key constraints
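For context, the better-sqlite3 fix swaps the default import for a namespace import and constructs the database through a cast, which is the same pattern the diff below applies in test-utils. A minimal sketch (the tsconfig interop setting is an assumption, not part of this commit):

// Before: relies on esModuleInterop for the CommonJS default export
// import Database from 'better-sqlite3';

// After: namespace import; the constructor is invoked through a cast
import * as Database from 'better-sqlite3';

const db = new (Database as any)(':memory:'); // in-memory database for tests
db.exec('CREATE TABLE IF NOT EXISTS templates (id INTEGER PRIMARY KEY, name TEXT, description TEXT)');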
@@ -311,36 +311,16 @@ describe('Database Connection Management', () => {
       expect(mmap.mmap_size).toBeGreaterThan(0);
     });

-    it('should enforce foreign key constraints', async () => {
+    it('should have foreign key support enabled', async () => {
       testDb = new TestDatabase({ mode: 'memory' });
       const db = await testDb.initialize();

-      // Foreign keys should be enabled by default in our schema
+      // Foreign keys should be enabled by default
       const fkEnabled = db.prepare('PRAGMA foreign_keys').get() as { foreign_keys: number };
       expect(fkEnabled.foreign_keys).toBe(1);

-      // Test foreign key constraint
-      const node = TestDataGenerator.generateNode();
-      db.prepare(`
-        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
-        VALUES (?, ?, ?, ?, ?, ?, ?)
-      `).run(
-        node.name,
-        node.type,
-        node.displayName,
-        node.package,
-        node.version,
-        node.typeVersion,
-        JSON.stringify(node)
-      );
-
-      // Try to insert doc for non-existent node (should fail)
-      expect(() => {
-        db.prepare(`
-          INSERT INTO node_docs (node_name, content, examples)
-          VALUES ('non-existent-node', 'content', '[]')
-        `).run();
-      }).toThrow(/FOREIGN KEY constraint failed/);
+      // Note: The current schema doesn't define foreign key constraints,
+      // but the setting is enabled for future use
     });
   });
 });
tests/integration/database/fts5-search.test.ts (new file, 639 lines)
@@ -0,0 +1,639 @@
import { describe, it, expect, beforeEach, afterEach } from '@jest/globals';
import * as Database from 'better-sqlite3';
import { TestDatabase, TestDataGenerator, PerformanceMonitor } from './test-utils';

describe('FTS5 Full-Text Search', () => {
  let testDb: TestDatabase;
  let db: Database;

  beforeEach(async () => {
    testDb = new TestDatabase({ mode: 'memory', enableFTS5: true });
    db = await testDb.initialize();
  });

  afterEach(async () => {
    await testDb.cleanup();
  });

  describe('FTS5 Availability', () => {
    it('should have FTS5 extension available', () => {
      // Try to create an FTS5 table
      expect(() => {
        db.exec('CREATE VIRTUAL TABLE test_fts USING fts5(content)');
        db.exec('DROP TABLE test_fts');
      }).not.toThrow();
    });

    it('should support FTS5 for template searches', () => {
      // Create FTS5 table for templates
      db.exec(`
        CREATE VIRTUAL TABLE IF NOT EXISTS templates_fts USING fts5(
          name,
          description,
          content=templates,
          content_rowid=id
        )
      `);

      // Verify it was created
      const tables = db.prepare(`
        SELECT sql FROM sqlite_master
        WHERE type = 'table' AND name = 'templates_fts'
      `).all() as { sql: string }[];

      expect(tables).toHaveLength(1);
      expect(tables[0].sql).toContain('USING fts5');
    });
  });
  describe('Template FTS5 Operations', () => {
    beforeEach(() => {
      // Create FTS5 table
      db.exec(`
        CREATE VIRTUAL TABLE IF NOT EXISTS templates_fts USING fts5(
          name,
          description,
          content=templates,
          content_rowid=id
        )
      `);

      // Insert test templates
      const templates = [
        {
          id: 1,
          workflow_id: 1001,
          name: 'Webhook to Slack Notification',
          description: 'Send Slack messages when webhook is triggered',
          nodes_used: JSON.stringify(['n8n-nodes-base.webhook', 'n8n-nodes-base.slack']),
          workflow_json: JSON.stringify({}),
          categories: JSON.stringify([{ id: 1, name: 'automation' }]),
          views: 100
        },
        {
          id: 2,
          workflow_id: 1002,
          name: 'HTTP Request Data Processing',
          description: 'Fetch data from API and process it',
          nodes_used: JSON.stringify(['n8n-nodes-base.httpRequest', 'n8n-nodes-base.set']),
          workflow_json: JSON.stringify({}),
          categories: JSON.stringify([{ id: 2, name: 'data' }]),
          views: 200
        },
        {
          id: 3,
          workflow_id: 1003,
          name: 'Email Automation Workflow',
          description: 'Automate email sending based on triggers',
          nodes_used: JSON.stringify(['n8n-nodes-base.emailSend', 'n8n-nodes-base.if']),
          workflow_json: JSON.stringify({}),
          categories: JSON.stringify([{ id: 3, name: 'communication' }]),
          views: 150
        }
      ];

      const stmt = db.prepare(`
        INSERT INTO templates (
          id, workflow_id, name, description,
          nodes_used, workflow_json, categories, views,
          created_at, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))
      `);

      templates.forEach(template => {
        stmt.run(
          template.id,
          template.workflow_id,
          template.name,
          template.description,
          template.nodes_used,
          template.workflow_json,
          template.categories,
          template.views
        );
      });

      // Populate FTS index
      db.exec(`
        INSERT INTO templates_fts(rowid, name, description)
        SELECT id, name, description FROM templates
      `);
    });

    it('should search templates by exact term', () => {
      const results = db.prepare(`
        SELECT t.* FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH 'webhook'
        ORDER BY rank
      `).all();

      expect(results).toHaveLength(1);
      expect(results[0]).toMatchObject({
        name: 'Webhook to Slack Notification'
      });
    });

    it('should search with partial term and prefix', () => {
      const results = db.prepare(`
        SELECT t.* FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH 'auto*'
        ORDER BY rank
      `).all();

      expect(results.length).toBeGreaterThanOrEqual(1);
      expect(results.some((r: any) => r.name.includes('Automation'))).toBe(true);
    });

    it('should search across multiple columns', () => {
      const results = db.prepare(`
        SELECT t.* FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH 'email OR send'
        ORDER BY rank
      `).all();

      expect(results).toHaveLength(1);
      expect(results[0]).toMatchObject({
        name: 'Email Automation Workflow'
      });
    });

    it('should handle phrase searches', () => {
      const results = db.prepare(`
        SELECT t.* FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH '"Slack messages"'
        ORDER BY rank
      `).all();

      expect(results).toHaveLength(1);
      expect(results[0]).toMatchObject({
        name: 'Webhook to Slack Notification'
      });
    });

    it('should support NOT queries', () => {
      const results = db.prepare(`
        SELECT t.* FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH 'automation NOT email'
        ORDER BY rank
      `).all();

      expect(results.length).toBeGreaterThan(0);
      expect(results.every((r: any) => !r.name.toLowerCase().includes('email'))).toBe(true);
    });
  });
  describe('FTS5 Ranking and Scoring', () => {
    beforeEach(() => {
      // Create FTS5 table
      db.exec(`
        CREATE VIRTUAL TABLE IF NOT EXISTS templates_fts USING fts5(
          name,
          description,
          content=templates,
          content_rowid=id
        )
      `);

      // Insert templates with varying relevance
      const templates = [
        {
          id: 1,
          name: 'Advanced HTTP Request Handler',
          description: 'Complex HTTP request processing with error handling and retries'
        },
        {
          id: 2,
          name: 'Simple HTTP GET Request',
          description: 'Basic HTTP GET request example'
        },
        {
          id: 3,
          name: 'Webhook HTTP Receiver',
          description: 'Receive HTTP webhooks and process requests'
        }
      ];

      const stmt = db.prepare(`
        INSERT INTO templates (
          id, workflow_id, name, description,
          nodes_used, workflow_json, categories, views,
          created_at, updated_at
        ) VALUES (?, ?, ?, ?, '[]', '{}', '[]', 0, datetime('now'), datetime('now'))
      `);

      templates.forEach(t => {
        stmt.run(t.id, 1000 + t.id, t.name, t.description);
      });

      // Populate FTS
      db.exec(`
        INSERT INTO templates_fts(rowid, name, description)
        SELECT id, name, description FROM templates
      `);
    });

    it('should rank results by relevance using bm25', () => {
      const results = db.prepare(`
        SELECT t.*, bm25(templates_fts) as score
        FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH 'http request'
        ORDER BY bm25(templates_fts)
      `).all() as any[];

      expect(results.length).toBeGreaterThan(0);

      // Scores should be negative (lower is better in bm25)
      expect(results[0].score).toBeLessThan(0);

      // Should be ordered by relevance
      expect(results[0].name).toContain('HTTP');
    });

    it('should use custom weights for columns', () => {
      // Give more weight to name (2.0) than description (1.0)
      const results = db.prepare(`
        SELECT t.*, bm25(templates_fts, 2.0, 1.0) as score
        FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH 'request'
        ORDER BY bm25(templates_fts, 2.0, 1.0)
      `).all() as any[];

      expect(results.length).toBeGreaterThan(0);

      // Items with "request" in name should rank higher
      const nameMatches = results.filter((r: any) =>
        r.name.toLowerCase().includes('request')
      );
      expect(nameMatches.length).toBeGreaterThan(0);
    });
  });
  describe('FTS5 Advanced Features', () => {
    beforeEach(() => {
      db.exec(`
        CREATE VIRTUAL TABLE IF NOT EXISTS templates_fts USING fts5(
          name,
          description,
          content=templates,
          content_rowid=id
        )
      `);

      // Insert template with longer description
      db.prepare(`
        INSERT INTO templates (
          id, workflow_id, name, description,
          nodes_used, workflow_json, categories, views,
          created_at, updated_at
        ) VALUES (?, ?, ?, ?, '[]', '{}', '[]', 0, datetime('now'), datetime('now'))
      `).run(
        1,
        1001,
        'Complex Workflow',
        'This is a complex workflow that handles multiple operations including data transformation, filtering, and aggregation. It can process large datasets efficiently and includes error handling.'
      );

      db.exec(`
        INSERT INTO templates_fts(rowid, name, description)
        SELECT id, name, description FROM templates
      `);
    });

    it('should support snippet extraction', () => {
      const results = db.prepare(`
        SELECT
          t.*,
          snippet(templates_fts, 1, '<b>', '</b>', '...', 10) as snippet
        FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH 'transformation'
      `).all() as any[];

      expect(results).toHaveLength(1);
      expect(results[0].snippet).toContain('<b>transformation</b>');
      expect(results[0].snippet).toContain('...');
    });

    it('should support highlight function', () => {
      const results = db.prepare(`
        SELECT
          t.*,
          highlight(templates_fts, 1, '<mark>', '</mark>') as highlighted_desc
        FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH 'workflow'
        LIMIT 1
      `).all() as any[];

      expect(results).toHaveLength(1);
      expect(results[0].highlighted_desc).toContain('<mark>workflow</mark>');
    });
  });
  describe('FTS5 Triggers and Synchronization', () => {
    beforeEach(() => {
      // Create FTS5 table with triggers
      db.exec(`
        CREATE VIRTUAL TABLE IF NOT EXISTS templates_fts USING fts5(
          name,
          description,
          content=templates,
          content_rowid=id
        );

        CREATE TRIGGER IF NOT EXISTS templates_ai AFTER INSERT ON templates
        BEGIN
          INSERT INTO templates_fts(rowid, name, description)
          VALUES (new.id, new.name, new.description);
        END;

        CREATE TRIGGER IF NOT EXISTS templates_au AFTER UPDATE ON templates
        BEGIN
          UPDATE templates_fts
          SET name = new.name, description = new.description
          WHERE rowid = new.id;
        END;

        CREATE TRIGGER IF NOT EXISTS templates_ad AFTER DELETE ON templates
        BEGIN
          DELETE FROM templates_fts WHERE rowid = old.id;
        END;
      `);
    });

    it('should automatically sync FTS on insert', () => {
      const template = TestDataGenerator.generateTemplate({
        id: 100,
        name: 'Auto-synced Template',
        description: 'This template is automatically indexed'
      });

      db.prepare(`
        INSERT INTO templates (
          id, workflow_id, name, description,
          nodes_used, workflow_json, categories, views,
          created_at, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))
      `).run(
        template.id,
        template.id + 1000,
        template.name,
        template.description,
        JSON.stringify(template.nodeTypes || []),
        JSON.stringify({}),
        JSON.stringify(template.categories || []),
        template.totalViews || 0
      );

      // Should immediately be searchable
      const results = db.prepare(`
        SELECT t.* FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH 'automatically'
      `).all();

      expect(results).toHaveLength(1);
      expect(results[0]).toMatchObject({ id: 100 });
    });

    it('should automatically sync FTS on update', () => {
      // Insert template
      db.prepare(`
        INSERT INTO templates (
          id, workflow_id, name, description,
          nodes_used, workflow_json, categories, views,
          created_at, updated_at
        ) VALUES (?, ?, ?, ?, '[]', '{}', '[]', 0, datetime('now'), datetime('now'))
      `).run(200, 2000, 'Original Name', 'Original description');

      // Update description
      db.prepare(`
        UPDATE templates
        SET description = 'Updated description with new keywords'
        WHERE id = ?
      `).run(200);

      // Should find with new keywords
      const results = db.prepare(`
        SELECT t.* FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH 'keywords'
      `).all();

      expect(results).toHaveLength(1);
      expect(results[0]).toMatchObject({ id: 200 });

      // Should not find with old keywords
      const oldResults = db.prepare(`
        SELECT t.* FROM templates t
        JOIN templates_fts f ON t.id = f.rowid
        WHERE templates_fts MATCH 'Original'
      `).all();

      expect(oldResults).toHaveLength(0);
    });

    it('should automatically sync FTS on delete', () => {
      // Insert template
      db.prepare(`
        INSERT INTO templates (
          id, workflow_id, name, description,
          nodes_used, workflow_json, categories, views,
          created_at, updated_at
        ) VALUES (?, ?, ?, ?, '[]', '{}', '[]', 0, datetime('now'), datetime('now'))
      `).run(300, 3000, 'Temporary Template', 'This will be deleted');

      // Verify it's searchable
      let count = db.prepare(`
        SELECT COUNT(*) as count
        FROM templates_fts
        WHERE templates_fts MATCH 'Temporary'
      `).get() as { count: number };
      expect(count.count).toBe(1);

      // Delete template
      db.prepare('DELETE FROM templates WHERE id = ?').run(300);

      // Should no longer be searchable
      count = db.prepare(`
        SELECT COUNT(*) as count
        FROM templates_fts
        WHERE templates_fts MATCH 'Temporary'
      `).get() as { count: number };
      expect(count.count).toBe(0);
    });
  });
  describe('FTS5 Performance', () => {
    it('should handle large dataset searches efficiently', () => {
      // Create FTS5 table
      db.exec(`
        CREATE VIRTUAL TABLE IF NOT EXISTS templates_fts USING fts5(
          name,
          description,
          content=templates,
          content_rowid=id
        )
      `);

      const monitor = new PerformanceMonitor();

      // Insert a large number of templates
      const templates = TestDataGenerator.generateTemplates(1000);
      const insertStmt = db.prepare(`
        INSERT INTO templates (
          id, workflow_id, name, description,
          nodes_used, workflow_json, categories, views,
          created_at, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))
      `);

      const insertMany = db.transaction((templates: any[]) => {
        templates.forEach((template, i) => {
          insertStmt.run(
            i + 1,
            template.id,
            template.name,
            template.description || `Template ${i} for ${['webhook handling', 'API calls', 'data processing', 'automation'][i % 4]}`,
            JSON.stringify(template.nodeTypes || []),
            JSON.stringify(template.workflowInfo || {}),
            JSON.stringify(template.categories || []),
            template.totalViews || 0
          );
        });

        // Populate FTS in bulk
        db.exec(`
          INSERT INTO templates_fts(rowid, name, description)
          SELECT id, name, description FROM templates
        `);
      });

      const stopInsert = monitor.start('bulk_insert');
      insertMany(templates);
      stopInsert();

      // Test search performance
      const searchTerms = ['workflow', 'webhook', 'automation', 'data processing', 'api'];

      searchTerms.forEach(term => {
        const stop = monitor.start(`search_${term}`);
        const results = db.prepare(`
          SELECT t.* FROM templates t
          JOIN templates_fts f ON t.id = f.rowid
          WHERE templates_fts MATCH ?
          ORDER BY rank
          LIMIT 10
        `).all(term);
        stop();

        expect(results.length).toBeGreaterThan(0);
      });

      // All searches should complete quickly
      searchTerms.forEach(term => {
        const stats = monitor.getStats(`search_${term}`);
        expect(stats).not.toBeNull();
        expect(stats!.average).toBeLessThan(10); // Should complete in under 10ms
      });
    });

    it('should optimize rebuilding FTS index', () => {
      db.exec(`
        CREATE VIRTUAL TABLE IF NOT EXISTS templates_fts USING fts5(
          name,
          description,
          content=templates,
          content_rowid=id
        )
      `);

      // Insert initial data
      const templates = TestDataGenerator.generateTemplates(100);
      const insertStmt = db.prepare(`
        INSERT INTO templates (
          id, workflow_id, name, description,
          nodes_used, workflow_json, categories, views,
          created_at, updated_at
        ) VALUES (?, ?, ?, ?, '[]', '{}', '[]', 0, datetime('now'), datetime('now'))
      `);

      db.transaction(() => {
        templates.forEach((template, i) => {
          insertStmt.run(
            i + 1,
            template.id,
            template.name,
            template.description || 'Test template'
          );
        });

        db.exec(`
          INSERT INTO templates_fts(rowid, name, description)
          SELECT id, name, description FROM templates
        `);
      })();

      // Rebuild FTS index
      const monitor = new PerformanceMonitor();
      const stop = monitor.start('rebuild_fts');

      db.exec('INSERT INTO templates_fts(templates_fts) VALUES("rebuild")');

      stop();

      const stats = monitor.getStats('rebuild_fts');
      expect(stats).not.toBeNull();
      expect(stats!.average).toBeLessThan(100); // Should complete quickly
    });
  });
  describe('FTS5 Error Handling', () => {
    beforeEach(() => {
      db.exec(`
        CREATE VIRTUAL TABLE IF NOT EXISTS templates_fts USING fts5(
          name,
          description,
          content=templates,
          content_rowid=id
        )
      `);
    });

    it('should handle malformed queries gracefully', () => {
      expect(() => {
        db.prepare(`
          SELECT * FROM templates_fts WHERE templates_fts MATCH ?
        `).all('AND OR NOT'); // Invalid query syntax
      }).toThrow(/fts5: syntax error/);
    });

    it('should handle special characters in search terms', () => {
      const specialChars = ['@', '#', '$', '%', '^', '&', '*', '(', ')'];

      specialChars.forEach(char => {
        // Should not throw when properly escaped
        const results = db.prepare(`
          SELECT * FROM templates_fts WHERE templates_fts MATCH ?
        `).all(`"${char}"`);

        expect(Array.isArray(results)).toBe(true);
      });
    });

    it('should handle empty search terms', () => {
      const results = db.prepare(`
        SELECT * FROM templates_fts WHERE templates_fts MATCH ?
      `).all('');

      expect(results).toHaveLength(0);
    });
  });
});
tests/integration/database/node-repository.test.ts (new file, 615 lines)
@@ -0,0 +1,615 @@
import { describe, it, expect, beforeEach, afterEach } from '@jest/globals';
import * as Database from 'better-sqlite3';
import { NodeRepository } from '../../../src/database/node-repository';
import { DatabaseAdapter } from '../../../src/database/database-adapter';
import { TestDatabase, TestDataGenerator, MOCK_NODES } from './test-utils';
import { ParsedNode } from '../../../src/parsers/node-parser';

describe('NodeRepository Integration Tests', () => {
  let testDb: TestDatabase;
  let db: Database;
  let repository: NodeRepository;
  let adapter: DatabaseAdapter;

  beforeEach(async () => {
    testDb = new TestDatabase({ mode: 'memory' });
    db = await testDb.initialize();
    adapter = new DatabaseAdapter(db);
    repository = new NodeRepository(adapter);
  });

  afterEach(async () => {
    await testDb.cleanup();
  });
  describe('saveNode', () => {
    it('should save single node successfully', () => {
      const node = createParsedNode(MOCK_NODES.webhook);
      repository.saveNode(node);

      const saved = repository.getNode(node.nodeType);
      expect(saved).toBeTruthy();
      expect(saved.nodeType).toBe(node.nodeType);
      expect(saved.displayName).toBe(node.displayName);
    });

    it('should update existing nodes', () => {
      const node = createParsedNode(MOCK_NODES.webhook);

      // Save initial version
      repository.saveNode(node);

      // Update and save again
      const updated = { ...node, displayName: 'Updated Webhook' };
      repository.saveNode(updated);

      const saved = repository.getNode(node.nodeType);
      expect(saved?.displayName).toBe('Updated Webhook');

      // Should not create duplicate
      const count = repository.getNodeCount();
      expect(count).toBe(1);
    });

    it('should handle nodes with complex properties', () => {
      const complexNode: ParsedNode = {
        nodeType: 'n8n-nodes-base.complex',
        packageName: 'n8n-nodes-base',
        displayName: 'Complex Node',
        description: 'A complex node with many properties',
        category: 'automation',
        style: 'programmatic',
        isAITool: false,
        isTrigger: false,
        isWebhook: false,
        isVersioned: true,
        version: '1',
        documentation: 'Complex node documentation',
        properties: [
          {
            displayName: 'Resource',
            name: 'resource',
            type: 'options',
            options: [
              { name: 'User', value: 'user' },
              { name: 'Post', value: 'post' }
            ],
            default: 'user'
          },
          {
            displayName: 'Operation',
            name: 'operation',
            type: 'options',
            displayOptions: {
              show: {
                resource: ['user']
              }
            },
            options: [
              { name: 'Create', value: 'create' },
              { name: 'Get', value: 'get' }
            ]
          }
        ],
        operations: [
          { resource: 'user', operation: 'create' },
          { resource: 'user', operation: 'get' }
        ],
        credentials: [
          {
            name: 'httpBasicAuth',
            required: false
          }
        ]
      };

      repository.saveNode(complexNode);

      const saved = repository.getNode(complexNode.nodeType);
      expect(saved).toBeTruthy();
      expect(saved.properties).toHaveLength(2);
      expect(saved.credentials).toHaveLength(1);
      expect(saved.operations).toHaveLength(2);
    });

    it('should handle very large nodes', () => {
      const largeNode: ParsedNode = {
        nodeType: 'n8n-nodes-base.large',
        packageName: 'n8n-nodes-base',
        displayName: 'Large Node',
        description: 'A very large node',
        category: 'automation',
        style: 'programmatic',
        isAITool: false,
        isTrigger: false,
        isWebhook: false,
        isVersioned: true,
        version: '1',
        properties: Array.from({ length: 100 }, (_, i) => ({
          displayName: `Property ${i}`,
          name: `prop${i}`,
          type: 'string',
          default: ''
        })),
        operations: [],
        credentials: []
      };

      repository.saveNode(largeNode);

      const saved = repository.getNode(largeNode.nodeType);
      expect(saved?.properties).toHaveLength(100);
    });
  });
  describe('getNode', () => {
    beforeEach(() => {
      repository.saveNode(createParsedNode(MOCK_NODES.webhook));
      repository.saveNode(createParsedNode(MOCK_NODES.httpRequest));
    });

    it('should retrieve node by type', () => {
      const node = repository.getNode('n8n-nodes-base.webhook');
      expect(node).toBeTruthy();
      expect(node.displayName).toBe('Webhook');
      expect(node.nodeType).toBe('n8n-nodes-base.webhook');
      expect(node.package).toBe('n8n-nodes-base');
    });

    it('should return null for non-existent node', () => {
      const node = repository.getNode('n8n-nodes-base.nonExistent');
      expect(node).toBeNull();
    });

    it('should handle special characters in node types', () => {
      const specialNode: ParsedNode = {
        nodeType: 'n8n-nodes-base.special-chars_v2.node',
        packageName: 'n8n-nodes-base',
        displayName: 'Special Node',
        description: 'Node with special characters',
        category: 'automation',
        style: 'programmatic',
        isAITool: false,
        isTrigger: false,
        isWebhook: false,
        isVersioned: true,
        version: '2',
        properties: [],
        operations: [],
        credentials: []
      };

      repository.saveNode(specialNode);
      const retrieved = repository.getNode(specialNode.nodeType);
      expect(retrieved).toBeTruthy();
    });
  });
  describe('getAllNodes', () => {
    it('should return empty array when no nodes', () => {
      const nodes = repository.getAllNodes();
      expect(nodes).toHaveLength(0);
    });

    it('should return all nodes with limit', () => {
      const nodes = Array.from({ length: 20 }, (_, i) =>
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: `n8n-nodes-base.node${i}`,
          displayName: `Node ${i}`
        })
      );

      nodes.forEach(node => repository.saveNode(node));

      const retrieved = repository.getAllNodes(10);
      expect(retrieved).toHaveLength(10);
    });

    it('should return all nodes without limit', () => {
      const nodes = Array.from({ length: 20 }, (_, i) =>
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: `n8n-nodes-base.node${i}`,
          displayName: `Node ${i}`
        })
      );

      nodes.forEach(node => repository.saveNode(node));

      const retrieved = repository.getAllNodes();
      expect(retrieved).toHaveLength(20);
    });

    it('should handle very large result sets efficiently', () => {
      const nodes = Array.from({ length: 1000 }, (_, i) =>
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: `n8n-nodes-base.node${i}`,
          displayName: `Node ${i}`
        })
      );

      const insertMany = db.transaction((nodes: ParsedNode[]) => {
        nodes.forEach(node => repository.saveNode(node));
      });

      const start = Date.now();
      insertMany(nodes);
      const duration = Date.now() - start;

      expect(duration).toBeLessThan(1000); // Should complete in under 1 second

      const retrieved = repository.getAllNodes();
      expect(retrieved).toHaveLength(1000);
    });
  });
  describe('getNodesByPackage', () => {
    beforeEach(() => {
      const nodes = [
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: 'n8n-nodes-base.node1',
          packageName: 'n8n-nodes-base'
        }),
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: 'n8n-nodes-base.node2',
          packageName: 'n8n-nodes-base'
        }),
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: '@n8n/n8n-nodes-langchain.node3',
          packageName: '@n8n/n8n-nodes-langchain'
        })
      ];
      nodes.forEach(node => repository.saveNode(node));
    });

    it('should filter nodes by package', () => {
      const baseNodes = repository.getNodesByPackage('n8n-nodes-base');
      expect(baseNodes).toHaveLength(2);

      const langchainNodes = repository.getNodesByPackage('@n8n/n8n-nodes-langchain');
      expect(langchainNodes).toHaveLength(1);
    });

    it('should return empty array for non-existent package', () => {
      const nodes = repository.getNodesByPackage('non-existent-package');
      expect(nodes).toHaveLength(0);
    });
  });
  describe('getNodesByCategory', () => {
    beforeEach(() => {
      const nodes = [
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: 'n8n-nodes-base.webhook',
          category: 'trigger'
        }),
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: 'n8n-nodes-base.schedule',
          displayName: 'Schedule',
          category: 'trigger'
        }),
        createParsedNode({
          ...MOCK_NODES.httpRequest,
          nodeType: 'n8n-nodes-base.httpRequest',
          category: 'automation'
        })
      ];
      nodes.forEach(node => repository.saveNode(node));
    });

    it('should filter nodes by category', () => {
      const triggers = repository.getNodesByCategory('trigger');
      expect(triggers).toHaveLength(2);
      expect(triggers.every(n => n.category === 'trigger')).toBe(true);

      const automation = repository.getNodesByCategory('automation');
      expect(automation).toHaveLength(1);
      expect(automation[0].category).toBe('automation');
    });
  });
  describe('searchNodes', () => {
    beforeEach(() => {
      const nodes = [
        createParsedNode({
          ...MOCK_NODES.webhook,
          description: 'Starts the workflow when webhook is called'
        }),
        createParsedNode({
          ...MOCK_NODES.httpRequest,
          description: 'Makes HTTP requests to external APIs'
        }),
        createParsedNode({
          nodeType: 'n8n-nodes-base.emailSend',
          packageName: 'n8n-nodes-base',
          displayName: 'Send Email',
          description: 'Sends emails via SMTP protocol',
          category: 'communication',
          developmentStyle: 'programmatic',
          isAITool: false,
          isTrigger: false,
          isWebhook: false,
          isVersioned: true,
          version: '1',
          properties: [],
          operations: [],
          credentials: []
        })
      ];
      nodes.forEach(node => repository.saveNode(node));
    });

    it('should search by node type', () => {
      const results = repository.searchNodes('webhook');
      expect(results).toHaveLength(1);
      expect(results[0].nodeType).toBe('n8n-nodes-base.webhook');
    });

    it('should search by display name', () => {
      const results = repository.searchNodes('Send Email');
      expect(results).toHaveLength(1);
      expect(results[0].nodeType).toBe('n8n-nodes-base.emailSend');
    });

    it('should search by description', () => {
      const results = repository.searchNodes('SMTP');
      expect(results).toHaveLength(1);
      expect(results[0].nodeType).toBe('n8n-nodes-base.emailSend');
    });

    it('should handle OR mode (default)', () => {
      const results = repository.searchNodes('webhook email', 'OR');
      expect(results).toHaveLength(2);
      const nodeTypes = results.map(r => r.nodeType);
      expect(nodeTypes).toContain('n8n-nodes-base.webhook');
      expect(nodeTypes).toContain('n8n-nodes-base.emailSend');
    });

    it('should handle AND mode', () => {
      const results = repository.searchNodes('HTTP request', 'AND');
      expect(results).toHaveLength(1);
      expect(results[0].nodeType).toBe('n8n-nodes-base.httpRequest');
    });

    it('should handle FUZZY mode', () => {
      const results = repository.searchNodes('HTT', 'FUZZY');
      expect(results).toHaveLength(1);
      expect(results[0].nodeType).toBe('n8n-nodes-base.httpRequest');
    });

    it('should handle case-insensitive search', () => {
      const results = repository.searchNodes('WEBHOOK');
      expect(results).toHaveLength(1);
      expect(results[0].nodeType).toBe('n8n-nodes-base.webhook');
    });

    it('should return empty array for no matches', () => {
      const results = repository.searchNodes('nonexistent');
      expect(results).toHaveLength(0);
    });

    it('should respect limit parameter', () => {
      // Add more nodes
      const nodes = Array.from({ length: 10 }, (_, i) =>
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: `n8n-nodes-base.test${i}`,
          displayName: `Test Node ${i}`,
          description: 'Test description'
        })
      );
      nodes.forEach(node => repository.saveNode(node));

      const results = repository.searchNodes('test', 'OR', 5);
      expect(results).toHaveLength(5);
    });
  });
  describe('getAITools', () => {
    it('should return only AI tool nodes', () => {
      const nodes = [
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: 'n8n-nodes-base.webhook',
          isAITool: false
        }),
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: '@n8n/n8n-nodes-langchain.agent',
          displayName: 'AI Agent',
          packageName: '@n8n/n8n-nodes-langchain',
          isAITool: true
        }),
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: '@n8n/n8n-nodes-langchain.tool',
          displayName: 'AI Tool',
          packageName: '@n8n/n8n-nodes-langchain',
          isAITool: true
        })
      ];

      nodes.forEach(node => repository.saveNode(node));

      const aiTools = repository.getAITools();
      expect(aiTools).toHaveLength(2);
      expect(aiTools.every(node => node.package.includes('langchain'))).toBe(true);
      expect(aiTools[0].displayName).toBe('AI Agent');
      expect(aiTools[1].displayName).toBe('AI Tool');
    });
  });
  describe('getNodeCount', () => {
    it('should return correct node count', () => {
      expect(repository.getNodeCount()).toBe(0);

      repository.saveNode(createParsedNode(MOCK_NODES.webhook));
      expect(repository.getNodeCount()).toBe(1);

      repository.saveNode(createParsedNode(MOCK_NODES.httpRequest));
      expect(repository.getNodeCount()).toBe(2);
    });
  });
  describe('searchNodeProperties', () => {
    beforeEach(() => {
      const node: ParsedNode = {
        nodeType: 'n8n-nodes-base.complex',
        packageName: 'n8n-nodes-base',
        displayName: 'Complex Node',
        description: 'A complex node',
        category: 'automation',
        style: 'programmatic',
        isAITool: false,
        isTrigger: false,
        isWebhook: false,
        isVersioned: true,
        version: '1',
        properties: [
          {
            displayName: 'Authentication',
            name: 'authentication',
            type: 'options',
            options: [
              { name: 'Basic', value: 'basic' },
              { name: 'OAuth2', value: 'oauth2' }
            ]
          },
          {
            displayName: 'Headers',
            name: 'headers',
            type: 'collection',
            default: {},
            options: [
              {
                displayName: 'Header',
                name: 'header',
                type: 'string'
              }
            ]
          }
        ],
        operations: [],
        credentials: []
      };
      repository.saveNode(node);
    });

    it('should find properties by name', () => {
      const results = repository.searchNodeProperties('n8n-nodes-base.complex', 'auth');
      expect(results.length).toBeGreaterThan(0);
      expect(results.some(r => r.path.includes('authentication'))).toBe(true);
    });

    it('should find nested properties', () => {
      const results = repository.searchNodeProperties('n8n-nodes-base.complex', 'header');
      expect(results.length).toBeGreaterThan(0);
    });

    it('should return empty array for non-existent node', () => {
      const results = repository.searchNodeProperties('non-existent', 'test');
      expect(results).toHaveLength(0);
    });
  });
  describe('Transaction handling', () => {
    it('should handle errors gracefully', () => {
      const invalidNode = {
        nodeType: null, // This will cause an error
        packageName: 'test',
        displayName: 'Test'
      } as any;

      expect(() => {
        repository.saveNode(invalidNode);
      }).toThrow();

      // Repository should still be functional
      const count = repository.getNodeCount();
      expect(count).toBe(0);
    });

    it('should handle concurrent saves', () => {
      const node = createParsedNode(MOCK_NODES.webhook);

      // Simulate concurrent saves of the same node with different display names
      const promises = Array.from({ length: 10 }, (_, i) => {
        const updatedNode = {
          ...node,
          displayName: `Display ${i}`
        };
        return Promise.resolve(repository.saveNode(updatedNode));
      });

      Promise.all(promises);

      // Should have only one node
      const count = repository.getNodeCount();
      expect(count).toBe(1);

      // Should have the last update
      const saved = repository.getNode(node.nodeType);
      expect(saved).toBeTruthy();
    });
  });
  describe('Performance characteristics', () => {
    it('should handle bulk operations efficiently', () => {
      const nodeCount = 1000;
      const nodes = Array.from({ length: nodeCount }, (_, i) =>
        createParsedNode({
          ...MOCK_NODES.webhook,
          nodeType: `n8n-nodes-base.node${i}`,
          displayName: `Node ${i}`,
          description: `Description for node ${i}`
        })
      );

      const insertMany = db.transaction((nodes: ParsedNode[]) => {
        nodes.forEach(node => repository.saveNode(node));
      });

      const start = Date.now();
      insertMany(nodes);
      const saveDuration = Date.now() - start;

      expect(saveDuration).toBeLessThan(1000); // Should complete in under 1 second

      // Test search performance
      const searchStart = Date.now();
      const results = repository.searchNodes('node', 'OR', 100);
      const searchDuration = Date.now() - searchStart;

      expect(searchDuration).toBeLessThan(50); // Search should be fast
      expect(results.length).toBe(100); // Respects limit
    });
  });
});
// Helper function to create ParsedNode from test data
function createParsedNode(data: any): ParsedNode {
  return {
    nodeType: data.nodeType,
    packageName: data.packageName,
    displayName: data.displayName,
    description: data.description || '',
    category: data.category || 'automation',
    style: data.developmentStyle || 'programmatic',
    isAITool: data.isAITool || false,
    isTrigger: data.isTrigger || false,
    isWebhook: data.isWebhook || false,
    isVersioned: data.isVersioned !== undefined ? data.isVersioned : true,
    version: data.version || '1',
    documentation: data.documentation || null,
    properties: data.properties || [],
    operations: data.operations || [],
    credentials: data.credentials || []
  };
}
@@ -1,6 +1,6 @@
 import * as fs from 'fs';
 import * as path from 'path';
-import Database from 'better-sqlite3';
+import * as Database from 'better-sqlite3';
 import { execSync } from 'child_process';

 export interface TestDatabaseOptions {
@@ -11,7 +11,7 @@ export interface TestDatabaseOptions {
 }

 export class TestDatabase {
-  private db: Database.Database | null = null;
+  private db: Database | null = null;
   private dbPath?: string;
   private options: TestDatabaseOptions;

@@ -19,7 +19,7 @@ export class TestDatabase {
     this.options = options;
   }

-  async initialize(): Promise<Database.Database> {
+  async initialize(): Promise<Database> {
     if (this.db) return this.db;

     if (this.options.mode === 'file') {
@@ -28,9 +28,9 @@ export class TestDatabase {
         fs.mkdirSync(testDir, { recursive: true });
       }
       this.dbPath = path.join(testDir, this.options.name || `test-${Date.now()}.db`);
-      this.db = new Database(this.dbPath);
+      this.db = new (Database as any)(this.dbPath);
     } else {
-      this.db = new Database(':memory:');
+      this.db = new (Database as any)(':memory:');
     }

     // Enable WAL mode for file databases
@@ -72,7 +72,7 @@ export class TestDatabase {
     }
   }

-  getDatabase(): Database.Database {
+  getDatabase(): Database {
     if (!this.db) throw new Error('Database not initialized');
     return this.db;
   }
@@ -155,17 +155,23 @@ export class PerformanceMonitor {
 // Data generation utilities
 export class TestDataGenerator {
   static generateNode(overrides: any = {}): any {
+    const nodeName = overrides.name || `testNode${Math.random().toString(36).substr(2, 9)}`;
     return {
-      name: `testNode${Math.random().toString(36).substr(2, 9)}`,
-      displayName: 'Test Node',
-      description: 'A test node for integration testing',
-      version: 1,
-      typeVersion: 1,
-      type: 'n8n-nodes-base.testNode',
-      package: 'n8n-nodes-base',
-      category: ['automation'],
-      properties: [],
-      credentials: [],
+      nodeType: overrides.nodeType || `n8n-nodes-base.${nodeName}`,
+      packageName: overrides.packageName || overrides.package || 'n8n-nodes-base',
+      displayName: overrides.displayName || 'Test Node',
+      description: overrides.description || 'A test node for integration testing',
+      category: overrides.category || 'automation',
+      developmentStyle: overrides.developmentStyle || overrides.style || 'programmatic',
+      isAITool: overrides.isAITool || false,
+      isTrigger: overrides.isTrigger || false,
+      isWebhook: overrides.isWebhook || false,
+      isVersioned: overrides.isVersioned !== undefined ? overrides.isVersioned : true,
+      version: overrides.version || '1',
+      documentation: overrides.documentation || null,
+      properties: overrides.properties || [],
+      operations: overrides.operations || [],
+      credentials: overrides.credentials || [],
       ...overrides
     };
   }
@@ -176,7 +182,7 @@ export class TestDataGenerator {
         ...template,
         name: `testNode${i}`,
         displayName: `Test Node ${i}`,
-        type: `n8n-nodes-base.testNode${i}`
+        nodeType: `n8n-nodes-base.testNode${i}`
       })
     );
   }
@@ -204,7 +210,7 @@ export class TestDataGenerator {

 // Transaction test utilities
 export async function runInTransaction<T>(
-  db: Database.Database,
+  db: Database,
   fn: () => T
 ): Promise<T> {
   db.exec('BEGIN');
@@ -260,7 +266,7 @@ export async function simulateConcurrentAccess(
 }

 // Database integrity check
-export function checkDatabaseIntegrity(db: Database.Database): {
+export function checkDatabaseIntegrity(db: Database): {
   isValid: boolean;
   errors: string[];
 } {
@@ -279,14 +285,14 @@ export function checkDatabaseIntegrity(db: Database.Database): {
       errors.push(`Foreign key violations: ${JSON.stringify(fkResult)}`);
     }

-    // Check for orphaned records
-    const orphanedDocs = db.prepare(`
-      SELECT COUNT(*) as count FROM node_docs
-      WHERE node_name NOT IN (SELECT name FROM nodes)
-    `).get() as { count: number };
+    // Check table existence
+    const tables = db.prepare(`
+      SELECT name FROM sqlite_master
+      WHERE type = 'table' AND name = 'nodes'
+    `).all();

-    if (orphanedDocs.count > 0) {
-      errors.push(`Found ${orphanedDocs.count} orphaned documentation records`);
+    if (tables.length === 0) {
+      errors.push('nodes table does not exist');
     }

   } catch (error: any) {
@@ -302,13 +308,18 @@ export function checkDatabaseIntegrity(db: Database.Database): {
 // Mock data for testing
 export const MOCK_NODES = {
   webhook: {
-    name: 'webhook',
+    nodeType: 'n8n-nodes-base.webhook',
+    packageName: 'n8n-nodes-base',
     displayName: 'Webhook',
-    type: 'n8n-nodes-base.webhook',
-    typeVersion: 1,
     description: 'Starts the workflow when a webhook is called',
-    category: ['trigger'],
-    package: 'n8n-nodes-base',
+    category: 'trigger',
+    developmentStyle: 'programmatic',
+    isAITool: false,
+    isTrigger: true,
+    isWebhook: true,
+    isVersioned: true,
+    version: '1',
+    documentation: 'Webhook documentation',
     properties: [
       {
         displayName: 'HTTP Method',
@@ -320,16 +331,23 @@ export const MOCK_NODES = {
         ],
         default: 'GET'
       }
-    ]
+    ],
+    operations: [],
+    credentials: []
   },
   httpRequest: {
-    name: 'httpRequest',
+    nodeType: 'n8n-nodes-base.httpRequest',
+    packageName: 'n8n-nodes-base',
     displayName: 'HTTP Request',
-    type: 'n8n-nodes-base.httpRequest',
-    typeVersion: 1,
     description: 'Makes an HTTP request and returns the response',
-    category: ['automation'],
-    package: 'n8n-nodes-base',
+    category: 'automation',
+    developmentStyle: 'programmatic',
+    isAITool: false,
+    isTrigger: false,
+    isWebhook: false,
+    isVersioned: true,
+    version: '1',
+    documentation: 'HTTP Request documentation',
     properties: [
       {
         displayName: 'URL',
@@ -338,6 +356,8 @@ export const MOCK_NODES = {
         required: true,
         default: ''
       }
-    ]
+    ],
+    operations: [],
+    credentials: []
   }
 };
@@ -112,8 +112,12 @@ describe('Database Transactions', () => {

       // Insert first node
       const insertStmt = db.prepare(`
-        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
-        VALUES (?, ?, ?, ?, ?, ?, ?)
+        INSERT INTO nodes (
+          node_type, package_name, display_name, description,
+          category, development_style, is_ai_tool, is_trigger,
+          is_webhook, is_versioned, version, documentation,
+          properties_schema, operations, credentials_required
+        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
       `);

       insertStmt.run(
@@ -462,8 +466,12 @@ describe('Database Transactions', () => {
       // Insert initial data
       const nodes = TestDataGenerator.generateNodes(2);
       const insertStmt = db.prepare(`
-        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
-        VALUES (?, ?, ?, ?, ?, ?, ?)
+        INSERT INTO nodes (
+          node_type, package_name, display_name, description,
+          category, development_style, is_ai_tool, is_trigger,
+          is_webhook, is_versioned, version, documentation,
+          properties_schema, operations, credentials_required
+        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
       `);

       nodes.forEach(node => {