test: add Phase 4 database integration tests (partial)

- Add comprehensive test utilities for database testing
- Implement connection management tests for in-memory and file databases
- Add transaction tests including nested transactions and savepoints
- Test database lifecycle, error handling, and performance
- Include tests for WAL mode, connection pooling, and constraints

Part of Phase 4: Integration Testing
This commit is contained in:
czlonkowski
2025-07-29 09:36:14 +02:00
parent e66a17b5c2
commit 1d464e29e5
24 changed files with 5391 additions and 55 deletions

View File

@@ -0,0 +1,346 @@
import { describe, it, expect, beforeEach, afterEach } from '@jest/globals';
import Database from 'better-sqlite3';
import * as fs from 'fs';
import * as path from 'path';
import { TestDatabase, TestDataGenerator } from './test-utils';
/**
 * Integration tests for database connection management: in-memory vs
 * file-backed databases, WAL mode, connection-pool simulation, error
 * handling, lifecycle, and configuration pragmas.
 */
describe('Database Connection Management', () => {
  let testDb: TestDatabase;

  afterEach(async () => {
    // Always release the connection and remove on-disk artifacts.
    if (testDb) {
      await testDb.cleanup();
    }
  });

  describe('In-Memory Database', () => {
    it('should create and connect to in-memory database', async () => {
      testDb = new TestDatabase({ mode: 'memory' });
      const db = await testDb.initialize();
      expect(db).toBeDefined();
      expect(db.open).toBe(true);
      expect(db.name).toBe(':memory:');
    });

    it('should execute queries on in-memory database', async () => {
      testDb = new TestDatabase({ mode: 'memory' });
      const db = await testDb.initialize();
      // Test basic query
      const result = db.prepare('SELECT 1 as value').get() as { value: number };
      expect(result.value).toBe(1);
      // Test table exists (schema should have been applied by initialize())
      const tables = db.prepare(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='nodes'"
      ).all();
      expect(tables.length).toBe(1);
    });

    it('should handle multiple connections to same in-memory database', async () => {
      // Each in-memory database is isolated
      const db1 = new TestDatabase({ mode: 'memory' });
      const db2 = new TestDatabase({ mode: 'memory' });
      const conn1 = await db1.initialize();
      const conn2 = await db2.initialize();
      // Insert data in first connection
      const node = TestDataGenerator.generateNode();
      conn1.prepare(`
        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `).run(
        node.name,
        node.type,
        node.displayName,
        node.package,
        node.version,
        node.typeVersion,
        JSON.stringify(node)
      );
      // Verify data is isolated
      const count1 = conn1.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      const count2 = conn2.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count1.count).toBe(1);
      expect(count2.count).toBe(0);
      await db1.cleanup();
      await db2.cleanup();
    });
  });

  describe('File-Based Database', () => {
    it('should create and connect to file database', async () => {
      testDb = new TestDatabase({ mode: 'file', name: 'test-connection.db' });
      const db = await testDb.initialize();
      expect(db).toBeDefined();
      expect(db.open).toBe(true);
      expect(db.name).toContain('test-connection.db');
      // Verify file exists
      const dbPath = path.join(__dirname, '../../../.test-dbs/test-connection.db');
      expect(fs.existsSync(dbPath)).toBe(true);
    });

    it('should enable WAL mode by default for file databases', async () => {
      testDb = new TestDatabase({ mode: 'file', name: 'test-wal.db' });
      const db = await testDb.initialize();
      const mode = db.prepare('PRAGMA journal_mode').get() as { journal_mode: string };
      expect(mode.journal_mode).toBe('wal');
      // Verify WAL files are created
      const dbPath = path.join(__dirname, '../../../.test-dbs/test-wal.db');
      expect(fs.existsSync(`${dbPath}-wal`)).toBe(true);
      expect(fs.existsSync(`${dbPath}-shm`)).toBe(true);
    });

    it('should allow disabling WAL mode', async () => {
      testDb = new TestDatabase({
        mode: 'file',
        name: 'test-no-wal.db',
        enableWAL: false
      });
      const db = await testDb.initialize();
      const mode = db.prepare('PRAGMA journal_mode').get() as { journal_mode: string };
      expect(mode.journal_mode).not.toBe('wal');
    });

    it('should handle connection pooling simulation', async () => {
      const dbPath = path.join(__dirname, '../../../.test-dbs/test-pool.db');
      // Create initial database
      testDb = new TestDatabase({ mode: 'file', name: 'test-pool.db' });
      await testDb.initialize();
      await testDb.cleanup();
      // Simulate multiple connections
      const connections: Database.Database[] = [];
      const connectionCount = 5;
      try {
        for (let i = 0; i < connectionCount; i++) {
          const conn = new Database(dbPath, {
            readonly: false,
            fileMustExist: true
          });
          connections.push(conn);
        }
        // All connections should be open
        expect(connections.every(conn => conn.open)).toBe(true);
        // Test concurrent reads
        const promises = connections.map((conn, index) => {
          return new Promise((resolve) => {
            const result = conn.prepare('SELECT ? as id').get(index);
            resolve(result);
          });
        });
        const results = await Promise.all(promises);
        expect(results).toHaveLength(connectionCount);
      } finally {
        // Cleanup connections
        connections.forEach(conn => conn.close());
        // The -wal/-shm sidecars may already have been removed by a WAL
        // checkpoint on close, so guard every unlink individually
        // (unconditional unlinks here could throw ENOENT and mask the
        // test outcome).
        for (const file of [dbPath, `${dbPath}-wal`, `${dbPath}-shm`]) {
          if (fs.existsSync(file)) {
            fs.unlinkSync(file);
          }
        }
      }
    });
  });

  describe('Connection Error Handling', () => {
    it('should handle invalid file path gracefully', async () => {
      const invalidPath = '/invalid/path/that/does/not/exist/test.db';
      expect(() => {
        new Database(invalidPath);
      }).toThrow();
    });

    it('should handle database file corruption', async () => {
      const corruptPath = path.join(__dirname, '../../../.test-dbs/corrupt.db');
      // Create directory if it doesn't exist
      const dir = path.dirname(corruptPath);
      if (!fs.existsSync(dir)) {
        fs.mkdirSync(dir, { recursive: true });
      }
      // Create a corrupt database file
      fs.writeFileSync(corruptPath, 'This is not a valid SQLite database');
      try {
        expect(() => {
          new Database(corruptPath);
        }).toThrow();
      } finally {
        if (fs.existsSync(corruptPath)) {
          fs.unlinkSync(corruptPath);
        }
      }
    });

    it('should handle readonly database access', async () => {
      // Create a database first
      testDb = new TestDatabase({ mode: 'file', name: 'test-readonly.db' });
      const db = await testDb.initialize();
      // Insert test data
      const node = TestDataGenerator.generateNode();
      db.prepare(`
        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `).run(
        node.name,
        node.type,
        node.displayName,
        node.package,
        node.version,
        node.typeVersion,
        JSON.stringify(node)
      );
      const dbPath = path.join(__dirname, '../../../.test-dbs/test-readonly.db');
      // Open as readonly
      const readonlyDb = new Database(dbPath, { readonly: true });
      try {
        // Reading should work
        const count = readonlyDb.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
        expect(count.count).toBe(1);
        // Writing should fail
        expect(() => {
          readonlyDb.prepare('DELETE FROM nodes').run();
        }).toThrow(/readonly/);
      } finally {
        readonlyDb.close();
      }
    });
  });

  describe('Connection Lifecycle', () => {
    it('should properly close database connections', async () => {
      testDb = new TestDatabase({ mode: 'file', name: 'test-lifecycle.db' });
      const db = await testDb.initialize();
      expect(db.open).toBe(true);
      await testDb.cleanup();
      expect(db.open).toBe(false);
    });

    it('should handle multiple open/close cycles', async () => {
      const dbPath = path.join(__dirname, '../../../.test-dbs/test-cycles.db');
      for (let i = 0; i < 3; i++) {
        const db = new TestDatabase({ mode: 'file', name: 'test-cycles.db' });
        const conn = await db.initialize();
        // Perform operation
        const result = conn.prepare('SELECT ? as cycle').get(i) as { cycle: number };
        expect(result.cycle).toBe(i);
        await db.cleanup();
      }
      // Ensure file is cleaned up
      expect(fs.existsSync(dbPath)).toBe(false);
    });

    it('should handle connection timeout simulation', async () => {
      testDb = new TestDatabase({ mode: 'file', name: 'test-timeout.db' });
      const db = await testDb.initialize();
      // Set a busy timeout
      db.exec('PRAGMA busy_timeout = 100'); // 100ms timeout
      // Start a transaction to lock the database
      db.exec('BEGIN EXCLUSIVE');
      // Try to access from another connection (should timeout)
      const dbPath = path.join(__dirname, '../../../.test-dbs/test-timeout.db');
      const conn2 = new Database(dbPath);
      conn2.exec('PRAGMA busy_timeout = 100');
      try {
        expect(() => {
          conn2.exec('BEGIN EXCLUSIVE');
        }).toThrow(/database is locked/);
      } finally {
        db.exec('ROLLBACK');
        conn2.close();
      }
    });
  });

  describe('Database Configuration', () => {
    it('should apply optimal pragmas for performance', async () => {
      testDb = new TestDatabase({ mode: 'file', name: 'test-pragmas.db' });
      const db = await testDb.initialize();
      // Apply performance pragmas
      db.exec('PRAGMA synchronous = NORMAL');
      db.exec('PRAGMA cache_size = -64000'); // 64MB cache
      db.exec('PRAGMA temp_store = MEMORY');
      db.exec('PRAGMA mmap_size = 268435456'); // 256MB mmap
      // Verify pragmas
      const sync = db.prepare('PRAGMA synchronous').get() as { synchronous: number };
      const cache = db.prepare('PRAGMA cache_size').get() as { cache_size: number };
      const temp = db.prepare('PRAGMA temp_store').get() as { temp_store: number };
      const mmap = db.prepare('PRAGMA mmap_size').get() as { mmap_size: number };
      expect(sync.synchronous).toBe(1); // NORMAL = 1
      expect(cache.cache_size).toBe(-64000);
      expect(temp.temp_store).toBe(2); // MEMORY = 2
      expect(mmap.mmap_size).toBeGreaterThan(0);
    });

    it('should enforce foreign key constraints', async () => {
      testDb = new TestDatabase({ mode: 'memory' });
      const db = await testDb.initialize();
      // Foreign keys should be enabled by default in our schema
      const fkEnabled = db.prepare('PRAGMA foreign_keys').get() as { foreign_keys: number };
      expect(fkEnabled.foreign_keys).toBe(1);
      // Test foreign key constraint
      const node = TestDataGenerator.generateNode();
      db.prepare(`
        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `).run(
        node.name,
        node.type,
        node.displayName,
        node.package,
        node.version,
        node.typeVersion,
        JSON.stringify(node)
      );
      // Try to insert doc for non-existent node (should fail)
      expect(() => {
        db.prepare(`
          INSERT INTO node_docs (node_name, content, examples)
          VALUES ('non-existent-node', 'content', '[]')
        `).run();
      }).toThrow(/FOREIGN KEY constraint failed/);
    });
  });
});

View File

@@ -0,0 +1,343 @@
import * as fs from 'fs';
import * as path from 'path';
import Database from 'better-sqlite3';
import { execSync } from 'child_process';
/**
 * Configuration for a TestDatabase instance.
 */
export interface TestDatabaseOptions {
  /** Backing store: an isolated ':memory:' database, or a file under .test-dbs/. */
  mode: 'memory' | 'file';
  /** File name for 'file' mode; defaults to a timestamped name when omitted. */
  name?: string;
  /** Enable WAL journal mode for file databases (default: true; ignored for 'memory'). */
  enableWAL?: boolean;
  /** When true, initialization probes FTS5 support and throws if unavailable. */
  enableFTS5?: boolean;
}
/**
 * Manages the lifecycle of a throwaway SQLite database for integration
 * tests: creation (in-memory or on disk under .test-dbs/), schema
 * application, and cleanup of the database file plus WAL/SHM sidecars.
 */
export class TestDatabase {
  private db: Database.Database | null = null;
  private dbPath?: string;
  private options: TestDatabaseOptions;

  constructor(options: TestDatabaseOptions = { mode: 'memory' }) {
    this.options = options;
  }

  /**
   * Open the database (idempotent — returns the existing connection if
   * already initialized) and apply the project schema.
   * @returns the live better-sqlite3 connection.
   * @throws Error if FTS5 support was requested but is unavailable.
   */
  async initialize(): Promise<Database.Database> {
    if (this.db) return this.db;
    if (this.options.mode === 'file') {
      const testDir = path.join(__dirname, '../../../.test-dbs');
      if (!fs.existsSync(testDir)) {
        fs.mkdirSync(testDir, { recursive: true });
      }
      this.dbPath = path.join(testDir, this.options.name || `test-${Date.now()}.db`);
      this.db = new Database(this.dbPath);
    } else {
      this.db = new Database(':memory:');
    }
    // Enable WAL mode for file databases unless explicitly disabled.
    if (this.options.mode === 'file' && this.options.enableWAL !== false) {
      this.db.exec('PRAGMA journal_mode = WAL');
    }
    // Probe FTS5 availability if requested. FTS5 is compiled into
    // better-sqlite3's bundled SQLite, but verify rather than assume.
    if (this.options.enableFTS5) {
      try {
        this.db.exec('CREATE VIRTUAL TABLE test_fts USING fts5(content)');
        this.db.exec('DROP TABLE test_fts');
      } catch (error) {
        throw new Error('FTS5 extension not available');
      }
    }
    // Apply schema
    await this.applySchema();
    return this.db;
  }

  /** Load src/database/schema.sql and execute it against this connection. */
  private async applySchema(): Promise<void> {
    if (!this.db) throw new Error('Database not initialized');
    const schemaPath = path.join(__dirname, '../../../src/database/schema.sql');
    const schema = fs.readFileSync(schemaPath, 'utf-8');
    // Execute the schema as a single script. Splitting on ';' (as a
    // previous version did) breaks compound statements such as
    // CREATE TRIGGER ... BEGIN ...; END; — exec() handles
    // multi-statement SQL natively.
    this.db.exec(schema);
  }

  /**
   * @returns the open connection.
   * @throws Error if initialize() has not been called.
   */
  getDatabase(): Database.Database {
    if (!this.db) throw new Error('Database not initialized');
    return this.db;
  }

  /** Close the connection and delete the database file and its WAL/SHM sidecars. */
  async cleanup(): Promise<void> {
    if (this.db) {
      this.db.close();
      this.db = null;
    }
    if (this.dbPath && fs.existsSync(this.dbPath)) {
      fs.unlinkSync(this.dbPath);
      // Also remove WAL and SHM files if they exist
      const walPath = `${this.dbPath}-wal`;
      const shmPath = `${this.dbPath}-shm`;
      if (fs.existsSync(walPath)) fs.unlinkSync(walPath);
      if (fs.existsSync(shmPath)) fs.unlinkSync(shmPath);
    }
  }

  /**
   * Best-effort probe for a write lock held by another connection.
   * @returns true only when BEGIN IMMEDIATE fails with SQLITE_BUSY.
   */
  isLocked(): boolean {
    if (!this.db) return false;
    try {
      this.db.exec('BEGIN IMMEDIATE');
      this.db.exec('ROLLBACK');
      return false;
    } catch (error: any) {
      return error.code === 'SQLITE_BUSY';
    }
  }
}
// Performance measurement utilities
/**
 * Collects wall-clock duration samples keyed by label and reports simple
 * summary statistics over them.
 */
export class PerformanceMonitor {
  /** Label -> recorded durations in milliseconds. */
  private samples: Map<string, number[]> = new Map();

  /**
   * Begin timing an operation.
   * @param label bucket to record the sample under.
   * @returns a stop function; invoking it records the elapsed time.
   */
  start(label: string): () => void {
    const begin = process.hrtime.bigint();
    return () => {
      // Nanosecond delta converted to milliseconds.
      const elapsedMs = Number(process.hrtime.bigint() - begin) / 1e6;
      const bucket = this.samples.get(label);
      if (bucket) {
        bucket.push(elapsedMs);
      } else {
        this.samples.set(label, [elapsedMs]);
      }
    };
  }

  /**
   * Summarize all samples recorded for a label.
   * @returns aggregate statistics, or null when nothing was recorded.
   */
  getStats(label: string): {
    count: number;
    total: number;
    average: number;
    min: number;
    max: number;
    median: number;
  } | null {
    const recorded = this.samples.get(label);
    if (!recorded || recorded.length === 0) return null;
    const ordered = [...recorded].sort((x, y) => x - y);
    let sum = 0;
    for (const value of recorded) sum += value;
    return {
      count: recorded.length,
      total: sum,
      average: sum / recorded.length,
      min: ordered[0],
      max: ordered[ordered.length - 1],
      // Upper median for even-length sample sets.
      median: ordered[ordered.length >> 1]
    };
  }

  /** Discard every recorded sample. */
  clear(): void {
    this.samples.clear();
  }
}
// Data generation utilities
/**
 * Factories for synthetic n8n node and workflow-template fixtures used by
 * the integration tests. Every generator spreads its `overrides` argument
 * last, so callers can pin any field to a deterministic value.
 */
export class TestDataGenerator {
  /** Build a single node fixture with a random, collision-resistant name. */
  static generateNode(overrides: any = {}): any {
    return {
      // slice(2, 11) yields 9 base-36 chars — same output as the
      // deprecated substr(2, 9) the original used.
      name: `testNode${Math.random().toString(36).slice(2, 11)}`,
      displayName: 'Test Node',
      description: 'A test node for integration testing',
      version: 1,
      typeVersion: 1,
      type: 'n8n-nodes-base.testNode',
      package: 'n8n-nodes-base',
      category: ['automation'],
      properties: [],
      credentials: [],
      ...overrides
    };
  }

  /**
   * Build `count` nodes with deterministic names testNode0..testNode{count-1}.
   * @param template fields applied to every node before the per-index ones.
   */
  static generateNodes(count: number, template: any = {}): any[] {
    return Array.from({ length: count }, (_, i) =>
      this.generateNode({
        ...template,
        name: `testNode${i}`,
        displayName: `Test Node ${i}`,
        type: `n8n-nodes-base.testNode${i}`
      })
    );
  }

  /** Build a workflow-template fixture with randomized id/name/view count. */
  static generateTemplate(overrides: any = {}): any {
    return {
      id: Math.floor(Math.random() * 100000),
      name: `Test Workflow ${Math.random().toString(36).slice(2, 11)}`,
      totalViews: Math.floor(Math.random() * 1000),
      nodeTypes: ['n8n-nodes-base.webhook', 'n8n-nodes-base.httpRequest'],
      categories: [{ id: 1, name: 'automation' }],
      description: 'A test workflow template',
      workflowInfo: {
        nodeCount: 5,
        webhookCount: 1
      },
      ...overrides
    };
  }

  /** Build `count` independent template fixtures. */
  static generateTemplates(count: number): any[] {
    return Array.from({ length: count }, () => this.generateTemplate());
  }
}
// Transaction test utilities
/**
 * Run `fn` inside an explicit BEGIN/COMMIT, rolling back on any error.
 *
 * The callback may return a value or a promise (the body already awaited
 * it; the signature now reflects that). NOTE: better-sqlite3 connections
 * are synchronous — if `fn` actually awaits, the transaction stays open
 * across the suspension, so prefer synchronous callbacks.
 *
 * @param db connection to run the transaction on.
 * @param fn work to perform; its (awaited) result is returned on commit.
 * @throws rethrows whatever `fn` threw, after issuing ROLLBACK.
 */
export async function runInTransaction<T>(
  db: Database.Database,
  fn: () => T | Promise<T>
): Promise<T> {
  db.exec('BEGIN');
  try {
    const result = await fn();
    db.exec('COMMIT');
    return result;
  } catch (error) {
    db.exec('ROLLBACK');
    throw error;
  }
}
// Concurrent access simulation
/**
 * Run `workerScript` in `workerCount` separate node processes, each told
 * via environment variables which database to open, its worker id, and
 * how many operations to perform.
 *
 * NOTE(review): execSync blocks the event loop, so despite the Promise
 * wrapper the workers actually execute one after another — this measures
 * serialized multi-process access, not truly overlapping concurrency.
 * TODO confirm whether async spawning (child_process.spawn/exec) was the
 * intent before relying on this for contention testing.
 *
 * @returns per-worker success/failure counts and total wall-clock duration (ms).
 */
export async function simulateConcurrentAccess(
  dbPath: string,
  workerCount: number,
  operations: number,
  workerScript: string
): Promise<{ success: number; failed: number; duration: number }> {
  const startTime = Date.now();
  const results = { success: 0, failed: 0 };
  // Create worker processes
  const workers = Array.from({ length: workerCount }, (_, i) => {
    return new Promise<void>((resolve) => {
      try {
        // Worker stdout is irrelevant; only the exit status matters, so
        // the return value of execSync is deliberately discarded.
        execSync(
          `node -e "${workerScript}"`,
          {
            env: {
              ...process.env,
              DB_PATH: dbPath,
              WORKER_ID: i.toString(),
              OPERATIONS: operations.toString()
            }
          }
        );
        results.success++;
      } catch (error) {
        results.failed++;
      }
      resolve();
    });
  });
  await Promise.all(workers);
  return {
    ...results,
    duration: Date.now() - startTime
  };
}
// Database integrity check
/**
 * Validate a database with SQLite's built-in integrity and foreign-key
 * checks plus an application-level orphan scan (node_docs rows whose
 * node_name has no matching nodes row). Errors thrown while checking are
 * captured into the report rather than propagated.
 *
 * @returns isValid=true with an empty errors array when all checks pass.
 */
export function checkDatabaseIntegrity(db: Database.Database): {
  isValid: boolean;
  errors: string[];
} {
  const errors: string[] = [];
  try {
    // PRAGMA integrity_check yields exactly one row { integrity_check: 'ok' }
    // on a healthy database. Type the rows explicitly — .all() returns
    // unknown[] under strict mode, so the untyped property access the
    // original used does not compile.
    const integrityRows = db.prepare('PRAGMA integrity_check').all() as Array<{ integrity_check: string }>;
    if (integrityRows.length !== 1 || integrityRows[0].integrity_check !== 'ok') {
      errors.push('Database integrity check failed');
    }
    // PRAGMA foreign_key_check returns one row per violation; empty = clean.
    const fkResult = db.prepare('PRAGMA foreign_key_check').all();
    if (fkResult.length > 0) {
      errors.push(`Foreign key violations: ${JSON.stringify(fkResult)}`);
    }
    // Application invariant: every documentation row references a node.
    const orphanedDocs = db.prepare(`
      SELECT COUNT(*) as count FROM node_docs
      WHERE node_name NOT IN (SELECT name FROM nodes)
    `).get() as { count: number };
    if (orphanedDocs.count > 0) {
      errors.push(`Found ${orphanedDocs.count} orphaned documentation records`);
    }
  } catch (error: any) {
    errors.push(`Integrity check error: ${error.message}`);
  }
  return {
    isValid: errors.length === 0,
    errors
  };
}
// Mock data for testing
// Canned node definitions mirroring the field layout of real
// n8n-nodes-base entries. Use these where tests need deterministic
// fixtures instead of TestDataGenerator's randomized output.
export const MOCK_NODES = {
  // Trigger node: starts a workflow when its HTTP endpoint is called.
  webhook: {
    name: 'webhook',
    displayName: 'Webhook',
    type: 'n8n-nodes-base.webhook',
    typeVersion: 1,
    description: 'Starts the workflow when a webhook is called',
    category: ['trigger'],
    package: 'n8n-nodes-base',
    properties: [
      {
        displayName: 'HTTP Method',
        name: 'httpMethod',
        type: 'options',
        options: [
          { name: 'GET', value: 'GET' },
          { name: 'POST', value: 'POST' }
        ],
        default: 'GET'
      }
    ]
  },
  // Action node: performs an outbound HTTP request.
  httpRequest: {
    name: 'httpRequest',
    displayName: 'HTTP Request',
    type: 'n8n-nodes-base.httpRequest',
    typeVersion: 1,
    description: 'Makes an HTTP request and returns the response',
    category: ['automation'],
    package: 'n8n-nodes-base',
    properties: [
      {
        displayName: 'URL',
        name: 'url',
        type: 'string',
        required: true,
        default: ''
      }
    ]
  }
};

View File

@@ -0,0 +1,512 @@
import { describe, it, expect, beforeEach, afterEach } from '@jest/globals';
import Database from 'better-sqlite3';
import { TestDatabase, TestDataGenerator, runInTransaction } from './test-utils';
/**
 * Integration tests for SQLite transaction semantics via better-sqlite3:
 * explicit BEGIN/COMMIT/ROLLBACK, savepoints, isolation levels, the
 * transaction() helper API, bulk-insert performance and error scenarios.
 */
describe('Database Transactions', () => {
  let testDb: TestDatabase;
  let db: Database.Database;

  beforeEach(async () => {
    testDb = new TestDatabase({ mode: 'memory' });
    db = await testDb.initialize();
  });

  afterEach(async () => {
    await testDb.cleanup();
  });

  describe('Basic Transactions', () => {
    it('should commit transaction successfully', async () => {
      const node = TestDataGenerator.generateNode();
      db.exec('BEGIN');
      db.prepare(`
        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `).run(
        node.name,
        node.type,
        node.displayName,
        node.package,
        node.version,
        node.typeVersion,
        JSON.stringify(node)
      );
      // Data should be visible within transaction
      const countInTx = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(countInTx.count).toBe(1);
      db.exec('COMMIT');
      // Data should persist after commit
      const countAfter = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(countAfter.count).toBe(1);
    });

    it('should rollback transaction on error', async () => {
      const node = TestDataGenerator.generateNode();
      db.exec('BEGIN');
      db.prepare(`
        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `).run(
        node.name,
        node.type,
        node.displayName,
        node.package,
        node.version,
        node.typeVersion,
        JSON.stringify(node)
      );
      // Rollback
      db.exec('ROLLBACK');
      // Data should not persist
      const count = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count.count).toBe(0);
    });

    it('should handle transaction helper function', async () => {
      const node = TestDataGenerator.generateNode();
      // Successful transaction
      await runInTransaction(db, () => {
        db.prepare(`
          INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
          VALUES (?, ?, ?, ?, ?, ?, ?)
        `).run(
          node.name,
          node.type,
          node.displayName,
          node.package,
          node.version,
          node.typeVersion,
          JSON.stringify(node)
        );
      });
      const count = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count.count).toBe(1);
      // Failed transaction
      await expect(runInTransaction(db, () => {
        db.prepare('INSERT INTO invalid_table VALUES (1)').run();
      })).rejects.toThrow();
      // Count should remain the same
      const countAfterError = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(countAfterError.count).toBe(1);
    });
  });

  describe('Nested Transactions (Savepoints)', () => {
    it('should handle nested transactions with savepoints', async () => {
      const nodes = TestDataGenerator.generateNodes(3);
      db.exec('BEGIN');
      // Insert first node
      const insertStmt = db.prepare(`
        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `);
      insertStmt.run(
        nodes[0].name,
        nodes[0].type,
        nodes[0].displayName,
        nodes[0].package,
        nodes[0].version,
        nodes[0].typeVersion,
        JSON.stringify(nodes[0])
      );
      // Create savepoint
      db.exec('SAVEPOINT sp1');
      // Insert second node
      insertStmt.run(
        nodes[1].name,
        nodes[1].type,
        nodes[1].displayName,
        nodes[1].package,
        nodes[1].version,
        nodes[1].typeVersion,
        JSON.stringify(nodes[1])
      );
      // Create another savepoint
      db.exec('SAVEPOINT sp2');
      // Insert third node
      insertStmt.run(
        nodes[2].name,
        nodes[2].type,
        nodes[2].displayName,
        nodes[2].package,
        nodes[2].version,
        nodes[2].typeVersion,
        JSON.stringify(nodes[2])
      );
      // Should have 3 nodes
      let count = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count.count).toBe(3);
      // Rollback to sp2 undoes only the third insert
      db.exec('ROLLBACK TO sp2');
      count = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count.count).toBe(2);
      // Rollback to sp1 undoes the second insert as well
      db.exec('ROLLBACK TO sp1');
      count = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count.count).toBe(1);
      // Commit main transaction
      db.exec('COMMIT');
      // Should still have 1 node
      count = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count.count).toBe(1);
    });

    it('should release savepoints properly', async () => {
      db.exec('BEGIN');
      db.exec('SAVEPOINT sp1');
      db.exec('SAVEPOINT sp2');
      // Release sp2
      db.exec('RELEASE sp2');
      // Can still rollback to sp1
      db.exec('ROLLBACK TO sp1');
      // But cannot rollback to sp2
      expect(() => {
        db.exec('ROLLBACK TO sp2');
      }).toThrow(/no such savepoint/);
      db.exec('COMMIT');
    });
  });

  describe('Transaction Isolation', () => {
    it('should handle IMMEDIATE transactions', async () => {
      // Lock contention needs a real file DB. Dispose of the in-memory
      // DB created by beforeEach first so its handle is not leaked.
      await testDb.cleanup();
      testDb = new TestDatabase({ mode: 'file', name: 'test-immediate.db' });
      db = await testDb.initialize();
      // Start immediate transaction (acquires write lock immediately)
      db.exec('BEGIN IMMEDIATE');
      // Insert data
      const node = TestDataGenerator.generateNode();
      db.prepare(`
        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `).run(
        node.name,
        node.type,
        node.displayName,
        node.package,
        node.version,
        node.typeVersion,
        JSON.stringify(node)
      );
      // Another connection should not be able to write
      const dbPath = db.name;
      const conn2 = new Database(dbPath);
      conn2.exec('PRAGMA busy_timeout = 100');
      expect(() => {
        conn2.exec('BEGIN IMMEDIATE');
      }).toThrow(/database is locked/);
      db.exec('COMMIT');
      conn2.close();
    });

    it('should handle EXCLUSIVE transactions', async () => {
      // Same as above: swap the beforeEach in-memory DB for a file DB,
      // cleaning up the old handle first.
      await testDb.cleanup();
      testDb = new TestDatabase({ mode: 'file', name: 'test-exclusive.db' });
      db = await testDb.initialize();
      // Start exclusive transaction (prevents other connections from reading)
      db.exec('BEGIN EXCLUSIVE');
      // Another connection should not be able to start any transaction
      const dbPath = db.name;
      const conn2 = new Database(dbPath);
      conn2.exec('PRAGMA busy_timeout = 100');
      expect(() => {
        conn2.exec('BEGIN');
        conn2.prepare('SELECT COUNT(*) FROM nodes').get();
      }).toThrow();
      db.exec('COMMIT');
      conn2.close();
    });
  });

  describe('Transaction with Better-SQLite3 API', () => {
    it('should use transaction() method for automatic handling', () => {
      const nodes = TestDataGenerator.generateNodes(5);
      const insertMany = db.transaction((nodes: any[]) => {
        const stmt = db.prepare(`
          INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
          VALUES (?, ?, ?, ?, ?, ?, ?)
        `);
        for (const node of nodes) {
          stmt.run(
            node.name,
            node.type,
            node.displayName,
            node.package,
            node.version,
            node.typeVersion,
            JSON.stringify(node)
          );
        }
        return nodes.length;
      });
      // Execute transaction
      const inserted = insertMany(nodes);
      expect(inserted).toBe(5);
      // Verify all inserted
      const count = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count.count).toBe(5);
    });

    it('should rollback transaction() on error', () => {
      const nodes = TestDataGenerator.generateNodes(3);
      const insertWithError = db.transaction((nodes: any[]) => {
        const stmt = db.prepare(`
          INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
          VALUES (?, ?, ?, ?, ?, ?, ?)
        `);
        for (let i = 0; i < nodes.length; i++) {
          if (i === 2) {
            // Cause an error on third insert
            throw new Error('Simulated error');
          }
          const node = nodes[i];
          stmt.run(
            node.name,
            node.type,
            node.displayName,
            node.package,
            node.version,
            node.typeVersion,
            JSON.stringify(node)
          );
        }
      });
      // Should throw and rollback
      expect(() => insertWithError(nodes)).toThrow('Simulated error');
      // No nodes should be inserted
      const count = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count.count).toBe(0);
    });

    it('should handle immediate transactions with transaction()', () => {
      // .immediate is a callable *variant* of the transaction function,
      // not a factory method. The previous code called `.immediate()` at
      // definition time, which ran the transaction with an undefined
      // argument instead of producing a reusable function.
      const insertTx = db.transaction((node: any) => {
        db.prepare(`
          INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
          VALUES (?, ?, ?, ?, ?, ?, ?)
        `).run(
          node.name,
          node.type,
          node.displayName,
          node.package,
          node.version,
          node.typeVersion,
          JSON.stringify(node)
        );
      });
      const node = TestDataGenerator.generateNode();
      // Run with BEGIN IMMEDIATE semantics.
      insertTx.immediate(node);
      const count = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count.count).toBe(1);
    });

    it('should handle exclusive transactions with transaction()', () => {
      // As above: invoke the .exclusive variant at call time rather than
      // calling `.exclusive()` while defining the transaction.
      const readTx = db.transaction(() => {
        return db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      });
      const result = readTx.exclusive();
      expect(result.count).toBe(0);
    });
  });

  describe('Transaction Performance', () => {
    it('should show performance benefit of transactions for bulk inserts', () => {
      const nodes = TestDataGenerator.generateNodes(1000);
      const stmt = db.prepare(`
        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `);
      // Without transaction: 100 autocommitted single-row inserts
      const start1 = process.hrtime.bigint();
      for (let i = 0; i < 100; i++) {
        const node = nodes[i];
        stmt.run(
          node.name,
          node.type,
          node.displayName,
          node.package,
          node.version,
          node.typeVersion,
          JSON.stringify(node)
        );
      }
      const duration1 = Number(process.hrtime.bigint() - start1) / 1_000_000;
      // With transaction: the remaining 900 rows in a single commit
      const start2 = process.hrtime.bigint();
      const insertMany = db.transaction((nodes: any[]) => {
        for (const node of nodes) {
          stmt.run(
            node.name,
            node.type,
            node.displayName,
            node.package,
            node.version,
            node.typeVersion,
            JSON.stringify(node)
          );
        }
      });
      insertMany(nodes.slice(100, 1000));
      const duration2 = Number(process.hrtime.bigint() - start2) / 1_000_000;
      // 900 transactional inserts must finish in under 5x the time of
      // 100 autocommitted inserts, i.e. the per-row cost inside a
      // transaction is meaningfully lower. (A loose bound to keep the
      // test stable across machines.)
      expect(duration2).toBeLessThan(duration1 * 5);
      // Verify all inserted
      const count = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count.count).toBe(1000);
    });
  });

  describe('Transaction Error Scenarios', () => {
    it('should handle constraint violations in transactions', () => {
      const node = TestDataGenerator.generateNode();
      db.exec('BEGIN');
      // First insert should succeed
      db.prepare(`
        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `).run(
        node.name,
        node.type,
        node.displayName,
        node.package,
        node.version,
        node.typeVersion,
        JSON.stringify(node)
      );
      // Second insert with same name should fail (unique constraint)
      expect(() => {
        db.prepare(`
          INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
          VALUES (?, ?, ?, ?, ?, ?, ?)
        `).run(
          node.name, // Same name - will violate unique constraint
          node.type,
          node.displayName,
          node.package,
          node.version,
          node.typeVersion,
          JSON.stringify(node)
        );
      }).toThrow(/UNIQUE constraint failed/);
      // A statement-level failure does not abort the transaction;
      // the first insert can still be committed.
      db.exec('COMMIT');
      const count = db.prepare('SELECT COUNT(*) as count FROM nodes').get() as { count: number };
      expect(count.count).toBe(1);
    });

    it('should handle deadlock scenarios', async () => {
      // This test simulates a potential deadlock scenario across two real
      // file-DB connections; dispose of the beforeEach in-memory DB first
      // so its handle is not leaked.
      await testDb.cleanup();
      testDb = new TestDatabase({ mode: 'file', name: 'test-deadlock.db' });
      db = await testDb.initialize();
      // Insert initial data
      const nodes = TestDataGenerator.generateNodes(2);
      const insertStmt = db.prepare(`
        INSERT INTO nodes (name, type, display_name, package, version, type_version, data)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `);
      nodes.forEach(node => {
        insertStmt.run(
          node.name,
          node.type,
          node.displayName,
          node.package,
          node.version,
          node.typeVersion,
          JSON.stringify(node)
        );
      });
      // Connection 1 updates node 0 then tries to update node 1
      // Connection 2 updates node 1 then tries to update node 0
      // This would cause a deadlock in a traditional RDBMS
      const dbPath = db.name;
      const conn1 = new Database(dbPath);
      const conn2 = new Database(dbPath);
      // Set short busy timeout to fail fast
      conn1.exec('PRAGMA busy_timeout = 100');
      conn2.exec('PRAGMA busy_timeout = 100');
      // Start transactions
      conn1.exec('BEGIN IMMEDIATE');
      // Conn1 updates first node
      conn1.prepare('UPDATE nodes SET data = ? WHERE name = ?').run(
        JSON.stringify({ updated: 1 }),
        nodes[0].name
      );
      // Try to start transaction on conn2 (should fail due to IMMEDIATE lock)
      expect(() => {
        conn2.exec('BEGIN IMMEDIATE');
      }).toThrow(/database is locked/);
      conn1.exec('COMMIT');
      conn1.close();
      conn2.close();
    });
  });
});