mirror of https://github.com/AutoMaker-Org/automaker.git
synced 2026-01-30 06:12:03 +00:00
feat: implement atomic file writing and recovery utilities

- Introduced atomic write functionality for JSON files to ensure data integrity during writes.
- Added recovery mechanisms to read JSON files with fallback options for corrupted or missing files.
- Enhanced existing services to utilize atomic write and recovery features for improved reliability.
- Updated tests to cover new atomic writing and recovery scenarios, ensuring robust error handling and data consistency.
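For reviewers, a minimal sketch of how the new utilities are meant to compose (the `Settings` shape and file path below are illustrative, not part of this commit):

```typescript
import {
  atomicWriteJson,
  readJsonWithRecovery,
  logRecoveryWarning,
  DEFAULT_BACKUP_COUNT,
} from '@automaker/utils';

interface Settings {
  theme: string; // hypothetical shape, for illustration only
}

const settingsPath = '/tmp/example/settings.json'; // hypothetical path

// Crash-safe write: serialize to a temp file, rotate .bak1 -> .bak2 -> .bak3,
// then atomically rename the temp file over the target.
await atomicWriteJson<Settings>(
  settingsPath,
  { theme: 'dark' },
  { backupCount: DEFAULT_BACKUP_COUNT, createDirs: true }
);

// Recovery-enabled read: main file first, then leftover .tmp.* files,
// then .bak1..bak3, finally the supplied default (null here).
const result = await readJsonWithRecovery<Settings | null>(settingsPath, null, {
  maxBackups: DEFAULT_BACKUP_COUNT,
  autoRestore: true,
});
logRecoveryWarning(result, 'Settings');
```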
@@ -5,7 +5,7 @@
 import path from 'path';
 import * as secureFs from '../../lib/secure-fs.js';
 import type { EventEmitter } from '../../lib/events.js';
-import { createLogger } from '@automaker/utils';
+import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
 import { getFeaturesDir } from '@automaker/platform';
 import { extractJsonWithArray } from '../../lib/json-extractor.js';

@@ -73,10 +73,10 @@ export async function parseAndCreateFeatures(
       updatedAt: new Date().toISOString(),
     };

-    await secureFs.writeFile(
-      path.join(featureDir, 'feature.json'),
-      JSON.stringify(featureData, null, 2)
-    );
+    // Use atomic write with backup support for crash protection
+    await atomicWriteJson(path.join(featureDir, 'feature.json'), featureData, {
+      backupCount: DEFAULT_BACKUP_COUNT,
+    });

     createdFeatures.push({ id: feature.id, title: feature.title });
   }

@@ -29,6 +29,10 @@ import {
   appendLearning,
   recordMemoryUsage,
   createLogger,
+  atomicWriteJson,
+  readJsonWithRecovery,
+  logRecoveryWarning,
+  DEFAULT_BACKUP_COUNT,
 } from '@automaker/utils';

 const logger = createLogger('AutoMode');
@@ -1414,13 +1418,13 @@ Address the follow-up instructions above. Review the previous work and make the
       allImagePaths.push(...allPaths);
     }

-    // Save updated feature.json with new images
+    // Save updated feature.json with new images (atomic write with backup)
     if (copiedImagePaths.length > 0 && feature) {
       const featureDirForSave = getFeatureDir(projectPath, featureId);
       const featurePath = path.join(featureDirForSave, 'feature.json');

       try {
-        await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+        await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
       } catch (error) {
         logger.error(`Failed to save feature.json:`, error);
       }
@@ -2088,8 +2092,20 @@ Format your response as a structured markdown document.`;
     const featurePath = path.join(featureDir, 'feature.json');

     try {
-      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-      const feature = JSON.parse(data);
+      // Use recovery-enabled read for corrupted file handling
+      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });
+
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);
+
+      const feature = result.data;
+      if (!feature) {
+        logger.warn(`Feature ${featureId} not found or could not be recovered`);
+        return;
+      }
+
       feature.status = status;
       feature.updatedAt = new Date().toISOString();
       // Set justFinishedAt timestamp when moving to waiting_approval (agent just completed)
@@ -2100,7 +2116,9 @@ Format your response as a structured markdown document.`;
         // Clear the timestamp when moving to other statuses
         feature.justFinishedAt = undefined;
       }
-      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+
+      // Use atomic write with backup support
+      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

       // Sync completed/verified features to app_spec.txt
       if (status === 'verified' || status === 'completed') {
@@ -2111,8 +2129,8 @@ Format your response as a structured markdown document.`;
           logger.warn(`Failed to sync feature ${featureId} to app_spec.txt:`, syncError);
         }
       }
-    } catch {
-      // Feature file may not exist
+    } catch (error) {
+      logger.error(`Failed to update feature status for ${featureId}:`, error);
     }
   }

@@ -2124,11 +2142,24 @@ Format your response as a structured markdown document.`;
     featureId: string,
     updates: Partial<PlanSpec>
   ): Promise<void> {
-    const featurePath = path.join(projectPath, '.automaker', 'features', featureId, 'feature.json');
+    // Use getFeatureDir helper for consistent path resolution
+    const featureDir = getFeatureDir(projectPath, featureId);
+    const featurePath = path.join(featureDir, 'feature.json');

     try {
-      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-      const feature = JSON.parse(data);
+      // Use recovery-enabled read for corrupted file handling
+      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });
+
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);
+
+      const feature = result.data;
+      if (!feature) {
+        logger.warn(`Feature ${featureId} not found or could not be recovered`);
+        return;
+      }

       // Initialize planSpec if it doesn't exist
       if (!feature.planSpec) {
@@ -2148,7 +2179,9 @@ Format your response as a structured markdown document.`;
       }

       feature.updatedAt = new Date().toISOString();
-      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+
+      // Use atomic write with backup support
+      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
     } catch (error) {
       logger.error(`Failed to update planSpec for ${featureId}:`, error);
     }
@@ -2165,25 +2198,34 @@
     const allFeatures: Feature[] = [];
     const pendingFeatures: Feature[] = [];

-    // Load all features (for dependency checking)
+    // Load all features (for dependency checking) with recovery support
     for (const entry of entries) {
       if (entry.isDirectory()) {
         const featurePath = path.join(featuresDir, entry.name, 'feature.json');
-        try {
-          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-          const feature = JSON.parse(data);
-          allFeatures.push(feature);
-
-          // Track pending features separately
-          if (
-            feature.status === 'pending' ||
-            feature.status === 'ready' ||
-            feature.status === 'backlog'
-          ) {
-            pendingFeatures.push(feature);
-          }
-        } catch {
-          // Skip invalid features
+        // Use recovery-enabled read for corrupted file handling
+        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+          maxBackups: DEFAULT_BACKUP_COUNT,
+          autoRestore: true,
+        });
+
+        logRecoveryWarning(result, `Feature ${entry.name}`, logger);
+
+        const feature = result.data;
+        if (!feature) {
+          // Skip features that couldn't be loaded or recovered
+          continue;
+        }
+
+        allFeatures.push(feature);
+
+        // Track pending features separately
+        if (
+          feature.status === 'pending' ||
+          feature.status === 'ready' ||
+          feature.status === 'backlog'
+        ) {
+          pendingFeatures.push(feature);
         }
       }
     }
@@ -3415,31 +3457,39 @@ After generating the revised spec, output:
     for (const entry of entries) {
       if (entry.isDirectory()) {
         const featurePath = path.join(featuresDir, entry.name, 'feature.json');
-        try {
-          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-          const feature = JSON.parse(data) as Feature;
-
-          // Check if feature was interrupted (in_progress or pipeline_*)
-          if (
-            feature.status === 'in_progress' ||
-            (feature.status && feature.status.startsWith('pipeline_'))
-          ) {
-            // Verify it has existing context (agent-output.md)
-            const featureDir = getFeatureDir(projectPath, feature.id);
-            const contextPath = path.join(featureDir, 'agent-output.md');
-            try {
-              await secureFs.access(contextPath);
-              interruptedFeatures.push(feature);
-              logger.info(
-                `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
-              );
-            } catch {
-              // No context file, skip this feature - it will be restarted fresh
-              logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
-            }
-          }
-        } catch {
-          // Skip invalid features
+        // Use recovery-enabled read for corrupted file handling
+        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+          maxBackups: DEFAULT_BACKUP_COUNT,
+          autoRestore: true,
+        });
+
+        logRecoveryWarning(result, `Feature ${entry.name}`, logger);
+
+        const feature = result.data;
+        if (!feature) {
+          // Skip features that couldn't be loaded or recovered
+          continue;
+        }
+
+        // Check if feature was interrupted (in_progress or pipeline_*)
+        if (
+          feature.status === 'in_progress' ||
+          (feature.status && feature.status.startsWith('pipeline_'))
+        ) {
+          // Verify it has existing context (agent-output.md)
+          const featureDir = getFeatureDir(projectPath, feature.id);
+          const contextPath = path.join(featureDir, 'agent-output.md');
+          try {
+            await secureFs.access(contextPath);
+            interruptedFeatures.push(feature);
+            logger.info(
+              `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
+            );
+          } catch {
+            // No context file, skip this feature - it will be restarted fresh
+            logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
+          }
         }
       }
     }

@@ -5,7 +5,13 @@

 import path from 'path';
 import type { Feature, DescriptionHistoryEntry } from '@automaker/types';
-import { createLogger } from '@automaker/utils';
+import {
+  createLogger,
+  atomicWriteJson,
+  readJsonWithRecovery,
+  logRecoveryWarning,
+  DEFAULT_BACKUP_COUNT,
+} from '@automaker/utils';
 import * as secureFs from '../lib/secure-fs.js';
 import {
   getFeaturesDir,
@@ -194,31 +200,31 @@ export class FeatureLoader {
     })) as any[];
     const featureDirs = entries.filter((entry) => entry.isDirectory());

-    // Load all features concurrently (secureFs has built-in concurrency limiting)
+    // Load all features concurrently with automatic recovery from backups
     const featurePromises = featureDirs.map(async (dir) => {
       const featureId = dir.name;
       const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);

-      try {
-        const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
-        const feature = JSON.parse(content);
+      // Use recovery-enabled read to handle corrupted files
+      const result = await readJsonWithRecovery<Feature | null>(featureJsonPath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });

-        if (!feature.id) {
-          logger.warn(`Feature ${featureId} missing required 'id' field, skipping`);
-          return null;
-        }
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);

-        return feature as Feature;
-      } catch (error) {
-        if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
-          return null;
-        } else if (error instanceof SyntaxError) {
-          logger.warn(`Failed to parse feature.json for ${featureId}: ${error.message}`);
-        } else {
-          logger.error(`Failed to load feature ${featureId}:`, (error as Error).message);
-        }
+      const feature = result.data;
+
+      if (!feature) {
         return null;
       }
+
+      if (!feature.id) {
+        logger.warn(`Feature ${featureId} missing required 'id' field, skipping`);
+        return null;
+      }
+
+      return feature;
     });

     const results = await Promise.all(featurePromises);
@@ -303,19 +309,20 @@ export class FeatureLoader {

   /**
    * Get a single feature by ID
+   * Uses automatic recovery from backups if the main file is corrupted
    */
   async get(projectPath: string, featureId: string): Promise<Feature | null> {
-    try {
-      const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
-      const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
-      return JSON.parse(content);
-    } catch (error) {
-      if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
-        return null;
-      }
-      logger.error(`Failed to get feature ${featureId}:`, error);
-      throw error;
-    }
+    const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
+
+    // Use recovery-enabled read to handle corrupted files
+    const result = await readJsonWithRecovery<Feature | null>(featureJsonPath, null, {
+      maxBackups: DEFAULT_BACKUP_COUNT,
+      autoRestore: true,
+    });
+
+    logRecoveryWarning(result, `Feature ${featureId}`, logger);
+
+    return result.data;
   }

   /**
@@ -359,8 +366,8 @@ export class FeatureLoader {
       descriptionHistory: initialHistory,
     };

-    // Write feature.json
-    await secureFs.writeFile(featureJsonPath, JSON.stringify(feature, null, 2), 'utf-8');
+    // Write feature.json atomically with backup support
+    await atomicWriteJson(featureJsonPath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

     logger.info(`Created feature ${featureId}`);
     return feature;
@@ -444,9 +451,9 @@ export class FeatureLoader {
       descriptionHistory: updatedHistory,
     };

-    // Write back to file
+    // Write back to file atomically with backup support
     const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
-    await secureFs.writeFile(featureJsonPath, JSON.stringify(updatedFeature, null, 2), 'utf-8');
+    await atomicWriteJson(featureJsonPath, updatedFeature, { backupCount: DEFAULT_BACKUP_COUNT });

     logger.info(`Updated feature ${featureId}`);
     return updatedFeature;

@@ -7,7 +7,7 @@
  * - Per-project settings ({projectPath}/.automaker/settings.json)
  */

-import { createLogger } from '@automaker/utils';
+import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
 import * as secureFs from '../lib/secure-fs.js';

 import {
@@ -42,28 +42,8 @@ import {
 const logger = createLogger('SettingsService');

-/**
- * Atomic file write - write to temp file then rename
- */
-async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
-  const tempPath = `${filePath}.tmp.${Date.now()}`;
-  const content = JSON.stringify(data, null, 2);
-
-  try {
-    await secureFs.writeFile(tempPath, content, 'utf-8');
-    await secureFs.rename(tempPath, filePath);
-  } catch (error) {
-    // Clean up temp file if it exists
-    try {
-      await secureFs.unlink(tempPath);
-    } catch {
-      // Ignore cleanup errors
-    }
-    throw error;
-  }
-}
-
 /**
  * Safely read JSON file with fallback to default
+ * Wrapper for readJsonFile from utils that uses the local secureFs
+ * to maintain compatibility with the server's secure file system
  */
 async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
   try {
@@ -90,6 +70,13 @@ async function fileExists(filePath: string): Promise<boolean> {
   }
 }

+/**
+ * Write settings atomically with backup support
+ */
+async function writeSettingsJson(filePath: string, data: unknown): Promise<void> {
+  await atomicWriteJson(filePath, data, { backupCount: DEFAULT_BACKUP_COUNT });
+}
+
 /**
  * SettingsService - Manages persistent storage of user settings and credentials
  *
@@ -180,7 +167,7 @@ export class SettingsService {
     if (needsSave) {
       try {
         await ensureDataDir(this.dataDir);
-        await atomicWriteJson(settingsPath, result);
+        await writeSettingsJson(settingsPath, result);
         logger.info('Settings migration complete');
       } catch (error) {
         logger.error('Failed to save migrated settings:', error);
@@ -340,7 +327,7 @@ export class SettingsService {
       };
     }

-    await atomicWriteJson(settingsPath, updated);
+    await writeSettingsJson(settingsPath, updated);
     logger.info('Global settings updated');

     return updated;
@@ -414,7 +401,7 @@ export class SettingsService {
       };
     }

-    await atomicWriteJson(credentialsPath, updated);
+    await writeSettingsJson(credentialsPath, updated);
     logger.info('Credentials updated');

     return updated;
@@ -525,7 +512,7 @@ export class SettingsService {
       };
     }

-    await atomicWriteJson(settingsPath, updated);
+    await writeSettingsJson(settingsPath, updated);
     logger.info(`Project settings updated for ${projectPath}`);

     return updated;

@@ -190,9 +190,10 @@ describe('feature-loader.ts', () => {
       const result = await loader.getAll(testProjectPath);

       expect(result).toEqual([]);
+      // With recovery-enabled reads, warnings come from AtomicWriter and FeatureLoader
       expect(consoleSpy).toHaveBeenCalledWith(
-        expect.stringMatching(/WARN.*\[FeatureLoader\]/),
-        expect.stringContaining('Failed to parse feature.json')
+        expect.stringMatching(/WARN.*\[AtomicWriter\]/),
+        expect.stringContaining('unavailable')
       );

       consoleSpy.mockRestore();
@@ -260,10 +261,13 @@ describe('feature-loader.ts', () => {
       expect(result).toBeNull();
     });

-    it('should throw on other errors', async () => {
+    it('should return null on other errors (with recovery attempt)', async () => {
+      // With recovery-enabled reads, get() returns null instead of throwing
+      // because it attempts to recover from backups before giving up
       vi.mocked(fs.readFile).mockRejectedValue(new Error('Permission denied'));

-      await expect(loader.get(testProjectPath, 'feature-123')).rejects.toThrow('Permission denied');
+      const result = await loader.get(testProjectPath, 'feature-123');
+      expect(result).toBeNull();
     });
   });

libs/utils/src/atomic-writer.ts (new file, 362 lines)
@@ -0,0 +1,362 @@
/**
 * Atomic file writing utilities for JSON data
 *
 * Provides atomic write operations using temp-file + rename pattern,
 * ensuring data integrity even during crashes or power failures.
 */

import { secureFs } from '@automaker/platform';
import path from 'path';
import { createLogger } from './logger.js';
import { mkdirSafe } from './fs-utils.js';

const logger = createLogger('AtomicWriter');

/** Default maximum number of backup files to keep for crash recovery */
export const DEFAULT_BACKUP_COUNT = 3;

/**
 * Options for atomic write operations
 */
export interface AtomicWriteOptions {
  /** Number of spaces for JSON indentation (default: 2) */
  indent?: number;
  /** Create parent directories if they don't exist (default: false) */
  createDirs?: boolean;
  /** Number of backup files to keep (0 = no backups, default: 0). When > 0, rotates .bak1, .bak2, etc. */
  backupCount?: number;
}

/**
 * Rotate backup files (.bak1 -> .bak2 -> .bak3, oldest is deleted)
 * and create a new backup from the current file.
 *
 * @param filePath - Absolute path to the file being backed up
 * @param maxBackups - Maximum number of backup files to keep
 */
export async function rotateBackups(
  filePath: string,
  maxBackups: number = DEFAULT_BACKUP_COUNT
): Promise<void> {
  // Check if the source file exists before attempting backup
  try {
    await secureFs.access(filePath);
  } catch {
    // No existing file to backup
    return;
  }

  // Rotate existing backups: .bak3 is deleted, .bak2 -> .bak3, .bak1 -> .bak2
  for (let i = maxBackups; i >= 1; i--) {
    const currentBackup = `${filePath}.bak${i}`;
    const nextBackup = `${filePath}.bak${i + 1}`;

    try {
      if (i === maxBackups) {
        // Delete the oldest backup
        await secureFs.unlink(currentBackup);
      } else {
        // Rename current backup to next slot
        await secureFs.rename(currentBackup, nextBackup);
      }
    } catch {
      // Ignore errors - backup file may not exist
    }
  }

  // Copy current file to .bak1
  try {
    await secureFs.copyFile(filePath, `${filePath}.bak1`);
  } catch (error) {
    logger.warn(`Failed to create backup of ${filePath}:`, error);
    // Continue with write even if backup fails
  }
}

/**
 * Atomically write JSON data to a file.
 *
 * Uses the temp-file + rename pattern for atomicity:
 * 1. Writes data to a temporary file
 * 2. Atomically renames temp file to target path
 * 3. Cleans up temp file on error
 *
 * @param filePath - Absolute path to the target file
 * @param data - Data to serialize as JSON
 * @param options - Optional write options
 * @throws Error if write fails (temp file is cleaned up)
 *
 * @example
 * ```typescript
 * await atomicWriteJson('/path/to/config.json', { key: 'value' });
 * await atomicWriteJson('/path/to/data.json', data, { indent: 4, createDirs: true });
 * ```
 */
export async function atomicWriteJson<T>(
  filePath: string,
  data: T,
  options: AtomicWriteOptions = {}
): Promise<void> {
  const { indent = 2, createDirs = false, backupCount = 0 } = options;
  const resolvedPath = path.resolve(filePath);
  const tempPath = `${resolvedPath}.tmp.${Date.now()}`;

  // Create parent directories if requested
  if (createDirs) {
    const dirPath = path.dirname(resolvedPath);
    await mkdirSafe(dirPath);
  }

  const content = JSON.stringify(data, null, indent);

  try {
    // Rotate backups before writing (if backups are enabled)
    if (backupCount > 0) {
      await rotateBackups(resolvedPath, backupCount);
    }

    await secureFs.writeFile(tempPath, content, 'utf-8');
    await secureFs.rename(tempPath, resolvedPath);
  } catch (error) {
    // Clean up temp file if it exists
    try {
      await secureFs.unlink(tempPath);
    } catch {
      // Ignore cleanup errors - best effort
    }
    logger.error(`Failed to atomically write to ${resolvedPath}:`, error);
    throw error;
  }
}

/**
 * Safely read JSON from a file with fallback to default value.
 *
 * Returns the default value if:
 * - File doesn't exist (ENOENT)
 * - File content is invalid JSON
 *
 * @param filePath - Absolute path to the file
 * @param defaultValue - Value to return if file doesn't exist or is invalid
 * @returns Parsed JSON data or default value
 *
 * @example
 * ```typescript
 * const config = await readJsonFile('/path/to/config.json', { version: 1 });
 * ```
 */
export async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
  const resolvedPath = path.resolve(filePath);

  try {
    const content = (await secureFs.readFile(resolvedPath, 'utf-8')) as string;
    return JSON.parse(content) as T;
  } catch (error) {
    const nodeError = error as NodeJS.ErrnoException;
    if (nodeError.code === 'ENOENT') {
      return defaultValue;
    }
    logger.error(`Error reading JSON from ${resolvedPath}:`, error);
    return defaultValue;
  }
}

/**
 * Atomically update a JSON file by reading, transforming, and writing.
 *
 * Provides a safe read-modify-write pattern:
 * 1. Reads existing file (or uses default)
 * 2. Applies updater function
 * 3. Atomically writes result
 *
 * @param filePath - Absolute path to the file
 * @param defaultValue - Default value if file doesn't exist
 * @param updater - Function that transforms the data
 * @param options - Optional write options
 *
 * @example
 * ```typescript
 * await updateJsonAtomically(
 *   '/path/to/counter.json',
 *   { count: 0 },
 *   (data) => ({ ...data, count: data.count + 1 })
 * );
 * ```
 */
export async function updateJsonAtomically<T>(
  filePath: string,
  defaultValue: T,
  updater: (current: T) => T | Promise<T>,
  options: AtomicWriteOptions = {}
): Promise<void> {
  const current = await readJsonFile(filePath, defaultValue);
  const updated = await updater(current);
  await atomicWriteJson(filePath, updated, options);
}

/**
 * Result of a JSON read operation with recovery information
 */
export interface ReadJsonRecoveryResult<T> {
  /** The data that was successfully read */
  data: T;
  /** Whether recovery was needed (main file was corrupted or missing) */
  recovered: boolean;
  /** Source of the data: 'main', 'backup', 'temp', or 'default' */
  source: 'main' | 'backup' | 'temp' | 'default';
  /** Error message if the main file had an issue */
  error?: string;
}

/**
 * Options for readJsonWithRecovery
 */
export interface ReadJsonRecoveryOptions {
  /** Maximum number of backup files to check (.bak1, .bak2, etc.) Default: 3 */
  maxBackups?: number;
  /** Whether to automatically restore main file from backup when corrupted. Default: true */
  autoRestore?: boolean;
}

/**
 * Log a warning if recovery was needed (from backup or temp file).
 *
 * Use this helper to reduce duplicate logging code when using readJsonWithRecovery.
 *
 * @param result - The result from readJsonWithRecovery
 * @param identifier - A human-readable identifier for the file being recovered (e.g., "Feature abc123")
 * @param loggerInstance - Optional logger instance to use (defaults to AtomicWriter logger)
 *
 * @example
 * ```typescript
 * const result = await readJsonWithRecovery(featurePath, null);
 * logRecoveryWarning(result, `Feature ${featureId}`);
 * ```
 */
export function logRecoveryWarning<T>(
  result: ReadJsonRecoveryResult<T>,
  identifier: string,
  loggerInstance: { warn: (msg: string, ...args: unknown[]) => void } = logger
): void {
  if (result.recovered && result.source !== 'default') {
    loggerInstance.warn(`${identifier} was recovered from ${result.source}: ${result.error}`);
  }
}

/**
 * Read JSON file with automatic recovery from backups.
 *
 * This function attempts to read a JSON file with fallback to backups:
 * 1. Try to read the main file
 * 2. If corrupted, check for temp files (.tmp.*) that might have valid data
 * 3. If no valid temp file, try backup files (.bak1, .bak2, .bak3)
 * 4. If all fail, return the default value
 *
 * Optionally restores the main file from a valid backup (autoRestore: true).
 *
 * @param filePath - Absolute path to the file
 * @param defaultValue - Value to return if no valid data found
 * @param options - Recovery options
 * @returns Result containing the data and recovery information
 *
 * @example
 * ```typescript
 * const result = await readJsonWithRecovery('/path/to/config.json', { version: 1 });
 * if (result.recovered) {
 *   console.log(`Recovered from ${result.source}: ${result.error}`);
 * }
 * const config = result.data;
 * ```
 */
export async function readJsonWithRecovery<T>(
  filePath: string,
  defaultValue: T,
  options: ReadJsonRecoveryOptions = {}
): Promise<ReadJsonRecoveryResult<T>> {
  const { maxBackups = 3, autoRestore = true } = options;
  const resolvedPath = path.resolve(filePath);
  const dirPath = path.dirname(resolvedPath);
  const fileName = path.basename(resolvedPath);

  // Try to read the main file first
  try {
    const content = (await secureFs.readFile(resolvedPath, 'utf-8')) as string;
    const data = JSON.parse(content) as T;
    return { data, recovered: false, source: 'main' };
  } catch (mainError) {
    const nodeError = mainError as NodeJS.ErrnoException;
    const errorMessage =
      nodeError.code === 'ENOENT'
        ? 'File does not exist'
        : `Failed to parse: ${mainError instanceof Error ? mainError.message : String(mainError)}`;

    // If file doesn't exist, check for temp files or backups
    logger.warn(`Main file ${resolvedPath} unavailable: ${errorMessage}`);

    // Try to find and recover from temp files first (in case of interrupted write)
    try {
      const files = (await secureFs.readdir(dirPath)) as string[];
      const tempFiles = files
        .filter((f: string) => f.startsWith(`${fileName}.tmp.`))
        .sort()
        .reverse(); // Most recent first

      for (const tempFile of tempFiles) {
        const tempPath = path.join(dirPath, tempFile);
        try {
          const content = (await secureFs.readFile(tempPath, 'utf-8')) as string;
          const data = JSON.parse(content) as T;

          logger.info(`Recovered data from temp file: ${tempPath}`);

          // Optionally restore main file from temp
          if (autoRestore) {
            try {
              await secureFs.rename(tempPath, resolvedPath);
              logger.info(`Restored main file from temp: ${tempPath}`);
            } catch (restoreError) {
              logger.warn(`Failed to restore main file from temp: ${restoreError}`);
            }
          }

          return { data, recovered: true, source: 'temp', error: errorMessage };
        } catch {
          // This temp file is also corrupted, try next
          continue;
        }
      }
    } catch {
      // Could not read directory, skip temp file check
    }

    // Try backup files (.bak1, .bak2, .bak3)
    for (let i = 1; i <= maxBackups; i++) {
      const backupPath = `${resolvedPath}.bak${i}`;
      try {
        const content = (await secureFs.readFile(backupPath, 'utf-8')) as string;
        const data = JSON.parse(content) as T;

        logger.info(`Recovered data from backup: ${backupPath}`);

        // Optionally restore main file from backup
        if (autoRestore) {
          try {
            await secureFs.copyFile(backupPath, resolvedPath);
            logger.info(`Restored main file from backup: ${backupPath}`);
          } catch (restoreError) {
            logger.warn(`Failed to restore main file from backup: ${restoreError}`);
          }
        }

        return { data, recovered: true, source: 'backup', error: errorMessage };
      } catch {
        // This backup doesn't exist or is corrupted, try next
        continue;
      }
    }

    // All recovery attempts failed, return default
    logger.warn(`All recovery attempts failed for ${resolvedPath}, using default value`);
    return { data: defaultValue, recovered: true, source: 'default', error: errorMessage };
  }
}
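The utilities above also compose into a one-call read-modify-write; a brief usage sketch (the counter file path is illustrative, not from this commit):

```typescript
import { updateJsonAtomically, DEFAULT_BACKUP_COUNT } from '@automaker/utils';

// Reads counter.json (falling back to { count: 0 } if missing), applies the
// updater, and writes the result atomically; with backupCount > 0 each write
// first rotates counter.json.bak1 -> .bak2 -> .bak3.
await updateJsonAtomically(
  '/tmp/example/counter.json', // hypothetical path
  { count: 0 },
  (data) => ({ ...data, count: data.count + 1 }),
  { backupCount: DEFAULT_BACKUP_COUNT }
);
```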
@@ -53,6 +53,20 @@ export {
 // File system utilities
 export { mkdirSafe, existsSafe } from './fs-utils.js';

+// Atomic file operations
+export {
+  atomicWriteJson,
+  readJsonFile,
+  updateJsonAtomically,
+  readJsonWithRecovery,
+  rotateBackups,
+  logRecoveryWarning,
+  DEFAULT_BACKUP_COUNT,
+  type AtomicWriteOptions,
+  type ReadJsonRecoveryResult,
+  type ReadJsonRecoveryOptions,
+} from './atomic-writer.js';
+
 // Path utilities
 export { normalizePath, pathsEqual } from './path-utils.js';

libs/utils/tests/atomic-writer.test.ts (new file, 709 lines)
@@ -0,0 +1,709 @@
import { describe, it, expect, beforeEach, afterEach, vi, type MockInstance } from 'vitest';
import fs from 'fs/promises';
import path from 'path';
import os from 'os';
import { secureFs } from '@automaker/platform';
import {
  atomicWriteJson,
  readJsonFile,
  updateJsonAtomically,
  readJsonWithRecovery,
} from '../src/atomic-writer';

// Mock secureFs
vi.mock('@automaker/platform', () => ({
  secureFs: {
    writeFile: vi.fn(),
    readFile: vi.fn(),
    rename: vi.fn(),
    unlink: vi.fn(),
    readdir: vi.fn(),
    copyFile: vi.fn(),
    access: vi.fn(),
    lstat: vi.fn(),
    mkdir: vi.fn(),
  },
}));

// Mock logger to suppress output during tests
vi.mock('../src/logger.js', () => ({
  createLogger: () => ({
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
  }),
}));

describe('atomic-writer.ts', () => {
  let tempDir: string;

  beforeEach(async () => {
    // Create a temporary directory for integration tests
    tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'atomic-writer-test-'));
    vi.clearAllMocks();
  });

  afterEach(async () => {
    // Clean up temporary directory
    try {
      await fs.rm(tempDir, { recursive: true, force: true });
    } catch {
      // Ignore cleanup errors
    }
  });

  describe('atomicWriteJson', () => {
    it('should write JSON data atomically', async () => {
      const filePath = path.join(tempDir, 'test.json');
      const data = { key: 'value', number: 42 };

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await atomicWriteJson(filePath, data);

      // Verify writeFile was called with temp file path and JSON content
      expect(secureFs.writeFile).toHaveBeenCalledTimes(1);
      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      expect(writeCall[0]).toMatch(/\.tmp\.\d+$/);
      expect(writeCall[1]).toBe(JSON.stringify(data, null, 2));
      expect(writeCall[2]).toBe('utf-8');

      // Verify rename was called with temp -> target
      expect(secureFs.rename).toHaveBeenCalledTimes(1);
      const renameCall = (secureFs.rename as unknown as MockInstance).mock.calls[0];
      expect(renameCall[0]).toMatch(/\.tmp\.\d+$/);
      expect(renameCall[1]).toBe(path.resolve(filePath));
    });

    it('should use custom indentation', async () => {
      const filePath = path.join(tempDir, 'test.json');
      const data = { key: 'value' };

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await atomicWriteJson(filePath, data, { indent: 4 });

      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      expect(writeCall[1]).toBe(JSON.stringify(data, null, 4));
    });

    it('should clean up temp file on write failure', async () => {
      const filePath = path.join(tempDir, 'test.json');
      const data = { key: 'value' };

      const writeError = new Error('Write failed');
      (secureFs.writeFile as unknown as MockInstance).mockRejectedValue(writeError);
      (secureFs.unlink as unknown as MockInstance).mockResolvedValue(undefined);

      await expect(atomicWriteJson(filePath, data)).rejects.toThrow('Write failed');

      expect(secureFs.unlink).toHaveBeenCalledTimes(1);
    });

    it('should clean up temp file on rename failure', async () => {
      const filePath = path.join(tempDir, 'test.json');
      const data = { key: 'value' };

      const renameError = new Error('Rename failed');
      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockRejectedValue(renameError);
      (secureFs.unlink as unknown as MockInstance).mockResolvedValue(undefined);

      await expect(atomicWriteJson(filePath, data)).rejects.toThrow('Rename failed');

      expect(secureFs.unlink).toHaveBeenCalledTimes(1);
    });

    it('should ignore cleanup errors', async () => {
      const filePath = path.join(tempDir, 'test.json');
      const data = { key: 'value' };

      const writeError = new Error('Write failed');
      const unlinkError = new Error('Unlink failed');
      (secureFs.writeFile as unknown as MockInstance).mockRejectedValue(writeError);
      (secureFs.unlink as unknown as MockInstance).mockRejectedValue(unlinkError);

      // Should still throw the original error, not the cleanup error
      await expect(atomicWriteJson(filePath, data)).rejects.toThrow('Write failed');
    });

    it('should resolve relative paths', async () => {
      const relativePath = 'test.json';
      const data = { key: 'value' };

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await atomicWriteJson(relativePath, data);

      const renameCall = (secureFs.rename as unknown as MockInstance).mock.calls[0];
      expect(renameCall[1]).toBe(path.resolve(relativePath));
    });

    it('should handle arrays as data', async () => {
      const filePath = path.join(tempDir, 'array.json');
      const data = [1, 2, 3, { nested: 'value' }];

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await atomicWriteJson(filePath, data);

      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      expect(writeCall[1]).toBe(JSON.stringify(data, null, 2));
    });

    it('should handle null and primitive values', async () => {
      const filePath = path.join(tempDir, 'primitive.json');

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await atomicWriteJson(filePath, null);
      expect((secureFs.writeFile as unknown as MockInstance).mock.calls[0][1]).toBe('null');

      await atomicWriteJson(filePath, 'string');
      expect((secureFs.writeFile as unknown as MockInstance).mock.calls[1][1]).toBe('"string"');

      await atomicWriteJson(filePath, 123);
      expect((secureFs.writeFile as unknown as MockInstance).mock.calls[2][1]).toBe('123');
    });

    it('should create directories when createDirs is true', async () => {
      const filePath = path.join(tempDir, 'nested', 'deep', 'test.json');
      const data = { key: 'value' };

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);
      // Mock lstat to indicate directory already exists
      (secureFs.lstat as unknown as MockInstance).mockResolvedValue({
        isDirectory: () => true,
        isSymbolicLink: () => false,
      });

      await atomicWriteJson(filePath, data, { createDirs: true });

      expect(secureFs.writeFile).toHaveBeenCalled();
    });
  });

  describe('readJsonFile', () => {
    it('should read and parse JSON file', async () => {
      const filePath = path.join(tempDir, 'read.json');
      const data = { key: 'value', count: 5 };

      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(data));

      const result = await readJsonFile(filePath, {});

      expect(result).toEqual(data);
      expect(secureFs.readFile).toHaveBeenCalledWith(path.resolve(filePath), 'utf-8');
    });

    it('should return default value when file does not exist', async () => {
      const filePath = path.join(tempDir, 'nonexistent.json');
      const defaultValue = { default: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';
      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);

      const result = await readJsonFile(filePath, defaultValue);

      expect(result).toEqual(defaultValue);
    });

    it('should return default value when JSON is invalid', async () => {
      const filePath = path.join(tempDir, 'invalid.json');
      const defaultValue = { default: true };

      (secureFs.readFile as unknown as MockInstance).mockResolvedValue('not valid json');

      const result = await readJsonFile(filePath, defaultValue);

      expect(result).toEqual(defaultValue);
    });

    it('should return default value for other read errors', async () => {
      const filePath = path.join(tempDir, 'error.json');
      const defaultValue = { default: true };

      const accessError = new Error('Access denied') as NodeJS.ErrnoException;
      accessError.code = 'EACCES';
      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(accessError);

      const result = await readJsonFile(filePath, defaultValue);

      expect(result).toEqual(defaultValue);
    });

    it('should handle empty object as default', async () => {
      const filePath = path.join(tempDir, 'nonexistent.json');

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';
      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);

      const result = await readJsonFile<Record<string, unknown>>(filePath, {});

      expect(result).toEqual({});
    });

    it('should handle array as default', async () => {
      const filePath = path.join(tempDir, 'nonexistent.json');

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';
      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);

      const result = await readJsonFile<string[]>(filePath, []);

      expect(result).toEqual([]);
    });

    it('should parse nested objects correctly', async () => {
      const filePath = path.join(tempDir, 'nested.json');
      const data = {
        level1: {
          level2: {
            value: 'deep',
            array: [1, 2, { nested: true }],
          },
        },
      };

      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(data));

      const result = await readJsonFile(filePath, {});

      expect(result).toEqual(data);
    });
  });

  describe('updateJsonAtomically', () => {
    it('should read, update, and write file atomically', async () => {
      const filePath = path.join(tempDir, 'update.json');
      const initialData = { count: 5 };
      const defaultValue = { count: 0 };

      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(initialData));
      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await updateJsonAtomically(filePath, defaultValue, (data) => ({
        ...data,
        count: data.count + 1,
      }));

      // Verify the write was called with updated data
      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      const writtenData = JSON.parse(writeCall[1]);
      expect(writtenData.count).toBe(6);
    });

    it('should use default value when file does not exist', async () => {
      const filePath = path.join(tempDir, 'new.json');
      const defaultValue = { count: 0 };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';
      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);
      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await updateJsonAtomically(filePath, defaultValue, (data) => ({
        ...data,
        count: data.count + 1,
      }));

      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      const writtenData = JSON.parse(writeCall[1]);
      expect(writtenData.count).toBe(1);
    });

    it('should support async updater function', async () => {
      const filePath = path.join(tempDir, 'async.json');
      const initialData = { value: 'initial' };

      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(initialData));
      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await updateJsonAtomically(filePath, {}, async (data) => {
        await new Promise((resolve) => setTimeout(resolve, 10));
        return { ...data, value: 'updated' };
      });

      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      const writtenData = JSON.parse(writeCall[1]);
      expect(writtenData.value).toBe('updated');
    });

    it('should pass through options to atomicWriteJson', async () => {
      const filePath = path.join(tempDir, 'options.json');

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';
      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);
      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await updateJsonAtomically(filePath, { key: 'value' }, (d) => d, { indent: 4 });

      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      expect(writeCall[1]).toBe(JSON.stringify({ key: 'value' }, null, 4));
    });
  });

  describe('readJsonWithRecovery', () => {
    it('should return main file data when available', async () => {
      const filePath = path.join(tempDir, 'main.json');
      const data = { main: true };

      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(data));

      const result = await readJsonWithRecovery(filePath, {});

      expect(result.data).toEqual(data);
      expect(result.recovered).toBe(false);
      expect(result.source).toBe('main');
      expect(result.error).toBeUndefined();
    });

    it('should recover from temp file when main file is missing', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const tempData = { fromTemp: true };
      const fileName = path.basename(filePath);

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError) // Main file
        .mockResolvedValueOnce(JSON.stringify(tempData)); // Temp file

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([
        `${fileName}.tmp.1234567890`,
        'other-file.json',
      ]);

      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      const result = await readJsonWithRecovery(filePath, {});

      expect(result.data).toEqual(tempData);
      expect(result.recovered).toBe(true);
      expect(result.source).toBe('temp');
      expect(result.error).toBe('File does not exist');
    });

    it('should recover from backup file when main and temp are unavailable', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const backupData = { fromBackup: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError) // Main file
        .mockRejectedValueOnce(enoentError) // backup1
        .mockResolvedValueOnce(JSON.stringify(backupData)); // backup2

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([]); // No temp files

      (secureFs.copyFile as unknown as MockInstance).mockResolvedValue(undefined);

      const result = await readJsonWithRecovery(filePath, {});

      expect(result.data).toEqual(backupData);
      expect(result.recovered).toBe(true);
      expect(result.source).toBe('backup');
    });

    it('should return default when all recovery attempts fail', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const defaultValue = { default: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);
      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([]);

      const result = await readJsonWithRecovery(filePath, defaultValue);

      expect(result.data).toEqual(defaultValue);
      expect(result.recovered).toBe(true);
      expect(result.source).toBe('default');
      expect(result.error).toBe('File does not exist');
    });

    it('should try multiple temp files in order', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const fileName = path.basename(filePath);
      const validTempData = { valid: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError) // Main file
        .mockResolvedValueOnce('invalid json') // First temp file (invalid)
        .mockResolvedValueOnce(JSON.stringify(validTempData)); // Second temp file

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([
        `${fileName}.tmp.9999999999`, // Most recent
        `${fileName}.tmp.1111111111`, // Older
      ]);

      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      const result = await readJsonWithRecovery(filePath, {});

      expect(result.data).toEqual(validTempData);
      expect(result.source).toBe('temp');
    });

    it('should try multiple backup files in order', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const backupData = { backup2: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError) // Main file
        .mockRejectedValueOnce(enoentError) // .bak1
        .mockResolvedValueOnce(JSON.stringify(backupData)); // .bak2

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([]);

      (secureFs.copyFile as unknown as MockInstance).mockResolvedValue(undefined);

      const result = await readJsonWithRecovery(filePath, {});

      expect(result.data).toEqual(backupData);
      expect(result.source).toBe('backup');

      // Verify it tried .bak1 first
      expect(secureFs.readFile).toHaveBeenNthCalledWith(
        2,
        `${path.resolve(filePath)}.bak1`,
        'utf-8'
      );
    });

    it('should respect maxBackups option', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const defaultValue = { default: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);
      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([]);

      const result = await readJsonWithRecovery(filePath, defaultValue, { maxBackups: 1 });

      expect(result.source).toBe('default');
      // Should only have tried main + 1 backup
      expect(secureFs.readFile).toHaveBeenCalledTimes(2);
    });

    it('should not auto-restore when autoRestore is false', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const fileName = path.basename(filePath);
      const tempData = { fromTemp: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError)
        .mockResolvedValueOnce(JSON.stringify(tempData));

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([`${fileName}.tmp.123`]);

      const result = await readJsonWithRecovery(filePath, {}, { autoRestore: false });

      expect(result.data).toEqual(tempData);
      expect(secureFs.rename).not.toHaveBeenCalled();
      expect(secureFs.copyFile).not.toHaveBeenCalled();
    });

    it('should handle directory read errors gracefully', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const backupData = { backup: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError) // Main file
        .mockResolvedValueOnce(JSON.stringify(backupData)); // backup1

      (secureFs.readdir as unknown as MockInstance).mockRejectedValue(new Error('Dir read failed'));
      (secureFs.copyFile as unknown as MockInstance).mockResolvedValue(undefined);

      const result = await readJsonWithRecovery(filePath, {});

      // Should skip temp files and go to backups
      expect(result.data).toEqual(backupData);
      expect(result.source).toBe('backup');
    });

    it('should handle corrupted main file with valid error message', async () => {
      const filePath = path.join(tempDir, 'corrupted.json');
      const defaultValue = { default: true };

      const parseError = new SyntaxError('Unexpected token');
      (secureFs.readFile as unknown as MockInstance).mockResolvedValueOnce('{{invalid');
      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([]);

      // Mock to actually throw parse error
      (secureFs.readFile as unknown as MockInstance).mockImplementationOnce(() => {
        return Promise.resolve('{{invalid json');
      });

      const result = await readJsonWithRecovery(filePath, defaultValue);

      expect(result.recovered).toBe(true);
      expect(result.error).toContain('Failed to parse');
    });

    it('should handle restore failures gracefully', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const fileName = path.basename(filePath);
      const tempData = { fromTemp: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError)
        .mockResolvedValueOnce(JSON.stringify(tempData));

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([`${fileName}.tmp.123`]);
      (secureFs.rename as unknown as MockInstance).mockRejectedValue(new Error('Restore failed'));

      const result = await readJsonWithRecovery(filePath, {});

      // Should still return data even if restore failed
      expect(result.data).toEqual(tempData);
      expect(result.source).toBe('temp');
    });
  });

  describe('Edge cases', () => {
    it('should handle empty file path gracefully', async () => {
      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(new Error('Invalid path'));

      const result = await readJsonFile('', { default: true });

      expect(result).toEqual({ default: true });
    });

    it('should handle special characters in file path', async () => {
      const filePath = path.join(tempDir, 'file with spaces & special!.json');
      const data = { special: 'chars' };

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await atomicWriteJson(filePath, data);

      expect(secureFs.writeFile).toHaveBeenCalled();
    });

    it('should handle very large objects', async () => {
      const filePath = path.join(tempDir, 'large.json');
      const largeArray = Array.from({ length: 10000 }, (_, i) => ({
        id: i,
        data: `item-${i}`,
      }));

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await atomicWriteJson(filePath, largeArray);

      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      expect(JSON.parse(writeCall[1])).toEqual(largeArray);
    });

    it('should handle unicode content', async () => {
      const filePath = path.join(tempDir, 'unicode.json');
      const data = { emoji: '🎉', japanese: 'こんにちは', chinese: '你好' };

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await atomicWriteJson(filePath, data);

      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      expect(JSON.parse(writeCall[1])).toEqual(data);
    });

    it('should handle circular reference error in JSON', async () => {
      const filePath = path.join(tempDir, 'circular.json');
      const circular: Record<string, unknown> = { key: 'value' };
      circular.self = circular;

      await expect(atomicWriteJson(filePath, circular)).rejects.toThrow();
    });
  });

  describe('Type safety', () => {
    interface TestConfig {
      version: number;
      settings: {
        enabled: boolean;
        name: string;
      };
    }

    it('should preserve types in readJsonFile', async () => {
      const filePath = path.join(tempDir, 'config.json');
      const expected: TestConfig = {
        version: 1,
        settings: { enabled: true, name: 'test' },
      };

      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(expected));

      const result = await readJsonFile<TestConfig>(filePath, {
        version: 0,
        settings: { enabled: false, name: '' },
      });

      expect(result.version).toBe(1);
      expect(result.settings.enabled).toBe(true);
      expect(result.settings.name).toBe('test');
    });

    it('should preserve types in updateJsonAtomically', async () => {
      const filePath = path.join(tempDir, 'counter.json');

      interface Counter {
        count: number;
      }

      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(
        JSON.stringify({ count: 5 })
      );
      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await updateJsonAtomically<Counter>(filePath, { count: 0 }, (data) => ({
        count: data.count + 1,
      }));

      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      const writtenData: Counter = JSON.parse(writeCall[1]);
      expect(writtenData.count).toBe(6);
    });
  });
});