Mirror of https://github.com/AutoMaker-Org/automaker.git, synced 2026-02-02 20:43:36 +00:00
Merge pull request #532 from AutoMaker-Org/feature/v0.12.0rc-1768605251997-8ufb
fix: feature.json corruption on crash
@@ -5,7 +5,7 @@
 import path from 'path';
 import * as secureFs from '../../lib/secure-fs.js';
 import type { EventEmitter } from '../../lib/events.js';
-import { createLogger } from '@automaker/utils';
+import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
 import { getFeaturesDir } from '@automaker/platform';
 import { extractJsonWithArray } from '../../lib/json-extractor.js';
 import { getNotificationService } from '../../services/notification-service.js';
@@ -74,10 +74,10 @@ export async function parseAndCreateFeatures(
         updatedAt: new Date().toISOString(),
       };

-      await secureFs.writeFile(
-        path.join(featureDir, 'feature.json'),
-        JSON.stringify(featureData, null, 2)
-      );
+      // Use atomic write with backup support for crash protection
+      await atomicWriteJson(path.join(featureDir, 'feature.json'), featureData, {
+        backupCount: DEFAULT_BACKUP_COUNT,
+      });

       createdFeatures.push({ id: feature.id, title: feature.title });
     }
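
The change above is the heart of the fix: a process crash in the middle of a plain `secureFs.writeFile` can leave `feature.json` truncated and unparseable. A minimal sketch of the temp-file + rename idea behind `atomicWriteJson` (the real implementation, with backup rotation on top, appears in `libs/utils/src/atomic-writer.ts` later in this diff; the helper name here is illustrative):

```typescript
import { writeFile, rename } from 'fs/promises';

// Sketch only: stage the new content next to the target, then swap it in.
async function writeJsonAtomicallySketch(target: string, data: unknown): Promise<void> {
  const tmp = `${target}.tmp.${Date.now()}`;
  await writeFile(tmp, JSON.stringify(data, null, 2), 'utf-8');
  // rename() replaces the target in a single step when both paths are on the
  // same filesystem, so readers see the old file or the new one, never a mix.
  await rename(tmp, target);
}
```
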
@@ -29,6 +29,10 @@ import {
   appendLearning,
   recordMemoryUsage,
   createLogger,
+  atomicWriteJson,
+  readJsonWithRecovery,
+  logRecoveryWarning,
+  DEFAULT_BACKUP_COUNT,
 } from '@automaker/utils';

 const logger = createLogger('AutoMode');
@@ -1416,13 +1420,13 @@ Address the follow-up instructions above. Review the previous work and make the
       allImagePaths.push(...allPaths);
     }

-    // Save updated feature.json with new images
+    // Save updated feature.json with new images (atomic write with backup)
     if (copiedImagePaths.length > 0 && feature) {
       const featureDirForSave = getFeatureDir(projectPath, featureId);
       const featurePath = path.join(featureDirForSave, 'feature.json');

       try {
-        await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+        await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
       } catch (error) {
         logger.error(`Failed to save feature.json:`, error);
       }
@@ -2092,8 +2096,20 @@ Format your response as a structured markdown document.`;
     const featurePath = path.join(featureDir, 'feature.json');

     try {
-      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-      const feature = JSON.parse(data);
+      // Use recovery-enabled read for corrupted file handling
+      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });
+
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);
+
+      const feature = result.data;
+      if (!feature) {
+        logger.warn(`Feature ${featureId} not found or could not be recovered`);
+        return;
+      }
+
       feature.status = status;
       feature.updatedAt = new Date().toISOString();
       // Set justFinishedAt timestamp when moving to waiting_approval (agent just completed)
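
Note the error-handling shape change at this call site: corruption used to surface as a thrown `SyntaxError` from `JSON.parse` and fall through to the `catch`. With `readJsonWithRecovery` (defined later in this diff) it surfaces as a `null` payload, so "missing" and "unrecoverable" collapse into a single guard:

```typescript
// Condensed from the hunk above - no try/catch around parsing any more.
const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
  maxBackups: DEFAULT_BACKUP_COUNT,
  autoRestore: true, // rewrite the main file from a good backup when possible
});
const feature = result.data;
if (!feature) {
  return; // covers both "file never existed" and "every backup was corrupt"
}
```
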
@@ -2104,7 +2120,9 @@ Format your response as a structured markdown document.`;
         // Clear the timestamp when moving to other statuses
         feature.justFinishedAt = undefined;
       }
-      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+
+      // Use atomic write with backup support
+      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

       // Create notifications for important status changes
       const notificationService = getNotificationService();
@@ -2135,8 +2153,8 @@ Format your response as a structured markdown document.`;
           logger.warn(`Failed to sync feature ${featureId} to app_spec.txt:`, syncError);
         }
       }
-    } catch {
-      // Feature file may not exist
+    } catch (error) {
+      logger.error(`Failed to update feature status for ${featureId}:`, error);
     }
   }
@@ -2148,11 +2166,24 @@ Format your response as a structured markdown document.`;
     featureId: string,
     updates: Partial<PlanSpec>
   ): Promise<void> {
-    const featurePath = path.join(projectPath, '.automaker', 'features', featureId, 'feature.json');
+    // Use getFeatureDir helper for consistent path resolution
+    const featureDir = getFeatureDir(projectPath, featureId);
+    const featurePath = path.join(featureDir, 'feature.json');

     try {
-      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-      const feature = JSON.parse(data);
+      // Use recovery-enabled read for corrupted file handling
+      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });
+
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);
+
+      const feature = result.data;
+      if (!feature) {
+        logger.warn(`Feature ${featureId} not found or could not be recovered`);
+        return;
+      }
+
       // Initialize planSpec if it doesn't exist
       if (!feature.planSpec) {
@@ -2172,7 +2203,9 @@ Format your response as a structured markdown document.`;
       }

       feature.updatedAt = new Date().toISOString();
-      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+
+      // Use atomic write with backup support
+      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
     } catch (error) {
       logger.error(`Failed to update planSpec for ${featureId}:`, error);
     }
@@ -2189,25 +2222,34 @@ Format your response as a structured markdown document.`;
     const allFeatures: Feature[] = [];
     const pendingFeatures: Feature[] = [];

-    // Load all features (for dependency checking)
+    // Load all features (for dependency checking) with recovery support
     for (const entry of entries) {
       if (entry.isDirectory()) {
         const featurePath = path.join(featuresDir, entry.name, 'feature.json');
-        try {
-          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-          const feature = JSON.parse(data);
-          allFeatures.push(feature);

-          // Track pending features separately
-          if (
-            feature.status === 'pending' ||
-            feature.status === 'ready' ||
-            feature.status === 'backlog'
-          ) {
-            pendingFeatures.push(feature);
-          }
-        } catch {
-          // Skip invalid features
+        // Use recovery-enabled read for corrupted file handling
+        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+          maxBackups: DEFAULT_BACKUP_COUNT,
+          autoRestore: true,
+        });
+
+        logRecoveryWarning(result, `Feature ${entry.name}`, logger);
+
+        const feature = result.data;
+        if (!feature) {
+          // Skip features that couldn't be loaded or recovered
+          continue;
+        }
+
+        allFeatures.push(feature);
+
+        // Track pending features separately
+        if (
+          feature.status === 'pending' ||
+          feature.status === 'ready' ||
+          feature.status === 'backlog'
+        ) {
+          pendingFeatures.push(feature);
         }
       }
     }
@@ -3439,31 +3481,39 @@ After generating the revised spec, output:
     for (const entry of entries) {
       if (entry.isDirectory()) {
         const featurePath = path.join(featuresDir, entry.name, 'feature.json');
-        try {
-          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-          const feature = JSON.parse(data) as Feature;

-          // Check if feature was interrupted (in_progress or pipeline_*)
-          if (
-            feature.status === 'in_progress' ||
-            (feature.status && feature.status.startsWith('pipeline_'))
-          ) {
-            // Verify it has existing context (agent-output.md)
-            const featureDir = getFeatureDir(projectPath, feature.id);
-            const contextPath = path.join(featureDir, 'agent-output.md');
-            try {
-              await secureFs.access(contextPath);
-              interruptedFeatures.push(feature);
-              logger.info(
-                `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
-              );
-            } catch {
-              // No context file, skip this feature - it will be restarted fresh
-              logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
-            }
+        // Use recovery-enabled read for corrupted file handling
+        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+          maxBackups: DEFAULT_BACKUP_COUNT,
+          autoRestore: true,
+        });
+
+        logRecoveryWarning(result, `Feature ${entry.name}`, logger);
+
+        const feature = result.data;
+        if (!feature) {
+          // Skip features that couldn't be loaded or recovered
+          continue;
+        }
+
+        // Check if feature was interrupted (in_progress or pipeline_*)
+        if (
+          feature.status === 'in_progress' ||
+          (feature.status && feature.status.startsWith('pipeline_'))
+        ) {
+          // Verify it has existing context (agent-output.md)
+          const featureDir = getFeatureDir(projectPath, feature.id);
+          const contextPath = path.join(featureDir, 'agent-output.md');
+          try {
+            await secureFs.access(contextPath);
+            interruptedFeatures.push(feature);
+            logger.info(
+              `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
+            );
+          } catch {
+            // No context file, skip this feature - it will be restarted fresh
+            logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
           }
-        } catch {
-          // Skip invalid features
         }
       }
     }
@@ -5,7 +5,13 @@

 import path from 'path';
 import type { Feature, DescriptionHistoryEntry } from '@automaker/types';
-import { createLogger } from '@automaker/utils';
+import {
+  createLogger,
+  atomicWriteJson,
+  readJsonWithRecovery,
+  logRecoveryWarning,
+  DEFAULT_BACKUP_COUNT,
+} from '@automaker/utils';
 import * as secureFs from '../lib/secure-fs.js';
 import {
   getFeaturesDir,
@@ -194,31 +200,31 @@ export class FeatureLoader {
     })) as any[];
     const featureDirs = entries.filter((entry) => entry.isDirectory());

-    // Load all features concurrently (secureFs has built-in concurrency limiting)
+    // Load all features concurrently with automatic recovery from backups
     const featurePromises = featureDirs.map(async (dir) => {
       const featureId = dir.name;
       const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);

-      try {
-        const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
-        const feature = JSON.parse(content);
+      // Use recovery-enabled read to handle corrupted files
+      const result = await readJsonWithRecovery<Feature | null>(featureJsonPath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });

-        if (!feature.id) {
-          logger.warn(`Feature ${featureId} missing required 'id' field, skipping`);
-          return null;
-        }
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);

-        return feature as Feature;
-      } catch (error) {
-        if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
-          return null;
-        } else if (error instanceof SyntaxError) {
-          logger.warn(`Failed to parse feature.json for ${featureId}: ${error.message}`);
-        } else {
-          logger.error(`Failed to load feature ${featureId}:`, (error as Error).message);
-        }
+      const feature = result.data;
+
+      if (!feature) {
         return null;
       }
+
+      if (!feature.id) {
+        logger.warn(`Feature ${featureId} missing required 'id' field, skipping`);
+        return null;
+      }
+
+      return feature;
     });

     const results = await Promise.all(featurePromises);
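
Each loader promise now resolves to `Feature | null`, so a corrupt or id-less entry degrades to `null` instead of rejecting the whole `Promise.all`. The null-filtering itself lives outside this hunk; a typical consumption pattern (an assumption, not shown in the diff) would be:

```typescript
const results = await Promise.all(featurePromises);
const features = results.filter((f): f is Feature => f !== null);
```
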
@@ -303,19 +309,20 @@ export class FeatureLoader {

   /**
    * Get a single feature by ID
+   * Uses automatic recovery from backups if the main file is corrupted
    */
   async get(projectPath: string, featureId: string): Promise<Feature | null> {
-    try {
-      const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
-      const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
-      return JSON.parse(content);
-    } catch (error) {
-      if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
-        return null;
-      }
-      logger.error(`Failed to get feature ${featureId}:`, error);
-      throw error;
-    }
+    const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
+
+    // Use recovery-enabled read to handle corrupted files
+    const result = await readJsonWithRecovery<Feature | null>(featureJsonPath, null, {
+      maxBackups: DEFAULT_BACKUP_COUNT,
+      autoRestore: true,
+    });
+
+    logRecoveryWarning(result, `Feature ${featureId}`, logger);
+
+    return result.data;
   }

   /**
@@ -359,8 +366,8 @@ export class FeatureLoader {
       descriptionHistory: initialHistory,
     };

-    // Write feature.json
-    await secureFs.writeFile(featureJsonPath, JSON.stringify(feature, null, 2), 'utf-8');
+    // Write feature.json atomically with backup support
+    await atomicWriteJson(featureJsonPath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

     logger.info(`Created feature ${featureId}`);
     return feature;
@@ -444,9 +451,9 @@ export class FeatureLoader {
       descriptionHistory: updatedHistory,
     };

-    // Write back to file
+    // Write back to file atomically with backup support
     const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
-    await secureFs.writeFile(featureJsonPath, JSON.stringify(updatedFeature, null, 2), 'utf-8');
+    await atomicWriteJson(featureJsonPath, updatedFeature, { backupCount: DEFAULT_BACKUP_COUNT });

     logger.info(`Updated feature ${featureId}`);
     return updatedFeature;
@@ -7,7 +7,7 @@
  * - Per-project settings ({projectPath}/.automaker/settings.json)
  */

-import { createLogger } from '@automaker/utils';
+import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
 import * as secureFs from '../lib/secure-fs.js';

 import {
@@ -42,28 +42,8 @@ import {
 const logger = createLogger('SettingsService');

 /**
- * Atomic file write - write to temp file then rename
+ * Wrapper for readJsonFile from utils that uses the local secureFs
+ * to maintain compatibility with the server's secure file system
  */
-async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
-  const tempPath = `${filePath}.tmp.${Date.now()}`;
-  const content = JSON.stringify(data, null, 2);
-
-  try {
-    await secureFs.writeFile(tempPath, content, 'utf-8');
-    await secureFs.rename(tempPath, filePath);
-  } catch (error) {
-    // Clean up temp file if it exists
-    try {
-      await secureFs.unlink(tempPath);
-    } catch {
-      // Ignore cleanup errors
-    }
-    throw error;
-  }
-}
-
-/**
- * Safely read JSON file with fallback to default
- */
 async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
   try {
@@ -90,6 +70,13 @@ async function fileExists(filePath: string): Promise<boolean> {
   }
 }

+/**
+ * Write settings atomically with backup support
+ */
+async function writeSettingsJson(filePath: string, data: unknown): Promise<void> {
+  await atomicWriteJson(filePath, data, { backupCount: DEFAULT_BACKUP_COUNT });
+}
+
 /**
  * SettingsService - Manages persistent storage of user settings and credentials
  *
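
The `writeSettingsJson` wrapper keeps the old two-argument call shape, so the call sites below only change a function name while the backup policy is pinned in one place:

```typescript
// The two calls are equivalent; the wrapper just fixes the backup policy.
await writeSettingsJson(settingsPath, updated);
await atomicWriteJson(settingsPath, updated, { backupCount: DEFAULT_BACKUP_COUNT });
```
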
@@ -180,7 +167,7 @@ export class SettingsService {
     if (needsSave) {
       try {
         await ensureDataDir(this.dataDir);
-        await atomicWriteJson(settingsPath, result);
+        await writeSettingsJson(settingsPath, result);
         logger.info('Settings migration complete');
       } catch (error) {
         logger.error('Failed to save migrated settings:', error);
@@ -340,7 +327,7 @@ export class SettingsService {
       };
     }

-    await atomicWriteJson(settingsPath, updated);
+    await writeSettingsJson(settingsPath, updated);
     logger.info('Global settings updated');

     return updated;
@@ -414,7 +401,7 @@ export class SettingsService {
       };
     }

-    await atomicWriteJson(credentialsPath, updated);
+    await writeSettingsJson(credentialsPath, updated);
     logger.info('Credentials updated');

     return updated;
@@ -525,7 +512,7 @@ export class SettingsService {
       };
     }

-    await atomicWriteJson(settingsPath, updated);
+    await writeSettingsJson(settingsPath, updated);
     logger.info(`Project settings updated for ${projectPath}`);

     return updated;
@@ -190,9 +190,10 @@ describe('feature-loader.ts', () => {
       const result = await loader.getAll(testProjectPath);

       expect(result).toEqual([]);
+      // With recovery-enabled reads, warnings come from AtomicWriter and FeatureLoader
       expect(consoleSpy).toHaveBeenCalledWith(
-        expect.stringMatching(/WARN.*\[FeatureLoader\]/),
-        expect.stringContaining('Failed to parse feature.json')
+        expect.stringMatching(/WARN.*\[AtomicWriter\]/),
+        expect.stringContaining('unavailable')
       );

       consoleSpy.mockRestore();
@@ -260,10 +261,13 @@ describe('feature-loader.ts', () => {
       expect(result).toBeNull();
     });

-    it('should throw on other errors', async () => {
+    it('should return null on other errors (with recovery attempt)', async () => {
+      // With recovery-enabled reads, get() returns null instead of throwing
+      // because it attempts to recover from backups before giving up
       vi.mocked(fs.readFile).mockRejectedValue(new Error('Permission denied'));

-      await expect(loader.get(testProjectPath, 'feature-123')).rejects.toThrow('Permission denied');
+      const result = await loader.get(testProjectPath, 'feature-123');
+      expect(result).toBeNull();
     });
   });

libs/utils/src/atomic-writer.ts (new file, +362 lines)
@@ -0,0 +1,362 @@
+/**
+ * Atomic file writing utilities for JSON data
+ *
+ * Provides atomic write operations using temp-file + rename pattern,
+ * ensuring data integrity even during crashes or power failures.
+ */
+
+import { secureFs } from '@automaker/platform';
+import path from 'path';
+import { createLogger } from './logger.js';
+import { mkdirSafe } from './fs-utils.js';
+
+const logger = createLogger('AtomicWriter');
+
+/** Default maximum number of backup files to keep for crash recovery */
+export const DEFAULT_BACKUP_COUNT = 3;
+
+/**
+ * Options for atomic write operations
+ */
+export interface AtomicWriteOptions {
+  /** Number of spaces for JSON indentation (default: 2) */
+  indent?: number;
+  /** Create parent directories if they don't exist (default: false) */
+  createDirs?: boolean;
+  /** Number of backup files to keep (0 = no backups, default: 0). When > 0, rotates .bak1, .bak2, etc. */
+  backupCount?: number;
+}
+
+/**
+ * Rotate backup files (.bak1 -> .bak2 -> .bak3, oldest is deleted)
+ * and create a new backup from the current file.
+ *
+ * @param filePath - Absolute path to the file being backed up
+ * @param maxBackups - Maximum number of backup files to keep
+ */
+export async function rotateBackups(
+  filePath: string,
+  maxBackups: number = DEFAULT_BACKUP_COUNT
+): Promise<void> {
+  // Check if the source file exists before attempting backup
+  try {
+    await secureFs.access(filePath);
+  } catch {
+    // No existing file to backup
+    return;
+  }
+
+  // Rotate existing backups: .bak3 is deleted, .bak2 -> .bak3, .bak1 -> .bak2
+  for (let i = maxBackups; i >= 1; i--) {
+    const currentBackup = `${filePath}.bak${i}`;
+    const nextBackup = `${filePath}.bak${i + 1}`;
+
+    try {
+      if (i === maxBackups) {
+        // Delete the oldest backup
+        await secureFs.unlink(currentBackup);
+      } else {
+        // Rename current backup to next slot
+        await secureFs.rename(currentBackup, nextBackup);
+      }
+    } catch {
+      // Ignore errors - backup file may not exist
+    }
+  }
+
+  // Copy current file to .bak1
+  try {
+    await secureFs.copyFile(filePath, `${filePath}.bak1`);
+  } catch (error) {
+    logger.warn(`Failed to create backup of ${filePath}:`, error);
+    // Continue with write even if backup fails
+  }
+}
+
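
One pass of `rotateBackups(file, 3)` shifts each generation down a slot before the new write happens. Assuming all three backups already exist, the sequence is:

```typescript
// file.bak3               -> deleted (oldest generation dropped)
// file.bak2               -> renamed to file.bak3
// file.bak1               -> renamed to file.bak2
// file (current content)  -> copied to file.bak1
// atomicWriteJson then replaces `file` itself via temp-file + rename, so up
// to three prior generations remain available for recovery.
```
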
+/**
+ * Atomically write JSON data to a file.
+ *
+ * Uses the temp-file + rename pattern for atomicity:
+ * 1. Writes data to a temporary file
+ * 2. Atomically renames temp file to target path
+ * 3. Cleans up temp file on error
+ *
+ * @param filePath - Absolute path to the target file
+ * @param data - Data to serialize as JSON
+ * @param options - Optional write options
+ * @throws Error if write fails (temp file is cleaned up)
+ *
+ * @example
+ * ```typescript
+ * await atomicWriteJson('/path/to/config.json', { key: 'value' });
+ * await atomicWriteJson('/path/to/data.json', data, { indent: 4, createDirs: true });
+ * ```
+ */
+export async function atomicWriteJson<T>(
+  filePath: string,
+  data: T,
+  options: AtomicWriteOptions = {}
+): Promise<void> {
+  const { indent = 2, createDirs = false, backupCount = 0 } = options;
+  const resolvedPath = path.resolve(filePath);
+  const tempPath = `${resolvedPath}.tmp.${Date.now()}`;
+
+  // Create parent directories if requested
+  if (createDirs) {
+    const dirPath = path.dirname(resolvedPath);
+    await mkdirSafe(dirPath);
+  }
+
+  const content = JSON.stringify(data, null, indent);
+
+  try {
+    // Rotate backups before writing (if backups are enabled)
+    if (backupCount > 0) {
+      await rotateBackups(resolvedPath, backupCount);
+    }
+
+    await secureFs.writeFile(tempPath, content, 'utf-8');
+    await secureFs.rename(tempPath, resolvedPath);
+  } catch (error) {
+    // Clean up temp file if it exists
+    try {
+      await secureFs.unlink(tempPath);
+    } catch {
+      // Ignore cleanup errors - best effort
+    }
+    logger.error(`Failed to atomically write to ${resolvedPath}:`, error);
+    throw error;
+  }
+}
+
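
Two properties of this pattern are worth spelling out. Atomicity comes from `rename()`, which on POSIX systems replaces the target in a single operation because the temp file is created in the same directory (and therefore on the same filesystem). Durability is weaker: without an `fsync` of the file and its directory, a power cut can still drop the very newest write - which is exactly the window the backup rotation covers, since a stale-but-valid `.bak1` remains readable. A usage sketch (path illustrative):

```typescript
await atomicWriteJson('/project/.automaker/features/f1/feature.json', feature, {
  backupCount: DEFAULT_BACKUP_COUNT,
});
```
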
+/**
+ * Safely read JSON from a file with fallback to default value.
+ *
+ * Returns the default value if:
+ * - File doesn't exist (ENOENT)
+ * - File content is invalid JSON
+ *
+ * @param filePath - Absolute path to the file
+ * @param defaultValue - Value to return if file doesn't exist or is invalid
+ * @returns Parsed JSON data or default value
+ *
+ * @example
+ * ```typescript
+ * const config = await readJsonFile('/path/to/config.json', { version: 1 });
+ * ```
+ */
+export async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
+  const resolvedPath = path.resolve(filePath);
+
+  try {
+    const content = (await secureFs.readFile(resolvedPath, 'utf-8')) as string;
+    return JSON.parse(content) as T;
+  } catch (error) {
+    const nodeError = error as NodeJS.ErrnoException;
+    if (nodeError.code === 'ENOENT') {
+      return defaultValue;
+    }
+    logger.error(`Error reading JSON from ${resolvedPath}:`, error);
+    return defaultValue;
+  }
+}
+
+/**
+ * Atomically update a JSON file by reading, transforming, and writing.
+ *
+ * Provides a safe read-modify-write pattern:
+ * 1. Reads existing file (or uses default)
+ * 2. Applies updater function
+ * 3. Atomically writes result
+ *
+ * @param filePath - Absolute path to the file
+ * @param defaultValue - Default value if file doesn't exist
+ * @param updater - Function that transforms the data
+ * @param options - Optional write options
+ *
+ * @example
+ * ```typescript
+ * await updateJsonAtomically(
+ *   '/path/to/counter.json',
+ *   { count: 0 },
+ *   (data) => ({ ...data, count: data.count + 1 })
+ * );
+ * ```
+ */
+export async function updateJsonAtomically<T>(
+  filePath: string,
+  defaultValue: T,
+  updater: (current: T) => T | Promise<T>,
+  options: AtomicWriteOptions = {}
+): Promise<void> {
+  const current = await readJsonFile(filePath, defaultValue);
+  const updated = await updater(current);
+  await atomicWriteJson(filePath, updated, options);
+}
+
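
`updateJsonAtomically` makes each individual write atomic, but nothing serializes the read-modify-write cycle itself: two concurrent updaters can read the same snapshot, and the last rename wins. A caller that might race could chain updates through a promise queue - a minimal sketch, not part of this module:

```typescript
let updateQueue: Promise<void> = Promise.resolve();

// Assumption: single-process serialization is sufficient for the use case.
function queuedUpdate(filePath: string, bump: (c: { count: number }) => { count: number }) {
  updateQueue = updateQueue.then(() => updateJsonAtomically(filePath, { count: 0 }, bump));
  return updateQueue;
}
```
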
+/**
+ * Result of a JSON read operation with recovery information
+ */
+export interface ReadJsonRecoveryResult<T> {
+  /** The data that was successfully read */
+  data: T;
+  /** Whether recovery was needed (main file was corrupted or missing) */
+  recovered: boolean;
+  /** Source of the data: 'main', 'backup', 'temp', or 'default' */
+  source: 'main' | 'backup' | 'temp' | 'default';
+  /** Error message if the main file had an issue */
+  error?: string;
+}
+
+/**
+ * Options for readJsonWithRecovery
+ */
+export interface ReadJsonRecoveryOptions {
+  /** Maximum number of backup files to check (.bak1, .bak2, etc.) Default: 3 */
+  maxBackups?: number;
+  /** Whether to automatically restore main file from backup when corrupted. Default: true */
+  autoRestore?: boolean;
+}
+
+/**
+ * Log a warning if recovery was needed (from backup or temp file).
+ *
+ * Use this helper to reduce duplicate logging code when using readJsonWithRecovery.
+ *
+ * @param result - The result from readJsonWithRecovery
+ * @param identifier - A human-readable identifier for the file being recovered (e.g., "Feature abc123")
+ * @param loggerInstance - Optional logger instance to use (defaults to AtomicWriter logger)
+ *
+ * @example
+ * ```typescript
+ * const result = await readJsonWithRecovery(featurePath, null);
+ * logRecoveryWarning(result, `Feature ${featureId}`);
+ * ```
+ */
+export function logRecoveryWarning<T>(
+  result: ReadJsonRecoveryResult<T>,
+  identifier: string,
+  loggerInstance: { warn: (msg: string, ...args: unknown[]) => void } = logger
+): void {
+  if (result.recovered && result.source !== 'default') {
+    loggerInstance.warn(`${identifier} was recovered from ${result.source}: ${result.error}`);
+  }
+}
+
+/**
+ * Read JSON file with automatic recovery from backups.
+ *
+ * This function attempts to read a JSON file with fallback to backups:
+ * 1. Try to read the main file
+ * 2. If corrupted, check for temp files (.tmp.*) that might have valid data
+ * 3. If no valid temp file, try backup files (.bak1, .bak2, .bak3)
+ * 4. If all fail, return the default value
+ *
+ * Optionally restores the main file from a valid backup (autoRestore: true).
+ *
+ * @param filePath - Absolute path to the file
+ * @param defaultValue - Value to return if no valid data found
+ * @param options - Recovery options
+ * @returns Result containing the data and recovery information
+ *
+ * @example
+ * ```typescript
+ * const result = await readJsonWithRecovery('/path/to/config.json', { version: 1 });
+ * if (result.recovered) {
+ *   console.log(`Recovered from ${result.source}: ${result.error}`);
+ * }
+ * const config = result.data;
+ * ```
+ */
+export async function readJsonWithRecovery<T>(
+  filePath: string,
+  defaultValue: T,
+  options: ReadJsonRecoveryOptions = {}
+): Promise<ReadJsonRecoveryResult<T>> {
+  const { maxBackups = 3, autoRestore = true } = options;
+  const resolvedPath = path.resolve(filePath);
+  const dirPath = path.dirname(resolvedPath);
+  const fileName = path.basename(resolvedPath);
+
+  // Try to read the main file first
+  try {
+    const content = (await secureFs.readFile(resolvedPath, 'utf-8')) as string;
+    const data = JSON.parse(content) as T;
+    return { data, recovered: false, source: 'main' };
+  } catch (mainError) {
+    const nodeError = mainError as NodeJS.ErrnoException;
+    const errorMessage =
+      nodeError.code === 'ENOENT'
+        ? 'File does not exist'
+        : `Failed to parse: ${mainError instanceof Error ? mainError.message : String(mainError)}`;
+
+    // If file doesn't exist, check for temp files or backups
+    logger.warn(`Main file ${resolvedPath} unavailable: ${errorMessage}`);
+
+    // Try to find and recover from temp files first (in case of interrupted write)
+    try {
+      const files = (await secureFs.readdir(dirPath)) as string[];
+      const tempFiles = files
+        .filter((f: string) => f.startsWith(`${fileName}.tmp.`))
+        .sort()
+        .reverse(); // Most recent first
+
+      for (const tempFile of tempFiles) {
+        const tempPath = path.join(dirPath, tempFile);
+        try {
+          const content = (await secureFs.readFile(tempPath, 'utf-8')) as string;
+          const data = JSON.parse(content) as T;
+
+          logger.info(`Recovered data from temp file: ${tempPath}`);
+
+          // Optionally restore main file from temp
+          if (autoRestore) {
+            try {
+              await secureFs.rename(tempPath, resolvedPath);
+              logger.info(`Restored main file from temp: ${tempPath}`);
+            } catch (restoreError) {
+              logger.warn(`Failed to restore main file from temp: ${restoreError}`);
+            }
+          }
+
+          return { data, recovered: true, source: 'temp', error: errorMessage };
+        } catch {
+          // This temp file is also corrupted, try next
+          continue;
+        }
+      }
+    } catch {
+      // Could not read directory, skip temp file check
+    }
+
+    // Try backup files (.bak1, .bak2, .bak3)
+    for (let i = 1; i <= maxBackups; i++) {
+      const backupPath = `${resolvedPath}.bak${i}`;
+      try {
+        const content = (await secureFs.readFile(backupPath, 'utf-8')) as string;
+        const data = JSON.parse(content) as T;
+
+        logger.info(`Recovered data from backup: ${backupPath}`);
+
+        // Optionally restore main file from backup
+        if (autoRestore) {
+          try {
+            await secureFs.copyFile(backupPath, resolvedPath);
+            logger.info(`Restored main file from backup: ${backupPath}`);
+          } catch (restoreError) {
+            logger.warn(`Failed to restore main file from backup: ${restoreError}`);
+          }
+        }
+
+        return { data, recovered: true, source: 'backup', error: errorMessage };
+      } catch {
+        // This backup doesn't exist or is corrupted, try next
+        continue;
+      }
+    }
+
+    // All recovery attempts failed, return default
+    logger.warn(`All recovery attempts failed for ${resolvedPath}, using default value`);
+    return { data: defaultValue, recovered: true, source: 'default', error: errorMessage };
+  }
+}
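
Taken together, the write and read halves give the crash-safety story this PR is after. An end-to-end sketch of the intended flow (paths illustrative):

```typescript
import { atomicWriteJson, readJsonWithRecovery, DEFAULT_BACKUP_COUNT } from '@automaker/utils';

const file = '/project/.automaker/features/f1/feature.json';

// Every save rotates .bak1..bak3, then writes via temp-file + rename.
await atomicWriteJson(file, { id: 'f1', status: 'pending' }, { backupCount: DEFAULT_BACKUP_COUNT });

// If a crash leaves the main file truncated, the next read falls back to
// .tmp.* files, then .bak1..bak3, then the default - and with autoRestore
// it repairs the main file in place.
const result = await readJsonWithRecovery<{ id: string; status: string } | null>(file, null, {
  maxBackups: DEFAULT_BACKUP_COUNT,
  autoRestore: true,
});
console.log(result.source); // 'main' on the happy path, 'backup' after corruption
```
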
@@ -53,6 +53,20 @@ export {
 // File system utilities
 export { mkdirSafe, existsSafe } from './fs-utils.js';

+// Atomic file operations
+export {
+  atomicWriteJson,
+  readJsonFile,
+  updateJsonAtomically,
+  readJsonWithRecovery,
+  rotateBackups,
+  logRecoveryWarning,
+  DEFAULT_BACKUP_COUNT,
+  type AtomicWriteOptions,
+  type ReadJsonRecoveryResult,
+  type ReadJsonRecoveryOptions,
+} from './atomic-writer.js';
+
 // Path utilities
 export { normalizePath, pathsEqual } from './path-utils.js';

libs/utils/tests/atomic-writer.test.ts (new file, +709 lines)
@@ -0,0 +1,709 @@
+import { describe, it, expect, beforeEach, afterEach, vi, type MockInstance } from 'vitest';
+import fs from 'fs/promises';
+import path from 'path';
+import os from 'os';
+import { secureFs } from '@automaker/platform';
+import {
+  atomicWriteJson,
+  readJsonFile,
+  updateJsonAtomically,
+  readJsonWithRecovery,
+} from '../src/atomic-writer';
+
+// Mock secureFs
+vi.mock('@automaker/platform', () => ({
+  secureFs: {
+    writeFile: vi.fn(),
+    readFile: vi.fn(),
+    rename: vi.fn(),
+    unlink: vi.fn(),
+    readdir: vi.fn(),
+    copyFile: vi.fn(),
+    access: vi.fn(),
+    lstat: vi.fn(),
+    mkdir: vi.fn(),
+  },
+}));
+
+// Mock logger to suppress output during tests
+vi.mock('../src/logger.js', () => ({
+  createLogger: () => ({
+    info: vi.fn(),
+    warn: vi.fn(),
+    error: vi.fn(),
+    debug: vi.fn(),
+  }),
+}));
+
+describe('atomic-writer.ts', () => {
+  let tempDir: string;
+
+  beforeEach(async () => {
+    // Create a temporary directory for integration tests
+    tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'atomic-writer-test-'));
+    vi.clearAllMocks();
+  });
+
+  afterEach(async () => {
+    // Clean up temporary directory
+    try {
+      await fs.rm(tempDir, { recursive: true, force: true });
+    } catch {
+      // Ignore cleanup errors
+    }
+  });
+
+  describe('atomicWriteJson', () => {
+    it('should write JSON data atomically', async () => {
+      const filePath = path.join(tempDir, 'test.json');
+      const data = { key: 'value', number: 42 };
+
+      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
+      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);
+
+      await atomicWriteJson(filePath, data);
+
+      // Verify writeFile was called with temp file path and JSON content
+      expect(secureFs.writeFile).toHaveBeenCalledTimes(1);
+      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
+      expect(writeCall[0]).toMatch(/\.tmp\.\d+$/);
+      expect(writeCall[1]).toBe(JSON.stringify(data, null, 2));
+      expect(writeCall[2]).toBe('utf-8');
+
+      // Verify rename was called with temp -> target
+      expect(secureFs.rename).toHaveBeenCalledTimes(1);
+      const renameCall = (secureFs.rename as unknown as MockInstance).mock.calls[0];
+      expect(renameCall[0]).toMatch(/\.tmp\.\d+$/);
+      expect(renameCall[1]).toBe(path.resolve(filePath));
+    });
+
+    it('should use custom indentation', async () => {
+      const filePath = path.join(tempDir, 'test.json');
+      const data = { key: 'value' };
+
+      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
+      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);
+
+      await atomicWriteJson(filePath, data, { indent: 4 });
+
+      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
+      expect(writeCall[1]).toBe(JSON.stringify(data, null, 4));
+    });
+
+    it('should clean up temp file on write failure', async () => {
+      const filePath = path.join(tempDir, 'test.json');
+      const data = { key: 'value' };
+
+      const writeError = new Error('Write failed');
+      (secureFs.writeFile as unknown as MockInstance).mockRejectedValue(writeError);
+      (secureFs.unlink as unknown as MockInstance).mockResolvedValue(undefined);
+
+      await expect(atomicWriteJson(filePath, data)).rejects.toThrow('Write failed');
+
+      expect(secureFs.unlink).toHaveBeenCalledTimes(1);
+    });
+
+    it('should clean up temp file on rename failure', async () => {
+      const filePath = path.join(tempDir, 'test.json');
+      const data = { key: 'value' };
+
+      const renameError = new Error('Rename failed');
+      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
+      (secureFs.rename as unknown as MockInstance).mockRejectedValue(renameError);
+      (secureFs.unlink as unknown as MockInstance).mockResolvedValue(undefined);
+
+      await expect(atomicWriteJson(filePath, data)).rejects.toThrow('Rename failed');
+
+      expect(secureFs.unlink).toHaveBeenCalledTimes(1);
+    });
+
+    it('should ignore cleanup errors', async () => {
+      const filePath = path.join(tempDir, 'test.json');
+      const data = { key: 'value' };
+
+      const writeError = new Error('Write failed');
+      const unlinkError = new Error('Unlink failed');
+      (secureFs.writeFile as unknown as MockInstance).mockRejectedValue(writeError);
+      (secureFs.unlink as unknown as MockInstance).mockRejectedValue(unlinkError);
+
+      // Should still throw the original error, not the cleanup error
+      await expect(atomicWriteJson(filePath, data)).rejects.toThrow('Write failed');
+    });
+
+    it('should resolve relative paths', async () => {
+      const relativePath = 'test.json';
+      const data = { key: 'value' };
+
+      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
+      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);
+
+      await atomicWriteJson(relativePath, data);
+
+      const renameCall = (secureFs.rename as unknown as MockInstance).mock.calls[0];
+      expect(renameCall[1]).toBe(path.resolve(relativePath));
+    });
+
+    it('should handle arrays as data', async () => {
+      const filePath = path.join(tempDir, 'array.json');
+      const data = [1, 2, 3, { nested: 'value' }];
+
+      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
+      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);
+
+      await atomicWriteJson(filePath, data);
+
+      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
+      expect(writeCall[1]).toBe(JSON.stringify(data, null, 2));
+    });
+
+    it('should handle null and primitive values', async () => {
+      const filePath = path.join(tempDir, 'primitive.json');
+
+      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
+      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);
+
+      await atomicWriteJson(filePath, null);
+      expect((secureFs.writeFile as unknown as MockInstance).mock.calls[0][1]).toBe('null');
+
+      await atomicWriteJson(filePath, 'string');
+      expect((secureFs.writeFile as unknown as MockInstance).mock.calls[1][1]).toBe('"string"');
+
+      await atomicWriteJson(filePath, 123);
+      expect((secureFs.writeFile as unknown as MockInstance).mock.calls[2][1]).toBe('123');
+    });
+
+    it('should create directories when createDirs is true', async () => {
+      const filePath = path.join(tempDir, 'nested', 'deep', 'test.json');
+      const data = { key: 'value' };
+
+      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
+      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);
+      // Mock lstat to indicate directory already exists
+      (secureFs.lstat as unknown as MockInstance).mockResolvedValue({
+        isDirectory: () => true,
+        isSymbolicLink: () => false,
+      });
+
+      await atomicWriteJson(filePath, data, { createDirs: true });
+
+      expect(secureFs.writeFile).toHaveBeenCalled();
+    });
+  });
+
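
Everything in this suite runs against the mocked `secureFs`, so the `tempDir` from `beforeEach` only ever serves as a path prefix. A complementary round-trip test against the real filesystem - in a separate file without the `vi.mock`, so this sketch is an assumption about structure, not part of the PR - could look like:

```typescript
it('round-trips data through a real write and read', async () => {
  const file = path.join(tempDir, 'roundtrip.json');
  await atomicWriteJson(file, { ok: true }, { backupCount: 1 });
  expect(await readJsonFile(file, null)).toEqual({ ok: true });
});
```
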
+  describe('readJsonFile', () => {
+    it('should read and parse JSON file', async () => {
+      const filePath = path.join(tempDir, 'read.json');
+      const data = { key: 'value', count: 5 };
+
+      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(data));
+
+      const result = await readJsonFile(filePath, {});
+
+      expect(result).toEqual(data);
+      expect(secureFs.readFile).toHaveBeenCalledWith(path.resolve(filePath), 'utf-8');
+    });
+
+    it('should return default value when file does not exist', async () => {
+      const filePath = path.join(tempDir, 'nonexistent.json');
+      const defaultValue = { default: true };
+
+      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
+      enoentError.code = 'ENOENT';
+      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);
+
+      const result = await readJsonFile(filePath, defaultValue);
+
+      expect(result).toEqual(defaultValue);
+    });
+
+    it('should return default value when JSON is invalid', async () => {
+      const filePath = path.join(tempDir, 'invalid.json');
+      const defaultValue = { default: true };
+
+      (secureFs.readFile as unknown as MockInstance).mockResolvedValue('not valid json');
+
+      const result = await readJsonFile(filePath, defaultValue);
+
+      expect(result).toEqual(defaultValue);
+    });
+
+    it('should return default value for other read errors', async () => {
+      const filePath = path.join(tempDir, 'error.json');
+      const defaultValue = { default: true };
+
+      const accessError = new Error('Access denied') as NodeJS.ErrnoException;
+      accessError.code = 'EACCES';
+      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(accessError);
+
+      const result = await readJsonFile(filePath, defaultValue);
+
+      expect(result).toEqual(defaultValue);
+    });
+
+    it('should handle empty object as default', async () => {
+      const filePath = path.join(tempDir, 'nonexistent.json');
+
+      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
+      enoentError.code = 'ENOENT';
+      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);
+
+      const result = await readJsonFile<Record<string, unknown>>(filePath, {});
+
+      expect(result).toEqual({});
+    });
+
+    it('should handle array as default', async () => {
+      const filePath = path.join(tempDir, 'nonexistent.json');
+
+      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
+      enoentError.code = 'ENOENT';
+      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);
+
+      const result = await readJsonFile<string[]>(filePath, []);
+
+      expect(result).toEqual([]);
+    });
+
+    it('should parse nested objects correctly', async () => {
+      const filePath = path.join(tempDir, 'nested.json');
+      const data = {
+        level1: {
+          level2: {
+            value: 'deep',
+            array: [1, 2, { nested: true }],
+          },
+        },
+      };
+
+      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(data));
+
+      const result = await readJsonFile(filePath, {});
+
+      expect(result).toEqual(data);
+    });
+  });
+
+  describe('updateJsonAtomically', () => {
+    it('should read, update, and write file atomically', async () => {
+      const filePath = path.join(tempDir, 'update.json');
+      const initialData = { count: 5 };
+      const defaultValue = { count: 0 };
+
+      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(initialData));
+      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
+      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);
+
+      await updateJsonAtomically(filePath, defaultValue, (data) => ({
+        ...data,
+        count: data.count + 1,
+      }));
+
+      // Verify the write was called with updated data
+      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
+      const writtenData = JSON.parse(writeCall[1]);
+      expect(writtenData.count).toBe(6);
+    });
+
+    it('should use default value when file does not exist', async () => {
+      const filePath = path.join(tempDir, 'new.json');
+      const defaultValue = { count: 0 };
+
+      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
+      enoentError.code = 'ENOENT';
+      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);
+      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
+      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);
+
+      await updateJsonAtomically(filePath, defaultValue, (data) => ({
+        ...data,
+        count: data.count + 1,
+      }));
+
+      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
+      const writtenData = JSON.parse(writeCall[1]);
+      expect(writtenData.count).toBe(1);
+    });
+
+    it('should support async updater function', async () => {
+      const filePath = path.join(tempDir, 'async.json');
+      const initialData = { value: 'initial' };
+
+      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(initialData));
+      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
+      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);
+
+      await updateJsonAtomically(filePath, {}, async (data) => {
+        await new Promise((resolve) => setTimeout(resolve, 10));
+        return { ...data, value: 'updated' };
+      });
+
+      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
+      const writtenData = JSON.parse(writeCall[1]);
+      expect(writtenData.value).toBe('updated');
+    });
+
+    it('should pass through options to atomicWriteJson', async () => {
+      const filePath = path.join(tempDir, 'options.json');
+
+      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
+      enoentError.code = 'ENOENT';
+      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);
+      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
+      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);
+
+      await updateJsonAtomically(filePath, { key: 'value' }, (d) => d, { indent: 4 });
+
+      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
+      expect(writeCall[1]).toBe(JSON.stringify({ key: 'value' }, null, 4));
+    });
+  });
+
|
describe('readJsonWithRecovery', () => {
|
||||||
|
it('should return main file data when available', async () => {
|
||||||
|
const filePath = path.join(tempDir, 'main.json');
|
||||||
|
const data = { main: true };
|
||||||
|
|
||||||
|
(secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(data));
|
||||||
|
|
||||||
|
const result = await readJsonWithRecovery(filePath, {});
|
||||||
|
|
||||||
|
expect(result.data).toEqual(data);
|
||||||
|
expect(result.recovered).toBe(false);
|
||||||
|
expect(result.source).toBe('main');
|
||||||
|
expect(result.error).toBeUndefined();
|
||||||
|
});
    it('should recover from temp file when main file is missing', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const tempData = { fromTemp: true };
      const fileName = path.basename(filePath);

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError) // Main file
        .mockResolvedValueOnce(JSON.stringify(tempData)); // Temp file

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([
        `${fileName}.tmp.1234567890`,
        'other-file.json',
      ]);

      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      const result = await readJsonWithRecovery(filePath, {});

      expect(result.data).toEqual(tempData);
      expect(result.recovered).toBe(true);
      expect(result.source).toBe('temp');
      expect(result.error).toBe('File does not exist');
    });
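
    // With no temp files on disk, recovery should walk the numbered backups.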
    it('should recover from backup file when main and temp are unavailable', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const backupData = { fromBackup: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError) // Main file
        .mockRejectedValueOnce(enoentError) // backup1
        .mockResolvedValueOnce(JSON.stringify(backupData)); // backup2

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([]); // No temp files

      (secureFs.copyFile as unknown as MockInstance).mockResolvedValue(undefined);

      const result = await readJsonWithRecovery(filePath, {});

      expect(result.data).toEqual(backupData);
      expect(result.recovered).toBe(true);
      expect(result.source).toBe('backup');
    });
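
    // When every source is exhausted, the caller's default wins and the result
    // is still flagged as recovered so callers can log the failure.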
    it('should return default when all recovery attempts fail', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const defaultValue = { default: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);
      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([]);

      const result = await readJsonWithRecovery(filePath, defaultValue);

      expect(result.data).toEqual(defaultValue);
      expect(result.recovered).toBe(true);
      expect(result.source).toBe('default');
      expect(result.error).toBe('File does not exist');
    });
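
    // Temp-file candidates are tried in order (the listing below puts the most
    // recent first); an unparseable candidate should be skipped in favor of
    // the next one.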
    it('should try multiple temp files in order', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const fileName = path.basename(filePath);
      const validTempData = { valid: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError) // Main file
        .mockResolvedValueOnce('invalid json') // First temp file (invalid)
        .mockResolvedValueOnce(JSON.stringify(validTempData)); // Second temp file

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([
        `${fileName}.tmp.9999999999`, // Most recent
        `${fileName}.tmp.1111111111`, // Older
      ]);

      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      const result = await readJsonWithRecovery(filePath, {});

      expect(result.data).toEqual(validTempData);
      expect(result.source).toBe('temp');
    });
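
    // Backups are tried in numeric order: .bak1 before .bak2.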
    it('should try multiple backup files in order', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const backupData = { backup2: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError) // Main file
        .mockRejectedValueOnce(enoentError) // .bak1
        .mockResolvedValueOnce(JSON.stringify(backupData)); // .bak2

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([]);

      (secureFs.copyFile as unknown as MockInstance).mockResolvedValue(undefined);

      const result = await readJsonWithRecovery(filePath, {});

      expect(result.data).toEqual(backupData);
      expect(result.source).toBe('backup');

      // Verify it tried .bak1 first
      expect(secureFs.readFile).toHaveBeenNthCalledWith(
        2,
        `${path.resolve(filePath)}.bak1`,
        'utf-8'
      );
    });
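
    // maxBackups caps how many .bakN candidates are probed.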
    it('should respect maxBackups option', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const defaultValue = { default: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(enoentError);
      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([]);

      const result = await readJsonWithRecovery(filePath, defaultValue, { maxBackups: 1 });

      expect(result.source).toBe('default');
      // Should only have tried main + 1 backup
      expect(secureFs.readFile).toHaveBeenCalledTimes(2);
    });
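
    // autoRestore controls whether a recovered temp/backup is promoted back to
    // the main path; with it disabled, no rename or copy should happen.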
    it('should not auto-restore when autoRestore is false', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const fileName = path.basename(filePath);
      const tempData = { fromTemp: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError)
        .mockResolvedValueOnce(JSON.stringify(tempData));

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([`${fileName}.tmp.123`]);

      const result = await readJsonWithRecovery(filePath, {}, { autoRestore: false });

      expect(result.data).toEqual(tempData);
      expect(secureFs.rename).not.toHaveBeenCalled();
      expect(secureFs.copyFile).not.toHaveBeenCalled();
    });
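
    // A failure to list the directory should not abort recovery; backups are
    // still consulted.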
    it('should handle directory read errors gracefully', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const backupData = { backup: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError) // Main file
        .mockResolvedValueOnce(JSON.stringify(backupData)); // backup1

      (secureFs.readdir as unknown as MockInstance).mockRejectedValue(new Error('Dir read failed'));
      (secureFs.copyFile as unknown as MockInstance).mockResolvedValue(undefined);

      const result = await readJsonWithRecovery(filePath, {});

      // Should skip temp files and go to backups
      expect(result.data).toEqual(backupData);
      expect(result.source).toBe('backup');
    });
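
    // A main file that exists but fails JSON.parse should surface a parse
    // error in result.error while recovery proceeds.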
    it('should handle corrupted main file with valid error message', async () => {
      const filePath = path.join(tempDir, 'corrupted.json');
      const defaultValue = { default: true };

      // Every read (main and backups) returns unparseable JSON
      (secureFs.readFile as unknown as MockInstance).mockResolvedValue('{{invalid json');
      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([]);

      const result = await readJsonWithRecovery(filePath, defaultValue);

      expect(result.recovered).toBe(true);
      expect(result.error).toContain('Failed to parse');
    });
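
    // If promoting the recovered temp file back to the main path fails, the
    // recovered data should still be returned.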
    it('should handle restore failures gracefully', async () => {
      const filePath = path.join(tempDir, 'data.json');
      const fileName = path.basename(filePath);
      const tempData = { fromTemp: true };

      const enoentError = new Error('File not found') as NodeJS.ErrnoException;
      enoentError.code = 'ENOENT';

      (secureFs.readFile as unknown as MockInstance)
        .mockRejectedValueOnce(enoentError)
        .mockResolvedValueOnce(JSON.stringify(tempData));

      (secureFs.readdir as unknown as MockInstance).mockResolvedValue([`${fileName}.tmp.123`]);
      (secureFs.rename as unknown as MockInstance).mockRejectedValue(new Error('Restore failed'));

      const result = await readJsonWithRecovery(filePath, {});

      // Should still return data even if restore failed
      expect(result.data).toEqual(tempData);
      expect(result.source).toBe('temp');
    });
  });

  describe('Edge cases', () => {
    it('should handle empty file path gracefully', async () => {
      (secureFs.readFile as unknown as MockInstance).mockRejectedValue(new Error('Invalid path'));

      const result = await readJsonFile('', { default: true });

      expect(result).toEqual({ default: true });
    });

    it('should handle special characters in file path', async () => {
      const filePath = path.join(tempDir, 'file with spaces & special!.json');
      const data = { special: 'chars' };

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await atomicWriteJson(filePath, data);

      expect(secureFs.writeFile).toHaveBeenCalled();
    });

    it('should handle very large objects', async () => {
      const filePath = path.join(tempDir, 'large.json');
      const largeArray = Array.from({ length: 10000 }, (_, i) => ({
        id: i,
        data: `item-${i}`,
      }));

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await atomicWriteJson(filePath, largeArray);

      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      expect(JSON.parse(writeCall[1])).toEqual(largeArray);
    });

    it('should handle unicode content', async () => {
      const filePath = path.join(tempDir, 'unicode.json');
      const data = { emoji: '🎉', japanese: 'こんにちは', chinese: '你好' };

      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await atomicWriteJson(filePath, data);

      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      expect(JSON.parse(writeCall[1])).toEqual(data);
    });

    it('should handle circular reference error in JSON', async () => {
      const filePath = path.join(tempDir, 'circular.json');
      const circular: Record<string, unknown> = { key: 'value' };
      circular.self = circular;

      await expect(atomicWriteJson(filePath, circular)).rejects.toThrow();
    });
  });
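
  // The helpers are generic over the stored shape; these tests pin down that
  // the type parameter flows through readJsonFile and updateJsonAtomically.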

  describe('Type safety', () => {
    interface TestConfig {
      version: number;
      settings: {
        enabled: boolean;
        name: string;
      };
    }

    it('should preserve types in readJsonFile', async () => {
      const filePath = path.join(tempDir, 'config.json');
      const expected: TestConfig = {
        version: 1,
        settings: { enabled: true, name: 'test' },
      };

      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(JSON.stringify(expected));

      const result = await readJsonFile<TestConfig>(filePath, {
        version: 0,
        settings: { enabled: false, name: '' },
      });

      expect(result.version).toBe(1);
      expect(result.settings.enabled).toBe(true);
      expect(result.settings.name).toBe('test');
    });

    it('should preserve types in updateJsonAtomically', async () => {
      const filePath = path.join(tempDir, 'counter.json');

      interface Counter {
        count: number;
      }

      (secureFs.readFile as unknown as MockInstance).mockResolvedValue(
        JSON.stringify({ count: 5 })
      );
      (secureFs.writeFile as unknown as MockInstance).mockResolvedValue(undefined);
      (secureFs.rename as unknown as MockInstance).mockResolvedValue(undefined);

      await updateJsonAtomically<Counter>(filePath, { count: 0 }, (data) => ({
        count: data.count + 1,
      }));

      const writeCall = (secureFs.writeFile as unknown as MockInstance).mock.calls[0];
      const writtenData: Counter = JSON.parse(writeCall[1]);
      expect(writtenData.count).toBe(6);
    });
  });
});