feat: implement atomic file writing and recovery utilities

- Introduced atomic write functionality for JSON files to ensure data integrity during writes.
- Added recovery mechanisms that read JSON files and fall back to backups when the primary file is corrupted or missing.
- Updated existing services to use the atomic write and recovery helpers for improved reliability (a sketch of both utilities follows this list).
- Extended tests to cover the new atomic-write and recovery scenarios, ensuring robust error handling and data consistency.
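For orientation, here is a minimal sketch of what the two utilities provide. The option names (`backupCount`, `maxBackups`, `autoRestore`) and the `result.data` shape are taken from the call sites in the diff below; the internals (a temp-file-plus-rename write and numbered `.bak` rotation) are assumptions about the implementation, not the actual `@automaker/utils` code.

```ts
// Hypothetical sketch; mirrors the call sites below, internals are assumed.
import { promises as fs } from 'node:fs';

export const DEFAULT_BACKUP_COUNT = 3; // assumed default

export interface RecoveryResult<T> {
  data: T;
  recovered: boolean; // true when the data came from a backup
  source?: string;    // path the data was actually read from
}

// Write JSON through a temp file + rename so readers never see a torn write;
// the previous contents are rotated into numbered .bak files first.
export async function atomicWriteJson(
  filePath: string,
  value: unknown,
  opts: { backupCount?: number } = {}
): Promise<void> {
  const backups = opts.backupCount ?? DEFAULT_BACKUP_COUNT;
  // Rotate: file.bak2 -> file.bak3, file.bak1 -> file.bak2, file -> file.bak1
  for (let i = backups - 1; i >= 1; i--) {
    await fs.rename(`${filePath}.bak${i}`, `${filePath}.bak${i + 1}`).catch(() => {});
  }
  await fs.copyFile(filePath, `${filePath}.bak1`).catch(() => {});
  const tmp = `${filePath}.${process.pid}.tmp`;
  await fs.writeFile(tmp, JSON.stringify(value, null, 2), 'utf-8');
  await fs.rename(tmp, filePath); // atomic on POSIX within one filesystem
}

// Read JSON, falling back to the newest parseable backup when the primary
// file is missing or corrupt; optionally copy that backup back into place.
export async function readJsonWithRecovery<T>(
  filePath: string,
  fallback: T,
  opts: { maxBackups?: number; autoRestore?: boolean } = {}
): Promise<RecoveryResult<T>> {
  const candidates = [filePath];
  for (let i = 1; i <= (opts.maxBackups ?? DEFAULT_BACKUP_COUNT); i++) {
    candidates.push(`${filePath}.bak${i}`);
  }
  for (const candidate of candidates) {
    try {
      const data = JSON.parse(await fs.readFile(candidate, 'utf-8')) as T;
      const recovered = candidate !== filePath;
      if (recovered && opts.autoRestore) {
        await fs.copyFile(candidate, filePath); // restore backup in place
      }
      return { data, recovered, source: candidate };
    } catch {
      // Missing or unparseable file: try the next backup.
    }
  }
  return { data: fallback, recovered: false }; // nothing usable; hand back the fallback
}

// Surface a warning when a read had to fall back to a backup.
export function logRecoveryWarning<T>(
  result: RecoveryResult<T>,
  label: string,
  logger: { warn: (...args: unknown[]) => void }
): void {
  if (result.recovered) {
    logger.warn(`${label} was recovered from backup: ${result.source}`);
  }
}
```

Under these assumed semantics, the call sites below become one-liners: `atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT })` replaces the raw `secureFs.writeFile`, and `readJsonWithRecovery` replaces the `readFile` + `JSON.parse` pairs.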
Author: Shirone
Date:   2026-01-17 00:50:51 +01:00
Parent: cc9f7d48c8
Commit: 8661f33c6d

8 changed files with 1251 additions and 118 deletions


@@ -29,6 +29,10 @@ import {
   appendLearning,
   recordMemoryUsage,
   createLogger,
+  atomicWriteJson,
+  readJsonWithRecovery,
+  logRecoveryWarning,
+  DEFAULT_BACKUP_COUNT,
 } from '@automaker/utils';
 
 const logger = createLogger('AutoMode');
@@ -1414,13 +1418,13 @@ Address the follow-up instructions above. Review the previous work and make the
       allImagePaths.push(...allPaths);
     }
 
-    // Save updated feature.json with new images
+    // Save updated feature.json with new images (atomic write with backup)
     if (copiedImagePaths.length > 0 && feature) {
       const featureDirForSave = getFeatureDir(projectPath, featureId);
       const featurePath = path.join(featureDirForSave, 'feature.json');
       try {
-        await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+        await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
       } catch (error) {
         logger.error(`Failed to save feature.json:`, error);
       }
@@ -2088,8 +2092,20 @@ Format your response as a structured markdown document.`;
     const featurePath = path.join(featureDir, 'feature.json');
 
     try {
-      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-      const feature = JSON.parse(data);
+      // Use recovery-enabled read for corrupted file handling
+      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);
+      const feature = result.data;
+
+      if (!feature) {
+        logger.warn(`Feature ${featureId} not found or could not be recovered`);
+        return;
+      }
+
       feature.status = status;
       feature.updatedAt = new Date().toISOString();
       // Set justFinishedAt timestamp when moving to waiting_approval (agent just completed)
@@ -2100,7 +2116,9 @@ Format your response as a structured markdown document.`;
         // Clear the timestamp when moving to other statuses
         feature.justFinishedAt = undefined;
       }
-      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+      // Use atomic write with backup support
+      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
 
       // Sync completed/verified features to app_spec.txt
       if (status === 'verified' || status === 'completed') {
@@ -2111,8 +2129,8 @@ Format your response as a structured markdown document.`;
           logger.warn(`Failed to sync feature ${featureId} to app_spec.txt:`, syncError);
         }
       }
-    } catch {
-      // Feature file may not exist
+    } catch (error) {
+      logger.error(`Failed to update feature status for ${featureId}:`, error);
     }
   }
@@ -2124,11 +2142,24 @@ Format your response as a structured markdown document.`;
     featureId: string,
     updates: Partial<PlanSpec>
   ): Promise<void> {
-    const featurePath = path.join(projectPath, '.automaker', 'features', featureId, 'feature.json');
+    // Use getFeatureDir helper for consistent path resolution
+    const featureDir = getFeatureDir(projectPath, featureId);
+    const featurePath = path.join(featureDir, 'feature.json');
 
     try {
-      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-      const feature = JSON.parse(data);
+      // Use recovery-enabled read for corrupted file handling
+      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);
+      const feature = result.data;
+
+      if (!feature) {
+        logger.warn(`Feature ${featureId} not found or could not be recovered`);
+        return;
+      }
+
       // Initialize planSpec if it doesn't exist
       if (!feature.planSpec) {
@@ -2148,7 +2179,9 @@ Format your response as a structured markdown document.`;
       }
 
       feature.updatedAt = new Date().toISOString();
-      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+      // Use atomic write with backup support
+      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
     } catch (error) {
       logger.error(`Failed to update planSpec for ${featureId}:`, error);
     }
@@ -2165,25 +2198,34 @@ Format your response as a structured markdown document.`;
     const allFeatures: Feature[] = [];
     const pendingFeatures: Feature[] = [];
 
-    // Load all features (for dependency checking)
+    // Load all features (for dependency checking) with recovery support
    for (const entry of entries) {
       if (entry.isDirectory()) {
         const featurePath = path.join(featuresDir, entry.name, 'feature.json');
-        try {
-          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-          const feature = JSON.parse(data);
-          allFeatures.push(feature);
-
-          // Track pending features separately
-          if (
-            feature.status === 'pending' ||
-            feature.status === 'ready' ||
-            feature.status === 'backlog'
-          ) {
-            pendingFeatures.push(feature);
-          }
-        } catch {
-          // Skip invalid features
-        }
+        // Use recovery-enabled read for corrupted file handling
+        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+          maxBackups: DEFAULT_BACKUP_COUNT,
+          autoRestore: true,
+        });
+        logRecoveryWarning(result, `Feature ${entry.name}`, logger);
+        const feature = result.data;
+
+        if (!feature) {
+          // Skip features that couldn't be loaded or recovered
+          continue;
+        }
+
+        allFeatures.push(feature);
+
+        // Track pending features separately
+        if (
+          feature.status === 'pending' ||
+          feature.status === 'ready' ||
+          feature.status === 'backlog'
+        ) {
+          pendingFeatures.push(feature);
+        }
       }
     }
@@ -3415,31 +3457,39 @@ After generating the revised spec, output:
     for (const entry of entries) {
       if (entry.isDirectory()) {
         const featurePath = path.join(featuresDir, entry.name, 'feature.json');
-        try {
-          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-          const feature = JSON.parse(data) as Feature;
-
-          // Check if feature was interrupted (in_progress or pipeline_*)
-          if (
-            feature.status === 'in_progress' ||
-            (feature.status && feature.status.startsWith('pipeline_'))
-          ) {
-            // Verify it has existing context (agent-output.md)
-            const featureDir = getFeatureDir(projectPath, feature.id);
-            const contextPath = path.join(featureDir, 'agent-output.md');
-            try {
-              await secureFs.access(contextPath);
-              interruptedFeatures.push(feature);
-              logger.info(
-                `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
-              );
-            } catch {
-              // No context file, skip this feature - it will be restarted fresh
-              logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
-            }
-          }
-        } catch {
-          // Skip invalid features
-        }
+        // Use recovery-enabled read for corrupted file handling
+        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+          maxBackups: DEFAULT_BACKUP_COUNT,
+          autoRestore: true,
+        });
+        logRecoveryWarning(result, `Feature ${entry.name}`, logger);
+        const feature = result.data;
+
+        if (!feature) {
+          // Skip features that couldn't be loaded or recovered
+          continue;
+        }
+
+        // Check if feature was interrupted (in_progress or pipeline_*)
+        if (
+          feature.status === 'in_progress' ||
+          (feature.status && feature.status.startsWith('pipeline_'))
+        ) {
+          // Verify it has existing context (agent-output.md)
+          const featureDir = getFeatureDir(projectPath, feature.id);
+          const contextPath = path.join(featureDir, 'agent-output.md');
+          try {
+            await secureFs.access(contextPath);
+            interruptedFeatures.push(feature);
+            logger.info(
+              `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
+            );
+          } catch {
+            // No context file, skip this feature - it will be restarted fresh
+            logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
+          }
+        }
       }
     }