Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-02-01 20:23:36 +00:00)
Merge pull request #532 from AutoMaker-Org/feature/v0.12.0rc-1768605251997-8ufb
fix: feature.json corruption and data loss on crash
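The change is mechanical but touches every feature.json round-trip in auto mode: bare secureFs.writeFile(path, JSON.stringify(...)) calls become atomicWriteJson(...) with backup rotation, and bare secureFs.readFile + JSON.parse becomes readJsonWithRecovery(...), which can fall back to a backup, optionally restore the primary file, and report what happened via logRecoveryWarning.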
@@ -29,6 +29,10 @@ import {
   appendLearning,
   recordMemoryUsage,
   createLogger,
+  atomicWriteJson,
+  readJsonWithRecovery,
+  logRecoveryWarning,
+  DEFAULT_BACKUP_COUNT,
 } from '@automaker/utils';
 
 const logger = createLogger('AutoMode');
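The four new imports carry the whole fix. Their definitions are not part of this diff; the declarations below are only a sketch of their apparent shapes, inferred from the call sites in the hunks that follow. The names come from the diff, but the signatures and the RecoveryResult shape are assumptions.

// Shapes inferred from call sites in this diff -- not the actual
// @automaker/utils definitions, which this PR does not show.
export declare const DEFAULT_BACKUP_COUNT: number;

// Assumed result wrapper, based on `result.data` and
// `logRecoveryWarning(result, ...)` usage below.
export interface RecoveryResult<T> {
  data: T;
  recovered?: boolean; // assumption: true when a backup had to be used
  error?: Error;       // assumption: parse/IO error from the primary file
}

export declare function atomicWriteJson(
  filePath: string,
  value: unknown,
  options?: { backupCount?: number }
): Promise<void>;

export declare function readJsonWithRecovery<T>(
  filePath: string,
  fallback: T,
  options?: { maxBackups?: number; autoRestore?: boolean }
): Promise<RecoveryResult<T>>;

export declare function logRecoveryWarning(
  result: RecoveryResult<unknown>,
  label: string,
  logger: { warn: (msg: string, ...args: unknown[]) => void }
): void;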
@@ -1416,13 +1420,13 @@ Address the follow-up instructions above. Review the previous work and make the
       allImagePaths.push(...allPaths);
     }
 
-    // Save updated feature.json with new images
+    // Save updated feature.json with new images (atomic write with backup)
     if (copiedImagePaths.length > 0 && feature) {
       const featureDirForSave = getFeatureDir(projectPath, featureId);
       const featurePath = path.join(featureDirForSave, 'feature.json');
 
       try {
-        await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+        await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
       } catch (error) {
         logger.error(`Failed to save feature.json:`, error);
       }
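The corruption this PR fixes is the classic truncate-then-crash failure: writeFile empties feature.json before rewriting it, so a crash mid-write leaves an empty or half-written file. The diff never shows atomicWriteJson's body; the following is a minimal sketch of the standard temp-file-and-rename pattern it presumably follows, with an added backup rotation to match the backupCount option. Everything here is illustrative.

import { promises as fs } from 'node:fs';

// Hypothetical implementation sketch -- the real @automaker/utils helper
// may differ. The invariant is what matters: the target path always holds
// either the old complete JSON or the new complete JSON, never a prefix.
async function atomicWriteJsonSketch(
  filePath: string,
  value: unknown,
  { backupCount = 3 }: { backupCount?: number } = {}
): Promise<void> {
  // Rotate backups: file.bak2 -> file.bak3, file.bak1 -> file.bak2, ...
  for (let i = backupCount - 1; i >= 1; i--) {
    await fs.rename(`${filePath}.bak${i}`, `${filePath}.bak${i + 1}`).catch(() => {});
  }
  // Keep the current (known-good) file as the freshest backup.
  await fs.copyFile(filePath, `${filePath}.bak1`).catch(() => {});

  // Write the new content to a temp file, then rename it over the target.
  // rename(2) is atomic on POSIX filesystems within the same directory.
  const tmpPath = `${filePath}.tmp-${process.pid}-${Date.now()}`;
  await fs.writeFile(tmpPath, JSON.stringify(value, null, 2), 'utf-8');
  await fs.rename(tmpPath, filePath);
}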
@@ -2092,8 +2096,20 @@ Format your response as a structured markdown document.`;
     const featurePath = path.join(featureDir, 'feature.json');
 
     try {
-      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-      const feature = JSON.parse(data);
+      // Use recovery-enabled read for corrupted file handling
+      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });
+
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);
+
+      const feature = result.data;
+      if (!feature) {
+        logger.warn(`Feature ${featureId} not found or could not be recovered`);
+        return;
+      }
+
       feature.status = status;
       feature.updatedAt = new Date().toISOString();
       // Set justFinishedAt timestamp when moving to waiting_approval (agent just completed)
@@ -2104,7 +2120,9 @@ Format your response as a structured markdown document.`;
         // Clear the timestamp when moving to other statuses
         feature.justFinishedAt = undefined;
       }
-      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+
+      // Use atomic write with backup support
+      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
 
       // Create notifications for important status changes
       const notificationService = getNotificationService();
@@ -2135,8 +2153,8 @@ Format your response as a structured markdown document.`;
           logger.warn(`Failed to sync feature ${featureId} to app_spec.txt:`, syncError);
         }
       }
-    } catch {
-      // Feature file may not exist
+    } catch (error) {
+      logger.error(`Failed to update feature status for ${featureId}:`, error);
     }
   }
 
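On the read side, a corrupted feature.json used to make JSON.parse throw into a silent catch. Given the maxBackups and autoRestore options and the result shape used above, a recovery-enabled read plausibly tries the primary file first and then each backup in order. This sketch is illustrative only, not the actual utils code:

import { promises as fs } from 'node:fs';

// Illustrative: parse the primary file, then each backup in order;
// optionally rewrite the primary file from the backup that parsed.
async function readJsonWithRecoverySketch<T>(
  filePath: string,
  fallback: T,
  { maxBackups = 3, autoRestore = false }: { maxBackups?: number; autoRestore?: boolean } = {}
): Promise<{ data: T; recovered: boolean; error?: Error }> {
  const candidates = [filePath];
  for (let i = 1; i <= maxBackups; i++) candidates.push(`${filePath}.bak${i}`);

  let firstError: Error | undefined;
  for (const candidate of candidates) {
    try {
      const data = JSON.parse(await fs.readFile(candidate, 'utf-8')) as T;
      const recovered = candidate !== filePath;
      if (recovered && autoRestore) {
        // Heal the primary file so later reads succeed without a backup scan.
        await fs.writeFile(filePath, JSON.stringify(data, null, 2), 'utf-8');
      }
      return { data, recovered, error: firstError };
    } catch (err) {
      firstError ??= err as Error;
    }
  }
  // Nothing parsed: hand back the caller-supplied fallback (null in this diff),
  // which is why every call site below checks `if (!feature)`.
  return { data: fallback, recovered: false, error: firstError };
}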
@@ -2148,11 +2166,24 @@ Format your response as a structured markdown document.`;
     featureId: string,
     updates: Partial<PlanSpec>
   ): Promise<void> {
-    const featurePath = path.join(projectPath, '.automaker', 'features', featureId, 'feature.json');
+    // Use getFeatureDir helper for consistent path resolution
+    const featureDir = getFeatureDir(projectPath, featureId);
+    const featurePath = path.join(featureDir, 'feature.json');
 
     try {
-      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-      const feature = JSON.parse(data);
+      // Use recovery-enabled read for corrupted file handling
+      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });
+
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);
+
+      const feature = result.data;
+      if (!feature) {
+        logger.warn(`Feature ${featureId} not found or could not be recovered`);
+        return;
+      }
 
       // Initialize planSpec if it doesn't exist
       if (!feature.planSpec) {
@@ -2172,7 +2203,9 @@ Format your response as a structured markdown document.`;
       }
 
       feature.updatedAt = new Date().toISOString();
-      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+
+      // Use atomic write with backup support
+      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
     } catch (error) {
       logger.error(`Failed to update planSpec for ${featureId}:`, error);
     }
@@ -2189,25 +2222,34 @@ Format your response as a structured markdown document.`;
     const allFeatures: Feature[] = [];
     const pendingFeatures: Feature[] = [];
 
-    // Load all features (for dependency checking)
+    // Load all features (for dependency checking) with recovery support
     for (const entry of entries) {
       if (entry.isDirectory()) {
         const featurePath = path.join(featuresDir, entry.name, 'feature.json');
-        try {
-          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-          const feature = JSON.parse(data);
-          allFeatures.push(feature);
-
-          // Track pending features separately
-          if (
-            feature.status === 'pending' ||
-            feature.status === 'ready' ||
-            feature.status === 'backlog'
-          ) {
-            pendingFeatures.push(feature);
-          }
-        } catch {
-          // Skip invalid features
-        }
+        // Use recovery-enabled read for corrupted file handling
+        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+          maxBackups: DEFAULT_BACKUP_COUNT,
+          autoRestore: true,
+        });
+
+        logRecoveryWarning(result, `Feature ${entry.name}`, logger);
+
+        const feature = result.data;
+        if (!feature) {
+          // Skip features that couldn't be loaded or recovered
+          continue;
+        }
+
+        allFeatures.push(feature);
+
+        // Track pending features separately
+        if (
+          feature.status === 'pending' ||
+          feature.status === 'ready' ||
+          feature.status === 'backlog'
+        ) {
+          pendingFeatures.push(feature);
+        }
       }
     }
@@ -3439,31 +3481,39 @@ After generating the revised spec, output:
     for (const entry of entries) {
       if (entry.isDirectory()) {
         const featurePath = path.join(featuresDir, entry.name, 'feature.json');
-        try {
-          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-          const feature = JSON.parse(data) as Feature;
-
-          // Check if feature was interrupted (in_progress or pipeline_*)
-          if (
-            feature.status === 'in_progress' ||
-            (feature.status && feature.status.startsWith('pipeline_'))
-          ) {
-            // Verify it has existing context (agent-output.md)
-            const featureDir = getFeatureDir(projectPath, feature.id);
-            const contextPath = path.join(featureDir, 'agent-output.md');
-            try {
-              await secureFs.access(contextPath);
-              interruptedFeatures.push(feature);
-              logger.info(
-                `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
-              );
-            } catch {
-              // No context file, skip this feature - it will be restarted fresh
-              logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
-            }
-          }
-        } catch {
-          // Skip invalid features
-        }
+        // Use recovery-enabled read for corrupted file handling
+        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+          maxBackups: DEFAULT_BACKUP_COUNT,
+          autoRestore: true,
+        });
+
+        logRecoveryWarning(result, `Feature ${entry.name}`, logger);
+
+        const feature = result.data;
+        if (!feature) {
+          // Skip features that couldn't be loaded or recovered
+          continue;
+        }
+
+        // Check if feature was interrupted (in_progress or pipeline_*)
+        if (
+          feature.status === 'in_progress' ||
+          (feature.status && feature.status.startsWith('pipeline_'))
+        ) {
+          // Verify it has existing context (agent-output.md)
+          const featureDir = getFeatureDir(projectPath, feature.id);
+          const contextPath = path.join(featureDir, 'agent-output.md');
+          try {
+            await secureFs.access(contextPath);
+            interruptedFeatures.push(feature);
+            logger.info(
+              `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
+            );
+          } catch {
+            // No context file, skip this feature - it will be restarted fresh
+            logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
+          }
+        }
       }
     }