Mirror of https://github.com/AutoMaker-Org/automaker.git, synced 2026-01-31 06:42:03 +00:00
Merge pull request #532 from AutoMaker-Org/feature/v0.12.0rc-1768605251997-8ufb
fix: feature.json corruption and data loss on crash

Replaces direct secureFs.writeFile / JSON.parse round-trips on feature.json with the shared atomicWriteJson and readJsonWithRecovery helpers from @automaker/utils, so a crash mid-write no longer corrupts the file and corrupted files are restored from rotating backups.
@@ -5,7 +5,7 @@
 import path from 'path';
 import * as secureFs from '../../lib/secure-fs.js';
 import type { EventEmitter } from '../../lib/events.js';
-import { createLogger } from '@automaker/utils';
+import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
 import { getFeaturesDir } from '@automaker/platform';
 import { extractJsonWithArray } from '../../lib/json-extractor.js';
 import { getNotificationService } from '../../services/notification-service.js';
@@ -74,10 +74,10 @@ export async function parseAndCreateFeatures(
       updatedAt: new Date().toISOString(),
     };

-    await secureFs.writeFile(
-      path.join(featureDir, 'feature.json'),
-      JSON.stringify(featureData, null, 2)
-    );
+    // Use atomic write with backup support for crash protection
+    await atomicWriteJson(path.join(featureDir, 'feature.json'), featureData, {
+      backupCount: DEFAULT_BACKUP_COUNT,
+    });

     createdFeatures.push({ id: feature.id, title: feature.title });
   }
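Note that atomicWriteJson and DEFAULT_BACKUP_COUNT come from @automaker/utils, so their implementation is not part of this diff. A minimal sketch of the pattern the call relies on — write to a temp file, atomically rename over the target, and keep rotating backups of the previous contents. The temp-plus-rename half matches the local helper this PR deletes from the settings service below; the backup rotation and the .bak.N naming are assumptions for illustration only:

// Hypothetical sketch -- not the actual @automaker/utils implementation.
import { promises as fs } from 'fs';

async function atomicWriteJsonSketch(
  filePath: string,
  data: unknown,
  opts: { backupCount: number }
): Promise<void> {
  // Rotate existing backups: .bak.1 -> .bak.2, etc. (oldest falls off the end).
  for (let i = opts.backupCount - 1; i >= 1; i--) {
    await fs.rename(`${filePath}.bak.${i}`, `${filePath}.bak.${i + 1}`).catch(() => {});
  }
  // Preserve the current file as the newest backup (no-op for a brand-new file).
  await fs.copyFile(filePath, `${filePath}.bak.1`).catch(() => {});
  // Write the new contents to a temp file, then rename over the target.
  // rename() is atomic on POSIX filesystems, so a crash mid-write leaves
  // either the old file or the new one in place -- never a truncated mix.
  const tempPath = `${filePath}.tmp.${Date.now()}`;
  await fs.writeFile(tempPath, JSON.stringify(data, null, 2), 'utf-8');
  await fs.rename(tempPath, filePath);
}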
@@ -29,6 +29,10 @@ import {
   appendLearning,
   recordMemoryUsage,
   createLogger,
+  atomicWriteJson,
+  readJsonWithRecovery,
+  logRecoveryWarning,
+  DEFAULT_BACKUP_COUNT,
 } from '@automaker/utils';

 const logger = createLogger('AutoMode');
@@ -1416,13 +1420,13 @@ Address the follow-up instructions above. Review the previous work and make the
       allImagePaths.push(...allPaths);
     }

-    // Save updated feature.json with new images
+    // Save updated feature.json with new images (atomic write with backup)
     if (copiedImagePaths.length > 0 && feature) {
       const featureDirForSave = getFeatureDir(projectPath, featureId);
       const featurePath = path.join(featureDirForSave, 'feature.json');

       try {
-        await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+        await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
       } catch (error) {
         logger.error(`Failed to save feature.json:`, error);
       }
@@ -2092,8 +2096,20 @@ Format your response as a structured markdown document.`;
     const featurePath = path.join(featureDir, 'feature.json');

     try {
-      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-      const feature = JSON.parse(data);
+      // Use recovery-enabled read for corrupted file handling
+      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });
+
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);
+
+      const feature = result.data;
+      if (!feature) {
+        logger.warn(`Feature ${featureId} not found or could not be recovered`);
+        return;
+      }
+
       feature.status = status;
       feature.updatedAt = new Date().toISOString();
       // Set justFinishedAt timestamp when moving to waiting_approval (agent just completed)
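readJsonWithRecovery and logRecoveryWarning are likewise imported from @automaker/utils, so only their call sites appear in this diff. Judging from the options used here (maxBackups, autoRestore) and the result shape (result.data, checked for null), the contract is presumably along these lines; the RecoveryResult fields and the .bak.N fallback order are assumptions for illustration:

// Hypothetical sketch -- not the actual @automaker/utils implementation.
import { promises as fs } from 'fs';

interface RecoveryResult<T> {
  data: T;             // parsed value, or the caller's default
  recovered: boolean;  // true if a backup had to be used
  backupUsed?: string; // which backup file supplied the data
}

async function readJsonWithRecoverySketch<T>(
  filePath: string,
  defaultValue: T,
  opts: { maxBackups: number; autoRestore: boolean }
): Promise<RecoveryResult<T>> {
  // Try the main file first, then each rotating backup in order.
  const candidates = [filePath];
  for (let i = 1; i <= opts.maxBackups; i++) {
    candidates.push(`${filePath}.bak.${i}`);
  }

  for (const candidate of candidates) {
    try {
      const parsed = JSON.parse(await fs.readFile(candidate, 'utf-8')) as T;
      if (candidate !== filePath && opts.autoRestore) {
        // Heal the corrupted main file from the first readable backup.
        await fs.copyFile(candidate, filePath);
      }
      return { data: parsed, recovered: candidate !== filePath, backupUsed: candidate };
    } catch {
      continue; // missing or unparsable -- try the next candidate
    }
  }
  // Nothing readable at all: hand back the default (null at these call sites).
  return { data: defaultValue, recovered: false };
}

Under this reading, logRecoveryWarning simply logs when result.recovered is set, which is consistent with every call site passing a human-readable label and a logger.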
@@ -2104,7 +2120,9 @@ Format your response as a structured markdown document.`;
         // Clear the timestamp when moving to other statuses
         feature.justFinishedAt = undefined;
       }
-      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+
+      // Use atomic write with backup support
+      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

       // Create notifications for important status changes
       const notificationService = getNotificationService();
@@ -2135,8 +2153,8 @@ Format your response as a structured markdown document.`;
           logger.warn(`Failed to sync feature ${featureId} to app_spec.txt:`, syncError);
         }
       }
-    } catch {
-      // Feature file may not exist
+    } catch (error) {
+      logger.error(`Failed to update feature status for ${featureId}:`, error);
     }
   }

@@ -2148,11 +2166,24 @@ Format your response as a structured markdown document.`;
     featureId: string,
     updates: Partial<PlanSpec>
   ): Promise<void> {
-    const featurePath = path.join(projectPath, '.automaker', 'features', featureId, 'feature.json');
+    // Use getFeatureDir helper for consistent path resolution
+    const featureDir = getFeatureDir(projectPath, featureId);
+    const featurePath = path.join(featureDir, 'feature.json');

     try {
-      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-      const feature = JSON.parse(data);
+      // Use recovery-enabled read for corrupted file handling
+      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });
+
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);
+
+      const feature = result.data;
+      if (!feature) {
+        logger.warn(`Feature ${featureId} not found or could not be recovered`);
+        return;
+      }

       // Initialize planSpec if it doesn't exist
       if (!feature.planSpec) {
@@ -2172,7 +2203,9 @@ Format your response as a structured markdown document.`;
       }

       feature.updatedAt = new Date().toISOString();
-      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
+
+      // Use atomic write with backup support
+      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
     } catch (error) {
       logger.error(`Failed to update planSpec for ${featureId}:`, error);
     }
@@ -2189,25 +2222,34 @@ Format your response as a structured markdown document.`;
     const allFeatures: Feature[] = [];
     const pendingFeatures: Feature[] = [];

-    // Load all features (for dependency checking)
+    // Load all features (for dependency checking) with recovery support
     for (const entry of entries) {
       if (entry.isDirectory()) {
         const featurePath = path.join(featuresDir, entry.name, 'feature.json');
-        try {
-          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-          const feature = JSON.parse(data);
-          allFeatures.push(feature);
-
-          // Track pending features separately
-          if (
-            feature.status === 'pending' ||
-            feature.status === 'ready' ||
-            feature.status === 'backlog'
-          ) {
-            pendingFeatures.push(feature);
-          }
-        } catch {
-          // Skip invalid features
+        // Use recovery-enabled read for corrupted file handling
+        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+          maxBackups: DEFAULT_BACKUP_COUNT,
+          autoRestore: true,
+        });
+
+        logRecoveryWarning(result, `Feature ${entry.name}`, logger);
+
+        const feature = result.data;
+        if (!feature) {
+          // Skip features that couldn't be loaded or recovered
+          continue;
+        }
+
+        allFeatures.push(feature);
+
+        // Track pending features separately
+        if (
+          feature.status === 'pending' ||
+          feature.status === 'ready' ||
+          feature.status === 'backlog'
+        ) {
+          pendingFeatures.push(feature);
         }
       }
     }
@@ -3439,31 +3481,39 @@ After generating the revised spec, output:
     for (const entry of entries) {
       if (entry.isDirectory()) {
         const featurePath = path.join(featuresDir, entry.name, 'feature.json');
-        try {
-          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
-          const feature = JSON.parse(data) as Feature;
-
-          // Check if feature was interrupted (in_progress or pipeline_*)
-          if (
-            feature.status === 'in_progress' ||
-            (feature.status && feature.status.startsWith('pipeline_'))
-          ) {
-            // Verify it has existing context (agent-output.md)
-            const featureDir = getFeatureDir(projectPath, feature.id);
-            const contextPath = path.join(featureDir, 'agent-output.md');
-            try {
-              await secureFs.access(contextPath);
-              interruptedFeatures.push(feature);
-              logger.info(
-                `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
-              );
-            } catch {
-              // No context file, skip this feature - it will be restarted fresh
-              logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
-            }
+        // Use recovery-enabled read for corrupted file handling
+        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
+          maxBackups: DEFAULT_BACKUP_COUNT,
+          autoRestore: true,
+        });
+
+        logRecoveryWarning(result, `Feature ${entry.name}`, logger);
+
+        const feature = result.data;
+        if (!feature) {
+          // Skip features that couldn't be loaded or recovered
+          continue;
+        }
+
+        // Check if feature was interrupted (in_progress or pipeline_*)
+        if (
+          feature.status === 'in_progress' ||
+          (feature.status && feature.status.startsWith('pipeline_'))
+        ) {
+          // Verify it has existing context (agent-output.md)
+          const featureDir = getFeatureDir(projectPath, feature.id);
+          const contextPath = path.join(featureDir, 'agent-output.md');
+          try {
+            await secureFs.access(contextPath);
+            interruptedFeatures.push(feature);
+            logger.info(
+              `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
+            );
+          } catch {
+            // No context file, skip this feature - it will be restarted fresh
+            logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
+          }
-        } catch {
-          // Skip invalid features
         }
       }
     }

@@ -5,7 +5,13 @@

 import path from 'path';
 import type { Feature, DescriptionHistoryEntry } from '@automaker/types';
-import { createLogger } from '@automaker/utils';
+import {
+  createLogger,
+  atomicWriteJson,
+  readJsonWithRecovery,
+  logRecoveryWarning,
+  DEFAULT_BACKUP_COUNT,
+} from '@automaker/utils';
 import * as secureFs from '../lib/secure-fs.js';
 import {
   getFeaturesDir,
@@ -194,31 +200,31 @@ export class FeatureLoader {
     })) as any[];
     const featureDirs = entries.filter((entry) => entry.isDirectory());

-    // Load all features concurrently (secureFs has built-in concurrency limiting)
+    // Load all features concurrently with automatic recovery from backups
     const featurePromises = featureDirs.map(async (dir) => {
       const featureId = dir.name;
       const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);

-      try {
-        const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
-        const feature = JSON.parse(content);
+      // Use recovery-enabled read to handle corrupted files
+      const result = await readJsonWithRecovery<Feature | null>(featureJsonPath, null, {
+        maxBackups: DEFAULT_BACKUP_COUNT,
+        autoRestore: true,
+      });

-        if (!feature.id) {
-          logger.warn(`Feature ${featureId} missing required 'id' field, skipping`);
-          return null;
-        }
+      logRecoveryWarning(result, `Feature ${featureId}`, logger);

-        return feature as Feature;
-      } catch (error) {
-        if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
-          return null;
-        } else if (error instanceof SyntaxError) {
-          logger.warn(`Failed to parse feature.json for ${featureId}: ${error.message}`);
-        } else {
-          logger.error(`Failed to load feature ${featureId}:`, (error as Error).message);
-        }
+      const feature = result.data;
+
+      if (!feature) {
+        return null;
+      }
+
+      if (!feature.id) {
+        logger.warn(`Feature ${featureId} missing required 'id' field, skipping`);
         return null;
       }
+
+      return feature;
     });

     const results = await Promise.all(featurePromises);
@@ -303,19 +309,20 @@ export class FeatureLoader {

   /**
    * Get a single feature by ID
+   * Uses automatic recovery from backups if the main file is corrupted
    */
   async get(projectPath: string, featureId: string): Promise<Feature | null> {
-    try {
-      const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
-      const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
-      return JSON.parse(content);
-    } catch (error) {
-      if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
-        return null;
-      }
-      logger.error(`Failed to get feature ${featureId}:`, error);
-      throw error;
-    }
+    const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
+
+    // Use recovery-enabled read to handle corrupted files
+    const result = await readJsonWithRecovery<Feature | null>(featureJsonPath, null, {
+      maxBackups: DEFAULT_BACKUP_COUNT,
+      autoRestore: true,
+    });
+
+    logRecoveryWarning(result, `Feature ${featureId}`, logger);
+
+    return result.data;
   }

   /**
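One behavioral consequence of the rewritten get() worth noting: the old version re-threw unexpected read errors, while the new one reports every failure mode — missing file, unrecoverable corruption — as null. A short illustrative call site (the zero-argument FeatureLoader construction is an assumption):

// Illustrative only: one null check now covers both "missing" and "corrupt".
const loader = new FeatureLoader();
const feature = await loader.get('/path/to/project', 'feature-123');
if (!feature) {
  console.warn('feature-123 is missing or could not be recovered from backups');
} else {
  console.log(`Loaded ${feature.id} with status ${feature.status}`);
}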
@@ -359,8 +366,8 @@ export class FeatureLoader {
       descriptionHistory: initialHistory,
     };

-    // Write feature.json
-    await secureFs.writeFile(featureJsonPath, JSON.stringify(feature, null, 2), 'utf-8');
+    // Write feature.json atomically with backup support
+    await atomicWriteJson(featureJsonPath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

     logger.info(`Created feature ${featureId}`);
     return feature;
@@ -444,9 +451,9 @@ export class FeatureLoader {
       descriptionHistory: updatedHistory,
     };

-    // Write back to file
+    // Write back to file atomically with backup support
     const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
-    await secureFs.writeFile(featureJsonPath, JSON.stringify(updatedFeature, null, 2), 'utf-8');
+    await atomicWriteJson(featureJsonPath, updatedFeature, { backupCount: DEFAULT_BACKUP_COUNT });

     logger.info(`Updated feature ${featureId}`);
     return updatedFeature;
@@ -7,7 +7,7 @@
  * - Per-project settings ({projectPath}/.automaker/settings.json)
  */

-import { createLogger } from '@automaker/utils';
+import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
 import * as secureFs from '../lib/secure-fs.js';

 import {
@@ -42,28 +42,8 @@ import {
 const logger = createLogger('SettingsService');

-/**
- * Atomic file write - write to temp file then rename
- */
-async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
-  const tempPath = `${filePath}.tmp.${Date.now()}`;
-  const content = JSON.stringify(data, null, 2);
-
-  try {
-    await secureFs.writeFile(tempPath, content, 'utf-8');
-    await secureFs.rename(tempPath, filePath);
-  } catch (error) {
-    // Clean up temp file if it exists
-    try {
-      await secureFs.unlink(tempPath);
-    } catch {
-      // Ignore cleanup errors
-    }
-    throw error;
-  }
-}
-
 /**
  * Safely read JSON file with fallback to default
  * Wrapper for readJsonFile from utils that uses the local secureFs
  * to maintain compatibility with the server's secure file system
  */
 async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
   try {
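Worth noting: the deleted local helper already made settings writes atomic via the temp-file-plus-rename trick, so settings files were never at risk of a torn write. What it lacked was any backup history — a file corrupted by other means had no recovery path — and it duplicated logic now shared in @automaker/utils. The hunks below consolidate on the shared helper, with backups, through a small writeSettingsJson wrapper.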
@@ -90,6 +70,13 @@ async function fileExists(filePath: string): Promise<boolean> {
   }
 }

+/**
+ * Write settings atomically with backup support
+ */
+async function writeSettingsJson(filePath: string, data: unknown): Promise<void> {
+  await atomicWriteJson(filePath, data, { backupCount: DEFAULT_BACKUP_COUNT });
+}
+
 /**
  * SettingsService - Manages persistent storage of user settings and credentials
  *
@@ -180,7 +167,7 @@ export class SettingsService {
     if (needsSave) {
       try {
         await ensureDataDir(this.dataDir);
-        await atomicWriteJson(settingsPath, result);
+        await writeSettingsJson(settingsPath, result);
         logger.info('Settings migration complete');
       } catch (error) {
         logger.error('Failed to save migrated settings:', error);
@@ -340,7 +327,7 @@ export class SettingsService {
       };
     }

-    await atomicWriteJson(settingsPath, updated);
+    await writeSettingsJson(settingsPath, updated);
     logger.info('Global settings updated');

     return updated;
@@ -414,7 +401,7 @@ export class SettingsService {
       };
     }

-    await atomicWriteJson(credentialsPath, updated);
+    await writeSettingsJson(credentialsPath, updated);
     logger.info('Credentials updated');

     return updated;
@@ -525,7 +512,7 @@ export class SettingsService {
      };
    }

-    await atomicWriteJson(settingsPath, updated);
+    await writeSettingsJson(settingsPath, updated);
     logger.info(`Project settings updated for ${projectPath}`);

     return updated;