Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-02-02 20:43:36 +00:00)
feat(01-02): extract FeatureStateManager from AutoModeService
- Create FeatureStateManager class for feature status updates
- Extract updateFeatureStatus, markFeatureInterrupted, resetStuckFeatures
- Extract updateFeaturePlanSpec, saveFeatureSummary, updateTaskStatus
- Persist BEFORE emit pattern for data integrity (Pitfall 2)
- Handle corrupted JSON with readJsonWithRecovery backup support
- Preserve pipeline_* statuses in markFeatureInterrupted
- Fix bug: version increment now checks old content before applying updates
- Add 33 unit tests covering all state management operations
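A minimal usage sketch of the extracted service (hypothetical wiring, not part of this commit: the constructor and method signatures match the new file below, while the caller function, feature id, and project path are placeholders):

    import { FeatureStateManager } from './feature-state-manager.js';
    import type { EventEmitter } from '../lib/events.js';
    import type { FeatureLoader } from './feature-loader.js';

    // Hypothetical caller (e.g. AutoModeService) supplies the shared event emitter and
    // feature loader, then delegates feature state changes to the new service.
    export async function onAutoModeEnabled(
      projectPath: string,
      events: EventEmitter,
      featureLoader: FeatureLoader
    ): Promise<void> {
      const stateManager = new FeatureStateManager(events, featureLoader);

      // Clean up features left in transient states by a previous session.
      await stateManager.resetStuckFeatures(projectPath);

      // Status changes are persisted to disk before any event or notification is emitted.
      await stateManager.updateFeatureStatus(projectPath, 'feature-123', 'in_progress');
    }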
apps/server/src/services/feature-state-manager.ts (new file, 442 lines)

@@ -0,0 +1,442 @@
/**
 * FeatureStateManager - Manages feature status updates with proper persistence
 *
 * Extracted from AutoModeService to provide a standalone service for:
 * - Updating feature status with proper disk persistence
 * - Handling corrupted JSON with backup recovery
 * - Emitting events AFTER successful persistence (prevent stale data on refresh)
 * - Resetting stuck features after server restart
 *
 * Key behaviors:
 * - Persist BEFORE emit (Pitfall 2 from research)
 * - Use readJsonWithRecovery for all reads
 * - markInterrupted preserves pipeline_* statuses
 */

import path from 'path';
import type { Feature, ParsedTask, PlanSpec } from '@automaker/types';
import {
  atomicWriteJson,
  readJsonWithRecovery,
  logRecoveryWarning,
  DEFAULT_BACKUP_COUNT,
  createLogger,
} from '@automaker/utils';
import { getFeatureDir, getFeaturesDir } from '@automaker/platform';
import * as secureFs from '../lib/secure-fs.js';
import type { EventEmitter } from '../lib/events.js';
import { getNotificationService } from './notification-service.js';
import { FeatureLoader } from './feature-loader.js';

const logger = createLogger('FeatureStateManager');

/**
 * FeatureStateManager handles feature status updates with persistence guarantees.
 *
 * This service is responsible for:
 * 1. Updating feature status and persisting to disk BEFORE emitting events
 * 2. Handling corrupted JSON with automatic backup recovery
 * 3. Resetting stuck features after server restarts
 * 4. Managing justFinishedAt timestamps for UI badges
 */
export class FeatureStateManager {
  private events: EventEmitter;
  private featureLoader: FeatureLoader;

  constructor(events: EventEmitter, featureLoader: FeatureLoader) {
    this.events = events;
    this.featureLoader = featureLoader;
  }

  /**
   * Load a feature from disk with recovery support
   *
   * @param projectPath - Path to the project
   * @param featureId - ID of the feature to load
   * @returns The feature data, or null if not found/recoverable
   */
  async loadFeature(projectPath: string, featureId: string): Promise<Feature | null> {
    const featureDir = getFeatureDir(projectPath, featureId);
    const featurePath = path.join(featureDir, 'feature.json');

    try {
      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
      return JSON.parse(data);
    } catch {
      return null;
    }
  }

  /**
   * Update feature status with proper persistence and event ordering.
   *
   * IMPORTANT: Persists to disk BEFORE emitting events to prevent stale data
   * on client refresh (Pitfall 2 from research).
   *
   * @param projectPath - Path to the project
   * @param featureId - ID of the feature to update
   * @param status - New status value
   */
  async updateFeatureStatus(projectPath: string, featureId: string, status: string): Promise<void> {
    const featureDir = getFeatureDir(projectPath, featureId);
    const featurePath = path.join(featureDir, 'feature.json');

    try {
      // Use recovery-enabled read for corrupted file handling
      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
        maxBackups: DEFAULT_BACKUP_COUNT,
        autoRestore: true,
      });

      logRecoveryWarning(result, `Feature ${featureId}`, logger);

      const feature = result.data;
      if (!feature) {
        logger.warn(`Feature ${featureId} not found or could not be recovered`);
        return;
      }

      feature.status = status;
      feature.updatedAt = new Date().toISOString();

      // Set justFinishedAt timestamp when moving to waiting_approval (agent just completed)
      // Badge will show for 2 minutes after this timestamp
      if (status === 'waiting_approval') {
        feature.justFinishedAt = new Date().toISOString();
      } else {
        // Clear the timestamp when moving to other statuses
        feature.justFinishedAt = undefined;
      }

      // PERSIST BEFORE EMIT (Pitfall 2)
      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

      // Create notifications for important status changes
      const notificationService = getNotificationService();
      if (status === 'waiting_approval') {
        await notificationService.createNotification({
          type: 'feature_waiting_approval',
          title: 'Feature Ready for Review',
          message: `"${feature.name || featureId}" is ready for your review and approval.`,
          featureId,
          projectPath,
        });
      } else if (status === 'verified') {
        await notificationService.createNotification({
          type: 'feature_verified',
          title: 'Feature Verified',
          message: `"${feature.name || featureId}" has been verified and is complete.`,
          featureId,
          projectPath,
        });
      }

      // Sync completed/verified features to app_spec.txt
      if (status === 'verified' || status === 'completed') {
        try {
          await this.featureLoader.syncFeatureToAppSpec(projectPath, feature);
        } catch (syncError) {
          // Log but don't fail the status update if sync fails
          logger.warn(`Failed to sync feature ${featureId} to app_spec.txt:`, syncError);
        }
      }
    } catch (error) {
      logger.error(`Failed to update feature status for ${featureId}:`, error);
    }
  }

  /**
   * Mark a feature as interrupted due to server restart or other interruption.
   *
   * This is a convenience helper that updates the feature status to 'interrupted',
   * indicating the feature was in progress but execution was disrupted (e.g., server
   * restart, process crash, or manual stop). Features with this status can be
   * resumed later using the resume functionality.
   *
   * Note: Features with pipeline_* statuses are preserved rather than overwritten
   * to 'interrupted'. This ensures that resumePipelineFeature() can pick up from
   * the correct pipeline step after a restart.
   *
   * @param projectPath - Path to the project
   * @param featureId - ID of the feature to mark as interrupted
   * @param reason - Optional reason for the interruption (logged for debugging)
   */
  async markFeatureInterrupted(
    projectPath: string,
    featureId: string,
    reason?: string
  ): Promise<void> {
    // Load the feature to check its current status
    const feature = await this.loadFeature(projectPath, featureId);
    const currentStatus = feature?.status;

    // Preserve pipeline_* statuses so resumePipelineFeature can resume from the correct step
    if (currentStatus && currentStatus.startsWith('pipeline_')) {
      logger.info(
        `Feature ${featureId} was in ${currentStatus}; preserving pipeline status for resume`
      );
      return;
    }

    if (reason) {
      logger.info(`Marking feature ${featureId} as interrupted: ${reason}`);
    } else {
      logger.info(`Marking feature ${featureId} as interrupted`);
    }

    await this.updateFeatureStatus(projectPath, featureId, 'interrupted');
  }

  /**
   * Reset features that were stuck in transient states due to server crash.
   * Called when auto mode is enabled to clean up from previous session.
   *
   * Resets:
   * - in_progress features back to ready (if has plan) or backlog (if no plan)
   * - generating planSpec status back to pending
   * - in_progress tasks back to pending
   *
   * @param projectPath - The project path to reset features for
   */
  async resetStuckFeatures(projectPath: string): Promise<void> {
    const featuresDir = getFeaturesDir(projectPath);

    try {
      const entries = await secureFs.readdir(featuresDir, { withFileTypes: true });

      for (const entry of entries) {
        if (!entry.isDirectory()) continue;

        const featurePath = path.join(featuresDir, entry.name, 'feature.json');
        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
          maxBackups: DEFAULT_BACKUP_COUNT,
          autoRestore: true,
        });

        const feature = result.data;
        if (!feature) continue;

        let needsUpdate = false;

        // Reset in_progress features back to ready/backlog
        if (feature.status === 'in_progress') {
          const hasApprovedPlan = feature.planSpec?.status === 'approved';
          feature.status = hasApprovedPlan ? 'ready' : 'backlog';
          needsUpdate = true;
          logger.info(
            `[resetStuckFeatures] Reset feature ${feature.id} from in_progress to ${feature.status}`
          );
        }

        // Reset generating planSpec status back to pending (spec generation was interrupted)
        if (feature.planSpec?.status === 'generating') {
          feature.planSpec.status = 'pending';
          needsUpdate = true;
          logger.info(
            `[resetStuckFeatures] Reset feature ${feature.id} planSpec status from generating to pending`
          );
        }

        // Reset any in_progress tasks back to pending (task execution was interrupted)
        if (feature.planSpec?.tasks) {
          for (const task of feature.planSpec.tasks) {
            if (task.status === 'in_progress') {
              task.status = 'pending';
              needsUpdate = true;
              logger.info(
                `[resetStuckFeatures] Reset task ${task.id} for feature ${feature.id} from in_progress to pending`
              );
              // Clear currentTaskId if it points to this reverted task
              if (feature.planSpec?.currentTaskId === task.id) {
                feature.planSpec.currentTaskId = undefined;
                logger.info(
                  `[resetStuckFeatures] Cleared planSpec.currentTaskId for feature ${feature.id} (was pointing to reverted task ${task.id})`
                );
              }
            }
          }
        }

        if (needsUpdate) {
          feature.updatedAt = new Date().toISOString();
          await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
        }
      }
    } catch (error) {
      // If features directory doesn't exist, that's fine
      if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
        logger.error(`[resetStuckFeatures] Error resetting features for ${projectPath}:`, error);
      }
    }
  }

  /**
   * Update the planSpec of a feature with partial updates.
   *
   * @param projectPath - The project path
   * @param featureId - The feature ID
   * @param updates - Partial PlanSpec updates to apply
   */
  async updateFeaturePlanSpec(
    projectPath: string,
    featureId: string,
    updates: Partial<PlanSpec>
  ): Promise<void> {
    const featureDir = getFeatureDir(projectPath, featureId);
    const featurePath = path.join(featureDir, 'feature.json');

    try {
      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
        maxBackups: DEFAULT_BACKUP_COUNT,
        autoRestore: true,
      });

      logRecoveryWarning(result, `Feature ${featureId}`, logger);

      const feature = result.data;
      if (!feature) {
        logger.warn(`Feature ${featureId} not found or could not be recovered`);
        return;
      }

      // Initialize planSpec if it doesn't exist
      if (!feature.planSpec) {
        feature.planSpec = {
          status: 'pending',
          version: 1,
          reviewedByUser: false,
        };
      }

      // Capture old content BEFORE applying updates for version comparison
      const oldContent = feature.planSpec.content;

      // Apply updates
      Object.assign(feature.planSpec, updates);

      // If content is being updated and it's different from old content, increment version
      if (updates.content && updates.content !== oldContent) {
        feature.planSpec.version = (feature.planSpec.version || 0) + 1;
      }

      feature.updatedAt = new Date().toISOString();

      // PERSIST BEFORE EMIT
      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
    } catch (error) {
      logger.error(`Failed to update planSpec for ${featureId}:`, error);
    }
  }

  /**
   * Save the extracted summary to a feature's summary field.
   * This is called after agent execution completes to save a summary
   * extracted from the agent's output using <summary> tags.
   *
   * @param projectPath - The project path
   * @param featureId - The feature ID
   * @param summary - The summary text to save
   */
  async saveFeatureSummary(projectPath: string, featureId: string, summary: string): Promise<void> {
    const featureDir = getFeatureDir(projectPath, featureId);
    const featurePath = path.join(featureDir, 'feature.json');

    try {
      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
        maxBackups: DEFAULT_BACKUP_COUNT,
        autoRestore: true,
      });

      logRecoveryWarning(result, `Feature ${featureId}`, logger);

      const feature = result.data;
      if (!feature) {
        logger.warn(`Feature ${featureId} not found or could not be recovered`);
        return;
      }

      feature.summary = summary;
      feature.updatedAt = new Date().toISOString();

      // PERSIST BEFORE EMIT
      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

      // Emit event for UI update
      this.emitAutoModeEvent('auto_mode_summary', {
        featureId,
        projectPath,
        summary,
      });
    } catch (error) {
      logger.error(`Failed to save summary for ${featureId}:`, error);
    }
  }

  /**
   * Update the status of a specific task within planSpec.tasks
   *
   * @param projectPath - The project path
   * @param featureId - The feature ID
   * @param taskId - The task ID to update
   * @param status - The new task status
   */
  async updateTaskStatus(
    projectPath: string,
    featureId: string,
    taskId: string,
    status: ParsedTask['status']
  ): Promise<void> {
    const featureDir = getFeatureDir(projectPath, featureId);
    const featurePath = path.join(featureDir, 'feature.json');

    try {
      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
        maxBackups: DEFAULT_BACKUP_COUNT,
        autoRestore: true,
      });

      logRecoveryWarning(result, `Feature ${featureId}`, logger);

      const feature = result.data;
      if (!feature || !feature.planSpec?.tasks) {
        logger.warn(`Feature ${featureId} not found or has no tasks`);
        return;
      }

      // Find and update the task
      const task = feature.planSpec.tasks.find((t) => t.id === taskId);
      if (task) {
        task.status = status;
        feature.updatedAt = new Date().toISOString();

        // PERSIST BEFORE EMIT
        await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

        // Emit event for UI update
        this.emitAutoModeEvent('auto_mode_task_status', {
          featureId,
          projectPath,
          taskId,
          status,
          tasks: feature.planSpec.tasks,
        });
      }
    } catch (error) {
      logger.error(`Failed to update task ${taskId} status for ${featureId}:`, error);
    }
  }

  /**
   * Emit an auto-mode event via the event emitter
   *
   * @param eventType - The event type (e.g., 'auto_mode_summary')
   * @param data - The event payload
   */
  private emitAutoModeEvent(eventType: string, data: Record<string, unknown>): void {
    // Wrap the event in auto-mode:event format expected by the client
    this.events.emit('auto-mode:event', {
      type: eventType,
      ...data,
    });
  }
}
apps/server/tests/unit/services/feature-state-manager.test.ts (new file, 657 lines)

@@ -0,0 +1,657 @@
import { describe, it, expect, beforeEach, vi, type Mock } from 'vitest';
import { FeatureStateManager } from '@/services/feature-state-manager.js';
import type { Feature } from '@automaker/types';
import type { EventEmitter } from '@/lib/events.js';
import type { FeatureLoader } from '@/services/feature-loader.js';
import * as secureFs from '@/lib/secure-fs.js';
import { atomicWriteJson, readJsonWithRecovery } from '@automaker/utils';
import { getFeatureDir, getFeaturesDir } from '@automaker/platform';
import { getNotificationService } from '@/services/notification-service.js';

// Mock dependencies
vi.mock('@/lib/secure-fs.js', () => ({
  readFile: vi.fn(),
  readdir: vi.fn(),
}));

vi.mock('@automaker/utils', async (importOriginal) => {
  const actual = await importOriginal<typeof import('@automaker/utils')>();
  return {
    ...actual,
    atomicWriteJson: vi.fn(),
    readJsonWithRecovery: vi.fn(),
    logRecoveryWarning: vi.fn(),
  };
});

vi.mock('@automaker/platform', () => ({
  getFeatureDir: vi.fn(),
  getFeaturesDir: vi.fn(),
}));

vi.mock('@/services/notification-service.js', () => ({
  getNotificationService: vi.fn(() => ({
    createNotification: vi.fn(),
  })),
}));

describe('FeatureStateManager', () => {
  let manager: FeatureStateManager;
  let mockEvents: EventEmitter;
  let mockFeatureLoader: FeatureLoader;

  const mockFeature: Feature = {
    id: 'feature-123',
    name: 'Test Feature',
    title: 'Test Feature Title',
    description: 'A test feature',
    status: 'pending',
    createdAt: '2024-01-01T00:00:00Z',
    updatedAt: '2024-01-01T00:00:00Z',
  };

  beforeEach(() => {
    vi.clearAllMocks();

    mockEvents = {
      emit: vi.fn(),
      subscribe: vi.fn(() => vi.fn()),
    };

    mockFeatureLoader = {
      syncFeatureToAppSpec: vi.fn(),
    } as unknown as FeatureLoader;

    manager = new FeatureStateManager(mockEvents, mockFeatureLoader);

    // Default mocks
    (getFeatureDir as Mock).mockReturnValue('/project/.automaker/features/feature-123');
    (getFeaturesDir as Mock).mockReturnValue('/project/.automaker/features');
  });

  describe('loadFeature', () => {
    it('should load feature from disk', async () => {
      (secureFs.readFile as Mock).mockResolvedValue(JSON.stringify(mockFeature));

      const feature = await manager.loadFeature('/project', 'feature-123');

      expect(feature).toEqual(mockFeature);
      expect(getFeatureDir).toHaveBeenCalledWith('/project', 'feature-123');
      expect(secureFs.readFile).toHaveBeenCalledWith(
        '/project/.automaker/features/feature-123/feature.json',
        'utf-8'
      );
    });

    it('should return null if feature does not exist', async () => {
      (secureFs.readFile as Mock).mockRejectedValue(new Error('ENOENT'));

      const feature = await manager.loadFeature('/project', 'non-existent');

      expect(feature).toBeNull();
    });

    it('should return null if feature JSON is invalid', async () => {
      (secureFs.readFile as Mock).mockResolvedValue('not valid json');

      const feature = await manager.loadFeature('/project', 'feature-123');

      expect(feature).toBeNull();
    });
  });

  describe('updateFeatureStatus', () => {
    it('should update feature status and persist to disk', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature },
        recovered: false,
        source: 'main',
      });

      await manager.updateFeatureStatus('/project', 'feature-123', 'in_progress');

      expect(atomicWriteJson).toHaveBeenCalled();
      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.status).toBe('in_progress');
      expect(savedFeature.updatedAt).toBeDefined();
    });

    it('should set justFinishedAt when status is waiting_approval', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature },
        recovered: false,
        source: 'main',
      });

      await manager.updateFeatureStatus('/project', 'feature-123', 'waiting_approval');

      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.justFinishedAt).toBeDefined();
    });

    it('should clear justFinishedAt when status is not waiting_approval', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature, justFinishedAt: '2024-01-01T00:00:00Z' },
        recovered: false,
        source: 'main',
      });

      await manager.updateFeatureStatus('/project', 'feature-123', 'in_progress');

      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.justFinishedAt).toBeUndefined();
    });

    it('should create notification for waiting_approval status', async () => {
      const mockNotificationService = { createNotification: vi.fn() };
      (getNotificationService as Mock).mockReturnValue(mockNotificationService);
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature },
        recovered: false,
        source: 'main',
      });

      await manager.updateFeatureStatus('/project', 'feature-123', 'waiting_approval');

      expect(mockNotificationService.createNotification).toHaveBeenCalledWith(
        expect.objectContaining({
          type: 'feature_waiting_approval',
          featureId: 'feature-123',
        })
      );
    });

    it('should create notification for verified status', async () => {
      const mockNotificationService = { createNotification: vi.fn() };
      (getNotificationService as Mock).mockReturnValue(mockNotificationService);
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature },
        recovered: false,
        source: 'main',
      });

      await manager.updateFeatureStatus('/project', 'feature-123', 'verified');

      expect(mockNotificationService.createNotification).toHaveBeenCalledWith(
        expect.objectContaining({
          type: 'feature_verified',
          featureId: 'feature-123',
        })
      );
    });

    it('should sync to app_spec for completed status', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature },
        recovered: false,
        source: 'main',
      });

      await manager.updateFeatureStatus('/project', 'feature-123', 'completed');

      expect(mockFeatureLoader.syncFeatureToAppSpec).toHaveBeenCalledWith(
        '/project',
        expect.objectContaining({ status: 'completed' })
      );
    });

    it('should sync to app_spec for verified status', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature },
        recovered: false,
        source: 'main',
      });

      await manager.updateFeatureStatus('/project', 'feature-123', 'verified');

      expect(mockFeatureLoader.syncFeatureToAppSpec).toHaveBeenCalled();
    });

    it('should not fail if sync to app_spec fails', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature },
        recovered: false,
        source: 'main',
      });
      (mockFeatureLoader.syncFeatureToAppSpec as Mock).mockRejectedValue(new Error('Sync failed'));

      // Should not throw
      await expect(
        manager.updateFeatureStatus('/project', 'feature-123', 'completed')
      ).resolves.not.toThrow();
    });

    it('should handle feature not found gracefully', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: null,
        recovered: true,
        source: 'default',
      });

      // Should not throw
      await expect(
        manager.updateFeatureStatus('/project', 'non-existent', 'in_progress')
      ).resolves.not.toThrow();
      expect(atomicWriteJson).not.toHaveBeenCalled();
    });
  });

  describe('markFeatureInterrupted', () => {
    it('should mark feature as interrupted', async () => {
      (secureFs.readFile as Mock).mockResolvedValue(
        JSON.stringify({ ...mockFeature, status: 'in_progress' })
      );
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature, status: 'in_progress' },
        recovered: false,
        source: 'main',
      });

      await manager.markFeatureInterrupted('/project', 'feature-123', 'server shutdown');

      expect(atomicWriteJson).toHaveBeenCalled();
      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.status).toBe('interrupted');
    });

    it('should preserve pipeline_* statuses', async () => {
      (secureFs.readFile as Mock).mockResolvedValue(
        JSON.stringify({ ...mockFeature, status: 'pipeline_step_1' })
      );

      await manager.markFeatureInterrupted('/project', 'feature-123', 'server shutdown');

      // Should NOT call atomicWriteJson because pipeline status is preserved
      expect(atomicWriteJson).not.toHaveBeenCalled();
    });

    it('should preserve pipeline_complete status', async () => {
      (secureFs.readFile as Mock).mockResolvedValue(
        JSON.stringify({ ...mockFeature, status: 'pipeline_complete' })
      );

      await manager.markFeatureInterrupted('/project', 'feature-123');

      expect(atomicWriteJson).not.toHaveBeenCalled();
    });

    it('should handle feature not found', async () => {
      (secureFs.readFile as Mock).mockRejectedValue(new Error('ENOENT'));
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: null,
        recovered: true,
        source: 'default',
      });

      // Should not throw
      await expect(
        manager.markFeatureInterrupted('/project', 'non-existent')
      ).resolves.not.toThrow();
    });
  });

  describe('resetStuckFeatures', () => {
    it('should reset in_progress features to ready if has approved plan', async () => {
      const stuckFeature: Feature = {
        ...mockFeature,
        status: 'in_progress',
        planSpec: { status: 'approved', version: 1, reviewedByUser: true },
      };

      (secureFs.readdir as Mock).mockResolvedValue([
        { name: 'feature-123', isDirectory: () => true },
      ]);
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: stuckFeature,
        recovered: false,
        source: 'main',
      });

      await manager.resetStuckFeatures('/project');

      expect(atomicWriteJson).toHaveBeenCalled();
      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.status).toBe('ready');
    });

    it('should reset in_progress features to backlog if no approved plan', async () => {
      const stuckFeature: Feature = {
        ...mockFeature,
        status: 'in_progress',
        planSpec: undefined,
      };

      (secureFs.readdir as Mock).mockResolvedValue([
        { name: 'feature-123', isDirectory: () => true },
      ]);
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: stuckFeature,
        recovered: false,
        source: 'main',
      });

      await manager.resetStuckFeatures('/project');

      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.status).toBe('backlog');
    });

    it('should reset generating planSpec status to pending', async () => {
      const stuckFeature: Feature = {
        ...mockFeature,
        status: 'pending',
        planSpec: { status: 'generating', version: 1, reviewedByUser: false },
      };

      (secureFs.readdir as Mock).mockResolvedValue([
        { name: 'feature-123', isDirectory: () => true },
      ]);
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: stuckFeature,
        recovered: false,
        source: 'main',
      });

      await manager.resetStuckFeatures('/project');

      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.planSpec?.status).toBe('pending');
    });

    it('should reset in_progress tasks to pending', async () => {
      const stuckFeature: Feature = {
        ...mockFeature,
        status: 'pending',
        planSpec: {
          status: 'approved',
          version: 1,
          reviewedByUser: true,
          tasks: [
            { id: 'task-1', title: 'Task 1', status: 'completed', description: '' },
            { id: 'task-2', title: 'Task 2', status: 'in_progress', description: '' },
            { id: 'task-3', title: 'Task 3', status: 'pending', description: '' },
          ],
          currentTaskId: 'task-2',
        },
      };

      (secureFs.readdir as Mock).mockResolvedValue([
        { name: 'feature-123', isDirectory: () => true },
      ]);
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: stuckFeature,
        recovered: false,
        source: 'main',
      });

      await manager.resetStuckFeatures('/project');

      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.planSpec?.tasks?.[1].status).toBe('pending');
      expect(savedFeature.planSpec?.currentTaskId).toBeUndefined();
    });

    it('should skip non-directory entries', async () => {
      (secureFs.readdir as Mock).mockResolvedValue([
        { name: 'feature-123', isDirectory: () => true },
        { name: 'some-file.txt', isDirectory: () => false },
      ]);
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: mockFeature,
        recovered: false,
        source: 'main',
      });

      await manager.resetStuckFeatures('/project');

      // Should only process the directory
      expect(readJsonWithRecovery).toHaveBeenCalledTimes(1);
    });

    it('should handle features directory not existing', async () => {
      const error = new Error('ENOENT') as NodeJS.ErrnoException;
      error.code = 'ENOENT';
      (secureFs.readdir as Mock).mockRejectedValue(error);

      // Should not throw
      await expect(manager.resetStuckFeatures('/project')).resolves.not.toThrow();
    });

    it('should not update feature if nothing is stuck', async () => {
      const normalFeature: Feature = {
        ...mockFeature,
        status: 'completed',
        planSpec: { status: 'approved', version: 1, reviewedByUser: true },
      };

      (secureFs.readdir as Mock).mockResolvedValue([
        { name: 'feature-123', isDirectory: () => true },
      ]);
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: normalFeature,
        recovered: false,
        source: 'main',
      });

      await manager.resetStuckFeatures('/project');

      expect(atomicWriteJson).not.toHaveBeenCalled();
    });
  });

  describe('updateFeaturePlanSpec', () => {
    it('should update planSpec with partial updates', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature },
        recovered: false,
        source: 'main',
      });

      await manager.updateFeaturePlanSpec('/project', 'feature-123', { status: 'approved' });

      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.planSpec?.status).toBe('approved');
    });

    it('should initialize planSpec if not exists', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature, planSpec: undefined },
        recovered: false,
        source: 'main',
      });

      await manager.updateFeaturePlanSpec('/project', 'feature-123', { status: 'approved' });

      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.planSpec).toBeDefined();
      expect(savedFeature.planSpec?.version).toBe(1);
    });

    it('should increment version when content changes', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: {
          ...mockFeature,
          planSpec: {
            status: 'pending',
            version: 2,
            content: 'old content',
            reviewedByUser: false,
          },
        },
        recovered: false,
        source: 'main',
      });

      await manager.updateFeaturePlanSpec('/project', 'feature-123', { content: 'new content' });

      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.planSpec?.version).toBe(3);
    });
  });

  describe('saveFeatureSummary', () => {
    it('should save summary and emit event', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature },
        recovered: false,
        source: 'main',
      });

      await manager.saveFeatureSummary('/project', 'feature-123', 'This is the summary');

      // Verify persisted
      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.summary).toBe('This is the summary');

      // Verify event emitted AFTER persistence
      expect(mockEvents.emit).toHaveBeenCalledWith('auto-mode:event', {
        type: 'auto_mode_summary',
        featureId: 'feature-123',
        projectPath: '/project',
        summary: 'This is the summary',
      });
    });

    it('should handle feature not found', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: null,
        recovered: true,
        source: 'default',
      });

      await expect(
        manager.saveFeatureSummary('/project', 'non-existent', 'Summary')
      ).resolves.not.toThrow();
      expect(atomicWriteJson).not.toHaveBeenCalled();
      expect(mockEvents.emit).not.toHaveBeenCalled();
    });
  });

  describe('updateTaskStatus', () => {
    it('should update task status and emit event', async () => {
      const featureWithTasks: Feature = {
        ...mockFeature,
        planSpec: {
          status: 'approved',
          version: 1,
          reviewedByUser: true,
          tasks: [
            { id: 'task-1', title: 'Task 1', status: 'pending', description: '' },
            { id: 'task-2', title: 'Task 2', status: 'pending', description: '' },
          ],
        },
      };

      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: featureWithTasks,
        recovered: false,
        source: 'main',
      });

      await manager.updateTaskStatus('/project', 'feature-123', 'task-1', 'completed');

      // Verify persisted
      const savedFeature = (atomicWriteJson as Mock).mock.calls[0][1] as Feature;
      expect(savedFeature.planSpec?.tasks?.[0].status).toBe('completed');

      // Verify event emitted
      expect(mockEvents.emit).toHaveBeenCalledWith('auto-mode:event', {
        type: 'auto_mode_task_status',
        featureId: 'feature-123',
        projectPath: '/project',
        taskId: 'task-1',
        status: 'completed',
        tasks: expect.any(Array),
      });
    });

    it('should handle task not found', async () => {
      const featureWithTasks: Feature = {
        ...mockFeature,
        planSpec: {
          status: 'approved',
          version: 1,
          reviewedByUser: true,
          tasks: [{ id: 'task-1', title: 'Task 1', status: 'pending', description: '' }],
        },
      };

      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: featureWithTasks,
        recovered: false,
        source: 'main',
      });

      await manager.updateTaskStatus('/project', 'feature-123', 'non-existent-task', 'completed');

      // Should not persist or emit if task not found
      expect(atomicWriteJson).not.toHaveBeenCalled();
      expect(mockEvents.emit).not.toHaveBeenCalled();
    });

    it('should handle feature without tasks', async () => {
      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature },
        recovered: false,
        source: 'main',
      });

      await expect(
        manager.updateTaskStatus('/project', 'feature-123', 'task-1', 'completed')
      ).resolves.not.toThrow();
      expect(atomicWriteJson).not.toHaveBeenCalled();
    });
  });

  describe('persist BEFORE emit ordering', () => {
    it('saveFeatureSummary should persist before emitting event', async () => {
      const callOrder: string[] = [];

      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: { ...mockFeature },
        recovered: false,
        source: 'main',
      });
      (atomicWriteJson as Mock).mockImplementation(async () => {
        callOrder.push('persist');
      });
      (mockEvents.emit as Mock).mockImplementation(() => {
        callOrder.push('emit');
      });

      await manager.saveFeatureSummary('/project', 'feature-123', 'Summary');

      expect(callOrder).toEqual(['persist', 'emit']);
    });

    it('updateTaskStatus should persist before emitting event', async () => {
      const callOrder: string[] = [];

      const featureWithTasks: Feature = {
        ...mockFeature,
        planSpec: {
          status: 'approved',
          version: 1,
          reviewedByUser: true,
          tasks: [{ id: 'task-1', title: 'Task 1', status: 'pending', description: '' }],
        },
      };

      (readJsonWithRecovery as Mock).mockResolvedValue({
        data: featureWithTasks,
        recovered: false,
        source: 'main',
      });
      (atomicWriteJson as Mock).mockImplementation(async () => {
        callOrder.push('persist');
      });
      (mockEvents.emit as Mock).mockImplementation(() => {
        callOrder.push('emit');
      });

      await manager.updateTaskStatus('/project', 'feature-123', 'task-1', 'completed');

      expect(callOrder).toEqual(['persist', 'emit']);
    });
  });
});