Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-01-31 06:42:03 +00:00)
Merge upstream/v0.12.0rc into feature/fedora-rpm-support

Resolved conflict in backlog-plan/common.ts:
- Kept the local (stricter) validation: Array.isArray(parsed?.result?.changes)
- This ensures type safety for the changes array
@@ -79,11 +79,17 @@ import { createIdeationRoutes } from './routes/ideation/index.js';
import { IdeationService } from './services/ideation-service.js';
import { getDevServerService } from './services/dev-server-service.js';
import { eventHookService } from './services/event-hook-service.js';
import { createNotificationsRoutes } from './routes/notifications/index.js';
import { getNotificationService } from './services/notification-service.js';
import { createEventHistoryRoutes } from './routes/event-history/index.js';
import { getEventHistoryService } from './services/event-history-service.js';

// Load environment variables
dotenv.config();

const PORT = parseInt(process.env.PORT || '3008', 10);
const HOST = process.env.HOST || '0.0.0.0';
const HOSTNAME = process.env.HOSTNAME || 'localhost';
const DATA_DIR = process.env.DATA_DIR || './data';
const ENABLE_REQUEST_LOGGING_DEFAULT = process.env.ENABLE_REQUEST_LOGGING !== 'false'; // Default to true

@@ -208,8 +214,15 @@ const ideationService = new IdeationService(events, settingsService, featureLoad
const devServerService = getDevServerService();
devServerService.setEventEmitter(events);

-// Initialize Event Hook Service for custom event triggers
-eventHookService.initialize(events, settingsService);
+// Initialize Notification Service with event emitter for real-time updates
+const notificationService = getNotificationService();
+notificationService.setEventEmitter(events);
+
+// Initialize Event History Service
+const eventHistoryService = getEventHistoryService();
+
+// Initialize Event Hook Service for custom event triggers (with history storage)
+eventHookService.initialize(events, settingsService, eventHistoryService);

// Initialize services
(async () => {

@@ -264,7 +277,7 @@ app.get('/api/health/detailed', createDetailedHandler());
app.use('/api/fs', createFsRoutes(events));
app.use('/api/agent', createAgentRoutes(agentService, events));
app.use('/api/sessions', createSessionsRoutes(agentService));
-app.use('/api/features', createFeaturesRoutes(featureLoader, settingsService));
+app.use('/api/features', createFeaturesRoutes(featureLoader, settingsService, events));
app.use('/api/auto-mode', createAutoModeRoutes(autoModeService));
app.use('/api/enhance-prompt', createEnhancePromptRoutes(settingsService));
app.use('/api/worktree', createWorktreeRoutes(events, settingsService));

@@ -285,6 +298,8 @@ app.use('/api/backlog-plan', createBacklogPlanRoutes(events, settingsService));
app.use('/api/mcp', createMCPRoutes(mcpTestService));
app.use('/api/pipeline', createPipelineRoutes(pipelineService));
app.use('/api/ideation', createIdeationRoutes(events, ideationService, featureLoader));
+app.use('/api/notifications', createNotificationsRoutes(notificationService));
+app.use('/api/event-history', createEventHistoryRoutes(eventHistoryService, settingsService));

// Create HTTP server
const server = createServer(app);

@@ -596,8 +611,8 @@ terminalWss.on('connection', (ws: WebSocket, req: import('http').IncomingMessage
});

// Start server with error handling for port conflicts
-const startServer = (port: number) => {
-  server.listen(port, () => {
+const startServer = (port: number, host: string) => {
+  server.listen(port, host, () => {
    const terminalStatus = isTerminalEnabled()
      ? isTerminalPasswordRequired()
        ? 'enabled (password protected)'

@@ -608,10 +623,11 @@ const startServer = (port: number) => {
╔═══════════════════════════════════════════════════════╗
║ Automaker Backend Server ║
╠═══════════════════════════════════════════════════════╣
-║ HTTP API: http://localhost:${portStr} ║
-║ WebSocket: ws://localhost:${portStr}/api/events ║
-║ Terminal: ws://localhost:${portStr}/api/terminal/ws ║
-║ Health: http://localhost:${portStr}/api/health ║
+║ Listening: ${host}:${port}${' '.repeat(Math.max(0, 34 - host.length - port.toString().length))}║
+║ HTTP API: http://${HOSTNAME}:${portStr} ║
+║ WebSocket: ws://${HOSTNAME}:${portStr}/api/events ║
+║ Terminal: ws://${HOSTNAME}:${portStr}/api/terminal/ws ║
+║ Health: http://${HOSTNAME}:${portStr}/api/health ║
║ Terminal: ${terminalStatus.padEnd(37)}║
╚═══════════════════════════════════════════════════════╝
`);

@@ -645,7 +661,7 @@ const startServer = (port: number) => {
  });
};

-startServer(PORT);
+startServer(PORT, HOST);

// Global error handlers to prevent crashes from uncaught errors
process.on('unhandledRejection', (reason: unknown, _promise: Promise<unknown>) => {

@@ -11,8 +11,12 @@ export { specOutputSchema } from '@automaker/types';

/**
 * Escape special XML characters
 * Handles undefined/null values by converting them to empty strings
 */
-function escapeXml(str: string): string {
+export function escapeXml(str: string | undefined | null): string {
+  if (str == null) {
+    return '';
+  }
  return str
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')

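A minimal sketch of the widened contract (the caller below is hypothetical): escapeXml now degrades to an empty string instead of throwing when a field is absent.

// Hypothetical caller illustrating the new null tolerance.
const feature: { name: string; description?: string } = { name: 'Auth & SSO' };
escapeXml(feature.description); // '' (previously a runtime TypeError on undefined)
escapeXml(feature.name);        // 'Auth &amp; SSO'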
apps/server/src/lib/xml-extractor.ts (new file, 611 lines)
@@ -0,0 +1,611 @@
/**
 * XML Extraction Utilities
 *
 * Robust XML parsing utilities for extracting and updating sections
 * from app_spec.txt XML content. Uses regex-based parsing which is
 * sufficient for our controlled XML structure.
 *
 * Note: If more complex XML parsing is needed in the future, consider
 * using a library like 'fast-xml-parser' or 'xml2js'.
 */

import { createLogger } from '@automaker/utils';
import type { SpecOutput } from '@automaker/types';

const logger = createLogger('XmlExtractor');

/**
 * Represents an implemented feature extracted from XML
 */
export interface ImplementedFeature {
  name: string;
  description: string;
  file_locations?: string[];
}

/**
 * Logger interface for optional custom logging
 */
export interface XmlExtractorLogger {
  debug: (message: string, ...args: unknown[]) => void;
  warn?: (message: string, ...args: unknown[]) => void;
}

/**
 * Options for XML extraction operations
 */
export interface ExtractXmlOptions {
  /** Custom logger (defaults to internal logger) */
  logger?: XmlExtractorLogger;
}

/**
 * Escape special XML characters
 * Handles undefined/null values by converting them to empty strings
 */
export function escapeXml(str: string | undefined | null): string {
  if (str == null) {
    return '';
  }
  return str
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}

/**
 * Unescape XML entities back to regular characters
 */
export function unescapeXml(str: string): string {
  return str
    .replace(/&#39;/g, "'")
    .replace(/&quot;/g, '"')
    .replace(/&gt;/g, '>')
    .replace(/&lt;/g, '<')
    .replace(/&amp;/g, '&');
}

/**
 * Extract the content of a specific XML section
 *
 * @param xmlContent - The full XML content
 * @param tagName - The tag name to extract (e.g., 'implemented_features')
 * @param options - Optional extraction options
 * @returns The content between the tags, or null if not found
 */
export function extractXmlSection(
  xmlContent: string,
  tagName: string,
  options: ExtractXmlOptions = {}
): string | null {
  const log = options.logger || logger;

  const regex = new RegExp(`<${tagName}>([\\s\\S]*?)<\\/${tagName}>`, 'i');
  const match = xmlContent.match(regex);

  if (match) {
    log.debug(`Extracted <${tagName}> section`);
    return match[1];
  }

  log.debug(`Section <${tagName}> not found`);
  return null;
}

/**
 * Extract all values from repeated XML elements
 *
 * @param xmlContent - The XML content to search
 * @param tagName - The tag name to extract values from
 * @param options - Optional extraction options
 * @returns Array of extracted values (unescaped)
 */
export function extractXmlElements(
  xmlContent: string,
  tagName: string,
  options: ExtractXmlOptions = {}
): string[] {
  const log = options.logger || logger;
  const values: string[] = [];

  const regex = new RegExp(`<${tagName}>([\\s\\S]*?)<\\/${tagName}>`, 'g');
  const matches = xmlContent.matchAll(regex);

  for (const match of matches) {
    values.push(unescapeXml(match[1].trim()));
  }

  log.debug(`Extracted ${values.length} <${tagName}> elements`);
  return values;
}
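A small usage sketch of the two extractors above; the spec fragment is made up but follows the tag conventions this file targets.

// Hypothetical input; tag names match the app_spec.txt conventions used here.
const xml = '<technology_stack><technology>React</technology><technology>Vite</technology></technology_stack>';
extractXmlSection(xml, 'technology_stack'); // '<technology>React</technology><technology>Vite</technology>'
extractXmlElements(xml, 'technology');      // ['React', 'Vite']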

/**
 * Extract implemented features from app_spec.txt XML content
 *
 * @param specContent - The full XML content of app_spec.txt
 * @param options - Optional extraction options
 * @returns Array of implemented features with name, description, and optional file_locations
 */
export function extractImplementedFeatures(
  specContent: string,
  options: ExtractXmlOptions = {}
): ImplementedFeature[] {
  const log = options.logger || logger;
  const features: ImplementedFeature[] = [];

  // Match <implemented_features>...</implemented_features> section
  const implementedSection = extractXmlSection(specContent, 'implemented_features', options);

  if (!implementedSection) {
    log.debug('No implemented_features section found');
    return features;
  }

  // Extract individual feature blocks
  const featureRegex = /<feature>([\s\S]*?)<\/feature>/g;
  const featureMatches = implementedSection.matchAll(featureRegex);

  for (const featureMatch of featureMatches) {
    const featureContent = featureMatch[1];

    // Extract name
    const nameMatch = featureContent.match(/<name>([\s\S]*?)<\/name>/);
    const name = nameMatch ? unescapeXml(nameMatch[1].trim()) : '';

    // Extract description
    const descMatch = featureContent.match(/<description>([\s\S]*?)<\/description>/);
    const description = descMatch ? unescapeXml(descMatch[1].trim()) : '';

    // Extract file_locations if present
    const locationsSection = extractXmlSection(featureContent, 'file_locations', options);
    const file_locations = locationsSection
      ? extractXmlElements(locationsSection, 'location', options)
      : undefined;

    if (name) {
      features.push({
        name,
        description,
        ...(file_locations && file_locations.length > 0 ? { file_locations } : {}),
      });
    }
  }

  log.debug(`Extracted ${features.length} implemented features`);
  return features;
}
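For concreteness, a hypothetical app_spec.txt fragment and the structure the parser yields from it:

// Hypothetical spec fragment.
const spec = `<implemented_features>
  <feature>
    <name>Login flow</name>
    <description>Email + password auth</description>
    <file_locations><location>src/auth.ts</location></file_locations>
  </feature>
</implemented_features>`;
extractImplementedFeatures(spec);
// => [{ name: 'Login flow', description: 'Email + password auth', file_locations: ['src/auth.ts'] }]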

/**
 * Extract only the feature names from implemented_features section
 *
 * @param specContent - The full XML content of app_spec.txt
 * @param options - Optional extraction options
 * @returns Array of feature names
 */
export function extractImplementedFeatureNames(
  specContent: string,
  options: ExtractXmlOptions = {}
): string[] {
  const features = extractImplementedFeatures(specContent, options);
  return features.map((f) => f.name);
}

/**
 * Generate XML for a single implemented feature
 *
 * @param feature - The feature to convert to XML
 * @param indent - The base indentation level (default: 2 spaces)
 * @returns XML string for the feature
 */
export function featureToXml(feature: ImplementedFeature, indent: string = '  '): string {
  const i2 = indent.repeat(2);
  const i3 = indent.repeat(3);
  const i4 = indent.repeat(4);

  let xml = `${i2}<feature>
${i3}<name>${escapeXml(feature.name)}</name>
${i3}<description>${escapeXml(feature.description)}</description>`;

  if (feature.file_locations && feature.file_locations.length > 0) {
    xml += `
${i3}<file_locations>
${feature.file_locations.map((loc) => `${i4}<location>${escapeXml(loc)}</location>`).join('\n')}
${i3}</file_locations>`;
  }

  xml += `
${i2}</feature>`;

  return xml;
}

/**
 * Generate XML for an array of implemented features
 *
 * @param features - Array of features to convert to XML
 * @param indent - The base indentation level (default: 2 spaces)
 * @returns XML string for the implemented_features section content
 */
export function featuresToXml(features: ImplementedFeature[], indent: string = '  '): string {
  return features.map((f) => featureToXml(f, indent)).join('\n');
}
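With the default two-space indent, the i2/i3/i4 levels render a feature at nesting depths 2 through 4; a quick trace (output shown as comments):

featureToXml({ name: 'Login flow', description: 'Email + password auth' });
// =>
//     <feature>
//       <name>Login flow</name>
//       <description>Email + password auth</description>
//     </feature>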

/**
 * Update the implemented_features section in XML content
 *
 * @param specContent - The full XML content
 * @param newFeatures - The new features to set
 * @param options - Optional extraction options
 * @returns Updated XML content with the new implemented_features section
 */
export function updateImplementedFeaturesSection(
  specContent: string,
  newFeatures: ImplementedFeature[],
  options: ExtractXmlOptions = {}
): string {
  const log = options.logger || logger;
  const indent = '  ';

  // Generate new section content
  const newSectionContent = featuresToXml(newFeatures, indent);

  // Build the new section
  const newSection = `<implemented_features>
${newSectionContent}
${indent}</implemented_features>`;

  // Check if section exists
  const sectionRegex = /<implemented_features>[\s\S]*?<\/implemented_features>/;

  if (sectionRegex.test(specContent)) {
    log.debug('Replacing existing implemented_features section');
    return specContent.replace(sectionRegex, newSection);
  }

  // If section doesn't exist, try to insert after core_capabilities
  const coreCapabilitiesEnd = '</core_capabilities>';
  const insertIndex = specContent.indexOf(coreCapabilitiesEnd);

  if (insertIndex !== -1) {
    const insertPosition = insertIndex + coreCapabilitiesEnd.length;
    log.debug('Inserting implemented_features after core_capabilities');
    return (
      specContent.slice(0, insertPosition) +
      '\n\n' +
      indent +
      newSection +
      specContent.slice(insertPosition)
    );
  }

  // As a fallback, insert before </project_specification>
  const projectSpecEnd = '</project_specification>';
  const fallbackIndex = specContent.indexOf(projectSpecEnd);

  if (fallbackIndex !== -1) {
    log.debug('Inserting implemented_features before </project_specification>');
    return (
      specContent.slice(0, fallbackIndex) +
      indent +
      newSection +
      '\n' +
      specContent.slice(fallbackIndex)
    );
  }

  log.warn?.('Could not find appropriate insertion point for implemented_features');
  log.debug('Could not find appropriate insertion point for implemented_features');
  return specContent;
}

/**
 * Add a new feature to the implemented_features section
 *
 * @param specContent - The full XML content
 * @param newFeature - The feature to add
 * @param options - Optional extraction options
 * @returns Updated XML content with the new feature added
 */
export function addImplementedFeature(
  specContent: string,
  newFeature: ImplementedFeature,
  options: ExtractXmlOptions = {}
): string {
  const log = options.logger || logger;

  // Extract existing features
  const existingFeatures = extractImplementedFeatures(specContent, options);

  // Check for duplicates by name
  const isDuplicate = existingFeatures.some(
    (f) => f.name.toLowerCase() === newFeature.name.toLowerCase()
  );

  if (isDuplicate) {
    log.debug(`Feature "${newFeature.name}" already exists, skipping`);
    return specContent;
  }

  // Add the new feature
  const updatedFeatures = [...existingFeatures, newFeature];

  log.debug(`Adding feature "${newFeature.name}"`);
  return updateImplementedFeaturesSection(specContent, updatedFeatures, options);
}

/**
 * Remove a feature from the implemented_features section by name
 *
 * @param specContent - The full XML content
 * @param featureName - The name of the feature to remove
 * @param options - Optional extraction options
 * @returns Updated XML content with the feature removed
 */
export function removeImplementedFeature(
  specContent: string,
  featureName: string,
  options: ExtractXmlOptions = {}
): string {
  const log = options.logger || logger;

  // Extract existing features
  const existingFeatures = extractImplementedFeatures(specContent, options);

  // Filter out the feature to remove
  const updatedFeatures = existingFeatures.filter(
    (f) => f.name.toLowerCase() !== featureName.toLowerCase()
  );

  if (updatedFeatures.length === existingFeatures.length) {
    log.debug(`Feature "${featureName}" not found, no changes made`);
    return specContent;
  }

  log.debug(`Removing feature "${featureName}"`);
  return updateImplementedFeaturesSection(specContent, updatedFeatures, options);
}

/**
 * Update an existing feature in the implemented_features section
 *
 * @param specContent - The full XML content
 * @param featureName - The name of the feature to update
 * @param updates - Partial updates to apply to the feature
 * @param options - Optional extraction options
 * @returns Updated XML content with the feature modified
 */
export function updateImplementedFeature(
  specContent: string,
  featureName: string,
  updates: Partial<ImplementedFeature>,
  options: ExtractXmlOptions = {}
): string {
  const log = options.logger || logger;

  // Extract existing features
  const existingFeatures = extractImplementedFeatures(specContent, options);

  // Find and update the feature
  let found = false;
  const updatedFeatures = existingFeatures.map((f) => {
    if (f.name.toLowerCase() === featureName.toLowerCase()) {
      found = true;
      return {
        ...f,
        ...updates,
        // Preserve the original name if not explicitly updated
        name: updates.name ?? f.name,
      };
    }
    return f;
  });

  if (!found) {
    log.debug(`Feature "${featureName}" not found, no changes made`);
    return specContent;
  }

  log.debug(`Updating feature "${featureName}"`);
  return updateImplementedFeaturesSection(specContent, updatedFeatures, options);
}

/**
 * Check if a feature exists in the implemented_features section
 *
 * @param specContent - The full XML content
 * @param featureName - The name of the feature to check
 * @param options - Optional extraction options
 * @returns True if the feature exists
 */
export function hasImplementedFeature(
  specContent: string,
  featureName: string,
  options: ExtractXmlOptions = {}
): boolean {
  const features = extractImplementedFeatures(specContent, options);
  return features.some((f) => f.name.toLowerCase() === featureName.toLowerCase());
}

/**
 * Convert extracted features to SpecOutput.implemented_features format
 *
 * @param features - Array of extracted features
 * @returns Features in SpecOutput format
 */
export function toSpecOutputFeatures(
  features: ImplementedFeature[]
): SpecOutput['implemented_features'] {
  return features.map((f) => ({
    name: f.name,
    description: f.description,
    ...(f.file_locations && f.file_locations.length > 0
      ? { file_locations: f.file_locations }
      : {}),
  }));
}

/**
 * Convert SpecOutput.implemented_features to ImplementedFeature format
 *
 * @param specFeatures - Features from SpecOutput
 * @returns Features in ImplementedFeature format
 */
export function fromSpecOutputFeatures(
  specFeatures: SpecOutput['implemented_features']
): ImplementedFeature[] {
  return specFeatures.map((f) => ({
    name: f.name,
    description: f.description,
    ...(f.file_locations && f.file_locations.length > 0
      ? { file_locations: f.file_locations }
      : {}),
  }));
}

/**
 * Represents a roadmap phase extracted from XML
 */
export interface RoadmapPhase {
  name: string;
  status: string;
  description?: string;
}

/**
 * Extract the technology stack from app_spec.txt XML content
 *
 * @param specContent - The full XML content
 * @param options - Optional extraction options
 * @returns Array of technology names
 */
export function extractTechnologyStack(
  specContent: string,
  options: ExtractXmlOptions = {}
): string[] {
  const log = options.logger || logger;

  const techSection = extractXmlSection(specContent, 'technology_stack', options);
  if (!techSection) {
    log.debug('No technology_stack section found');
    return [];
  }

  const technologies = extractXmlElements(techSection, 'technology', options);
  log.debug(`Extracted ${technologies.length} technologies`);
  return technologies;
}

/**
 * Update the technology_stack section in XML content
 *
 * @param specContent - The full XML content
 * @param technologies - The new technology list
 * @param options - Optional extraction options
 * @returns Updated XML content
 */
export function updateTechnologyStack(
  specContent: string,
  technologies: string[],
  options: ExtractXmlOptions = {}
): string {
  const log = options.logger || logger;
  const indent = '  ';
  const i2 = indent.repeat(2);

  // Generate new section content
  const techXml = technologies
    .map((t) => `${i2}<technology>${escapeXml(t)}</technology>`)
    .join('\n');
  const newSection = `<technology_stack>\n${techXml}\n${indent}</technology_stack>`;

  // Check if section exists
  const sectionRegex = /<technology_stack>[\s\S]*?<\/technology_stack>/;

  if (sectionRegex.test(specContent)) {
    log.debug('Replacing existing technology_stack section');
    return specContent.replace(sectionRegex, newSection);
  }

  log.debug('No technology_stack section found to update');
  return specContent;
}

/**
 * Extract roadmap phases from app_spec.txt XML content
 *
 * @param specContent - The full XML content
 * @param options - Optional extraction options
 * @returns Array of roadmap phases
 */
export function extractRoadmapPhases(
  specContent: string,
  options: ExtractXmlOptions = {}
): RoadmapPhase[] {
  const log = options.logger || logger;
  const phases: RoadmapPhase[] = [];

  const roadmapSection = extractXmlSection(specContent, 'implementation_roadmap', options);
  if (!roadmapSection) {
    log.debug('No implementation_roadmap section found');
    return phases;
  }

  // Extract individual phase blocks
  const phaseRegex = /<phase>([\s\S]*?)<\/phase>/g;
  const phaseMatches = roadmapSection.matchAll(phaseRegex);

  for (const phaseMatch of phaseMatches) {
    const phaseContent = phaseMatch[1];

    const nameMatch = phaseContent.match(/<name>([\s\S]*?)<\/name>/);
    const name = nameMatch ? unescapeXml(nameMatch[1].trim()) : '';

    const statusMatch = phaseContent.match(/<status>([\s\S]*?)<\/status>/);
    const status = statusMatch ? unescapeXml(statusMatch[1].trim()) : 'pending';

    const descMatch = phaseContent.match(/<description>([\s\S]*?)<\/description>/);
    const description = descMatch ? unescapeXml(descMatch[1].trim()) : undefined;

    if (name) {
      phases.push({ name, status, description });
    }
  }

  log.debug(`Extracted ${phases.length} roadmap phases`);
  return phases;
}

/**
 * Update a roadmap phase status in XML content
 *
 * @param specContent - The full XML content
 * @param phaseName - The name of the phase to update
 * @param newStatus - The new status value
 * @param options - Optional extraction options
 * @returns Updated XML content
 */
export function updateRoadmapPhaseStatus(
  specContent: string,
  phaseName: string,
  newStatus: string,
  options: ExtractXmlOptions = {}
): string {
  const log = options.logger || logger;

  // Find the phase and update its status
  // Match the phase block containing the specific name
  const phaseRegex = new RegExp(
    `(<phase>\\s*<name>\\s*${escapeXml(phaseName)}\\s*<\\/name>\\s*<status>)[\\s\\S]*?(<\\/status>)`,
    'i'
  );

  if (phaseRegex.test(specContent)) {
    log.debug(`Updating phase "${phaseName}" status to "${newStatus}"`);
    return specContent.replace(phaseRegex, `$1${escapeXml(newStatus)}$2`);
  }

  log.debug(`Phase "${phaseName}" not found`);
  return specContent;
}
@@ -35,6 +35,7 @@ import {
  type SubprocessOptions,
  type WslCliResult,
} from '@automaker/platform';
+import { calculateReasoningTimeout } from '@automaker/types';
import { createLogger, isAbortError } from '@automaker/utils';
import { execSync } from 'child_process';
import * as fs from 'fs';

@@ -107,6 +108,15 @@ export interface CliDetectionResult {
// Create logger for CLI operations
const cliLogger = createLogger('CliProvider');

+/**
+ * Base timeout for CLI operations in milliseconds.
+ * CLI tools have longer startup and processing times compared to direct API calls,
+ * so we use a higher base timeout (120s) than the default provider timeout (30s).
+ * This is multiplied by reasoning effort multipliers when applicable.
+ * @see calculateReasoningTimeout from @automaker/types
+ */
+const CLI_BASE_TIMEOUT_MS = 120000;
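How the dynamic timeout behaves depends on the effort multipliers defined in @automaker/types; the values below are illustrative only, not the real table:

// Illustrative only - the actual multipliers live in @automaker/types.
// calculateReasoningTimeout(effort, base) scales the base "no output" timeout,
// e.g. something like: undefined -> 120s, 'high' -> 240s, 'xhigh' -> 480s.
const t = calculateReasoningTimeout('xhigh', CLI_BASE_TIMEOUT_MS);
// t is then passed as the subprocess timeout instead of the old hard-coded 120000.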

/**
 * Abstract base class for CLI-based providers
 *
@@ -450,6 +460,10 @@ export abstract class CliProvider extends BaseProvider {
      }
    }

+    // Calculate dynamic timeout based on reasoning effort.
+    // This addresses GitHub issue #530 where reasoning models with 'xhigh' effort would timeout.
+    const timeout = calculateReasoningTimeout(options.reasoningEffort, CLI_BASE_TIMEOUT_MS);
+
    // WSL strategy
    if (this.useWsl && this.wslCliPath) {
      const wslCwd = windowsToWslPath(cwd);

@@ -473,7 +487,7 @@ export abstract class CliProvider extends BaseProvider {
        cwd, // Windows cwd for spawn
        env: filteredEnv,
        abortController: options.abortController,
-        timeout: 120000, // CLI operations may take longer
+        timeout,
      };
    }

@@ -488,7 +502,7 @@ export abstract class CliProvider extends BaseProvider {
        cwd,
        env: filteredEnv,
        abortController: options.abortController,
-        timeout: 120000,
+        timeout,
      };
    }

@@ -501,7 +515,7 @@ export abstract class CliProvider extends BaseProvider {
        cwd,
        env: filteredEnv,
        abortController: options.abortController,
-        timeout: 120000,
+        timeout,
      };
    }

@@ -33,6 +33,8 @@ import {
  CODEX_MODEL_MAP,
  supportsReasoningEffort,
  validateBareModelId,
+  calculateReasoningTimeout,
+  DEFAULT_TIMEOUT_MS,
  type CodexApprovalPolicy,
  type CodexSandboxMode,
  type CodexAuthStatus,

@@ -91,7 +93,14 @@ const CODEX_ITEM_TYPES = {
const SYSTEM_PROMPT_LABEL = 'System instructions';
const HISTORY_HEADER = 'Current request:\n';
const TEXT_ENCODING = 'utf-8';
-const DEFAULT_TIMEOUT_MS = 30000;
+/**
+ * Default timeout for Codex CLI operations in milliseconds.
+ * This is the "no output" timeout - if the CLI doesn't produce any JSONL output
+ * for this duration, the process is killed. For reasoning models with high
+ * reasoning effort, this timeout is dynamically extended via calculateReasoningTimeout().
+ * @see calculateReasoningTimeout from @automaker/types
+ */
+const CODEX_CLI_TIMEOUT_MS = DEFAULT_TIMEOUT_MS;
const CONTEXT_WINDOW_256K = 256000;
const MAX_OUTPUT_32K = 32000;
const MAX_OUTPUT_16K = 16000;

@@ -814,13 +823,19 @@ export class CodexProvider extends BaseProvider {
      envOverrides[OPENAI_API_KEY_ENV] = executionPlan.openAiApiKey;
    }

+    // Calculate dynamic timeout based on reasoning effort.
+    // Higher reasoning effort (e.g., 'xhigh' for "xtra thinking" mode) requires more time
+    // for the model to generate reasoning tokens before producing output.
+    // This fixes GitHub issue #530 where features would get stuck with reasoning models.
+    const timeout = calculateReasoningTimeout(options.reasoningEffort, CODEX_CLI_TIMEOUT_MS);
+
    const stream = spawnJSONLProcess({
      command: commandPath,
      args,
      cwd: options.cwd,
      env: envOverrides,
      abortController: options.abortController,
-      timeout: DEFAULT_TIMEOUT_MS,
+      timeout,
      stdinData: promptText, // Pass prompt via stdin
    });

@@ -6,8 +6,17 @@ import { createLogger } from '@automaker/utils';

const logger = createLogger('SpecRegeneration');

+// Types for running generation
+export type GenerationType = 'spec_regeneration' | 'feature_generation' | 'sync';
+
+interface RunningGeneration {
+  isRunning: boolean;
+  type: GenerationType;
+  startedAt: string;
+}
+
// Shared state for tracking generation status - scoped by project path
-const runningProjects = new Map<string, boolean>();
+const runningProjects = new Map<string, RunningGeneration>();
const abortControllers = new Map<string, AbortController>();

/**
@@ -17,16 +26,21 @@ export function getSpecRegenerationStatus(projectPath?: string): {
  isRunning: boolean;
  currentAbortController: AbortController | null;
  projectPath?: string;
+  type?: GenerationType;
+  startedAt?: string;
} {
  if (projectPath) {
+    const generation = runningProjects.get(projectPath);
    return {
-      isRunning: runningProjects.get(projectPath) || false,
+      isRunning: generation?.isRunning || false,
      currentAbortController: abortControllers.get(projectPath) || null,
      projectPath,
+      type: generation?.type,
+      startedAt: generation?.startedAt,
    };
  }
  // Fallback: check if any project is running (for backward compatibility)
-  const isAnyRunning = Array.from(runningProjects.values()).some((running) => running);
+  const isAnyRunning = Array.from(runningProjects.values()).some((g) => g.isRunning);
  return { isRunning: isAnyRunning, currentAbortController: null };
}

@@ -46,10 +60,15 @@ export function getRunningProjectPath(): string | null {
export function setRunningState(
  projectPath: string,
  running: boolean,
-  controller: AbortController | null = null
+  controller: AbortController | null = null,
+  type: GenerationType = 'spec_regeneration'
): void {
  if (running) {
-    runningProjects.set(projectPath, true);
+    runningProjects.set(projectPath, {
+      isRunning: true,
+      type,
+      startedAt: new Date().toISOString(),
+    });
    if (controller) {
      abortControllers.set(projectPath, controller);
    }
@@ -59,6 +78,33 @@ export function setRunningState(
  }
}

+/**
+ * Get all running spec/feature generations for the running agents view
+ */
+export function getAllRunningGenerations(): Array<{
+  projectPath: string;
+  type: GenerationType;
+  startedAt: string;
+}> {
+  const results: Array<{
+    projectPath: string;
+    type: GenerationType;
+    startedAt: string;
+  }> = [];
+
+  for (const [projectPath, generation] of runningProjects.entries()) {
+    if (generation.isRunning) {
+      results.push({
+        projectPath,
+        type: generation.type,
+        startedAt: generation.startedAt,
+      });
+    }
+  }
+
+  return results;
+}
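A sketch of how the richer state is written and read (the project path and timestamp are hypothetical):

setRunningState('/work/my-app', true, new AbortController(), 'sync');
getSpecRegenerationStatus('/work/my-app');
// => { isRunning: true, currentAbortController: AbortController, projectPath: '/work/my-app',
//      type: 'sync', startedAt: '2026-01-31T06:42:03.000Z' }
getAllRunningGenerations();
// => [{ projectPath: '/work/my-app', type: 'sync', startedAt: '2026-01-31T06:42:03.000Z' }]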

/**
 * Helper to log authentication status
 */
@@ -15,6 +15,7 @@ import { parseAndCreateFeatures } from './parse-and-create-features.js';
import { getAppSpecPath } from '@automaker/platform';
import type { SettingsService } from '../../services/settings-service.js';
import { getAutoLoadClaudeMdSetting, getPromptCustomization } from '../../lib/settings-helpers.js';
+import { FeatureLoader } from '../../services/feature-loader.js';

const logger = createLogger('SpecRegeneration');

@@ -56,13 +57,45 @@ export async function generateFeaturesFromSpec(
  // Get customized prompts from settings
  const prompts = await getPromptCustomization(settingsService, '[FeatureGeneration]');

+  // Load existing features to prevent duplicates
+  const featureLoader = new FeatureLoader();
+  const existingFeatures = await featureLoader.getAll(projectPath);
+
+  logger.info(`Found ${existingFeatures.length} existing features to exclude from generation`);
+
+  // Build existing features context for the prompt
+  let existingFeaturesContext = '';
+  if (existingFeatures.length > 0) {
+    const featuresList = existingFeatures
+      .map(
+        (f) =>
+          `- "${f.title}" (ID: ${f.id}): ${f.description?.substring(0, 100) || 'No description'}`
+      )
+      .join('\n');
+    existingFeaturesContext = `
+
+## EXISTING FEATURES (DO NOT REGENERATE THESE)
+
+The following ${existingFeatures.length} features already exist in the project. You MUST NOT generate features that duplicate or overlap with these:
+
+${featuresList}
+
+CRITICAL INSTRUCTIONS:
+- DO NOT generate any features with the same or similar titles as the existing features listed above
+- DO NOT generate features that cover the same functionality as existing features
+- ONLY generate NEW features that are not yet in the system
+- If a feature from the roadmap already exists, skip it entirely
+- Generate unique feature IDs that do not conflict with existing IDs: ${existingFeatures.map((f) => f.id).join(', ')}
+`;
+  }
+
  const prompt = `Based on this project specification:

${spec}

+${existingFeaturesContext}
${prompts.appSpec.generateFeaturesFromSpecPrompt}

-Generate ${featureCount} features that build on each other logically.`;
+Generate ${featureCount} NEW features that build on each other logically. Remember: ONLY generate features that DO NOT already exist.`;

  logger.info('========== PROMPT BEING SENT ==========');
  logger.info(`Prompt length: ${prompt.length} chars`);

@@ -201,19 +201,33 @@ Your entire response should be valid JSON starting with { and ending with }. No
    xmlContent = responseText.substring(xmlStart, xmlEnd + '</project_specification>'.length);
    logger.info(`Extracted XML content: ${xmlContent.length} chars (from position ${xmlStart})`);
  } else {
-    // No valid XML structure found in the response text
-    // This happens when structured output was expected but not received, and the agent
-    // output conversational text instead of XML (e.g., "The project directory appears to be empty...")
-    // We should NOT save this conversational text as it's not a valid spec
-    logger.error('❌ Response does not contain valid <project_specification> XML structure');
-    logger.error(
-      'This typically happens when structured output failed and the agent produced conversational text instead of XML'
-    );
-    throw new Error(
-      'Failed to generate spec: No valid XML structure found in response. ' +
-        'The response contained conversational text but no <project_specification> tags. ' +
-        'Please try again.'
-    );
+    // No XML found, try JSON extraction
+    logger.warn('⚠️ No XML tags found, attempting JSON extraction...');
+    const extractedJson = extractJson<SpecOutput>(responseText, { logger });
+
+    if (
+      extractedJson &&
+      typeof extractedJson.project_name === 'string' &&
+      typeof extractedJson.overview === 'string' &&
+      Array.isArray(extractedJson.technology_stack) &&
+      Array.isArray(extractedJson.core_capabilities) &&
+      Array.isArray(extractedJson.implemented_features)
+    ) {
+      logger.info('✅ Successfully extracted JSON from response text');
+      xmlContent = specToXml(extractedJson);
+      logger.info(`✅ Converted extracted JSON to XML: ${xmlContent.length} chars`);
+    } else {
+      // Neither XML nor valid JSON found
+      logger.error('❌ Response does not contain valid XML or JSON structure');
+      logger.error(
+        'This typically happens when structured output failed and the agent produced conversational text instead of structured output'
+      );
+      throw new Error(
+        'Failed to generate spec: No valid XML or JSON structure found in response. ' +
+          'The response contained conversational text but no <project_specification> tags or valid JSON. ' +
+          'Please try again.'
+      );
+    }
  }
}

@@ -7,6 +7,7 @@ import type { EventEmitter } from '../../lib/events.js';
import { createCreateHandler } from './routes/create.js';
import { createGenerateHandler } from './routes/generate.js';
import { createGenerateFeaturesHandler } from './routes/generate-features.js';
+import { createSyncHandler } from './routes/sync.js';
import { createStopHandler } from './routes/stop.js';
import { createStatusHandler } from './routes/status.js';
import type { SettingsService } from '../../services/settings-service.js';

@@ -20,6 +21,7 @@ export function createSpecRegenerationRoutes(
  router.post('/create', createCreateHandler(events));
  router.post('/generate', createGenerateHandler(events, settingsService));
  router.post('/generate-features', createGenerateFeaturesHandler(events, settingsService));
+  router.post('/sync', createSyncHandler(events, settingsService));
  router.post('/stop', createStopHandler());
  router.get('/status', createStatusHandler());

@@ -5,9 +5,10 @@
import path from 'path';
import * as secureFs from '../../lib/secure-fs.js';
import type { EventEmitter } from '../../lib/events.js';
-import { createLogger } from '@automaker/utils';
+import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
import { getFeaturesDir } from '@automaker/platform';
import { extractJsonWithArray } from '../../lib/json-extractor.js';
+import { getNotificationService } from '../../services/notification-service.js';

const logger = createLogger('SpecRegeneration');

@@ -73,10 +74,10 @@ export async function parseAndCreateFeatures(
      updatedAt: new Date().toISOString(),
    };

-    await secureFs.writeFile(
-      path.join(featureDir, 'feature.json'),
-      JSON.stringify(featureData, null, 2)
-    );
+    // Use atomic write with backup support for crash protection
+    await atomicWriteJson(path.join(featureDir, 'feature.json'), featureData, {
+      backupCount: DEFAULT_BACKUP_COUNT,
+    });

    createdFeatures.push({ id: feature.id, title: feature.title });
  }
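A sketch of the pattern such a helper usually implements (assumed here, not the actual @automaker/utils code): serialize to a temp file, then rename over the target so a crash mid-write never leaves a half-written feature.json.

// Assumed shape only; atomicWriteJson in @automaker/utils also rotates backups
// per the backupCount option, which this sketch omits.
import { promises as fs } from 'fs';
async function atomicWriteJsonSketch(target: string, data: unknown): Promise<void> {
  const tmp = `${target}.tmp`;
  await fs.writeFile(tmp, JSON.stringify(data, null, 2), 'utf-8');
  await fs.rename(tmp, target); // rename is atomic on POSIX filesystems within one volume
}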
@@ -88,6 +89,15 @@ export async function parseAndCreateFeatures(
      message: `Spec regeneration complete! Created ${createdFeatures.length} features.`,
      projectPath: projectPath,
    });

+    // Create notification for spec generation completion
+    const notificationService = getNotificationService();
+    await notificationService.createNotification({
+      type: 'spec_regeneration_complete',
+      title: 'Spec Generation Complete',
+      message: `Created ${createdFeatures.length} features from the project specification.`,
+      projectPath: projectPath,
+    });
  } catch (error) {
    logger.error('❌ parseAndCreateFeatures() failed:');
    logger.error('Error:', error);

@@ -50,7 +50,7 @@ export function createGenerateFeaturesHandler(
    logAuthStatus('Before starting feature generation');

    const abortController = new AbortController();
-    setRunningState(projectPath, true, abortController);
+    setRunningState(projectPath, true, abortController, 'feature_generation');
    logger.info('Starting background feature generation task...');

    generateFeaturesFromSpec(projectPath, events, abortController, maxFeatures, settingsService)

apps/server/src/routes/app-spec/routes/sync.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
/**
 * POST /sync endpoint - Sync spec with codebase and features
 */

import type { Request, Response } from 'express';
import type { EventEmitter } from '../../../lib/events.js';
import { createLogger } from '@automaker/utils';
import {
  getSpecRegenerationStatus,
  setRunningState,
  logAuthStatus,
  logError,
  getErrorMessage,
} from '../common.js';
import { syncSpec } from '../sync-spec.js';
import type { SettingsService } from '../../../services/settings-service.js';

const logger = createLogger('SpecSync');

export function createSyncHandler(events: EventEmitter, settingsService?: SettingsService) {
  return async (req: Request, res: Response): Promise<void> => {
    logger.info('========== /sync endpoint called ==========');
    logger.debug('Request body:', JSON.stringify(req.body, null, 2));

    try {
      const { projectPath } = req.body as {
        projectPath: string;
      };

      logger.debug('projectPath:', projectPath);

      if (!projectPath) {
        logger.error('Missing projectPath parameter');
        res.status(400).json({ success: false, error: 'projectPath required' });
        return;
      }

      const { isRunning } = getSpecRegenerationStatus(projectPath);
      if (isRunning) {
        logger.warn('Generation/sync already running for project:', projectPath);
        res.json({ success: false, error: 'Operation already running for this project' });
        return;
      }

      logAuthStatus('Before starting spec sync');

      const abortController = new AbortController();
      setRunningState(projectPath, true, abortController, 'sync');
      logger.info('Starting background spec sync task...');

      syncSpec(projectPath, events, abortController, settingsService)
        .then((result) => {
          logger.info('Spec sync completed successfully');
          logger.info('Result:', JSON.stringify(result, null, 2));
        })
        .catch((error) => {
          logError(error, 'Spec sync failed with error');
          events.emit('spec-regeneration:event', {
            type: 'spec_regeneration_error',
            error: getErrorMessage(error),
            projectPath,
          });
        })
        .finally(() => {
          logger.info('Spec sync task finished (success or error)');
          setRunningState(projectPath, false, null);
        });

      logger.info('Returning success response (sync running in background)');
      res.json({ success: true });
    } catch (error) {
      logError(error, 'Sync route handler failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}
apps/server/src/routes/app-spec/sync-spec.ts (new file, 307 lines)
@@ -0,0 +1,307 @@
/**
|
||||
* Sync spec with current codebase and feature state
|
||||
*
|
||||
* Updates the spec file based on:
|
||||
* - Completed Automaker features
|
||||
* - Code analysis for tech stack and implementations
|
||||
* - Roadmap phase status updates
|
||||
*/
|
||||
|
||||
import * as secureFs from '../../lib/secure-fs.js';
|
||||
import type { EventEmitter } from '../../lib/events.js';
|
||||
import { createLogger } from '@automaker/utils';
|
||||
import { DEFAULT_PHASE_MODELS } from '@automaker/types';
|
||||
import { resolvePhaseModel } from '@automaker/model-resolver';
|
||||
import { streamingQuery } from '../../providers/simple-query-service.js';
|
||||
import { getAppSpecPath } from '@automaker/platform';
|
||||
import type { SettingsService } from '../../services/settings-service.js';
|
||||
import { getAutoLoadClaudeMdSetting } from '../../lib/settings-helpers.js';
|
||||
import { FeatureLoader } from '../../services/feature-loader.js';
|
||||
import {
|
||||
extractImplementedFeatures,
|
||||
extractTechnologyStack,
|
||||
extractRoadmapPhases,
|
||||
updateImplementedFeaturesSection,
|
||||
updateTechnologyStack,
|
||||
updateRoadmapPhaseStatus,
|
||||
type ImplementedFeature,
|
||||
type RoadmapPhase,
|
||||
} from '../../lib/xml-extractor.js';
|
||||
import { getNotificationService } from '../../services/notification-service.js';
|
||||
|
||||
const logger = createLogger('SpecSync');
|
||||
|
||||
/**
|
||||
* Result of a sync operation
|
||||
*/
|
||||
export interface SyncResult {
|
||||
techStackUpdates: {
|
||||
added: string[];
|
||||
removed: string[];
|
||||
};
|
||||
implementedFeaturesUpdates: {
|
||||
addedFromFeatures: string[];
|
||||
removed: string[];
|
||||
};
|
||||
roadmapUpdates: Array<{ phaseName: string; newStatus: string }>;
|
||||
summary: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync the spec with current codebase and feature state
|
||||
*/
|
||||
export async function syncSpec(
|
||||
projectPath: string,
|
||||
events: EventEmitter,
|
||||
abortController: AbortController,
|
||||
settingsService?: SettingsService
|
||||
): Promise<SyncResult> {
|
||||
logger.info('========== syncSpec() started ==========');
|
||||
logger.info('projectPath:', projectPath);
|
||||
|
||||
const result: SyncResult = {
|
||||
techStackUpdates: { added: [], removed: [] },
|
||||
implementedFeaturesUpdates: { addedFromFeatures: [], removed: [] },
|
||||
roadmapUpdates: [],
|
||||
summary: '',
|
||||
};
|
||||
|
||||
// Read existing spec
|
||||
const specPath = getAppSpecPath(projectPath);
|
||||
let specContent: string;
|
||||
|
||||
try {
|
||||
specContent = (await secureFs.readFile(specPath, 'utf-8')) as string;
|
||||
logger.info(`Spec loaded successfully (${specContent.length} chars)`);
|
||||
} catch (readError) {
|
||||
logger.error('Failed to read spec file:', readError);
|
||||
events.emit('spec-regeneration:event', {
|
||||
type: 'spec_regeneration_error',
|
||||
error: 'No project spec found. Create or regenerate spec first.',
|
||||
projectPath,
|
||||
});
|
||||
throw new Error('No project spec found');
|
||||
}
|
||||
|
||||
events.emit('spec-regeneration:event', {
|
||||
type: 'spec_regeneration_progress',
|
||||
content: '[Phase: sync] Starting spec sync...\n',
|
||||
projectPath,
|
||||
});
|
||||
|
||||
// Extract current state from spec
|
||||
const currentImplementedFeatures = extractImplementedFeatures(specContent);
|
||||
const currentTechStack = extractTechnologyStack(specContent);
|
||||
const currentRoadmapPhases = extractRoadmapPhases(specContent);
|
||||
|
||||
logger.info(`Current spec has ${currentImplementedFeatures.length} implemented features`);
|
||||
logger.info(`Current spec has ${currentTechStack.length} technologies`);
|
||||
logger.info(`Current spec has ${currentRoadmapPhases.length} roadmap phases`);
|
||||
|
||||
// Load completed Automaker features
|
||||
const featureLoader = new FeatureLoader();
|
||||
const allFeatures = await featureLoader.getAll(projectPath);
|
||||
const completedFeatures = allFeatures.filter(
|
||||
(f) => f.status === 'completed' || f.status === 'verified'
|
||||
);
|
||||
|
||||
logger.info(`Found ${completedFeatures.length} completed/verified features in Automaker`);
|
||||
|
||||
events.emit('spec-regeneration:event', {
|
||||
type: 'spec_regeneration_progress',
|
||||
content: `Found ${completedFeatures.length} completed features to sync...\n`,
|
||||
projectPath,
|
||||
});
|
||||
|
||||
// Build new implemented features list from completed Automaker features
|
||||
const newImplementedFeatures: ImplementedFeature[] = [];
|
||||
const existingNames = new Set(currentImplementedFeatures.map((f) => f.name.toLowerCase()));
|
||||
|
||||
for (const feature of completedFeatures) {
|
||||
const name = feature.title || `Feature: ${feature.id}`;
|
||||
if (!existingNames.has(name.toLowerCase())) {
|
||||
newImplementedFeatures.push({
|
||||
name,
|
||||
description: feature.description || '',
|
||||
});
|
||||
result.implementedFeaturesUpdates.addedFromFeatures.push(name);
|
||||
}
|
||||
}
|
||||
|
||||
// Merge: keep existing + add new from completed features
|
||||
const mergedFeatures = [...currentImplementedFeatures, ...newImplementedFeatures];
|
||||
|
||||
// Update spec with merged features
|
||||
if (result.implementedFeaturesUpdates.addedFromFeatures.length > 0) {
|
||||
specContent = updateImplementedFeaturesSection(specContent, mergedFeatures);
|
||||
logger.info(
|
||||
`Added ${result.implementedFeaturesUpdates.addedFromFeatures.length} features to spec`
|
||||
);
|
||||
}
|
||||
|
||||
// Analyze codebase for tech stack updates using AI
|
||||
events.emit('spec-regeneration:event', {
|
||||
type: 'spec_regeneration_progress',
|
||||
content: 'Analyzing codebase for technology updates...\n',
|
||||
projectPath,
|
||||
});
|
||||
|
||||
const autoLoadClaudeMd = await getAutoLoadClaudeMdSetting(
|
||||
projectPath,
|
||||
settingsService,
|
||||
'[SpecSync]'
|
||||
);
|
||||
|
||||
const settings = await settingsService?.getGlobalSettings();
|
||||
const phaseModelEntry =
|
||||
settings?.phaseModels?.specGenerationModel || DEFAULT_PHASE_MODELS.specGenerationModel;
|
||||
const { model, thinkingLevel } = resolvePhaseModel(phaseModelEntry);
|
||||
|
||||
// Use AI to analyze tech stack
|
||||
const techAnalysisPrompt = `Analyze this project and return ONLY a JSON object with the current technology stack.
|
||||
|
||||
Current known technologies: ${currentTechStack.join(', ')}
|
||||
|
||||
Look at package.json, config files, and source code to identify:
|
||||
- Frameworks (React, Vue, Express, etc.)
|
||||
- Languages (TypeScript, JavaScript, Python, etc.)
|
||||
- Build tools (Vite, Webpack, etc.)
|
||||
- Databases (PostgreSQL, MongoDB, etc.)
- Key libraries and tools

Return ONLY this JSON format, no other text:
{
  "technologies": ["Technology 1", "Technology 2", ...]
}`;

try {
  const techResult = await streamingQuery({
    prompt: techAnalysisPrompt,
    model,
    cwd: projectPath,
    maxTurns: 10,
    allowedTools: ['Read', 'Glob', 'Grep'],
    abortController,
    thinkingLevel,
    readOnly: true,
    settingSources: autoLoadClaudeMd ? ['user', 'project', 'local'] : undefined,
    onText: (text) => {
      logger.debug(`Tech analysis text: ${text.substring(0, 100)}`);
    },
  });

  // Parse tech stack from response
  const jsonMatch = techResult.text.match(/\{[\s\S]*"technologies"[\s\S]*\}/);
  if (jsonMatch) {
    const parsed = JSON.parse(jsonMatch[0]);
    if (Array.isArray(parsed.technologies)) {
      const newTechStack = parsed.technologies as string[];

      // Calculate differences
      const currentSet = new Set(currentTechStack.map((t) => t.toLowerCase()));
      const newSet = new Set(newTechStack.map((t) => t.toLowerCase()));

      for (const tech of newTechStack) {
        if (!currentSet.has(tech.toLowerCase())) {
          result.techStackUpdates.added.push(tech);
        }
      }

      for (const tech of currentTechStack) {
        if (!newSet.has(tech.toLowerCase())) {
          result.techStackUpdates.removed.push(tech);
        }
      }

      // Update spec with new tech stack if there are changes
      if (
        result.techStackUpdates.added.length > 0 ||
        result.techStackUpdates.removed.length > 0
      ) {
        specContent = updateTechnologyStack(specContent, newTechStack);
        logger.info(
          `Updated tech stack: +${result.techStackUpdates.added.length}, -${result.techStackUpdates.removed.length}`
        );
      }
    }
  }
} catch (error) {
  logger.warn('Failed to analyze tech stack:', error);
  // Continue with other sync operations
}

// Update roadmap phase statuses based on completed features
events.emit('spec-regeneration:event', {
  type: 'spec_regeneration_progress',
  content: 'Checking roadmap phase statuses...\n',
  projectPath,
});

// For each phase, check if all its features are completed.
// This is a heuristic - we check if the phase name appears in any feature titles/descriptions.
for (const phase of currentRoadmapPhases) {
  if (phase.status === 'completed') continue; // Already completed

  // Check if this phase should be marked as completed.
  // A phase is considered complete if we have completed features that mention it.
  const phaseNameLower = phase.name.toLowerCase();
  const relatedCompletedFeatures = completedFeatures.filter(
    (f) =>
      f.title?.toLowerCase().includes(phaseNameLower) ||
      f.description?.toLowerCase().includes(phaseNameLower) ||
      f.category?.toLowerCase().includes(phaseNameLower)
  );

  // If we have related completed features and the phase is still pending/in_progress,
  // update it to in_progress or completed based on feature count
  if (relatedCompletedFeatures.length > 0 && phase.status !== 'completed') {
    const newStatus = 'in_progress';
    specContent = updateRoadmapPhaseStatus(specContent, phase.name, newStatus);
    result.roadmapUpdates.push({ phaseName: phase.name, newStatus });
    logger.info(`Updated phase "${phase.name}" to ${newStatus}`);
  }
}

// Save updated spec
await secureFs.writeFile(specPath, specContent, 'utf-8');
logger.info('Spec saved successfully');

// Build summary
const summaryParts: string[] = [];
if (result.implementedFeaturesUpdates.addedFromFeatures.length > 0) {
  summaryParts.push(
    `Added ${result.implementedFeaturesUpdates.addedFromFeatures.length} implemented features`
  );
}
if (result.techStackUpdates.added.length > 0) {
  summaryParts.push(`Added ${result.techStackUpdates.added.length} technologies`);
}
if (result.techStackUpdates.removed.length > 0) {
  summaryParts.push(`Removed ${result.techStackUpdates.removed.length} technologies`);
}
if (result.roadmapUpdates.length > 0) {
  summaryParts.push(`Updated ${result.roadmapUpdates.length} roadmap phases`);
}

result.summary = summaryParts.length > 0 ? summaryParts.join(', ') : 'Spec is already up to date';

// Create notification
const notificationService = getNotificationService();
await notificationService.createNotification({
  type: 'spec_regeneration_complete',
  title: 'Spec Sync Complete',
  message: result.summary,
  projectPath,
});

events.emit('spec-regeneration:event', {
  type: 'spec_regeneration_complete',
  message: `Spec sync complete! ${result.summary}`,
  projectPath,
});

logger.info('========== syncSpec() completed ==========');
logger.info('Summary:', result.summary);

return result;
}

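As a concrete illustration of the set-based diff above (values are illustrative, not from the repository), note that the comparison is case-insensitive while the reported names keep their original casing:

// Worked example of the tech-stack diffing logic above (illustrative values).
const currentTechStack = ['React', 'Express'];
const newTechStack = ['react', 'Fastify'];

const currentSet = new Set(currentTechStack.map((t) => t.toLowerCase()));
const newSet = new Set(newTechStack.map((t) => t.toLowerCase()));

const added = newTechStack.filter((t) => !currentSet.has(t.toLowerCase()));   // ['Fastify']
const removed = currentTechStack.filter((t) => !newSet.has(t.toLowerCase())); // ['Express']
// 'react' is not reported as added because matching ignores case.
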
19
apps/server/src/routes/event-history/common.ts
Normal file
@@ -0,0 +1,19 @@
/**
 * Common utilities for event history routes
 */

import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

/** Logger instance for event history operations */
export const logger = createLogger('EventHistory');

/**
 * Extract user-friendly error message from error objects
 */
export { getErrorMessageShared as getErrorMessage };

/**
 * Log error with automatic logger binding
 */
export const logError = createLogError(logger);

68
apps/server/src/routes/event-history/index.ts
Normal file
@@ -0,0 +1,68 @@
/**
 * Event History routes - HTTP API for event history management
 *
 * Provides endpoints for:
 * - Listing events with filtering
 * - Getting individual event details
 * - Deleting events
 * - Clearing all events
 * - Replaying events to test hooks
 *
 * Mounted at /api/event-history in the main server.
 */

import { Router } from 'express';
import type { EventHistoryService } from '../../services/event-history-service.js';
import type { SettingsService } from '../../services/settings-service.js';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createListHandler } from './routes/list.js';
import { createGetHandler } from './routes/get.js';
import { createDeleteHandler } from './routes/delete.js';
import { createClearHandler } from './routes/clear.js';
import { createReplayHandler } from './routes/replay.js';

/**
 * Create event history router with all endpoints
 *
 * Endpoints:
 * - POST /list - List events with optional filtering
 * - POST /get - Get a single event by ID
 * - POST /delete - Delete an event by ID
 * - POST /clear - Clear all events for a project
 * - POST /replay - Replay an event to trigger hooks
 *
 * @param eventHistoryService - Instance of EventHistoryService
 * @param settingsService - Instance of SettingsService (for replay)
 * @returns Express Router configured with all event history endpoints
 */
export function createEventHistoryRoutes(
  eventHistoryService: EventHistoryService,
  settingsService: SettingsService
): Router {
  const router = Router();

  // List events with filtering
  router.post('/list', validatePathParams('projectPath'), createListHandler(eventHistoryService));

  // Get single event
  router.post('/get', validatePathParams('projectPath'), createGetHandler(eventHistoryService));

  // Delete event
  router.post(
    '/delete',
    validatePathParams('projectPath'),
    createDeleteHandler(eventHistoryService)
  );

  // Clear all events
  router.post('/clear', validatePathParams('projectPath'), createClearHandler(eventHistoryService));

  // Replay event
  router.post(
    '/replay',
    validatePathParams('projectPath'),
    createReplayHandler(eventHistoryService, settingsService)
  );

  return router;
}

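For orientation, this is how a client might drive the new router (a minimal sketch, not part of the diff; the base URL/port and the presence of an id field on event summaries are assumptions, while paths and body shapes come from the doc comments above):

const EVENT_HISTORY_BASE = 'http://localhost:3008/api/event-history'; // assumed host/port

// List the newest event for a project, then fetch it in full.
async function showLatestEvent(projectPath: string): Promise<void> {
  const listRes = await fetch(`${EVENT_HISTORY_BASE}/list`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ projectPath, filter: { limit: 1, offset: 0 } }),
  });
  const { events } = (await listRes.json()) as { events: Array<{ id: string }> };
  if (events.length === 0) return;

  const getRes = await fetch(`${EVENT_HISTORY_BASE}/get`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ projectPath, eventId: events[0].id }),
  });
  console.log(await getRes.json()); // { success: true, event: StoredEvent }
}
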
33
apps/server/src/routes/event-history/routes/clear.ts
Normal file
@@ -0,0 +1,33 @@
/**
 * POST /api/event-history/clear - Clear all events for a project
 *
 * Request body: { projectPath: string }
 * Response: { success: true, cleared: number }
 */

import type { Request, Response } from 'express';
import type { EventHistoryService } from '../../../services/event-history-service.js';
import { getErrorMessage, logError } from '../common.js';

export function createClearHandler(eventHistoryService: EventHistoryService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath } = req.body as { projectPath: string };

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      const cleared = await eventHistoryService.clearEvents(projectPath);

      res.json({
        success: true,
        cleared,
      });
    } catch (error) {
      logError(error, 'Clear events failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

43
apps/server/src/routes/event-history/routes/delete.ts
Normal file
@@ -0,0 +1,43 @@
/**
 * POST /api/event-history/delete - Delete an event by ID
 *
 * Request body: { projectPath: string, eventId: string }
 * Response: { success: true } or { success: false, error: string }
 */

import type { Request, Response } from 'express';
import type { EventHistoryService } from '../../../services/event-history-service.js';
import { getErrorMessage, logError } from '../common.js';

export function createDeleteHandler(eventHistoryService: EventHistoryService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, eventId } = req.body as {
        projectPath: string;
        eventId: string;
      };

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      if (!eventId || typeof eventId !== 'string') {
        res.status(400).json({ success: false, error: 'eventId is required' });
        return;
      }

      const deleted = await eventHistoryService.deleteEvent(projectPath, eventId);

      if (!deleted) {
        res.status(404).json({ success: false, error: 'Event not found' });
        return;
      }

      res.json({ success: true });
    } catch (error) {
      logError(error, 'Delete event failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

46
apps/server/src/routes/event-history/routes/get.ts
Normal file
@@ -0,0 +1,46 @@
/**
 * POST /api/event-history/get - Get a single event by ID
 *
 * Request body: { projectPath: string, eventId: string }
 * Response: { success: true, event: StoredEvent } or { success: false, error: string }
 */

import type { Request, Response } from 'express';
import type { EventHistoryService } from '../../../services/event-history-service.js';
import { getErrorMessage, logError } from '../common.js';

export function createGetHandler(eventHistoryService: EventHistoryService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, eventId } = req.body as {
        projectPath: string;
        eventId: string;
      };

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      if (!eventId || typeof eventId !== 'string') {
        res.status(400).json({ success: false, error: 'eventId is required' });
        return;
      }

      const event = await eventHistoryService.getEvent(projectPath, eventId);

      if (!event) {
        res.status(404).json({ success: false, error: 'Event not found' });
        return;
      }

      res.json({
        success: true,
        event,
      });
    } catch (error) {
      logError(error, 'Get event failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

53
apps/server/src/routes/event-history/routes/list.ts
Normal file
@@ -0,0 +1,53 @@
/**
 * POST /api/event-history/list - List events for a project
 *
 * Request body: {
 *   projectPath: string,
 *   filter?: {
 *     trigger?: EventHookTrigger,
 *     featureId?: string,
 *     since?: string,
 *     until?: string,
 *     limit?: number,
 *     offset?: number
 *   }
 * }
 * Response: { success: true, events: StoredEventSummary[], total: number }
 */

import type { Request, Response } from 'express';
import type { EventHistoryService } from '../../../services/event-history-service.js';
import type { EventHistoryFilter } from '@automaker/types';
import { getErrorMessage, logError } from '../common.js';

export function createListHandler(eventHistoryService: EventHistoryService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, filter } = req.body as {
        projectPath: string;
        filter?: EventHistoryFilter;
      };

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      const events = await eventHistoryService.getEvents(projectPath, filter);
      const total = await eventHistoryService.getEventCount(projectPath, {
        ...filter,
        limit: undefined,
        offset: undefined,
      });

      res.json({
        success: true,
        events,
        total,
      });
    } catch (error) {
      logError(error, 'List events failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

234
apps/server/src/routes/event-history/routes/replay.ts
Normal file
@@ -0,0 +1,234 @@
/**
 * POST /api/event-history/replay - Replay an event to trigger hooks
 *
 * Request body: {
 *   projectPath: string,
 *   eventId: string,
 *   hookIds?: string[] // Optional: specific hooks to run (if not provided, runs all enabled matching hooks)
 * }
 * Response: { success: true, result: EventReplayResult }
 */

import type { Request, Response } from 'express';
import type { EventHistoryService } from '../../../services/event-history-service.js';
import type { SettingsService } from '../../../services/settings-service.js';
import type { EventReplayResult, EventReplayHookResult, EventHook } from '@automaker/types';
import { exec } from 'child_process';
import { promisify } from 'util';
import { getErrorMessage, logError, logger } from '../common.js';

const execAsync = promisify(exec);

/** Default timeout for shell commands (30 seconds) */
const DEFAULT_SHELL_TIMEOUT = 30000;

/** Default timeout for HTTP requests (10 seconds) */
const DEFAULT_HTTP_TIMEOUT = 10000;

interface HookContext {
  featureId?: string;
  featureName?: string;
  projectPath?: string;
  projectName?: string;
  error?: string;
  errorType?: string;
  timestamp: string;
  eventType: string;
}

/**
 * Substitute {{variable}} placeholders in a string
 */
function substituteVariables(template: string, context: HookContext): string {
  return template.replace(/\{\{(\w+)\}\}/g, (match, variable) => {
    const value = context[variable as keyof HookContext];
    if (value === undefined || value === null) {
      return '';
    }
    return String(value);
  });
}

/**
 * Execute a single hook and return the result
 */
async function executeHook(hook: EventHook, context: HookContext): Promise<EventReplayHookResult> {
  const hookName = hook.name || hook.id;
  const startTime = Date.now();

  try {
    if (hook.action.type === 'shell') {
      const command = substituteVariables(hook.action.command, context);
      const timeout = hook.action.timeout || DEFAULT_SHELL_TIMEOUT;

      logger.info(`Replaying shell hook "${hookName}": ${command}`);

      await execAsync(command, {
        timeout,
        maxBuffer: 1024 * 1024,
      });

      return {
        hookId: hook.id,
        hookName: hook.name,
        success: true,
        durationMs: Date.now() - startTime,
      };
    } else if (hook.action.type === 'http') {
      const url = substituteVariables(hook.action.url, context);
      const method = hook.action.method || 'POST';

      const headers: Record<string, string> = {
        'Content-Type': 'application/json',
      };
      if (hook.action.headers) {
        for (const [key, value] of Object.entries(hook.action.headers)) {
          headers[key] = substituteVariables(value, context);
        }
      }

      let body: string | undefined;
      if (hook.action.body) {
        body = substituteVariables(hook.action.body, context);
      } else if (method !== 'GET') {
        body = JSON.stringify({
          eventType: context.eventType,
          timestamp: context.timestamp,
          featureId: context.featureId,
          projectPath: context.projectPath,
          projectName: context.projectName,
          error: context.error,
        });
      }

      logger.info(`Replaying HTTP hook "${hookName}": ${method} ${url}`);

      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), DEFAULT_HTTP_TIMEOUT);

      const response = await fetch(url, {
        method,
        headers,
        body: method !== 'GET' ? body : undefined,
        signal: controller.signal,
      });

      clearTimeout(timeoutId);

      if (!response.ok) {
        return {
          hookId: hook.id,
          hookName: hook.name,
          success: false,
          error: `HTTP ${response.status}: ${response.statusText}`,
          durationMs: Date.now() - startTime,
        };
      }

      return {
        hookId: hook.id,
        hookName: hook.name,
        success: true,
        durationMs: Date.now() - startTime,
      };
    }

    return {
      hookId: hook.id,
      hookName: hook.name,
      success: false,
      error: 'Unknown hook action type',
      durationMs: Date.now() - startTime,
    };
  } catch (error) {
    const errorMessage =
      error instanceof Error
        ? error.name === 'AbortError'
          ? 'Request timed out'
          : error.message
        : String(error);

    return {
      hookId: hook.id,
      hookName: hook.name,
      success: false,
      error: errorMessage,
      durationMs: Date.now() - startTime,
    };
  }
}

export function createReplayHandler(
  eventHistoryService: EventHistoryService,
  settingsService: SettingsService
) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, eventId, hookIds } = req.body as {
        projectPath: string;
        eventId: string;
        hookIds?: string[];
      };

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      if (!eventId || typeof eventId !== 'string') {
        res.status(400).json({ success: false, error: 'eventId is required' });
        return;
      }

      // Get the event
      const event = await eventHistoryService.getEvent(projectPath, eventId);
      if (!event) {
        res.status(404).json({ success: false, error: 'Event not found' });
        return;
      }

      // Get hooks from settings
      const settings = await settingsService.getGlobalSettings();
      let hooks = settings.eventHooks || [];

      // Filter to matching trigger and enabled hooks
      hooks = hooks.filter((h) => h.enabled && h.trigger === event.trigger);

      // If specific hook IDs requested, filter to those
      if (hookIds && hookIds.length > 0) {
        hooks = hooks.filter((h) => hookIds.includes(h.id));
      }

      // Build context for variable substitution
      const context: HookContext = {
        featureId: event.featureId,
        featureName: event.featureName,
        projectPath: event.projectPath,
        projectName: event.projectName,
        error: event.error,
        errorType: event.errorType,
        timestamp: event.timestamp,
        eventType: event.trigger,
      };

      // Execute all hooks in parallel
      const hookResults = await Promise.all(hooks.map((hook) => executeHook(hook, context)));

      const result: EventReplayResult = {
        eventId,
        hooksTriggered: hooks.length,
        hookResults,
      };

      logger.info(`Replayed event ${eventId}: ${hooks.length} hooks triggered`);

      res.json({
        success: true,
        result,
      });
    } catch (error) {
      logError(error, 'Replay event failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

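To make the {{variable}} substitution concrete, a small worked example (illustrative values; assumes substituteVariables and HookContext from this file are in scope):

const demoContext: HookContext = {
  featureId: 'feat-42',           // hypothetical
  projectName: 'demo',            // hypothetical
  timestamp: new Date().toISOString(),
  eventType: 'feature_completed', // illustrative trigger name
};

substituteVariables('Feature {{featureId}} done in {{projectName}}; error: {{error}}', demoContext);
// -> 'Feature feat-42 done in demo; error: '
// Placeholders with no value in the context (here {{error}}) are replaced by ''.
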
@@ -5,6 +5,7 @@
import { Router } from 'express';
import { FeatureLoader } from '../../services/feature-loader.js';
import type { SettingsService } from '../../services/settings-service.js';
import type { EventEmitter } from '../../lib/events.js';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createListHandler } from './routes/list.js';
import { createGetHandler } from './routes/get.js';
@@ -18,13 +19,18 @@ import { createGenerateTitleHandler } from './routes/generate-title.js';

export function createFeaturesRoutes(
  featureLoader: FeatureLoader,
  settingsService?: SettingsService
  settingsService?: SettingsService,
  events?: EventEmitter
): Router {
  const router = Router();

  router.post('/list', validatePathParams('projectPath'), createListHandler(featureLoader));
  router.post('/get', validatePathParams('projectPath'), createGetHandler(featureLoader));
  router.post('/create', validatePathParams('projectPath'), createCreateHandler(featureLoader));
  router.post(
    '/create',
    validatePathParams('projectPath'),
    createCreateHandler(featureLoader, events)
  );
  router.post('/update', validatePathParams('projectPath'), createUpdateHandler(featureLoader));
  router.post(
    '/bulk-update',

@@ -4,10 +4,11 @@

import type { Request, Response } from 'express';
import { FeatureLoader } from '../../../services/feature-loader.js';
import type { EventEmitter } from '../../../lib/events.js';
import type { Feature } from '@automaker/types';
import { getErrorMessage, logError } from '../common.js';

export function createCreateHandler(featureLoader: FeatureLoader) {
export function createCreateHandler(featureLoader: FeatureLoader, events?: EventEmitter) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, feature } = req.body as {
@@ -23,7 +24,30 @@ export function createCreateHandler(featureLoader: FeatureLoader) {
        return;
      }

      // Check for duplicate title if title is provided
      if (feature.title && feature.title.trim()) {
        const duplicate = await featureLoader.findDuplicateTitle(projectPath, feature.title);
        if (duplicate) {
          res.status(409).json({
            success: false,
            error: `A feature with title "${feature.title}" already exists`,
            duplicateFeatureId: duplicate.id,
          });
          return;
        }
      }

      const created = await featureLoader.create(projectPath, feature);

      // Emit feature_created event for hooks
      if (events) {
        events.emit('feature:created', {
          featureId: created.id,
          featureName: created.name,
          projectPath,
        });
      }

      res.json({ success: true, feature: created });
    } catch (error) {
      logError(error, 'Create feature failed');

@@ -4,8 +4,14 @@

import type { Request, Response } from 'express';
import { FeatureLoader } from '../../../services/feature-loader.js';
import type { Feature } from '@automaker/types';
import type { Feature, FeatureStatus } from '@automaker/types';
import { getErrorMessage, logError } from '../common.js';
import { createLogger } from '@automaker/utils';

const logger = createLogger('features/update');

// Statuses that should trigger syncing to app_spec.txt
const SYNC_TRIGGER_STATUSES: FeatureStatus[] = ['verified', 'completed'];

export function createUpdateHandler(featureLoader: FeatureLoader) {
  return async (req: Request, res: Response): Promise<void> => {
@@ -34,6 +40,28 @@ export function createUpdateHandler(featureLoader: FeatureLoader) {
        return;
      }

      // Check for duplicate title if title is being updated
      if (updates.title && updates.title.trim()) {
        const duplicate = await featureLoader.findDuplicateTitle(
          projectPath,
          updates.title,
          featureId // Exclude the current feature from duplicate check
        );
        if (duplicate) {
          res.status(409).json({
            success: false,
            error: `A feature with title "${updates.title}" already exists`,
            duplicateFeatureId: duplicate.id,
          });
          return;
        }
      }

      // Get the current feature to detect status changes
      const currentFeature = await featureLoader.get(projectPath, featureId);
      const previousStatus = currentFeature?.status as FeatureStatus | undefined;
      const newStatus = updates.status as FeatureStatus | undefined;

      const updated = await featureLoader.update(
        projectPath,
        featureId,
@@ -42,6 +70,22 @@ export function createUpdateHandler(featureLoader: FeatureLoader) {
        enhancementMode,
        preEnhancementDescription
      );

      // Trigger sync to app_spec.txt when status changes to verified or completed
      if (newStatus && SYNC_TRIGGER_STATUSES.includes(newStatus) && previousStatus !== newStatus) {
        try {
          const synced = await featureLoader.syncFeatureToAppSpec(projectPath, updated);
          if (synced) {
            logger.info(
              `Synced feature "${updated.title || updated.id}" to app_spec.txt on status change to ${newStatus}`
            );
          }
        } catch (syncError) {
          // Log the sync error but don't fail the update operation
          logger.error(`Failed to sync feature to app_spec.txt:`, syncError);
        }
      }

      res.json({ success: true, feature: updated });
    } catch (error) {
      logError(error, 'Update feature failed');

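A usage note for API consumers (an illustrative sketch, not part of the diff; the host/port, project path, and feature ID are assumptions, while the updates field names come from the handler above):

const res = await fetch('http://localhost:3008/api/features/update', { // assumed host/port
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    projectPath: '/work/demo',       // hypothetical project
    featureId: 'feat-42',            // hypothetical feature
    updates: { status: 'verified' }, // this status change now also syncs the feature to app_spec.txt
  }),
});
// A 409 is returned only when updates.title collides with another feature's title;
// the response then carries duplicateFeatureId for the conflicting feature.
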
21
apps/server/src/routes/notifications/common.ts
Normal file
@@ -0,0 +1,21 @@
/**
 * Common utilities for notification routes
 *
 * Provides logger and error handling utilities shared across all notification endpoints.
 */

import { createLogger } from '@automaker/utils';
import { getErrorMessage as getErrorMessageShared, createLogError } from '../common.js';

/** Logger instance for notification-related operations */
export const logger = createLogger('Notifications');

/**
 * Extract user-friendly error message from error objects
 */
export { getErrorMessageShared as getErrorMessage };

/**
 * Log error with automatic logger binding
 */
export const logError = createLogError(logger);

62
apps/server/src/routes/notifications/index.ts
Normal file
@@ -0,0 +1,62 @@
/**
 * Notifications routes - HTTP API for project-level notifications
 *
 * Provides endpoints for:
 * - Listing notifications
 * - Getting unread count
 * - Marking notifications as read
 * - Dismissing notifications
 *
 * All endpoints use handler factories that receive the NotificationService instance.
 * Mounted at /api/notifications in the main server.
 */

import { Router } from 'express';
import type { NotificationService } from '../../services/notification-service.js';
import { validatePathParams } from '../../middleware/validate-paths.js';
import { createListHandler } from './routes/list.js';
import { createUnreadCountHandler } from './routes/unread-count.js';
import { createMarkReadHandler } from './routes/mark-read.js';
import { createDismissHandler } from './routes/dismiss.js';

/**
 * Create notifications router with all endpoints
 *
 * Endpoints:
 * - POST /list - List all notifications for a project
 * - POST /unread-count - Get unread notification count
 * - POST /mark-read - Mark notification(s) as read
 * - POST /dismiss - Dismiss notification(s)
 *
 * @param notificationService - Instance of NotificationService
 * @returns Express Router configured with all notification endpoints
 */
export function createNotificationsRoutes(notificationService: NotificationService): Router {
  const router = Router();

  // List notifications
  router.post('/list', validatePathParams('projectPath'), createListHandler(notificationService));

  // Get unread count
  router.post(
    '/unread-count',
    validatePathParams('projectPath'),
    createUnreadCountHandler(notificationService)
  );

  // Mark as read (single or all)
  router.post(
    '/mark-read',
    validatePathParams('projectPath'),
    createMarkReadHandler(notificationService)
  );

  // Dismiss (single or all)
  router.post(
    '/dismiss',
    validatePathParams('projectPath'),
    createDismissHandler(notificationService)
  );

  return router;
}

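A minimal client sketch against this router (not part of the diff; the base URL is an assumption, while paths and body shapes come from the doc comments above):

const NOTIFICATIONS_BASE = 'http://localhost:3008/api/notifications'; // assumed host/port

async function pollAndClear(projectPath: string): Promise<void> {
  const countRes = await fetch(`${NOTIFICATIONS_BASE}/unread-count`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ projectPath }),
  });
  const { count } = (await countRes.json()) as { success: boolean; count: number };

  if (count > 0) {
    // Omitting notificationId marks every notification for the project as read.
    await fetch(`${NOTIFICATIONS_BASE}/mark-read`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ projectPath }),
    });
  }
}
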
53
apps/server/src/routes/notifications/routes/dismiss.ts
Normal file
@@ -0,0 +1,53 @@
/**
 * POST /api/notifications/dismiss - Dismiss notification(s)
 *
 * Request body: { projectPath: string, notificationId?: string }
 * - If notificationId provided: dismisses that notification
 * - If notificationId not provided: dismisses all notifications
 *
 * Response: { success: true, dismissed: true } (single) or { success: true, count: number } (all)
 */

import type { Request, Response } from 'express';
import type { NotificationService } from '../../../services/notification-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
 * Create handler for POST /api/notifications/dismiss
 *
 * @param notificationService - Instance of NotificationService
 * @returns Express request handler
 */
export function createDismissHandler(notificationService: NotificationService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, notificationId } = req.body;

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      // If notificationId provided, dismiss single notification
      if (notificationId) {
        const dismissed = await notificationService.dismissNotification(
          projectPath,
          notificationId
        );
        if (!dismissed) {
          res.status(404).json({ success: false, error: 'Notification not found' });
          return;
        }
        res.json({ success: true, dismissed: true });
        return;
      }

      // Otherwise dismiss all
      const count = await notificationService.dismissAll(projectPath);
      res.json({ success: true, count });
    } catch (error) {
      logError(error, 'Dismiss failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

39
apps/server/src/routes/notifications/routes/list.ts
Normal file
@@ -0,0 +1,39 @@
/**
 * POST /api/notifications/list - List all notifications for a project
 *
 * Request body: { projectPath: string }
 * Response: { success: true, notifications: Notification[] }
 */

import type { Request, Response } from 'express';
import type { NotificationService } from '../../../services/notification-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
 * Create handler for POST /api/notifications/list
 *
 * @param notificationService - Instance of NotificationService
 * @returns Express request handler
 */
export function createListHandler(notificationService: NotificationService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath } = req.body;

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      const notifications = await notificationService.getNotifications(projectPath);

      res.json({
        success: true,
        notifications,
      });
    } catch (error) {
      logError(error, 'List notifications failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

50
apps/server/src/routes/notifications/routes/mark-read.ts
Normal file
@@ -0,0 +1,50 @@
/**
 * POST /api/notifications/mark-read - Mark notification(s) as read
 *
 * Request body: { projectPath: string, notificationId?: string }
 * - If notificationId provided: marks that notification as read
 * - If notificationId not provided: marks all notifications as read
 *
 * Response: { success: true, count?: number, notification?: Notification }
 */

import type { Request, Response } from 'express';
import type { NotificationService } from '../../../services/notification-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
 * Create handler for POST /api/notifications/mark-read
 *
 * @param notificationService - Instance of NotificationService
 * @returns Express request handler
 */
export function createMarkReadHandler(notificationService: NotificationService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath, notificationId } = req.body;

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      // If notificationId provided, mark single notification
      if (notificationId) {
        const notification = await notificationService.markAsRead(projectPath, notificationId);
        if (!notification) {
          res.status(404).json({ success: false, error: 'Notification not found' });
          return;
        }
        res.json({ success: true, notification });
        return;
      }

      // Otherwise mark all as read
      const count = await notificationService.markAllAsRead(projectPath);
      res.json({ success: true, count });
    } catch (error) {
      logError(error, 'Mark read failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

39
apps/server/src/routes/notifications/routes/unread-count.ts
Normal file
@@ -0,0 +1,39 @@
/**
 * POST /api/notifications/unread-count - Get unread notification count
 *
 * Request body: { projectPath: string }
 * Response: { success: true, count: number }
 */

import type { Request, Response } from 'express';
import type { NotificationService } from '../../../services/notification-service.js';
import { getErrorMessage, logError } from '../common.js';

/**
 * Create handler for POST /api/notifications/unread-count
 *
 * @param notificationService - Instance of NotificationService
 * @returns Express request handler
 */
export function createUnreadCountHandler(notificationService: NotificationService) {
  return async (req: Request, res: Response): Promise<void> => {
    try {
      const { projectPath } = req.body;

      if (!projectPath || typeof projectPath !== 'string') {
        res.status(400).json({ success: false, error: 'projectPath is required' });
        return;
      }

      const count = await notificationService.getUnreadCount(projectPath);

      res.json({
        success: true,
        count,
      });
    } catch (error) {
      logError(error, 'Get unread count failed');
      res.status(500).json({ success: false, error: getErrorMessage(error) });
    }
  };
}

@@ -5,6 +5,7 @@
import type { Request, Response } from 'express';
import type { AutoModeService } from '../../../services/auto-mode-service.js';
import { getBacklogPlanStatus, getRunningDetails } from '../../backlog-plan/common.js';
import { getAllRunningGenerations } from '../../app-spec/common.js';
import path from 'path';
import { getErrorMessage, logError } from '../common.js';

@@ -26,6 +27,36 @@ export function createIndexHandler(autoModeService: AutoModeService) {
      });
    }

    // Add spec/feature generation tasks
    const specGenerations = getAllRunningGenerations();
    for (const generation of specGenerations) {
      let title: string;
      let description: string;

      switch (generation.type) {
        case 'feature_generation':
          title = 'Generating features from spec';
          description = 'Creating features from the project specification';
          break;
        case 'sync':
          title = 'Syncing spec with code';
          description = 'Updating spec from codebase and completed features';
          break;
        default:
          title = 'Regenerating spec';
          description = 'Analyzing project and generating specification';
      }

      runningAgents.push({
        featureId: `spec-generation:${generation.projectPath}`,
        projectPath: generation.projectPath,
        projectName: path.basename(generation.projectPath),
        isAutoMode: false,
        title,
        description,
      });
    }

    res.json({
      success: true,
      runningAgents,

@@ -34,6 +34,7 @@
      result: {
        worktreePath: result.result.worktreePath,
        port: result.result.port,
        url: result.result.url,
        logs: result.result.logs,
        startedAt: result.result.startedAt,
      },

@@ -29,6 +29,10 @@ import {
  appendLearning,
  recordMemoryUsage,
  createLogger,
  atomicWriteJson,
  readJsonWithRecovery,
  logRecoveryWarning,
  DEFAULT_BACKUP_COUNT,
} from '@automaker/utils';

const logger = createLogger('AutoMode');
@@ -60,6 +64,7 @@ import {
  getMCPServersFromSettings,
  getPromptCustomization,
} from '../lib/settings-helpers.js';
import { getNotificationService } from './notification-service.js';

const execAsync = promisify(exec);

@@ -386,6 +391,7 @@ export class AutoModeService {
        this.emitAutoModeEvent('auto_mode_error', {
          error: errorInfo.message,
          errorType: errorInfo.type,
          projectPath,
        });
      });
    }
@@ -1414,13 +1420,13 @@ Address the follow-up instructions above. Review the previous work and make the
      allImagePaths.push(...allPaths);
    }

    // Save updated feature.json with new images
    // Save updated feature.json with new images (atomic write with backup)
    if (copiedImagePaths.length > 0 && feature) {
      const featureDirForSave = getFeatureDir(projectPath, featureId);
      const featurePath = path.join(featureDirForSave, 'feature.json');

      try {
        await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
        await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
      } catch (error) {
        logger.error(`Failed to save feature.json:`, error);
      }
@@ -1547,6 +1553,7 @@ Address the follow-up instructions above. Review the previous work and make the
      message: allPassed
        ? 'All verification checks passed'
        : `Verification failed: ${results.find((r) => !r.passed)?.check || 'Unknown'}`,
      projectPath,
    });

    return allPassed;
@@ -1620,6 +1627,7 @@ Address the follow-up instructions above. Review the previous work and make the
      featureId,
      passes: true,
      message: `Changes committed: ${hash.trim().substring(0, 8)}`,
      projectPath,
    });

    return hash.trim();
@@ -2088,8 +2096,20 @@ Format your response as a structured markdown document.`;
    const featurePath = path.join(featureDir, 'feature.json');

    try {
      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
      const feature = JSON.parse(data);
      // Use recovery-enabled read for corrupted file handling
      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
        maxBackups: DEFAULT_BACKUP_COUNT,
        autoRestore: true,
      });

      logRecoveryWarning(result, `Feature ${featureId}`, logger);

      const feature = result.data;
      if (!feature) {
        logger.warn(`Feature ${featureId} not found or could not be recovered`);
        return;
      }

      feature.status = status;
      feature.updatedAt = new Date().toISOString();
      // Set justFinishedAt timestamp when moving to waiting_approval (agent just completed)
@@ -2100,9 +2120,41 @@ Format your response as a structured markdown document.`;
        // Clear the timestamp when moving to other statuses
        feature.justFinishedAt = undefined;
      }
      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));
    } catch {
      // Feature file may not exist

      // Use atomic write with backup support
      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

      // Create notifications for important status changes
      const notificationService = getNotificationService();
      if (status === 'waiting_approval') {
        await notificationService.createNotification({
          type: 'feature_waiting_approval',
          title: 'Feature Ready for Review',
          message: `"${feature.name || featureId}" is ready for your review and approval.`,
          featureId,
          projectPath,
        });
      } else if (status === 'verified') {
        await notificationService.createNotification({
          type: 'feature_verified',
          title: 'Feature Verified',
          message: `"${feature.name || featureId}" has been verified and is complete.`,
          featureId,
          projectPath,
        });
      }

      // Sync completed/verified features to app_spec.txt
      if (status === 'verified' || status === 'completed') {
        try {
          await this.featureLoader.syncFeatureToAppSpec(projectPath, feature);
        } catch (syncError) {
          // Log but don't fail the status update if sync fails
          logger.warn(`Failed to sync feature ${featureId} to app_spec.txt:`, syncError);
        }
      }
    } catch (error) {
      logger.error(`Failed to update feature status for ${featureId}:`, error);
    }
  }

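The read/write pairing above is the pattern this change introduces throughout the service. A hedged sketch of the round-trip, using only the helpers visible in this diff (their behavior is inferred from the call sites here, and the payload is illustrative):

// Sketch of the atomic-write / recovery-read persistence pattern used above.
import {
  atomicWriteJson,
  readJsonWithRecovery,
  logRecoveryWarning,
  DEFAULT_BACKUP_COUNT,
  createLogger,
} from '@automaker/utils';

const log = createLogger('Example');

async function roundTrip(filePath: string): Promise<void> {
  // Write atomically, keeping rotating backups for later recovery.
  await atomicWriteJson(filePath, { hello: 'world' }, { backupCount: DEFAULT_BACKUP_COUNT });

  // Read back; on corruption, fall back to the newest valid backup and warn.
  const result = await readJsonWithRecovery<{ hello: string } | null>(filePath, null, {
    maxBackups: DEFAULT_BACKUP_COUNT,
    autoRestore: true,
  });
  logRecoveryWarning(result, filePath, log);
  if (result.data) log.info(result.data.hello);
}
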
@@ -2114,11 +2166,24 @@ Format your response as a structured markdown document.`;
    featureId: string,
    updates: Partial<PlanSpec>
  ): Promise<void> {
    const featurePath = path.join(projectPath, '.automaker', 'features', featureId, 'feature.json');
    // Use getFeatureDir helper for consistent path resolution
    const featureDir = getFeatureDir(projectPath, featureId);
    const featurePath = path.join(featureDir, 'feature.json');

    try {
      const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
      const feature = JSON.parse(data);
      // Use recovery-enabled read for corrupted file handling
      const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
        maxBackups: DEFAULT_BACKUP_COUNT,
        autoRestore: true,
      });

      logRecoveryWarning(result, `Feature ${featureId}`, logger);

      const feature = result.data;
      if (!feature) {
        logger.warn(`Feature ${featureId} not found or could not be recovered`);
        return;
      }

      // Initialize planSpec if it doesn't exist
      if (!feature.planSpec) {
@@ -2138,7 +2203,9 @@ Format your response as a structured markdown document.`;
      }

      feature.updatedAt = new Date().toISOString();
      await secureFs.writeFile(featurePath, JSON.stringify(feature, null, 2));

      // Use atomic write with backup support
      await atomicWriteJson(featurePath, feature, { backupCount: DEFAULT_BACKUP_COUNT });
    } catch (error) {
      logger.error(`Failed to update planSpec for ${featureId}:`, error);
    }
@@ -2155,25 +2222,34 @@ Format your response as a structured markdown document.`;
    const allFeatures: Feature[] = [];
    const pendingFeatures: Feature[] = [];

    // Load all features (for dependency checking)
    // Load all features (for dependency checking) with recovery support
    for (const entry of entries) {
      if (entry.isDirectory()) {
        const featurePath = path.join(featuresDir, entry.name, 'feature.json');
        try {
          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
          const feature = JSON.parse(data);
          allFeatures.push(feature);

          // Track pending features separately
          if (
            feature.status === 'pending' ||
            feature.status === 'ready' ||
            feature.status === 'backlog'
          ) {
            pendingFeatures.push(feature);
          }
        } catch {
          // Skip invalid features
        // Use recovery-enabled read for corrupted file handling
        const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
          maxBackups: DEFAULT_BACKUP_COUNT,
          autoRestore: true,
        });

        logRecoveryWarning(result, `Feature ${entry.name}`, logger);

        const feature = result.data;
        if (!feature) {
          // Skip features that couldn't be loaded or recovered
          continue;
        }

        allFeatures.push(feature);

        // Track pending features separately
        if (
          feature.status === 'pending' ||
          feature.status === 'ready' ||
          feature.status === 'backlog'
        ) {
          pendingFeatures.push(feature);
        }
      }
    }
@@ -3405,31 +3481,39 @@ After generating the revised spec, output:
    for (const entry of entries) {
      if (entry.isDirectory()) {
        const featurePath = path.join(featuresDir, entry.name, 'feature.json');
        try {
          const data = (await secureFs.readFile(featurePath, 'utf-8')) as string;
          const feature = JSON.parse(data) as Feature;

          // Check if feature was interrupted (in_progress or pipeline_*)
          if (
            feature.status === 'in_progress' ||
            (feature.status && feature.status.startsWith('pipeline_'))
          ) {
            // Verify it has existing context (agent-output.md)
            const featureDir = getFeatureDir(projectPath, feature.id);
            const contextPath = path.join(featureDir, 'agent-output.md');
            try {
              await secureFs.access(contextPath);
              interruptedFeatures.push(feature);
              logger.info(
                `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
              );
            } catch {
              // No context file, skip this feature - it will be restarted fresh
              logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
            }
          // Use recovery-enabled read for corrupted file handling
          const result = await readJsonWithRecovery<Feature | null>(featurePath, null, {
            maxBackups: DEFAULT_BACKUP_COUNT,
            autoRestore: true,
          });

          logRecoveryWarning(result, `Feature ${entry.name}`, logger);

          const feature = result.data;
          if (!feature) {
            // Skip features that couldn't be loaded or recovered
            continue;
          }

          // Check if feature was interrupted (in_progress or pipeline_*)
          if (
            feature.status === 'in_progress' ||
            (feature.status && feature.status.startsWith('pipeline_'))
          ) {
            // Verify it has existing context (agent-output.md)
            const featureDir = getFeatureDir(projectPath, feature.id);
            const contextPath = path.join(featureDir, 'agent-output.md');
            try {
              await secureFs.access(contextPath);
              interruptedFeatures.push(feature);
              logger.info(
                `Found interrupted feature: ${feature.id} (${feature.title}) - status: ${feature.status}`
              );
            } catch {
              // No context file, skip this feature - it will be restarted fresh
              logger.info(`Interrupted feature ${feature.id} has no context, will restart fresh`);
            }
          }
        } catch {
          // Skip invalid features
        }
      }
    }

@@ -22,6 +22,29 @@ export class ClaudeUsageService {
  private timeout = 30000; // 30 second timeout
  private isWindows = os.platform() === 'win32';
  private isLinux = os.platform() === 'linux';
  // On Windows, ConPTY requires AttachConsole which fails in Electron/service mode
  // Detect Electron by checking for electron-specific env vars or process properties
  // When in Electron, always use winpty to avoid ConPTY's AttachConsole errors
  private isElectron =
    !!(process.versions && (process.versions as Record<string, string>).electron) ||
    !!process.env.ELECTRON_RUN_AS_NODE;
  private useConptyFallback = false; // Track if we need to use winpty fallback on Windows

  /**
   * Kill a PTY process with platform-specific handling.
   * Windows doesn't support Unix signals like SIGTERM, so we call kill() without arguments.
   * On Unix-like systems (macOS, Linux), we can specify the signal.
   *
   * @param ptyProcess - The PTY process to kill
   * @param signal - The signal to send on Unix-like systems (default: 'SIGTERM')
   */
  private killPtyProcess(ptyProcess: pty.IPty, signal: string = 'SIGTERM'): void {
    if (this.isWindows) {
      ptyProcess.kill();
    } else {
      ptyProcess.kill(signal);
    }
  }

  /**
   * Check if Claude CLI is available on the system
@@ -181,37 +204,94 @@
        ? ['/c', 'claude', '--add-dir', workingDirectory]
        : ['-c', `claude --add-dir "${workingDirectory}"`];

      // Using 'any' for ptyProcess because node-pty types don't include 'killed' property
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      let ptyProcess: any = null;

      // Build PTY spawn options
      const ptyOptions: pty.IPtyForkOptions = {
        name: 'xterm-256color',
        cols: 120,
        rows: 30,
        cwd: workingDirectory,
        env: {
          ...process.env,
          TERM: 'xterm-256color',
        } as Record<string, string>,
      };

      // On Windows, always use winpty instead of ConPTY
      // ConPTY requires AttachConsole which fails in many contexts:
      // - Electron apps without a console
      // - VS Code integrated terminal
      // - Spawned from other applications
      // The error happens in a subprocess so we can't catch it - must proactively disable
      if (this.isWindows) {
        (ptyOptions as pty.IWindowsPtyForkOptions).useConpty = false;
        logger.info(
          '[executeClaudeUsageCommandPty] Using winpty on Windows (ConPTY disabled for compatibility)'
        );
      }

      try {
        ptyProcess = pty.spawn(shell, args, {
          name: 'xterm-256color',
          cols: 120,
          rows: 30,
          cwd: workingDirectory,
          env: {
            ...process.env,
            TERM: 'xterm-256color',
          } as Record<string, string>,
        });
        ptyProcess = pty.spawn(shell, args, ptyOptions);
      } catch (spawnError) {
        const errorMessage = spawnError instanceof Error ? spawnError.message : String(spawnError);
        logger.error('[executeClaudeUsageCommandPty] Failed to spawn PTY:', errorMessage);

        // Return a user-friendly error instead of crashing
        reject(
          new Error(
            `Unable to access terminal: ${errorMessage}. Claude CLI may not be available or PTY support is limited in this environment.`
          )
        );
        return;
        // Check for Windows ConPTY-specific errors
        if (this.isWindows && errorMessage.includes('AttachConsole failed')) {
          // ConPTY failed - try winpty fallback
          if (!this.useConptyFallback) {
            logger.warn(
              '[executeClaudeUsageCommandPty] ConPTY AttachConsole failed, retrying with winpty fallback'
            );
            this.useConptyFallback = true;

            try {
              (ptyOptions as pty.IWindowsPtyForkOptions).useConpty = false;
              ptyProcess = pty.spawn(shell, args, ptyOptions);
              logger.info(
                '[executeClaudeUsageCommandPty] Successfully spawned with winpty fallback'
              );
            } catch (fallbackError) {
              const fallbackMessage =
                fallbackError instanceof Error ? fallbackError.message : String(fallbackError);
              logger.error(
                '[executeClaudeUsageCommandPty] Winpty fallback also failed:',
                fallbackMessage
              );
              reject(
                new Error(
                  `Windows PTY unavailable: Both ConPTY and winpty failed. This typically happens when running in Electron without a console. ConPTY error: ${errorMessage}. Winpty error: ${fallbackMessage}`
                )
              );
              return;
            }
          } else {
            logger.error('[executeClaudeUsageCommandPty] Winpty fallback failed:', errorMessage);
            reject(
              new Error(
                `Windows PTY unavailable: ${errorMessage}. The application is running without console access (common in Electron). Try running from a terminal window.`
              )
            );
            return;
          }
        } else {
          logger.error('[executeClaudeUsageCommandPty] Failed to spawn PTY:', errorMessage);
          reject(
            new Error(
              `Unable to access terminal: ${errorMessage}. Claude CLI may not be available or PTY support is limited in this environment.`
            )
          );
          return;
        }
      }

      const timeoutId = setTimeout(() => {
        if (!settled) {
          settled = true;
          if (ptyProcess && !ptyProcess.killed) {
            ptyProcess.kill();
            this.killPtyProcess(ptyProcess);
          }
          // Don't fail if we have data - return it instead
          if (output.includes('Current session')) {
@@ -244,16 +324,23 @@
          const cleanOutput = output.replace(/\x1B\[[0-9;]*[A-Za-z]/g, '');

          // Check for specific authentication/permission errors
          if (
            cleanOutput.includes('OAuth token does not meet scope requirement') ||
            cleanOutput.includes('permission_error') ||
            cleanOutput.includes('token_expired') ||
            cleanOutput.includes('authentication_error')
          ) {
          // Must be very specific to avoid false positives from garbled terminal encoding
          // Removed permission_error check as it was causing false positives with winpty encoding
          const authChecks = {
            oauth: cleanOutput.includes('OAuth token does not meet scope requirement'),
            tokenExpired: cleanOutput.includes('token_expired'),
            // Only match if it looks like a JSON API error response
            authError:
              cleanOutput.includes('"type":"authentication_error"') ||
              cleanOutput.includes('"type": "authentication_error"'),
          };
          const hasAuthError = authChecks.oauth || authChecks.tokenExpired || authChecks.authError;

          if (hasAuthError) {
            if (!settled) {
              settled = true;
              if (ptyProcess && !ptyProcess.killed) {
                ptyProcess.kill();
                this.killPtyProcess(ptyProcess);
              }
              reject(
                new Error(
@@ -265,11 +352,16 @@
          }

          // Check if we've seen the usage data (look for "Current session" or the TUI Usage header)
          if (
            !hasSeenUsageData &&
            (cleanOutput.includes('Current session') ||
              (cleanOutput.includes('Usage') && cleanOutput.includes('% left')))
          ) {
          // Also check for percentage patterns that appear in usage output
          const hasUsageIndicators =
            cleanOutput.includes('Current session') ||
            (cleanOutput.includes('Usage') && cleanOutput.includes('% left')) ||
            // Additional patterns for winpty - look for percentage patterns
            /\d+%\s*(left|used|remaining)/i.test(cleanOutput) ||
            cleanOutput.includes('Resets in') ||
            cleanOutput.includes('Current week');

          if (!hasSeenUsageData && hasUsageIndicators) {
            hasSeenUsageData = true;
            // Wait for full output, then send escape to exit
            setTimeout(() => {
@@ -277,9 +369,10 @@ export class ClaudeUsageService {
|
||||
ptyProcess.write('\x1b'); // Send escape key
|
||||
|
||||
// Fallback: if ESC doesn't exit (Linux), use SIGTERM after 2s
|
||||
// Windows doesn't support signals, so killPtyProcess handles platform differences
|
||||
setTimeout(() => {
|
||||
if (!settled && ptyProcess && !ptyProcess.killed) {
|
||||
ptyProcess.kill('SIGTERM');
|
||||
this.killPtyProcess(ptyProcess);
|
||||
}
|
||||
}, 2000);
|
||||
}
|
||||
@@ -307,10 +400,18 @@ export class ClaudeUsageService {
|
||||
}
|
||||
|
||||
// Detect REPL prompt and send /usage command
|
||||
if (
|
||||
!hasSentCommand &&
|
||||
(cleanOutput.includes('❯') || cleanOutput.includes('? for shortcuts'))
|
||||
) {
|
||||
// On Windows with winpty, Unicode prompt char ❯ gets garbled, so also check for ASCII indicators
|
||||
const isReplReady =
|
||||
cleanOutput.includes('❯') ||
|
||||
cleanOutput.includes('? for shortcuts') ||
|
||||
// Fallback for winpty garbled encoding - detect CLI welcome screen elements
|
||||
(cleanOutput.includes('Welcome back') && cleanOutput.includes('Claude')) ||
|
||||
(cleanOutput.includes('Tips for getting started') && cleanOutput.includes('Claude')) ||
|
||||
// Detect model indicator which appears when REPL is ready
|
||||
(cleanOutput.includes('Opus') && cleanOutput.includes('Claude API')) ||
|
||||
(cleanOutput.includes('Sonnet') && cleanOutput.includes('Claude API'));
|
||||
|
||||
if (!hasSentCommand && isReplReady) {
|
||||
hasSentCommand = true;
|
||||
// Wait for REPL to fully settle
|
||||
setTimeout(() => {
|
||||
@@ -347,11 +448,9 @@ export class ClaudeUsageService {
|
||||
if (settled) return;
|
||||
settled = true;
|
||||
|
||||
if (
|
||||
output.includes('token_expired') ||
|
||||
output.includes('authentication_error') ||
|
||||
output.includes('permission_error')
|
||||
) {
|
||||
// Check for auth errors - must be specific to avoid false positives
|
||||
// Removed permission_error check as it was causing false positives with winpty encoding
|
||||
if (output.includes('token_expired') || output.includes('"type":"authentication_error"')) {
|
||||
reject(new Error("Authentication required - please run 'claude login'"));
|
||||
return;
|
||||
}
|
||||
|
||||
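The broadened usage detection above is just a set of string checks, so it can be exercised in isolation. A minimal sketch; the sample PTY outputs are hypothetical:

// Sketch: the same indicator checks applied to hypothetical PTY output samples.
const samples = [
  'Current session: 42% left', // classic TUI header
  'Usage 37% left', // partially garbled winpty variant
  'Resets in 2h 13m', // reset banner
];
const usagePattern = /\d+%\s*(left|used|remaining)/i;
for (const sample of samples) {
  const hasUsageIndicators =
    sample.includes('Current session') ||
    (sample.includes('Usage') && sample.includes('% left')) ||
    usagePattern.test(sample) ||
    sample.includes('Resets in') ||
    sample.includes('Current week');
  console.log(sample, '=>', hasUsageIndicators); // true for all three samples
}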
@@ -379,10 +379,11 @@ class DevServerService {

    // Create server info early so we can reference it in handlers
    // We'll add it to runningServers after verifying the process started successfully
    const hostname = process.env.HOSTNAME || 'localhost';
    const serverInfo: DevServerInfo = {
      worktreePath,
      port,
      url: `http://localhost:${port}`,
      url: `http://${hostname}:${port}`,
      process: devProcess,
      startedAt: new Date(),
      scrollbackBuffer: '',
@@ -474,7 +475,7 @@ class DevServerService {
      result: {
        worktreePath,
        port,
        url: `http://localhost:${port}`,
        url: `http://${hostname}:${port}`,
        message: `Dev server started on port ${port}`,
      },
    };
@@ -594,6 +595,7 @@ class DevServerService {
    result?: {
      worktreePath: string;
      port: number;
      url: string;
      logs: string;
      startedAt: string;
    };
@@ -613,6 +615,7 @@ class DevServerService {
      result: {
        worktreePath: server.worktreePath,
        port: server.port,
        url: server.url,
        logs: server.scrollbackBuffer,
        startedAt: server.startedAt.toISOString(),
      },
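For reference, the HOSTNAME override above only changes the advertised URL, not the address the dev server binds to. A small sketch; the port value is hypothetical:

// Sketch: deriving the advertised dev-server URL from the HOSTNAME env var.
const hostname = process.env.HOSTNAME || 'localhost';
const port = 5173; // hypothetical port
const url = `http://${hostname}:${port}`;
// HOSTNAME unset       -> http://localhost:5173
// HOSTNAME=myhost.lan  -> http://myhost.lan:5173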
338
apps/server/src/services/event-history-service.ts
Normal file
@@ -0,0 +1,338 @@
/**
 * Event History Service - Stores and retrieves event records for debugging and replay
 *
 * Provides persistent storage for events in {projectPath}/.automaker/events/
 * Each event is stored as a separate JSON file with an index for quick listing.
 *
 * Features:
 * - Store events when they occur
 * - List and filter historical events
 * - Replay events to test hook configurations
 * - Delete old events to manage disk space
 */

import { createLogger } from '@automaker/utils';
import * as secureFs from '../lib/secure-fs.js';
import {
  getEventHistoryDir,
  getEventHistoryIndexPath,
  getEventPath,
  ensureEventHistoryDir,
} from '@automaker/platform';
import type {
  StoredEvent,
  StoredEventIndex,
  StoredEventSummary,
  EventHistoryFilter,
  EventHookTrigger,
} from '@automaker/types';
import { DEFAULT_EVENT_HISTORY_INDEX } from '@automaker/types';
import { randomUUID } from 'crypto';

const logger = createLogger('EventHistoryService');

/** Maximum events to keep in the index (oldest are pruned) */
const MAX_EVENTS_IN_INDEX = 1000;

/**
 * Atomic file write - write to temp file then rename
 */
async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
  const tempPath = `${filePath}.tmp.${Date.now()}`;
  const content = JSON.stringify(data, null, 2);

  try {
    await secureFs.writeFile(tempPath, content, 'utf-8');
    await secureFs.rename(tempPath, filePath);
  } catch (error) {
    try {
      await secureFs.unlink(tempPath);
    } catch {
      // Ignore cleanup errors
    }
    throw error;
  }
}

/**
 * Safely read JSON file with fallback to default
 */
async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
  try {
    const content = (await secureFs.readFile(filePath, 'utf-8')) as string;
    return JSON.parse(content) as T;
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      return defaultValue;
    }
    logger.error(`Error reading ${filePath}:`, error);
    return defaultValue;
  }
}

/**
 * Input for storing a new event
 */
export interface StoreEventInput {
  trigger: EventHookTrigger;
  projectPath: string;
  featureId?: string;
  featureName?: string;
  error?: string;
  errorType?: string;
  passes?: boolean;
  metadata?: Record<string, unknown>;
}

/**
 * EventHistoryService - Manages persistent storage of events
 */
export class EventHistoryService {
  /**
   * Store a new event to history
   *
   * @param input - Event data to store
   * @returns Promise resolving to the stored event
   */
  async storeEvent(input: StoreEventInput): Promise<StoredEvent> {
    const { projectPath, trigger, featureId, featureName, error, errorType, passes, metadata } =
      input;

    // Ensure events directory exists
    await ensureEventHistoryDir(projectPath);

    const eventId = `evt-${Date.now()}-${randomUUID().slice(0, 8)}`;
    const timestamp = new Date().toISOString();
    const projectName = this.extractProjectName(projectPath);

    const event: StoredEvent = {
      id: eventId,
      trigger,
      timestamp,
      projectPath,
      projectName,
      featureId,
      featureName,
      error,
      errorType,
      passes,
      metadata,
    };

    // Write the full event to its own file
    const eventPath = getEventPath(projectPath, eventId);
    await atomicWriteJson(eventPath, event);

    // Update the index
    await this.addToIndex(projectPath, event);

    logger.info(`Stored event ${eventId} (${trigger}) for project ${projectName}`);

    return event;
  }

  /**
   * Get all events for a project with optional filtering
   *
   * @param projectPath - Absolute path to project directory
   * @param filter - Optional filter criteria
   * @returns Promise resolving to array of event summaries
   */
  async getEvents(projectPath: string, filter?: EventHistoryFilter): Promise<StoredEventSummary[]> {
    const indexPath = getEventHistoryIndexPath(projectPath);
    const index = await readJsonFile<StoredEventIndex>(indexPath, DEFAULT_EVENT_HISTORY_INDEX);

    let events = [...index.events];

    // Apply filters
    if (filter) {
      if (filter.trigger) {
        events = events.filter((e) => e.trigger === filter.trigger);
      }
      if (filter.featureId) {
        events = events.filter((e) => e.featureId === filter.featureId);
      }
      if (filter.since) {
        const sinceDate = new Date(filter.since).getTime();
        events = events.filter((e) => new Date(e.timestamp).getTime() >= sinceDate);
      }
      if (filter.until) {
        const untilDate = new Date(filter.until).getTime();
        events = events.filter((e) => new Date(e.timestamp).getTime() <= untilDate);
      }
    }

    // Sort by timestamp (newest first)
    events.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());

    // Apply pagination
    if (filter?.offset) {
      events = events.slice(filter.offset);
    }
    if (filter?.limit) {
      events = events.slice(0, filter.limit);
    }

    return events;
  }

  /**
   * Get a single event by ID
   *
   * @param projectPath - Absolute path to project directory
   * @param eventId - Event identifier
   * @returns Promise resolving to the full event or null if not found
   */
  async getEvent(projectPath: string, eventId: string): Promise<StoredEvent | null> {
    const eventPath = getEventPath(projectPath, eventId);
    try {
      const content = (await secureFs.readFile(eventPath, 'utf-8')) as string;
      return JSON.parse(content) as StoredEvent;
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
        return null;
      }
      logger.error(`Error reading event ${eventId}:`, error);
      return null;
    }
  }

  /**
   * Delete an event by ID
   *
   * @param projectPath - Absolute path to project directory
   * @param eventId - Event identifier
   * @returns Promise resolving to true if deleted
   */
  async deleteEvent(projectPath: string, eventId: string): Promise<boolean> {
    // Remove from index
    const indexPath = getEventHistoryIndexPath(projectPath);
    const index = await readJsonFile<StoredEventIndex>(indexPath, DEFAULT_EVENT_HISTORY_INDEX);

    const initialLength = index.events.length;
    index.events = index.events.filter((e) => e.id !== eventId);

    if (index.events.length === initialLength) {
      return false; // Event not found in index
    }

    await atomicWriteJson(indexPath, index);

    // Delete the event file
    const eventPath = getEventPath(projectPath, eventId);
    try {
      await secureFs.unlink(eventPath);
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
        logger.error(`Error deleting event file ${eventId}:`, error);
      }
    }

    logger.info(`Deleted event ${eventId}`);
    return true;
  }

  /**
   * Clear all events for a project
   *
   * @param projectPath - Absolute path to project directory
   * @returns Promise resolving to number of events cleared
   */
  async clearEvents(projectPath: string): Promise<number> {
    const indexPath = getEventHistoryIndexPath(projectPath);
    const index = await readJsonFile<StoredEventIndex>(indexPath, DEFAULT_EVENT_HISTORY_INDEX);

    const count = index.events.length;

    // Delete all event files
    for (const event of index.events) {
      const eventPath = getEventPath(projectPath, event.id);
      try {
        await secureFs.unlink(eventPath);
      } catch (error) {
        if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
          logger.error(`Error deleting event file ${event.id}:`, error);
        }
      }
    }

    // Reset the index
    await atomicWriteJson(indexPath, DEFAULT_EVENT_HISTORY_INDEX);

    logger.info(`Cleared ${count} events for project`);
    return count;
  }

  /**
   * Get event count for a project
   *
   * @param projectPath - Absolute path to project directory
   * @param filter - Optional filter criteria
   * @returns Promise resolving to event count
   */
  async getEventCount(projectPath: string, filter?: EventHistoryFilter): Promise<number> {
    const events = await this.getEvents(projectPath, {
      ...filter,
      limit: undefined,
      offset: undefined,
    });
    return events.length;
  }

  /**
   * Add an event to the index (internal)
   */
  private async addToIndex(projectPath: string, event: StoredEvent): Promise<void> {
    const indexPath = getEventHistoryIndexPath(projectPath);
    const index = await readJsonFile<StoredEventIndex>(indexPath, DEFAULT_EVENT_HISTORY_INDEX);

    const summary: StoredEventSummary = {
      id: event.id,
      trigger: event.trigger,
      timestamp: event.timestamp,
      featureName: event.featureName,
      featureId: event.featureId,
    };

    // Add to beginning (newest first)
    index.events.unshift(summary);

    // Prune old events if over limit
    if (index.events.length > MAX_EVENTS_IN_INDEX) {
      const removed = index.events.splice(MAX_EVENTS_IN_INDEX);
      // Delete the pruned event files
      for (const oldEvent of removed) {
        const eventPath = getEventPath(projectPath, oldEvent.id);
        try {
          await secureFs.unlink(eventPath);
        } catch {
          // Ignore deletion errors for pruned events
        }
      }
      logger.info(`Pruned ${removed.length} old events from history`);
    }

    await atomicWriteJson(indexPath, index);
  }

  /**
   * Extract project name from path
   */
  private extractProjectName(projectPath: string): string {
    const parts = projectPath.split(/[/\\]/);
    return parts[parts.length - 1] || projectPath;
  }
}

// Singleton instance
let eventHistoryServiceInstance: EventHistoryService | null = null;

/**
 * Get the singleton event history service instance
 */
export function getEventHistoryService(): EventHistoryService {
  if (!eventHistoryServiceInstance) {
    eventHistoryServiceInstance = new EventHistoryService();
  }
  return eventHistoryServiceInstance;
}
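A minimal usage sketch for the new service, based on the API shown above; the project path, feature ID, and error text are hypothetical, and an async context is assumed:

import { getEventHistoryService } from './services/event-history-service.js';

const history = getEventHistoryService();

// Store an event (normally done by EventHookService).
await history.storeEvent({
  trigger: 'feature_error',
  projectPath: '/home/user/my-project', // hypothetical path
  featureId: 'feat-123',
  error: 'Build failed',
});

// List the 20 most recent feature_error summaries for the same project.
const recent = await history.getEvents('/home/user/my-project', {
  trigger: 'feature_error',
  limit: 20,
});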
@@ -5,7 +5,10 @@
 * - Shell commands: Executed with configurable timeout
 * - HTTP webhooks: POST/GET/PUT/PATCH requests with variable substitution
 *
 * Also stores events to history for debugging and replay.
 *
 * Supported events:
 * - feature_created: A new feature was created
 * - feature_success: Feature completed successfully
 * - feature_error: Feature failed with an error
 * - auto_mode_complete: Auto mode finished all features (idle state)
@@ -17,6 +20,7 @@ import { promisify } from 'util';
import { createLogger } from '@automaker/utils';
import type { EventEmitter } from '../lib/events.js';
import type { SettingsService } from './settings-service.js';
import type { EventHistoryService } from './event-history-service.js';
import type {
  EventHook,
  EventHookTrigger,
@@ -60,27 +64,45 @@ interface AutoModeEventPayload {
  projectPath?: string;
}

/**
 * Feature created event payload structure
 */
interface FeatureCreatedPayload {
  featureId: string;
  featureName?: string;
  projectPath: string;
}

/**
 * Event Hook Service
 *
 * Manages execution of user-configured event hooks in response to system events.
 * Also stores events to history for debugging and replay.
 */
export class EventHookService {
  private emitter: EventEmitter | null = null;
  private settingsService: SettingsService | null = null;
  private eventHistoryService: EventHistoryService | null = null;
  private unsubscribe: (() => void) | null = null;

  /**
   * Initialize the service with event emitter and settings service
   * Initialize the service with event emitter, settings service, and event history service
   */
  initialize(emitter: EventEmitter, settingsService: SettingsService): void {
  initialize(
    emitter: EventEmitter,
    settingsService: SettingsService,
    eventHistoryService?: EventHistoryService
  ): void {
    this.emitter = emitter;
    this.settingsService = settingsService;
    this.eventHistoryService = eventHistoryService || null;

    // Subscribe to auto-mode events
    // Subscribe to events
    this.unsubscribe = emitter.subscribe((type, payload) => {
      if (type === 'auto-mode:event') {
        this.handleAutoModeEvent(payload as AutoModeEventPayload);
      } else if (type === 'feature:created') {
        this.handleFeatureCreatedEvent(payload as FeatureCreatedPayload);
      }
    });

@@ -97,6 +119,7 @@ export class EventHookService {
    }
    this.emitter = null;
    this.settingsService = null;
    this.eventHistoryService = null;
  }

  /**
@@ -137,17 +160,51 @@ export class EventHookService {
      eventType: trigger,
    };

    // Execute matching hooks
    await this.executeHooksForTrigger(trigger, context);
    // Execute matching hooks (pass passes for feature completion events)
    await this.executeHooksForTrigger(trigger, context, { passes: payload.passes });
  }

  /**
   * Execute all enabled hooks matching the given trigger
   * Handle feature:created events and trigger matching hooks
   */
  private async handleFeatureCreatedEvent(payload: FeatureCreatedPayload): Promise<void> {
    const context: HookContext = {
      featureId: payload.featureId,
      featureName: payload.featureName,
      projectPath: payload.projectPath,
      projectName: this.extractProjectName(payload.projectPath),
      timestamp: new Date().toISOString(),
      eventType: 'feature_created',
    };

    await this.executeHooksForTrigger('feature_created', context);
  }

  /**
   * Execute all enabled hooks matching the given trigger and store event to history
   */
  private async executeHooksForTrigger(
    trigger: EventHookTrigger,
    context: HookContext
    context: HookContext,
    additionalData?: { passes?: boolean }
  ): Promise<void> {
    // Store event to history (even if no hooks match)
    if (this.eventHistoryService && context.projectPath) {
      try {
        await this.eventHistoryService.storeEvent({
          trigger,
          projectPath: context.projectPath,
          featureId: context.featureId,
          featureName: context.featureName,
          error: context.error,
          errorType: context.errorType,
          passes: additionalData?.passes,
        });
      } catch (error) {
        logger.error('Failed to store event to history:', error);
      }
    }

    if (!this.settingsService) {
      logger.warn('Settings service not available');
      return;
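The ordering above matters: the event is persisted before any hook filtering, so history reflects every trigger even when no hooks are configured, and a storage failure never blocks hook execution. A condensed, self-contained sketch of that dispatch order; the names here are illustrative, not the service's API:

// Sketch: persist first, then run hooks; storage errors are logged, not rethrown.
async function dispatch(
  trigger: string,
  context: { projectPath?: string },
  store: (event: { trigger: string; projectPath: string }) => Promise<void>,
  runHooks: () => Promise<void>
): Promise<void> {
  if (context.projectPath) {
    try {
      await store({ trigger, projectPath: context.projectPath });
    } catch (error) {
      console.error('Failed to store event to history:', error);
    }
  }
  await runHooks(); // hooks still execute even if storage failed
}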
@@ -5,14 +5,22 @@

import path from 'path';
import type { Feature, DescriptionHistoryEntry } from '@automaker/types';
import { createLogger } from '@automaker/utils';
import {
  createLogger,
  atomicWriteJson,
  readJsonWithRecovery,
  logRecoveryWarning,
  DEFAULT_BACKUP_COUNT,
} from '@automaker/utils';
import * as secureFs from '../lib/secure-fs.js';
import {
  getFeaturesDir,
  getFeatureDir,
  getFeatureImagesDir,
  getAppSpecPath,
  ensureAutomakerDir,
} from '@automaker/platform';
import { addImplementedFeature, type ImplementedFeature } from '../lib/xml-extractor.js';

const logger = createLogger('FeatureLoader');

@@ -192,31 +200,31 @@ export class FeatureLoader {
    })) as any[];
    const featureDirs = entries.filter((entry) => entry.isDirectory());

    // Load all features concurrently (secureFs has built-in concurrency limiting)
    // Load all features concurrently with automatic recovery from backups
    const featurePromises = featureDirs.map(async (dir) => {
      const featureId = dir.name;
      const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);

      try {
        const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
        const feature = JSON.parse(content);
      // Use recovery-enabled read to handle corrupted files
      const result = await readJsonWithRecovery<Feature | null>(featureJsonPath, null, {
        maxBackups: DEFAULT_BACKUP_COUNT,
        autoRestore: true,
      });

        if (!feature.id) {
          logger.warn(`Feature ${featureId} missing required 'id' field, skipping`);
          return null;
        }
      logRecoveryWarning(result, `Feature ${featureId}`, logger);

        return feature as Feature;
      } catch (error) {
        if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
          return null;
        } else if (error instanceof SyntaxError) {
          logger.warn(`Failed to parse feature.json for ${featureId}: ${error.message}`);
        } else {
          logger.error(`Failed to load feature ${featureId}:`, (error as Error).message);
        }
      const feature = result.data;

      if (!feature) {
        return null;
      }

      if (!feature.id) {
        logger.warn(`Feature ${featureId} missing required 'id' field, skipping`);
        return null;
      }

      return feature;
    });

    const results = await Promise.all(featurePromises);
@@ -236,21 +244,85 @@ export class FeatureLoader {
    }
  }

  /**
   * Normalize a title for comparison (case-insensitive, trimmed)
   */
  private normalizeTitle(title: string): string {
    return title.toLowerCase().trim();
  }

  /**
   * Find a feature by its title (case-insensitive match)
   * @param projectPath - Path to the project
   * @param title - Title to search for
   * @returns The matching feature or null if not found
   */
  async findByTitle(projectPath: string, title: string): Promise<Feature | null> {
    if (!title || !title.trim()) {
      return null;
    }

    const normalizedTitle = this.normalizeTitle(title);
    const features = await this.getAll(projectPath);

    for (const feature of features) {
      if (feature.title && this.normalizeTitle(feature.title) === normalizedTitle) {
        return feature;
      }
    }

    return null;
  }

  /**
   * Check if a title already exists on another feature (for duplicate detection)
   * @param projectPath - Path to the project
   * @param title - Title to check
   * @param excludeFeatureId - Optional feature ID to exclude from the check (for updates)
   * @returns The duplicate feature if found, null otherwise
   */
  async findDuplicateTitle(
    projectPath: string,
    title: string,
    excludeFeatureId?: string
  ): Promise<Feature | null> {
    if (!title || !title.trim()) {
      return null;
    }

    const normalizedTitle = this.normalizeTitle(title);
    const features = await this.getAll(projectPath);

    for (const feature of features) {
      // Skip the feature being updated (if provided)
      if (excludeFeatureId && feature.id === excludeFeatureId) {
        continue;
      }

      if (feature.title && this.normalizeTitle(feature.title) === normalizedTitle) {
        return feature;
      }
    }

    return null;
  }

  /**
   * Get a single feature by ID
   * Uses automatic recovery from backups if the main file is corrupted
   */
  async get(projectPath: string, featureId: string): Promise<Feature | null> {
    try {
      const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
      const content = (await secureFs.readFile(featureJsonPath, 'utf-8')) as string;
      return JSON.parse(content);
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
        return null;
      }
      logger.error(`Failed to get feature ${featureId}:`, error);
      throw error;
    }
    const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);

    // Use recovery-enabled read to handle corrupted files
    const result = await readJsonWithRecovery<Feature | null>(featureJsonPath, null, {
      maxBackups: DEFAULT_BACKUP_COUNT,
      autoRestore: true,
    });

    logRecoveryWarning(result, `Feature ${featureId}`, logger);

    return result.data;
  }

  /**
@@ -294,8 +366,8 @@ export class FeatureLoader {
      descriptionHistory: initialHistory,
    };

    // Write feature.json
    await secureFs.writeFile(featureJsonPath, JSON.stringify(feature, null, 2), 'utf-8');
    // Write feature.json atomically with backup support
    await atomicWriteJson(featureJsonPath, feature, { backupCount: DEFAULT_BACKUP_COUNT });

    logger.info(`Created feature ${featureId}`);
    return feature;
@@ -379,9 +451,9 @@ export class FeatureLoader {
      descriptionHistory: updatedHistory,
    };

    // Write back to file
    // Write back to file atomically with backup support
    const featureJsonPath = this.getFeatureJsonPath(projectPath, featureId);
    await secureFs.writeFile(featureJsonPath, JSON.stringify(updatedFeature, null, 2), 'utf-8');
    await atomicWriteJson(featureJsonPath, updatedFeature, { backupCount: DEFAULT_BACKUP_COUNT });

    logger.info(`Updated feature ${featureId}`);
    return updatedFeature;
@@ -460,4 +532,64 @@ export class FeatureLoader {
      }
    }
  }

  /**
   * Sync a completed feature to the app_spec.txt implemented_features section
   *
   * When a feature is completed, this method adds it to the implemented_features
   * section of the project's app_spec.txt file. This keeps the spec in sync
   * with the actual state of the codebase.
   *
   * @param projectPath - Path to the project
   * @param feature - The feature to sync (must have title or description)
   * @param fileLocations - Optional array of file paths where the feature was implemented
   * @returns True if the spec was updated, false if no spec exists or feature was skipped
   */
  async syncFeatureToAppSpec(
    projectPath: string,
    feature: Feature,
    fileLocations?: string[]
  ): Promise<boolean> {
    try {
      const appSpecPath = getAppSpecPath(projectPath);

      // Read the current app_spec.txt
      let specContent: string;
      try {
        specContent = (await secureFs.readFile(appSpecPath, 'utf-8')) as string;
      } catch (error) {
        if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
          logger.info(`No app_spec.txt found for project, skipping sync for feature ${feature.id}`);
          return false;
        }
        throw error;
      }

      // Build the implemented feature entry
      const featureName = feature.title || `Feature: ${feature.id}`;
      const implementedFeature: ImplementedFeature = {
        name: featureName,
        description: feature.description,
        ...(fileLocations && fileLocations.length > 0 ? { file_locations: fileLocations } : {}),
      };

      // Add the feature to the implemented_features section
      const updatedSpecContent = addImplementedFeature(specContent, implementedFeature);

      // Check if the content actually changed (feature might already exist)
      if (updatedSpecContent === specContent) {
        logger.info(`Feature "${featureName}" already exists in app_spec.txt, skipping`);
        return false;
      }

      // Write the updated spec back to the file
      await secureFs.writeFile(appSpecPath, updatedSpecContent, 'utf-8');

      logger.info(`Synced feature "${featureName}" to app_spec.txt`);
      return true;
    } catch (error) {
      logger.error(`Failed to sync feature ${feature.id} to app_spec.txt:`, error);
      throw error;
    }
  }
}
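A short usage sketch for the new title helpers; the constructor call (arguments, if any, are omitted) and all values below are hypothetical:

const loader = new FeatureLoader(); // assuming the default constructor

// Reject a rename that would collide with another feature's title.
const duplicate = await loader.findDuplicateTitle('/home/user/my-project', 'Dark Mode', 'feat-42');
if (duplicate) {
  console.warn(`Title already used by feature ${duplicate.id}`);
}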
280
apps/server/src/services/notification-service.ts
Normal file
@@ -0,0 +1,280 @@
/**
 * Notification Service - Handles reading/writing notifications to JSON files
 *
 * Provides persistent storage for project-level notifications in
 * {projectPath}/.automaker/notifications.json
 *
 * Notifications alert users when:
 * - Features reach specific statuses (waiting_approval, verified)
 * - Long-running operations complete (spec generation)
 */

import { createLogger } from '@automaker/utils';
import * as secureFs from '../lib/secure-fs.js';
import { getNotificationsPath, ensureAutomakerDir } from '@automaker/platform';
import type { Notification, NotificationsFile, NotificationType } from '@automaker/types';
import { DEFAULT_NOTIFICATIONS_FILE } from '@automaker/types';
import type { EventEmitter } from '../lib/events.js';
import { randomUUID } from 'crypto';

const logger = createLogger('NotificationService');

/**
 * Atomic file write - write to temp file then rename
 */
async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
  const tempPath = `${filePath}.tmp.${Date.now()}`;
  const content = JSON.stringify(data, null, 2);

  try {
    await secureFs.writeFile(tempPath, content, 'utf-8');
    await secureFs.rename(tempPath, filePath);
  } catch (error) {
    // Clean up temp file if it exists
    try {
      await secureFs.unlink(tempPath);
    } catch {
      // Ignore cleanup errors
    }
    throw error;
  }
}

/**
 * Safely read JSON file with fallback to default
 */
async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
  try {
    const content = (await secureFs.readFile(filePath, 'utf-8')) as string;
    return JSON.parse(content) as T;
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      return defaultValue;
    }
    logger.error(`Error reading ${filePath}:`, error);
    return defaultValue;
  }
}

/**
 * Input for creating a new notification
 */
export interface CreateNotificationInput {
  type: NotificationType;
  title: string;
  message: string;
  featureId?: string;
  projectPath: string;
}

/**
 * NotificationService - Manages persistent storage of notifications
 *
 * Handles reading and writing notifications to JSON files with atomic operations
 * for reliability. Each project has its own notifications.json file.
 */
export class NotificationService {
  private events: EventEmitter | null = null;

  /**
   * Set the event emitter for broadcasting notification events
   */
  setEventEmitter(events: EventEmitter): void {
    this.events = events;
  }

  /**
   * Get all notifications for a project
   *
   * @param projectPath - Absolute path to project directory
   * @returns Promise resolving to array of notifications
   */
  async getNotifications(projectPath: string): Promise<Notification[]> {
    const notificationsPath = getNotificationsPath(projectPath);
    const file = await readJsonFile<NotificationsFile>(
      notificationsPath,
      DEFAULT_NOTIFICATIONS_FILE
    );
    // Filter out dismissed notifications and sort by date (newest first)
    return file.notifications
      .filter((n) => !n.dismissed)
      .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
  }

  /**
   * Get unread notification count for a project
   *
   * @param projectPath - Absolute path to project directory
   * @returns Promise resolving to unread count
   */
  async getUnreadCount(projectPath: string): Promise<number> {
    const notifications = await this.getNotifications(projectPath);
    return notifications.filter((n) => !n.read).length;
  }

  /**
   * Create a new notification
   *
   * @param input - Notification creation input
   * @returns Promise resolving to the created notification
   */
  async createNotification(input: CreateNotificationInput): Promise<Notification> {
    const { projectPath, type, title, message, featureId } = input;

    // Ensure automaker directory exists
    await ensureAutomakerDir(projectPath);

    const notificationsPath = getNotificationsPath(projectPath);
    const file = await readJsonFile<NotificationsFile>(
      notificationsPath,
      DEFAULT_NOTIFICATIONS_FILE
    );

    const notification: Notification = {
      id: randomUUID(),
      type,
      title,
      message,
      createdAt: new Date().toISOString(),
      read: false,
      dismissed: false,
      featureId,
      projectPath,
    };

    file.notifications.push(notification);
    await atomicWriteJson(notificationsPath, file);

    logger.info(`Created notification: ${title} for project ${projectPath}`);

    // Emit event for real-time updates
    if (this.events) {
      this.events.emit('notification:created', notification);
    }

    return notification;
  }

  /**
   * Mark a notification as read
   *
   * @param projectPath - Absolute path to project directory
   * @param notificationId - ID of the notification to mark as read
   * @returns Promise resolving to the updated notification or null if not found
   */
  async markAsRead(projectPath: string, notificationId: string): Promise<Notification | null> {
    const notificationsPath = getNotificationsPath(projectPath);
    const file = await readJsonFile<NotificationsFile>(
      notificationsPath,
      DEFAULT_NOTIFICATIONS_FILE
    );

    const notification = file.notifications.find((n) => n.id === notificationId);
    if (!notification) {
      return null;
    }

    notification.read = true;
    await atomicWriteJson(notificationsPath, file);

    logger.info(`Marked notification ${notificationId} as read`);
    return notification;
  }

  /**
   * Mark all notifications as read for a project
   *
   * @param projectPath - Absolute path to project directory
   * @returns Promise resolving to number of notifications marked as read
   */
  async markAllAsRead(projectPath: string): Promise<number> {
    const notificationsPath = getNotificationsPath(projectPath);
    const file = await readJsonFile<NotificationsFile>(
      notificationsPath,
      DEFAULT_NOTIFICATIONS_FILE
    );

    let count = 0;
    for (const notification of file.notifications) {
      if (!notification.read && !notification.dismissed) {
        notification.read = true;
        count++;
      }
    }

    if (count > 0) {
      await atomicWriteJson(notificationsPath, file);
      logger.info(`Marked ${count} notifications as read`);
    }

    return count;
  }

  /**
   * Dismiss a notification
   *
   * @param projectPath - Absolute path to project directory
   * @param notificationId - ID of the notification to dismiss
   * @returns Promise resolving to true if notification was dismissed
   */
  async dismissNotification(projectPath: string, notificationId: string): Promise<boolean> {
    const notificationsPath = getNotificationsPath(projectPath);
    const file = await readJsonFile<NotificationsFile>(
      notificationsPath,
      DEFAULT_NOTIFICATIONS_FILE
    );

    const notification = file.notifications.find((n) => n.id === notificationId);
    if (!notification) {
      return false;
    }

    notification.dismissed = true;
    await atomicWriteJson(notificationsPath, file);

    logger.info(`Dismissed notification ${notificationId}`);
    return true;
  }

  /**
   * Dismiss all notifications for a project
   *
   * @param projectPath - Absolute path to project directory
   * @returns Promise resolving to number of notifications dismissed
   */
  async dismissAll(projectPath: string): Promise<number> {
    const notificationsPath = getNotificationsPath(projectPath);
    const file = await readJsonFile<NotificationsFile>(
      notificationsPath,
      DEFAULT_NOTIFICATIONS_FILE
    );

    let count = 0;
    for (const notification of file.notifications) {
      if (!notification.dismissed) {
        notification.dismissed = true;
        count++;
      }
    }

    if (count > 0) {
      await atomicWriteJson(notificationsPath, file);
      logger.info(`Dismissed ${count} notifications`);
    }

    return count;
  }
}

// Singleton instance
let notificationServiceInstance: NotificationService | null = null;

/**
 * Get the singleton notification service instance
 */
export function getNotificationService(): NotificationService {
  if (!notificationServiceInstance) {
    notificationServiceInstance = new NotificationService();
  }
  return notificationServiceInstance;
}
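A minimal sketch of the notification lifecycle using the API above; the type literal, path, and IDs are hypothetical, and an async context is assumed:

import { getNotificationService } from './services/notification-service.js';

const notifications = getNotificationService();

// Create a notification, then mark it read.
const created = await notifications.createNotification({
  type: 'waiting_approval', // assumed to be a valid NotificationType
  title: 'Feature awaiting approval',
  message: 'Feature feat-123 is ready for review',
  featureId: 'feat-123',
  projectPath: '/home/user/my-project',
});
await notifications.markAsRead('/home/user/my-project', created.id);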
@@ -7,7 +7,7 @@
 * - Per-project settings ({projectPath}/.automaker/settings.json)
 */

import { createLogger } from '@automaker/utils';
import { createLogger, atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';
import * as secureFs from '../lib/secure-fs.js';

import {
@@ -42,28 +42,8 @@ import {
const logger = createLogger('SettingsService');

/**
 * Atomic file write - write to temp file then rename
 */
async function atomicWriteJson(filePath: string, data: unknown): Promise<void> {
  const tempPath = `${filePath}.tmp.${Date.now()}`;
  const content = JSON.stringify(data, null, 2);

  try {
    await secureFs.writeFile(tempPath, content, 'utf-8');
    await secureFs.rename(tempPath, filePath);
  } catch (error) {
    // Clean up temp file if it exists
    try {
      await secureFs.unlink(tempPath);
    } catch {
      // Ignore cleanup errors
    }
    throw error;
  }
}

/**
 * Safely read JSON file with fallback to default
 * Wrapper for readJsonFile from utils that uses the local secureFs
 * to maintain compatibility with the server's secure file system
 */
async function readJsonFile<T>(filePath: string, defaultValue: T): Promise<T> {
  try {
@@ -90,6 +70,13 @@ async function fileExists(filePath: string): Promise<boolean> {
  }
}

/**
 * Write settings atomically with backup support
 */
async function writeSettingsJson(filePath: string, data: unknown): Promise<void> {
  await atomicWriteJson(filePath, data, { backupCount: DEFAULT_BACKUP_COUNT });
}

/**
 * SettingsService - Manages persistent storage of user settings and credentials
 *
@@ -180,7 +167,7 @@ export class SettingsService {
    if (needsSave) {
      try {
        await ensureDataDir(this.dataDir);
        await atomicWriteJson(settingsPath, result);
        await writeSettingsJson(settingsPath, result);
        logger.info('Settings migration complete');
      } catch (error) {
        logger.error('Failed to save migrated settings:', error);
@@ -340,7 +327,7 @@ export class SettingsService {
      };
    }

    await atomicWriteJson(settingsPath, updated);
    await writeSettingsJson(settingsPath, updated);
    logger.info('Global settings updated');

    return updated;
@@ -414,7 +401,7 @@ export class SettingsService {
      };
    }

    await atomicWriteJson(credentialsPath, updated);
    await writeSettingsJson(credentialsPath, updated);
    logger.info('Credentials updated');

    return updated;
@@ -525,7 +512,7 @@ export class SettingsService {
      };
    }

    await atomicWriteJson(settingsPath, updated);
    await writeSettingsJson(settingsPath, updated);
    logger.info(`Project settings updated for ${projectPath}`);

    return updated;
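The writeSettingsJson wrapper funnels every settings write through the shared atomicWriteJson from @automaker/utils, which, unlike the deleted local helper, also rotates backups. A sketch of the call; the path is hypothetical and the backup file naming is an assumption:

import { atomicWriteJson, DEFAULT_BACKUP_COUNT } from '@automaker/utils';

// Writes via a temp file + rename, keeping up to DEFAULT_BACKUP_COUNT prior
// versions (e.g. settings.json.bak.1, ... - exact naming may differ).
await atomicWriteJson('/home/user/.automaker/settings.json', { theme: 'dark' }, {
  backupCount: DEFAULT_BACKUP_COUNT,
});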
@@ -70,6 +70,29 @@ export class TerminalService extends EventEmitter {
  private sessions: Map<string, TerminalSession> = new Map();
  private dataCallbacks: Set<DataCallback> = new Set();
  private exitCallbacks: Set<ExitCallback> = new Set();
  private isWindows = os.platform() === 'win32';
  // On Windows, ConPTY requires AttachConsole which fails in Electron/service mode
  // Detect Electron by checking for electron-specific env vars or process properties
  private isElectron =
    !!(process.versions && (process.versions as Record<string, string>).electron) ||
    !!process.env.ELECTRON_RUN_AS_NODE;
  private useConptyFallback = false; // Track if we need to use winpty fallback on Windows

  /**
   * Kill a PTY process with platform-specific handling.
   * Windows doesn't support Unix signals like SIGTERM/SIGKILL, so we call kill() without arguments.
   * On Unix-like systems (macOS, Linux), we can specify the signal.
   *
   * @param ptyProcess - The PTY process to kill
   * @param signal - The signal to send on Unix-like systems (default: 'SIGTERM')
   */
  private killPtyProcess(ptyProcess: pty.IPty, signal: string = 'SIGTERM'): void {
    if (this.isWindows) {
      ptyProcess.kill();
    } else {
      ptyProcess.kill(signal);
    }
  }

  /**
   * Detect the best shell for the current platform
@@ -322,13 +345,60 @@ export class TerminalService extends EventEmitter {

    logger.info(`Creating session ${id} with shell: ${shell} in ${cwd}`);

    const ptyProcess = pty.spawn(shell, shellArgs, {
    // Build PTY spawn options
    const ptyOptions: pty.IPtyForkOptions = {
      name: 'xterm-256color',
      cols: options.cols || 80,
      rows: options.rows || 24,
      cwd,
      env,
    });
    };

    // On Windows, always use winpty instead of ConPTY
    // ConPTY requires AttachConsole which fails in many contexts:
    // - Electron apps without a console
    // - VS Code integrated terminal
    // - Spawned from other applications
    // The error happens in a subprocess so we can't catch it - must proactively disable
    if (this.isWindows) {
      (ptyOptions as pty.IWindowsPtyForkOptions).useConpty = false;
      logger.info(
        `[createSession] Using winpty for session ${id} (ConPTY disabled for compatibility)`
      );
    }

    let ptyProcess: pty.IPty;
    try {
      ptyProcess = pty.spawn(shell, shellArgs, ptyOptions);
    } catch (spawnError) {
      const errorMessage = spawnError instanceof Error ? spawnError.message : String(spawnError);

      // Check for Windows ConPTY-specific errors
      if (this.isWindows && errorMessage.includes('AttachConsole failed')) {
        // ConPTY failed - try winpty fallback
        if (!this.useConptyFallback) {
          logger.warn(`[createSession] ConPTY AttachConsole failed, retrying with winpty fallback`);
          this.useConptyFallback = true;

          try {
            (ptyOptions as pty.IWindowsPtyForkOptions).useConpty = false;
            ptyProcess = pty.spawn(shell, shellArgs, ptyOptions);
            logger.info(`[createSession] Successfully spawned session ${id} with winpty fallback`);
          } catch (fallbackError) {
            const fallbackMessage =
              fallbackError instanceof Error ? fallbackError.message : String(fallbackError);
            logger.error(`[createSession] Winpty fallback also failed:`, fallbackMessage);
            return null;
          }
        } else {
          logger.error(`[createSession] PTY spawn failed (winpty):`, errorMessage);
          return null;
        }
      } else {
        logger.error(`[createSession] PTY spawn failed:`, errorMessage);
        return null;
      }
    }

    const session: TerminalSession = {
      id,
@@ -392,7 +462,11 @@ export class TerminalService extends EventEmitter {

    // Handle exit
    ptyProcess.onExit(({ exitCode }) => {
      logger.info(`Session ${id} exited with code ${exitCode}`);
      const exitMessage =
        exitCode === undefined || exitCode === null
          ? 'Session terminated'
          : `Session exited with code ${exitCode}`;
      logger.info(`${exitMessage} (${id})`);
      this.sessions.delete(id);
      this.exitCallbacks.forEach((cb) => cb(id, exitCode));
      this.emit('exit', id, exitCode);
@@ -477,8 +551,9 @@ export class TerminalService extends EventEmitter {
    }

    // First try graceful SIGTERM to allow process cleanup
    // On Windows, killPtyProcess calls kill() without signal since Windows doesn't support Unix signals
    logger.info(`Session ${sessionId} sending SIGTERM`);
    session.pty.kill('SIGTERM');
    this.killPtyProcess(session.pty, 'SIGTERM');

    // Schedule SIGKILL fallback if process doesn't exit gracefully
    // The onExit handler will remove session from map when it actually exits
@@ -486,7 +561,7 @@ export class TerminalService extends EventEmitter {
      if (this.sessions.has(sessionId)) {
        logger.info(`Session ${sessionId} still alive after SIGTERM, sending SIGKILL`);
        try {
          session.pty.kill('SIGKILL');
          this.killPtyProcess(session.pty, 'SIGKILL');
        } catch {
          // Process may have already exited
        }
@@ -588,7 +663,8 @@ export class TerminalService extends EventEmitter {
      if (session.flushTimeout) {
        clearTimeout(session.flushTimeout);
      }
      session.pty.kill();
      // Use platform-specific kill to ensure proper termination on Windows
      this.killPtyProcess(session.pty);
    } catch {
      // Ignore errors during cleanup
    }
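The killPtyProcess helper above encapsulates the platform split that the later hunks rely on. A standalone sketch of the same semantics:

import os from 'os';
import type { IPty } from 'node-pty';

function killPty(ptyProcess: IPty, signal: string = 'SIGTERM'): void {
  if (os.platform() === 'win32') {
    ptyProcess.kill(); // Windows has no Unix signals; a plain kill() terminates the PTY
  } else {
    ptyProcess.kill(signal); // Unix: SIGTERM first, with SIGKILL as a later fallback
  }
}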